Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-12-14 15:17:06 +00:00
parent 79a7da2537
commit be320585cb
35 changed files with 477 additions and 128 deletions

View file

@ -2,10 +2,8 @@
Style/OpenStructUse:
Exclude:
- app/helpers/application_settings_helper.rb
- ee/spec/db/production/license_spec.rb
- ee/spec/features/projects/new_project_spec.rb
- ee/spec/finders/template_finder_spec.rb
- ee/spec/graphql/ee/resolvers/board_lists_resolver_spec.rb
- ee/spec/helpers/ee/blob_helper_spec.rb
- ee/spec/lib/gitlab/auth/group_saml/failure_handler_spec.rb
- ee/spec/lib/gitlab/legacy_github_import/project_creator_spec.rb

View file

@ -906,7 +906,7 @@ GEM
orm_adapter (0.5.0)
os (1.1.1)
parallel (1.20.1)
parser (3.0.2.0)
parser (3.0.3.2)
ast (~> 2.4.1)
parslet (1.8.2)
pastel (0.8.0)

View file

@ -40,10 +40,14 @@ module Packages
# access to packages is ruled by:
# - project is public or the current user has access to it with at least the reporter level
# - the repository feature is available to the current_user
::Project
.in_namespace(groups)
.public_or_visible_to_user(current_user, Gitlab::Access::REPORTER)
.with_feature_available_for_user(:repository, current_user)
if current_user.is_a?(DeployToken)
current_user.accessible_projects
else
::Project
.in_namespace(groups)
.public_or_visible_to_user(current_user, Gitlab::Access::REPORTER)
.with_feature_available_for_user(:repository, current_user)
end
end
def groups

View file

@ -8,7 +8,7 @@ module Transactions
# transaction. Handles special cases when running inside a test environment,
# where tests may be wrapped in transactions
def inside_transaction?
base = Rails.env.test? ? @open_transactions_baseline.to_i : 0
base = Rails.env.test? ? open_transactions_baseline.to_i : 0
connection.open_transactions > base
end
@ -24,5 +24,15 @@ module Transactions
def reset_open_transactions_baseline
@open_transactions_baseline = 0
end
def open_transactions_baseline
return unless Rails.env.test?
if @open_transactions_baseline.nil?
return self == ApplicationRecord ? nil : superclass.open_transactions_baseline
end
@open_transactions_baseline
end
end
end

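A minimal usage sketch of the helper above, assuming (as the new `Transactions` spec later in this commit suggests) that models such as `Project` include this concern:

```ruby
# Sketch only: outside a transaction the helper reports false; inside a
# transaction opened on the model class or on ApplicationRecord it reports
# true. In the test environment, the baseline lookup added above lets a
# subclass without its own baseline fall back to its superclass.
Project.inside_transaction?      # => false

Project.transaction do
  Project.inside_transaction?    # => true
end

ApplicationRecord.transaction do
  Project.inside_transaction?    # => true
end
```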
View file

@ -1,7 +1,7 @@
# frozen_string_literal: true
class BuildDetailsEntity < Ci::JobEntity
expose :coverage, :erased_at, :duration
expose :coverage, :erased_at, :finished_at, :duration
expose :tag_list, as: :tags
expose :has_trace?, as: :has_trace
expose :stage

View file

@ -33,13 +33,13 @@ module Ci
end
def parse!(artifact)
variables = []
variables = {}
artifact.each_blob do |blob|
blob.each_line do |line|
key, value = scan_line!(line)
variables << Ci::JobVariable.new(job_id: artifact.job_id,
variables[key] = Ci::JobVariable.new(job_id: artifact.job_id,
source: :dotenv, key: key, value: value)
end
end
@ -49,7 +49,7 @@ module Ci
"Dotenv files cannot have more than #{dotenv_variable_limit} variables"
end
variables
variables.values
end
def scan_line!(line)

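A brief sketch of why switching `variables` from an Array to a Hash keyed by variable name makes the last duplicate win (the values mirror the dotenv spec added later in this commit):

```ruby
# Sketch only: assigning an existing key replaces the earlier entry, so the
# last occurrence of a duplicate key in the dotenv blob is the one returned.
variables = {}
[%w[KEY1 VAR1], %w[KEY2 VAR2], %w[KEY2 VAR3], %w[KEY1 VAR4]].each do |key, value|
  variables[key] = value
end

variables.values # => ["VAR4", "VAR3"]
```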
View file

@ -15635,6 +15635,19 @@ Represents a link related to a vulnerability.
| <a id="vulnerabilitylinkname"></a>`name` | [`String`](#string) | Name of the link. |
| <a id="vulnerabilitylinkurl"></a>`url` | [`String!`](#string) | URL of the link. |
### `VulnerabilityLocationClusterImageScanning`
Represents the location of a vulnerability found by a cluster image scan.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="vulnerabilitylocationclusterimagescanningdependency"></a>`dependency` | [`VulnerableDependency`](#vulnerabledependency) | Dependency containing the vulnerability. |
| <a id="vulnerabilitylocationclusterimagescanningimage"></a>`image` | [`String`](#string) | Name of the vulnerable container image. |
| <a id="vulnerabilitylocationclusterimagescanningkubernetesresource"></a>`kubernetesResource` | [`VulnerableKubernetesResource`](#vulnerablekubernetesresource) | Kubernetes resource which uses the vulnerable container image. |
| <a id="vulnerabilitylocationclusterimagescanningoperatingsystem"></a>`operatingSystem` | [`String`](#string) | Operating system that runs on the vulnerable container image. |
### `VulnerabilityLocationContainerScanning`
Represents the location of a vulnerability found by a container security scan.
@ -15785,6 +15798,21 @@ Represents a vulnerable dependency. Used in vulnerability location data.
| <a id="vulnerabledependencypackage"></a>`package` | [`VulnerablePackage`](#vulnerablepackage) | Package associated with the vulnerable dependency. |
| <a id="vulnerabledependencyversion"></a>`version` | [`String`](#string) | Version of the vulnerable dependency. |
### `VulnerableKubernetesResource`
Represents a vulnerable Kubernetes resource. Used in vulnerability location data.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="vulnerablekubernetesresourceagent"></a>`agent` | [`ClusterAgent`](#clusteragent) | Kubernetes Agent which performed the scan. |
| <a id="vulnerablekubernetesresourceclusterid"></a>`clusterId` | [`ClustersClusterID`](#clustersclusterid) | ID of the Cluster integration which was used to perform the scan. |
| <a id="vulnerablekubernetesresourcecontainername"></a>`containerName` | [`String!`](#string) | Name of the container that had its image scanned. |
| <a id="vulnerablekubernetesresourcekind"></a>`kind` | [`String!`](#string) | Kind of the Kubernetes resource. |
| <a id="vulnerablekubernetesresourcename"></a>`name` | [`String!`](#string) | Name of the Kubernetes resource. |
| <a id="vulnerablekubernetesresourcenamespace"></a>`namespace` | [`String!`](#string) | Kubernetes namespace which the resource resides in. |
### `VulnerablePackage`
Represents a vulnerable package. Used in vulnerability dependency data.
@ -17967,6 +17995,7 @@ Represents a vulnerability location. The fields with data will depend on the vul
One of:
- [`VulnerabilityLocationClusterImageScanning`](#vulnerabilitylocationclusterimagescanning)
- [`VulnerabilityLocationContainerScanning`](#vulnerabilitylocationcontainerscanning)
- [`VulnerabilityLocationCoverageFuzzing`](#vulnerabilitylocationcoveragefuzzing)
- [`VulnerabilityLocationDast`](#vulnerabilitylocationdast)

View file

@ -169,6 +169,11 @@ The `dotenv` report collects a set of environment variables as artifacts.
The collected variables are registered as runtime-created variables of the job,
which you can use to [set dynamic environment URLs after a job finishes](../environments/index.md#set-dynamic-environment-urls-after-a-job-finishes).
If duplicate environment variables are present in a `dotenv` report:
- In GitLab 14.6 and later, the last one specified is used.
- In GitLab 14.5 and earlier, an error occurs.
The exceptions to the [original dotenv rules](https://github.com/motdotla/dotenv#rules) are:
- The variable key can contain only letters, digits, and underscores (`_`).

View file

@ -47,9 +47,9 @@ This is more complicated than is ideal. It makes the query construction more
prone to errors (such as
[issue #15557](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/15557)).
## Attempt A: WHERE EXISTS
## Attempt A: `WHERE EXISTS`
### Attempt A1: use multiple subqueries with WHERE EXISTS
### Attempt A1: use multiple subqueries with `WHERE EXISTS`
In [issue #37137](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/37137)
and its associated [merge request](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/14022),
@ -82,7 +82,7 @@ AND (EXISTS (
While this worked without schema changes, and did improve readability somewhat,
it did not improve query performance.
### Attempt A2: use label IDs in the WHERE EXISTS clause
### Attempt A2: use label IDs in the `WHERE EXISTS` clause
In [merge request #34503](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/34503), we followed a similar approach to A1. But this time, we
did a separate query to fetch the IDs of the labels used in the filter so that we avoid the `JOIN` in the `EXISTS` clause and filter directly by

View file

@ -48,7 +48,7 @@ each node should have:
Elasticsearch is *not* included in the Omnibus packages or when you install from
source. You must [install it separately](https://www.elastic.co/guide/en/elasticsearch/reference/7.x/install-elasticsearch.html "Elasticsearch 7.x installation documentation") and ensure you select your version. Detailed information on how to install Elasticsearch is out of the scope of this page.
You can install Elasticsearch yourself, or use a cloud hosted offering such as [Elasticsearch Service](https://www.elastic.co/elasticsearch/service)(available on AWS, GCP, or Azure) or the [Amazon Elasticsearch](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-gsg.html)
You can install Elasticsearch yourself, or use a cloud hosted offering such as [Elasticsearch Service](https://www.elastic.co/elasticsearch/service) (available on AWS, GCP, or Azure) or the [Amazon OpenSearch](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/gsg.html)
service.
You should install Elasticsearch on a separate server. Running Elasticsearch on the same server as GitLab is not recommended and can cause a degradation in GitLab instance performance.
@ -208,8 +208,8 @@ The following Elasticsearch settings are available:
| `Number of Elasticsearch shards` | Elasticsearch indexes are split into multiple shards for performance reasons. In general, you should use at least 5 shards, and indexes with tens of millions of documents need to have more shards ([see below](#guidance-on-choosing-optimal-cluster-configuration)). Changes to this value do not take effect until the index is recreated. You can read more about tradeoffs in the [Elasticsearch documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/scalability.html). |
| `Number of Elasticsearch replicas` | Each Elasticsearch shard can have a number of replicas. These are a complete copy of the shard, and can provide increased query performance or resilience against hardware failure. Increasing this value increases total disk space required by the index. |
| `Limit namespaces and projects that can be indexed` | Enabling this allows you to select namespaces and projects to index. All other namespaces and projects use database search instead. If you enable this option but do not select any namespaces or projects, none are indexed. [Read more below](#limit-namespaces-and-projects). |
| `Using AWS hosted Elasticsearch with IAM credentials` | Sign your Elasticsearch requests using [AWS IAM authorization](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_access-keys.html), [AWS EC2 Instance Profile Credentials](https://docs.aws.amazon.com/codedeploy/latest/userguide/getting-started-create-iam-instance-profile.html#getting-started-create-iam-instance-profile-cli), or [AWS ECS Tasks Credentials](https://docs.aws.amazon.com/AmazonECS/latest/userguide/task-iam-roles.html). Please refer to [Identity and Access Management in Amazon Elasticsearch Service](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-ac.html) for details of AWS hosted Elasticsearch domain access policy configuration. |
| `AWS Region` | The AWS region in which your Elasticsearch service is located. |
| `Using AWS hosted Elasticsearch with IAM credentials` | Sign your Elasticsearch requests using [AWS IAM authorization](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_access-keys.html), [AWS EC2 Instance Profile Credentials](https://docs.aws.amazon.com/codedeploy/latest/userguide/getting-started-create-iam-instance-profile.html#getting-started-create-iam-instance-profile-cli), or [AWS ECS Tasks Credentials](https://docs.aws.amazon.com/AmazonECS/latest/userguide/task-iam-roles.html). Please refer to [Identity and Access Management in Amazon OpenSearch Service](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/ac.html) for details of AWS hosted OpenSearch domain access policy configuration. |
| `AWS Region` | The AWS region in which your OpenSearch Service is located. |
| `AWS Access Key` | The AWS access key. |
| `AWS Secret Access Key` | The AWS secret access key. |
| `Maximum file size indexed` | See [the explanation in instance limits.](../administration/instance_limits.md#maximum-file-size-indexed). |
@ -890,11 +890,11 @@ There is also an easy way to check it automatically with `sudo gitlab-rake gitla
This exception is seen when your Elasticsearch cluster is configured to reject requests above a certain size (10MiB in this case). This corresponds to the `http.max_content_length` setting in `elasticsearch.yml`. Increase it to a larger size and restart your Elasticsearch cluster.
AWS has [fixed limits](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/aes-limits.html) for this setting ("Maximum Size of HTTP Request Payloads"), based on the size of the underlying instance.
AWS has [fixed limits](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/aes-limits.html) for this setting ("Maximum Size of HTTP Request Payloads"), based on the size of the underlying instance.
### My single node Elasticsearch cluster status never goes from `yellow` to `green` even though everything seems to be running properly
**For a single node Elasticsearch cluster the functional cluster health status will be yellow** (never green) because the primary shard is allocated but replicas cannot be as there is no other node to which Elasticsearch can assign a replica. This also applies if you are using the [Amazon Elasticsearch](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/aes-handling-errors.html#aes-handling-errors-yellow-cluster-status) service.
**For a single node Elasticsearch cluster the functional cluster health status will be yellow** (never green) because the primary shard is allocated but replicas cannot be as there is no other node to which Elasticsearch can assign a replica. This also applies if you are using the [Amazon OpenSearch](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/aes-handling-errors.html#aes-handling-errors-yellow-cluster-status) service.
WARNING:
Setting the number of replicas to `0` is discouraged (this is not allowed in the GitLab Elasticsearch Integration menu). If you are planning to add more Elasticsearch nodes (for a total of more than one Elasticsearch node), the number of replicas needs to be set to an integer value larger than `0`. Failure to do so results in a lack of redundancy (losing one node corrupts the index).
@ -981,11 +981,11 @@ however searches will only surface results that can be viewed by the user.
Advanced Search will honor all permission checks in the application by
filtering out projects that a user does not have access to at search time.
### Access requirements for the self-managed AWS Elasticsearch Service
### Access requirements for the self-managed AWS OpenSearch Service
To use the self-managed AWS Elasticsearch Service with GitLab, configure your instance's domain access policies
To use the self-managed AWS OpenSearch Service with GitLab, configure your instance's domain access policies
to contain the actions below.
See [Identity and Access Management in Amazon Elasticsearch Service](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-ac.html) for details.
See [Identity and Access Management in Amazon OpenSearch Service](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/ac.html) for details.
```plaintext
es:ESHttpDelete

View file

@ -9,6 +9,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/15886) in GitLab 13.2.
> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/221259) from GitLab Premium to GitLab Free in 13.3.
> - Support for Composer 2.0 [added](https://gitlab.com/gitlab-org/gitlab/-/issues/259840) in GitLab 13.10.
> - Deploy token support [added](https://gitlab.com/gitlab-org/gitlab/-/issues/240897) in GitLab 14.6.
WARNING:
The Composer package registry for GitLab is under development and isn't ready for production use due to
@ -88,13 +89,12 @@ Prerequisites:
- A valid `composer.json` file.
- The Packages feature is enabled in a GitLab repository.
- The project ID, which is on the project's home page.
- A [personal access token](../../../user/profile/personal_access_tokens.md) with the scope set to `api`.
- One of the following token types:
- A [personal access token](../../../user/profile/personal_access_tokens.md) with the scope set to `api`.
- A [deploy token](../../project/deploy_tokens/index.md)
with the scope set to `write_package_registry`.
NOTE:
[Deploy tokens](../../project/deploy_tokens/index.md) are
[not yet supported](https://gitlab.com/gitlab-org/gitlab/-/issues/240897) for use with Composer.
To publish the package:
To publish the package with a personal access token:
- Send a `POST` request to the [Packages API](../../../api/packages.md).
@ -109,6 +109,21 @@ To publish the package:
- `<tag>` is the Git tag name of the version you want to publish.
To publish a branch, use `branch=<branch>` instead of `tag=<tag>`.
To publish the package with a deploy token:
- Send a `POST` request to the [Packages API](../../../api/packages.md).
For example, you can use `curl`:
```shell
curl --data tag=<tag> --header "Deploy-Token: <deploy-token>" "https://gitlab.example.com/api/v4/projects/<project_id>/packages/composer"
```
- `<deploy-token>` is your deploy token.
- `<project_id>` is your project ID.
- `<tag>` is the Git tag name of the version you want to publish.
To publish a branch, use `branch=<branch>` instead of `tag=<tag>`.
You can view the published package by going to **Packages & Registries > Package Registry** and
selecting the **Composer** tab.
@ -159,11 +174,11 @@ Prerequisites:
- A package in the Package Registry.
- The group ID, which is on the group's home page.
- A [personal access token](../../../user/profile/personal_access_tokens.md) with the scope set to, at minimum, `read_api`.
NOTE:
[Deploy tokens](../../project/deploy_tokens/index.md) are
[not yet supported](https://gitlab.com/gitlab-org/gitlab/-/issues/240897) for use with Composer.
- One of the following token types:
- A [personal access token](../../../user/profile/personal_access_tokens.md)
with the scope set to, at minimum, `api`.
- A [deploy token](../../project/deploy_tokens/index.md)
with the scope set to `read_package_registry`, `write_package_registry`, or both.
To install a package:
@ -213,6 +228,8 @@ To install a package:
1. Create an `auth.json` file with your GitLab credentials:
Using a personal access token:
```shell
composer config gitlab-token.<DOMAIN-NAME> <personal_access_token>
```
@ -229,6 +246,26 @@ To install a package:
}
```
Using a deploy token:
```shell
composer config gitlab-token.<DOMAIN-NAME> <deploy_token_username> <deploy_token>
```
Result in the `auth.json` file:
```json
{
...
"gitlab-token": {
"<DOMAIN-NAME>": {
"username": "<deploy_token_username>",
"token": "<deploy_token>",
...
}
}
```
You can unset this with the command:
```shell
@ -236,7 +273,8 @@ To install a package:
```
- `<DOMAIN-NAME>` is the GitLab instance URL `gitlab.com` or `gitlab.example.com`.
- `<personal_access_token>` with the scope set to `read_api`.
- `<personal_access_token>` with the scope set to `api`, or `<deploy_token>` with the scope set
to `read_package_registry` and/or `write_package_registry`.
1. If you are on a GitLab self-managed instance, add `gitlab-domains` to `composer.json`.
@ -298,10 +336,19 @@ To install a package:
WARNING:
Never commit the `auth.json` file to your repository. To install packages from a CI/CD job,
consider using the [`composer config`](https://getcomposer.org/doc/articles/handling-private-packages.md#satis) tool with your personal access token
consider using the [`composer config`](https://getcomposer.org/doc/articles/handling-private-packages.md#satis) tool with your access token
stored in a [GitLab CI/CD variable](../../../ci/variables/index.md) or in
[HashiCorp Vault](../../../ci/secrets/index.md).
### Working with Deploy Tokens
Although Composer packages are accessed at the group level, a group or project deploy token can be
used to access them:
- A group deploy token has access to all packages published to projects in that group or its
subgroups.
- A project deploy token only has access to packages published to that particular project.
## Supported CLI commands
The GitLab Composer repository supports the following Composer CLI commands:

View file

@ -70,7 +70,7 @@ module API
end
desc 'Composer packages endpoint at group level'
route_setting :authentication, job_token_allowed: true, basic_auth_personal_access_token: true
route_setting :authentication, job_token_allowed: true, basic_auth_personal_access_token: true, deploy_token_allowed: true
get ':id/-/packages/composer/packages' do
presenter.root
end
@ -79,7 +79,7 @@ module API
params do
requires :sha, type: String, desc: 'Shasum of current json'
end
route_setting :authentication, job_token_allowed: true, basic_auth_personal_access_token: true
route_setting :authentication, job_token_allowed: true, basic_auth_personal_access_token: true, deploy_token_allowed: true
get ':id/-/packages/composer/p/:sha' do
presenter.provider
end
@ -88,7 +88,7 @@ module API
params do
requires :package_name, type: String, file_path: true, desc: 'The Composer package name'
end
route_setting :authentication, job_token_allowed: true, basic_auth_personal_access_token: true
route_setting :authentication, job_token_allowed: true, basic_auth_personal_access_token: true, deploy_token_allowed: true
get ':id/-/packages/composer/p2/*package_name', requirements: COMPOSER_ENDPOINT_REQUIREMENTS, file_path: true do
not_found! if packages.empty?
@ -99,7 +99,7 @@ module API
params do
requires :package_name, type: String, file_path: true, desc: 'The Composer package name'
end
route_setting :authentication, job_token_allowed: true, basic_auth_personal_access_token: true
route_setting :authentication, job_token_allowed: true, basic_auth_personal_access_token: true, deploy_token_allowed: true
get ':id/-/packages/composer/*package_name', requirements: COMPOSER_ENDPOINT_REQUIREMENTS, file_path: true do
not_found! if packages.empty?
not_found! if params[:sha].blank?
@ -119,7 +119,7 @@ module API
desc 'Composer packages endpoint for registering packages'
namespace ':id/packages/composer' do
route_setting :authentication, job_token_allowed: true, basic_auth_personal_access_token: true
route_setting :authentication, job_token_allowed: true, basic_auth_personal_access_token: true, deploy_token_allowed: true
params do
optional :branch, type: String, desc: 'The name of the branch'

View file

@ -26,7 +26,7 @@ module Gitlab
validates :name, length: { maximum: 255 }, if: -> { ::Feature.enabled?(:ci_validate_job_length, default_enabled: :yaml) }
validates :config, disallowed_keys: {
in: %i[only except when start_in],
in: %i[only except start_in],
message: 'key may not be used with `rules`'
},
if: :has_rules?

View file

@ -28,7 +28,7 @@ module Gitlab
@except = Gitlab::Ci::Build::Policy
.fabricate(attributes.delete(:except))
@rules = Gitlab::Ci::Build::Rules
.new(attributes.delete(:rules), default_when: 'on_success')
.new(attributes.delete(:rules), default_when: attributes[:when])
@cache = Gitlab::Ci::Build::Cache
.new(attributes.delete(:cache), @pipeline)

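A hedged illustration of the behavior change: a matched rule without its own `when:` previously fell back to the hardcoded `on_success`, and now falls back to the job-level `when:` (the attribute shape mirrors the seed/build specs later in this commit):

```ruby
# Illustration only; names follow the specs further down in this commit.
attributes = { name: 'deploy', when: 'manual', rules: [{ if: '$VAR == null' }] }

# Before: Rules.new(rules, default_when: 'on_success')
#   a matched rule with no `when:` of its own produced when: 'on_success'
# After:  Rules.new(rules, default_when: attributes[:when])
#   the same matched rule now produces when: 'manual'
# An unmatched rule set still produces when: 'never' in both cases.
```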
View file

@ -6,7 +6,7 @@ module Gitlab
class SidekiqServerMiddleware
JobReplicaNotUpToDate = Class.new(StandardError)
MINIMUM_DELAY_INTERVAL = 1
MINIMUM_DELAY_INTERVAL_SECONDS = 0.8
def call(worker, job, _queue)
worker_class = worker.class
@ -46,11 +46,13 @@ module Gitlab
return :primary_no_wal if wal_locations.blank?
# Happy case: we can read from a replica.
return replica_strategy(worker_class, job) if databases_in_sync?(wal_locations)
sleep_if_needed(job)
if databases_in_sync?(wal_locations)
# Happy case: we can read from a replica.
retried_before?(worker_class, job) ? :replica_retried : :replica
replica_strategy(worker_class, job)
elsif can_retry?(worker_class, job)
# Optimistic case: The worker allows retries and we have retries left.
:retry
@ -61,9 +63,9 @@ module Gitlab
end
def sleep_if_needed(job)
remaining_delay = MINIMUM_DELAY_INTERVAL - (Time.current.to_f - job['created_at'].to_f)
remaining_delay = MINIMUM_DELAY_INTERVAL_SECONDS - (Time.current.to_f - job['created_at'].to_f)
sleep remaining_delay if remaining_delay > 0 && remaining_delay < MINIMUM_DELAY_INTERVAL
sleep remaining_delay if remaining_delay > 0 && remaining_delay < MINIMUM_DELAY_INTERVAL_SECONDS
end
def get_wal_locations(job)
@ -80,6 +82,10 @@ module Gitlab
worker_class.get_data_consistency == :delayed && not_yet_retried?(job)
end
def replica_strategy(worker_class, job)
retried_before?(worker_class, job) ? :replica_retried : :replica
end
def retried_before?(worker_class, job)
worker_class.get_data_consistency == :delayed && !not_yet_retried?(job)
end

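A worked example of the `sleep_if_needed` arithmetic with the renamed constant (standalone sketch using `Time.now` in place of Rails' `Time.current`):

```ruby
# Sketch only: with a minimum delay of 0.8 seconds, a job created 0.3 seconds
# ago still has 0.5 seconds of delay remaining, so the middleware sleeps for
# about half a second; once 0.8 seconds have elapsed, it does not sleep at all.
MINIMUM_DELAY_INTERVAL_SECONDS = 0.8
job = { 'created_at' => Time.now.to_f - 0.3 }

remaining_delay = MINIMUM_DELAY_INTERVAL_SECONDS - (Time.now.to_f - job['created_at'].to_f)
sleep(remaining_delay) if remaining_delay > 0 && remaining_delay < MINIMUM_DELAY_INTERVAL_SECONDS
```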
View file

@ -65,8 +65,7 @@ module Gitlab
).to_i
if num_zero_buckets > 0 && num_uniques < 2.5 * TOTAL_BUCKETS
((0.7213 / (1 + 1.079 / TOTAL_BUCKETS)) * (TOTAL_BUCKETS *
Math.log2(TOTAL_BUCKETS.to_f / num_zero_buckets)))
TOTAL_BUCKETS * Math.log(TOTAL_BUCKETS.to_f / num_zero_buckets)
else
num_uniques
end

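The replacement expression is the standard linear-counting estimate, `m * ln(m / V)`, where `m` is the total number of buckets and `V` is the number of empty buckets. A worked sketch (assuming 512 buckets, which is consistent with the ~5.0246 and ~2.0039 values in the updated `usage_data` spec later in this commit):

```ruby
# Sketch only: linear counting for small cardinalities.
TOTAL_BUCKETS = 512          # assumed bucket count, inferred from the spec values
num_zero_buckets = 507       # 5 occupied buckets out of 512

estimate = TOTAL_BUCKETS * Math.log(TOTAL_BUCKETS.to_f / num_zero_buckets)
# => ~5.0246, matching the 5.024574... expected for 5 distinct values
```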
View file

@ -281,6 +281,8 @@ module QA
end
def knapsack?
return false unless ENV['CI_NODE_TOTAL'].to_i > 1
!!(ENV['KNAPSACK_GENERATE_REPORT'] || ENV['KNAPSACK_REPORT_PATH'] || ENV['KNAPSACK_TEST_FILE_PATTERN'])
end

View file

@ -169,6 +169,10 @@ RSpec.describe QA::Runtime::Env do
end
describe '.knapsack?' do
before do
stub_env('CI_NODE_TOTAL', '2')
end
it 'returns true if KNAPSACK_GENERATE_REPORT is defined' do
stub_env('KNAPSACK_GENERATE_REPORT', 'true')
@ -190,28 +194,11 @@ RSpec.describe QA::Runtime::Env do
it 'returns false if neither KNAPSACK_GENERATE_REPORT nor KNAPSACK_REPORT_PATH nor KNAPSACK_TEST_FILE_PATTERN are defined' do
expect(described_class.knapsack?).to be_falsey
end
end
describe '.knapsack?' do
it 'returns true if KNAPSACK_GENERATE_REPORT is defined' do
it 'returns false if not running in parallel job' do
stub_env('CI_NODE_TOTAL', '1')
stub_env('KNAPSACK_GENERATE_REPORT', 'true')
expect(described_class.knapsack?).to be_truthy
end
it 'returns true if KNAPSACK_REPORT_PATH is defined' do
stub_env('KNAPSACK_REPORT_PATH', '/a/path')
expect(described_class.knapsack?).to be_truthy
end
it 'returns true if KNAPSACK_TEST_FILE_PATTERN is defined' do
stub_env('KNAPSACK_TEST_FILE_PATTERN', '/a/**/pattern')
expect(described_class.knapsack?).to be_truthy
end
it 'returns false if neither KNAPSACK_GENERATE_REPORT nor KNAPSACK_REPORT_PATH nor KNAPSACK_TEST_FILE_PATTERN are defined' do
expect(described_class.knapsack?).to be_falsey
end
end

View file

@ -263,6 +263,12 @@ function deploy() {
gitaly_image_tag=$(parse_gitaly_image_tag)
gitlab_shell_image_repository="${IMAGE_REPOSITORY}/gitlab-shell"
gitlab_workhorse_image_repository="${IMAGE_REPOSITORY}/gitlab-workhorse-ee"
sentry_enabled="false"
if [ -n "${REVIEW_APPS_SENTRY_DSN}" ]; then
echo "REVIEW_APPS_SENTRY_DSN detected, enabling Sentry"
sentry_enabled="true"
fi
ensure_namespace "${namespace}"
label_namespace "${namespace}" "tls=review-apps-tls" # label namespace for kubed to sync tls
@ -283,6 +289,9 @@ HELM_CMD=$(cat << EOF
--set releaseOverride="${release}" \
--set global.hosts.hostSuffix="${HOST_SUFFIX}" \
--set global.hosts.domain="${REVIEW_APPS_DOMAIN}" \
--set global.sentry.enabled="${sentry_enabled}" \
--set global.sentry.dsn="${REVIEW_APPS_SENTRY_DSN}" \
--set global.sentry.environment="review" \
--set gitlab.migrations.image.repository="${gitlab_toolbox_image_repository}" \
--set gitlab.migrations.image.tag="${CI_COMMIT_REF_SLUG}" \
--set gitlab.gitaly.image.repository="${gitlab_gitaly_image_repository}" \

View file

@ -107,6 +107,28 @@ RSpec.describe Packages::GroupPackagesFinder do
end
end
context 'deploy tokens' do
let(:add_user_to_group) { false }
context 'group deploy token' do
let_it_be(:deploy_token_for_group) { create(:deploy_token, :group, read_package_registry: true) }
let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: deploy_token_for_group, group: group) }
let(:user) { deploy_token_for_group }
it { is_expected.to match_array([package1, package2, package4]) }
end
context 'project deploy token' do
let_it_be(:deploy_token_for_project) { create(:deploy_token, read_package_registry: true) }
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token_for_project, project: subproject) }
let(:user) { deploy_token_for_project }
it { is_expected.to match_array([package4]) }
end
end
context 'avoid N+1 query' do
it 'avoids N+1 database queries' do
count = ActiveRecord::QueryRecorder.new { subject }

View file

@ -163,7 +163,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
})
end
it { is_expected.not_to be_valid }
it { is_expected.to be_valid }
end
context 'when bridge configuration uses rules with only' do

View file

@ -118,6 +118,20 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
context 'when config uses both "when:" and "rules:"' do
let(:config) do
{
script: 'echo',
when: 'on_failure',
rules: [{ if: '$VARIABLE', when: 'on_success' }]
}
end
it 'is valid' do
expect(entry).to be_valid
end
end
context 'when delayed job' do
context 'when start_in is specified' do
let(:config) { { script: 'echo', when: 'delayed', start_in: '1 week' } }
@ -268,21 +282,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
context 'when it uses both "when:" and "rules:"' do
let(:config) do
{
script: 'echo',
when: 'on_failure',
rules: [{ if: '$VARIABLE', when: 'on_success' }]
}
end
it 'returns an error about when: being combined with rules' do
expect(entry).not_to be_valid
expect(entry.errors).to include 'job config key may not be used with `rules`: when'
end
end
context 'when delayed job' do
context 'when start_in is specified' do
let(:config) { { script: 'echo', when: 'delayed', start_in: '1 week' } }

View file

@ -33,6 +33,20 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
end
end
context 'when config uses both "when:" and "rules:"' do
let(:config) do
{
script: 'echo',
when: 'on_failure',
rules: [{ if: '$VARIABLE', when: 'on_success' }]
}
end
it 'is valid' do
expect(entry).to be_valid
end
end
context 'when job name is more than 255' do
let(:entry) { node_class.new(config, name: ('a' * 256).to_sym) }
@ -90,21 +104,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
end
end
context 'when it uses both "when:" and "rules:"' do
let(:config) do
{
script: 'echo',
when: 'on_failure',
rules: [{ if: '$VARIABLE', when: 'on_success' }]
}
end
it 'returns an error about when: being combined with rules' do
expect(entry).not_to be_valid
expect(entry.errors).to include 'job config key may not be used with `rules`: when'
end
end
context 'when only: is used with rules:' do
let(:config) { { only: ['merge_requests'], rules: [{ if: '$THIS' }] } }

View file

@ -9,7 +9,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
let(:pipeline) { build(:ci_empty_pipeline, project: project, sha: head_sha) }
let(:root_variables) { [] }
let(:seed_context) { double(pipeline: pipeline, root_variables: root_variables) }
let(:attributes) { { name: 'rspec', ref: 'master', scheduling_type: :stage } }
let(:attributes) { { name: 'rspec', ref: 'master', scheduling_type: :stage, when: 'on_success' } }
let(:previous_stages) { [] }
let(:current_stage) { double(seeds_names: [attributes[:name]]) }
@ -61,17 +61,35 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
end
end
context 'with job:rules but no explicit when:' do
context 'is matched' do
let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$VAR == null' }] } }
context 'with job: rules but no explicit when:' do
let(:base_attributes) { { name: 'rspec', ref: 'master' } }
it { is_expected.to include(when: 'on_success') }
context 'with a manual job' do
context 'with a matched rule' do
let(:attributes) { base_attributes.merge(when: 'manual', rules: [{ if: '$VAR == null' }]) }
it { is_expected.to include(when: 'manual') }
end
context 'is not matched' do
let(:attributes) { base_attributes.merge(when: 'manual', rules: [{ if: '$VAR != null' }]) }
it { is_expected.to include(when: 'never') }
end
end
context 'is not matched' do
let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$VAR != null' }] } }
context 'with an automatic job' do
context 'is matched' do
let(:attributes) { base_attributes.merge(when: 'on_success', rules: [{ if: '$VAR == null' }]) }
it { is_expected.to include(when: 'never') }
it { is_expected.to include(when: 'on_success') }
end
context 'is not matched' do
let(:attributes) { base_attributes.merge(when: 'on_success', rules: [{ if: '$VAR != null' }]) }
it { is_expected.to include(when: 'never') }
end
end
end
@ -901,7 +919,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
context 'using rules:' do
using RSpec::Parameterized
let(:attributes) { { name: 'rspec', rules: rule_set } }
let(:attributes) { { name: 'rspec', rules: rule_set, when: 'on_success' } }
context 'with a matching if: rule' do
context 'with an explicit `when: never`' do

View file

@ -2139,7 +2139,7 @@ module Gitlab
end
end
context 'with when/rules conflict' do
context 'with when/rules' do
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
let(:config) do
@ -2174,7 +2174,7 @@ module Gitlab
}
end
it_behaves_like 'returns errors', /may not be used with `rules`: when/
it { is_expected.to be_valid }
end
context 'used with job-level when:delayed' do
@ -2190,7 +2190,7 @@ module Gitlab
}
end
it_behaves_like 'returns errors', /may not be used with `rules`: when, start_in/
it_behaves_like 'returns errors', /may not be used with `rules`: start_in/
end
end

View file

@ -122,7 +122,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
shared_examples_for 'sleeps when necessary' do
context 'when WAL locations are blank', :freeze_time do
let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", "wal_locations" => {}, "created_at" => Time.current.to_f - (described_class::MINIMUM_DELAY_INTERVAL - 0.3) } }
let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", "wal_locations" => {}, "created_at" => Time.current.to_f - (described_class::MINIMUM_DELAY_INTERVAL_SECONDS - 0.3) } }
it 'does not sleep' do
expect(middleware).not_to receive(:sleep)
@ -135,17 +135,39 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations, "created_at" => Time.current.to_f - elapsed_time } }
context 'when delay interval has not elapsed' do
let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL - 0.3 }
let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL_SECONDS - 0.3 }
it 'sleeps until the minimum delay is reached' do
expect(middleware).to receive(:sleep).with(be_within(0.01).of(described_class::MINIMUM_DELAY_INTERVAL - elapsed_time))
context 'when replica is up to date' do
before do
Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
allow(lb).to receive(:select_up_to_date_host).and_return(true)
end
end
run_middleware
it 'does not sleep' do
expect(middleware).not_to receive(:sleep)
run_middleware
end
end
context 'when replica is not up to date' do
before do
Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
allow(lb).to receive(:select_up_to_date_host).and_return(false, true)
end
end
it 'sleeps until the minimum delay is reached' do
expect(middleware).to receive(:sleep).with(be_within(0.01).of(described_class::MINIMUM_DELAY_INTERVAL_SECONDS - elapsed_time))
run_middleware
end
end
end
context 'when delay interval has elapsed' do
let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL + 0.3 }
let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL_SECONDS + 0.3 }
it 'does not sleep' do
expect(middleware).not_to receive(:sleep)
@ -179,7 +201,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
context 'when delay interval has not elapsed', :freeze_time do
let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations, "created_at" => Time.current.to_f - elapsed_time } }
let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL - 0.3 }
let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL_SECONDS - 0.3 }
it 'does not sleep' do
expect(middleware).not_to receive(:sleep)

View file

@ -11,6 +11,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
let(:metric_1) { 'metric_1' }
let(:metric_2) { 'metric_2' }
let(:metric_names) { [metric_1, metric_2] }
let(:error_rate) { Gitlab::Database::PostgresHll::BatchDistinctCounter::ERROR_RATE }
describe 'metric calculations' do
before do
@ -38,7 +39,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
end
it 'returns the number of unique events in the union of all metrics' do
expect(calculate_metrics_union.round(2)).to eq(3.12)
expect(calculate_metrics_union.round(2)).to be_within(error_rate).percent_of(3)
end
context 'when there is no aggregated data saved' do
@ -53,7 +54,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
let(:metric_names) { [metric_1] }
it 'returns the number of unique events for that metric' do
expect(calculate_metrics_union.round(2)).to eq(2.08)
expect(calculate_metrics_union.round(2)).to be_within(error_rate).percent_of(2)
end
end
end
@ -64,7 +65,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
end
it 'returns the number of common events in the intersection of all metrics' do
expect(calculate_metrics_intersections.round(2)).to eq(1.04)
expect(calculate_metrics_intersections.round(2)).to be_within(error_rate).percent_of(1)
end
context 'when there is no aggregated data saved' do
@ -79,7 +80,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
let(:metric_names) { [metric_1] }
it 'returns the number of common/unique events for the intersection of that metric' do
expect(calculate_metrics_intersections.round(2)).to eq(2.08)
expect(calculate_metrics_intersections.round(2)).to be_within(error_rate).percent_of(2)
end
end
end

View file

@ -82,7 +82,7 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
end.new(time_frame: 'all')
end
it 'calculates a correct result', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/348139' do
it 'calculates a correct result' do
expect(subject.value).to be_within(Gitlab::Database::PostgresHll::BatchDistinctCounter::ERROR_RATE).percent_of(3)
end

View file

@ -118,7 +118,7 @@ RSpec.describe Gitlab::Utils::UsageData do
# build_needs set: ['1', '2', '3', '4', '5']
# ci_build set ['a', 'b']
# with them, current implementation is expected to consistently report
# 5.217656147118495 and 2.0809220082170614 values
# the same static values
# This test suite is expected to assure, that HyperLogLog implementation
# behaves consistently between changes made to other parts of codebase.
# In case of fine tuning or changes to HyperLogLog algorithm implementation
@ -130,8 +130,8 @@ RSpec.describe Gitlab::Utils::UsageData do
let(:model) { Ci::BuildNeed }
let(:column) { :name }
let(:build_needs_estimated_cardinality) { 5.217656147118495 }
let(:ci_builds_estimated_cardinality) { 2.0809220082170614 }
let(:build_needs_estimated_cardinality) { 5.024574181542231 }
let(:ci_builds_estimated_cardinality) { 2.003916452421793 }
before do
allow(model.connection).to receive(:transaction_open?).and_return(false)

View file

@ -0,0 +1,21 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Transactions do
let(:model) { build(:project) }
it 'is not in a transaction' do
expect(model.class).not_to be_inside_transaction
end
it 'is in a transaction', :aggregate_failures do
Project.transaction do
expect(model.class).to be_inside_transaction
end
ApplicationRecord.transaction do
expect(model.class).to be_inside_transaction
end
end
end

View file

@ -9,6 +9,10 @@ RSpec.describe API::ComposerPackages do
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
let_it_be(:package_name) { 'package-name' }
let_it_be(:project, reload: true) { create(:project, :custom_repo, files: { 'composer.json' => { name: package_name }.to_json }, group: group) }
let_it_be(:deploy_token_for_project) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token_for_project, project: project) }
let_it_be(:deploy_token_for_group) { create(:deploy_token, :group, read_package_registry: true, write_package_registry: true) }
let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: deploy_token_for_group, group: group) }
let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace, user: user } }
let(:headers) { {} }
@ -92,6 +96,8 @@ RSpec.describe API::ComposerPackages do
group.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
end
it_behaves_like 'Composer access with deploy tokens'
context 'with access to the api' do
where(:project_visibility_level, :user_role, :member, :user_token, :include_package) do
'PRIVATE' | :developer | true | true | :include_package
@ -162,6 +168,8 @@ RSpec.describe API::ComposerPackages do
it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end
it_behaves_like 'Composer access with deploy tokens'
end
it_behaves_like 'rejects Composer access with unknown group id'
@ -219,6 +227,8 @@ RSpec.describe API::ComposerPackages do
end
end
end
it_behaves_like 'Composer access with deploy tokens'
end
it_behaves_like 'rejects Composer access with unknown group id'
@ -265,6 +275,8 @@ RSpec.describe API::ComposerPackages do
it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end
it_behaves_like 'Composer access with deploy tokens'
end
it_behaves_like 'rejects Composer access with unknown group id'
@ -308,6 +320,8 @@ RSpec.describe API::ComposerPackages do
it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end
it_behaves_like 'Composer publish with deploy tokens'
end
it_behaves_like 'rejects Composer access with unknown project id'

View file

@ -29,7 +29,7 @@ RSpec.describe BuildDetailsEntity do
end
it 'contains the needed key value pairs' do
expect(subject).to include(:coverage, :erased_at, :duration)
expect(subject).to include(:coverage, :erased_at, :finished_at, :duration)
expect(subject).to include(:runner, :pipeline)
expect(subject).to include(:raw_path, :new_issue_path)
end

View file

@ -1992,6 +1992,75 @@ RSpec.describe Ci::CreatePipelineService do
let(:rules_job) { find_job('rules-job') }
let(:delayed_job) { find_job('delayed-job') }
context 'with when:manual' do
let(:config) do
<<-EOY
job-with-rules:
script: 'echo hey'
rules:
- if: $CI_COMMIT_REF_NAME =~ /master/
job-when-with-rules:
script: 'echo hey'
when: manual
rules:
- if: $CI_COMMIT_REF_NAME =~ /master/
job-when-with-rules-when:
script: 'echo hey'
when: manual
rules:
- if: $CI_COMMIT_REF_NAME =~ /master/
when: on_success
job-with-rules-when:
script: 'echo hey'
rules:
- if: $CI_COMMIT_REF_NAME =~ /master/
when: manual
job-without-rules:
script: 'echo this is a job with NO rules'
EOY
end
let(:job_with_rules) { find_job('job-with-rules') }
let(:job_when_with_rules) { find_job('job-when-with-rules') }
let(:job_when_with_rules_when) { find_job('job-when-with-rules-when') }
let(:job_with_rules_when) { find_job('job-with-rules-when') }
let(:job_without_rules) { find_job('job-without-rules') }
context 'when matching the rules' do
let(:ref_name) { 'refs/heads/master' }
it 'adds the job-with-rules with a when:manual' do
expect(job_with_rules).to be_persisted
expect(job_when_with_rules).to be_persisted
expect(job_when_with_rules_when).to be_persisted
expect(job_with_rules_when).to be_persisted
expect(job_without_rules).to be_persisted
expect(job_with_rules.when).to eq('on_success')
expect(job_when_with_rules.when).to eq('manual')
expect(job_when_with_rules_when.when).to eq('on_success')
expect(job_with_rules_when.when).to eq('manual')
expect(job_without_rules.when).to eq('on_success')
end
end
context 'when there is no match to the rule' do
let(:ref_name) { 'refs/heads/wip' }
it 'does not add job_with_rules' do
expect(job_with_rules).to be_nil
expect(job_when_with_rules).to be_nil
expect(job_when_with_rules_when).to be_nil
expect(job_with_rules_when).to be_nil
expect(job_without_rules).to be_persisted
end
end
end
shared_examples 'rules jobs are excluded' do
it 'only persists the job without rules' do
expect(pipeline).to be_persisted

View file

@ -37,6 +37,32 @@ RSpec.describe Ci::ParseDotenvArtifactService do
end
end
context 'when dotenv variables have duplicate variables' do
let!(:artifact) { create(:ci_job_artifact, :dotenv, job: build) }
let(:blob) do
<<~EOS
KEY1=VAR1
KEY2=VAR2
KEY2=VAR3
KEY1=VAR4
EOS
end
before do
allow(artifact).to receive(:each_blob).and_yield(blob)
end
it 'latest values get used' do
subject
expect(subject[:status]).to eq(:success)
expect(build.job_variables.as_json).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => 'VAR4'),
hash_including('key' => 'KEY2', 'value' => 'VAR3'))
end
end
context 'when parse error happens' do
before do
allow(service).to receive(:scan_line!) { raise described_class::ParserError, 'Invalid Format' }

View file

@ -173,3 +173,65 @@ RSpec.shared_examples 'rejects Composer access with unknown project id' do
end
end
end
RSpec.shared_examples 'Composer access with deploy tokens' do
shared_examples 'a deploy token for Composer GET requests' do
context 'with deploy token headers' do
let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token) }
before do
group.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
end
context 'valid token' do
it_behaves_like 'returning response status', :success
end
context 'invalid token' do
let(:headers) { basic_auth_header(deploy_token.username, 'bar') }
it_behaves_like 'returning response status', :not_found
end
end
end
context 'group deploy token' do
let(:deploy_token) { deploy_token_for_group }
it_behaves_like 'a deploy token for Composer GET requests'
end
context 'project deploy token' do
let(:deploy_token) { deploy_token_for_project }
it_behaves_like 'a deploy token for Composer GET requests'
end
end
RSpec.shared_examples 'Composer publish with deploy tokens' do
shared_examples 'a deploy token for Composer publish requests' do
let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token) }
context 'valid token' do
it_behaves_like 'returning response status', :success
end
context 'invalid token' do
let(:headers) { basic_auth_header(deploy_token.username, 'bar') }
it_behaves_like 'returning response status', :unauthorized
end
end
context 'group deploy token' do
let(:deploy_token) { deploy_token_for_group }
it_behaves_like 'a deploy token for Composer publish requests'
end
context 'project deploy token' do
let(:deploy_token) { deploy_token_for_project }
it_behaves_like 'a deploy token for Composer publish requests'
end
end