Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-03-10 15:08:10 +00:00
parent e3624c5be1
commit 6ebe886c82
52 changed files with 963 additions and 269 deletions

View File

@ -657,18 +657,6 @@ Style/NumericLiteralPrefix:
Style/PercentLiteralDelimiters:
Enabled: false
# Offense count: 26
# Cop supports --auto-correct.
# Configuration parameters: SafeForConstants.
Style/RedundantFetchBlock:
Exclude:
- 'app/finders/admin/projects_finder.rb'
- 'lib/gitlab/diff/file.rb'
- 'spec/lib/gitlab/json_cache_spec.rb'
- 'spec/lib/gitlab/metrics/dashboard/cache_spec.rb'
- 'spec/lib/gitlab/null_request_store_spec.rb'
- 'spec/lib/gitlab/safe_request_store_spec.rb'
# Offense count: 206
# Cop supports --auto-correct.
Style/RedundantInterpolation:

View File

@ -7,6 +7,7 @@ class Projects::ReleasesController < Projects::ApplicationController
before_action :authorize_read_release!
before_action :authorize_update_release!, only: %i[edit update]
before_action :authorize_create_release!, only: :new
before_action :validate_suffix_path, :fetch_latest_tag, only: :latest_permalink
before_action only: :index do
push_frontend_feature_flag(:releases_index_apollo_client, project, default_enabled: :yaml)
end
@ -26,10 +27,24 @@ class Projects::ReleasesController < Projects::ApplicationController
redirect_to link.url
end
def latest_permalink
unless @latest_tag.present?
return render_404
end
query_parameters_except_order_by = request.query_parameters.except(:order_by)
redirect_url = project_release_url(@project, @latest_tag)
redirect_url += "/#{params[:suffix_path]}" if params[:suffix_path]
redirect_url += "?#{query_parameters_except_order_by.compact.to_param}" if query_parameters_except_order_by.present?
redirect_to redirect_url
end
private
def releases
ReleasesFinder.new(@project, current_user).execute
def releases(params = {})
ReleasesFinder.new(@project, current_user, params).execute
end
def authorize_update_release!
@ -51,4 +66,18 @@ class Projects::ReleasesController < Projects::ApplicationController
def sanitized_tag_name
CGI.unescape(params[:tag])
end
# Default order_by is 'released_at', which is set in ReleasesFinder.
# Also if the passed order_by is invalid, we reject and default to 'released_at'.
def fetch_latest_tag
allowed_values = ['released_at']
params.reject! { |key, value| key.to_sym == :order_by && !allowed_values.any?(value) }
@latest_tag = releases(order_by: params[:order_by]).first&.tag
end
def validate_suffix_path
Gitlab::Utils.check_path_traversal!(params[:suffix_path]) if params[:suffix_path]
end
end
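For illustration, assuming the project's latest release is tagged `v1.2.3` and the client passes an arbitrary `foo=1` query parameter (both made up for this sketch), the new permalink route and controller action combine as follows:

```ruby
# GET /gitlab/gitlabhq/-/releases/permalink/latest/downloads/release-binary.zip?order_by=released_at&foo=1
#
# redirects to
#
#     /gitlab/gitlabhq/-/releases/v1.2.3/downloads/release-binary.zip?foo=1
#
# The optional suffix_path is appended to the release URL, order_by is only used to pick
# which release counts as "latest" and is dropped, and all other query parameters are
# carried over unchanged.
```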

View File

@ -69,7 +69,7 @@ class Admin::ProjectsFinder
end
def sort(items)
sort = params.fetch(:sort) { 'latest_activity_desc' }
sort = params.fetch(:sort, 'latest_activity_desc')
items.sort_by_attribute(sort)
end
end
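This is part of the `Style/RedundantFetchBlock` cleanup tracked in the `.rubocop_todo.yml` hunk above; for a constant default the two `fetch` forms return the same value, and the argument form avoids allocating a block, which is what the cop enforces:

```ruby
params = {}

params.fetch(:sort) { 'latest_activity_desc' } # => "latest_activity_desc" (block form, flagged by the cop)
params.fetch(:sort, 'latest_activity_desc')    # => "latest_activity_desc" (argument form, preferred)
```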

View File

@ -64,35 +64,50 @@ module Ci
def create_archive(artifacts)
return unless artifacts[:untracked] || artifacts[:paths]
archive = {
artifact_type: :archive,
artifact_format: :zip,
name: artifacts[:name],
untracked: artifacts[:untracked],
paths: artifacts[:paths],
when: artifacts[:when],
expire_in: artifacts[:expire_in]
}
if artifacts.dig(:exclude).present?
archive.merge(exclude: artifacts[:exclude])
else
archive
BuildArtifact.for_archive(artifacts).to_h.tap do |artifact|
artifact.delete(:exclude) unless artifact[:exclude].present?
end
end
def create_reports(reports, expire_in:)
return unless reports&.any?
reports.map do |report_type, report_paths|
{
artifact_type: report_type.to_sym,
artifact_format: ::Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS.fetch(report_type.to_sym),
name: ::Ci::JobArtifact::DEFAULT_FILE_NAMES.fetch(report_type.to_sym),
paths: report_paths,
reports.map { |report| BuildArtifact.for_report(report, expire_in).to_h.compact }
end
BuildArtifact = Struct.new(:name, :untracked, :paths, :exclude, :when, :expire_in, :artifact_type, :artifact_format, keyword_init: true) do
def self.for_archive(artifacts)
self.new(
artifact_type: :archive,
artifact_format: :zip,
name: artifacts[:name],
untracked: artifacts[:untracked],
paths: artifacts[:paths],
when: artifacts[:when],
expire_in: artifacts[:expire_in],
exclude: artifacts[:exclude]
)
end
def self.for_report(report, expire_in)
type, params = report
if type == :coverage_report
artifact_type = params[:coverage_format].to_sym
paths = [params[:path]]
else
artifact_type = type
paths = params
end
self.new(
artifact_type: artifact_type,
artifact_format: ::Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS.fetch(artifact_type),
name: ::Ci::JobArtifact::DEFAULT_FILE_NAMES.fetch(artifact_type),
paths: paths,
when: 'always',
expire_in: expire_in
}
)
end
end
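As a rough sketch of the two report shapes `for_report` accepts (the expected hash for the coverage case is inferred from the runner request spec further down in this commit):

```ruby
# A classic report entry: the key is the artifact type, the value is a list of paths.
BuildArtifact.for_report([:dependency_scanning, ['gl-dependency-scanning-report.json']], '7d')

# A coverage_report entry: the artifact type comes from coverage_format and the single
# path is wrapped in an array.
BuildArtifact.for_report(
  [:coverage_report, { coverage_format: 'cobertura', path: 'coverage/cobertura.xml' }], '7d'
).to_h.compact
# => { name: 'cobertura-coverage.xml', paths: ['coverage/cobertura.xml'], when: 'always',
#      expire_in: '7d', artifact_type: :cobertura, artifact_format: :gzip }
```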

View File

@ -1,8 +1,8 @@
---
name: allow_unsafe_ruby_regexp
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/10566
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/257849
milestone: '11.10'
name: disable_unsafe_regexp
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/79611
rollout_issue_url:
milestone: '14.9'
type: development
group: group::pipeline execution
default_enabled: false

View File

@ -241,6 +241,8 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
end
end
get 'releases/permalink/latest(/)(*suffix_path)', to: 'releases#latest_permalink', as: :latest_release_permalink, format: false
resources :logs, only: [:index] do
collection do
get :k8s

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
class RemoveExternalPullRequestTracking < Gitlab::Database::Migration[1.0]
include Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers
enable_lock_retries!
def up
untrack_record_deletions(:external_pull_requests)
end
def down
track_record_deletions(:external_pull_requests)
end
end

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true
class RemoveLeftoverExternalPullRequestDeletions < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
def up
# Delete all pending record deletions in the public.external_pull_requests until
# there are no more rows left.
loop do
result = execute <<~SQL
DELETE FROM "loose_foreign_keys_deleted_records"
WHERE
("loose_foreign_keys_deleted_records"."partition", "loose_foreign_keys_deleted_records"."id") IN (
SELECT "loose_foreign_keys_deleted_records"."partition", "loose_foreign_keys_deleted_records"."id"
FROM "loose_foreign_keys_deleted_records"
WHERE
"loose_foreign_keys_deleted_records"."fully_qualified_table_name" = 'public.external_pull_requests' AND
"loose_foreign_keys_deleted_records"."status" = 1
LIMIT 100
)
SQL
break if result.cmd_tuples == 0
end
end
def down
# no-op
end
end

View File

@ -0,0 +1 @@
d9d17f94f54840eace48f210e3886423a8dc04109f2ebca8d8edb7d53e0b5688

View File

@ -0,0 +1 @@
6d9c5454372317955c4e16b5a02dece575221f15af60c33df45fffbca169c08c

View File

@ -30858,8 +30858,6 @@ CREATE TRIGGER ci_pipelines_loose_fk_trigger AFTER DELETE ON ci_pipelines REFERE
CREATE TRIGGER ci_runners_loose_fk_trigger AFTER DELETE ON ci_runners REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();
CREATE TRIGGER external_pull_requests_loose_fk_trigger AFTER DELETE ON external_pull_requests REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();
CREATE TRIGGER merge_requests_loose_fk_trigger AFTER DELETE ON merge_requests REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();
CREATE TRIGGER namespaces_loose_fk_trigger AFTER DELETE ON namespaces REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();

View File

@ -834,13 +834,9 @@ due to computational complexity, and some features, like negative lookaheads, be
Only a subset of features provided by [Ruby Regexp](https://ruby-doc.org/core/Regexp.html)
are now supported.
From GitLab 11.9.7 to GitLab 12.0, GitLab provided a feature flag to
let you use unsafe regexp syntax. After migrating to safe syntax, you should disable
this feature flag again:
```ruby
Feature.enable(:allow_unsafe_ruby_regexp)
```
From GitLab 11.9.7 to GitLab 14.9, GitLab provided a feature flag to let you
use unsafe regexp syntax. We've fully migrated to RE2 now, and that feature
flag is no longer available.
## CI/CD variable expressions

View File

@ -23,6 +23,8 @@ Otherwise, the pipeline is not created. No error message is displayed.
## Add a pipeline schedule
> Scheduled pipelines for tags [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/23292) in GitLab 14.9.
To add a pipeline schedule:
1. On the top bar, select **Menu > Projects** and find your project.
@ -32,6 +34,7 @@ To add a pipeline schedule:
interval in [cron notation](../../topics/cron/index.md). You can use any cron value,
but scheduled pipelines cannot run more frequently than the instance's
[maximum scheduled pipeline frequency](../../administration/cicd.md#change-maximum-scheduled-pipeline-frequency).
- **Target branch or tag**: Select the branch or tag for the pipeline.
- **Variables**: Add any number of [CI/CD variables](../variables/index.md) to the schedule.
These variables are available only when the scheduled pipeline runs,
and not in any other pipeline run.

View File

@ -80,9 +80,14 @@ GitLab can display the results of one or more reports in:
- The [security dashboard](../../user/application_security/security_dashboard/index.md).
- The [Project Vulnerability report](../../user/application_security/vulnerability_report/index.md).
## `artifacts:reports:cobertura`
## `artifacts:reports:cobertura` (DEPRECATED)
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/3708) in GitLab 12.9.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/3708) in GitLab 12.9.
> - [Deprecated](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/78132) in GitLab 14.9.
WARNING:
This feature is in its end-of-life process. It is [deprecated](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/78132) for use in GitLab
14.8 and replaced with `artifacts:reports:coverage_report`.
The `cobertura` report collects [Cobertura coverage XML files](../../user/project/merge_requests/test_coverage_visualization.md).
The collected Cobertura coverage reports upload to GitLab as an artifact.
@ -93,6 +98,28 @@ GitLab can display the results of one or more reports in the merge request
Cobertura was originally developed for Java, but there are many third-party ports for other languages such as
JavaScript, Python, and Ruby.
## `artifacts:reports:coverage_report`
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/344533) in GitLab 14.9.
Use `coverage_report` to collect a coverage report in Cobertura format, similar to `artifacts:reports:cobertura`.
NOTE:
`artifacts:reports:coverage_report` cannot be used at the same time as `artifacts:reports:cobertura`.
```yaml
artifacts:
reports:
coverage_report:
coverage_format: cobertura
path: coverage/cobertura-coverage.xml
```
The collected coverage report is uploaded to GitLab as an artifact.
GitLab can display the results of the coverage report in the merge request
[diff annotations](../../user/project/merge_requests/test_coverage_visualization.md).
## `artifacts:reports:codequality`
> [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/212499) to GitLab Free in 13.2.

View File

@ -249,6 +249,67 @@ end
At this point, the setup phase is concluded. The deleted `projects` records should be automatically
picked up by the scheduled cleanup worker job.
### Remove the loose foreign key
When the loose foreign key definition is no longer needed (parent table is removed, or FK is restored),
we need to remove the definition from the YAML file and ensure that we don't leave pending deleted
records in the database.
1. Remove the loose foreign key definition from the config (`config/gitlab_loose_foreign_keys.yml`).
1. Remove the deletion tracking trigger from the parent table (if the parent table is still there).
1. Remove leftover deleted records from the `loose_foreign_keys_deleted_records` table.
Migration for removing the trigger:
```ruby
class UnTrackProjectRecordChanges < Gitlab::Database::Migration[1.0]
include Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers
enable_lock_retries!
def up
untrack_record_deletions(:projects)
end
def down
track_record_deletions(:projects)
end
end
```
Removing the trigger prevents further records from being inserted in the `loose_foreign_keys_deleted_records`
table. However, there may still be leftover pending records in the table. These records
must be removed with an inline data migration.
```ruby
class RemoveLeftoverProjectDeletions < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
def up
loop do
result = execute <<~SQL
DELETE FROM "loose_foreign_keys_deleted_records"
WHERE
("loose_foreign_keys_deleted_records"."partition", "loose_foreign_keys_deleted_records"."id") IN (
SELECT "loose_foreign_keys_deleted_records"."partition", "loose_foreign_keys_deleted_records"."id"
FROM "loose_foreign_keys_deleted_records"
WHERE
"loose_foreign_keys_deleted_records"."fully_qualified_table_name" = 'public.projects' AND
"loose_foreign_keys_deleted_records"."status" = 1
LIMIT 100
)
SQL
break if result.cmd_tuples == 0
end
end
def down
# no-op
end
end
```
## Testing
The "`it has loose foreign keys`" shared example can be used to test the presence of the `ON DELETE` trigger and the

View File

@ -9,13 +9,26 @@ info: To determine the technical writer assigned to the Stage/Group associated w
This page is for developers trying to better understand the history behind GitLab uploads and the
technical challenges associated with uploads.
## The problem description
## Problem description
[GitLab Workhorse](https://gitlab.com/gitlab-org/gitlab-workhorse) has special rules for handling uploads.
We process the upload in Workhorse to prevent occupying a Ruby process on I/O operations and because it is cheaper.
This process can also directly upload to object storage.
GitLab and [GitLab Workhorse](https://gitlab.com/gitlab-org/gitlab-workhorse) use special rules for handling file uploads,
because in an ordinary Rails application file uploads can become expensive as files grow in size.
Rails often sacrifices performance to provide a better developer experience, including how it handles
`multipart/form-data` uploads. In any Rack server, Rails applications included, when such a request arrives at the application server,
several things happen:
The following graph explains machine boundaries in a scalable GitLab installation. Without any Workhorse optimization in place, we can expect incoming requests to follow the numbers on the arrows.
1. A [Rack middleware](https://github.com/rack/rack/blob/main/lib/rack/multipart.rb) intercepts the request and parses the request body.
1. The middleware writes each file in the multipart request to a temporary directory on disk.
1. A `params` hash is constructed with entries pointing to the respective files on disk.
1. A Rails controller acts on the file contents.
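A minimal sketch of what that buffered upload looks like by the time it reaches a controller; the exact key names belong to Rack's multipart parser rather than to this commit, so treat them as an approximation:

```ruby
require "tempfile"

# The request body has already been written to a temporary file on local disk.
buffered = Tempfile.new("RackMultipart")
buffered.write("...file contents copied from the request body...")

# Roughly what the middleware hands to the controller for a file field named "file":
params = {
  "file" => {
    filename: "avatar.png",                          # client-side file name
    type:     "image/png",                           # Content-Type of the part
    name:     "file",                                # form field name
    tempfile: buffered,                              # disk-buffered contents the controller reads
    head:     "Content-Disposition: form-data; ..."  # raw part headers
  }
}
```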
While this is convenient for developers, it is costly for the Ruby server process to buffer large files on disk.
Because of Ruby's [global interpreter lock](https://en.wikipedia.org/wiki/Global_interpreter_lock),
only a single thread of execution of a given Ruby process can be on CPU. This means the amount of CPU
time spent doing this is not available to other worker threads serving user requests.
Buffering files to disk also means spending more time in I/O routines and mode switches, which are expensive operations.
The following diagram shows how GitLab handled such a request prior to putting optimizations in place.
```mermaid
graph TB
@ -45,13 +58,28 @@ graph TB
s-- "6 (read files)" -->nfs
```
We have three challenges here: performance, availability, and scalability.
We went through two major iterations of our uploads architecture to improve on these problems:
### Performance
1. [Moving disk buffering to Workhorse.](#moving-disk-buffering-to-workhorse)
1. [Uploading to Object Storage from Workhorse.](#moving-to-object-storage-and-direct-uploads)
Rails process are expensive in terms of both CPU and memory. Ruby [global interpreter lock](https://en.wikipedia.org/wiki/Global_interpreter_lock) adds to cost too because the Ruby process spends time on I/O operations on step 3 causing incoming requests to pile up.
### Moving disk buffering to Workhorse
In order to improve this, [disk buffered upload](implementation.md#disk-buffered-upload) was implemented. With this, Rails no longer deals with writing uploaded files to disk.
To address the performance issues resulting from buffering files in Ruby, we moved this logic to Workhorse instead,
our reverse proxy fronting the GitLab Rails application.
Workhorse is written in Go, and is much better at dealing with stream processing and I/O than Rails.
There are two parts to this implementation:
1. In Workhorse, a request handler detects `multipart/form-data` content in an incoming user request.
If such a request is detected, Workhorse hijacks the request body before forwarding it to Rails.
Workhorse writes all files to disk, rewrites the multipart form fields to point to the new locations, signs the
request, then forwards it to Rails.
1. In Rails, a [custom multipart Rack middleware](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/middleware/multipart.rb)
identifies any signed multipart requests coming from Workhorse and prepares the `params` hash Rails
would expect, now pointing to the files cached by Workhorse. This makes it a drop-in replacement for `Rack::Multipart`.
The diagram below shows how GitLab handles such a request today:
```mermaid
graph TB
@ -81,14 +109,46 @@ graph TB
s-- "6 (read files)" -->nfs
```
### Availability
While this "one-size-fits-all" solution greatly improves performance for multipart uploads without compromising
developer ergonomics, it severely limits GitLab [availability](#availability-challenges)
and [scalability](#scalability-challenges).
There's also an availability problem in this setup, NFS is a [single point of failure](https://en.wikipedia.org/wiki/Single_point_of_failure).
#### Availability challenges
To address this problem an HA object storage can be used and it's supported by [direct upload](implementation.md#direct-upload)
Moving file buffering to Workhorse addresses the immediate performance problems stemming from Ruby not being good at
handling large file uploads. However, a remaining issue of this solution is its reliance on attached storage,
whether via ordinary hard drives or network attached storage like NFS.
NFS is a [single point of failure](https://en.wikipedia.org/wiki/Single_point_of_failure), and is unsuitable for
deploying GitLab in highly available, cloud native environments.
### Scalability
#### Scalability challenges
Scaling NFS is outside of our support scope, and NFS is not a part of cloud native installations.
NFS is not a part of cloud native installations, such as those running in Kubernetes.
In Kubernetes, machine boundaries translate to pods, and without network-attached storage, disk-buffered uploads
must be written directly to the pod's file system.
All features that require Sidekiq and do not use direct upload doesn't work without NFS. In Kubernetes, machine boundaries translate to PODs, and in this case the uploaded file is written into the POD private disk. Since Sidekiq POD cannot reach into other pods, the operation fails to read it.
Using disk buffering presents us with a scalability challenge here. If Workhorse can only
write files to a pod's private file system, then these files are inaccessible outside of this particular pod.
With disk buffering, a Rails controller will accept a file upload and enqueue it for upload in a Sidekiq
background job. Therefore, Sidekiq requires access to these files.
However, in a cloud native environment all Sidekiq instances run on separate pods, so they are
not able to access files buffered to disk on a web server pod.
Therefore, all features that involve Sidekiq uploading disk-buffered files severely limit the scalability of GitLab.
## Moving to object storage and direct uploads
To address these availability and scalability problems,
instead of buffering files to disk, we have added support for uploading files directly
from Workhorse to a given destination. While it remains possible to upload to local or network-attached storage
this way, you should use a highly available
[object store](https://en.wikipedia.org/wiki/Object_storage),
such as AWS S3, Google GCS, or Azure, for scalability reasons.
With direct uploads, Workhorse does not buffer files to disk. Instead, it first authorizes the request with
the Rails application to find out where to upload it, then streams the file directly to its ultimate destination.
To learn more about how disk buffering and direct uploads are implemented, see:
- [How uploads work technically](implementation.md)
- [Adding new uploads](working_with_uploads.md)

View File

@ -28,7 +28,7 @@ between pipeline completion and the visualization loading on the page.
For the coverage analysis to work, you have to provide a properly formatted
[Cobertura XML](https://cobertura.github.io/cobertura/) report to
[`artifacts:reports:cobertura`](../../../ci/yaml/artifacts_reports.md#artifactsreportscobertura).
[`artifacts:reports:cobertura`](../../../ci/yaml/artifacts_reports.md#artifactsreportscobertura-deprecated).
This format was originally developed for Java, but most coverage analysis frameworks
for other languages have plugins to add support for it, like:

View File

@ -6,7 +6,7 @@ module API
module JobRequest
class Artifacts < Grape::Entity
expose :name
expose :untracked
expose :untracked, expose_nil: false
expose :paths
expose :exclude, expose_nil: false
expose :when

View File

@ -36,7 +36,7 @@ module Gitlab
# the pattern matching does not work for merge requests pipelines
if pipeline.branch? || pipeline.tag?
regexp = Gitlab::UntrustedRegexp::RubySyntax
.fabricate(pattern, fallback: true, project: pipeline.project)
.fabricate(pattern, project: pipeline.project)
if regexp
regexp.match?(pipeline.ref)

View File

@ -17,7 +17,7 @@ module Gitlab
include ::Gitlab::Config::Entry::Validatable
validations do
validates :config, array_of_strings_or_regexps_with_fallback: true
validates :config, array_of_strings_or_regexps: true
end
def value
@ -38,7 +38,7 @@ module Gitlab
validate :variables_expressions_syntax
with_options allow_nil: true do
validates :refs, array_of_strings_or_regexps_with_fallback: true
validates :refs, array_of_strings_or_regexps: true
validates :kubernetes, allowed_values: %w[active]
validates :variables, array_of_strings: true
validates :changes, array_of_strings: true

View File

@ -8,6 +8,7 @@ module Gitlab
# Entry that represents a configuration of job artifacts.
#
class Reports < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Configurable
include ::Gitlab::Config::Entry::Validatable
include ::Gitlab::Config::Entry::Attributable
@ -15,10 +16,13 @@ module Gitlab
%i[junit codequality sast secret_detection dependency_scanning container_scanning
dast performance browser_performance load_performance license_scanning metrics lsif
dotenv cobertura terraform accessibility cluster_applications
requirements coverage_fuzzing api_fuzzing cluster_image_scanning].freeze
requirements coverage_fuzzing api_fuzzing cluster_image_scanning
coverage_report].freeze
attributes ALLOWED_KEYS
entry :coverage_report, Reports::CoverageReport, description: 'Coverage report configuration.'
validations do
validates :config, type: Hash
validates :config, allowed_keys: ALLOWED_KEYS
@ -47,10 +51,18 @@ module Gitlab
validates :cluster_applications, array_of_strings_or_string: true # DEPRECATED: https://gitlab.com/gitlab-org/gitlab/-/issues/333441
validates :requirements, array_of_strings_or_string: true
end
validates :config, mutually_exclusive_keys: [:coverage_report, :cobertura]
end
def value
@config.transform_values { |v| Array(v) }
@config.transform_values do |value|
if value.is_a?(Hash)
value
else
Array(value)
end
end
end
end
end
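The effect of the updated `#value` mirrors the entry spec added later in this commit: hash-valued entries such as `coverage_report` pass through untouched, while everything else is still wrapped in an array.

```ruby
config = {
  coverage_report: { coverage_format: 'cobertura', path: 'coverage/cobertura.xml' },
  dast: 'gl-dast-report.json'
}

config.transform_values { |value| value.is_a?(Hash) ? value : Array(value) }
# => { coverage_report: { coverage_format: 'cobertura', path: 'coverage/cobertura.xml' },
#      dast: ['gl-dast-report.json'] }
```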

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module Entry
class Reports
class CoverageReport < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Validatable
include ::Gitlab::Config::Entry::Attributable
ALLOWED_KEYS = %i[coverage_format path].freeze
SUPPORTED_COVERAGE = %w[cobertura].freeze
attributes ALLOWED_KEYS
validations do
validates :config, type: Hash
validates :config, allowed_keys: ALLOWED_KEYS
with_options(presence: true) do
validates :coverage_format, inclusion: { in: SUPPORTED_COVERAGE, message: "must be one of supported formats: #{SUPPORTED_COVERAGE.join(', ')}." }
validates :path, type: String
end
end
end
end
end
end
end
end
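Based on the validations above (and the matching entry spec further down), the new entry accepts a Cobertura configuration and rejects unsupported formats:

```ruby
entry = Gitlab::Ci::Config::Entry::Reports::CoverageReport.new(
  coverage_format: 'cobertura', path: 'coverage/cobertura.xml'
)
entry.valid? # => true
entry.value  # => { coverage_format: 'cobertura', path: 'coverage/cobertura.xml' }

entry = Gitlab::Ci::Config::Entry::Reports::CoverageReport.new(
  coverage_format: 'jacoco', path: 'jacoco.xml'
)
entry.valid? # => false, the errors point at the supported formats (currently only cobertura)
```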

View File

@ -1,5 +1,5 @@
variables:
DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.17.0'
DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.22.0'
.dast-auto-deploy:
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:${DAST_AUTO_DEPLOY_IMAGE_VERSION}"

View File

@ -1,5 +1,5 @@
variables:
AUTO_DEPLOY_IMAGE_VERSION: 'v2.18.1'
AUTO_DEPLOY_IMAGE_VERSION: 'v2.22.0'
.auto-deploy:
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}"

View File

@ -1,5 +1,5 @@
variables:
AUTO_DEPLOY_IMAGE_VERSION: 'v2.18.1'
AUTO_DEPLOY_IMAGE_VERSION: 'v2.22.0'
.auto-deploy:
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}"

View File

@ -39,6 +39,17 @@ module Gitlab
end
end
class MutuallyExclusiveKeysValidator < ActiveModel::EachValidator
def validate_each(record, attribute, value)
mutually_exclusive_keys = value.try(:keys).to_a & options[:in]
if mutually_exclusive_keys.length > 1
record.errors.add(attribute, "please use only one of the following keys: " +
mutually_exclusive_keys.join(', '))
end
end
end
class AllowedValuesValidator < ActiveModel::EachValidator
def validate_each(record, attribute, value)
unless options[:in].include?(value.to_s)
@ -217,12 +228,6 @@ module Gitlab
end
end
protected
def fallback
false
end
private
def matches_syntax?(value)
@ -231,7 +236,7 @@ module Gitlab
def validate_regexp(value)
matches_syntax?(value) &&
Gitlab::UntrustedRegexp::RubySyntax.valid?(value, fallback: fallback)
Gitlab::UntrustedRegexp::RubySyntax.valid?(value)
end
end
@ -260,27 +265,6 @@ module Gitlab
end
end
class ArrayOfStringsOrRegexpsWithFallbackValidator < ArrayOfStringsOrRegexpsValidator
protected
# TODO
#
# Remove ArrayOfStringsOrRegexpsWithFallbackValidator class too when
# you are removing the `:allow_unsafe_ruby_regexp` feature flag.
#
def validation_message
if ::Feature.enabled?(:allow_unsafe_ruby_regexp, default_enabled: :yaml)
'should be an array of strings or regular expressions'
else
super
end
end
def fallback
true
end
end
class ArrayOfStringsOrStringValidator < RegexpValidator
def validate_each(record, attribute, value)
unless validate_array_of_strings_or_string(value)

View File

@ -383,7 +383,7 @@ module Gitlab
private
def diffable_by_attribute?
repository.attributes(file_path).fetch('diff') { true }
repository.attributes(file_path).fetch('diff', true)
end
# NOTE: Files with unsupported encodings (e.g. UTF-16) are treated as binary by git, but they are recognized as text files during encoding detection. These files have `Binary files a/filename and b/filename differ' as their raw diff content which cannot be used. We need to handle this special case and avoid displaying incorrect diff.

View File

@ -61,6 +61,16 @@ module Gitlab
def self.with_fallback(pattern, multiline: false)
UntrustedRegexp.new(pattern, multiline: multiline)
rescue RegexpError
raise if Feature.enabled?(:disable_unsafe_regexp, default_enabled: :yaml)
if Feature.enabled?(:ci_unsafe_regexp_logger, type: :ops, default_enabled: :yaml)
Gitlab::AppJsonLogger.info(
class: self.name,
regexp: pattern.to_s,
fabricated: 'unsafe ruby regexp'
)
end
Regexp.new(pattern)
end
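A sketch of how the new `disable_unsafe_regexp` flag changes `with_fallback` for a pattern RE2 cannot compile (flag defaults are shown in the feature-flag definition earlier in this commit):

```ruby
# RE2 compiles this pattern, so the rescue never runs:
Gitlab::UntrustedRegexp.with_fallback('^feature-.*')    # => #<Gitlab::UntrustedRegexp ...>

# RE2 rejects lookahead. With :disable_unsafe_regexp enabled the RegexpError is re-raised;
# with the flag disabled (its default), the call optionally logs via :ci_unsafe_regexp_logger
# and falls back to a plain Ruby Regexp:
Gitlab::UntrustedRegexp.with_fallback('^(?!master).+')  # => /^(?!master).+/ or RegexpError
```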

View File

@ -16,40 +16,23 @@ module Gitlab
# The regexp can match the pattern `/.../`, but may not be fabricatable:
# it can be invalid or incomplete: `/match ( string/`
def self.valid?(pattern, fallback: false)
!!self.fabricate(pattern, fallback: fallback)
def self.valid?(pattern)
!!self.fabricate(pattern)
end
def self.fabricate(pattern, fallback: false, project: nil)
self.fabricate!(pattern, fallback: fallback, project: project)
def self.fabricate(pattern, project: nil)
self.fabricate!(pattern, project: project)
rescue RegexpError
nil
end
def self.fabricate!(pattern, fallback: false, project: nil)
def self.fabricate!(pattern, project: nil)
raise RegexpError, 'Pattern is not string!' unless pattern.is_a?(String)
matches = pattern.match(PATTERN)
raise RegexpError, 'Invalid regular expression!' if matches.nil?
begin
create_untrusted_regexp(matches[:regexp], matches[:flags])
rescue RegexpError
raise unless fallback &&
Feature.enabled?(:allow_unsafe_ruby_regexp, default_enabled: :yaml)
if Feature.enabled?(:ci_unsafe_regexp_logger, type: :ops, default_enabled: :yaml)
Gitlab::AppJsonLogger.info(
class: self.name,
regexp: pattern.to_s,
fabricated: 'unsafe ruby regexp',
project_id: project&.id,
project_path: project&.full_path
)
end
create_ruby_regexp(matches[:regexp], matches[:flags])
end
create_untrusted_regexp(matches[:regexp], matches[:flags])
end
def self.create_untrusted_regexp(pattern, flags)
@ -58,15 +41,6 @@ module Gitlab
UntrustedRegexp.new(pattern, multiline: false)
end
private_class_method :create_untrusted_regexp
def self.create_ruby_regexp(pattern, flags)
options = 0
options += Regexp::IGNORECASE if flags&.include?('i')
options += Regexp::MULTILINE if flags&.include?('m')
Regexp.new(pattern, options)
end
private_class_method :create_ruby_regexp
end
end
end
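With the fallback removed, `fabricate` has only two outcomes: it returns an RE2-backed `Gitlab::UntrustedRegexp` or `nil` (while `fabricate!` raises instead of returning `nil`). A rough usage sketch:

```ruby
Gitlab::UntrustedRegexp::RubySyntax.fabricate('/^feature-.*/i')  # => #<Gitlab::UntrustedRegexp ...>
Gitlab::UntrustedRegexp::RubySyntax.fabricate('some .* thing')   # => nil (not in /.../ form)
Gitlab::UntrustedRegexp::RubySyntax.fabricate('/^(?!master).+/') # => nil (RE2 has no lookahead)
Gitlab::UntrustedRegexp::RubySyntax.valid?('/^(?!master).+/')    # => false
```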

View File

@ -27614,7 +27614,7 @@ msgstr ""
msgid "Potentially unwanted character detected: Unicode BiDi Control"
msgstr ""
msgid "Pre-defined push rules."
msgid "Pre-defined push rules"
msgstr ""
msgid "Preferences"
@ -29900,6 +29900,9 @@ msgstr ""
msgid "Push project from command line"
msgstr ""
msgid "Push rules"
msgstr ""
msgid "Push the result of the merge to GitLab"
msgstr ""

View File

@ -58,6 +58,16 @@ module QA
artifacts:
paths:
- my-artifacts/
test-coverage-report:
tags:
- #{executor}
script: mkdir coverage; echo "CONTENTS" > coverage/cobertura.xml
artifacts:
reports:
coverage_report:
coverage_format: cobertura
path: coverage/cobertura.xml
YAML
}
]
@ -71,7 +81,8 @@ module QA
'test-success': 'passed',
'test-failure': 'failed',
'test-tags-mismatch': 'pending',
'test-artifacts': 'passed'
'test-artifacts': 'passed',
'test-coverage-report': 'passed'
}.each do |job, status|
Page::Project::Pipeline::Show.perform do |pipeline|
pipeline.click_job(job)

View File

@ -21,16 +21,19 @@ module RuboCop
# via `Enabled: false` in .rubocop_todo.yml or .rubocop_todo/.
MAX_OFFENSE_COUNT = 15
Todo = Struct.new(:cop_name, :files, :offense_count) do
def initialize(cop_name)
super(cop_name, Set.new, 0)
class Todo
attr_reader :cop_name, :files, :offense_count
def initialize(cop_name)
@cop_name = cop_name
@files = Set.new
@offense_count = 0
@cop_class = RuboCop::Cop::Registry.global.find_by_cop_name(cop_name)
end
def record(file, offense_count)
files << file
self.offense_count += offense_count
@files << file
@offense_count += offense_count
end
def autocorrectable?

View File

@ -222,6 +222,166 @@ RSpec.describe Projects::ReleasesController do
end
end
describe 'GET #latest_permalink' do
# Uses default order_by=released_at parameter.
subject do
get :latest_permalink, params: { namespace_id: project.namespace, project_id: project }
end
before do
sign_in(user)
end
let(:release) { create(:release, project: project) }
let(:tag) { CGI.escape(release.tag) }
context 'when user is a guest' do
let(:project) { private_project }
let(:user) { guest }
it 'proceeds with the redirect' do
subject
expect(response).to have_gitlab_http_status(:redirect)
end
end
context 'when user is an external user for the project' do
let(:project) { private_project }
let(:user) { create(:user) }
it 'behaves like not found' do
subject
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when there are no releases for the project' do
let(:project) { create(:project, :repository, :public) }
let(:user) { developer }
before do
project.releases.destroy_all # rubocop: disable Cop/DestroyAll
end
it 'behaves like not found' do
subject
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'multiple releases' do
let(:user) { developer }
it 'redirects to the latest release' do
create(:release, project: project, released_at: 1.day.ago)
latest_release = create(:release, project: project, released_at: Time.current)
subject
expect(response).to redirect_to("#{project_releases_path(project)}/#{latest_release.tag}")
end
end
context 'suffix path redirection' do
let(:user) { developer }
let(:suffix_path) { 'downloads/zips/helm-hello-world.zip' }
let!(:latest_release) { create(:release, project: project, released_at: Time.current) }
subject do
get :latest_permalink, params: {
namespace_id: project.namespace,
project_id: project,
suffix_path: suffix_path
}
end
it 'redirects to the latest release with suffix path and format' do
subject
expect(response).to redirect_to(
"#{project_releases_path(project)}/#{latest_release.tag}/#{suffix_path}")
end
context 'suffix path abuse' do
let(:suffix_path) { 'downloads/zips/../../../../../../../robots.txt' }
it 'raises attack error' do
expect do
subject
end.to raise_error(Gitlab::Utils::PathTraversalAttackError)
end
end
context 'url parameters' do
let(:suffix_path) { 'downloads/zips/helm-hello-world.zip' }
subject do
get :latest_permalink, params: {
namespace_id: project.namespace,
project_id: project,
suffix_path: suffix_path,
order_by: 'released_at',
param_1: 1,
param_2: 2
}
end
it 'carries over query parameters without order_by parameter in the redirect' do
subject
expect(response).to redirect_to(
"#{project_releases_path(project)}/#{latest_release.tag}/#{suffix_path}?param_1=1&param_2=2")
end
end
end
context 'order_by parameter' do
let!(:latest_release) { create(:release, project: project, released_at: Time.current) }
shared_examples_for 'redirects to latest release ordered by using released_at' do
it do
subject
expect(response).to redirect_to("#{project_releases_path(project)}/#{latest_release.tag}")
end
end
before do
create(:release, project: project, released_at: 1.day.ago)
create(:release, project: project, released_at: 2.days.ago)
end
context 'invalid parameter' do
let(:user) { developer }
subject do
get :latest_permalink, params: {
namespace_id: project.namespace,
project_id: project,
order_by: 'unsupported'
}
end
it_behaves_like 'redirects to latest release ordered by using released_at'
end
context 'valid parameter' do
subject do
get :latest_permalink, params: {
namespace_id: project.namespace,
project_id: project,
order_by: 'released_at'
}
end
it_behaves_like 'redirects to latest release ordered by using released_at'
end
end
end
# `GET #downloads` is addressed in spec/requests/projects/releases_controller_spec.rb
private

View File

@ -497,6 +497,22 @@ FactoryBot.define do
options { {} }
end
trait :coverage_report_cobertura do
options do
{
artifacts: {
expire_in: '7d',
reports: {
coverage_report: {
coverage_format: 'cobertura',
path: 'cobertura.xml'
}
}
}
}
end
end
# TODO: move Security traits to ee_ci_build
# https://gitlab.com/gitlab-org/gitlab/-/issues/210486
trait :dast do

View File

@ -149,26 +149,9 @@ RSpec.describe Gitlab::Ci::Build::Policy::Refs do
context 'when unsafe regexp is used' do
let(:subject) { described_class.new(['/^(?!master).+/']) }
context 'when allow_unsafe_ruby_regexp is disabled' do
before do
stub_feature_flags(allow_unsafe_ruby_regexp: false)
end
it 'ignores invalid regexp' do
expect(subject)
.not_to be_satisfied_by(pipeline)
end
end
context 'when allow_unsafe_ruby_regexp is enabled' do
before do
stub_feature_flags(allow_unsafe_ruby_regexp: true)
end
it 'is satisfied by regexp' do
expect(subject)
.to be_satisfied_by(pipeline)
end
it 'ignores invalid regexp' do
expect(subject)
.not_to be_satisfied_by(pipeline)
end
end
end

View File

@ -59,9 +59,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule do
context 'when using an if: clause with lookahead regex character "?"' do
let(:config) { { if: '$CI_COMMIT_REF =~ /^(?!master).+/' } }
context 'when allow_unsafe_ruby_regexp is disabled' do
it_behaves_like 'an invalid config', /invalid expression syntax/
end
it_behaves_like 'an invalid config', /invalid expression syntax/
end
context 'when specifying unknown policy' do

View File

@ -1,6 +1,6 @@
# frozen_string_literal: true
require 'spec_helper'
require 'fast_spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Policy do
let(:entry) { described_class.new(config) }
@ -45,29 +45,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Policy do
end
context 'when using unsafe regexp' do
# When removed we could use `require 'fast_spec_helper'` again.
include StubFeatureFlags
let(:config) { ['/^(?!master).+/'] }
context 'when allow_unsafe_ruby_regexp is disabled' do
before do
stub_feature_flags(allow_unsafe_ruby_regexp: false)
end
it 'is not valid' do
expect(entry).not_to be_valid
end
end
context 'when allow_unsafe_ruby_regexp is enabled' do
before do
stub_feature_flags(allow_unsafe_ruby_regexp: true)
end
it 'is valid' do
expect(entry).to be_valid
end
it 'is not valid' do
expect(entry).not_to be_valid
end
end
@ -106,29 +87,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Policy do
end
context 'when using unsafe regexp' do
# When removed we could use `require 'fast_spec_helper'` again.
include StubFeatureFlags
let(:config) { { refs: ['/^(?!master).+/'] } }
context 'when allow_unsafe_ruby_regexp is disabled' do
before do
stub_feature_flags(allow_unsafe_ruby_regexp: false)
end
it 'is not valid' do
expect(entry).not_to be_valid
end
end
context 'when allow_unsafe_ruby_regexp is enabled' do
before do
stub_feature_flags(allow_unsafe_ruby_regexp: true)
end
it 'is valid' do
expect(entry).to be_valid
end
it 'is not valid' do
expect(entry).not_to be_valid
end
end

View File

@ -0,0 +1,57 @@
# frozen_string_literal: true
require 'fast_spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Reports::CoverageReport do
let(:entry) { described_class.new(config) }
describe 'validations' do
context 'when it is valid' do
let(:config) { { coverage_format: 'cobertura', path: 'cobertura-coverage.xml' } }
it { expect(entry).to be_valid }
it { expect(entry.value).to eq(config) }
end
context 'with unsupported coverage format' do
let(:config) { { coverage_format: 'jacoco', path: 'jacoco.xml' } }
it { expect(entry).not_to be_valid }
it { expect(entry.errors).to include /format must be one of supported formats/ }
end
context 'without coverage format' do
let(:config) { { path: 'cobertura-coverage.xml' } }
it { expect(entry).not_to be_valid }
it { expect(entry.errors).to include /format can't be blank/ }
end
context 'without path' do
let(:config) { { coverage_format: 'cobertura' } }
it { expect(entry).not_to be_valid }
it { expect(entry.errors).to include /path can't be blank/ }
end
context 'with invalid path' do
let(:config) { { coverage_format: 'cobertura', path: 123 } }
it { expect(entry).not_to be_valid }
it { expect(entry.errors).to include /path should be a string/ }
end
context 'with unknown keys' do
let(:config) { { coverage_format: 'cobertura', path: 'cobertura-coverage.xml', foo: :bar } }
it { expect(entry).not_to be_valid }
it { expect(entry.errors).to include /contains unknown keys/ }
end
end
end

View File

@ -6,12 +6,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports do
let(:entry) { described_class.new(config) }
describe 'validates ALLOWED_KEYS' do
let(:artifact_file_types) { Ci::JobArtifact.file_types }
described_class::ALLOWED_KEYS.each do |keyword, _|
it "expects #{keyword} to be an artifact file_type" do
expect(artifact_file_types).to include(keyword)
end
it "expects ALLOWED_KEYS to be an artifact file_type or coverage_report" do
expect(Ci::JobArtifact.file_types.keys.map(&:to_sym) + [:coverage_report]).to include(*described_class::ALLOWED_KEYS)
end
end
@ -68,6 +64,45 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports do
it_behaves_like 'a valid entry', params[:keyword], params[:file]
end
end
context 'when coverage_report is specified' do
let(:coverage_format) { :cobertura }
let(:filename) { 'cobertura-coverage.xml' }
let(:coverage_report) { { path: filename, coverage_format: coverage_format } }
let(:config) { { coverage_report: coverage_report } }
it 'is valid' do
expect(entry).to be_valid
end
it 'returns artifacts configuration' do
expect(entry.value).to eq(config)
end
context 'and another report is specified' do
let(:config) { { coverage_report: coverage_report, dast: 'gl-dast-report.json' } }
it 'is valid' do
expect(entry).to be_valid
end
it 'returns artifacts configuration' do
expect(entry.value).to eq({ coverage_report: coverage_report, dast: ['gl-dast-report.json'] })
end
end
context 'and a direct coverage report format is specified' do
let(:config) { { coverage_report: coverage_report, cobertura: 'cobertura-coverage.xml' } }
it 'is not valid' do
expect(entry).not_to be_valid
end
it 'reports error' do
expect(entry.errors).to include /please use only one of the following keys: coverage_report, cobertura/
end
end
end
end
context 'when entry value is not correct' do

View File

@ -92,12 +92,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
context 'when using an if: clause with lookahead regex character "?"' do
let(:config) { { if: '$CI_COMMIT_REF =~ /^(?!master).+/' } }
context 'when allow_unsafe_ruby_regexp is disabled' do
it { is_expected.not_to be_valid }
it { is_expected.not_to be_valid }
it 'reports an error about invalid expression syntax' do
expect(subject.errors).to include(/invalid expression syntax/)
end
it 'reports an error about invalid expression syntax' do
expect(subject.errors).to include(/invalid expression syntax/)
end
end

View File

@ -9,10 +9,6 @@ module Gitlab
subject { described_class.new(config, user: nil).execute }
before do
stub_feature_flags(allow_unsafe_ruby_regexp: false)
end
shared_examples 'returns errors' do |error_message|
it 'adds a message when an error is encountered' do
expect(subject.errors).to include(error_message)

View File

@ -0,0 +1,43 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Config::Entry::Validators do
let(:klass) do
Class.new do
include ActiveModel::Validations
include Gitlab::Config::Entry::Validators
end
end
let(:instance) { klass.new }
describe described_class::MutuallyExclusiveKeysValidator do
using RSpec::Parameterized::TableSyntax
before do
klass.instance_eval do
validates :config, mutually_exclusive_keys: [:foo, :bar]
end
allow(instance).to receive(:config).and_return(config)
end
where(:context, :config, :valid_result) do
'with mutually exclusive keys' | { foo: 1, bar: 2 } | false
'without mutually exclusive keys' | { foo: 1 } | true
'without mutually exclusive keys' | { bar: 1 } | true
'with other keys' | { foo: 1, baz: 2 } | true
end
with_them do
it 'validates the instance' do
expect(instance.valid?).to be(valid_result)
unless valid_result
expect(instance.errors.messages_for(:config)).to include /please use only one of the following keys: foo, bar/
end
end
end
end
end

View File

@ -1,4 +1,5 @@
# frozen_string_literal: true
# rubocop:disable Style/RedundantFetchBlock
require 'spec_helper'
@ -547,3 +548,4 @@ RSpec.describe Gitlab::JsonCache do
end
end
end
# rubocop:enable Style/RedundantFetchBlock

View File

@ -1,4 +1,5 @@
# frozen_string_literal: true
# rubocop:disable Style/RedundantFetchBlock
require 'spec_helper'
@ -84,3 +85,4 @@ RSpec.describe Gitlab::Metrics::Dashboard::Cache, :use_clean_rails_memory_store_
end
end
end
# rubocop:enable Style/RedundantFetchBlock

View File

@ -49,7 +49,7 @@ RSpec.describe Gitlab::NullRequestStore do
describe '#fetch' do
it 'returns the block result' do
expect(null_store.fetch('key') { 'block result' }).to eq('block result')
expect(null_store.fetch('key') { 'block result' }).to eq('block result') # rubocop:disable Style/RedundantFetchBlock
end
end

View File

@ -183,7 +183,7 @@ RSpec.describe Gitlab::SafeRequestStore do
context 'when RequestStore is active', :request_store do
it 'uses RequestStore' do
expect do
described_class.fetch('foo') { 'block result' }
described_class.fetch('foo') { 'block result' } # rubocop:disable Style/RedundantFetchBlock
end.to change { described_class.read('foo') }.from(nil).to('block result')
end
end
@ -193,7 +193,7 @@ RSpec.describe Gitlab::SafeRequestStore do
RequestStore.clear! # Ensure clean
expect do
described_class.fetch('foo') { 'block result' }
described_class.fetch('foo') { 'block result' } # rubocop:disable Style/RedundantFetchBlock
end.not_to change { described_class.read('foo') }.from(nil)
RequestStore.clear! # Clean up

View File

@ -1,6 +1,6 @@
# frozen_string_literal: true
require 'spec_helper'
require 'fast_spec_helper'
RSpec.describe Gitlab::UntrustedRegexp::RubySyntax do
describe '.matches_syntax?' do
@ -71,44 +71,6 @@ RSpec.describe Gitlab::UntrustedRegexp::RubySyntax do
end
end
context 'when unsafe regexp is used' do
include StubFeatureFlags
before do
# When removed we could use `require 'fast_spec_helper'` again.
stub_feature_flags(allow_unsafe_ruby_regexp: true)
allow(Gitlab::UntrustedRegexp).to receive(:new).and_raise(RegexpError)
end
context 'when no fallback is enabled' do
it 'raises an exception' do
expect { described_class.fabricate!('/something/') }
.to raise_error(RegexpError)
end
end
context 'when fallback is used' do
it 'fabricates regexp with a single flag' do
regexp = described_class.fabricate!('/something/i', fallback: true)
expect(regexp).to eq Regexp.new('something', Regexp::IGNORECASE)
end
it 'fabricates regexp with multiple flags' do
regexp = described_class.fabricate!('/something/im', fallback: true)
expect(regexp).to eq Regexp.new('something', Regexp::IGNORECASE | Regexp::MULTILINE)
end
it 'fabricates regexp without flags' do
regexp = described_class.fabricate!('/something/', fallback: true)
expect(regexp).to eq Regexp.new('something')
end
end
end
context 'when regexp is a raw pattern' do
it 'raises an error' do
expect { described_class.fabricate!('some .* thing') }

View File

@ -0,0 +1,43 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe RemoveLeftoverExternalPullRequestDeletions do
let(:deleted_records) { table(:loose_foreign_keys_deleted_records) }
let(:pending_record1) { deleted_records.create!(id: 1, fully_qualified_table_name: 'public.external_pull_requests', primary_key_value: 1, status: 1) }
let(:pending_record2) { deleted_records.create!(id: 2, fully_qualified_table_name: 'public.external_pull_requests', primary_key_value: 2, status: 1) }
let(:other_pending_record1) { deleted_records.create!(id: 3, fully_qualified_table_name: 'public.projects', primary_key_value: 1, status: 1) }
let(:other_pending_record2) { deleted_records.create!(id: 4, fully_qualified_table_name: 'public.ci_builds', primary_key_value: 1, status: 1) }
let(:processed_record1) { deleted_records.create!(id: 5, fully_qualified_table_name: 'public.external_pull_requests', primary_key_value: 3, status: 2) }
let(:other_processed_record1) { deleted_records.create!(id: 6, fully_qualified_table_name: 'public.ci_builds', primary_key_value: 2, status: 2) }
let!(:persisted_ids_before) do
[
pending_record1,
pending_record2,
other_pending_record1,
other_pending_record2,
processed_record1,
other_processed_record1
].map(&:id).sort
end
let!(:persisted_ids_after) do
[
other_pending_record1,
other_pending_record2,
processed_record1,
other_processed_record1
].map(&:id).sort
end
def all_ids
deleted_records.all.map(&:id).sort
end
it 'deletes pending external_pull_requests records' do
expect { migrate! }.to change { all_ids }.from(persisted_ids_before).to(persisted_ids_after)
end
end

View File

@ -78,16 +78,72 @@ RSpec.describe Ci::BuildRunnerPresenter do
artifact_format: Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS.fetch(file_type),
paths: [filename],
when: 'always'
}
}.compact
end
it 'presents correct hash' do
expect(presenter.artifacts.first).to include(report_expectation)
expect(presenter.artifacts).to contain_exactly(report_expectation)
end
end
end
end
context 'when a specific coverage_report type is given' do
let(:coverage_format) { :cobertura }
let(:filename) { 'cobertura-coverage.xml' }
let(:coverage_report) { { path: filename, coverage_format: coverage_format } }
let(:report) { { coverage_report: coverage_report } }
let(:build) { create(:ci_build, options: { artifacts: { reports: report } }) }
let(:expected_coverage_report) do
{
name: filename,
artifact_type: coverage_format,
artifact_format: Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS.fetch(coverage_format),
paths: [filename],
when: 'always'
}
end
it 'presents the coverage report hash with the coverage format' do
expect(presenter.artifacts).to contain_exactly(expected_coverage_report)
end
end
context 'when a specific coverage_report type is given with another report type' do
let(:coverage_format) { :cobertura }
let(:coverage_filename) { 'cobertura-coverage.xml' }
let(:coverage_report) { { path: coverage_filename, coverage_format: coverage_format } }
let(:ds_filename) { 'gl-dependency-scanning-report.json' }
let(:report) { { coverage_report: coverage_report, dependency_scanning: [ds_filename] } }
let(:build) { create(:ci_build, options: { artifacts: { reports: report } }) }
let(:expected_coverage_report) do
{
name: coverage_filename,
artifact_type: coverage_format,
artifact_format: Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS.fetch(coverage_format),
paths: [coverage_filename],
when: 'always'
}
end
let(:expected_ds_report) do
{
name: ds_filename,
artifact_type: :dependency_scanning,
artifact_format: Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS.fetch(:dependency_scanning),
paths: [ds_filename],
when: 'always'
}
end
it 'presents both reports' do
expect(presenter.artifacts).to contain_exactly(expected_coverage_report, expected_ds_report)
end
end
context "when option has both archive and reports specification" do
let(:report) { { junit: ['junit.xml'] } }
let(:build) { create(:ci_build, options: { script: 'echo', artifacts: { **archive, reports: report } }) }

View File

@ -611,6 +611,40 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
end
context 'when job has code coverage report' do
let(:job) do
create(:ci_build, :pending, :queued, :coverage_report_cobertura,
pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
end
let(:expected_artifacts) do
[
{
'name' => 'cobertura-coverage.xml',
'paths' => ['cobertura.xml'],
'when' => 'always',
'expire_in' => '7d',
"artifact_type" => "cobertura",
"artifact_format" => "gzip"
}
]
end
it 'returns job with the correct artifact specification' do
request_job info: { platform: :darwin, features: { upload_multiple_artifacts: true } }
expect(response).to have_gitlab_http_status(:created)
expect(response.headers['Content-Type']).to eq('application/json')
expect(response.headers).not_to have_key('X-GitLab-Last-Update')
expect(runner.reload.platform).to eq('darwin')
expect(json_response['id']).to eq(job.id)
expect(json_response['token']).to eq(job.token)
expect(json_response['job_info']).to eq(expected_job_info)
expect(json_response['git_info']).to eq(expected_git_info)
expect(json_response['artifacts']).to eq(expected_artifacts)
end
end
context 'when triggered job is available' do
let(:expected_variables) do
[{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true, 'masked' => false },

View File

@ -680,6 +680,32 @@ RSpec.describe 'project routing' do
end
end
describe Projects::ReleasesController, 'routing' do
it 'to #latest_permalink with a valid permalink path' do
expect(get('/gitlab/gitlabhq/-/releases/permalink/latest/downloads/release-binary.zip')).to route_to(
'projects/releases#latest_permalink',
namespace_id: 'gitlab',
project_id: 'gitlabhq',
suffix_path: 'downloads/release-binary.zip'
)
expect(get('/gitlab/gitlabhq/-/releases/permalink/latest')).to route_to(
'projects/releases#latest_permalink',
namespace_id: 'gitlab',
project_id: 'gitlabhq'
)
end
it 'to #show for the release with tag named permalink' do
expect(get('/gitlab/gitlabhq/-/releases/permalink')).to route_to(
'projects/releases#show',
namespace_id: 'gitlab',
project_id: 'gitlabhq',
tag: 'permalink'
)
end
end
describe Projects::Registry::TagsController, 'routing' do
describe '#destroy' do
it 'correctly routes to a destroy action' do

View File

@ -0,0 +1,67 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::CreatePipelineService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
let(:ref) { 'refs/heads/master' }
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
let(:pipeline) { service.execute(source).payload }
describe 'artifacts:' do
before do
stub_ci_pipeline_yaml_file(config)
allow_next_instance_of(Ci::BuildScheduleWorker) do |instance|
allow(instance).to receive(:perform).and_return(true)
end
end
describe 'reports:' do
context 'with valid config' do
let(:config) do
<<~YAML
test-job:
script: "echo 'hello world' > cobertura.xml"
artifacts:
reports:
coverage_report:
coverage_format: 'cobertura'
path: 'cobertura.xml'
dependency-scanning-job:
script: "echo 'hello world' > gl-dependency-scanning-report.json"
artifacts:
reports:
dependency_scanning: 'gl-dependency-scanning-report.json'
YAML
end
it 'creates pipeline with builds' do
expect(pipeline).to be_persisted
expect(pipeline).not_to have_yaml_errors
expect(pipeline.builds.pluck(:name)).to contain_exactly('test-job', 'dependency-scanning-job')
end
end
context 'with invalid config' do
let(:config) do
<<~YAML
test-job:
script: "echo 'hello world' > cobertura.xml"
artifacts:
reports:
foo: 'bar'
YAML
end
it 'creates pipeline with yaml errors' do
expect(pipeline).to be_persisted
expect(pipeline).to have_yaml_errors
end
end
end
end
end