Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-06-22 09:07:12 +00:00
parent be3e27e39d
commit c76417338e
31 changed files with 778 additions and 350 deletions

View file

@@ -213,17 +213,20 @@ Dangerfile @gl-quality/eng-prod
# Secure & Threat Management ownership delineation
# https://about.gitlab.com/handbook/engineering/development/threat-management/delineate-secure-threat-management.html#technical-boundaries
[Threat Insights]
/app/finders/security/ @gitlab-org/secure/threat-insights-backend-team
/app/models/vulnerability.rb @gitlab-org/secure/threat-insights-backend-team
/ee/app/finders/security/ @gitlab-org/secure/threat-insights-backend-team
/ee/app/models/security/ @gitlab-org/secure/threat-insights-backend-team
/ee/app/models/vulnerabilities/ @gitlab-org/secure/threat-insights-backend-team
/ee/app/policies/vulnerabilities/ @gitlab-org/secure/threat-insights-backend-team
/ee/app/policies/vulnerability*.rb @gitlab-org/secure/threat-insights-backend-team
/ee/app/presenters/projects/security/ @gitlab-org/secure/threat-insights-backend-team
/ee/lib/api/vulnerabilit*.rb @gitlab-org/secure/threat-insights-backend-team
/ee/lib/gitlab/ci/reports/security/vulnerability_reports_comparer.rb @gitlab-org/secure/threat-insights-backend-team
/ee/spec/lib/gitlab/ci/reports/security/vulnerability_reports_comparer_spec.rb @gitlab-org/secure/threat-insights-backend-team
/ee/spec/policies/vulnerabilities/ @gitlab-org/secure/threat-insights-backend-team
/ee/spec/policies/vulnerability*.rb @gitlab-org/secure/threat-insights-backend-team
/ee/spec/presenters/projects/security/ @gitlab-org/secure/threat-insights-backend-team
[Secure]
/ee/lib/gitlab/ci/parsers/license_compliance/ @gitlab-org/secure/composition-analysis-be

View file

@@ -1,43 +1,48 @@
# This image is used by the `review-qa-*` jobs. The image name is also passed to the downstream `omnibus-gitlab-mirror` pipeline
# triggered by `package-and-qa` so that it doesn't have to rebuild it a second time. The downstream `omnibus-gitlab-mirror` pipeline
# itself passes the image name to the `gitlab-qa-mirror` pipeline so that it can use it instead of inferring an end-to-end image
# from the GitLab image built by the downstream `omnibus-gitlab-mirror` pipeline.
.base-image-build:
extends: .use-kaniko
script:
# With .git/hooks/post-checkout in place, Git tries to pull LFS objects, but the image doesn't have Git LFS, and we don't actually care about it in this specific case, so we just remove the file.
# Without removing the file, the error is as follows: "This repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting .git/hooks/post-checkout."
- rm .git/hooks/post-checkout
- if [ -n "$CI_MERGE_REQUEST_SOURCE_BRANCH_SHA" ]; then
echo "Checking out \$CI_MERGE_REQUEST_SOURCE_BRANCH_SHA ($CI_MERGE_REQUEST_SOURCE_BRANCH_SHA) instead of \$CI_COMMIT_SHA (merge result commit $CI_COMMIT_SHA) so that GitLab image built in omnibus-gitlab-mirror and QA image are in sync.";
git checkout -f ${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA};
else
echo "Building the image from \$CI_COMMIT_SHA ($CI_COMMIT_SHA) for this non-merge result pipeline.";
fi;
- echo "See https://docs.gitlab.com/ee/development/testing_guide/end_to_end/index.html#with-pipeline-for-merged-results for more details.";
retry: 2
# This image is used by:
# - The `review-qa-*` jobs
# - The downstream `omnibus-gitlab-mirror` pipeline triggered by `package-and-qa`, so that it doesn't have to rebuild it.
# The downstream `omnibus-gitlab-mirror` pipeline itself passes the image name to the `gitlab-qa-mirror` pipeline so that
# it can use it instead of inferring an end-to-end image from the GitLab image built by the downstream `omnibus-gitlab-mirror` pipeline.
# See https://docs.gitlab.com/ee/development/testing_guide/end_to_end/index.html#testing-code-in-merge-requests for more details.
build-qa-image:
extends:
- .use-kaniko
- .base-image-build
- .build-images:rules:build-qa-image
stage: build-images
needs: []
variables:
QA_IMAGE: "${CI_REGISTRY}/${CI_PROJECT_PATH}/gitlab-ee-qa:${CI_COMMIT_REF_SLUG}"
script:
# With .git/hooks/post-checkout in place, Git tries to pull LFS objects, but the image doesn't have Git LFS, and we don't actually care about it in this specific case, so we just remove the file.
# Without removing the file, the error is as follows: "This repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting .git/hooks/post-checkout."
- rm .git/hooks/post-checkout
# Use $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA so that GitLab image built in omnibus-gitlab-mirror and QA image are in sync.
# This falls back to $CI_COMMIT_SHA (the default checked out commit) for the non-merged result pipelines.
# See https://docs.gitlab.com/ee/development/testing_guide/end_to_end/index.html#with-pipeline-for-merged-results.
- if [ -n "$CI_MERGE_REQUEST_SOURCE_BRANCH_SHA" ]; then
git checkout -f ${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA};
fi
- !reference [.base-image-build, script]
- /kaniko/executor --context=${CI_PROJECT_DIR} --dockerfile=${CI_PROJECT_DIR}/qa/Dockerfile --destination=${QA_IMAGE} --cache=true
retry: 2
# This image is used by:
# - The `CNG` pipelines (via the `review-build-cng` job): https://gitlab.com/gitlab-org/build/CNG/-/blob/cfc67136d711e1c8c409bf8e57427a644393da2f/.gitlab-ci.yml#L335
# - The `omnibus-gitlab` pipelines (via the `package-and-qa` job): https://gitlab.com/gitlab-org/omnibus-gitlab/-/blob/dfd1ad475868fc84e91ab7b5706aa03e46dc3a86/.gitlab-ci.yml#L130
build-assets-image:
extends:
- .use-kaniko
- .base-image-build
- .build-images:rules:build-assets-image
stage: build-images
needs: ["compile-production-assets"]
variables:
GIT_DEPTH: "1"
script:
- !reference [.base-image-build, script]
# TODO: Change the image tag to be the MD5 of assets files and skip image building if the image exists
# We'll also need to pass GITLAB_ASSETS_TAG to the triggered omnibus-gitlab pipeline similarly to how we do it for triggered CNG pipelines
# https://gitlab.com/gitlab-org/gitlab/issues/208389
- run_timed_command "scripts/build_assets_image"
retry: 2

View file

@@ -1 +1 @@
14.0.0
14.0.1

View file

@@ -610,8 +610,6 @@ module Ci
# rubocop: enable CodeReuse/ServiceClass
def lazy_ref_commit
return unless ::Gitlab::Ci::Features.pipeline_latest?
BatchLoader.for(ref).batch do |refs, loader|
next unless project.repository_exists?
@@ -623,11 +621,6 @@ module Ci
def latest?
return false unless git_ref && commit.present?
unless ::Gitlab::Ci::Features.pipeline_latest?
return project.commit(git_ref) == commit
end
return false if lazy_ref_commit.nil?
lazy_ref_commit.id == commit.id

View file

@@ -9,13 +9,18 @@ module Avatarable
ALLOWED_IMAGE_SCALER_WIDTHS = (USER_AVATAR_SIZES | PROJECT_AVATAR_SIZES | GROUP_AVATAR_SIZES).freeze
# This value must not be bigger than the one configured here: https://gitlab.com/gitlab-org/gitlab/-/blob/master/workhorse/config.toml.example#L20
#
# https://docs.gitlab.com/ee/development/image_scaling.html
MAXIMUM_FILE_SIZE = 200.kilobytes.to_i
included do
prepend ShadowMethods
include ObjectStorage::BackgroundMove
include Gitlab::Utils::StrongMemoize
validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }, if: :avatar_changed?
validates :avatar, file_size: { maximum: MAXIMUM_FILE_SIZE }, if: :avatar_changed?
mount_uploader :avatar, AvatarUploader

View file

@@ -1,18 +1,20 @@
# frozen_string_literal: true
# Downloads a remote file. If no filename is given, the remote filename from the content-disposition response header is used.
module BulkImports
class FileDownloadService
FILE_SIZE_LIMIT = 5.gigabytes
ALLOWED_CONTENT_TYPES = %w(application/gzip application/octet-stream).freeze
ServiceError = Class.new(StandardError)
def initialize(configuration:, relative_url:, dir:, filename:)
REMOTE_FILENAME_PATTERN = %r{filename="(?<filename>[^"]+)"}.freeze
FILENAME_SIZE_LIMIT = 255 # chars before the extension
def initialize(configuration:, relative_url:, dir:, file_size_limit:, allowed_content_types:, filename: nil)
@configuration = configuration
@relative_url = relative_url
@filename = filename
@dir = dir
@filepath = File.join(@dir, @filename)
@file_size_limit = file_size_limit
@allowed_content_types = allowed_content_types
end
def execute
@@ -30,7 +32,7 @@ module BulkImports
private
attr_reader :configuration, :relative_url, :dir, :filename, :filepath
attr_reader :configuration, :relative_url, :dir, :file_size_limit, :allowed_content_types
def download_file
File.open(filepath, 'wb') do |file|
@@ -39,7 +41,7 @@ module BulkImports
http_client.stream(relative_url) do |chunk|
bytes_downloaded += chunk.size
raise(ServiceError, 'Invalid downloaded file') if bytes_downloaded > FILE_SIZE_LIMIT
validate_size!(bytes_downloaded)
raise(ServiceError, "File download error #{chunk.code}") unless chunk.code == 200
file.write(chunk)
@@ -88,15 +90,59 @@ module BulkImports
end
def validate_content_length
content_size = headers['content-length']
validate_size!(headers['content-length'])
end
raise(ServiceError, 'Invalid content length') if content_size.blank? || content_size.to_i > FILE_SIZE_LIMIT
def validate_size!(size)
if size.blank?
raise ServiceError, 'Missing content-length header'
elsif size.to_i > file_size_limit
raise ServiceError, "File size %{size} exceeds limit of %{limit}" % {
size: ActiveSupport::NumberHelper.number_to_human_size(size),
limit: ActiveSupport::NumberHelper.number_to_human_size(file_size_limit)
}
end
end
def validate_content_type
content_type = headers['content-type']
raise(ServiceError, 'Invalid content type') if content_type.blank? || ALLOWED_CONTENT_TYPES.exclude?(content_type)
raise(ServiceError, 'Invalid content type') if content_type.blank? || allowed_content_types.exclude?(content_type)
end
def filepath
@filepath ||= File.join(@dir, filename)
end
def filename
@filename.presence || remote_filename
end
# Fetch the remote filename from the content-disposition response header
# - Raises if the header does not contain a filename
# - If the filename is longer than FILENAME_SIZE_LIMIT chars, truncate it
#   to that limit before the extension, keeping the extension intact
def remote_filename
@remote_filename ||=
headers['content-disposition'].to_s
.match(REMOTE_FILENAME_PATTERN) # matches the filename pattern
.then { |match| match&.named_captures || {} } # ensures the match is a hash
.fetch('filename') # fetches the 'filename' key or raise KeyError
.then(&File.method(:basename)) # Ensures to remove path from the filename (../ for instance)
.then(&method(:ensure_filename_size)) # Ensures the filename is within the FILENAME_SIZE_LIMIT
rescue KeyError
raise ServiceError, 'Remote filename not provided in content-disposition header'
end
def ensure_filename_size(filename)
if filename.length <= FILENAME_SIZE_LIMIT
filename
else
extname = File.extname(filename)
basename = File.basename(filename, extname)[0, FILENAME_SIZE_LIMIT]
"#{basename}#{extname}"
end
end
end
end
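For orientation, a minimal usage sketch of the reworked service (not part of the diff): `configuration` stands in for a real `BulkImports::Configuration`, the URL is illustrative, and the limit and content-type values mirror what the export-download caller later in this commit injects.

```ruby
# Hedged sketch: invoking the service with its new required keyword arguments.
service = BulkImports::FileDownloadService.new(
  configuration: configuration,            # a BulkImports::Configuration
  relative_url: '/groups/my-group/avatar', # illustrative endpoint
  dir: Dir.mktmpdir,
  file_size_limit: 5.gigabytes,            # now injected by each caller
  allowed_content_types: %w(application/gzip application/octet-stream)
  # filename: omitted, so the remote filename from the content-disposition
  # header is used (sanitized and truncated as described above)
)

filepath = service.execute # => path to the downloaded file inside `dir`
```

Per the specs further down, a `content-disposition` of `filename="../../avatar.png"` resolves to `avatar.png` inside `dir`, so path traversal via the remote filename is neutralized.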

View file

@@ -1,8 +0,0 @@
---
name: ci_pipeline_latest
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/34160
rollout_issue_url:
milestone: '13.2'
type: development
group: group::pipeline execution
default_enabled: true

View file

@@ -161,18 +161,18 @@ To enable it:
```ruby
# For the instance
Feature.enable(:ff_compliance_approval_gates)
Feature.enable(:ff_external_status_checks)
# For a single project
Feature.enable(:ff_compliance_approval_gates, Project.find(<project id>))
Feature.enable(:ff_external_status_checks, Project.find(<project id>))
```
To disable it:
```ruby
# For the instance
Feature.disable(:ff_compliance_approval_gates)
Feature.disable(:ff_external_status_checks)
# For a single project
Feature.disable(:ff_compliance_approval_gates, Project.find(<project id>))
Feature.disable(:ff_external_status_checks, Project.find(<project id>))
```
## Related links

View file

@@ -1551,32 +1551,40 @@ The following example creates four paths of execution:
```yaml
linux:build:
stage: build
script: echo "Building linux..."
mac:build:
stage: build
script: echo "Building mac..."
lint:
stage: test
needs: []
script: echo "Linting..."
linux:rspec:
stage: test
needs: ["linux:build"]
script: echo "Running rspec on linux..."
linux:rubocop:
stage: test
needs: ["linux:build"]
script: echo "Running rubocop on linux..."
mac:rspec:
stage: test
needs: ["mac:build"]
script: echo "Running rspec on mac..."
mac:rubocop:
stage: test
needs: ["mac:build"]
script: echo "Running rubocop on mac..."
production:
stage: deploy
script: echo "Running production..."
```
#### Requirements and limitations

View file

@@ -0,0 +1,43 @@
---
stage: Growth
group: Product Intelligence
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Snowplow review guidelines
This page includes introductory material for a
[Product Intelligence](https://about.gitlab.com/handbook/engineering/development/growth/product-intelligence/)
review, and is specific to Snowplow related reviews. For broader advice and
general best practices for code reviews, refer to our [code review guide](../code_review.md).
## Resources for reviewers
- [Snowplow Guide](index.md)
- [Event Dictionary](dictionary.md)
## Review process
We recommend a Product Intelligence review when a merge request (MR) involves changes in
events or touches Snowplow-related files.
### Roles and process
#### The merge request **author** should
- For frontend events, when relevant, add a screenshot of the event in
the [testing tool](../snowplow/index.md#developing-and-testing-snowplow) used.
- For backend events, when relevant, add the output of the
[Snowplow Micro](index.md#snowplow-mini) good events
`GET http://localhost:9090/micro/good` (it might be a good idea
to reset with `GET http://localhost:9090/micro/reset` first).
- Update the [Event Dictionary](event_dictionary_guide.md).
#### The Product Intelligence **reviewer** should
- Check that the [event taxonomy](../snowplow/index.md#structured-event-taxonomy) is correct.
- Check the [usage recommendations](../snowplow/index.md#usage-recommendations).
- Check that the [Event Dictionary](event_dictionary_guide.md) is up-to-date.
- If needed, check that the events are firing locally using one of the
[testing tools](../snowplow/index.md#developing-and-testing-snowplow) available.
- Approve the MR, and relabel the MR with `~"product intelligence::approved"`.

View file

@@ -1071,7 +1071,7 @@ Ensure you comply with the [Changelog entries guide](../changelog.md).
### 9. Ask for a Product Intelligence Review
On GitLab.com, we have DangerBot setup to monitor Product Intelligence related files and DangerBot recommends a [Product Intelligence review](product_intelligence_review.md). Mention `@gitlab-org/growth/product_intelligence/engineers` in your MR for a review.
On GitLab.com, we have DangerBot set up to monitor Product Intelligence-related files, and it recommends a [Product Intelligence review](review_guidelines.md). Mention `@gitlab-org/growth/product_intelligence/engineers` in your MR for a review.
### 10. Verify your metric

View file

@@ -1,91 +1,9 @@
---
stage: Growth
group: Product Intelligence
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
redirect_to: 'review_guidelines.md'
remove_date: '2021-09-16'
---
# Product Intelligence review guidelines
This file was moved to [another location](review_guidelines.md).
This page includes introductory material for a
[Product Intelligence](https://about.gitlab.com/handbook/engineering/development/growth/product-intelligence/)
review, and is specific to Product Intelligence reviews. For broader advice and
general best practices for code reviews, refer to our [code review guide](../code_review.md).
## Resources for Product Intelligence reviewers
- [Usage Ping Guide](index.md)
- [Snowplow Guide](../snowplow/index.md)
- [Metrics Dictionary](metrics_dictionary.md)
## Review process
We recommend a Product Intelligence review when an application update touches
Product Intelligence files.
- Changes that touch `usage_data*` files.
- Changes to the Metrics Dictionary including files in:
- [`config/metrics`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/config/metrics).
- [`ee/config/metrics`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/ee/config/metrics).
- [`dictionary.md`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/development/usage_ping/dictionary.md).
- [`schema.json`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/metrics/schema.json).
- Changes to `tracking` files.
- Changes to Product Intelligence tooling. For example,
[`Gitlab::UsageMetricDefinitionGenerator`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/generators/gitlab/usage_metric_definition_generator.rb)
### Roles and process
#### The merge request **author** should
- Decide whether a Product Intelligence review is needed.
- If a Product Intelligence review is needed, add the labels
`~product intelligence` and `~product intelligence::review pending`.
- Assign an
[engineer](https://gitlab.com/groups/gitlab-org/growth/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) from the Product Intelligence team for a review.
- Set the correct attributes in YAML metrics:
- `product_section`, `product_stage`, `product_group`, `product_category`
- Provide a clear description of the metric.
- Update the
[Metrics Dictionary](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/development/usage_ping/dictionary.md) if it is needed.
- Add a changelog [according to guidelines](../changelog.md).
##### When adding or modifying Snowplow events
- For frontend events, when relevant, add a screenshot of the event in
the [testing tool](../snowplow/index.md#developing-and-testing-snowplow) used.
- For backend events, when relevant, add the output of the Snowplow Micro
good events `GET http://localhost:9090/micro/good` (it might be a good idea
to reset with `GET http://localhost:9090/micro/reset` first).
#### The Product Intelligence **reviewer** should
- Perform a first-pass review on the merge request and suggest improvements to the author.
- Approve the MR, and relabel the MR with `~"product intelligence::approved"`.
## Review workload distribution
[Danger bot](../dangerbot.md) adds the list of Product Intelligence changed files
and pings the
[`@gitlab-org/growth/product-intelligence/engineers`](https://gitlab.com/groups/gitlab-org/growth/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) group for merge requests
that are not drafts.
Any of the Product Intelligence engineers can be assigned for the Product Intelligence review.
### How to review for Product Intelligence
- Check the [metrics location](index.md#1-naming-and-placing-the-metrics) in
the Usage Ping JSON payload.
- Add `~database` label and ask for [database review](../database_review.md) for
metrics that are based on Database.
- For tracking using Redis HLL (HyperLogLog):
- Check the Redis slot.
- Check if a [feature flag is needed](index.md#recommendations).
- For tracking with Snowplow:
- Check that the [event taxonomy](../snowplow/index.md#structured-event-taxonomy) is correct.
- Check the [usage recommendations](../snowplow/index.md#usage-recommendations).
- Metrics YAML definitions:
- Check the metric `description`.
- Check the metrics `key_path`.
- Check the `product_section`, `product_stage`, `product_group`, `product_category`.
Read the [stages file](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/stages.yml).
- Check the file location. Consider the time frame, and if the file should be under `ee`.
- Check the tiers.
<!-- This redirect file can be deleted after <2021-09-16>. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->

View file

@@ -0,0 +1,75 @@
---
stage: Growth
group: Product Intelligence
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Usage Ping review guidelines
This page includes introductory material for a
[Product Intelligence](https://about.gitlab.com/handbook/engineering/development/growth/product-intelligence/)
review, and is specific to Usage Ping related reviews. For broader advice and
general best practices for code reviews, refer to our [code review guide](../code_review.md).
## Resources for reviewers
- [Usage Ping Guide](index.md)
- [Metrics Dictionary](metrics_dictionary.md)
## Review process
We recommend a Product Intelligence review when a merge request (MR) touches
any of the following Usage Ping files:
- `usage_data*` files.
- The Metrics Dictionary, including files in:
- [`config/metrics`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/config/metrics).
- [`ee/config/metrics`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/ee/config/metrics).
- [`dictionary.md`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/development/usage_ping/dictionary.md).
- [`schema.json`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/metrics/schema.json).
- Product Intelligence tooling. For example,
[`Gitlab::UsageMetricDefinitionGenerator`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/generators/gitlab/usage_metric_definition_generator.rb)
### Roles and process
#### The merge request **author** should
- Decide whether a Product Intelligence review is needed.
- If a Product Intelligence review is needed, add the labels
`~product intelligence` and `~product intelligence::review pending`.
- Assign an
[engineer](https://gitlab.com/groups/gitlab-org/growth/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) from the Product Intelligence team for a review.
- Set the correct attributes in the metric's YAML definition:
- `product_section`, `product_stage`, `product_group`, `product_category`
- Provide a clear description of the metric.
- Update the
[Metrics Dictionary](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/development/usage_ping/dictionary.md) if needed.
- Add a changelog [according to guidelines](../changelog.md).
#### The Product Intelligence **reviewer** should
- Perform a first-pass review on the merge request and suggest improvements to the author.
- Check the [metrics location](index.md#1-naming-and-placing-the-metrics) in
the Usage Ping JSON payload.
- Add the `~database` label and ask for a [database review](../database_review.md) for
metrics that are based on the database.
- For tracking using Redis HLL (HyperLogLog):
- Check the Redis slot.
- Check if a [feature flag is needed](index.md#recommendations).
- For a metric's YAML definition:
- Check the metric's `description`.
- Check the metric's `key_path`.
- Check the `product_section`, `product_stage`, `product_group`, and `product_category` fields.
Read the [stages file](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/stages.yml).
- Check the file location. Consider the time frame, and if the file should be under `ee`.
- Check the tiers.
- Approve the MR, and relabel the MR with `~"product intelligence::approved"`.
## Review workload distribution
[Danger bot](../dangerbot.md) adds the list of changed Product Intelligence files
and pings the
[`@gitlab-org/growth/product-intelligence/engineers`](https://gitlab.com/groups/gitlab-org/growth/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) group for merge requests
that are not drafts.
Any of the Product Intelligence engineers can be assigned to the Product Intelligence review.

View file

@@ -22,6 +22,7 @@ The following resources are migrated to the target instance:
- description
- attributes
- subgroups
- avatar ([Introduced in 14.0](https://gitlab.com/gitlab-org/gitlab/-/issues/322904))
- Group Labels ([Introduced in 13.9](https://gitlab.com/gitlab-org/gitlab/-/issues/292429))
- title
- description

View file

@@ -160,18 +160,18 @@ To enable it:
```ruby
# For the instance
Feature.enable(:ff_compliance_approval_gates)
Feature.enable(:ff_external_status_checks)
# For a single project
Feature.enable(:ff_compliance_approval_gates, Project.find(<project id>))
Feature.enable(:ff_external_status_checks, Project.find(<project id>))
```
To disable it:
```ruby
# For the instance
Feature.disable(:ff_compliance_approval_gates)
Feature.disable(:ff_external_status_checks)
# For a single project
Feature.disable(:ff_compliance_approval_gates, Project.find(<project id>))
Feature.disable(:ff_external_status_checks, Project.find(<project id>))
```
## Related links

View file

@@ -8,19 +8,12 @@
module API
module Helpers
module Caching
# @return [ActiveSupport::Duration]
DEFAULT_EXPIRY = 1.day
include Gitlab::Cache::Helpers
# @return [Hash]
DEFAULT_CACHE_OPTIONS = {
race_condition_ttl: 5.seconds
}.freeze
# @return [ActiveSupport::Cache::Store]
def cache
Rails.cache
end
# This is functionally equivalent to the standard `#present` used in
# Grape endpoints, but the JSON for the object, or for each object of
# a collection, will be cached.
@@ -45,7 +38,7 @@ module API
# @param expires_in [ActiveSupport::Duration, Integer] an expiry time for the cache entry
# @param presenter_args [Hash] keyword arguments to be passed to the entity
# @return [Gitlab::Json::PrecompiledJson]
def present_cached(obj_or_collection, with:, cache_context: -> (_) { current_user&.cache_key }, expires_in: DEFAULT_EXPIRY, **presenter_args)
def present_cached(obj_or_collection, with:, cache_context: -> (_) { current_user&.cache_key }, expires_in: Gitlab::Cache::Helpers::DEFAULT_EXPIRY, **presenter_args)
json =
if obj_or_collection.is_a?(Enumerable)
cached_collection(
@@ -120,77 +113,6 @@ module API
def apply_default_cache_options(opts = {})
DEFAULT_CACHE_OPTIONS.merge(opts)
end
# Optionally uses a `Proc` to add context to a cache key
#
# @param object [Object] must respond to #cache_key
# @param context [Proc] a proc that will be called with the object as an argument, and which should return a
# string or array of strings to be combined into the cache key
# @return [String]
def contextual_cache_key(object, context)
return object.cache_key if context.nil?
[object.cache_key, context.call(object)].flatten.join(":")
end
# Used for fetching or rendering a single object
#
# @param object [Object] the object to render
# @param presenter [Grape::Entity]
# @param presenter_args [Hash] keyword arguments to be passed to the entity
# @param context [Proc]
# @param expires_in [ActiveSupport::Duration, Integer] an expiry time for the cache entry
# @return [String]
def cached_object(object, presenter:, presenter_args:, context:, expires_in:)
cache.fetch(contextual_cache_key(object, context), expires_in: expires_in) do
Gitlab::Json.dump(presenter.represent(object, **presenter_args).as_json)
end
end
# Used for fetching or rendering multiple objects
#
# @param objects [Enumerable<Object>] the objects to render
# @param presenter [Grape::Entity]
# @param presenter_args [Hash] keyword arguments to be passed to the entity
# @param context [Proc]
# @param expires_in [ActiveSupport::Duration, Integer] an expiry time for the cache entry
# @return [Array<String>]
def cached_collection(collection, presenter:, presenter_args:, context:, expires_in:)
json = fetch_multi(collection, context: context, expires_in: expires_in) do |obj|
Gitlab::Json.dump(presenter.represent(obj, **presenter_args).as_json)
end
json.values
end
# An adapted version of ActiveSupport::Cache::Store#fetch_multi.
#
# The original method only provides the missing key to the block,
# not the missing object, so we have to create a map of cache keys
# to the objects to allow us to pass the object to the missing value
# block.
#
# The result is that this is functionally identical to `#fetch`.
def fetch_multi(*objs, context:, **kwargs)
objs.flatten!
map = multi_key_map(objs, context: context)
# TODO: `contextual_cache_key` should be constructed based on the guideline https://docs.gitlab.com/ee/development/redis.html#multi-key-commands.
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
cache.fetch_multi(*map.keys, **kwargs) do |key|
yield map[key]
end
end
end
# @param objects [Enumerable<Object>] objects which _must_ respond to `#cache_key`
# @param context [Proc] a proc that can be called to help generate each cache key
# @return [Hash]
def multi_key_map(objects, context:)
objects.index_by do |object|
contextual_cache_key(object, context)
end
end
end
end
end

View file

@@ -7,6 +7,8 @@ module BulkImports
include Gitlab::ImportExport::CommandLineUtil
include Gitlab::Utils::StrongMemoize
FILE_SIZE_LIMIT = 5.gigabytes
ALLOWED_CONTENT_TYPES = %w(application/gzip application/octet-stream).freeze
EXPORT_DOWNLOAD_URL_PATH = "/%{resource}/%{full_path}/export_relations/download?relation=%{relation}"
def initialize(relation:)
@@ -39,7 +41,9 @@ module BulkImports
configuration: context.configuration,
relative_url: relative_resource_url(context),
dir: tmp_dir,
filename: filename
filename: filename,
file_size_limit: FILE_SIZE_LIMIT,
allowed_content_types: ALLOWED_CONTENT_TYPES
)
end

View file

@@ -0,0 +1,53 @@
# frozen_string_literal: true
module BulkImports
module Groups
module Pipelines
class GroupAvatarPipeline
include Pipeline
ALLOWED_AVATAR_DOWNLOAD_TYPES = (AvatarUploader::MIME_WHITELIST + %w(application/octet-stream)).freeze
GroupAvatarLoadingError = Class.new(StandardError)
def extract(context)
context.extra[:tmpdir] = Dir.mktmpdir
filepath = BulkImports::FileDownloadService.new(
configuration: context.configuration,
relative_url: "/groups/#{context.entity.encoded_source_full_path}/avatar",
dir: context.extra[:tmpdir],
file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
allowed_content_types: ALLOWED_AVATAR_DOWNLOAD_TYPES
).execute
BulkImports::Pipeline::ExtractedData.new(data: { filepath: filepath })
end
def transform(_, data)
data
end
def load(context, data)
return if data.blank?
File.open(data[:filepath]) do |avatar|
service = ::Groups::UpdateService.new(
portable,
current_user,
avatar: avatar
)
unless service.execute
raise GroupAvatarLoadingError, portable.errors.full_messages.first
end
end
end
def after_run(context, _)
FileUtils.remove_entry(context.extra[:tmpdir]) if context.extra[:tmpdir].present?
end
end
end
end
end

View file

@@ -9,6 +9,10 @@ module BulkImports
pipeline: BulkImports::Groups::Pipelines::GroupPipeline,
stage: 0
},
avatar: {
pipeline: BulkImports::Groups::Pipelines::GroupAvatarPipeline,
stage: 1
},
subgroups: {
pipeline: BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline,
stage: 1

lib/gitlab/cache/helpers.rb (new file)
View file

@@ -0,0 +1,111 @@
# frozen_string_literal: true
module Gitlab
module Cache
module Helpers
# @return [ActiveSupport::Duration]
DEFAULT_EXPIRY = 1.day
# @return [ActiveSupport::Cache::Store]
def cache
Rails.cache
end
def render_cached(obj_or_collection, with:, cache_context: -> (_) { current_user&.cache_key }, expires_in: Gitlab::Cache::Helpers::DEFAULT_EXPIRY, **presenter_args)
json =
if obj_or_collection.is_a?(Enumerable)
cached_collection(
obj_or_collection,
presenter: with,
presenter_args: presenter_args,
context: cache_context,
expires_in: expires_in
)
else
cached_object(
obj_or_collection,
presenter: with,
presenter_args: presenter_args,
context: cache_context,
expires_in: expires_in
)
end
render Gitlab::Json::PrecompiledJson.new(json)
end
private
# Optionally uses a `Proc` to add context to a cache key
#
# @param object [Object] must respond to #cache_key
# @param context [Proc] a proc that will be called with the object as an argument, and which should return a
# string or array of strings to be combined into the cache key
# @return [String]
def contextual_cache_key(presenter, object, context)
return object.cache_key if context.nil?
[presenter.class.name, object.cache_key, context.call(object)].flatten.join(":")
end
# Used for fetching or rendering a single object
#
# @param object [Object] the object to render
# @param presenter [Grape::Entity]
# @param presenter_args [Hash] keyword arguments to be passed to the entity
# @param context [Proc]
# @param expires_in [ActiveSupport::Duration, Integer] an expiry time for the cache entry
# @return [String]
def cached_object(object, presenter:, presenter_args:, context:, expires_in:)
cache.fetch(contextual_cache_key(presenter, object, context), expires_in: expires_in) do
Gitlab::Json.dump(presenter.represent(object, **presenter_args).as_json)
end
end
# Used for fetching or rendering multiple objects
#
# @param objects [Enumerable<Object>] the objects to render
# @param presenter [Grape::Entity]
# @param presenter_args [Hash] keyword arguments to be passed to the entity
# @param context [Proc]
# @param expires_in [ActiveSupport::Duration, Integer] an expiry time for the cache entry
# @return [Array<String>]
def cached_collection(collection, presenter:, presenter_args:, context:, expires_in:)
json = fetch_multi(presenter, collection, context: context, expires_in: expires_in) do |obj|
Gitlab::Json.dump(presenter.represent(obj, **presenter_args).as_json)
end
json.values
end
# An adapted version of ActiveSupport::Cache::Store#fetch_multi.
#
# The original method only provides the missing key to the block,
# not the missing object, so we have to create a map of cache keys
# to the objects to allow us to pass the object to the missing value
# block.
#
# The result is that this is functionally identical to `#fetch`.
def fetch_multi(presenter, *objs, context:, **kwargs)
objs.flatten!
map = multi_key_map(presenter, objs, context: context)
# TODO: `contextual_cache_key` should be constructed based on the guideline https://docs.gitlab.com/ee/development/redis.html#multi-key-commands.
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
cache.fetch_multi(*map.keys, **kwargs) do |key|
yield map[key]
end
end
end
# @param objects [Enumerable<Object>] objects which _must_ respond to `#cache_key`
# @param context [Proc] a proc that can be called to help generate each cache key
# @return [Hash]
def multi_key_map(presenter, objects, context:)
objects.index_by do |object|
contextual_cache_key(presenter, object, context)
end
end
end
end
end
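A minimal sketch of how a Rails controller might consume the extracted helper; the controller, serializer, and scope names are illustrative rather than taken from this commit. The module only assumes `#render` and `#current_user` are available, which is why the spec below stubs both.

```ruby
# Hypothetical controller wiring for Gitlab::Cache::Helpers.
class Projects::MergeRequestsController < Projects::ApplicationController
  include Gitlab::Cache::Helpers

  def index
    # Each record is cached individually; the key combines the presenter
    # class name, the record's #cache_key, and the cache context (the
    # current user's cache_key by default).
    render_cached(
      project.merge_requests,
      with: MergeRequestSerializer.new(current_user: current_user, project: project),
      expires_in: 1.day
    )
  end
end
```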

View file

@@ -10,10 +10,6 @@ module Gitlab
::Feature.enabled?(:ci_artifacts_exclude, default_enabled: true)
end
def self.pipeline_latest?
::Feature.enabled?(:ci_pipeline_latest, default_enabled: true)
end
# NOTE: The feature flag `disallow_to_create_merge_request_pipelines_in_target_project`
# is a safe switch to disable the feature for a particular project when something went wrong,
# therefore it's not supposed to be enabled by default.

View file

@@ -228,6 +228,14 @@ module Gitlab
raise UnsupportedFormatError
end
def render_in(_view_context)
to_s
end
def format
:json
end
end
class LimitedEncoder
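The two small additions to `PrecompiledJson` are what let `render_cached` hand the cached string straight to `render`: Rails treats any object responding to `render_in` as renderable, and `#format` tells the responder to serve it as JSON, so the pre-serialized payload is emitted without a second encoding pass. A sketch of the round trip, using only names from this diff:

```ruby
# Inside a controller that includes Gitlab::Cache::Helpers:
json = Gitlab::Json.dump(presenter.represent(object).as_json)

# `render` invokes PrecompiledJson#render_in (returning the raw string)
# and uses #format (:json) as the response MIME type.
render Gitlab::Json::PrecompiledJson.new(json)
```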

View file

@@ -19,7 +19,12 @@ cp -r public/assets assets_container.build/public/
cp Dockerfile.assets assets_container.build/
COMMIT_REF_SLUG_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_REF_SLUG}
COMMIT_SHA_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_SHA}
# Use CI_MERGE_REQUEST_SOURCE_BRANCH_SHA (MR HEAD commit) so that the image is in sync with Omnibus/CNG images.
# Background: Because we cannot retrieve the merged commit in the downstream omnibus/CNG pipelines,
# we're building the Omnibus/CNG images for the MR HEAD commit.
# In turn, the assets image also needs to be built from the MR HEAD commit, so that everything is built from the same commit.
# For non-MR commits, we fall back to $CI_COMMIT_SHA.
COMMIT_SHA_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA:-$CI_COMMIT_SHA}
COMMIT_REF_NAME_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_REF_NAME}
DESTINATIONS="--destination=$COMMIT_REF_SLUG_DESTINATION --destination=$COMMIT_SHA_DESTINATION"

View file

@@ -166,8 +166,9 @@ function label_namespace() {
local label="${2}"
echoinfo "Labeling the ${namespace} namespace with ${label}" true
echoinfo "We should pass the --overwrite option!"
kubectl label namespace "${namespace}" "${label}"
kubectl label --overwrite namespace "${namespace}" "${label}"
}
function create_application_secret() {

View file

@@ -135,11 +135,11 @@ module Trigger
end
def extra_variables
# Use CI_MERGE_REQUEST_SOURCE_BRANCH_SHA for omnibus checkouts due to pipeline for merged results
# and fallback to CI_COMMIT_SHA for the non-MR pipelines.
# Use CI_MERGE_REQUEST_SOURCE_BRANCH_SHA (MR HEAD commit) so that the image is in sync with the assets and QA images.
# See https://docs.gitlab.com/ee/development/testing_guide/end_to_end/index.html#with-pipeline-for-merged-results.
# We also set IMAGE_TAG so the GitLab Docker image is tagged with that SHA.
source_sha = Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA']
{
'GITLAB_VERSION' => source_sha,
'IMAGE_TAG' => source_sha,
@@ -177,12 +177,14 @@ module Trigger
def extra_variables
edition = Trigger.ee? ? 'EE' : 'CE'
# Use CI_MERGE_REQUEST_SOURCE_BRANCH_SHA (MR HEAD commit) so that the image is in sync with the assets and QA images.
source_sha = Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA']
{
"ee" => Trigger.ee? ? "true" : "false",
"GITLAB_VERSION" => ENV['CI_COMMIT_SHA'],
"GITLAB_VERSION" => source_sha,
"GITLAB_TAG" => ENV['CI_COMMIT_TAG'],
"GITLAB_ASSETS_TAG" => ENV['CI_COMMIT_TAG'] ? ENV['CI_COMMIT_REF_NAME'] : ENV['CI_COMMIT_SHA'],
"GITLAB_ASSETS_TAG" => ENV['CI_COMMIT_TAG'] ? ENV['CI_COMMIT_REF_NAME'] : source_sha,
"FORCE_RAILS_IMAGE_BUILDS" => 'true',
"#{edition}_PIPELINE" => 'true'
}

View file

@@ -44,108 +44,16 @@ RSpec.describe API::Helpers::Caching, :use_clean_rails_redis_caching do
}
end
context "single object" do
context 'single object' do
let_it_be(:presentable) { create(:todo, project: project) }
it { is_expected.to be_a(Gitlab::Json::PrecompiledJson) }
it "uses the presenter" do
expect(presenter).to receive(:represent).with(presentable, project: project)
subject
end
it "is valid JSON" do
parsed = Gitlab::Json.parse(subject.to_s)
expect(parsed).to be_a(Hash)
expect(parsed["id"]).to eq(presentable.id)
end
it "fetches from the cache" do
expect(instance.cache).to receive(:fetch).with("#{presentable.cache_key}:#{user.cache_key}", expires_in: described_class::DEFAULT_EXPIRY).once
subject
end
context "when a cache context is supplied" do
before do
kwargs[:cache_context] = -> (todo) { todo.project.cache_key }
end
it "uses the context to augment the cache key" do
expect(instance.cache).to receive(:fetch).with("#{presentable.cache_key}:#{project.cache_key}", expires_in: described_class::DEFAULT_EXPIRY).once
subject
end
end
context "when expires_in is supplied" do
it "sets the expiry when accessing the cache" do
kwargs[:expires_in] = 7.days
expect(instance.cache).to receive(:fetch).with("#{presentable.cache_key}:#{user.cache_key}", expires_in: 7.days).once
subject
end
end
it_behaves_like 'object cache helper'
end
context "for a collection of objects" do
context 'collection of objects' do
let_it_be(:presentable) { Array.new(5).map { create(:todo, project: project) } }
it { is_expected.to be_an(Gitlab::Json::PrecompiledJson) }
it "uses the presenter" do
presentable.each do |todo|
expect(presenter).to receive(:represent).with(todo, project: project)
end
subject
end
it "is valid JSON" do
parsed = Gitlab::Json.parse(subject.to_s)
expect(parsed).to be_an(Array)
presentable.each_with_index do |todo, i|
expect(parsed[i]["id"]).to eq(todo.id)
end
end
it "fetches from the cache" do
keys = presentable.map { |todo| "#{todo.cache_key}:#{user.cache_key}" }
expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: described_class::DEFAULT_EXPIRY).once.and_call_original
subject
end
context "when a cache context is supplied" do
before do
kwargs[:cache_context] = -> (todo) { todo.project.cache_key }
end
it "uses the context to augment the cache key" do
keys = presentable.map { |todo| "#{todo.cache_key}:#{project.cache_key}" }
expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: described_class::DEFAULT_EXPIRY).once.and_call_original
subject
end
end
context "expires_in is supplied" do
it "sets the expiry when accessing the cache" do
keys = presentable.map { |todo| "#{todo.cache_key}:#{user.cache_key}" }
kwargs[:expires_in] = 7.days
expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: 7.days).once.and_call_original
subject
end
end
it_behaves_like 'collection cache helper'
end
end

View file

@@ -0,0 +1,69 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::GroupAvatarPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:entity) do
create(
:bulk_import_entity,
group: group,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'My Destination Group',
destination_namespace: group.full_path
)
end
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject { described_class.new(context) }
describe '#extract' do
it 'downloads the group avatar' do
expect_next_instance_of(
BulkImports::FileDownloadService,
configuration: context.configuration,
relative_url: "/groups/source%2Ffull%2Fpath/avatar",
dir: an_instance_of(String),
file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES
) do |downloader|
expect(downloader).to receive(:execute)
end
subject.run
end
end
describe '#transform' do
it 'returns the given data' do
expect(subject.transform(nil, :value)).to eq(:value)
end
end
describe '#load' do
it 'updates the group avatar' do
avatar_path = 'spec/fixtures/dk.png'
data = { filepath: fixture_file_upload(avatar_path) }
expect { subject.load(context, data) }.to change(group, :avatar)
expect(FileUtils.identical?(avatar_path, group.avatar.file.file)).to eq(true)
end
it 'raises an error when the avatar upload fails' do
avatar_path = 'spec/fixtures/aosp_manifest.xml'
data = { filepath: fixture_file_upload(avatar_path) }
expect { subject.load(context, data) }.to raise_error(
described_class::GroupAvatarLoadingError,
"Avatar file format is not supported. Please try one of the following supported formats: image/png, image/jpeg, image/gif, image/bmp, image/tiff, image/vnd.microsoft.icon"
)
end
end
end

View file

@@ -6,6 +6,7 @@ RSpec.describe BulkImports::Stage do
let(:pipelines) do
[
[0, BulkImports::Groups::Pipelines::GroupPipeline],
[1, BulkImports::Groups::Pipelines::GroupAvatarPipeline],
[1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
[1, BulkImports::Groups::Pipelines::MembersPipeline],
[1, BulkImports::Groups::Pipelines::LabelsPipeline],

spec/lib/gitlab/cache/helpers_spec.rb (new file)
View file

@@ -0,0 +1,49 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Cache::Helpers, :use_clean_rails_redis_caching do
subject(:instance) { Class.new.include(described_class).new }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:presenter) { MergeRequestSerializer.new(current_user: user, project: project) }
before do
# We have to stub #render as it's a Rails controller method unavailable in
# the module by itself
allow(instance).to receive(:render) { |data| data }
allow(instance).to receive(:current_user) { user }
end
describe "#render_cached" do
subject do
instance.render_cached(presentable, **kwargs)
end
let(:kwargs) do
{
with: presenter,
project: project
}
end
context 'single object' do
let_it_be(:presentable) { create(:merge_request, source_project: project, source_branch: 'wip') }
it_behaves_like 'object cache helper'
end
context 'collection of objects' do
let_it_be(:presentable) do
[
create(:merge_request, source_project: project, source_branch: 'fix'),
create(:merge_request, source_project: project, source_branch: 'master')
]
end
it_behaves_like 'collection cache helper'
end
end
end

View file

@@ -4,26 +4,41 @@ require 'spec_helper'
RSpec.describe BulkImports::FileDownloadService do
describe '#execute' do
let_it_be(:allowed_content_types) { %w(application/gzip application/octet-stream) }
let_it_be(:file_size_limit) { 5.gigabytes }
let_it_be(:config) { build(:bulk_import_configuration) }
let_it_be(:content_type) { 'application/octet-stream' }
let_it_be(:content_disposition) { nil }
let_it_be(:filename) { 'file_download_service_spec' }
let_it_be(:tmpdir) { Dir.tmpdir }
let_it_be(:filepath) { File.join(tmpdir, filename) }
let_it_be(:content_length) { 1000 }
let(:chunk_double) { double('chunk', size: 100, code: 200) }
let(:chunk_double) { double('chunk', size: 1000, code: 200) }
let(:response_double) do
double(
code: 200,
success?: true,
parsed_response: {},
headers: {
'content-length' => 100,
'content-type' => content_type
'content-length' => content_length,
'content-type' => content_type,
'content-disposition' => content_disposition
}
)
end
subject { described_class.new(configuration: config, relative_url: '/test', dir: tmpdir, filename: filename) }
subject do
described_class.new(
configuration: config,
relative_url: '/test',
dir: tmpdir,
filename: filename,
file_size_limit: file_size_limit,
allowed_content_types: allowed_content_types
)
end
before do
allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
@@ -54,7 +69,14 @@ RSpec.describe BulkImports::FileDownloadService do
stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
double = instance_double(BulkImports::Configuration, url: 'https://localhost', access_token: 'token')
service = described_class.new(configuration: double, relative_url: '/test', dir: tmpdir, filename: filename)
service = described_class.new(
configuration: double,
relative_url: '/test',
dir: tmpdir,
filename: filename,
file_size_limit: file_size_limit,
allowed_content_types: allowed_content_types
)
expect { service.execute }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
end
@@ -70,31 +92,46 @@ RSpec.describe BulkImports::FileDownloadService do
context 'when content-length is not valid' do
context 'when content-length exceeds limit' do
before do
stub_const("#{described_class}::FILE_SIZE_LIMIT", 1)
end
let(:file_size_limit) { 1 }
it 'raises an error' do
expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid content length')
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'File size 1000 Bytes exceeds limit of 1 Byte'
)
end
end
context 'when content-length is missing' do
let(:response_double) { double(success?: true, headers: { 'content-type' => content_type }) }
let(:content_length) { nil }
it 'raises an error' do
expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid content length')
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'Missing content-length header'
)
end
end
end
context 'when partially downloaded file exceeds limit' do
before do
stub_const("#{described_class}::FILE_SIZE_LIMIT", 150)
context 'when content-length equals the file size limit' do
let(:content_length) { 150 }
let(:file_size_limit) { 150 }
it 'does not raise an error' do
expect { subject.execute }.not_to raise_error
end
end
context 'when partially downloaded file exceeds limit' do
let(:content_length) { 151 }
let(:file_size_limit) { 150 }
it 'raises an error' do
expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid downloaded file')
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'File size 151 Bytes exceeds limit of 150 Bytes'
)
end
end
@@ -102,7 +139,10 @@ RSpec.describe BulkImports::FileDownloadService do
let(:chunk_double) { double('chunk', size: 1000, code: 307) }
it 'raises an error' do
expect { subject.execute }.to raise_error(described_class::ServiceError, 'File download error 307')
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'File download error 307'
)
end
end
@@ -110,23 +150,88 @@ RSpec.describe BulkImports::FileDownloadService do
let_it_be(:symlink) { File.join(tmpdir, 'symlink') }
before do
FileUtils.ln_s(File.join(tmpdir, filename), symlink)
FileUtils.ln_s(File.join(tmpdir, filename), symlink, force: true)
end
subject { described_class.new(configuration: config, relative_url: '/test', dir: tmpdir, filename: 'symlink') }
subject do
described_class.new(
configuration: config,
relative_url: '/test',
dir: tmpdir,
filename: 'symlink',
file_size_limit: file_size_limit,
allowed_content_types: allowed_content_types
)
end
it 'raises an error and removes the file' do
expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid downloaded file')
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'Invalid downloaded file'
)
expect(File.exist?(symlink)).to eq(false)
end
end
context 'when dir is not in tmpdir' do
subject { described_class.new(configuration: config, relative_url: '/test', dir: '/etc', filename: filename) }
subject do
described_class.new(
configuration: config,
relative_url: '/test',
dir: '/etc',
filename: filename,
file_size_limit: file_size_limit,
allowed_content_types: allowed_content_types
)
end
it 'raises an error' do
expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid target directory')
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'Invalid target directory'
)
end
end
context 'when using the remote filename' do
let_it_be(:filename) { nil }
context 'when no filename is given' do
it 'raises an error when the filename is not provided in the request header' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'Remote filename not provided in content-disposition header'
)
end
end
context 'with a given filename' do
let_it_be(:content_disposition) { 'filename="avatar.png"' }
it 'uses the given filename' do
expect(subject.execute).to eq(File.join(tmpdir, "avatar.png"))
end
end
context 'when the filename is a path' do
let_it_be(:content_disposition) { 'filename="../../avatar.png"' }
it 'uses the basename and strips the path components' do
expect(subject.execute).to eq(File.join(tmpdir, "avatar.png"))
end
end
context 'when the filename is longer than the limit' do
let_it_be(:content_disposition) { 'filename="../../xxx.b"' }
before do
stub_const("#{described_class}::FILENAME_SIZE_LIMIT", 1)
end
it 'truncates the filename, keeping the extension' do
expect(subject.execute).to eq(File.join(tmpdir, "x.b"))
end
end
end
end

View file

@@ -0,0 +1,101 @@
# frozen_string_literal: true
RSpec.shared_examples_for 'object cache helper' do
it { is_expected.to be_a(Gitlab::Json::PrecompiledJson) }
it "uses the presenter" do
expect(presenter).to receive(:represent).with(presentable, project: project)
subject
end
it "is valid JSON" do
parsed = Gitlab::Json.parse(subject.to_s)
expect(parsed).to be_a(Hash)
expect(parsed["id"]).to eq(presentable.id)
end
it "fetches from the cache" do
expect(instance.cache).to receive(:fetch).with("#{presenter.class.name}:#{presentable.cache_key}:#{user.cache_key}", expires_in: described_class::DEFAULT_EXPIRY).once
subject
end
context "when a cache context is supplied" do
before do
kwargs[:cache_context] = -> (item) { item.project.cache_key }
end
it "uses the context to augment the cache key" do
expect(instance.cache).to receive(:fetch).with("#{presenter.class.name}:#{presentable.cache_key}:#{project.cache_key}", expires_in: described_class::DEFAULT_EXPIRY).once
subject
end
end
context "when expires_in is supplied" do
it "sets the expiry when accessing the cache" do
kwargs[:expires_in] = 7.days
expect(instance.cache).to receive(:fetch).with("#{presenter.class.name}:#{presentable.cache_key}:#{user.cache_key}", expires_in: 7.days).once
subject
end
end
end
RSpec.shared_examples_for 'collection cache helper' do
it { is_expected.to be_an(Gitlab::Json::PrecompiledJson) }
it "uses the presenter" do
presentable.each do |item|
expect(presenter).to receive(:represent).with(item, project: project)
end
subject
end
it "is valid JSON" do
parsed = Gitlab::Json.parse(subject.to_s)
expect(parsed).to be_an(Array)
presentable.each_with_index do |item, i|
expect(parsed[i]["id"]).to eq(item.id)
end
end
it "fetches from the cache" do
keys = presentable.map { |item| "#{presenter.class.name}:#{item.cache_key}:#{user.cache_key}" }
expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: described_class::DEFAULT_EXPIRY).once.and_call_original
subject
end
context "when a cache context is supplied" do
before do
kwargs[:cache_context] = -> (item) { item.project.cache_key }
end
it "uses the context to augment the cache key" do
keys = presentable.map { |item| "#{presenter.class.name}:#{item.cache_key}:#{project.cache_key}" }
expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: described_class::DEFAULT_EXPIRY).once.and_call_original
subject
end
end
context "expires_in is supplied" do
it "sets the expiry when accessing the cache" do
keys = presentable.map { |item| "#{presenter.class.name}:#{item.cache_key}:#{user.cache_key}" }
kwargs[:expires_in] = 7.days
expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: 7.days).once.and_call_original
subject
end
end
end