Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-10-13 12:10:13 +00:00
parent 61ca90e0b4
commit 70ce746bd0
38 changed files with 1055 additions and 668 deletions

View File

@@ -51,3 +51,15 @@ cache-assets:production:
variables:
NODE_ENV: "production"
RAILS_ENV: "production"
packages-cleanup:
extends:
- .default-retry
- .caching:rules:packages-cleanup
image: ${GITLAB_DEPENDENCY_PROXY}ruby:${RUBY_VERSION}
stage: prepare
before_script:
- source scripts/utils.sh
- install_gitlab_gem
script:
- scripts/packages/automated_cleanup.rb

View File

@@ -17,7 +17,7 @@ review-cleanup:
script:
- delete_release
- delete_namespace
- ruby -rrubygems scripts/review_apps/automated_cleanup.rb
- scripts/review_apps/automated_cleanup.rb
- gcp_cleanup
start-review-app-pipeline:

View File

@@ -673,7 +673,9 @@
#################
.caching:rules:cache-workhorse:
rules:
- <<: *if-dot-com-ee-schedule
# That would run for any project that has a "maintenance" pipeline schedule
# but in fact, the cache package is only uploaded for gitlab.com/gitlab-org/gitlab and jihulab.com/gitlab-cn/gitlab
- <<: *if-default-branch-schedule-maintenance
- <<: *if-dot-com-gitlab-org-default-branch
changes: ["workhorse/**/*"]
- <<: *if-dot-com-gitlab-org-merge-request
@@ -688,7 +690,9 @@
# The new strategy to cache assets as generic packages is experimental and can be disabled by removing the `CACHE_ASSETS_AS_PACKAGE` variable
- if: '$CACHE_ASSETS_AS_PACKAGE != "true"'
when: never
- <<: *if-dot-com-ee-schedule
# That would run for any project that has a "maintenance" pipeline schedule
# but in fact, the cache package is only uploaded for gitlab.com/gitlab-org/gitlab and jihulab.com/gitlab-cn/gitlab
- <<: *if-default-branch-schedule-maintenance
- <<: *if-dot-com-gitlab-org-default-branch
changes: *assets-compilation-patterns
- <<: *if-dot-com-gitlab-org-merge-request
@@ -699,6 +703,21 @@
when: manual
allow_failure: true
.caching:rules:packages-cleanup:
rules:
# The new strategy to cache assets as generic packages is experimental and can be disabled by removing the `CACHE_ASSETS_AS_PACKAGE` variable
- if: '$CACHE_ASSETS_AS_PACKAGE != "true"'
when: never
# That would run for any project that has a "maintenance" pipeline schedule
# but in fact, the cache package is only uploaded for gitlab.com/gitlab-org/gitlab and jihulab.com/gitlab-cn/gitlab
- <<: *if-default-branch-schedule-maintenance
- <<: *if-dot-com-gitlab-org-merge-request
changes:
- ".gitlab/ci/caching.gitlab-ci.yml"
- "scripts/packages/automated_cleanup.rb"
when: manual
allow_failure: true
######################
# CI Templates Rules #
######################

View File

@@ -86,6 +86,7 @@ export default {
<template>
<gl-intersection-observer
class="gl-relative gl-top-2"
@appear="setStickyHeaderVisible(false)"
@disappear="setStickyHeaderVisible(true)"
>

View File

@@ -471,8 +471,24 @@ module ProjectsHelper
}
end
def localized_project_human_access(access)
localized_access_names[access] || Gitlab::Access.human_access(access)
end
private
def localized_access_names
{
Gitlab::Access::NO_ACCESS => _('No access'),
Gitlab::Access::MINIMAL_ACCESS => _("Minimal Access"),
Gitlab::Access::GUEST => _('Guest'),
Gitlab::Access::REPORTER => _('Reporter'),
Gitlab::Access::DEVELOPER => _('Developer'),
Gitlab::Access::MAINTAINER => _('Maintainer'),
Gitlab::Access::OWNER => _('Owner')
}
end
def configure_oauth_import_message(provider, help_url)
str = if current_user.admin?
'ImportProjects|To enable importing projects from %{provider}, as administrator you need to configure %{link_start}OAuth integration%{link_end}'

View File

@@ -130,6 +130,10 @@ class Namespace < ApplicationRecord
to: :namespace_settings, allow_nil: true
delegate :show_diff_preview_in_email, :show_diff_preview_in_email?, :show_diff_preview_in_email=,
to: :namespace_settings
delegate :maven_package_requests_forwarding,
:pypi_package_requests_forwarding,
:npm_package_requests_forwarding,
to: :package_settings
after_save :reload_namespace_details

View File

@@ -1,9 +1,15 @@
# frozen_string_literal: true
class Namespace::PackageSetting < ApplicationRecord
include CascadingNamespaceSettingAttribute
self.primary_key = :namespace_id
self.table_name = 'namespace_package_settings'
cascading_attr :maven_package_requests_forwarding
cascading_attr :npm_package_requests_forwarding
cascading_attr :pypi_package_requests_forwarding
PackageSettingNotImplemented = Class.new(StandardError)
PACKAGES_WITH_SETTINGS = %w[maven generic].freeze

View File

@@ -493,6 +493,11 @@ class Project < ApplicationRecord
delegate :log_jira_dvcs_integration_usage, :jira_dvcs_server_last_sync_at, :jira_dvcs_cloud_last_sync_at, to: :feature_usage
delegate :maven_package_requests_forwarding,
:pypi_package_requests_forwarding,
:npm_package_requests_forwarding,
to: :namespace
# Validations
validates :creator, presence: true, on: :create
validates :description, length: { maximum: 2000 }, allow_blank: true

View File

@@ -40,7 +40,7 @@ module Ci
key, value = scan_line!(line)
variables[key] = Ci::JobVariable.new(job_id: artifact.job_id,
source: :dotenv, key: key, value: value)
source: :dotenv, key: key, value: value, raw: false)
end
end

View File

@@ -179,9 +179,12 @@ module MergeRequests
old_title_draft = MergeRequest.draft?(old_title)
new_title_draft = MergeRequest.draft?(new_title)
# notify the draft status changed. Added/removed message is handled in the
# email template itself, see `change_in_merge_request_draft_status_email` template.
notify_draft_status_changed(merge_request) if old_title_draft || new_title_draft
if old_title_draft || new_title_draft
# notify the draft status changed. Added/removed message is handled in the
# email template itself, see `change_in_merge_request_draft_status_email` template.
notify_draft_status_changed(merge_request)
trigger_merge_request_status_updated(merge_request)
end
if !old_title_draft && new_title_draft
# Marked as Draft
@@ -320,6 +323,10 @@ module MergeRequests
def filter_sentinel_values(param)
param.reject { _1 == 0 }
end
def trigger_merge_request_status_updated(merge_request)
GraphqlTriggers.merge_request_merge_status_updated(merge_request)
end
end
end

View File

@@ -52,7 +52,7 @@
-# haml-lint:disable UnnecessaryStringOutput
= ' ' # prevent haml from eating the space between elements
.metadata-info.gl-mt-3
%span.user-access-role.d-block{ data: { qa_selector: 'user_role_content' } }= Gitlab::Access.human_access(access)
%span.user-access-role.gl-display-block{ data: { qa_selector: 'user_role_content' } }= localized_project_human_access(access)
- if !explore_projects_tab?
.metadata-info.gl-mt-3

View File

@@ -0,0 +1,8 @@
---
name: cascade_package_forwarding_settings
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/99285
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/375761
milestone: '15.5'
type: development
group: group::package
default_enabled: false

View File

@@ -0,0 +1,12 @@
# frozen_string_literal: true
class RedoRemoveAndAddCiJobVariablesRawWithNewDefault < Gitlab::Database::Migration[2.0]
enable_lock_retries!
# We are removing and adding the same column with the same parameters to refresh the table
# because we have some wrong `ci_job_variables.raw` data (`TRUE`) in the database.
def change
remove_column :ci_job_variables, :raw, :boolean, null: false, default: false
add_column :ci_job_variables, :raw, :boolean, null: false, default: false
end
end

View File

@@ -0,0 +1 @@
70f78c3af8d43ea71a795d0cd28bf74aa8a5232ea5e5b9dcb59699b5f8cccfaa

View File

@@ -505,7 +505,7 @@ Parameters:
| `bio` | No | User's biography |
| `can_create_group` | No | User can create groups - true or false |
| `color_scheme_id` | No | User's color scheme for the file viewer (see [the user preference docs](../user/profile/preferences.md#syntax-highlighting-theme) for more information) |
| `commit_email` | No | User's commit email, `_private` to use the private commit email. |
| `commit_email` | No | User's commit email. Set to `_private` to use the private commit email. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/375148) in GitLab 15.5. |
| `email` | No | Email |
| `extern_uid` | No | External UID |
| `external` | No | Flags the user as external - true or false (default) |
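As a hedged illustration of the `commit_email` behavior documented above (assuming the existing `PUT /users/:id` administrator endpoint; the user ID, hostname, and token are placeholders):

```shell
# Switch user 42's commit email to their private commit email address
curl --request PUT --header "PRIVATE-TOKEN: <admin_token>" \
  "https://gitlab.example.com/api/v4/users/42?commit_email=_private"
```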

View File

@@ -11,20 +11,78 @@ info: To determine the technical writer assigned to the Stage/Group associated w
> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/29280) from **Settings > CI/CD** to **Settings > Repository** in GitLab 12.10.1.
> - [Added](https://gitlab.com/gitlab-org/gitlab/-/issues/213566) package registry scopes in GitLab 13.0.
Deploy tokens allow you to download (`git clone`) or push and pull packages and
container registry images of a project without having a user and a password.
You can use a deploy token to enable authentication of deployment tasks, independent of a user
account. In most cases you use a deploy token from an external host, like a build server or CI/CD
server.
Deploy tokens can be managed only by users with the Maintainer role.
With a deploy token, automated tasks can:
- Clone Git repositories.
- Pull from and push to a GitLab container registry.
- Pull from and push to a GitLab package registry.
A deploy token is a pair of values:
- **username**: `username` in the HTTP authentication framework. The default username format is
`gitlab+deploy-token-{n}`. You can specify a custom username when you create the deploy token.
- **token**: `password` in the HTTP authentication framework.
You can use a deploy token for [HTTP authentication](https://developer.mozilla.org/en-US/docs/Web/HTTP/Authentication)
to the following endpoints:
- GitLab Package Registry public API.
- [Git commands](https://git-scm.com/docs/gitcredentials#_description).
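For example, a minimal sketch of cloning over HTTPS with a deploy token as the HTTP credentials (the host, token, and project path are placeholders; `gitlab+deploy-token-1` follows the default username format described above):

```shell
# The deploy token needs the read_repository scope for git clone
git clone https://gitlab+deploy-token-1:<deploy_token>@gitlab.example.com/group/project.git
```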
You can create deploy tokens at either the project or group level:
- **Project deploy token**: Permissions apply only to the project.
- **Group deploy token**: Permissions apply to all projects in the group.
By default, a deploy token does not expire. You can optionally set an expiry date when you create
it. Expiry occurs at midnight UTC on that date.
## Scope
A deploy token's scope determines the actions it can perform.
| Scope | Description |
|--------------------------|--------------------------------------------------------------------------------------------------------------|
| `read_repository` | Read-only access to the repository using `git clone`. |
| `read_registry` | Read-only access to the images in the project's [container registry](../../packages/container_registry/index.md). |
| `write_registry` | Write access (push) to the project's [container registry](../../packages/container_registry/index.md). |
| `read_package_registry` | Read-only access to the project's package registry. |
| `write_package_registry` | Write access to the project's package registry. |
## GitLab deploy token
> - Support for `gitlab-deploy-token` at the group level [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/214014) in GitLab 15.1 [with a flag](../../../administration/feature_flags.md) named `ci_variable_for_group_gitlab_deploy_token`. Enabled by default.
> - [Feature flag `ci_variable_for_group_gitlab_deploy_token`](https://gitlab.com/gitlab-org/gitlab/-/issues/363621) removed in GitLab 15.4.
A GitLab deploy token is a special type of deploy token. If you create a deploy token named
`gitlab-deploy-token`, the deploy token is automatically exposed to the CI/CD jobs as variables, for
use in a CI/CD pipeline:
- `CI_DEPLOY_USER`: Username
- `CI_DEPLOY_PASSWORD`: Token
For example, to use a GitLab token to log in to your GitLab container registry:
```shell
docker login -u $CI_DEPLOY_USER -p $CI_DEPLOY_PASSWORD $CI_REGISTRY
```
NOTE:
In GitLab 15.0 and earlier, the special handling for the `gitlab-deploy-token` deploy token does not
work for group deploy tokens. To make a group deploy token available for CI/CD jobs, set the
`CI_DEPLOY_USER` and `CI_DEPLOY_PASSWORD` CI/CD variables in **Settings > CI/CD > Variables** to the
name and token of the group deploy token.
### GitLab public API
Deploy tokens can't be used with the GitLab public API. However, you can use deploy tokens with some
endpoints, such as those from the Package Registry. For details, see
endpoints, such as those from the Package Registry. For more information, see
[Authenticate with the registry](../../packages/package_registry/index.md#authenticate-with-the-registry).
Deploy tokens are tied to the project and stay enabled even when the user who created the token is removed from the project.
If you have a key pair, you might want to use [deploy keys](../../project/deploy_keys/index.md)
instead.
## Creating a Deploy token
You can create as many deploy tokens as you need from the settings of your
@@ -37,7 +95,7 @@ project. Alternatively, you can also create [group-scoped deploy tokens](#group-
1. On the left sidebar, select **Settings > Repository**.
1. Expand **Deploy tokens**.
1. Choose a name, and optionally, an expiration date and username for the token.
1. Choose the [desired scopes](#limiting-scopes-of-a-deploy-token).
1. Choose the [desired scopes](#scope).
1. Select **Create deploy token**.
Save the deploy token somewhere safe. After you leave or refresh
@@ -45,10 +103,6 @@ the page, **you can't access it again**.
![Personal access tokens page](img/deploy_tokens_ui.png)
## Deploy token expiration
Deploy tokens expire at midnight UTC on the date you define.
## Revoking a deploy token
To revoke a deploy token:
@@ -60,28 +114,6 @@ To revoke a deploy token:
1. Expand **Deploy tokens**.
1. In the **Active Deploy Tokens** section, by the token you want to revoke, select **Revoke**.
## Limiting scopes of a deploy token
Deploy tokens can be created with different scopes that allow various actions
that a given token can perform. The available scopes are shown in the
following table, along with the GitLab version each was introduced in:
| Scope | Description | Introduced in GitLab Version |
|--------------------------|-------------|------------------------------|
| `read_repository` | Grants read-access to the repository through `git clone` | -- |
| `read_registry` | Grants read-access to [container registry](../../packages/container_registry/index.md) images if a project is private and authorization is required. | -- |
| `write_registry` | Grants write-access (push) to [container registry](../../packages/container_registry/index.md). | 12.10 |
| `read_package_registry` | Grants read access to the package registry. | 13.0 |
| `write_package_registry` | Grants write access to the package registry. | 13.0 |
## Deploy token custom username
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/29639) in GitLab 12.1.
The default username format is `gitlab+deploy-token-{n}`. Some tools or
platforms may not support this format; in this case you can specify a custom
username to be used when creating the deploy token.
## Usage
### Git clone a repository
@@ -192,29 +224,6 @@ To pull images from the Dependency Proxy, you must:
1. Take note of your `username` and `token`.
1. Follow the Dependency Proxy [authentication instructions](../../packages/dependency_proxy/index.md).
### GitLab deploy token
> - Support for `gitlab-deploy-token` at the group level [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/214014) in GitLab 15.1 [with a flag](../../../administration/feature_flags.md) named `ci_variable_for_group_gitlab_deploy_token`. Enabled by default.
> - [Feature flag `ci_variable_for_group_gitlab_deploy_token`](https://gitlab.com/gitlab-org/gitlab/-/issues/363621) removed in GitLab 15.4.
There's a special case when it comes to deploy tokens. If a user creates one
named `gitlab-deploy-token`, the username and token of the deploy token are
automatically exposed to the CI/CD jobs as CI/CD variables: `CI_DEPLOY_USER`
and `CI_DEPLOY_PASSWORD`, respectively.
After you create the token, you can sign in to the Container Registry by using
those variables:
```shell
docker login -u $CI_DEPLOY_USER -p $CI_DEPLOY_PASSWORD $CI_REGISTRY
```
NOTE:
In GitLab 15.0 and earlier, the special handling for the `gitlab-deploy-token` deploy token
does not work for group deploy tokens. To make the group-level deploy token available
for CI/CD jobs, the `CI_DEPLOY_USER` and `CI_DEPLOY_PASSWORD` CI/CD variables must be
set in **Settings > CI/CD > Variables** to the name and token of the group deploy token.
## Troubleshooting
### Group deploy tokens and LFS

View File

@@ -116,7 +116,12 @@ module API
redirect_request = project_or_nil.blank? || packages.empty?
redirect_registry_request(redirect_request, :npm, package_name: package_name) do
redirect_registry_request(
forward_to_registry: redirect_request,
package_type: :npm,
target: project_or_nil,
package_name: package_name
) do
authorize_read_package!(project)
not_found!('Packages') if packages.empty?

View File

@@ -16,8 +16,8 @@ module API
maven: 'maven_package_requests_forwarding'
}.freeze
def redirect_registry_request(forward_to_registry, package_type, options)
if forward_to_registry && redirect_registry_request_available?(package_type) && maven_forwarding_ff_enabled?(package_type, options[:target])
def redirect_registry_request(forward_to_registry: false, package_type: nil, target: nil, **options)
if forward_to_registry && redirect_registry_request_available?(package_type, target) && maven_forwarding_ff_enabled?(package_type, target)
::Gitlab::Tracking.event(self.options[:for].name, "#{package_type}_request_forward")
redirect(registry_url(package_type, options))
else
@@ -40,15 +40,19 @@ module API
end
end
def redirect_registry_request_available?(package_type)
def redirect_registry_request_available?(package_type, target)
application_setting_name = APPLICATION_SETTING_NAMES[package_type]
raise ArgumentError, "Can't find application setting for package_type #{package_type}" unless application_setting_name
::Gitlab::CurrentSettings
.current_application_settings
.attributes
.fetch(application_setting_name, false)
if target.present? && Feature.enabled?(:cascade_package_forwarding_settings, target)
target.public_send(application_setting_name) # rubocop:disable GitlabSecurity/PublicSend
else
::Gitlab::CurrentSettings
.current_application_settings
.attributes
.fetch(application_setting_name, false)
end
end
private

View File

@@ -125,7 +125,13 @@ module API
no_package_found = package_file ? false : true
redirect_registry_request(no_package_found, :maven, path: params[:path], file_name: params[:file_name], target: params[:target]) do
redirect_registry_request(
forward_to_registry: no_package_found,
package_type: :maven,
target: params[:target],
path: params[:path],
file_name: params[:file_name]
) do
not_found!('Package') if no_package_found
case format

View File

@@ -56,7 +56,12 @@ module API
packages = Packages::Pypi::PackagesFinder.new(current_user, group_or_project, { package_name: params[:package_name] }).execute
empty_packages = packages.empty?
redirect_registry_request(empty_packages, :pypi, package_name: params[:package_name]) do
redirect_registry_request(
forward_to_registry: empty_packages,
package_type: :pypi,
target: group_or_project,
package_name: params[:package_name]
) do
not_found!('Package') if empty_packages
presenter = ::Packages::Pypi::SimplePackageVersionsPresenter.new(packages, group_or_project)

View File

@@ -13671,6 +13671,9 @@ msgstr ""
msgid "DevOps adoption"
msgstr ""
msgid "Developer"
msgstr ""
msgid "Development"
msgstr ""
@@ -19476,6 +19479,9 @@ msgstr ""
msgid "Groups|You're creating a new top-level group"
msgstr ""
msgid "Guest"
msgstr ""
msgid "Guideline"
msgstr ""
@@ -24457,6 +24463,9 @@ msgstr ""
msgid "Main menu"
msgstr ""
msgid "Maintainer"
msgstr ""
msgid "Maintenance mode"
msgstr ""
@@ -26034,6 +26043,9 @@ msgstr ""
msgid "Min Value"
msgstr ""
msgid "Minimal Access"
msgstr ""
msgid "Minimum capacity to be available before we schedule more mirrors preemptively."
msgstr ""
@@ -26752,6 +26764,9 @@ msgstr ""
msgid "No Work Item Link found"
msgstr ""
msgid "No access"
msgstr ""
msgid "No active admin user found"
msgstr ""
@@ -33754,6 +33769,9 @@ msgstr ""
msgid "Reported by %{reporter}"
msgstr ""
msgid "Reporter"
msgstr ""
msgid "Reporting"
msgstr ""

View File

@@ -0,0 +1,120 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
require 'optparse'
require 'gitlab'
module Packages
class AutomatedCleanup
PACKAGES_PER_PAGE = 100
# $GITLAB_PROJECT_PACKAGES_CLEANUP_API_TOKEN => `Packages Cleanup` project token
def initialize(
project_path: ENV['CI_PROJECT_PATH'],
gitlab_token: ENV['GITLAB_PROJECT_PACKAGES_CLEANUP_API_TOKEN'],
options: {}
)
@project_path = project_path
@gitlab_token = gitlab_token
@dry_run = options[:dry_run]
puts "Dry-run mode." if dry_run
end
def gitlab
@gitlab ||= begin
Gitlab.configure do |config|
config.endpoint = 'https://gitlab.com/api/v4'
config.private_token = gitlab_token
end
Gitlab
end
end
def perform_gitlab_package_cleanup!(package_name:, days_for_delete:)
puts "Checking for '#{package_name}' packages created at least #{days_for_delete} days ago..."
gitlab.project_packages(project_path,
package_type: 'generic',
package_name: package_name,
per_page: PACKAGES_PER_PAGE).auto_paginate do |package|
next unless package.name == package_name # the search is fuzzy, so we better check the actual package name
if old_enough(package, days_for_delete) && not_recently_downloaded(package, days_for_delete)
delete_package(package)
end
end
end
private
attr_reader :project_path, :gitlab_token, :dry_run
def delete_package(package)
print_package_state(package)
gitlab.delete_project_package(project_path, package.id) unless dry_run
rescue Gitlab::Error::Forbidden
puts "Package #{package_full_name(package)} is forbidden: skipping it"
end
def time_ago(days:)
Time.now - days * 24 * 3600
end
def old_enough(package, days_for_delete)
Time.parse(package.created_at) < time_ago(days: days_for_delete)
end
def not_recently_downloaded(package, days_for_delete)
package.last_downloaded_at.nil? ||
Time.parse(package.last_downloaded_at) < time_ago(days: days_for_delete)
end
def print_package_state(package)
download_text =
if package.last_downloaded_at
"last downloaded on #{package.last_downloaded_at}"
else
"never downloaded"
end
puts "\nPackage #{package_full_name(package)} (created on #{package.created_at}) was " \
"#{download_text}: deleting it.\n"
end
def package_full_name(package)
"'#{package.name}/#{package.version}'"
end
end
end
def timed(task)
start = Time.now
yield(self)
puts "#{task} finished in #{Time.now - start} seconds.\n"
end
if $0 == __FILE__
options = {
dry_run: false
}
OptionParser.new do |opts|
opts.on("-d", "--dry-run", "Whether to perform a dry-run or not.") do |value|
options[:dry_run] = true
end
opts.on("-h", "--help", "Prints this help") do
puts opts
exit
end
end.parse!
automated_cleanup = Packages::AutomatedCleanup.new(options: options)
timed('"assets" packages cleanup') do
automated_cleanup.perform_gitlab_package_cleanup!(package_name: 'assets', days_for_delete: 7)
end
end
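A sketch of running the script locally, assuming the environment variables read in `initialize` are exported (the token value is a placeholder):

```shell
# Dry-run mode prints which packages would be deleted without calling the delete API
export GITLAB_PROJECT_PACKAGES_CLEANUP_API_TOKEN=<token>
export CI_PROJECT_PATH=gitlab-org/gitlab
scripts/packages/automated_cleanup.rb --dry-run
```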

View File

@@ -1,252 +1,261 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
require 'optparse'
require 'gitlab'
require_relative File.expand_path('../../tooling/lib/tooling/helm3_client.rb', __dir__)
require_relative File.expand_path('../../tooling/lib/tooling/kubernetes_client.rb', __dir__)
class AutomatedCleanup
attr_reader :project_path, :gitlab_token
module ReviewApps
class AutomatedCleanup
DEPLOYMENTS_PER_PAGE = 100
ENVIRONMENT_PREFIX = {
review_app: 'review/',
docs_review_app: 'review-docs/'
}.freeze
IGNORED_HELM_ERRORS = [
'transport is closing',
'error upgrading connection',
'not found'
].freeze
IGNORED_KUBERNETES_ERRORS = [
'NotFound'
].freeze
DEPLOYMENTS_PER_PAGE = 100
ENVIRONMENT_PREFIX = {
review_app: 'review/',
docs_review_app: 'review-docs/'
}.freeze
IGNORED_HELM_ERRORS = [
'transport is closing',
'error upgrading connection',
'not found'
].freeze
IGNORED_KUBERNETES_ERRORS = [
'NotFound'
].freeze
# $GITLAB_PROJECT_REVIEW_APP_CLEANUP_API_TOKEN => `Automated Review App Cleanup` project token
def initialize(
project_path: ENV['CI_PROJECT_PATH'],
gitlab_token: ENV['GITLAB_PROJECT_REVIEW_APP_CLEANUP_API_TOKEN'],
options: {}
)
@project_path = project_path
@gitlab_token = gitlab_token
@dry_run = options[:dry_run]
def self.ee?
# Support former project name for `dev`
%w[gitlab gitlab-ee].include?(ENV['CI_PROJECT_NAME'])
end
puts "Dry-run mode." if dry_run
end
# $GITLAB_PROJECT_REVIEW_APP_CLEANUP_API_TOKEN => `Automated Review App Cleanup` project token
def initialize(project_path: ENV['CI_PROJECT_PATH'], gitlab_token: ENV['GITLAB_PROJECT_REVIEW_APP_CLEANUP_API_TOKEN'])
@project_path = project_path
@gitlab_token = gitlab_token
end
def gitlab
@gitlab ||= begin
Gitlab.configure do |config|
config.endpoint = 'https://gitlab.com/api/v4'
# gitlab-bot's token "GitLab review apps cleanup"
config.private_token = gitlab_token
end
def gitlab
@gitlab ||= begin
Gitlab.configure do |config|
config.endpoint = 'https://gitlab.com/api/v4'
# gitlab-bot's token "GitLab review apps cleanup"
config.private_token = gitlab_token
Gitlab
end
end
def review_apps_namespace
'review-apps'
end
def helm
@helm ||= Tooling::Helm3Client.new(namespace: review_apps_namespace)
end
def kubernetes
@kubernetes ||= Tooling::KubernetesClient.new(namespace: review_apps_namespace)
end
def perform_gitlab_environment_cleanup!(days_for_stop:, days_for_delete:)
puts "Checking for Review Apps not updated in the last #{days_for_stop} days..."
checked_environments = []
delete_threshold = threshold_time(days: days_for_delete)
stop_threshold = threshold_time(days: days_for_stop)
deployments_look_back_threshold = threshold_time(days: days_for_delete * 5)
releases_to_delete = []
# Delete environments via deployments
gitlab.deployments(project_path, per_page: DEPLOYMENTS_PER_PAGE, sort: 'desc').auto_paginate do |deployment|
break if Time.parse(deployment.created_at) < deployments_look_back_threshold
environment = deployment.environment
next unless environment
next unless environment.name.start_with?(ENVIRONMENT_PREFIX[:review_app])
next if checked_environments.include?(environment.slug)
last_deploy = deployment.created_at
deployed_at = Time.parse(last_deploy)
if deployed_at < delete_threshold
deleted_environment = delete_environment(environment, deployment)
if deleted_environment
release = Tooling::Helm3Client::Release.new(environment.slug, 1, deployed_at.to_s, nil, nil, review_apps_namespace)
releases_to_delete << release
end
else
if deployed_at >= stop_threshold
print_release_state(subject: 'Review App', release_name: environment.slug, release_date: last_deploy, action: 'leaving')
else
environment_state = fetch_environment(environment)&.state
stop_environment(environment, deployment) if environment_state && environment_state != 'stopped'
end
end
checked_environments << environment.slug
end
Gitlab
delete_stopped_environments(environment_type: :review_app, checked_environments: checked_environments, last_updated_threshold: delete_threshold) do |environment|
releases_to_delete << Tooling::Helm3Client::Release.new(environment.slug, 1, environment.updated_at, nil, nil, review_apps_namespace)
end
delete_helm_releases(releases_to_delete)
end
end
def review_apps_namespace
'review-apps'
end
def perform_gitlab_docs_environment_cleanup!(days_for_stop:, days_for_delete:)
puts "Checking for Docs Review Apps not updated in the last #{days_for_stop} days..."
def helm
@helm ||= Tooling::Helm3Client.new(namespace: review_apps_namespace)
end
checked_environments = []
stop_threshold = threshold_time(days: days_for_stop)
delete_threshold = threshold_time(days: days_for_delete)
def kubernetes
@kubernetes ||= Tooling::KubernetesClient.new(namespace: review_apps_namespace)
end
# Delete environments via deployments
gitlab.deployments(project_path, per_page: DEPLOYMENTS_PER_PAGE, sort: 'desc').auto_paginate do |deployment|
environment = deployment.environment
def perform_gitlab_environment_cleanup!(days_for_stop:, days_for_delete:)
puts "Checking for Review Apps not updated in the last #{days_for_stop} days..."
next unless environment
next unless environment.name.start_with?(ENVIRONMENT_PREFIX[:docs_review_app])
next if checked_environments.include?(environment.slug)
checked_environments = []
delete_threshold = threshold_time(days: days_for_delete)
stop_threshold = threshold_time(days: days_for_stop)
deployments_look_back_threshold = threshold_time(days: days_for_delete * 5)
last_deploy = deployment.created_at
deployed_at = Time.parse(last_deploy)
releases_to_delete = []
# Delete environments via deployments
gitlab.deployments(project_path, per_page: DEPLOYMENTS_PER_PAGE, sort: 'desc').auto_paginate do |deployment|
break if Time.parse(deployment.created_at) < deployments_look_back_threshold
environment = deployment.environment
next unless environment
next unless environment.name.start_with?(ENVIRONMENT_PREFIX[:review_app])
next if checked_environments.include?(environment.slug)
last_deploy = deployment.created_at
deployed_at = Time.parse(last_deploy)
if deployed_at < delete_threshold
deleted_environment = delete_environment(environment, deployment)
if deleted_environment
release = Tooling::Helm3Client::Release.new(environment.slug, 1, deployed_at.to_s, nil, nil, review_apps_namespace)
releases_to_delete << release
end
else
if deployed_at >= stop_threshold
print_release_state(subject: 'Review App', release_name: environment.slug, release_date: last_deploy, action: 'leaving')
else
if deployed_at < stop_threshold
environment_state = fetch_environment(environment)&.state
stop_environment(environment, deployment) if environment_state && environment_state != 'stopped'
end
delete_environment(environment, deployment) if deployed_at < delete_threshold
checked_environments << environment.slug
end
checked_environments << environment.slug
delete_stopped_environments(environment_type: :docs_review_app, checked_environments: checked_environments, last_updated_threshold: delete_threshold)
end
delete_stopped_environments(environment_type: :review_app, checked_environments: checked_environments, last_updated_threshold: delete_threshold) do |environment|
releases_to_delete << Tooling::Helm3Client::Release.new(environment.slug, 1, environment.updated_at, nil, nil, review_apps_namespace)
end
def perform_helm_releases_cleanup!(days:)
puts "Checking for Helm releases that are failed or not updated in the last #{days} days..."
delete_helm_releases(releases_to_delete)
end
threshold = threshold_time(days: days)
def perform_gitlab_docs_environment_cleanup!(days_for_stop:, days_for_delete:)
puts "Checking for Docs Review Apps not updated in the last #{days_for_stop} days..."
releases_to_delete = []
checked_environments = []
stop_threshold = threshold_time(days: days_for_stop)
delete_threshold = threshold_time(days: days_for_delete)
helm_releases.each do |release|
# Prevents deleting `dns-gitlab-review-app` releases or other unrelated releases
next unless release.name.start_with?('review-')
# Delete environments via deployments
gitlab.deployments(project_path, per_page: DEPLOYMENTS_PER_PAGE, sort: 'desc').auto_paginate do |deployment|
environment = deployment.environment
next unless environment
next unless environment.name.start_with?(ENVIRONMENT_PREFIX[:docs_review_app])
next if checked_environments.include?(environment.slug)
last_deploy = deployment.created_at
deployed_at = Time.parse(last_deploy)
if deployed_at < stop_threshold
environment_state = fetch_environment(environment)&.state
stop_environment(environment, deployment) if environment_state && environment_state != 'stopped'
if release.status == 'failed' || release.last_update < threshold
releases_to_delete << release
else
print_release_state(subject: 'Release', release_name: release.name, release_date: release.last_update, action: 'leaving')
end
end
delete_environment(environment, deployment) if deployed_at < delete_threshold
checked_environments << environment.slug
delete_helm_releases(releases_to_delete)
end
delete_stopped_environments(environment_type: :docs_review_app, checked_environments: checked_environments, last_updated_threshold: delete_threshold)
end
def perform_stale_namespace_cleanup!(days:)
kubernetes_client = Tooling::KubernetesClient.new(namespace: nil)
def perform_helm_releases_cleanup!(days:)
puts "Checking for Helm releases that are failed or not updated in the last #{days} days..."
kubernetes_client.cleanup_review_app_namespaces(created_before: threshold_time(days: days), wait: false) unless dry_run
end
threshold = threshold_time(days: days)
def perform_stale_pvc_cleanup!(days:)
kubernetes.cleanup_by_created_at(resource_type: 'pvc', created_before: threshold_time(days: days), wait: false) unless dry_run
end
releases_to_delete = []
private
helm_releases.each do |release|
# Prevents deleting `dns-gitlab-review-app` releases or other unrelated releases
next unless release.name.start_with?('review-')
attr_reader :project_path, :gitlab_token, :dry_run
if release.status == 'failed' || release.last_update < threshold
releases_to_delete << release
else
print_release_state(subject: 'Release', release_name: release.name, release_date: release.last_update, action: 'leaving')
def fetch_environment(environment)
gitlab.environment(project_path, environment.id)
rescue Errno::ETIMEDOUT => ex
puts "Failed to fetch '#{environment.name}' / '#{environment.slug}' (##{environment.id}):\n#{ex.message}"
nil
end
def delete_environment(environment, deployment = nil)
release_date = deployment ? deployment.created_at : environment.updated_at
print_release_state(subject: 'Review app', release_name: environment.slug, release_date: release_date, action: 'deleting')
gitlab.delete_environment(project_path, environment.id) unless dry_run
rescue Gitlab::Error::Forbidden
puts "Review app '#{environment.name}' / '#{environment.slug}' (##{environment.id}) is forbidden: skipping it"
end
def stop_environment(environment, deployment)
print_release_state(subject: 'Review app', release_name: environment.slug, release_date: deployment.created_at, action: 'stopping')
gitlab.stop_environment(project_path, environment.id) unless dry_run
rescue Gitlab::Error::Forbidden
puts "Review app '#{environment.name}' / '#{environment.slug}' (##{environment.id}) is forbidden: skipping it"
end
def delete_stopped_environments(environment_type:, checked_environments:, last_updated_threshold:)
gitlab.environments(project_path, per_page: DEPLOYMENTS_PER_PAGE, sort: 'desc', states: 'stopped', search: ENVIRONMENT_PREFIX[environment_type]).auto_paginate do |environment|
next if skip_environment?(environment: environment, checked_environments: checked_environments, last_updated_threshold: last_updated_threshold, environment_type: environment_type)
yield environment if delete_environment(environment)
checked_environments << environment.slug
end
end
delete_helm_releases(releases_to_delete)
end
def skip_environment?(environment:, checked_environments:, last_updated_threshold:, environment_type:)
return true unless environment.name.start_with?(ENVIRONMENT_PREFIX[environment_type])
return true if checked_environments.include?(environment.slug)
return true if Time.parse(environment.updated_at) > last_updated_threshold
def perform_stale_namespace_cleanup!(days:)
kubernetes_client = Tooling::KubernetesClient.new(namespace: nil)
kubernetes_client.cleanup_review_app_namespaces(created_before: threshold_time(days: days), wait: false)
end
def perform_stale_pvc_cleanup!(days:)
kubernetes.cleanup_by_created_at(resource_type: 'pvc', created_before: threshold_time(days: days), wait: false)
end
private
def fetch_environment(environment)
gitlab.environment(project_path, environment.id)
rescue Errno::ETIMEDOUT => ex
puts "Failed to fetch '#{environment.name}' / '#{environment.slug}' (##{environment.id}):\n#{ex.message}"
nil
end
def delete_environment(environment, deployment = nil)
release_date = deployment ? deployment.created_at : environment.updated_at
print_release_state(subject: 'Review app', release_name: environment.slug, release_date: release_date, action: 'deleting')
gitlab.delete_environment(project_path, environment.id)
rescue Gitlab::Error::Forbidden
puts "Review app '#{environment.name}' / '#{environment.slug}' (##{environment.id}) is forbidden: skipping it"
end
def stop_environment(environment, deployment)
print_release_state(subject: 'Review app', release_name: environment.slug, release_date: deployment.created_at, action: 'stopping')
gitlab.stop_environment(project_path, environment.id)
rescue Gitlab::Error::Forbidden
puts "Review app '#{environment.name}' / '#{environment.slug}' (##{environment.id}) is forbidden: skipping it"
end
def delete_stopped_environments(environment_type:, checked_environments:, last_updated_threshold:)
gitlab.environments(project_path, per_page: DEPLOYMENTS_PER_PAGE, sort: 'desc', states: 'stopped', search: ENVIRONMENT_PREFIX[environment_type]).auto_paginate do |environment|
next if skip_environment?(environment: environment, checked_environments: checked_environments, last_updated_threshold: last_updated_threshold, environment_type: environment_type)
yield environment if delete_environment(environment)
checked_environments << environment.slug
end
end
def skip_environment?(environment:, checked_environments:, last_updated_threshold:, environment_type:)
return true unless environment.name.start_with?(ENVIRONMENT_PREFIX[environment_type])
return true if checked_environments.include?(environment.slug)
return true if Time.parse(environment.updated_at) > last_updated_threshold
false
end
def helm_releases
args = ['--all', '--date']
helm.releases(args: args)
end
def delete_helm_releases(releases)
return if releases.empty?
releases.each do |release|
print_release_state(subject: 'Release', release_name: release.name, release_status: release.status, release_date: release.last_update, action: 'cleaning')
false
end
releases_names = releases.map(&:name)
helm.delete(release_name: releases_names)
kubernetes.cleanup_by_release(release_name: releases_names, wait: false)
def helm_releases
args = ['--all', '--date']
rescue Tooling::Helm3Client::CommandFailedError => ex
raise ex unless ignore_exception?(ex.message, IGNORED_HELM_ERRORS)
helm.releases(args: args)
end
puts "Ignoring the following Helm error:\n#{ex}\n"
rescue Tooling::KubernetesClient::CommandFailedError => ex
raise ex unless ignore_exception?(ex.message, IGNORED_KUBERNETES_ERRORS)
def delete_helm_releases(releases)
return if releases.empty?
puts "Ignoring the following Kubernetes error:\n#{ex}\n"
end
releases.each do |release|
print_release_state(subject: 'Release', release_name: release.name, release_status: release.status, release_date: release.last_update, action: 'cleaning')
end
def threshold_time(days:)
Time.now - days * 24 * 3600
end
releases_names = releases.map(&:name)
unless dry_run
helm.delete(release_name: releases_names)
kubernetes.cleanup_by_release(release_name: releases_names, wait: false)
end
def ignore_exception?(exception_message, exceptions_ignored)
exception_message.match?(/(#{exceptions_ignored})/)
end
rescue Tooling::Helm3Client::CommandFailedError => ex
raise ex unless ignore_exception?(ex.message, IGNORED_HELM_ERRORS)
def print_release_state(subject:, release_name:, release_date:, action:, release_status: nil)
puts "\n#{subject} '#{release_name}' #{"(#{release_status}) " if release_status}was last deployed on #{release_date}: #{action} it.\n"
puts "Ignoring the following Helm error:\n#{ex}\n"
rescue Tooling::KubernetesClient::CommandFailedError => ex
raise ex unless ignore_exception?(ex.message, IGNORED_KUBERNETES_ERRORS)
puts "Ignoring the following Kubernetes error:\n#{ex}\n"
end
def threshold_time(days:)
Time.now - days * 24 * 3600
end
def ignore_exception?(exception_message, exceptions_ignored)
exception_message.match?(/(#{exceptions_ignored})/)
end
def print_release_state(subject:, release_name:, release_date:, action:, release_status: nil)
puts "\n#{subject} '#{release_name}' #{"(#{release_status}) " if release_status}was last deployed on #{release_date}: #{action} it.\n"
end
end
end
@@ -256,28 +265,43 @@ def timed(task)
puts "#{task} finished in #{Time.now - start} seconds.\n"
end
automated_cleanup = AutomatedCleanup.new
if $0 == __FILE__
options = {
dry_run: false
}
timed('Review Apps cleanup') do
automated_cleanup.perform_gitlab_environment_cleanup!(days_for_stop: 5, days_for_delete: 6)
OptionParser.new do |opts|
opts.on("-d", "--dry-run", "Whether to perform a dry-run or not.") do |value|
options[:dry_run] = true
end
opts.on("-h", "--help", "Prints this help") do
puts opts
exit
end
end.parse!
automated_cleanup = ReviewApps::AutomatedCleanup.new(options: options)
timed('Review Apps cleanup') do
automated_cleanup.perform_gitlab_environment_cleanup!(days_for_stop: 5, days_for_delete: 6)
end
timed('Docs Review Apps cleanup') do
automated_cleanup.perform_gitlab_docs_environment_cleanup!(days_for_stop: 20, days_for_delete: 30)
end
puts
timed('Helm releases cleanup') do
automated_cleanup.perform_helm_releases_cleanup!(days: 7)
end
timed('Stale Namespace cleanup') do
automated_cleanup.perform_stale_namespace_cleanup!(days: 14)
end
timed('Stale PVC cleanup') do
automated_cleanup.perform_stale_pvc_cleanup!(days: 30)
end
end
timed('Docs Review Apps cleanup') do
automated_cleanup.perform_gitlab_docs_environment_cleanup!(days_for_stop: 20, days_for_delete: 30)
end
puts
timed('Helm releases cleanup') do
automated_cleanup.perform_helm_releases_cleanup!(days: 7)
end
timed('Stale Namespace cleanup') do
automated_cleanup.perform_stale_namespace_cleanup!(days: 14)
end
timed('Stale PVC cleanup') do
automated_cleanup.perform_stale_pvc_cleanup!(days: 30)
end
exit(0)
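As with the packages script, a local invocation sketch under the same assumptions (exported token and project path, placeholder values):

```shell
# In dry-run mode, environment, Helm release, namespace, and PVC deletions are skipped
export GITLAB_PROJECT_REVIEW_APP_CLEANUP_API_TOKEN=<token>
export CI_PROJECT_PATH=gitlab-org/gitlab
scripts/review_apps/automated_cleanup.rb --dry-run
```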

View File

@@ -93,12 +93,12 @@ function setup_db() {
}
function install_gitlab_gem() {
run_timed_command "gem install httparty --no-document --version 0.18.1"
run_timed_command "gem install gitlab --no-document --version 4.17.0"
run_timed_command "gem install httparty --no-document --version 0.20.0"
run_timed_command "gem install gitlab --no-document --version 4.19.0"
}
function install_tff_gem() {
run_timed_command "gem install test_file_finder --no-document --version 0.1.1"
run_timed_command "gem install test_file_finder --no-document --version 0.1.4"
}
function install_junit_merge_gem() {

View File

@@ -1336,4 +1336,24 @@ RSpec.describe ProjectsHelper do
)
end
end
describe '#localized_project_human_access' do
using RSpec::Parameterized::TableSyntax
where(:key, :localized_project_human_access) do
Gitlab::Access::NO_ACCESS | _('No access')
Gitlab::Access::MINIMAL_ACCESS | _("Minimal Access")
Gitlab::Access::GUEST | _('Guest')
Gitlab::Access::REPORTER | _('Reporter')
Gitlab::Access::DEVELOPER | _('Developer')
Gitlab::Access::MAINTAINER | _('Maintainer')
Gitlab::Access::OWNER | _('Owner')
end
with_them do
it 'with correct key' do
expect(helper.localized_project_human_access(key)).to eq(localized_project_human_access)
end
end
end
end

View File

@@ -7,12 +7,23 @@ RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
describe '#redirect_registry_request' do
using RSpec::Parameterized::TableSyntax
include_context 'dependency proxy helpers context'
let_it_be(:project) { create(:project) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
let_it_be_with_reload(:package_setting) { create(:namespace_package_setting, namespace: group) }
let(:target) { project }
let(:options) { {} }
subject { helper.redirect_registry_request(forward_to_registry, package_type, options) { helper.fallback } }
subject do
helper.redirect_registry_request(
forward_to_registry: forward_to_registry,
package_type: package_type,
target: target,
options: options
) { helper.fallback }
end
before do
allow(helper).to receive(:options).and_return(for: described_class)
@@ -42,32 +53,57 @@ RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
%i[maven npm pypi].each do |forwardable_package_type|
context "with #{forwardable_package_type} packages" do
include_context 'dependency proxy helpers context'
let(:package_type) { forwardable_package_type }
let(:options) { { project: project } }
where(:application_setting, :forward_to_registry, :example_name) do
true | true | 'executing redirect'
true | false | 'executing fallback'
false | true | 'executing fallback'
false | false | 'executing fallback'
where(:application_setting, :group_setting, :forward_to_registry, :example_name) do
true | nil | true | 'executing redirect'
true | nil | false | 'executing fallback'
false | nil | true | 'executing fallback'
false | nil | false | 'executing fallback'
true | false | true | 'executing fallback'
true | false | false | 'executing fallback'
false | true | true | 'executing redirect'
false | true | false | 'executing fallback'
end
with_them do
before do
allow_fetch_application_setting(attribute: "#{forwardable_package_type}_package_requests_forwarding", return_value: application_setting)
allow_fetch_cascade_application_setting(attribute: "#{forwardable_package_type}_package_requests_forwarding", return_value: application_setting)
package_setting.update!("#{forwardable_package_type}_package_requests_forwarding" => group_setting)
end
it_behaves_like params[:example_name]
end
end
context 'when cascade_package_forwarding_settings is disabled' do
let(:package_type) { forwardable_package_type }
let(:forward_to_registry) { true }
before do
stub_feature_flags(cascade_package_forwarding_settings: false)
allow_fetch_cascade_application_setting(attribute: "#{forwardable_package_type}_package_requests_forwarding", return_value: true)
package_setting.update!("#{forwardable_package_type}_package_requests_forwarding" => false)
end
it_behaves_like 'executing redirect'
end
context 'when no target is present' do
let(:package_type) { forwardable_package_type }
let(:forward_to_registry) { true }
let(:target) { nil }
before do
allow_fetch_cascade_application_setting(attribute: "#{forwardable_package_type}_package_requests_forwarding", return_value: true)
package_setting.update!("#{forwardable_package_type}_package_requests_forwarding" => false)
end
it_behaves_like 'executing redirect'
end
context 'when maven_central_request_forwarding is disabled' do
let(:package_type) { :maven }
let(:options) { { project: project } }
include_context 'dependency proxy helpers context'
where(:application_setting, :forward_to_registry) do
true | true
@@ -79,7 +115,7 @@ RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
with_them do
before do
stub_feature_flags(maven_central_request_forwarding: false)
allow_fetch_application_setting(attribute: "maven_package_requests_forwarding", return_value: application_setting)
allow_fetch_cascade_application_setting(attribute: "maven_package_requests_forwarding", return_value: application_setting)
end
it_behaves_like 'executing fallback'

View File

@@ -1,347 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe NamespaceSetting, 'CascadingNamespaceSettingAttribute' do
let(:group) { create(:group) }
let(:subgroup) { create(:group, parent: group) }
def group_settings
group.namespace_settings
end
def subgroup_settings
subgroup.namespace_settings
end
describe '#delayed_project_removal' do
subject(:delayed_project_removal) { subgroup_settings.delayed_project_removal }
context 'when there is no parent' do
context 'and the value is not nil' do
before do
group_settings.update!(delayed_project_removal: true)
end
it 'returns the local value' do
expect(group_settings.delayed_project_removal).to eq(true)
end
end
context 'and the value is nil' do
before do
group_settings.update!(delayed_project_removal: nil)
stub_application_setting(delayed_project_removal: false)
end
it 'returns the application settings value' do
expect(group_settings.delayed_project_removal).to eq(false)
end
end
end
context 'when parent does not lock the attribute' do
context 'and value is not nil' do
before do
group_settings.update!(delayed_project_removal: false)
end
it 'returns local setting when present' do
subgroup_settings.update!(delayed_project_removal: true)
expect(delayed_project_removal).to eq(true)
end
it 'returns the parent value when local value is nil' do
subgroup_settings.update!(delayed_project_removal: nil)
expect(delayed_project_removal).to eq(false)
end
it 'returns the correct dirty value' do
subgroup_settings.delayed_project_removal = true
expect(delayed_project_removal).to eq(true)
end
it 'does not return the application setting value when parent value is false' do
stub_application_setting(delayed_project_removal: true)
expect(delayed_project_removal).to eq(false)
end
end
context 'and the value is nil' do
before do
group_settings.update!(delayed_project_removal: nil, lock_delayed_project_removal: false)
subgroup_settings.update!(delayed_project_removal: nil)
subgroup_settings.clear_memoization(:delayed_project_removal)
end
it 'cascades to the application settings value' do
expect(delayed_project_removal).to eq(false)
end
end
context 'when multiple ancestors set a value' do
let(:third_level_subgroup) { create(:group, parent: subgroup) }
before do
group_settings.update!(delayed_project_removal: true)
subgroup_settings.update!(delayed_project_removal: false)
end
it 'returns the closest ancestor value' do
expect(third_level_subgroup.namespace_settings.delayed_project_removal).to eq(false)
end
end
end
context 'when parent locks the attribute' do
before do
subgroup_settings.update!(delayed_project_removal: true)
group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
subgroup_settings.clear_memoization(:delayed_project_removal)
subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
end
it 'returns the parent value' do
expect(delayed_project_removal).to eq(false)
end
it 'does not allow the local value to be saved' do
subgroup_settings.delayed_project_removal = nil
expect { subgroup_settings.save! }
.to raise_error(ActiveRecord::RecordInvalid, /Delayed project removal cannot be changed because it is locked by an ancestor/)
end
end
context 'when the application settings locks the attribute' do
before do
subgroup_settings.update!(delayed_project_removal: true)
stub_application_setting(lock_delayed_project_removal: true, delayed_project_removal: true)
end
it 'returns the application setting value' do
expect(delayed_project_removal).to eq(true)
end
it 'does not allow the local value to be saved' do
subgroup_settings.delayed_project_removal = false
expect { subgroup_settings.save! }
.to raise_error(ActiveRecord::RecordInvalid, /Delayed project removal cannot be changed because it is locked by an ancestor/)
end
end
context 'when parent locked the attribute then the application settings locks it' do
before do
subgroup_settings.update!(delayed_project_removal: true)
group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
stub_application_setting(lock_delayed_project_removal: true, delayed_project_removal: true)
subgroup_settings.clear_memoization(:delayed_project_removal)
subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
end
it 'returns the application setting value' do
expect(delayed_project_removal).to eq(true)
end
end
end
describe '#delayed_project_removal?' do
before do
subgroup_settings.update!(delayed_project_removal: true)
group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
subgroup_settings.clear_memoization(:delayed_project_removal)
subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
end
it 'aliases the method when the attribute is a boolean' do
expect(subgroup_settings.delayed_project_removal?).to eq(subgroup_settings.delayed_project_removal)
end
end
describe '#delayed_project_removal=' do
before do
subgroup_settings.update!(delayed_project_removal: nil)
group_settings.update!(delayed_project_removal: true)
end
it 'does not save the value locally when it matches the cascaded value' do
subgroup_settings.update!(delayed_project_removal: true)
expect(subgroup_settings.read_attribute(:delayed_project_removal)).to eq(nil)
end
end
describe '#delayed_project_removal_locked?' do
shared_examples 'not locked' do
it 'is not locked by an ancestor' do
expect(subgroup_settings.delayed_project_removal_locked_by_ancestor?).to eq(false)
end
it 'is not locked by application setting' do
expect(subgroup_settings.delayed_project_removal_locked_by_application_setting?).to eq(false)
end
it 'does not return a locked namespace' do
expect(subgroup_settings.delayed_project_removal_locked_ancestor).to be_nil
end
end
context 'when attribute is locked by self' do
before do
subgroup_settings.update!(lock_delayed_project_removal: true)
end
it 'is not locked by default' do
expect(subgroup_settings.delayed_project_removal_locked?).to eq(false)
end
it 'is locked when including self' do
expect(subgroup_settings.delayed_project_removal_locked?(include_self: true)).to eq(true)
end
end
context 'when parent does not lock the attribute' do
it_behaves_like 'not locked'
end
context 'when parent locks the attribute' do
before do
group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
subgroup_settings.clear_memoization(:delayed_project_removal)
subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
end
it 'is locked by an ancestor' do
expect(subgroup_settings.delayed_project_removal_locked_by_ancestor?).to eq(true)
end
it 'is not locked by application setting' do
expect(subgroup_settings.delayed_project_removal_locked_by_application_setting?).to eq(false)
end
it 'returns a locked namespace settings object' do
expect(subgroup_settings.delayed_project_removal_locked_ancestor.namespace_id).to eq(group_settings.namespace_id)
end
end
context 'when not locked by application settings' do
before do
stub_application_setting(lock_delayed_project_removal: false)
end
it_behaves_like 'not locked'
end
context 'when locked by application settings' do
before do
stub_application_setting(lock_delayed_project_removal: true)
end
it 'is not locked by an ancestor' do
expect(subgroup_settings.delayed_project_removal_locked_by_ancestor?).to eq(false)
end
it 'is locked by application setting' do
expect(subgroup_settings.delayed_project_removal_locked_by_application_setting?).to eq(true)
end
it 'does not return a locked namespace' do
expect(subgroup_settings.delayed_project_removal_locked_ancestor).to be_nil
end
end
end
describe '#lock_delayed_project_removal=' do
context 'when parent locks the attribute' do
before do
group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
subgroup_settings.clear_memoization(:delayed_project_removal)
subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
end
it 'does not allow the attribute to be saved' do
subgroup_settings.lock_delayed_project_removal = true
expect { subgroup_settings.save! }
.to raise_error(ActiveRecord::RecordInvalid, /Lock delayed project removal cannot be changed because it is locked by an ancestor/)
end
end
context 'when parent does not lock the attribute' do
before do
group_settings.update!(lock_delayed_project_removal: false)
subgroup_settings.lock_delayed_project_removal = true
end
it 'allows the lock to be set when the attribute is not nil' do
subgroup_settings.delayed_project_removal = true
expect(subgroup_settings.save).to eq(true)
end
it 'does not allow the lock to be saved when the attribute is nil' do
subgroup_settings.delayed_project_removal = nil
expect { subgroup_settings.save! }
.to raise_error(ActiveRecord::RecordInvalid, /Delayed project removal cannot be nil when locking the attribute/)
end
it 'copies the cascaded value when locking the attribute if the local value is nil', :aggregate_failures do
subgroup_settings.delayed_project_removal = nil
subgroup_settings.lock_delayed_project_removal = true
expect(subgroup_settings.read_attribute(:delayed_project_removal)).to eq(false)
end
end
context 'when application settings locks the attribute' do
before do
stub_application_setting(lock_delayed_project_removal: true)
end
it 'does not allow the attribute to be saved' do
subgroup_settings.lock_delayed_project_removal = true
expect { subgroup_settings.save! }
.to raise_error(ActiveRecord::RecordInvalid, /Lock delayed project removal cannot be changed because it is locked by an ancestor/)
end
end
context 'when application_settings does not lock the attribute' do
before do
stub_application_setting(lock_delayed_project_removal: false)
end
it 'allows the attribute to be saved' do
subgroup_settings.delayed_project_removal = true
subgroup_settings.lock_delayed_project_removal = true
expect(subgroup_settings.save).to eq(true)
end
end
end
describe 'after update callback' do
before do
subgroup_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
end
it 'clears descendant locks' do
group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: true)
expect(subgroup_settings.reload.lock_delayed_project_removal).to eq(false)
end
end
end

View File

@ -85,4 +85,13 @@ RSpec.describe Namespace::PackageSetting do
end
end
end
describe 'package forwarding attributes' do
%i[maven_package_requests_forwarding
pypi_package_requests_forwarding
npm_package_requests_forwarding].each do |attribute|
it_behaves_like 'a cascading namespace setting boolean attribute', settings_attribute_name: attribute,
settings_association: :package_settings
end
end
end

View File

@ -177,4 +177,8 @@ RSpec.describe NamespaceSetting, type: :model do
end
end
end
describe '#delayed_project_removal' do
it_behaves_like 'a cascading namespace setting boolean attribute', settings_attribute_name: :delayed_project_removal
end
end

View File

@ -362,6 +362,9 @@ RSpec.describe Namespace do
it { is_expected.to delegate_method(:name).to(:owner).with_prefix.allow_nil }
it { is_expected.to delegate_method(:avatar_url).to(:owner).allow_nil }
it { is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy).to(:namespace_settings).allow_nil }
it { is_expected.to delegate_method(:maven_package_requests_forwarding).to(:package_settings) }
it { is_expected.to delegate_method(:pypi_package_requests_forwarding).to(:package_settings) }
it { is_expected.to delegate_method(:npm_package_requests_forwarding).to(:package_settings) }
it do
is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy=).to(:namespace_settings)

View File

@ -859,6 +859,9 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to delegate_method(:environments_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:feature_flags_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:releases_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:maven_package_requests_forwarding).to(:namespace) }
it { is_expected.to delegate_method(:pypi_package_requests_forwarding).to(:namespace) }
it { is_expected.to delegate_method(:npm_package_requests_forwarding).to(:namespace) }
describe 'read project settings' do
%i(

View File

@ -254,7 +254,7 @@ RSpec.describe API::MavenPackages do
let(:package_name) { package_in_project ? package_file.file_name : 'foo' }
before do
allow_fetch_application_setting(attribute: 'maven_package_requests_forwarding', return_value: forward)
allow_fetch_cascade_application_setting(attribute: 'maven_package_requests_forwarding', return_value: forward)
end
it_behaves_like params[:shared_examples_name]
@ -273,7 +273,7 @@ RSpec.describe API::MavenPackages do
before do
stub_feature_flags(maven_central_request_forwarding: false)
allow_fetch_application_setting(attribute: 'maven_package_requests_forwarding', return_value: forward)
allow_fetch_cascade_application_setting(attribute: 'maven_package_requests_forwarding', return_value: forward)
end
it_behaves_like params[:shared_examples_name]

View File

@ -851,6 +851,12 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
should_not_email(non_subscriber)
end
it 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(merge_request)
update_merge_request(title: 'New title')
end
context 'when removing through wip_event param' do
it 'removes Draft from the title' do
expect { update_merge_request({ wip_event: "ready" }) }
@ -877,6 +883,12 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
should_not_email(non_subscriber)
end
it 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(merge_request)
update_merge_request(title: 'Draft: New title')
end
context 'when adding through wip_event param' do
it 'adds Draft to the title' do
expect { update_merge_request({ wip_event: "draft" }) }

View File

@ -1,6 +1,11 @@
# frozen_string_literal: true
RSpec.shared_context 'dependency proxy helpers context' do
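# Stubs Gitlab::CurrentSettings for cascading settings: the attribute itself
# returns the given value and its application-level lock reads as disabled,
# so the stubbed value behaves like an unlocked instance-wide default.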
def allow_fetch_cascade_application_setting(attribute:, return_value:)
allow(Gitlab::CurrentSettings).to receive(:public_send).with(attribute.to_sym).and_return(return_value)
allow(Gitlab::CurrentSettings).to receive(:public_send).with("lock_#{attribute}").and_return(false)
end
def allow_fetch_application_setting(attribute:, return_value:)
attributes = double
allow(::Gitlab::CurrentSettings.current_application_settings).to receive(:attributes).and_return(attributes)

View File

@ -0,0 +1,355 @@
# frozen_string_literal: true
require 'spec_helper'
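# Included by attribute specs, for example:
#   it_behaves_like 'a cascading namespace setting boolean attribute',
#     settings_attribute_name: :delayed_project_removal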
RSpec.shared_examples 'a cascading namespace setting boolean attribute' do |settings_association: :namespace_settings, settings_attribute_name:|
let_it_be_with_reload(:group) { create(:group) }
let_it_be_with_reload(:subgroup) { create(:group, parent: group) }
let(:group_settings) { group.send(settings_association) }
let(:subgroup_settings) { subgroup.send(settings_association) }
describe "##{settings_attribute_name}" do
subject(:cascading_attribute) { subgroup_settings.send(settings_attribute_name) }
before do
stub_application_setting(settings_attribute_name => false)
end
context 'when there is no parent' do
context 'and the value is not nil' do
before do
group_settings.update!(settings_attribute_name => true)
end
it 'returns the local value' do
expect(group_settings.send(settings_attribute_name)).to eq(true)
end
end
context 'and the value is nil' do
before do
group_settings.update!(settings_attribute_name => nil)
end
it 'returns the application settings value' do
expect(group_settings.send(settings_attribute_name)).to eq(false)
end
end
end
context 'when parent does not lock the attribute' do
context 'and value is not nil' do
before do
group_settings.update!(settings_attribute_name => false)
end
it 'returns local setting when present' do
subgroup_settings.update!(settings_attribute_name => true)
expect(cascading_attribute).to eq(true)
end
it 'returns the parent value when local value is nil' do
subgroup_settings.update!(settings_attribute_name => nil)
expect(cascading_attribute).to eq(false)
end
it 'returns the correct dirty value' do
subgroup_settings.send("#{settings_attribute_name}=", true)
expect(cascading_attribute).to eq(true)
end
it 'does not return the application setting value when parent value is false' do
stub_application_setting(settings_attribute_name => true)
expect(cascading_attribute).to eq(false)
end
end
context 'and the value is nil' do
before do
group_settings.update!(settings_attribute_name => nil, "lock_#{settings_attribute_name}".to_sym => false)
subgroup_settings.update!(settings_attribute_name => nil)
subgroup_settings.clear_memoization(settings_attribute_name)
end
it 'cascades to the application settings value' do
expect(cascading_attribute).to eq(false)
end
end
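# The nearest ancestor with a concrete (non-nil) value takes precedence over
# values set further up the hierarchy.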
context 'when multiple ancestors set a value' do
let(:third_level_subgroup) { create(:group, parent: subgroup) }
before do
group_settings.update!(settings_attribute_name => true)
subgroup_settings.update!(settings_attribute_name => false)
end
it 'returns the closest ancestor value' do
expect(third_level_subgroup.send(settings_association).send(settings_attribute_name)).to eq(false)
end
end
end
context 'when parent locks the attribute' do
before do
subgroup_settings.update!(settings_attribute_name => true)
group_settings.update!("lock_#{settings_attribute_name}" => true, settings_attribute_name => false)
subgroup_settings.clear_memoization(settings_attribute_name)
subgroup_settings.clear_memoization("#{settings_attribute_name}_locked_ancestor")
end
it 'returns the parent value' do
expect(cascading_attribute).to eq(false)
end
it 'does not allow the local value to be saved' do
subgroup_settings.send("#{settings_attribute_name}=", nil)
expect { subgroup_settings.save! }
.to raise_error(ActiveRecord::RecordInvalid,
/cannot be changed because it is locked by an ancestor/)
end
end
context 'when the application settings lock the attribute' do
before do
subgroup_settings.update!(settings_attribute_name => true)
stub_application_setting("lock_#{settings_attribute_name}" => true, settings_attribute_name => true)
end
it 'returns the application setting value' do
expect(cascading_attribute).to eq(true)
end
it 'does not allow the local value to be saved' do
subgroup_settings.send("#{settings_attribute_name}=", false)
expect { subgroup_settings.save! }
.to raise_error(
ActiveRecord::RecordInvalid,
/cannot be changed because it is locked by an ancestor/
)
end
end
context 'when the parent locks the attribute and then the application settings lock it' do
before do
subgroup_settings.update!(settings_attribute_name => true)
group_settings.update!("lock_#{settings_attribute_name}" => true, settings_attribute_name => false)
stub_application_setting("lock_#{settings_attribute_name}" => true, settings_attribute_name => true)
subgroup_settings.clear_memoization(settings_attribute_name)
subgroup_settings.clear_memoization("#{settings_attribute_name}_locked_ancestor")
end
it 'returns the application setting value' do
expect(cascading_attribute).to eq(true)
end
end
end
describe "##{settings_attribute_name}?" do
before do
subgroup_settings.update!(settings_attribute_name => true)
group_settings.update!("lock_#{settings_attribute_name}" => true, settings_attribute_name => false)
subgroup_settings.clear_memoization(settings_attribute_name)
subgroup_settings.clear_memoization("#{settings_attribute_name}_locked_ancestor")
end
it 'aliases the method when the attribute is a boolean' do
expect(subgroup_settings.send("#{settings_attribute_name}?"))
.to eq(subgroup_settings.send(settings_attribute_name))
end
end
describe "##{settings_attribute_name}=" do
before do
subgroup_settings.update!(settings_attribute_name => nil)
group_settings.update!(settings_attribute_name => true)
end
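# A local write that matches the cascaded value is not persisted: the column
# stays nil, so later changes to the ancestor value keep cascading down.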
it 'does not save the value locally when it matches the cascaded value' do
subgroup_settings.update!(settings_attribute_name => true)
expect(subgroup_settings.read_attribute(settings_attribute_name)).to eq(nil)
end
end
describe "##{settings_attribute_name}_locked?" do
shared_examples 'not locked' do
it 'is not locked by an ancestor' do
expect(subgroup_settings.send("#{settings_attribute_name}_locked_by_ancestor?")).to eq(false)
end
it 'is not locked by application setting' do
expect(subgroup_settings.send("#{settings_attribute_name}_locked_by_application_setting?")).to eq(false)
end
it 'does not return a locked namespace' do
expect(subgroup_settings.send("#{settings_attribute_name}_locked_ancestor")).to be_nil
end
end
context 'when attribute is locked by self' do
before do
subgroup_settings.update!("lock_#{settings_attribute_name}" => true)
end
it 'is not locked by default' do
expect(subgroup_settings.send("#{settings_attribute_name}_locked?")).to eq(false)
end
it 'is locked when including self' do
expect(subgroup_settings.send("#{settings_attribute_name}_locked?", include_self: true)).to eq(true)
end
end
context 'when parent does not lock the attribute' do
it_behaves_like 'not locked'
end
context 'when parent locks the attribute' do
before do
group_settings.update!("lock_#{settings_attribute_name}".to_sym => true, settings_attribute_name => false)
subgroup_settings.clear_memoization(settings_attribute_name)
subgroup_settings.clear_memoization("#{settings_attribute_name}_locked_ancestor")
end
it 'is locked by an ancestor' do
expect(subgroup_settings.send("#{settings_attribute_name}_locked_by_ancestor?")).to eq(true)
end
it 'is not locked by application setting' do
expect(subgroup_settings.send("#{settings_attribute_name}_locked_by_application_setting?")).to eq(false)
end
it 'returns a locked namespace settings object' do
expect(subgroup_settings.send("#{settings_attribute_name}_locked_ancestor").namespace_id)
.to eq(group_settings.namespace_id)
end
end
context 'when not locked by application settings' do
before do
stub_application_setting("lock_#{settings_attribute_name}" => false)
end
it_behaves_like 'not locked'
end
context 'when locked by application settings' do
before do
stub_application_setting("lock_#{settings_attribute_name}" => true)
end
it 'is not locked by an ancestor' do
expect(subgroup_settings.send("#{settings_attribute_name}_locked_by_ancestor?")).to eq(false)
end
it 'is locked by application setting' do
expect(subgroup_settings.send("#{settings_attribute_name}_locked_by_application_setting?")).to eq(true)
end
it 'does not return a locked namespace' do
expect(subgroup_settings.send("#{settings_attribute_name}_locked_ancestor")).to be_nil
end
end
end
describe "#lock_#{settings_attribute_name}=" do
context 'when parent locks the attribute' do
before do
group_settings.update!("lock_#{settings_attribute_name}".to_sym => true, settings_attribute_name => false)
subgroup_settings.clear_memoization(settings_attribute_name)
subgroup_settings.clear_memoization("#{settings_attribute_name}_locked_ancestor")
end
it 'does not allow the attribute to be saved' do
subgroup_settings.send("lock_#{settings_attribute_name}=", true)
expect { subgroup_settings.save! }
.to raise_error(ActiveRecord::RecordInvalid,
/cannot be changed because it is locked by an ancestor/)
end
end
context 'when parent does not lock the attribute' do
before do
group_settings.update!("lock_#{settings_attribute_name}" => false, settings_attribute_name => false)
subgroup_settings.send("lock_#{settings_attribute_name}=", true)
end
it 'allows the lock to be set when the attribute is not nil' do
subgroup_settings.send("#{settings_attribute_name}=", true)
expect(subgroup_settings.save).to eq(true)
end
it 'does not allow the lock to be saved when the attribute is nil' do
subgroup_settings.send("#{settings_attribute_name}=", nil)
expect { subgroup_settings.save! }
.to raise_error(ActiveRecord::RecordInvalid,
/cannot be nil when locking the attribute/)
end
it 'copies the cascaded value when locking the attribute if the local value is nil', :aggregate_failures do
subgroup_settings.send("#{settings_attribute_name}=", nil)
subgroup_settings.send("lock_#{settings_attribute_name}=", true)
expect(subgroup_settings.read_attribute(settings_attribute_name)).to eq(false)
end
end
context 'when application settings lock the attribute' do
before do
stub_application_setting("lock_#{settings_attribute_name}".to_sym => true)
end
it 'does not allow the attribute to be saved' do
subgroup_settings.send("lock_#{settings_attribute_name}=", true)
expect { subgroup_settings.save! }
.to raise_error(ActiveRecord::RecordInvalid,
/cannot be changed because it is locked by an ancestor/)
end
end
context 'when application settings do not lock the attribute' do
before do
stub_application_setting("lock_#{settings_attribute_name}".to_sym => false)
end
it 'allows the attribute to be saved' do
subgroup_settings.send("#{settings_attribute_name}=", true)
subgroup_settings.send("lock_#{settings_attribute_name}=", true)
expect(subgroup_settings.save).to eq(true)
end
end
end
describe 'after update callback' do
before do
group_settings.update!("lock_#{settings_attribute_name}" => false, settings_attribute_name => false)
subgroup_settings.update!("lock_#{settings_attribute_name}" => true, settings_attribute_name => false)
end
it 'clears descendant locks' do
group_settings.update!("lock_#{settings_attribute_name}" => true, settings_attribute_name => true)
expect(subgroup_settings.reload.send("lock_#{settings_attribute_name}")).to eq(false)
end
end
end

View File

@ -260,7 +260,11 @@ RSpec.shared_examples 'handling get metadata requests' do |scope: :project|
project.send("add_#{user_role}", user) if user_role
project.update!(visibility: visibility.to_s)
package.update!(name: package_name) unless package_name == 'non-existing-package'
allow_fetch_application_setting(attribute: "npm_package_requests_forwarding", return_value: request_forward)
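# Instance-scoped requests still read the flat application setting; project
# and group scopes now go through the cascading lookup.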
if scope == :instance
allow_fetch_application_setting(attribute: "npm_package_requests_forwarding", return_value: request_forward)
else
allow_fetch_cascade_application_setting(attribute: "npm_package_requests_forwarding", return_value: request_forward)
end
end
example_name = "#{params[:expected_result]} metadata request"

View File

@ -14,6 +14,7 @@ RSpec.shared_examples 'accept package tags request' do |status:|
before do
allow_fetch_application_setting(attribute: "npm_package_requests_forwarding", return_value: false)
allow_fetch_cascade_application_setting(attribute: "npm_package_requests_forwarding", return_value: false)
end
context 'with valid package name' do

View File

@ -291,7 +291,7 @@ RSpec.shared_examples 'pypi simple API endpoint' do
end
before do
allow_fetch_application_setting(attribute: "pypi_package_requests_forwarding", return_value: forward)
allow_fetch_cascade_application_setting(attribute: "pypi_package_requests_forwarding", return_value: forward)
end
it_behaves_like params[:shared_examples_name], :reporter, params[:expected_status]