Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-04-26 15:10:32 +00:00
parent ae4756174c
commit 8cd1a72e8f
62 changed files with 989 additions and 1020 deletions

View File

@ -72,7 +72,7 @@ review-qa-reliable:
extends:
- .review-qa-base
- .review:rules:review-qa-reliable
parallel: 8
parallel: 10
retry: 1
variables:
QA_RUN_TYPE: review-qa-reliable
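For readers scanning the commit, `parallel` simply fans the job out into that many concurrent copies; a minimal sketch of the resulting job definition (simplified from the hunk above, not the literal file) is:

review-qa-reliable:
  extends:
    - .review-qa-base
    - .review:rules:review-qa-reliable
  parallel: 10   # ten concurrent instances of this job, up from eight
  retry: 1
  variables:
    QA_RUN_TYPE: review-qa-reliable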

View File

@ -247,6 +247,9 @@
.models-patterns: &models-patterns
- "{,ee/,jh/}{app/models}/**/*"
.lib-gitlab-patterns: &lib-gitlab-patterns
- "{,ee/,jh/}lib/{,ee/,jh/}gitlab/**/*"
.startup-css-patterns: &startup-css-patterns
- "{,ee/,jh/}app/assets/stylesheets/startup/**/*"
@ -604,6 +607,7 @@
rules:
- <<: *if-not-ee
when: never
- <<: *if-merge-request-targeting-stable-branch
- <<: *if-merge-request-labels-run-review-app
- <<: *if-dot-com-gitlab-org-and-security-merge-request
changes: *ci-build-images-patterns
@ -618,6 +622,7 @@
rules:
- <<: *if-not-canonical-namespace
when: never
- <<: *if-merge-request-targeting-stable-branch
- <<: *if-merge-request-labels-run-review-app
- <<: *if-auto-deploy-branches
- changes: *ci-build-images-patterns
@ -692,6 +697,7 @@
rules:
- <<: *if-not-canonical-namespace
when: never
- <<: *if-merge-request-targeting-stable-branch
- <<: *if-merge-request-labels-run-review-app
- <<: *if-auto-deploy-branches
- changes: *code-qa-patterns
@ -882,6 +888,8 @@
- <<: *if-dot-com-gitlab-org-and-security-merge-request
changes: *feature-flag-development-config-patterns
when: never
- <<: *if-merge-request-targeting-stable-branch
allow_failure: true
- <<: *if-dot-com-gitlab-org-and-security-merge-request
changes: *nodejs-patterns
allow_failure: true
@ -1534,6 +1542,8 @@
changes: *controllers-patterns
- <<: *if-dot-com-gitlab-org-merge-request
changes: *models-patterns
- <<: *if-dot-com-gitlab-org-merge-request
changes: *lib-gitlab-patterns
- <<: *if-dot-com-gitlab-org-merge-request
changes: *qa-patterns
- <<: *if-dot-com-gitlab-org-merge-request

View File

@ -112,9 +112,7 @@ export default {
},
signupEnabledHelpText() {
const text = sprintf(
s__(
'ApplicationSettings|When enabled, any user visiting %{host} will be able to create an account.',
),
s__('ApplicationSettings|Any user that visits %{host} can create an account.'),
{
host: this.host,
},
@ -125,7 +123,7 @@ export default {
requireAdminApprovalHelpText() {
const text = sprintf(
s__(
'ApplicationSettings|When enabled, any user visiting %{host} and creating an account will have to be explicitly approved by an admin before they can sign in. This setting is effective only if sign-ups are enabled.',
'ApplicationSettings|Any user that visits %{host} and creates an account must be explicitly approved by an administrator before they can sign in. Only effective if sign-ups are enabled.',
),
{
host: this.host,
@ -197,32 +195,34 @@ export default {
),
domainAllowListLabel: s__('ApplicationSettings|Allowed domains for sign-ups'),
domainAllowListDescription: s__(
'ApplicationSettings|ONLY users with e-mail addresses that match these domain(s) will be able to sign-up. Wildcards allowed. Use separate lines for multiple entries. Ex: domain.com, *.domain.com',
'ApplicationSettings|Only users with e-mail addresses that match these domain(s) can sign up. Wildcards allowed. Use separate lines for multiple entries. Example: domain.com, *.domain.com',
),
userCapLabel: s__('ApplicationSettings|User cap'),
userCapDescription: s__(
'ApplicationSettings|Once the instance reaches the user cap, any user who is added or requests access will have to be approved by an admin. Leave the field empty for unlimited.',
'ApplicationSettings|After the instance reaches the user cap, any user who is added or requests access must be approved by an administrator. Leave blank for unlimited.',
),
domainDenyListGroupLabel: s__('ApplicationSettings|Domain denylist'),
domainDenyListLabel: s__('ApplicationSettings|Enable domain denylist for sign ups'),
domainDenyListLabel: s__('ApplicationSettings|Enable domain denylist for sign-ups'),
domainDenyListTypeFileLabel: s__('ApplicationSettings|Upload denylist file'),
domainDenyListTypeRawLabel: s__('ApplicationSettings|Enter denylist manually'),
domainDenyListFileLabel: s__('ApplicationSettings|Denylist file'),
domainDenyListFileDescription: s__(
'ApplicationSettings|Users with e-mail addresses that match these domain(s) will NOT be able to sign-up. Wildcards allowed. Use separate lines or commas for multiple entries.',
'ApplicationSettings|Users with e-mail addresses that match these domain(s) cannot sign up. Wildcards allowed. Use separate lines or commas for multiple entries.',
),
domainDenyListListLabel: s__('ApplicationSettings|Denied domains for sign-ups'),
domainDenyListListDescription: s__(
'ApplicationSettings|Users with e-mail addresses that match these domain(s) will NOT be able to sign-up. Wildcards allowed. Use separate lines for multiple entries. Ex: domain.com, *.domain.com',
'ApplicationSettings|Users with e-mail addresses that match these domain(s) cannot sign up. Wildcards allowed. Use separate lines for multiple entries. Example: domain.com, *.domain.com',
),
domainPlaceholder: s__('ApplicationSettings|domain.com'),
emailRestrictionsEnabledGroupLabel: s__('ApplicationSettings|Email restrictions'),
emailRestrictionsEnabledLabel: s__(
'ApplicationSettings|Enable email restrictions for sign ups',
'ApplicationSettings|Enable email restrictions for sign-ups',
),
emailRestrictionsGroupLabel: s__('ApplicationSettings|Email restrictions for sign-ups'),
afterSignUpTextGroupLabel: s__('ApplicationSettings|After sign up text'),
afterSignUpTextGroupDescription: s__('ApplicationSettings|Markdown enabled'),
afterSignUpTextGroupLabel: s__('ApplicationSettings|After sign-up text'),
afterSignUpTextGroupDescription: s__(
'ApplicationSettings|Text shown after a user signs up. Markdown enabled.',
),
},
};
</script>
@ -288,19 +288,21 @@ export default {
name="application_setting[minimum_password_length]"
/>
<gl-sprintf
:message="
s__(
'ApplicationSettings|See GitLab\'s %{linkStart}Password Policy Guidelines%{linkEnd}',
)
"
>
<template #link="{ content }">
<gl-link :href="form.minimumPasswordLengthHelpLink" target="_blank">{{
content
}}</gl-link>
</template>
</gl-sprintf>
<template #description>
<gl-sprintf
:message="
s__(
'ApplicationSettings|See GitLab\'s %{linkStart}Password Policy Guidelines%{linkEnd}.',
)
"
>
<template #link="{ content }">
<gl-link :href="form.minimumPasswordLengthHelpLink" target="_blank">{{
content
}}</gl-link>
</template>
</gl-sprintf>
</template>
</gl-form-group>
<gl-form-group
@ -380,17 +382,19 @@ export default {
name="application_setting[email_restrictions]"
></textarea>
<gl-sprintf
:message="
s__(
'ApplicationSettings|Restricts sign-ups for email addresses that match the given regex. See the %{linkStart}supported syntax%{linkEnd} for more information.',
)
"
>
<template #link="{ content }">
<gl-link :href="form.supportedSyntaxLinkUrl" target="_blank">{{ content }}</gl-link>
</template>
</gl-sprintf>
<template #description>
<gl-sprintf
:message="
s__(
'ApplicationSettings|Restricts sign-ups for email addresses that match the given regex. %{linkStart}What is the supported syntax?%{linkEnd}',
)
"
>
<template #link="{ content }">
<gl-link :href="form.supportedSyntaxLinkUrl" target="_blank">{{ content }}</gl-link>
</template>
</gl-sprintf>
</template>
</gl-form-group>
<gl-form-group

View File

@ -36,6 +36,9 @@ export default {
return content;
},
firstLineClass() {
return { 'gl-mt-3!': this.number === 1 };
},
},
methods: {
wrapBidiChar(bidiChar) {
@ -56,10 +59,11 @@ export default {
</script>
<template>
<div class="gl-display-flex">
<div class="line-numbers gl-pt-0! gl-pb-0! gl-absolute gl-z-index-3">
<div class="gl-p-0! gl-absolute gl-z-index-3 gl-border-r diff-line-num line-numbers">
<gl-link
:id="`L${number}`"
class="file-line-num diff-line-num gl-user-select-none"
class="gl-user-select-none gl-ml-5 gl-pr-3 gl-shadow-none! file-line-num diff-line-num"
:class="firstLineClass"
:to="`#L${number}`"
:data-line-number="number"
>
@ -68,7 +72,8 @@ export default {
</div>
<pre
class="code highlight gl-p-0! gl-w-full gl-overflow-visible! gl-ml-11!"
class="gl-p-0! gl-w-full gl-overflow-visible! gl-ml-11! gl-border-none! code highlight"
:class="firstLineClass"
><code><span :id="`LC${number}`" v-safe-html="formattedContent" :lang="language" class="line" data-testid="content"></span></code></pre>
</div>
</template>

View File

@ -174,6 +174,10 @@
width: 100%;
}
.btn.dropdown-toggle-split {
margin-left: 1px;
}
/* This resets the width of the control so that the search button doesn't wrap */
.gl-search-box-by-click .form-control {
width: 1%;
@ -368,7 +372,7 @@
.project-item-select-holder.btn-group {
.new-project-item-select-button {
max-width: 44px;
max-width: 32px;
}
}

View File

@ -138,7 +138,8 @@
@include gl-mr-2;
@include gl-w-4;
@include gl-h-4;
@include gl-float-left;
@include gl-absolute;
@include gl-left-3;
background-color: $color;
mask-image: asset_url('icons-stacked.svg#link');
mask-repeat: no-repeat;

View File

@ -73,7 +73,7 @@ class UserRecentEventsFinder
return Event.none if users.empty?
if Feature.enabled?(:optimized_followed_users_queries, current_user)
if Feature.enabled?(:optimized_followed_users_queries, current_user, default_enabled: :yaml)
query_builder_params = event_filter.in_operator_query_builder_params(users)
Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder

View File

@ -57,6 +57,14 @@ module Ci
end
end
def retryable?
return false unless Feature.enabled?(:ci_recreate_downstream_pipeline, project, default_enabled: :yaml)
return false if failed? && (pipeline_loop_detected? || reached_max_descendant_pipelines_depth?)
super
end
def self.with_preloads
preload(
:metadata,
@ -65,8 +73,11 @@ module Ci
)
end
def retryable?
false
def self.clone_accessors
%i[pipeline project ref tag options name
allow_failure stage stage_id stage_idx
yaml_variables when description needs_attributes
scheduling_type].freeze
end
def inherit_status_from_downstream!(pipeline)

View File

@ -101,6 +101,21 @@ module Ci
:merge_train_pipeline?,
to: :pipeline
def clone(current_user:)
new_attributes = self.class.clone_accessors.to_h do |attribute|
[attribute, public_send(attribute)] # rubocop:disable GitlabSecurity/PublicSend
end
if persisted_environment.present?
new_attributes[:metadata_attributes] ||= {}
new_attributes[:metadata_attributes][:expanded_environment_name] = expanded_environment_name
end
new_attributes[:user] = current_user
self.class.new(new_attributes)
end
def retryable?
return false if retried? || archived? || deployment_rejected?

View File

@ -1,34 +0,0 @@
# frozen_string_literal: true
module AuthorizedProjectUpdate
class ProjectCreateService < BaseService
BATCH_SIZE = 1000
def initialize(project)
@project = project
end
def execute
group = project.group
unless group
return ServiceResponse.error(message: 'Project does not have a group')
end
group.members_from_self_and_ancestors_with_effective_access_level
.each_batch(of: BATCH_SIZE, column: :user_id) do |members|
attributes = members.map do |member|
{ user_id: member.user_id, project_id: project.id, access_level: member.access_level }
end
ProjectAuthorization.insert_all(attributes) unless attributes.empty?
end
ServiceResponse.success
end
private
attr_reader :project
end
end

View File

@ -23,11 +23,11 @@ module Ci
# Cloning a job requires a strict type check to ensure
# the attributes being used for the clone are taken straight
# from the model and not overridden by other abstractions.
raise TypeError unless job.instance_of?(Ci::Build)
raise TypeError unless job.instance_of?(Ci::Build) || job.instance_of?(Ci::Bridge)
check_access!(job)
new_job = clone_job(job)
new_job = job.clone(current_user: current_user)
new_job.run_after_commit do
::Ci::CopyCrossDatabaseAssociationsService.new.execute(job, new_job)
@ -53,9 +53,12 @@ module Ci
private
def check_assignable_runners!(job); end
def retry_job(job)
clone!(job).tap do |new_job|
check_assignable_runners!(new_job)
check_assignable_runners!(new_job) if new_job.is_a?(Ci::Build)
next if new_job.failed?
Gitlab::OptimisticLocking.retry_lock(new_job, name: 'retry_build', &:enqueue)
@ -68,26 +71,6 @@ module Ci
raise Gitlab::Access::AccessDeniedError, '403 Forbidden'
end
end
def check_assignable_runners!(job); end
def clone_job(job)
project.builds.new(job_attributes(job))
end
def job_attributes(job)
attributes = job.class.clone_accessors.to_h do |attribute|
[attribute, job.public_send(attribute)] # rubocop:disable GitlabSecurity/PublicSend
end
if job.persisted_environment.present?
attributes[:metadata_attributes] ||= {}
attributes[:metadata_attributes][:expanded_environment_name] = job.expanded_environment_name
end
attributes[:user] = current_user
attributes
end
end
end

View File

@ -3,24 +3,60 @@
module ServicePing
class BuildPayloadService
def execute
return {} unless allowed_to_report?
return {} unless ServicePingSettings.product_intelligence_enabled?
raw_payload
filtered_usage_data
end
private
def allowed_to_report?
product_intelligence_enabled? && !User.single_user&.requires_usage_stats_consent?
end
def product_intelligence_enabled?
::Gitlab::CurrentSettings.usage_ping_enabled?
end
def raw_payload
@raw_payload ||= ::Gitlab::Usage::ServicePingReport.for(output: :all_metrics_values)
end
def filtered_usage_data(payload = raw_payload, parents = [])
return unless payload.is_a?(Hash)
payload.keep_if do |label, node|
key_path = parents.dup.append(label).join('.')
if has_metric_definition?(key_path)
include_metric?(key_path)
else
filtered_usage_data(node, parents.dup << label) if node.is_a?(Hash)
end
end
end
def include_metric?(key_path)
valid_metric_status?(key_path) && permitted_metric?(key_path)
end
def valid_metric_status?(key_path)
metric_definitions[key_path]&.valid_service_ping_status?
end
def permitted_categories
@permitted_categories ||= ::ServicePing::PermitDataCategoriesService.new.execute
end
def permitted_metric?(key_path)
permitted_categories.include?(metric_category(key_path))
end
def has_metric_definition?(key_path)
metric_definitions[key_path].present?
end
def metric_category(key_path)
metric_definitions[key_path]
&.attributes
&.fetch(:data_category, ::ServicePing::PermitDataCategoriesService::OPTIONAL_CATEGORY)
end
def metric_definitions
@metric_definitions ||= ::Gitlab::Usage::MetricDefinition.definitions
end
end
end

View File

@ -1,7 +1,6 @@
- if any_projects?(@projects)
.project-item-select-holder.btn-group.gl-ml-auto.gl-mr-auto.gl-relative.gl-overflow-hidden{ class: 'gl-display-flex!' }
%a.btn.gl-button.btn-confirm.js-new-project-item-link.block-truncated.qa-new-project-item-link{ href: '', data: { label: local_assigns[:label], type: local_assigns[:type] }, class: "gl-m-0!" }
.dropdown.b-dropdown.gl-new-dropdown.btn-group.project-item-select-holder{ class: 'gl-display-inline-flex!' }
%a.btn.gl-button.btn-confirm.split-content-button.js-new-project-item-link.block-truncated.qa-new-project-item-link{ href: '', data: { label: local_assigns[:label], type: local_assigns[:type] } }
= gl_loading_icon(inline: true, color: 'light')
= project_select_tag :project_path, class: "project-item-select gl-absolute! gl-visibility-hidden", data: { include_groups: local_assigns[:include_groups], order_by: 'last_activity_at', relative_path: local_assigns[:path], with_shared: local_assigns[:with_shared], include_projects_in_subgroups: local_assigns[:include_projects_in_subgroups] }, with_feature_enabled: local_assigns[:with_feature_enabled]
%button.btn.dropdown-toggle.btn-confirm.btn-md.gl-button.gl-dropdown-toggle.dropdown-toggle-split.new-project-item-select-button.qa-new-project-item-select-button.gl-p-0.gl-w-100{ class: "gl-m-0!", 'aria-label': _('Toggle project select') }
= sprite_icon('chevron-down')
%button.btn.dropdown-toggle.btn-confirm.btn-md.gl-button.gl-dropdown-toggle.dropdown-toggle-split.new-project-item-select-button.qa-new-project-item-select-button{ 'aria-label': _('Toggle project select') }

View File

@ -3,15 +3,6 @@
#
# Do not edit it manually!
---
- :name: authorized_project_update:authorized_project_update_project_create
:worker_name: AuthorizedProjectUpdate::ProjectCreateWorker
:feature_category: :authentication_and_authorization
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: authorized_project_update:authorized_project_update_project_recalculate
:worker_name: AuthorizedProjectUpdate::ProjectRecalculateWorker
:feature_category: :authentication_and_authorization

View File

@ -1,23 +0,0 @@
# frozen_string_literal: true
module AuthorizedProjectUpdate
class ProjectCreateWorker
include ApplicationWorker
data_consistency :always
sidekiq_options retry: 3
feature_category :authentication_and_authorization
urgency :low
queue_namespace :authorized_project_update
idempotent!
def perform(project_id)
project = Project.find(project_id)
AuthorizedProjectUpdate::ProjectCreateService.new(project).execute
end
end
end

View File

@ -1,8 +1,8 @@
---
name: deployment_approval_rules
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/83495
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/354726
name: ci_recreate_downstream_pipeline
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/83613
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/358409
milestone: '14.10'
type: development
group: group::release
default_enabled: true
group: group::pipeline authoring
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: container_registry_follow_redirects_middleware
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/81056
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/353291
milestone: '14.9'
type: development
group: group::package
default_enabled: true

View File

@ -2,7 +2,7 @@
name: optimized_followed_users_queries
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/84856
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/358649
milestone: '14.10'
milestone: '15.0'
type: development
group: group::optimize
default_enabled: false
default_enabled: true

View File

@ -0,0 +1,18 @@
- name: "GraphQL permissions change for Package settings"
announcement_milestone: "14.9"
announcement_date: "2022-03-22"
removal_milestone: "15.0"
removal_date: "2022-05-22"
breaking_change: true
reporter: trizzi
body: | # Do not modify this line, instead modify the lines below.
The GitLab Package stage offers a Package Registry, Container Registry, and Dependency Proxy to help you manage all of your dependencies using GitLab. Each of these product categories has a variety of settings that can be adjusted using the API.
The permissions model for GraphQL is being updated. After 15.0, users with the Guest, Reporter, and Developer role can no longer update these settings:
- [Package Registry settings](https://docs.gitlab.com/ee/api/graphql/reference/#packagesettings)
- [Container Registry cleanup policy](https://docs.gitlab.com/ee/api/graphql/reference/#containerexpirationpolicy)
- [Dependency Proxy time-to-live policy](https://docs.gitlab.com/ee/api/graphql/reference/#dependencyproxyimagettlgrouppolicy)
- [Enabling the Dependency Proxy for your group](https://docs.gitlab.com/ee/api/graphql/reference/#dependencyproxysetting)
The issue for this removal is [GitLab-#350682](https://gitlab.com/gitlab-org/gitlab/-/issues/350682)

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
class RemoveTmpIndexSupportingLeakyRegexCleanup < Gitlab::Database::Migration[2.0]
INDEX_NAME = "tmp_index_merge_requests_draft_and_status_leaky_regex"
LEAKY_REGEXP_STR = "^\\[draft\\]|\\(draft\\)|draft:|draft|\\[WIP\\]|WIP:|WIP"
CORRECTED_REGEXP_STR = "^(\\[draft\\]|\\(draft\\)|draft:|draft|\\[WIP\\]|WIP:|WIP)"
disable_ddl_transaction!
def up
remove_concurrent_index_by_name :merge_requests, INDEX_NAME
end
def down
where_clause = <<~SQL
draft = true AND
state_id = 1 AND
((title)::text ~* '#{LEAKY_REGEXP_STR}'::text) AND ((title)::text !~* '#{CORRECTED_REGEXP_STR}'::text)
SQL
add_concurrent_index :merge_requests, :id,
where: where_clause,
name: INDEX_NAME
end
end

View File

@ -0,0 +1 @@
4042ca84ca23fafa3943705704c68606f1c423832395170d93988f90255c3249

View File

@ -29738,8 +29738,6 @@ CREATE INDEX tmp_index_issues_on_issue_type_and_id ON issues USING btree (issue_
CREATE INDEX tmp_index_members_on_state ON members USING btree (state) WHERE (state = 2);
CREATE INDEX tmp_index_merge_requests_draft_and_status_leaky_regex ON merge_requests USING btree (id) WHERE ((draft = true) AND (state_id = 1) AND ((title)::text ~* '^\[draft\]|\(draft\)|draft:|draft|\[WIP\]|WIP:|WIP'::text) AND ((title)::text !~* '^(\[draft\]|\(draft\)|draft:|draft|\[WIP\]|WIP:|WIP)'::text));
CREATE INDEX tmp_index_namespaces_empty_traversal_ids_with_child_namespaces ON namespaces USING btree (id) WHERE ((parent_id IS NOT NULL) AND (traversal_ids = '{}'::integer[]));
CREATE INDEX tmp_index_namespaces_empty_traversal_ids_with_root_namespaces ON namespaces USING btree (id) WHERE ((parent_id IS NULL) AND (traversal_ids = '{}'::integer[]));

View File

@ -1391,8 +1391,8 @@ To configure the Praefect nodes, on each one:
praefect['database_host'] = '10.6.0.141'
praefect['database_port'] = 5432
# `no_proxy` settings must always be a direct connection for caching
praefect['database_host_no_proxy'] = '10.6.0.141'
praefect['database_port_no_proxy'] = 5432
praefect['database_direct_host'] = '10.6.0.141'
praefect['database_direct_port'] = 5432
praefect['database_dbname'] = 'praefect_production'
praefect['database_user'] = 'praefect'
praefect['database_password'] = '<praefect_postgresql_password>'

View File

@ -1395,8 +1395,8 @@ To configure the Praefect nodes, on each one:
praefect['database_host'] = '10.6.0.141'
praefect['database_port'] = 5432
# `no_proxy` settings must always be a direct connection for caching
praefect['database_host_no_proxy'] = '10.6.0.141'
praefect['database_port_no_proxy'] = 5432
praefect['database_direct_host'] = '10.6.0.141'
praefect['database_direct_port'] = 5432
praefect['database_dbname'] = 'praefect_production'
praefect['database_user'] = 'praefect'
praefect['database_password'] = '<praefect_postgresql_password>'

View File

@ -1335,8 +1335,8 @@ To configure the Praefect nodes, on each one:
praefect['database_host'] = '10.6.0.141'
praefect['database_port'] = 5432
# `no_proxy` settings must always be a direct connection for caching
praefect['database_host_no_proxy'] = '10.6.0.141'
praefect['database_port_no_proxy'] = 5432
praefect['database_direct_host'] = '10.6.0.141'
praefect['database_direct_port'] = 5432
praefect['database_dbname'] = 'praefect_production'
praefect['database_user'] = 'praefect'
praefect['database_password'] = '<praefect_postgresql_password>'

View File

@ -1404,8 +1404,8 @@ To configure the Praefect nodes, on each one:
praefect['database_host'] = '10.6.0.141'
praefect['database_port'] = 5432
# `no_proxy` settings must always be a direct connection for caching
praefect['database_host_no_proxy'] = '10.6.0.141'
praefect['database_port_no_proxy'] = 5432
praefect['database_direct_host'] = '10.6.0.141'
praefect['database_direct_port'] = 5432
praefect['database_dbname'] = 'praefect_production'
praefect['database_user'] = 'praefect'
praefect['database_password'] = '<praefect_postgresql_password>'

View File

@ -1333,8 +1333,8 @@ To configure the Praefect nodes, on each one:
praefect['database_host'] = '10.6.0.141'
praefect['database_port'] = 5432
# `no_proxy` settings must always be a direct connection for caching
praefect['database_host_no_proxy'] = '10.6.0.141'
praefect['database_port_no_proxy'] = 5432
praefect['database_direct_host'] = '10.6.0.141'
praefect['database_direct_port'] = 5432
praefect['database_dbname'] = 'praefect_production'
praefect['database_user'] = 'praefect'
praefect['database_password'] = '<praefect_postgresql_password>'

View File

@ -92,7 +92,8 @@ Maintainer role.
#### Multiple approval rules
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/345678) in GitLab 14.10 with a flag named `deployment_approval_rules`. Disabled by default.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/345678) in GitLab 14.10 with a flag named `deployment_approval_rules`. Disabled by default.
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/345678) in GitLab 15.0. [Feature flag `deployment_approval_rules`](https://gitlab.com/gitlab-org/gitlab/-/issues/345678) removed.
1. Using the [REST API](../../api/group_protected_environments.md#protect-an-environment).
1. `deploy_access_levels` represents which entity can execute the deployment job.

View File

@ -40,10 +40,10 @@ The following example uses `if` to define that the job runs in only two specific
job:
script: echo "Hello, Rules!"
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
when: manual
allow_failure: true
- if: '$CI_PIPELINE_SOURCE == "schedule"'
- if: $CI_PIPELINE_SOURCE == "schedule"
```
- If the pipeline is for a merge request, the first rule matches, and the job
@ -67,9 +67,9 @@ run them in all other cases:
job:
script: echo "Hello, Rules!"
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
when: never
- if: '$CI_PIPELINE_SOURCE == "schedule"'
- if: $CI_PIPELINE_SOURCE == "schedule"
when: never
- when: on_success
```
@ -118,7 +118,7 @@ For example:
docker build:
script: docker build -t my-image:$CI_COMMIT_REF_SLUG .
rules:
- if: '$VAR == "string value"'
- if: $VAR == "string value"
changes: # Include the job and set to when:manual if any of the follow paths match a modified file.
- Dockerfile
- docker/scripts/*
@ -160,7 +160,7 @@ For example:
job:
script: echo "This job creates double pipelines!"
rules:
- if: '$CUSTOM_VARIABLE == "false"'
- if: $CUSTOM_VARIABLE == "false"
when: never
- when: always
```
@ -181,7 +181,7 @@ To avoid duplicate pipelines, you can:
job:
script: echo "This job does NOT create double pipelines!"
rules:
- if: '$CUSTOM_VARIABLE == "true" && $CI_PIPELINE_SOURCE == "merge_request_event"'
- if: $CUSTOM_VARIABLE == "true" && $CI_PIPELINE_SOURCE == "merge_request_event"
```
You can also avoid duplicate pipelines by changing the job rules to avoid either push (branch)
@ -195,7 +195,7 @@ without `workflow: rules`:
job:
script: echo "This job does NOT create double pipelines!"
rules:
- if: '$CI_PIPELINE_SOURCE == "push"'
- if: $CI_PIPELINE_SOURCE == "push"
when: never
- when: always
```
@ -207,8 +207,8 @@ You should not include both push and merge request pipelines in the same job wit
job:
script: echo "This job creates double pipelines!"
rules:
- if: '$CI_PIPELINE_SOURCE == "push"'
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
- if: $CI_PIPELINE_SOURCE == "push"
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
```
Also, do not mix `only/except` jobs with `rules` jobs in the same pipeline.
@ -222,7 +222,7 @@ job-with-no-rules:
job-with-rules:
script: echo "This job runs in merge request pipelines."
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
```
For every change pushed to the branch, duplicate pipelines run. One
@ -259,10 +259,10 @@ add the job to any other pipeline type.
job:
script: echo "Hello, Rules!"
rules:
- if: '$CI_PIPELINE_SOURCE == "schedule"'
- if: $CI_PIPELINE_SOURCE == "schedule"
when: manual
allow_failure: true
- if: '$CI_PIPELINE_SOURCE == "push"'
- if: $CI_PIPELINE_SOURCE == "push"
```
The following example runs the job as a `when: on_success` job in [merge request pipelines](../pipelines/merge_request_pipelines.md)
@ -272,25 +272,25 @@ and scheduled pipelines. It does not run in any other pipeline type.
job:
script: echo "Hello, Rules!"
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
- if: '$CI_PIPELINE_SOURCE == "schedule"'
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_PIPELINE_SOURCE == "schedule"
```
Other commonly used variables for `if` clauses:
- `if: $CI_COMMIT_TAG`: If changes are pushed for a tag.
- `if: $CI_COMMIT_BRANCH`: If changes are pushed to any branch.
- `if: '$CI_COMMIT_BRANCH == "main"'`: If changes are pushed to `main`.
- `if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'`: If changes are pushed to the default
- `if: $CI_COMMIT_BRANCH == "main"`: If changes are pushed to `main`.
- `if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH`: If changes are pushed to the default
branch. Use when you want to have the same configuration in multiple
projects with different default branches.
- `if: '$CI_COMMIT_BRANCH =~ /regex-expression/'`: If the commit branch matches a regular expression.
- `if: $CI_COMMIT_BRANCH =~ /regex-expression/`: If the commit branch matches a regular expression.
- `if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_COMMIT_TITLE =~ /Merge branch.*/`:
If the commit branch is the default branch and the commit message title matches a regular expression.
For example, the default commit message for a merge commit starts with `Merge branch`.
- `if: '$CUSTOM_VARIABLE !~ /regex-expression/'`: If the [custom variable](../variables/index.md#custom-cicd-variables)
- `if: $CUSTOM_VARIABLE !~ /regex-expression/`: If the [custom variable](../variables/index.md#custom-cicd-variables)
`CUSTOM_VARIABLE` does **not** match a regular expression.
- `if: '$CUSTOM_VARIABLE == "value1"'`: If the custom variable `CUSTOM_VARIABLE` is
- `if: $CUSTOM_VARIABLE == "value1"`: If the custom variable `CUSTOM_VARIABLE` is
exactly `value1`.
### Variables in `rules:changes`

View File

@ -25,7 +25,7 @@ All metrics are stored in YAML files:
- [`config/metrics`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/config/metrics)
WARNING:
Only metrics with a metric definition YAML are added to the Service Ping JSON payload.
Only metrics with a metric definition YAML and whose status is not `removed` are added to the Service Ping JSON payload.
Each metric is defined in a separate YAML file consisting of a number of fields:
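An abridged, illustrative definition may help here; the key path and values below are hypothetical, while the field names follow the schema referenced above:

# Illustrative metric definition (hypothetical metric, abridged field set)
key_path: counts.issues_created
description: Total count of issues created
product_group: group::project management
value_type: number
status: active            # a `removed` status excludes the metric from the payload
data_category: optional
time_frame: all
data_source: database
tier: [free, premium, ultimate]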

View File

@ -16,6 +16,7 @@ For any of the following scenarios, the `start-review-app-pipeline` job would be
- for merge requests with frontend changes
- for merge requests with changes to `{,ee/,jh/}{app/controllers}/**/*`
- for merge requests with changes to `{,ee/,jh/}{app/models}/**/*`
- for merge requests with changes to `{,ee/,jh/}lib/{,ee/,jh/}gitlab/**/*`
- for merge requests with QA changes
- for scheduled pipelines
- the MR has the `pipeline:run-review-app` label set
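The new `lib/gitlab` entry corresponds to the `lib-gitlab-patterns` anchor added in the CI rules hunk earlier in this commit; a simplified sketch of how the pattern and rule pair up (rule name abbreviated here, not the literal `.gitlab/ci/rules.gitlab-ci.yml` contents) is:

.lib-gitlab-patterns: &lib-gitlab-patterns
  - "{,ee/,jh/}lib/{,ee/,jh/}gitlab/**/*"

.review:rules:start-review-app-pipeline:
  rules:
    - <<: *if-dot-com-gitlab-org-merge-request   # condition anchor defined elsewhere in the rules file
      changes: *lib-gitlab-patterns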

View File

@ -321,7 +321,7 @@ Find where your version sits in the upgrade path below, and upgrade GitLab
accordingly, while also consulting the
[version-specific upgrade instructions](#version-specific-upgrading-instructions):
`8.11.Z` -> `8.12.0` -> `8.17.7` -> `9.5.10` -> `10.8.7` -> [`11.11.8`](#1200) -> `12.0.12` -> [`12.1.17`](#1210) -> `12.10.14` -> `13.0.14` -> [`13.1.11`](#1310) -> [`13.8.8`](#1388) -> [`13.12.15`](#13120) -> [`14.0.12`](#1400) -> [latest `14.Y.Z`](https://gitlab.com/gitlab-org/gitlab/-/releases)
`8.11.Z` -> `8.12.0` -> `8.17.7` -> `9.5.10` -> `10.8.7` -> [`11.11.8`](#1200) -> `12.0.12` -> [`12.1.17`](#1210) -> [`12.10.14`](#12100) -> `13.0.14` -> [`13.1.11`](#1310) -> [`13.8.8`](#1388) -> [`13.12.15`](#13120) -> [`14.0.12`](#1400) -> [latest `14.Y.Z`](https://gitlab.com/gitlab-org/gitlab/-/releases)
The following table, while not exhaustive, shows some examples of the supported
upgrade paths.
@ -813,6 +813,20 @@ supplied with GitLab during upgrades. We recommend you use these GitLab-supplied
If you persist your own Rack Attack initializers between upgrades, you might
[get `500` errors](https://gitlab.com/gitlab-org/gitlab/-/issues/334681) when [upgrading to GitLab 14.0 and later](#1400).
### 12.10.0
- The final patch release (12.10.14)
[has a regression affecting maven package uploads](https://about.gitlab.com/releases/2020/07/06/critical-security-release-gitlab-13-1-3-released/#maven-package-upload-broken-in-121014).
If you use this feature and need to stay on 12.10 while preparing to upgrade to 13.0:
- Upgrade to 12.10.13 instead.
- Upgrade to 13.0.14 as soon as possible.
- [GitLab 13.0 requires PostgreSQL 11](https://about.gitlab.com/releases/2020/05/22/gitlab-13-0-released/#postgresql-11-is-now-the-minimum-required-version-to-install-gitlab).
- 12.10 is the final release that shipped with PostgreSQL 9.6, 10, and 11.
- You should make sure that your database is PostgreSQL 11 on GitLab 12.10 before upgrading to 13.0. This will require downtime.
### 12.2.0
In 12.2.0, we enabled Rails' authenticated cookie encryption. Old sessions are

View File

@ -61,6 +61,25 @@ The Container Registry supports [authentication](https://gitlab.com/gitlab-org/c
Since it isn't used in the context of GitLab (the product), `htpasswd` authentication will be deprecated in GitLab 14.9 and removed in GitLab 15.0.
### GraphQL permissions change for Package settings
WARNING:
This feature was changed or removed in 15.0
as a [breaking change](https://docs.gitlab.com/ee/development/contributing/#breaking-changes).
Before updating GitLab, review the details carefully to determine if you need to make any
changes to your code, settings, or workflow.
The GitLab Package stage offers a Package Registry, Container Registry, and Dependency Proxy to help you manage all of your dependencies using GitLab. Each of these product categories has a variety of settings that can be adjusted using the API.
The permissions model for GraphQL is being updated. After 15.0, users with the Guest, Reporter, and Developer role can no longer update these settings:
- [Package Registry settings](https://docs.gitlab.com/ee/api/graphql/reference/#packagesettings)
- [Container Registry cleanup policy](https://docs.gitlab.com/ee/api/graphql/reference/#containerexpirationpolicy)
- [Dependency Proxy time-to-live policy](https://docs.gitlab.com/ee/api/graphql/reference/#dependencyproxyimagettlgrouppolicy)
- [Enabling the Dependency Proxy for your group](https://docs.gitlab.com/ee/api/graphql/reference/#dependencyproxysetting)
The issue for this removal is [GitLab-#350682](https://gitlab.com/gitlab-org/gitlab/-/issues/350682)
### Vulnerability Check
WARNING:

View File

@ -31,9 +31,6 @@ module ContainerRegistry
end
}.freeze
# Taken from: FaradayMiddleware::FollowRedirects
REDIRECT_CODES = Set.new [301, 302, 303, 307]
class << self
private
@ -98,23 +95,10 @@ module ContainerRegistry
conn.adapter :net_http
end
def response_body(response, allow_redirect: false)
if allow_redirect && REDIRECT_CODES.include?(response.status)
response = redirect_response(response.headers['location'])
end
def response_body(response)
response.body if response && response.success?
end
def redirect_response(location)
return unless location
uri = URI(@base_uri).merge(location)
raise ArgumentError, "Invalid scheme for #{location}" unless %w[http https].include?(uri.scheme)
faraday_redirect.get(uri)
end
def configure_connection(conn)
conn.headers['Accept'] = ACCEPTED_TYPES
@ -125,18 +109,6 @@ module ContainerRegistry
conn.response :json, content_type: OCI_MANIFEST_V1_TYPE
end
# Create a new request to make sure the Authorization header is not inserted
# via the Faraday middleware
def faraday_redirect
@faraday_redirect ||= faraday_base do |conn|
conn.request :json
conn.request(:retry, RETRY_OPTIONS)
conn.request(:gitlab_error_callback, ERROR_CALLBACK_OPTIONS)
conn.adapter :net_http
end
end
def delete_if_exists(path)
result = faraday.delete(path)

View File

@ -130,7 +130,7 @@ module ContainerRegistry
def blob(name, digest, type = nil)
type ||= 'application/octet-stream'
response_body faraday_blob.get("/v2/#{name}/blobs/#{digest}", nil, 'Accept' => type), allow_redirect: true
response_body faraday_blob.get("/v2/#{name}/blobs/#{digest}", nil, 'Accept' => type)
end
def delete_blob(name, digest)
@ -152,9 +152,7 @@ module ContainerRegistry
@faraday_blob ||= faraday_base do |conn|
initialize_connection(conn, @options)
if Feature.enabled?(:container_registry_follow_redirects_middleware, default_enabled: :yaml)
conn.use ::FaradayMiddleware::FollowRedirects, REDIRECT_OPTIONS
end
conn.use ::FaradayMiddleware::FollowRedirects, REDIRECT_OPTIONS
end
end
end

View File

@ -21,7 +21,7 @@ module Gitlab
end
def unit
_('per day')
_('/day')
end
def links

View File

@ -6,7 +6,8 @@ module Gitlab
METRIC_SCHEMA_PATH = Rails.root.join('config', 'metrics', 'schema.json')
BASE_REPO_PATH = 'https://gitlab.com/gitlab-org/gitlab/-/blob/master'
SKIP_VALIDATION_STATUSES = %w[deprecated removed].to_set.freeze
AVAILABLE_STATUSES = %w[active data_available implemented deprecated].freeze
AVAILABLE_STATUSES = %w[active data_available implemented deprecated].to_set.freeze
VALID_SERVICE_PING_STATUSES = %w[active data_available implemented deprecated broken].to_set.freeze
InvalidError = Class.new(RuntimeError)
@ -64,6 +65,10 @@ module Gitlab
AVAILABLE_STATUSES.include?(attributes[:status])
end
def valid_service_ping_status?
VALID_SERVICE_PING_STATUSES.include?(attributes[:status])
end
alias_method :to_dictionary, :to_h
class << self

View File

@ -14,7 +14,14 @@ module Gitlab
# ::Issue.where(database_time_constraints)
# end
# end
UnimplementedOperationError = Class.new(StandardError) # rubocop:disable UsageData/InstrumentationSuperclass
class << self
IMPLEMENTED_OPERATIONS = %i(count distinct_count estimate_batch_distinct_count).freeze
private_constant :IMPLEMENTED_OPERATIONS
def start(&block)
return @metric_start&.call unless block_given?
@ -40,6 +47,8 @@ module Gitlab
end
def operation(symbol, column: nil, &block)
raise UnimplementedOperationError unless symbol.in?(IMPLEMENTED_OPERATIONS)
@metric_operation = symbol
@column = column
@metric_operation_block = block if block_given?

View File

@ -1324,6 +1324,9 @@ msgstr ""
msgid "/"
msgstr ""
msgid "/day"
msgstr ""
msgid "0 bytes"
msgstr ""
@ -4393,12 +4396,21 @@ msgstr ""
msgid "ApplicationSettings|Add a link to Grafana"
msgstr ""
msgid "ApplicationSettings|After sign up text"
msgid "ApplicationSettings|After sign-up text"
msgstr ""
msgid "ApplicationSettings|After the instance reaches the user cap, any user who is added or requests access must be approved by an administrator. Leave blank for unlimited."
msgstr ""
msgid "ApplicationSettings|Allowed domains for sign-ups"
msgstr ""
msgid "ApplicationSettings|Any user that visits %{host} and creates an account must be explicitly approved by an administrator before they can sign in. Only effective if sign-ups are enabled."
msgstr ""
msgid "ApplicationSettings|Any user that visits %{host} can create an account."
msgstr ""
msgid "ApplicationSettings|Approve %d user"
msgid_plural "ApplicationSettings|Approve %d users"
msgstr[0] ""
@ -4439,37 +4451,31 @@ msgstr ""
msgid "ApplicationSettings|Enable Slack application"
msgstr ""
msgid "ApplicationSettings|Enable domain denylist for sign ups"
msgid "ApplicationSettings|Enable domain denylist for sign-ups"
msgstr ""
msgid "ApplicationSettings|Enable email restrictions for sign ups"
msgid "ApplicationSettings|Enable email restrictions for sign-ups"
msgstr ""
msgid "ApplicationSettings|Enter denylist manually"
msgstr ""
msgid "ApplicationSettings|Markdown enabled"
msgstr ""
msgid "ApplicationSettings|Minimum password length (number of characters)"
msgstr ""
msgid "ApplicationSettings|ONLY users with e-mail addresses that match these domain(s) will be able to sign-up. Wildcards allowed. Use separate lines for multiple entries. Ex: domain.com, *.domain.com"
msgstr ""
msgid "ApplicationSettings|Once the instance reaches the user cap, any user who is added or requests access will have to be approved by an admin. Leave the field empty for unlimited."
msgid "ApplicationSettings|Only users with e-mail addresses that match these domain(s) can sign up. Wildcards allowed. Use separate lines for multiple entries. Example: domain.com, *.domain.com"
msgstr ""
msgid "ApplicationSettings|Require admin approval for new sign-ups"
msgstr ""
msgid "ApplicationSettings|Restricts sign-ups for email addresses that match the given regex. See the %{linkStart}supported syntax%{linkEnd} for more information."
msgid "ApplicationSettings|Restricts sign-ups for email addresses that match the given regex. %{linkStart}What is the supported syntax?%{linkEnd}"
msgstr ""
msgid "ApplicationSettings|Save changes"
msgstr ""
msgid "ApplicationSettings|See GitLab's %{linkStart}Password Policy Guidelines%{linkEnd}"
msgid "ApplicationSettings|See GitLab's %{linkStart}Password Policy Guidelines%{linkEnd}."
msgstr ""
msgid "ApplicationSettings|Send confirmation email on sign-up"
@ -4478,6 +4484,9 @@ msgstr ""
msgid "ApplicationSettings|Sign-up enabled"
msgstr ""
msgid "ApplicationSettings|Text shown after a user signs up. Markdown enabled."
msgstr ""
msgid "ApplicationSettings|This option is only available on GitLab.com"
msgstr ""
@ -4487,16 +4496,10 @@ msgstr ""
msgid "ApplicationSettings|User cap"
msgstr ""
msgid "ApplicationSettings|Users with e-mail addresses that match these domain(s) will NOT be able to sign-up. Wildcards allowed. Use separate lines for multiple entries. Ex: domain.com, *.domain.com"
msgid "ApplicationSettings|Users with e-mail addresses that match these domain(s) cannot sign up. Wildcards allowed. Use separate lines for multiple entries. Example: domain.com, *.domain.com"
msgstr ""
msgid "ApplicationSettings|Users with e-mail addresses that match these domain(s) will NOT be able to sign-up. Wildcards allowed. Use separate lines or commas for multiple entries."
msgstr ""
msgid "ApplicationSettings|When enabled, any user visiting %{host} and creating an account will have to be explicitly approved by an admin before they can sign in. This setting is effective only if sign-ups are enabled."
msgstr ""
msgid "ApplicationSettings|When enabled, any user visiting %{host} will be able to create an account."
msgid "ApplicationSettings|Users with e-mail addresses that match these domain(s) cannot sign up. Wildcards allowed. Use separate lines or commas for multiple entries."
msgstr ""
msgid "ApplicationSettings|domain.com"
@ -45477,9 +45480,6 @@ msgstr ""
msgid "pending deletion"
msgstr ""
msgid "per day"
msgstr ""
msgid "personal access token"
msgstr ""

View File

@ -6,11 +6,12 @@ RSpec.describe 'Dropdown assignee', :js do
include FilteredSearchHelpers
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user, name: 'administrator', username: 'root') }
let_it_be(:user) { create(:user) }
let_it_be(:issue) { create(:issue, project: project) }
let(:js_dropdown_assignee) { '#js-dropdown-assignee' }
let(:filter_dropdown) { find("#{js_dropdown_assignee} .filter-dropdown") }
before do
stub_feature_flags(vue_issues_list: true)
end
describe 'behavior' do
before do
@ -21,15 +22,17 @@ RSpec.describe 'Dropdown assignee', :js do
end
it 'loads all the assignees when opened' do
input_filtered_search('assignee:=', submit: false, extra_space: false)
select_tokens 'Assignee', '='
expect_filtered_search_dropdown_results(filter_dropdown, 2)
# Expect None, Any, administrator, John Doe2
expect_suggestion_count 4
end
it 'shows current user at top of dropdown' do
input_filtered_search('assignee:=', submit: false, extra_space: false)
select_tokens 'Assignee', '='
expect(filter_dropdown.first('.filter-dropdown-item')).to have_content(user.name)
# List items 1 to 3 are None, Any, divider
expect(page).to have_css('.gl-filtered-search-suggestion:nth-child(4)', text: user.name)
end
end
@ -41,7 +44,7 @@ RSpec.describe 'Dropdown assignee', :js do
visit project_issues_path(project)
Gitlab::Testing::RequestBlockerMiddleware.block_requests!
input_filtered_search('assignee:=', submit: false, extra_space: false)
select_tokens 'Assignee', '='
end
after do
@ -49,11 +52,10 @@ RSpec.describe 'Dropdown assignee', :js do
end
it 'selects current user' do
find("#{js_dropdown_assignee} .filter-dropdown-item", text: user.username).click
click_on user.username
expect(page).to have_css(js_dropdown_assignee, visible: false)
expect_tokens([assignee_token(user.username)])
expect_filtered_search_input_empty
expect_assignee_token(user.username)
expect_empty_search_term
end
end
@ -93,7 +95,7 @@ RSpec.describe 'Dropdown assignee', :js do
it 'shows inherited, direct, and invited group members but not descendent members', :aggregate_failures do
visit issues_group_path(subgroup)
input_filtered_search('assignee:=', submit: false, extra_space: false)
select_tokens 'Assignee', '='
expect(page).to have_text group_user.name
expect(page).to have_text subgroup_user.name
@ -103,7 +105,7 @@ RSpec.describe 'Dropdown assignee', :js do
visit project_issues_path(subgroup_project)
input_filtered_search('assignee:=', submit: false, extra_space: false)
select_tokens 'Assignee', '='
expect(page).to have_text group_user.name
expect(page).to have_text subgroup_user.name

View File

@ -6,13 +6,12 @@ RSpec.describe 'Dropdown author', :js do
include FilteredSearchHelpers
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user, name: 'administrator', username: 'root') }
let_it_be(:user) { create(:user) }
let_it_be(:issue) { create(:issue, project: project) }
let(:js_dropdown_author) { '#js-dropdown-author' }
let(:filter_dropdown) { find("#{js_dropdown_author} .filter-dropdown") }
before do
stub_feature_flags(vue_issues_list: true)
project.add_maintainer(user)
sign_in(user)
@ -21,22 +20,22 @@ RSpec.describe 'Dropdown author', :js do
describe 'behavior' do
it 'loads all the authors when opened' do
input_filtered_search('author:=', submit: false, extra_space: false)
select_tokens 'Author', '='
expect_filtered_search_dropdown_results(filter_dropdown, 2)
expect_suggestion_count 2
end
it 'shows current user at top of dropdown' do
input_filtered_search('author:=', submit: false, extra_space: false)
select_tokens 'Author', '='
expect(filter_dropdown.first('.filter-dropdown-item')).to have_content(user.name)
expect(page).to have_css('.gl-filtered-search-suggestion:first-child', text: user.name)
end
end
describe 'selecting from dropdown without Ajax call' do
before do
Gitlab::Testing::RequestBlockerMiddleware.block_requests!
input_filtered_search('author:=', submit: false, extra_space: false)
select_tokens 'Author', '='
end
after do
@ -44,11 +43,10 @@ RSpec.describe 'Dropdown author', :js do
end
it 'selects current user' do
find("#{js_dropdown_author} .filter-dropdown-item", text: user.username).click
click_on user.username
expect(page).to have_css(js_dropdown_author, visible: false)
expect_tokens([author_token(user.username)])
expect_filtered_search_input_empty
expect_author_token(user.username)
expect_empty_search_term
end
end
end

View File

@ -6,18 +6,12 @@ RSpec.describe 'Dropdown base', :js do
include FilteredSearchHelpers
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user, name: 'administrator', username: 'root') }
let_it_be(:user) { create(:user) }
let_it_be(:issue) { create(:issue, project: project) }
let(:filtered_search) { find('.filtered-search') }
let(:js_dropdown_assignee) { '#js-dropdown-assignee' }
let(:filter_dropdown) { find("#{js_dropdown_assignee} .filter-dropdown") }
def dropdown_assignee_size
filter_dropdown.all('.filter-dropdown-item').size
end
before do
stub_feature_flags(vue_issues_list: true)
project.add_maintainer(user)
sign_in(user)
@ -26,17 +20,17 @@ RSpec.describe 'Dropdown base', :js do
describe 'caching requests' do
it 'caches requests after the first load' do
input_filtered_search('assignee:=', submit: false, extra_space: false)
initial_size = dropdown_assignee_size
select_tokens 'Assignee', '='
initial_size = get_suggestion_count
expect(initial_size).to be > 0
new_user = create(:user)
project.add_maintainer(new_user)
find('.filtered-search-box .clear-search').click
input_filtered_search('assignee:=', submit: false, extra_space: false)
click_button 'Clear'
select_tokens 'Assignee', '='
expect(dropdown_assignee_size).to eq(initial_size)
expect_suggestion_count(initial_size)
end
end
end

View File

@ -6,15 +6,13 @@ RSpec.describe 'Dropdown emoji', :js do
include FilteredSearchHelpers
let_it_be(:project) { create(:project, :public) }
let_it_be(:user) { create(:user, name: 'administrator', username: 'root') }
let_it_be(:user) { create(:user) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:award_emoji_star) { create(:award_emoji, name: 'star', user: user, awardable: issue) }
let(:filtered_search) { find('.filtered-search') }
let(:js_dropdown_emoji) { '#js-dropdown-my-reaction' }
let(:filter_dropdown) { find("#{js_dropdown_emoji} .filter-dropdown") }
before do
stub_feature_flags(vue_issues_list: true)
project.add_maintainer(user)
create_list(:award_emoji, 2, user: user, name: 'thumbsup')
create_list(:award_emoji, 1, user: user, name: 'thumbsdown')
@ -27,15 +25,15 @@ RSpec.describe 'Dropdown emoji', :js do
end
describe 'behavior' do
it 'does not open when the search bar has my-reaction=' do
filtered_search.set('my-reaction=')
it 'does not contain My-Reaction in the list of suggestions' do
click_filtered_search_bar
expect(page).not_to have_css(js_dropdown_emoji)
expect(page).not_to have_link 'My-Reaction'
end
end
end
context 'when user loggged in' do
context 'when user logged in' do
before do
sign_in(user)
@ -43,22 +41,18 @@ RSpec.describe 'Dropdown emoji', :js do
end
describe 'behavior' do
it 'opens when the search bar has my-reaction=' do
filtered_search.set('my-reaction:=')
expect(page).to have_css(js_dropdown_emoji, visible: true)
end
it 'loads all the emojis when opened' do
input_filtered_search('my-reaction:=', submit: false, extra_space: false)
select_tokens 'My-Reaction', '='
expect_filtered_search_dropdown_results(filter_dropdown, 3)
# Expect None, Any, star, thumbsup, thumbsdown
expect_suggestion_count 5
end
it 'shows the most populated emoji at top of dropdown' do
input_filtered_search('my-reaction:=', submit: false, extra_space: false)
select_tokens 'My-Reaction', '='
expect(first("#{js_dropdown_emoji} .filter-dropdown li")).to have_content(award_emoji_star.name)
# List items 1-3 are None, Any, divider
expect(page).to have_css('.gl-filtered-search-suggestion-list li:nth-child(4)', text: award_emoji_star.name)
end
end
end

View File

@ -9,19 +9,9 @@ RSpec.describe 'Dropdown hint', :js do
let_it_be(:user) { create(:user) }
let_it_be(:issue) { create(:issue, project: project) }
let(:filtered_search) { find('.filtered-search') }
let(:js_dropdown_hint) { '#js-dropdown-hint' }
let(:js_dropdown_operator) { '#js-dropdown-operator' }
def click_hint(text)
find('#js-dropdown-hint .filter-dropdown .filter-dropdown-item', text: text).click
end
def click_operator(op)
find("#js-dropdown-operator .filter-dropdown .filter-dropdown-item[data-value='#{op}']").click
end
before do
stub_feature_flags(vue_issues_list: true)
project.add_maintainer(user)
end
@ -31,8 +21,9 @@ RSpec.describe 'Dropdown hint', :js do
end
it 'does not exist my-reaction dropdown item' do
expect(page).to have_css(js_dropdown_hint, visible: false)
expect(page).not_to have_content('My-reaction')
click_filtered_search_bar
expect(page).not_to have_link 'My-reaction'
end
end
@ -45,57 +36,56 @@ RSpec.describe 'Dropdown hint', :js do
describe 'behavior' do
before do
expect(page).to have_css(js_dropdown_hint, visible: false)
filtered_search.click
click_filtered_search_bar
end
it 'opens when the search bar is first focused' do
expect(page).to have_css(js_dropdown_hint, visible: true)
expect_visible_suggestions_list
find('body').click
expect(page).to have_css(js_dropdown_hint, visible: false)
expect_hidden_suggestions_list
end
end
describe 'filtering' do
it 'filters with text' do
filtered_search.set('a')
click_filtered_search_bar
send_keys 'as'
expect(find(js_dropdown_hint)).to have_selector('.filter-dropdown .filter-dropdown-item', count: 6)
# Expect Assignee and Release
expect_suggestion_count 2
end
end
describe 'selecting from dropdown with no input' do
before do
filtered_search.click
click_filtered_search_bar
end
it 'opens the token dropdown when you click on it' do
click_hint('Author')
click_link 'Author'
expect(page).to have_css(js_dropdown_hint, visible: false)
expect(page).to have_css(js_dropdown_operator, visible: true)
expect_visible_suggestions_list
expect_suggestion '='
click_operator('=')
click_link '= is'
expect(page).to have_css(js_dropdown_hint, visible: false)
expect(page).to have_css(js_dropdown_operator, visible: false)
expect(page).to have_css('#js-dropdown-author', visible: true)
expect_tokens([{ name: 'Author', operator: '=' }])
expect_filtered_search_input_empty
expect_visible_suggestions_list
expect_token_segment 'Author'
expect_token_segment '='
expect_empty_search_term
end
end
describe 'reselecting from dropdown' do
it 'reuses existing token text' do
filtered_search.send_keys('author')
filtered_search.send_keys(:backspace)
filtered_search.send_keys(:backspace)
click_hint('Author')
click_filtered_search_bar
send_keys 'author', :backspace, :backspace
click_link 'Author'
expect_tokens([{ name: 'Author' }])
expect_filtered_search_input_empty
expect_token_segment 'Author'
expect_empty_search_term
end
end
end

View File

@ -10,10 +10,9 @@ RSpec.describe 'Dropdown label', :js do
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:label) { create(:label, project: project, title: 'bug-label') }
let(:filtered_search) { find('.filtered-search') }
let(:filter_dropdown) { find('#js-dropdown-label .filter-dropdown') }
before do
stub_feature_flags(vue_issues_list: true)
project.add_maintainer(user)
sign_in(user)
@ -22,9 +21,10 @@ RSpec.describe 'Dropdown label', :js do
describe 'behavior' do
it 'loads all the labels when opened' do
filtered_search.set('label:=')
select_tokens 'Label', '='
expect_filtered_search_dropdown_results(filter_dropdown, 1)
# Expect None, Any, bug-label
expect_suggestion_count 3
end
end
end

View File

@ -11,10 +11,9 @@ RSpec.describe 'Dropdown milestone', :js do
let_it_be(:uppercase_milestone) { create(:milestone, title: 'CAP_MILESTONE', project: project) }
let_it_be(:issue) { create(:issue, project: project) }
let(:filtered_search) { find('.filtered-search') }
let(:filter_dropdown) { find('#js-dropdown-milestone .filter-dropdown') }
before do
stub_feature_flags(vue_issues_list: true)
project.add_maintainer(user)
sign_in(user)
@ -22,12 +21,11 @@ RSpec.describe 'Dropdown milestone', :js do
end
describe 'behavior' do
before do
filtered_search.set('milestone:=')
end
it 'loads all the milestones when opened' do
expect_filtered_search_dropdown_results(filter_dropdown, 2)
select_tokens 'Milestone', '='
# Expect None, Any, Upcoming, Started, CAP_MILESTONE, v1.0
expect_suggestion_count 6
end
end
end

View File

@ -5,16 +5,15 @@ require 'spec_helper'
RSpec.describe 'Dropdown release', :js do
include FilteredSearchHelpers
let_it_be(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:release) { create(:release, tag: 'v1.0', project: project) }
let_it_be(:crazy_release) { create(:release, tag: '☺!/"#%&\'{}+,-.<>;=@]_`{|}🚀', project: project) }
let_it_be(:issue) { create(:issue, project: project) }
let(:filtered_search) { find('.filtered-search') }
let(:filter_dropdown) { find('#js-dropdown-release .filter-dropdown') }
before do
stub_feature_flags(vue_issues_list: true)
project.add_maintainer(user)
sign_in(user)
@ -22,12 +21,11 @@ RSpec.describe 'Dropdown release', :js do
end
describe 'behavior' do
before do
filtered_search.set('release:=')
end
it 'loads all the releases when opened' do
expect_filtered_search_dropdown_results(filter_dropdown, 2)
select_tokens 'Release', '='
# Expect None, Any, v1.0, !/\"#%&'{}+,-.<>;=@]_`{|}
expect_suggestion_count 4
end
end
end

View File

@ -6,7 +6,7 @@ const MOCK_METRIC = {
key: 'deployment-frequency',
label: 'Deployment Frequency',
value: '10.0',
unit: 'per day',
unit: '/day',
description: 'Average number of deployments to production per day.',
links: [],
};

View File

@ -40,7 +40,7 @@ export const summary = [
{ value: '20', title: 'New Issues' },
{ value: null, title: 'Commits' },
{ value: null, title: 'Deploys' },
{ value: null, title: 'Deployment Frequency', unit: 'per day' },
{ value: null, title: 'Deployment Frequency', unit: '/day' },
];
export const issueStage = {
@ -130,7 +130,7 @@ export const convertedData = {
{ value: '20', title: 'New Issues' },
{ value: '-', title: 'Commits' },
{ value: '-', title: 'Deploys' },
{ value: '-', title: 'Deployment Frequency', unit: 'per day' },
{ value: '-', title: 'Deployment Frequency', unit: '/day' },
],
};

View File

@ -199,69 +199,16 @@ RSpec.describe ContainerRegistry::Client do
let(:redirect_location) { 'http://redirect?foo=bar&test=signature=' }
it_behaves_like 'handling redirects'
context 'with container_registry_follow_redirects_middleware disabled' do
before do
stub_feature_flags(container_registry_follow_redirects_middleware: false)
end
it 'follows the redirect' do
expect(Faraday::Utils).to receive(:escape).with('foo').and_call_original
expect(Faraday::Utils).to receive(:escape).with('bar').and_call_original
expect(Faraday::Utils).to receive(:escape).with('test').and_call_original
expect(Faraday::Utils).to receive(:escape).with('signature=').and_call_original
expect_new_faraday(times: 2)
expect(subject).to eq('Successfully redirected')
end
end
end
context 'with a redirect location with params ending with %3D' do
let(:redirect_location) { 'http://redirect?foo=bar&test=signature%3D' }
it_behaves_like 'handling redirects'
context 'with container_registry_follow_redirects_middleware disabled' do
before do
stub_feature_flags(container_registry_follow_redirects_middleware: false)
end
it 'follows the redirect' do
expect(Faraday::Utils).to receive(:escape).with('foo').and_call_original
expect(Faraday::Utils).to receive(:escape).with('bar').and_call_original
expect(Faraday::Utils).to receive(:escape).with('test').and_call_original
expect(Faraday::Utils).to receive(:escape).with('signature=').and_call_original
expect_new_faraday(times: 2)
expect(subject).to eq('Successfully redirected')
end
end
end
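# Note (an assumption about Faraday's query escaping, not asserted by this change):
# Faraday::Utils.escape('signature=') #=> "signature%3D", which is why the
# flag-disabled paths above expect each query param to be re-escaped before the
# redirect is followed manually.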
end
it_behaves_like 'handling timeouts'
# TODO Remove this context along with the
# container_registry_follow_redirects_middleware feature flag
# See https://gitlab.com/gitlab-org/gitlab/-/issues/353291
context 'faraday blob' do
subject { client.send(:faraday_blob) }
it 'has a follow redirects middleware' do
expect(subject.builder.handlers).to include(::FaradayMiddleware::FollowRedirects)
end
context 'with container_registry_follow_redirects_middleware disabled' do
before do
stub_feature_flags(container_registry_follow_redirects_middleware: false)
end
it 'does not have a follow redirects middleware' do
expect(subject.builder.handlers).not_to include(::FaradayMiddleware::FollowRedirects)
end
end
end
end
describe '#upload_blob' do

View File

@ -177,6 +177,24 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
end
end
describe '#valid_service_ping_status?' do
context 'when metric has active status' do
it 'returns true' do
attributes[:status] = 'active'
expect(described_class.new(path, attributes).valid_service_ping_status?).to be_truthy
end
end
context 'when metric has removed status' do
it 'returns false' do
attributes[:status] = 'removed'
expect(described_class.new(path, attributes).valid_service_ping_status?).to be_falsey
end
end
end
describe 'statuses' do
using RSpec::Parameterized::TableSyntax

View File

@ -161,4 +161,17 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
end
end
end
context 'with unimplemented operation method used' do
subject do
described_class.tap do |metric_class|
metric_class.relation { Issue }
metric_class.operation :invalid_operation
end.new(time_frame: 'all')
end
it 'raises an error' do
expect { subject }.to raise_error(described_class::UnimplementedOperationError)
end
end
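# A minimal counter-example sketch (illustrative only, assuming the DSL also
# accepts a supported operation such as :count):
#
#   subject do
#     described_class.tap do |metric_class|
#       metric_class.relation { Issue }
#       metric_class.operation :count
#     end.new(time_frame: 'all')
#   end
#
#   it 'does not raise an error' do
#     expect { subject }.not_to raise_error
#   end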
end

View File

@ -31,7 +31,37 @@ RSpec.describe Ci::Bridge do
end
describe '#retryable?' do
let(:bridge) { create(:ci_bridge, :success) }
it 'returns true' do
expect(bridge.retryable?).to eq(true)
end
context 'without ci_recreate_downstream_pipeline ff' do
before do
stub_feature_flags(ci_recreate_downstream_pipeline: false)
end
it 'returns false' do
expect(bridge.retryable?).to eq(false)
end
end
end
context 'when there is a pipeline loop detected' do
let(:bridge) { create(:ci_bridge, :failed, failure_reason: :pipeline_loop_detected) }
it 'returns false' do
expect(bridge.failure_reason).to eq('pipeline_loop_detected')
expect(bridge.retryable?).to eq(false)
end
end
context 'when the pipeline depth has reached the max descendents' do
let(:bridge) { create(:ci_bridge, :failed, failure_reason: :reached_max_descendant_pipelines_depth) }
it 'returns false' do
expect(bridge.failure_reason).to eq('reached_max_descendant_pipelines_depth')
expect(bridge.retryable?).to eq(false)
end
end

View File

@ -14,6 +14,223 @@ RSpec.describe Ci::Processable do
it { is_expected.to delegate_method(:legacy_detached_merge_request_pipeline?).to(:pipeline) }
end
describe '#clone' do
let(:user) { create(:user) }
let(:new_processable) do
new_proc = processable.clone(current_user: user)
new_proc.save!
new_proc
end
let_it_be(:stage) { create(:ci_stage_entity, project: project, pipeline: pipeline, name: 'test') }
shared_context 'processable bridge' do
let_it_be(:downstream_project) { create(:project, :repository) }
let_it_be_with_refind(:processable) do
create(
:ci_bridge, :success, pipeline: pipeline, downstream: downstream_project,
description: 'a trigger job', stage_id: stage.id
)
end
let(:clone_accessors) { ::Ci::Bridge.clone_accessors }
let(:reject_accessors) { [] }
let(:ignore_accessors) { [] }
end
shared_context 'processable build' do
let_it_be(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
let_it_be_with_refind(:processable) do
create(:ci_build, :failed, :picked, :expired, :erased, :queued, :coverage, :tags,
:allowed_to_fail, :on_tag, :triggered, :teardown_environment, :resource_group,
description: 'my-job', stage: 'test', stage_id: stage.id,
pipeline: pipeline, auto_canceled_by: another_pipeline,
scheduled_at: 10.seconds.since)
end
let_it_be(:internal_job_variable) { create(:ci_job_variable, job: processable) }
let(:clone_accessors) { ::Ci::Build.clone_accessors.without(::Ci::Build.extra_accessors) }
let(:reject_accessors) do
%i[id status user token token_encrypted coverage trace runner
artifacts_expire_at
created_at updated_at started_at finished_at queued_at erased_by
erased_at auto_canceled_by job_artifacts job_artifacts_archive
job_artifacts_metadata job_artifacts_trace job_artifacts_junit
job_artifacts_sast job_artifacts_secret_detection job_artifacts_dependency_scanning
job_artifacts_container_scanning job_artifacts_cluster_image_scanning job_artifacts_dast
job_artifacts_license_scanning
job_artifacts_performance job_artifacts_browser_performance job_artifacts_load_performance
job_artifacts_lsif job_artifacts_terraform job_artifacts_cluster_applications
job_artifacts_codequality job_artifacts_metrics scheduled_at
job_variables waiting_for_resource_at job_artifacts_metrics_referee
job_artifacts_network_referee job_artifacts_dotenv
job_artifacts_cobertura needs job_artifacts_accessibility
job_artifacts_requirements job_artifacts_coverage_fuzzing
job_artifacts_api_fuzzing terraform_state_versions].freeze
end
let(:ignore_accessors) do
%i[type lock_version target_url base_tags trace_sections
commit_id deployment erased_by_id project_id
runner_id tag_taggings taggings tags trigger_request_id
user_id auto_canceled_by_id retried failure_reason
sourced_pipelines artifacts_file_store artifacts_metadata_store
metadata runner_session trace_chunks upstream_pipeline_id
artifacts_file artifacts_metadata artifacts_size commands
resource resource_group_id processed security_scans author
pipeline_id report_results pending_state pages_deployments
queuing_entry runtime_metadata trace_metadata
dast_site_profile dast_scanner_profile].freeze
end
before_all do
# Create artifacts to check that the associations are rejected when cloning
Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS.each do |file_type, file_format|
create(:ci_job_artifact, file_format,
file_type: file_type, job: processable, expire_at: processable.artifacts_expire_at)
end
create(:ci_job_variable, :dotenv_source, job: processable)
create(:terraform_state_version, build: processable)
end
before do
processable.update!(retried: false, status: :success)
end
end
shared_examples_for 'clones the processable' do
before_all do
processable.update!(stage: 'test', stage_id: stage.id)
create(:ci_build_need, build: processable)
end
describe 'clone accessors' do
let(:forbidden_associations) do
Ci::Build.reflect_on_all_associations.each_with_object(Set.new) do |assoc, memo|
memo << assoc.name unless assoc.macro == :belongs_to
end
end
it 'clones the processable attributes', :aggregate_failures do
clone_accessors.each do |attribute|
expect(attribute).not_to be_in(forbidden_associations), "association #{attribute} must be `belongs_to`"
expect(processable.send(attribute)).not_to be_nil, "old processable attribute #{attribute} should not be nil"
expect(new_processable.send(attribute)).not_to be_nil, "new processable attribute #{attribute} should not be nil"
expect(new_processable.send(attribute)).to eq(processable.send(attribute)), "new processable attribute #{attribute} should match old processable"
end
end
it 'clones only the needs attributes' do
expect(new_processable.needs.size).to be(1)
expect(processable.needs.exists?).to be_truthy
expect(new_processable.needs_attributes).to match(processable.needs_attributes)
expect(new_processable.needs).not_to match(processable.needs)
end
context 'when the processable has protected: nil' do
before do
processable.update_attribute(:protected, nil)
end
it 'clones the protected job attribute' do
expect(new_processable.protected).to be_nil
expect(new_processable.protected).to eq processable.protected
end
end
end
describe 'reject accessors' do
it 'does not clone rejected attributes' do
reject_accessors.each do |attribute|
expect(new_processable.send(attribute)).not_to eq(processable.send(attribute)), "processable attribute #{attribute} should not have been cloned"
end
end
end
it 'creates a new processable that represents the old processable' do
expect(new_processable.name).to eq processable.name
end
end
context 'when the processable to be cloned is a bridge' do
include_context 'processable bridge'
it_behaves_like 'clones the processable'
end
context 'when the processable to be cloned is a build' do
include_context 'processable build'
it_behaves_like 'clones the processable'
it 'has the correct number of known attributes', :aggregate_failures do
processed_accessors = clone_accessors + reject_accessors
known_accessors = processed_accessors + ignore_accessors
current_accessors =
Ci::Build.attribute_names.map(&:to_sym) +
Ci::Build.attribute_aliases.keys.map(&:to_sym) +
Ci::Build.reflect_on_all_associations.map(&:name) +
[:tag_list, :needs_attributes, :job_variables_attributes] -
# ToDo: Move EE accessors to ee/
::Ci::Build.extra_accessors -
[:dast_site_profiles_build, :dast_scanner_profiles_build]
current_accessors.uniq!
expect(current_accessors).to include(*processed_accessors)
expect(known_accessors).to include(*current_accessors)
end
context 'when it has a deployment' do
let!(:processable) do
create(:ci_build, :with_deployment, :deploy_to_production,
pipeline: pipeline, stage_id: stage.id, project: project)
end
it 'persists the expanded environment name' do
expect(new_processable.metadata.expanded_environment_name).to eq('production')
end
end
context 'when it has a dynamic environment' do
let_it_be(:other_developer) { create(:user).tap { |u| project.add_developer(u) } }
let(:environment_name) { 'review/$CI_COMMIT_REF_SLUG-$GITLAB_USER_ID' }
let!(:processable) do
create(:ci_build, :with_deployment, environment: environment_name,
options: { environment: { name: environment_name } },
pipeline: pipeline, stage_id: stage.id, project: project,
user: other_developer)
end
it 're-uses the previous persisted environment' do
expect(processable.persisted_environment.name).to eq("review/#{processable.ref}-#{other_developer.id}")
expect(new_processable.persisted_environment.name).to eq("review/#{processable.ref}-#{other_developer.id}")
end
end
context 'when the processable has job variables' do
it 'only clones the internal job variables' do
expect(new_processable.job_variables.size).to eq(1)
expect(new_processable.job_variables.first.key).to eq(internal_job_variable.key)
expect(new_processable.job_variables.first.value).to eq(internal_job_variable.value)
end
end
end
end
describe '#retryable' do
shared_examples_for 'retryable processable' do
context 'when processable is successful' do
@ -69,6 +286,12 @@ RSpec.describe Ci::Processable do
end
end
context 'when the processable is a bridge' do
subject(:processable) { create(:ci_bridge, pipeline: pipeline) }
it_behaves_like 'retryable processable'
end
context 'when the processable is a build' do
subject(:processable) { create(:ci_build, pipeline: pipeline) }

View File

@ -1,185 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe AuthorizedProjectUpdate::ProjectCreateService do
let_it_be(:group_parent) { create(:group, :private) }
let_it_be(:group) { create(:group, :private, parent: group_parent) }
let_it_be(:group_child) { create(:group, :private, parent: group) }
let_it_be(:group_project) { create(:project, group: group) }
let_it_be(:parent_group_user) { create(:user) }
let_it_be(:group_user) { create(:user) }
let_it_be(:child_group_user) { create(:user) }
let(:access_level) { Gitlab::Access::MAINTAINER }
subject(:service) { described_class.new(group_project) }
describe '#perform' do
context 'direct group members' do
before do
create(:group_member, access_level: access_level, group: group, user: group_user)
ProjectAuthorization.delete_all
end
it 'creates project authorization' do
expect { service.execute }.to(
change { ProjectAuthorization.count }.from(0).to(1))
project_authorization = ProjectAuthorization.where(
project_id: group_project.id,
user_id: group_user.id,
access_level: access_level)
expect(project_authorization).to exist
end
end
context 'inherited group members' do
before do
create(:group_member, access_level: access_level, group: group_parent, user: parent_group_user)
ProjectAuthorization.delete_all
end
it 'creates project authorization' do
expect { service.execute }.to(
change { ProjectAuthorization.count }.from(0).to(1))
project_authorization = ProjectAuthorization.where(
project_id: group_project.id,
user_id: parent_group_user.id,
access_level: access_level)
expect(project_authorization).to exist
end
end
context 'membership overrides' do
context 'group hierarchy' do
before do
create(:group_member, access_level: Gitlab::Access::REPORTER, group: group_parent, user: group_user)
create(:group_member, access_level: Gitlab::Access::DEVELOPER, group: group, user: group_user)
ProjectAuthorization.delete_all
end
it 'creates project authorization' do
expect { service.execute }.to(
change { ProjectAuthorization.count }.from(0).to(1))
project_authorization = ProjectAuthorization.where(
project_id: group_project.id,
user_id: group_user.id,
access_level: Gitlab::Access::DEVELOPER)
expect(project_authorization).to exist
end
end
context 'group sharing' do
let!(:shared_with_group) { create(:group) }
before do
create(:group_member, access_level: Gitlab::Access::REPORTER, group: group, user: group_user)
create(:group_member, access_level: Gitlab::Access::MAINTAINER, group: shared_with_group, user: group_user)
create(:group_member, :minimal_access, source: shared_with_group, user: create(:user))
create(:group_group_link, shared_group: group, shared_with_group: shared_with_group, group_access: Gitlab::Access::DEVELOPER)
ProjectAuthorization.delete_all
end
it 'creates project authorization' do
expect { service.execute }.to(
change { ProjectAuthorization.count }.from(0).to(1))
project_authorization = ProjectAuthorization.where(
project_id: group_project.id,
user_id: group_user.id,
access_level: Gitlab::Access::DEVELOPER)
expect(project_authorization).to exist
end
it 'does not create project authorization for user with minimal access' do
expect { service.execute }.to(
change { ProjectAuthorization.count }.from(0).to(1))
end
end
end
context 'no group member' do
it 'does not create project authorization' do
expect { service.execute }.not_to(
change { ProjectAuthorization.count }.from(0))
end
end
context 'unapproved access requests' do
before do
create(:group_member, :guest, :access_request, user: group_user, group: group)
end
it 'does not create project authorization' do
expect { service.execute }.not_to(
change { ProjectAuthorization.count }.from(0))
end
end
context 'member with minimal access' do
before do
create(:group_member, :minimal_access, user: group_user, source: group)
end
it 'does not create project authorization' do
expect { service.execute }.not_to(
change { ProjectAuthorization.count }.from(0))
end
end
context 'project has more users than BATCH_SIZE' do
let(:batch_size) { 2 }
let(:users) { create_list(:user, batch_size + 1 ) }
before do
stub_const("#{described_class.name}::BATCH_SIZE", batch_size)
users.each do |user|
create(:group_member, access_level: access_level, group: group_parent, user: user)
end
ProjectAuthorization.delete_all
end
it 'bulk creates project authorizations in batches' do
users.each_slice(batch_size) do |batch|
attributes = batch.map do |user|
{ user_id: user.id, project_id: group_project.id, access_level: access_level }
end
expect(ProjectAuthorization).to(
receive(:insert_all).with(array_including(attributes)).and_call_original)
end
expect { service.execute }.to(
change { ProjectAuthorization.count }.from(0).to(batch_size + 1))
end
end
context 'ignores existing project authorizations' do
before do
# ProjectAuthorizations is also created because of an after_commit
# callback on Member model
create(:group_member, access_level: access_level, group: group, user: group_user)
end
it 'does not create project authorization' do
project_authorization = ProjectAuthorization.where(
project_id: group_project.id,
user_id: group_user.id,
access_level: access_level)
expect { service.execute }.not_to(
change { project_authorization.reload.exists? }.from(true))
end
end
end
end

View File

@ -17,183 +17,257 @@ RSpec.describe Ci::RetryJobService do
name: 'test')
end
let_it_be_with_refind(:build) { create(:ci_build, :success, pipeline: pipeline, stage_id: stage.id) }
let(:user) { developer }
let(:service) do
described_class.new(project, user)
end
let(:service) { described_class.new(project, user) }
before_all do
project.add_developer(developer)
project.add_reporter(reporter)
end
clone_accessors = ::Ci::Build.clone_accessors.without(::Ci::Build.extra_accessors)
shared_context 'retryable bridge' do
let_it_be(:downstream_project) { create(:project, :repository) }
reject_accessors =
%i[id status user token token_encrypted coverage trace runner
artifacts_expire_at
created_at updated_at started_at finished_at queued_at erased_by
erased_at auto_canceled_by job_artifacts job_artifacts_archive
job_artifacts_metadata job_artifacts_trace job_artifacts_junit
job_artifacts_sast job_artifacts_secret_detection job_artifacts_dependency_scanning
job_artifacts_container_scanning job_artifacts_cluster_image_scanning job_artifacts_dast
job_artifacts_license_scanning
job_artifacts_performance job_artifacts_browser_performance job_artifacts_load_performance
job_artifacts_lsif job_artifacts_terraform job_artifacts_cluster_applications
job_artifacts_codequality job_artifacts_metrics scheduled_at
job_variables waiting_for_resource_at job_artifacts_metrics_referee
job_artifacts_network_referee job_artifacts_dotenv
job_artifacts_cobertura needs job_artifacts_accessibility
job_artifacts_requirements job_artifacts_coverage_fuzzing
job_artifacts_api_fuzzing terraform_state_versions].freeze
ignore_accessors =
%i[type lock_version target_url base_tags trace_sections
commit_id deployment erased_by_id project_id
runner_id tag_taggings taggings tags trigger_request_id
user_id auto_canceled_by_id retried failure_reason
sourced_pipelines artifacts_file_store artifacts_metadata_store
metadata runner_session trace_chunks upstream_pipeline_id
artifacts_file artifacts_metadata artifacts_size commands
resource resource_group_id processed security_scans author
pipeline_id report_results pending_state pages_deployments
queuing_entry runtime_metadata trace_metadata
dast_site_profile dast_scanner_profile].freeze
shared_examples 'build duplication' do
let_it_be(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
let_it_be(:build) do
create(:ci_build, :failed, :picked, :expired, :erased, :queued, :coverage, :tags,
:allowed_to_fail, :on_tag, :triggered, :teardown_environment, :resource_group,
description: 'my-job', stage: 'test', stage_id: stage.id,
pipeline: pipeline, auto_canceled_by: another_pipeline,
scheduled_at: 10.seconds.since)
let_it_be_with_refind(:job) do
create(
:ci_bridge, :success, pipeline: pipeline, downstream: downstream_project,
description: 'a trigger job', stage_id: stage.id
)
end
let_it_be(:internal_job_variable) { create(:ci_job_variable, job: build) }
before_all do
# Make sure that build has both `stage_id` and `stage` because FactoryBot
# can reset one of the fields when assigning another. We plan to deprecate
# and remove legacy `stage` column in the future.
build.update!(stage: 'test', stage_id: stage.id)
# Make sure we have one instance for every possible job_artifact_X
# association to check that they are correctly rejected on build duplication.
Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS.each do |file_type, file_format|
create(:ci_job_artifact, file_format,
file_type: file_type, job: build, expire_at: build.artifacts_expire_at)
end
create(:ci_job_variable, :dotenv_source, job: build)
create(:ci_build_need, build: build)
create(:terraform_state_version, build: build)
end
let_it_be(:job_to_clone) { job }
before do
build.update!(retried: false, status: :success)
end
describe 'clone accessors' do
let(:forbidden_associations) do
Ci::Build.reflect_on_all_associations.each_with_object(Set.new) do |assoc, memo|
memo << assoc.name unless assoc.macro == :belongs_to
end
end
clone_accessors.each do |attribute|
it "clones #{attribute} build attribute", :aggregate_failures do
expect(attribute).not_to be_in(forbidden_associations), "association #{attribute} must be `belongs_to`"
expect(build.send(attribute)).not_to be_nil
expect(new_build.send(attribute)).not_to be_nil
expect(new_build.send(attribute)).to eq build.send(attribute)
end
end
context 'when job has nullified protected' do
before do
build.update_attribute(:protected, nil)
end
it "clones protected build attribute" do
expect(new_build.protected).to be_nil
expect(new_build.protected).to eq build.protected
end
end
it 'clones only the needs attributes' do
expect(new_build.needs.exists?).to be_truthy
expect(build.needs.exists?).to be_truthy
expect(new_build.needs_attributes).to match(build.needs_attributes)
expect(new_build.needs).not_to match(build.needs)
end
it 'clones only internal job variables' do
expect(new_build.job_variables.count).to eq(1)
expect(new_build.job_variables).to contain_exactly(having_attributes(key: internal_job_variable.key, value: internal_job_variable.value))
end
end
describe 'reject accessors' do
reject_accessors.each do |attribute|
it "does not clone #{attribute} build attribute" do
expect(new_build.send(attribute)).not_to eq build.send(attribute)
end
end
end
it 'has correct number of known attributes', :aggregate_failures do
processed_accessors = clone_accessors + reject_accessors
known_accessors = processed_accessors + ignore_accessors
# :tag_list is a special case: this accessor does not exist
# in the reflected associations; it comes from `acts_as_taggable` and
# we use it to copy tags instead of reusing them.
#
current_accessors =
Ci::Build.attribute_names.map(&:to_sym) +
Ci::Build.attribute_aliases.keys.map(&:to_sym) +
Ci::Build.reflect_on_all_associations.map(&:name) +
[:tag_list, :needs_attributes, :job_variables_attributes] -
# ee-specific accessors should be tested in ee/spec/services/ci/retry_job_service_spec.rb instead
Ci::Build.extra_accessors -
[:dast_site_profiles_build, :dast_scanner_profiles_build] # join tables
current_accessors.uniq!
expect(current_accessors).to include(*processed_accessors)
expect(known_accessors).to include(*current_accessors)
job.update!(retried: false)
end
end
describe '#execute' do
let(:new_build) do
travel_to(1.second.from_now) do
service.execute(build)[:job]
end
shared_context 'retryable build' do
let_it_be_with_refind(:job) { create(:ci_build, :success, pipeline: pipeline, stage_id: stage.id) }
let_it_be(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
let_it_be(:job_to_clone) do
create(:ci_build, :failed, :picked, :expired, :erased, :queued, :coverage, :tags,
:allowed_to_fail, :on_tag, :triggered, :teardown_environment, :resource_group,
description: 'my-job', stage: 'test', stage_id: stage.id,
pipeline: pipeline, auto_canceled_by: another_pipeline,
scheduled_at: 10.seconds.since)
end
context 'when user has ability to execute build' do
before do
job.update!(retried: false, status: :success)
job_to_clone.update!(retried: false, status: :success)
end
end
shared_examples_for 'clones the job' do
let(:job) { job_to_clone }
before_all do
# Make sure that job has both `stage_id` and `stage`
job_to_clone.update!(stage: 'test', stage_id: stage.id)
create(:ci_build_need, build: job_to_clone)
end
context 'when the user has ability to execute job' do
before do
stub_not_protect_default_branch
end
it_behaves_like 'build duplication'
context 'when there is a failed job ToDo for the MR' do
let!(:merge_request) { create(:merge_request, source_project: project, author: user, head_pipeline: pipeline) }
let!(:todo) { create(:todo, :build_failed, user: user, project: project, author: user, target: merge_request) }
it 'creates a new build that represents the old one' do
expect(new_build.name).to eq build.name
it 'resolves the ToDo for the failed job' do
expect do
service.execute(job)
end.to change { todo.reload.state }.from('pending').to('done')
end
end
it 'enqueues the new build' do
expect(new_build).to be_pending
context 'when the job has needs' do
before do
create(:ci_build_need, build: job, name: 'build1')
create(:ci_build_need, build: job, name: 'build2')
end
it 'bulk inserts all the needs' do
expect(Ci::BuildNeed).to receive(:bulk_insert!).and_call_original
new_job
end
end
context 'when there are subsequent processables that are skipped' do
it 'marks the old job as retried' do
expect(new_job).to be_latest
expect(job).to be_retried
expect(job).to be_processed
end
end
context 'when the user does not have permission to execute the job' do
let(:user) { reporter }
it 'raises an error' do
expect { service.execute(job) }
.to raise_error Gitlab::Access::AccessDeniedError
end
end
end
shared_examples_for 'retries the job' do
it_behaves_like 'clones the job'
it 'enqueues the new job' do
expect(new_job).to be_pending
end
context 'when there are subsequent processables that are skipped' do
let!(:subsequent_build) do
create(:ci_build, :skipped, stage_idx: 2,
pipeline: pipeline,
stage: 'deploy')
end
let!(:subsequent_bridge) do
create(:ci_bridge, :skipped, stage_idx: 2,
pipeline: pipeline,
stage: 'deploy')
end
it 'resumes pipeline processing in the subsequent stage' do
service.execute(job)
expect(subsequent_build.reload).to be_created
expect(subsequent_bridge.reload).to be_created
end
it 'updates ownership for subsequent builds' do
expect { service.execute(job) }.to change { subsequent_build.reload.user }.to(user)
end
it 'updates ownership for subsequent bridges' do
expect { service.execute(job) }.to change { subsequent_bridge.reload.user }.to(user)
end
end
context 'when the pipeline has other jobs' do
let!(:stage2) { create(:ci_stage_entity, project: project, pipeline: pipeline, name: 'deploy') }
let!(:build2) { create(:ci_build, pipeline: pipeline, stage_id: stage.id ) }
let!(:deploy) { create(:ci_build, pipeline: pipeline, stage_id: stage2.id) }
let!(:deploy_needs_build2) { create(:ci_build_need, build: deploy, name: build2.name) }
context 'when job has a nil scheduling_type' do
before do
job.pipeline.processables.update_all(scheduling_type: nil)
job.reload
end
it 'populates scheduling_type of processables' do
expect(new_job.scheduling_type).to eq('stage')
expect(job.reload.scheduling_type).to eq('stage')
expect(build2.reload.scheduling_type).to eq('stage')
expect(deploy.reload.scheduling_type).to eq('dag')
end
end
context 'when job has scheduling_type' do
it 'does not call populate_scheduling_type!' do
expect(job.pipeline).not_to receive(:ensure_scheduling_type!)
expect(new_job.scheduling_type).to eq('stage')
end
end
end
context 'when the pipeline is a child pipeline and the bridge uses strategy:depend' do
let!(:parent_pipeline) { create(:ci_pipeline, project: project) }
let!(:bridge) { create(:ci_bridge, :strategy_depend, pipeline: parent_pipeline, status: 'success') }
let!(:source_pipeline) { create(:ci_sources_pipeline, pipeline: pipeline, source_job: bridge) }
it 'marks the source bridge as pending' do
service.execute(job)
expect(bridge.reload).to be_pending
end
end
end
describe '#clone!' do
let(:new_job) { service.clone!(job) }
it 'raises an error when an unexpected class is passed' do
expect { service.clone!(create(:ci_build).present) }.to raise_error(TypeError)
end
context 'when the job to be cloned is a bridge' do
include_context 'retryable bridge'
it_behaves_like 'clones the job'
end
context 'when the job to be cloned is a build' do
include_context 'retryable build'
let(:job) { job_to_clone }
it_behaves_like 'clones the job'
context 'when a build with a deployment is retried' do
let!(:job) do
create(:ci_build, :with_deployment, :deploy_to_production,
pipeline: pipeline, stage_id: stage.id, project: project)
end
it 'creates a new deployment' do
expect { new_job }.to change { Deployment.count }.by(1)
end
it 'does not create a new environment' do
expect { new_job }.not_to change { Environment.count }
end
end
context 'when a build with a dynamic environment is retried' do
let_it_be(:other_developer) { create(:user).tap { |u| project.add_developer(u) } }
let(:environment_name) { 'review/$CI_COMMIT_REF_SLUG-$GITLAB_USER_ID' }
let!(:job) do
create(:ci_build, :with_deployment, environment: environment_name,
options: { environment: { name: environment_name } },
pipeline: pipeline, stage_id: stage.id, project: project,
user: other_developer)
end
it 'creates a new deployment' do
expect { new_job }.to change { Deployment.count }.by(1)
end
it 'does not create a new environment' do
expect { new_job }.not_to change { Environment.count }
end
end
end
end
describe '#execute' do
let(:new_job) { service.execute(job)[:job] }
context 'when the job to be retried is a bridge' do
include_context 'retryable bridge'
it_behaves_like 'retries the job'
end
context 'when the job to be retried is a build' do
include_context 'retryable build'
it_behaves_like 'retries the job'
context 'when there are subsequent jobs that are skipped' do
let!(:subsequent_build) do
create(:ci_build, :skipped, stage_idx: 2,
pipeline: pipeline,
@ -206,207 +280,13 @@ RSpec.describe Ci::RetryJobService do
stage: 'deploy')
end
it 'resumes pipeline processing in the subsequent stage' do
service.execute(build)
it 'does not cause an N+1 when updating the job ownership' do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { service.execute(job) }.count
expect(subsequent_build.reload).to be_created
expect(subsequent_bridge.reload).to be_created
create_list(:ci_build, 2, :skipped, stage_idx: job.stage_idx + 1, pipeline: pipeline, stage: 'deploy')
expect { service.execute(job) }.not_to exceed_all_query_limit(control_count)
end
it 'updates ownership for subsequent builds' do
expect { service.execute(build) }.to change { subsequent_build.reload.user }.to(user)
end
it 'updates ownership for subsequent bridges' do
expect { service.execute(build) }.to change { subsequent_bridge.reload.user }.to(user)
end
it 'does not cause an N+1 when updating build ownership' do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { service.execute(build) }.count
create_list(:ci_build, 2, :skipped, stage_idx: build.stage_idx + 1, pipeline: pipeline, stage: 'deploy')
expect { service.execute(build) }.not_to exceed_all_query_limit(control_count)
end
end
context 'when pipeline has other builds' do
let!(:stage2) { create(:ci_stage_entity, project: project, pipeline: pipeline, name: 'deploy') }
let!(:build2) { create(:ci_build, pipeline: pipeline, stage_id: stage.id ) }
let!(:deploy) { create(:ci_build, pipeline: pipeline, stage_id: stage2.id) }
let!(:deploy_needs_build2) { create(:ci_build_need, build: deploy, name: build2.name) }
context 'when build has nil scheduling_type' do
before do
build.pipeline.processables.update_all(scheduling_type: nil)
build.reload
end
it 'populates scheduling_type of processables' do
expect(new_build.scheduling_type).to eq('stage')
expect(build.reload.scheduling_type).to eq('stage')
expect(build2.reload.scheduling_type).to eq('stage')
expect(deploy.reload.scheduling_type).to eq('dag')
end
end
context 'when build has scheduling_type' do
it 'does not call populate_scheduling_type!' do
expect_any_instance_of(Ci::Pipeline).not_to receive(:ensure_scheduling_type!) # rubocop: disable RSpec/AnyInstanceOf
expect(new_build.scheduling_type).to eq('stage')
end
end
end
context 'when the pipeline is a child pipeline and the bridge is depended on' do
let!(:parent_pipeline) { create(:ci_pipeline, project: project) }
let!(:bridge) { create(:ci_bridge, :strategy_depend, pipeline: parent_pipeline, status: 'success') }
let!(:source_pipeline) { create(:ci_sources_pipeline, pipeline: pipeline, source_job: bridge) }
it 'marks source bridge as pending' do
service.execute(build)
expect(bridge.reload).to be_pending
end
end
context 'when there is a failed job todo for the MR' do
let!(:merge_request) { create(:merge_request, source_project: project, author: user, head_pipeline: pipeline) }
let!(:todo) { create(:todo, :build_failed, user: user, project: project, author: user, target: merge_request) }
it 'resolves the todo for the old failed build' do
expect do
service.execute(build)
end.to change { todo.reload.state }.from('pending').to('done')
end
end
end
context 'when user does not have ability to execute build' do
let(:user) { reporter }
it 'raises an error' do
expect { service.execute(build) }
.to raise_error Gitlab::Access::AccessDeniedError
end
context 'when the job is not retryable' do
let(:build) { create(:ci_build, :created, pipeline: pipeline) }
it 'returns a ServiceResponse error' do
response = service.execute(build)
expect(response).to be_a(ServiceResponse)
expect(response).to be_error
expect(response.message).to eq("Job cannot be retried")
end
end
end
end
describe '#clone!' do
let(:new_build) do
travel_to(1.second.from_now) do
service.clone!(build)
end
end
it 'raises an error when an unexpected class is passed' do
expect { service.clone!(create(:ci_build).present) }.to raise_error(TypeError)
end
context 'when user has ability to execute build' do
before do
stub_not_protect_default_branch
end
it_behaves_like 'build duplication'
it 'creates a new build that represents the old one' do
expect(new_build.name).to eq build.name
end
it 'does not enqueue the new build' do
expect(new_build).to be_created
expect(new_build).not_to be_processed
end
it 'marks the old build as retried' do
expect(new_build).to be_latest
expect(build).to be_retried
expect(build).to be_processed
end
shared_examples_for 'when build with deployment is retried' do
let!(:build) do
create(:ci_build, :with_deployment, :deploy_to_production,
pipeline: pipeline, stage_id: stage.id, project: project)
end
it 'creates a new deployment' do
expect { new_build }.to change { Deployment.count }.by(1)
end
it 'persists expanded environment name' do
expect(new_build.metadata.expanded_environment_name).to eq('production')
end
it 'does not create a new environment' do
expect { new_build }.not_to change { Environment.count }
end
end
shared_examples_for 'when build with dynamic environment is retried' do
let_it_be(:other_developer) { create(:user).tap { |u| project.add_developer(u) } }
let(:environment_name) { 'review/$CI_COMMIT_REF_SLUG-$GITLAB_USER_ID' }
let!(:build) do
create(:ci_build, :with_deployment, environment: environment_name,
options: { environment: { name: environment_name } },
pipeline: pipeline, stage_id: stage.id, project: project,
user: other_developer)
end
it 're-uses the previous persisted environment' do
expect(build.persisted_environment.name).to eq("review/#{build.ref}-#{other_developer.id}")
expect(new_build.persisted_environment.name).to eq("review/#{build.ref}-#{other_developer.id}")
end
it 'creates a new deployment' do
expect { new_build }.to change { Deployment.count }.by(1)
end
it 'does not create a new environment' do
expect { new_build }.not_to change { Environment.count }
end
end
it_behaves_like 'when build with deployment is retried'
it_behaves_like 'when build with dynamic environment is retried'
context 'when build has needs' do
before do
create(:ci_build_need, build: build, name: 'build1')
create(:ci_build_need, build: build, name: 'build2')
end
it 'bulk inserts all needs' do
expect(Ci::BuildNeed).to receive(:bulk_insert!).and_call_original
new_build
end
end
end
context 'when user does not have ability to execute build' do
let(:user) { reporter }
it 'raises an error' do
expect { service.clone!(build) }
.to raise_error Gitlab::Access::AccessDeniedError
end
end
end

View File

@ -9,7 +9,7 @@ RSpec.describe ServicePing::BuildPayloadService do
include_context 'stubbed service ping metrics definitions' do
let(:subscription_metrics) do
[
metric_attributes('active_user_count', "Subscription")
metric_attributes('active_user_count', "subscription")
]
end
end

View File

@ -187,4 +187,69 @@ module FilteredSearchHelpers
toggle.click if toggle.visible?
end
end
##
# For use with gl-filtered-search
def select_tokens(*args, submit: false)
within '[data-testid="filtered-search-input"]' do
find_field('Search').click
args.each do |token|
# Move mouse away to prevent invoking tooltips on usernames, which blocks the search input
find_button('Search').hover
if token == '='
click_on '= is'
else
click_on token
end
wait_for_requests
end
end
if submit
send_keys :enter
end
end
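# Illustrative usage (token and value names are examples, not taken from this change):
#
#   select_tokens 'Label', '=', 'bug', submit: true
#
# clicks the Label token, the '= is' operator and the 'bug' suggestion, then
# presses Enter to submit the search.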
def get_suggestion_count
all('.gl-filtered-search-suggestion').size
end
def click_filtered_search_bar
find('.gl-filtered-search-last-item').click
end
def expect_visible_suggestions_list
expect(page).to have_css('.gl-filtered-search-suggestion-list')
end
def expect_hidden_suggestions_list
expect(page).not_to have_css('.gl-filtered-search-suggestion-list')
end
def expect_suggestion(value)
expect(page).to have_css('.gl-filtered-search-suggestion', text: value)
end
def expect_suggestion_count(count)
expect(page).to have_css('.gl-filtered-search-suggestion', count: count)
end
def expect_assignee_token(value)
expect(page).to have_css '.gl-filtered-search-token', text: "Assignee = #{value}"
end
def expect_author_token(value)
expect(page).to have_css '.gl-filtered-search-token', text: "Author = #{value}"
end
def expect_empty_search_term
expect(page).to have_css '.gl-filtered-search-term', text: ''
end
def expect_token_segment(value)
expect(page).to have_css '.gl-filtered-search-token-segment', text: value
end
end
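# A hedged end-to-end sketch of composing these helpers in a feature spec
# (the author name is hypothetical):
#
#   select_tokens 'Author', '=', 'root', submit: true
#   expect_author_token 'root'
#   expect_empty_search_term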

View File

@ -43,12 +43,12 @@ RSpec.shared_context 'stubbed service ping metrics definitions' do
Gitlab::Usage::MetricDefinition.instance_variable_set(:@all, nil)
end
def metric_attributes(key_path, category, value_type = 'string', instrumentation_class = '')
def metric_attributes(key_path, category, value_type = 'string', instrumentation_class = '', status = 'active')
{
'key_path' => key_path,
'data_category' => category,
'value_type' => value_type,
'status' => 'active',
'status' => status,
'instrumentation_class' => instrumentation_class,
'time_frame' => 'all'
}

View File

@ -62,8 +62,8 @@ shared_examples 'deployment metrics examples' do
describe '#deployment_frequency' do
subject { stage_summary.fourth[:value] }
it 'includes the unit: `per day`' do
expect(stage_summary.fourth[:unit]).to eq _('per day')
it 'includes the unit: `/day`' do
expect(stage_summary.fourth[:unit]).to eq _('/day')
end
before do

View File

@ -1,50 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe AuthorizedProjectUpdate::ProjectCreateWorker do
let_it_be(:group) { create(:group, :private) }
let_it_be(:group_project) { create(:project, group: group) }
let_it_be(:group_user) { create(:user) }
let(:access_level) { Gitlab::Access::MAINTAINER }
subject(:worker) { described_class.new }
it 'calls AuthorizedProjectUpdate::ProjectCreateService' do
expect_next_instance_of(AuthorizedProjectUpdate::ProjectCreateService) do |service|
expect(service).to(receive(:execute))
end
worker.perform(group_project.id)
end
it 'returns ServiceResponse.success' do
result = worker.perform(group_project.id)
expect(result.success?).to be_truthy
end
context 'idempotence' do
before do
create(:group_member, access_level: access_level, group: group, user: group_user)
ProjectAuthorization.delete_all
end
include_examples 'an idempotent worker' do
let(:job_args) { group_project.id }
it 'creates project authorization' do
subject
project_authorization = ProjectAuthorization.where(
project_id: group_project.id,
user_id: group_user.id,
access_level: access_level)
expect(project_authorization).to exist
expect(ProjectAuthorization.count).to eq(1)
end
end
end
end

View File

@ -126,7 +126,6 @@ RSpec.describe 'Every Sidekiq worker' do
'ApproveBlockedPendingApprovalUsersWorker' => 3,
'ArchiveTraceWorker' => 3,
'AuthorizedKeysWorker' => 3,
'AuthorizedProjectUpdate::ProjectCreateWorker' => 3,
'AuthorizedProjectUpdate::UserRefreshOverUserRangeWorker' => 3,
'AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker' => 3,
'AuthorizedProjectUpdate::UserRefreshFromReplicaWorker' => 3,