Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-04-20 15:10:23 +00:00
parent 76e4e8f1b0
commit 6a85a7c312
72 changed files with 1643 additions and 406 deletions

View File

@@ -2,8 +2,8 @@ include:
- template: Jobs/Code-Quality.gitlab-ci.yml
- template: Jobs/SAST.gitlab-ci.yml
- template: Jobs/Secret-Detection.gitlab-ci.yml
- template: Security/Dependency-Scanning.gitlab-ci.yml
- template: Security/License-Scanning.gitlab-ci.yml
- template: Jobs/Dependency-Scanning.gitlab-ci.yml
- template: Jobs/License-Scanning.gitlab-ci.yml
code_quality:
extends:

View File

@@ -10,6 +10,8 @@ export const i18n = {
label: s__('Reports|Test summary'),
loading: s__('Reports|Test summary results are loading'),
error: s__('Reports|Test summary failed to load results'),
newHeader: s__('Reports|New'),
fixedHeader: s__('Reports|Fixed'),
fullReport: s__('Reports|Full report'),
noChanges: (bold) => s__(`Reports|${noText(bold)} changed test results`),
@@ -36,4 +38,32 @@ export const i18n = {
sprintf(s__('Reports|An error occurred while loading %{name} results'), { name }),
headReportParsingError: s__('Reports|Head report parsing error:'),
baseReportParsingError: s__('Reports|Base report parsing error:'),
recentFailureSummary: (recentlyFailed, failed) => {
if (failed < 2) {
return sprintf(
s__(
'Reports|%{recentlyFailed} out of %{failed} failed test has failed more than once in the last 14 days',
),
{ recentlyFailed, failed },
);
}
return sprintf(
n__(
'Reports|%{recentlyFailed} out of %{failed} failed tests has failed more than once in the last 14 days',
'Reports|%{recentlyFailed} out of %{failed} failed tests have failed more than once in the last 14 days',
recentlyFailed,
),
{ recentlyFailed, failed },
);
},
recentFailureCount: (recentFailures) =>
sprintf(
n__(
'Reports|Failed %{count} time in %{base_branch} in the last 14 days',
'Reports|Failed %{count} times in %{base_branch} in the last 14 days',
recentFailures.count,
),
recentFailures,
),
};

View File

@@ -1,7 +1,13 @@
import { uniqueId } from 'lodash';
import axios from '~/lib/utils/axios_utils';
import { EXTENSION_ICONS } from '../../constants';
import { summaryTextBuilder, reportTextBuilder, reportSubTextBuilder } from './utils';
import {
summaryTextBuilder,
reportTextBuilder,
reportSubTextBuilder,
countRecentlyFailedTests,
recentFailuresTextBuilder,
} from './utils';
import { i18n, TESTS_FAILED_STATUS, ERROR_STATUS } from './constants';
export default {
@@ -18,7 +24,10 @@ export default {
if (data.hasSuiteError) {
return this.$options.i18n.error;
}
return summaryTextBuilder(this.$options.i18n.label, data.summary);
return {
subject: summaryTextBuilder(this.$options.i18n.label, data.summary),
meta: recentFailuresTextBuilder(data.summary),
};
},
statusIcon(data) {
if (data.parsingInProgress) {
@@ -50,6 +59,10 @@ export default {
hasSuiteError: data.suites?.some((suite) => suite.status === ERROR_STATUS),
parsingInProgress: status === 204,
...data,
summary: {
recentlyFailed: countRecentlyFailedTests(data.suites),
...data.summary,
},
},
};
});
@@ -66,17 +79,66 @@ export default {
}
return EXTENSION_ICONS.success;
},
prepareReports() {
return this.collapsedData.suites.map((suite) => {
testHeader(test, sectionHeader, index) {
const headers = [];
if (index === 0) {
headers.push(sectionHeader);
}
if (test.recent_failures?.count && test.recent_failures?.base_branch) {
headers.push(i18n.recentFailureCount(test.recent_failures));
}
return headers;
},
mapTestAsChild({ iconName, sectionHeader }) {
return (test, index) => {
return {
id: uniqueId('suite-'),
text: reportTextBuilder(suite),
subtext: reportSubTextBuilder(suite),
icon: {
name: this.suiteIcon(suite),
},
id: uniqueId('test-'),
header: this.testHeader(test, sectionHeader, index),
icon: { name: iconName },
text: test.name,
};
});
};
},
prepareReports() {
return this.collapsedData.suites
.map((suite) => {
return {
...suite,
summary: {
recentlyFailed: countRecentlyFailedTests(suite),
...suite.summary,
},
};
})
.map((suite) => {
return {
id: uniqueId('suite-'),
text: reportTextBuilder(suite),
subtext: reportSubTextBuilder(suite),
icon: {
name: this.suiteIcon(suite),
},
children: [
...[...suite.new_failures, ...suite.new_errors].map(
this.mapTestAsChild({
sectionHeader: i18n.newHeader,
iconName: EXTENSION_ICONS.failed,
}),
),
...[...suite.existing_failures, ...suite.existing_errors].map(
this.mapTestAsChild({
iconName: EXTENSION_ICONS.failed,
}),
),
...[...suite.resolved_failures, ...suite.resolved_errors].map(
this.mapTestAsChild({
sectionHeader: i18n.fixedHeader,
iconName: EXTENSION_ICONS.success,
}),
),
],
};
});
},
},
};

View File

@@ -43,13 +43,42 @@ export const reportTextBuilder = ({ name = '', summary = {}, status }) => {
return i18n.summaryText(name, resultsString);
};
export const reportSubTextBuilder = ({ suite_errors }) => {
const errors = [];
if (suite_errors?.head) {
errors.push(`${i18n.headReportParsingError} ${suite_errors.head}`);
}
if (suite_errors?.base) {
errors.push(`${i18n.baseReportParsingError} ${suite_errors.base}`);
}
return errors.join('<br />');
export const recentFailuresTextBuilder = (summary = {}) => {
const { failed, recentlyFailed } = summary;
if (!failed || !recentlyFailed) return '';
return i18n.recentFailureSummary(recentlyFailed, failed);
};
export const reportSubTextBuilder = ({ suite_errors, summary }) => {
if (suite_errors?.head || suite_errors?.base) {
const errors = [];
if (suite_errors?.head) {
errors.push(`${i18n.headReportParsingError} ${suite_errors.head}`);
}
if (suite_errors?.base) {
errors.push(`${i18n.baseReportParsingError} ${suite_errors.base}`);
}
return errors.join('<br />');
}
return recentFailuresTextBuilder(summary);
};
export const countRecentlyFailedTests = (subject) => {
// handle either a single report or an array of reports
const reports = !subject.length ? [subject] : subject;
return reports
.map((report) => {
return (
[report.new_failures, report.existing_failures, report.resolved_failures]
// only count tests which have failed more than once
.map(
(failureArray) =>
failureArray.filter((failure) => failure.recent_failures?.count > 1).length,
)
.reduce((total, count) => total + count, 0)
);
})
.reduce((total, count) => total + count, 0);
};

View File

@@ -195,6 +195,9 @@ export default {
shouldRenderTestReport() {
return Boolean(this.mr?.testResultsPath);
},
shouldRenderRefactoredTestReport() {
return window.gon?.features?.refactorMrWidgetTestSummary;
},
mergeError() {
let { mergeError } = this.mr;
@@ -512,7 +515,7 @@ export default {
}
},
registerTestReportExtension() {
if (this.shouldRenderTestReport && this.shouldShowExtension) {
if (this.shouldRenderTestReport && this.shouldRenderRefactoredTestReport) {
registerExtension(testReportExtension);
}
},
@@ -588,7 +591,7 @@ export default {
/>
<grouped-test-reports-app
v-if="mr.testResultsPath && !shouldShowExtension"
v-if="shouldRenderTestReport && !shouldRenderRefactoredTestReport"
class="js-reports-container"
:endpoint="mr.testResultsPath"
:head-blob-path="mr.headBlobPath"

View File

@@ -39,6 +39,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationController
push_frontend_feature_flag(:confidential_notes, project, default_enabled: :yaml)
push_frontend_feature_flag(:restructured_mr_widget, project, default_enabled: :yaml)
push_frontend_feature_flag(:refactor_mr_widgets_extensions, project, default_enabled: :yaml)
push_frontend_feature_flag(:refactor_mr_widget_test_summary, project, default_enabled: :yaml)
push_frontend_feature_flag(:rebase_without_ci_ui, project, default_enabled: :yaml)
push_frontend_feature_flag(:secure_vulnerability_training, project, default_enabled: :yaml)
push_frontend_feature_flag(:issue_assignees_widget, @project, default_enabled: :yaml)

View File

@@ -8,8 +8,11 @@ module LazyImageTagHelper
end
# Override the default ActionView `image_tag` helper to support lazy-loading
# accept :auto_dark boolean to enable automatic dark variant of the image
# (see: https://gitlab.com/gitlab-org/gitlab-ui/-/merge_requests/2698)
# accept :dark_variant path to be used as a source when dark mode is enabled
def image_tag(source, options = {})
source = options[:dark_variant] if options[:dark_variant] && user_application_dark_mode?
source, options = prepare_dark_variant(source, options)
options = options.symbolize_keys
unless options.delete(:lazy) == false
@@ -29,4 +32,25 @@ module LazyImageTagHelper
# Required for Banzai::Filter::ImageLazyLoadFilter
module_function :placeholder_image # rubocop: disable Style/AccessModifierDeclarations
private
def prepare_dark_variant(source, options)
dark_variant = options.delete(:dark_variant)
auto_dark = options.delete(:auto_dark)
if dark_variant && auto_dark
raise ArgumentError, "dark_variant and auto_dark are mutually exclusive"
end
if (auto_dark || dark_variant) && user_application_dark_mode?
if auto_dark
options[:class] = 'gl-dark-invert-keep-hue'
elsif dark_variant
source = dark_variant
end
end
[source, options]
end
end
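For reference, the two options accepted by the overridden helper above are used like this; a minimal usage sketch, where only the `merge_requests.svg` call appears elsewhere in this commit and the other paths are illustrative:

```ruby
# auto_dark: keep the original asset and let gitlab-ui invert it in
# dark mode (adds the gl-dark-invert-keep-hue class):
image_tag 'illustrations/merge_requests.svg', auto_dark: true

# dark_variant: swap in a dedicated dark asset when dark mode is on
# (the *_dark.svg path here is illustrative):
image_tag 'illustrations/chart.svg', dark_variant: 'illustrations/chart_dark.svg'

# Passing both options raises ArgumentError: they are mutually exclusive.
```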

View File

@@ -43,7 +43,8 @@ class ContainerRepository < ApplicationRecord
migration_canceled: 4,
not_found: 5,
native_import: 6,
migration_forced_canceled: 7
migration_forced_canceled: 7,
migration_canceled_by_registry: 8
}
delegate :client, :gitlab_api_client, to: :registry
@@ -214,7 +215,7 @@ class ContainerRepository < ApplicationRecord
container_repository.migration_skipped_at = Time.zone.now
end
before_transition any => %i[import_done import_aborted] do |container_repository|
before_transition any => %i[import_done import_aborted import_skipped] do |container_repository|
container_repository.run_after_commit do
::ContainerRegistry::Migration::EnqueuerWorker.perform_async
end
@@ -328,7 +329,7 @@ class ContainerRepository < ApplicationRecord
when 'import_canceled', 'pre_import_canceled'
return if import_skipped?
skip_import(reason: :migration_canceled)
skip_import(reason: :migration_canceled_by_registry)
when 'import_complete'
finish_import
when 'import_failed'
@@ -376,6 +377,10 @@ class ContainerRepository < ApplicationRecord
migration_retries_count >= ContainerRegistry::Migration.max_retries
end
def nearing_or_exceeded_retry_limit?
migration_retries_count >= ContainerRegistry::Migration.max_retries - 1
end
def last_import_step_done_at
[migration_pre_import_done_at, migration_import_done_at, migration_aborted_at, migration_skipped_at].compact.max
end
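A quick illustration of how the new predicate relates to the existing one, assuming a hypothetical `ContainerRegistry::Migration.max_retries` of 3:

```ruby
max_retries = 3 # hypothetical value of ContainerRegistry::Migration.max_retries

(1..3).map do |retries|
  {
    retries: retries,
    retried_too_many_times: retries >= max_retries,              # true only at 3
    nearing_or_exceeded_retry_limit: retries >= max_retries - 1  # true at 2 and 3
  }
end
```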

View File

@@ -24,15 +24,9 @@ class Key < ApplicationRecord
length: { maximum: 5000 },
format: { with: /\A(#{Gitlab::SSHPublicKey.supported_algorithms.join('|')})/ }
validates :fingerprint,
uniqueness: true,
presence: { message: 'cannot be generated' },
unless: -> { Gitlab::FIPS.enabled? }
validates :fingerprint_sha256,
uniqueness: true,
presence: { message: 'cannot be generated' },
if: -> { Gitlab::FIPS.enabled? }
presence: { message: 'cannot be generated' }
validate :key_meets_restrictions

View File

@@ -83,8 +83,10 @@ module QuickActions
args.map! { _1.gsub(/\\_/, '_') }
usernames = (args - ['me']).map { _1.delete_prefix('@') }
found = User.by_username(usernames).to_a.select { can?(:read_user, _1) }
found_names = found.map(&:username).to_set
missing = args.reject { |arg| arg == 'me' || found_names.include?(arg.delete_prefix('@')) }.map { "'#{_1}'" }
found_names = found.map(&:username).map(&:downcase).to_set
missing = args.reject do |arg|
arg == 'me' || found_names.include?(arg.downcase.delete_prefix('@'))
end.map { "'#{_1}'" }
failed_parse(format(_("Failed to find users for %{missing}"), missing: missing.to_sentence)) if missing.present?
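The change above makes the "missing user" check case-insensitive, matching `User.by_username`, which already looks users up without regard to case. A minimal sketch of the new comparison, with illustrative usernames:

```ruby
require 'set'

# Usernames of the users that were actually found, normalized to lowercase:
found_names = ['UserA'].map(&:downcase).to_set

args = ['me', '@usera', '@ghost']
missing = args.reject do |arg|
  arg == 'me' || found_names.include?(arg.downcase.delete_prefix('@'))
end.map { "'#{_1}'" }
# => ["'@ghost'"]  -- before this fix, '@usera' was also reported as missing
```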

View File

@@ -6,6 +6,7 @@ module ServicePing
STAGING_BASE_URL = 'https://gitlab-services-version-gitlab-com-staging.gs-staging.gitlab.org'
USAGE_DATA_PATH = 'usage_data'
ERROR_PATH = 'usage_ping_errors'
METADATA_PATH = 'usage_ping_metadata'
SubmissionError = Class.new(StandardError)
@@ -31,7 +32,7 @@ module ServicePing
message: e.message,
elapsed: (Time.current - start).round(1)
}
submit_payload({ error: error_payload }, url: error_url)
submit_payload({ error: error_payload }, path: ERROR_PATH)
usage_data = Gitlab::Usage::ServicePingReport.for(output: :all_metrics_values)
response = submit_usage_data_payload(usage_data)
@@ -48,21 +49,30 @@ module ServicePing
raw_usage_data.update_version_metadata!(usage_data_id: version_usage_data_id)
DevopsReportService.new(response).execute
end
end
def url
URI.join(base_url, USAGE_DATA_PATH)
end
return unless Feature.enabled?(:measure_service_ping_metric_collection, default_enabled: :yaml)
def error_url
URI.join(base_url, ERROR_PATH)
submit_payload({ metadata: { metrics: metrics_collection_time(usage_data) } }, path: METADATA_PATH)
end
private
def submit_payload(payload, url: self.url)
def metrics_collection_time(payload, parents = [])
return [] unless payload.is_a?(Hash)
payload.flat_map do |key, metric_value|
key_path = parents.dup.append(key)
if metric_value.respond_to?(:duration)
{ name: key_path.join('.'), time_elapsed: metric_value.duration }
else
metrics_collection_time(metric_value, key_path)
end
end
end
def submit_payload(payload, path: USAGE_DATA_PATH)
Gitlab::HTTP.post(
url,
URI.join(base_url, path),
body: payload.to_json,
allow_local_requests: true,
headers: { 'Content-type' => 'application/json' }
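The new `metrics_collection_time` walks the (possibly nested) payload and emits one `{ name:, time_elapsed: }` entry per metric whose value responds to `#duration` (the `LegacyMetricTimingDecorator` added later in this commit). A runnable sketch, with a stand-in struct instead of the decorator:

```ruby
# Timed stands in for LegacyMetricTimingDecorator: anything that
# responds to #duration is treated as a measured metric value.
Timed = Struct.new(:value, :duration)

def metrics_collection_time(payload, parents = [])
  return [] unless payload.is_a?(Hash)

  payload.flat_map do |key, metric_value|
    key_path = parents.dup.append(key)
    if metric_value.respond_to?(:duration)
      { name: key_path.join('.'), time_elapsed: metric_value.duration }
    else
      metrics_collection_time(metric_value, key_path)
    end
  end
end

payload = { counts: { issues: Timed.new(10, 0.4), boards: Timed.new(2, 0.1) } }
metrics_collection_time(payload)
# => [{ name: "counts.issues", time_elapsed: 0.4 },
#     { name: "counts.boards", time_elapsed: 0.1 }]
```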

View File

@@ -34,7 +34,7 @@
= render 'groups/settings/ip_restriction_registration_features_cta', f: f
= render_if_exists 'groups/settings/ip_restriction', f: f, group: @group
= render_if_exists 'groups/settings/allowed_email_domain', f: f, group: @group
- if Feature.enabled?(:group_wiki_settings_toggle, @group, default_enabled: :yaml)
- if @group.licensed_feature_available?(:group_wikis) && Feature.enabled?(:group_wiki_settings_toggle, @group, default_enabled: :yaml)
= render_if_exists 'groups/settings/wiki', f: f, group: @group
= render 'groups/settings/lfs', f: f
= render 'groups/settings/project_creation_level', f: f, group: @group

View File

@@ -10,7 +10,7 @@
.row.empty-state.merge-requests
.col-12
.svg-content
= image_tag 'illustrations/merge_requests.svg'
= image_tag 'illustrations/merge_requests.svg', { auto_dark: true }
.col-12
.text-content
- if has_filter_bar_param?

View File

@@ -93,7 +93,7 @@ module ContainerRegistry
end
def long_running_migration_threshold
@threshold ||= 30.minutes.ago
@threshold ||= 10.minutes.ago
end
def cancel_long_running_migration(repository)
@@ -101,7 +101,11 @@ module ContainerRegistry
case result[:status]
when :ok
repository.skip_import(reason: :migration_canceled)
if repository.nearing_or_exceeded_retry_limit?
repository.skip_import(reason: :migration_canceled)
else
repository.abort_import
end
when :bad_request
repository.reconcile_import_status(result[:state]) do
repository.abort_import

View File

@@ -0,0 +1,8 @@
---
name: container_registry_migration_phase2_capacity_2
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/85277
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/350543
milestone: '14.10'
type: development
group: group::package
default_enabled: false

View File

@@ -0,0 +1,8 @@
---
name: measure_service_ping_metric_collection
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/82607
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/358128
milestone: '15.0'
type: development
group: group::product intelligence
default_enabled: false

View File

@@ -0,0 +1,8 @@
---
name: refactor_mr_widget_test_summary
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/83631
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/358208
milestone: '15.0'
type: development
group: group::pipeline insights
default_enabled: false

View File

@@ -1,13 +1,13 @@
- name: "Package pipelines in API payload is paginated" # The name of the feature to be deprecated
announcement_milestone: "14.5" # The milestone when this feature was first announced as deprecated.
announcement_date: "2021-11-22" # The date of the milestone release when this feature was first announced as deprecated. This should almost always be the 22nd of a month (YYYY-MM-22), unless you did an out of band blog post.
removal_milestone: "15.0" # The milestone when this feature is planned to be removed
removal_date: "2022-05-22" # the date of the milestone release when this feature is planned to be removed
removal_milestone: "16.0" # The milestone when this feature is planned to be removed
removal_date: "2023-05-22" # the date of the milestone release when this feature is planned to be removed
breaking_change: true
body: | # Do not modify this line, instead modify the lines below.
A request to the API for `/api/v4/projects/:id/packages` returns a paginated result of packages. Each package lists all of its pipelines in this response. This is a performance concern, as it's possible for a package to have hundreds or thousands of associated pipelines.
In milestone 15.0, we will remove the `pipelines` attribute from the API response.
In milestone 16.0, we will remove the `pipelines` attribute from the API response.
stage: package
tiers: Free
issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/289956

View File

@@ -1,13 +0,0 @@
- name: "`pipelines` fields in the Package GraphQL types"
announcement_milestone: "14.6" # The milestone when this feature was first announced as deprecated.
announcement_date: "2021-12-22" # The date of the milestone release when this feature was first announced as deprecated. This should almost always be the 22nd of a month (YYYY-MM-22), unless you did an out of band blog post.
removal_milestone: "15.0" # The milestone when this feature is planned to be removed
removal_date: "2022-05-22" # the date of the milestone release when this feature is planned to be removed
breaking_change: true
body: | # Do not modify this line, instead modify the lines below.
As part of the work to create a [Package Registry GraphQL API](https://gitlab.com/groups/gitlab-org/-/epics/6318), the Package group deprecated the `pipelines` fields in all Package-related GraphQL types. As of GitLab 14.6, the `pipelines` field is deprecated in [`Package`](https://docs.gitlab.com/ee/api/graphql/reference/index.html#package) and [`PackageDetailsType`](https://docs.gitlab.com/ee/api/graphql/reference/index.html#packagedetailstype) due to scalability and performance concerns.
In milestone 15.0, we will completely remove `pipelines` from `Package` and `PackageDetailsType`. You can follow and contribute to work on a replacement in the epic [GitLab-#7214](https://gitlab.com/groups/gitlab-org/-/epics/7214).
stage: package
tiers: Free
issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/347219

View File

@@ -3,7 +3,7 @@ table_name: coverage_fuzzing_corpuses
classes:
- AppSec::Fuzzing::Coverage::Corpus
feature_categories:
- code_quality
description: TODO
- fuzz_testing
description: Stores additional values describing corpuses used by coverage fuzzing
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/71704
milestone: '14.4'

View File

@@ -50,7 +50,8 @@ classes:
- Integrations::Zentao
feature_categories:
- integrations
description: Support 3rd party integrations: Jira, Slack, etc., formerly services table.
description: |
Support 3rd party integrations: Jira, Slack, etc., formerly services table.
https://gitlab.com/gitlab-org/gitlab/-/commit/1dab19d0d7b25cb5af27b8d10c8b615b2d38c2cf
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/64562
milestone: '9.4'

View File

@@ -0,0 +1,18 @@
# frozen_string_literal: true
class AddUniqueFingerprintSha256IndexToKey < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
OLD_INDEX_NAME = 'index_keys_on_fingerprint_sha256'
NEW_INDEX_NAME = 'index_keys_on_fingerprint_sha256_unique'
def up
add_concurrent_index :keys, :fingerprint_sha256, unique: true, name: NEW_INDEX_NAME
remove_concurrent_index_by_name :keys, OLD_INDEX_NAME
end
def down
add_concurrent_index :keys, :fingerprint_sha256, name: OLD_INDEX_NAME
remove_concurrent_index_by_name :keys, NEW_INDEX_NAME
end
end

View File

@@ -0,0 +1,18 @@
# frozen_string_literal: true
class AddUniqueFingerprintSha256IndexToGroupDeployKey < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
OLD_INDEX_NAME = 'index_group_deploy_keys_on_fingerprint_sha256'
NEW_INDEX_NAME = 'index_group_deploy_keys_on_fingerprint_sha256_unique'
def up
add_concurrent_index :group_deploy_keys, :fingerprint_sha256, unique: true, name: NEW_INDEX_NAME
remove_concurrent_index_by_name :group_deploy_keys, OLD_INDEX_NAME
end
def down
add_concurrent_index :group_deploy_keys, :fingerprint_sha256, name: OLD_INDEX_NAME
remove_concurrent_index_by_name :group_deploy_keys, NEW_INDEX_NAME
end
end

View File

@@ -0,0 +1,17 @@
# frozen_string_literal: true
class DropUniqueFingerprintMd5IndexFromKey < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
INDEX_NAME = 'index_keys_on_fingerprint'
def up
remove_concurrent_index_by_name :keys, INDEX_NAME
add_concurrent_index :keys, :fingerprint, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :keys, INDEX_NAME
add_concurrent_index :keys, :fingerprint, unique: true, name: INDEX_NAME
end
end

View File

@@ -0,0 +1,17 @@
# frozen_string_literal: true
class DropUniqueFingerprintMd5IndexFromGroupDeployKey < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
INDEX_NAME = 'index_group_deploy_keys_on_fingerprint'
def up
remove_concurrent_index_by_name :group_deploy_keys, INDEX_NAME
add_concurrent_index :group_deploy_keys, :fingerprint, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :group_deploy_keys, INDEX_NAME
add_concurrent_index :group_deploy_keys, :fingerprint, unique: true, name: INDEX_NAME
end
end

View File

@@ -0,0 +1 @@
ec0dbbc2963943fd1449503c141fd8e84e5c0f58ab6b845734a61bedcd7da44a

View File

@@ -0,0 +1 @@
28d84269536b34084bf060d72592119c580ad92ff9de481dd67770f07fc26ab4

View File

@@ -0,0 +1 @@
95353be853064cd1038e8a416254017f33c42bfadf97eca6732a62c0796018f9

View File

@@ -0,0 +1 @@
bf0696047c736e361225ce33ee750def26ae2f9bd2ece9065799fd9514edbfcc

View File

@@ -27811,9 +27811,9 @@ CREATE UNIQUE INDEX index_group_deploy_keys_group_on_group_deploy_key_and_group_
CREATE INDEX index_group_deploy_keys_groups_on_group_deploy_key_id ON group_deploy_keys_groups USING btree (group_deploy_key_id);
CREATE UNIQUE INDEX index_group_deploy_keys_on_fingerprint ON group_deploy_keys USING btree (fingerprint);
CREATE INDEX index_group_deploy_keys_on_fingerprint ON group_deploy_keys USING btree (fingerprint);
CREATE INDEX index_group_deploy_keys_on_fingerprint_sha256 ON group_deploy_keys USING btree (fingerprint_sha256);
CREATE UNIQUE INDEX index_group_deploy_keys_on_fingerprint_sha256_unique ON group_deploy_keys USING btree (fingerprint_sha256);
CREATE INDEX index_group_deploy_keys_on_user_id ON group_deploy_keys USING btree (user_id);
@@ -28051,9 +28051,9 @@ CREATE INDEX index_job_artifact_states_pending_verification ON ci_job_artifact_s
CREATE INDEX index_keys_on_expires_at_and_id ON keys USING btree (date(timezone('UTC'::text, expires_at)), id) WHERE (expiry_notification_delivered_at IS NULL);
CREATE UNIQUE INDEX index_keys_on_fingerprint ON keys USING btree (fingerprint);
CREATE INDEX index_keys_on_fingerprint ON keys USING btree (fingerprint);
CREATE INDEX index_keys_on_fingerprint_sha256 ON keys USING btree (fingerprint_sha256);
CREATE UNIQUE INDEX index_keys_on_fingerprint_sha256_unique ON keys USING btree (fingerprint_sha256);
CREATE INDEX index_keys_on_id_and_ldap_key_type ON keys USING btree (id) WHERE ((type)::text = 'LDAPKey'::text);

View File

@@ -1664,7 +1664,7 @@ pricing page. For example:
You must assign a tier badge:
- To all H1 topic headings.
- To all H1 topic headings, except the pages under `doc/development/*`.
- To topic headings that don't apply to the same tier as the H1.
To add a tier badge to a heading, add the relevant tier badge

View File

@@ -1208,20 +1208,6 @@ Runners that have never contacted the GitLab instance will also return `stale` i
**Planned removal milestone: 15.0 (2022-05-22)**
### `pipelines` fields in the Package GraphQL types
WARNING:
This feature will be changed or removed in 15.0
as a [breaking change](https://docs.gitlab.com/ee/development/contributing/#breaking-changes).
Before updating GitLab, review the details carefully to determine if you need to make any
changes to your code, settings, or workflow.
As part of the work to create a [Package Registry GraphQL API](https://gitlab.com/groups/gitlab-org/-/epics/6318), the Package group deprecated the `pipelines` fields in all Package-related GraphQL types. As of GitLab 14.6, the `pipelines` field is deprecated in [`Package`](https://docs.gitlab.com/ee/api/graphql/reference/index.html#package) and [`PackageDetailsType`](https://docs.gitlab.com/ee/api/graphql/reference/index.html#packagedetailstype) due to scalability and performance concerns.
In milestone 15.0, we will completely remove `pipelines` from `Package` and `PackageDetailsType`. You can follow and contribute to work on a replacement in the epic [GitLab-#7214](https://gitlab.com/groups/gitlab-org/-/epics/7214).
**Planned removal milestone: 15.0 (2022-05-22)**
### `type` and `types` keyword in CI/CD configuration
WARNING:
@@ -1334,16 +1320,16 @@ Prior to 14.5, if you did not define the `AuthenticationType`, GitLab Runner cho
### Package pipelines in API payload is paginated
WARNING:
This feature will be changed or removed in 15.0
This feature will be changed or removed in 16.0
as a [breaking change](https://docs.gitlab.com/ee/development/contributing/#breaking-changes).
Before updating GitLab, review the details carefully to determine if you need to make any
changes to your code, settings, or workflow.
A request to the API for `/api/v4/projects/:id/packages` returns a paginated result of packages. Each package lists all of its pipelines in this response. This is a performance concern, as it's possible for a package to have hundreds or thousands of associated pipelines.
In milestone 15.0, we will remove the `pipelines` attribute from the API response.
In milestone 16.0, we will remove the `pipelines` attribute from the API response.
**Planned removal milestone: 15.0 (2022-05-22)**
**Planned removal milestone: 16.0 (2023-05-22)**
### REST and GraphQL API Runner status will not return `paused`

View File

@@ -429,7 +429,7 @@ that may remain stuck permanently in a **pending** state.
```ruby
Gitlab::Database::BackgroundMigrationJob.pending.where(class_name: "PopulateTopicsNonPrivateProjectsCount").find_each do |job|
puts Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded("PopulateTopicsNonPrivateProjectsCountq", job.arguments)
puts Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded("PopulateTopicsNonPrivateProjectsCount", job.arguments)
end
```

View File

@@ -251,6 +251,7 @@ run tests:
- coverage run -m pytest
- coverage report
- coverage xml
coverage: '/TOTAL.*\s([.\d]+)%/'
artifacts:
reports:
cobertura: coverage.xml
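The added `coverage:` keyword tells GitLab to extract a total from the job log; its regex captures the percentage printed by `coverage report`. An illustration of what it matches, using an example summary line (the figures are made up):

```ruby
# Example `coverage report` summary line, and the group the regex captures:
line = 'TOTAL                              112      4    96%'
line[/TOTAL.*\s([.\d]+)%/, 1]
# => "96"
```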

View File

@@ -175,8 +175,8 @@ module API
mount ::API::BulkImports
mount ::API::Ci::JobArtifacts
mount ::API::Ci::Jobs
mount ::API::Ci::Pipelines
mount ::API::Ci::PipelineSchedules
mount ::API::Ci::Pipelines
mount ::API::Ci::ResourceGroups
mount ::API::Ci::Runner
mount ::API::Ci::Runners
@@ -184,14 +184,20 @@ module API
mount ::API::Ci::Triggers
mount ::API::Ci::Variables
mount ::API::Clusters::Agents
mount ::API::Commits
mount ::API::CommitStatuses
mount ::API::Commits
mount ::API::ComposerPackages
mount ::API::ConanInstancePackages
mount ::API::ConanProjectPackages
mount ::API::ContainerRegistryEvent
mount ::API::ContainerRepositories
mount ::API::DebianGroupPackages
mount ::API::DebianProjectPackages
mount ::API::DependencyProxy
mount ::API::DeployKeys
mount ::API::DeployTokens
mount ::API::Deployments
mount ::API::Discussions
mount ::API::Environments
mount ::API::ErrorTracking::ClientKeys
mount ::API::ErrorTracking::Collector
@@ -202,87 +208,78 @@ module API
mount ::API::Features
mount ::API::Files
mount ::API::FreezePeriods
mount ::API::GenericPackages
mount ::API::Geo
mount ::API::GoProxy
mount ::API::GroupAvatar
mount ::API::GroupBoards
mount ::API::GroupClusters
mount ::API::GroupContainerRepositories
mount ::API::GroupDebianDistributions
mount ::API::GroupExport
mount ::API::GroupImport
mount ::API::GroupLabels
mount ::API::GroupMilestones
mount ::API::Groups
mount ::API::GroupContainerRepositories
mount ::API::GroupDebianDistributions
mount ::API::GroupPackages
mount ::API::GroupVariables
mount ::API::Groups
mount ::API::HelmPackages
mount ::API::ImportBitbucketServer
mount ::API::ImportGithub
mount ::API::IssueLinks
mount ::API::Integrations
mount ::API::Invitations
mount ::API::IssueLinks
mount ::API::Issues
mount ::API::Keys
mount ::API::Labels
mount ::API::Lint
mount ::API::Markdown
mount ::API::MavenPackages
mount ::API::Members
mount ::API::MergeRequestApprovals
mount ::API::MergeRequestDiffs
mount ::API::MergeRequests
mount ::API::MergeRequestApprovals
mount ::API::Metrics::Dashboard::Annotations
mount ::API::Metrics::UserStarredDashboards
mount ::API::Namespaces
mount ::API::Notes
mount ::API::Discussions
mount ::API::ResourceLabelEvents
mount ::API::ResourceMilestoneEvents
mount ::API::ResourceStateEvents
mount ::API::NotificationSettings
mount ::API::ProjectPackages
mount ::API::GroupPackages
mount ::API::PackageFiles
mount ::API::NugetProjectPackages
mount ::API::NugetGroupPackages
mount ::API::PypiPackages
mount ::API::ComposerPackages
mount ::API::ConanProjectPackages
mount ::API::ConanInstancePackages
mount ::API::DebianGroupPackages
mount ::API::DebianProjectPackages
mount ::API::MavenPackages
mount ::API::NpmProjectPackages
mount ::API::NpmInstancePackages
mount ::API::GenericPackages
mount ::API::GoProxy
mount ::API::HelmPackages
mount ::API::NpmProjectPackages
mount ::API::NugetGroupPackages
mount ::API::NugetProjectPackages
mount ::API::PackageFiles
mount ::API::Pages
mount ::API::PagesDomains
mount ::API::PersonalAccessTokens
mount ::API::ProjectClusters
mount ::API::ProjectContainerRepositories
mount ::API::ProjectDebianDistributions
mount ::API::ProjectEvents
mount ::API::ProjectExport
mount ::API::ProjectImport
mount ::API::ProjectHooks
mount ::API::ProjectImport
mount ::API::ProjectMilestones
mount ::API::ProjectPackages
mount ::API::ProjectRepositoryStorageMoves
mount ::API::Projects
mount ::API::ProjectSnapshots
mount ::API::ProjectSnippets
mount ::API::ProjectStatistics
mount ::API::ProjectTemplates
mount ::API::Terraform::State
mount ::API::Terraform::StateVersion
mount ::API::Terraform::Modules::V1::Packages
mount ::API::PersonalAccessTokens
mount ::API::Projects
mount ::API::ProtectedBranches
mount ::API::ProtectedTags
mount ::API::Releases
mount ::API::PypiPackages
mount ::API::Release::Links
mount ::API::Releases
mount ::API::RemoteMirrors
mount ::API::Repositories
mount ::API::ResourceAccessTokens
mount ::API::ResourceLabelEvents
mount ::API::ResourceMilestoneEvents
mount ::API::ResourceStateEvents
mount ::API::RubygemPackages
mount ::API::Search
mount ::API::Integrations
mount ::API::Settings
mount ::API::SidekiqMetrics
mount ::API::SnippetRepositoryStorageMoves
@@ -294,12 +291,15 @@ module API
mount ::API::SystemHooks
mount ::API::Tags
mount ::API::Templates
mount ::API::Terraform::Modules::V1::Packages
mount ::API::Terraform::State
mount ::API::Terraform::StateVersion
mount ::API::Todos
mount ::API::Topics
mount ::API::Unleash
mount ::API::UsageData
mount ::API::UsageDataQueries
mount ::API::UsageDataNonSqlMetrics
mount ::API::UsageDataQueries
mount ::API::UserCounts
mount ::API::Users
mount ::API::Version

View File

@@ -160,7 +160,17 @@ module API
def find_group!(id)
group = find_group(id)
check_group_access(group)
end
# rubocop: disable CodeReuse/ActiveRecord
def find_group_by_full_path!(full_path)
group = Group.find_by_full_path(full_path)
check_group_access(group)
end
# rubocop: enable CodeReuse/ActiveRecord
def check_group_access(group)
return group if can?(current_user, :read_group, group)
return unauthorized! if authenticate_non_public?

View File

@@ -0,0 +1,52 @@
# frozen_string_literal: true
module BulkImports
module Common
module Extractors
class JsonExtractor
def initialize(relation:)
@relation = relation
@tmpdir = Dir.mktmpdir
end
def extract(context)
download_service(context).execute
decompression_service.execute
attributes = ndjson_reader.consume_attributes(relation)
BulkImports::Pipeline::ExtractedData.new(data: attributes)
end
def remove_tmpdir
FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
end
private
attr_reader :relation, :tmpdir
def filename
"#{relation}.json.gz"
end
def download_service(context)
@download_service ||= BulkImports::FileDownloadService.new(
configuration: context.configuration,
relative_url: context.entity.relation_download_url_path(relation),
tmpdir: tmpdir,
filename: filename
)
end
def decompression_service
@decompression_service ||= BulkImports::FileDecompressionService.new(tmpdir: tmpdir, filename: filename)
end
def ndjson_reader
@ndjson_reader ||= Gitlab::ImportExport::Json::NdjsonReader.new(tmpdir)
end
end
end
end
end

View File

@@ -0,0 +1,35 @@
# frozen_string_literal: true
module BulkImports
module Groups
module Pipelines
class GroupAttributesPipeline
include Pipeline
ndjson_pipeline!
relation_name 'self'
extractor ::BulkImports::Common::Extractors::JsonExtractor, relation: relation
transformer ::BulkImports::Common::Transformers::ProhibitedAttributesTransformer
def transform(_context, data)
return unless data
data.symbolize_keys.slice(:membership_lock)
end
def load(_context, data)
return unless data
::Groups::UpdateService.new(portable, current_user, data).execute
end
def after_run(_context)
extractor.remove_tmpdir
end
end
end
end
end

View File

@@ -0,0 +1,43 @@
# frozen_string_literal: true
module BulkImports
module Groups
module Pipelines
class NamespaceSettingsPipeline
include Pipeline
ndjson_pipeline!
relation_name 'namespace_settings'
extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: relation
transformer ::BulkImports::Common::Transformers::ProhibitedAttributesTransformer
def transform(_context, data)
return unless data
data.first.symbolize_keys.slice(*allowed_attributes)
end
def load(_context, data)
return unless data
::Groups::UpdateService.new(portable, current_user, data).execute
end
def after_run(_context)
extractor.remove_tmpdir
end
private
def allowed_attributes
Gitlab::ImportExport::Config.new(
config: Gitlab::ImportExport.group_config_file
).to_h.dig(:included_attributes, :namespace_settings)
end
end
end
end
end
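Under the group import/export config updated later in this commit, the `allowed_attributes` lookup resolves to the whitelist defined under `included_attributes.namespace_settings`; a sketch of the dig (the exact return shape depends on how the YAML keys are loaded):

```ruby
config = Gitlab::ImportExport::Config.new(
  config: Gitlab::ImportExport.group_config_file
).to_h

config.dig(:included_attributes, :namespace_settings)
# => [:prevent_sharing_groups_outside_hierarchy]
```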

View File

@@ -11,10 +11,18 @@ module BulkImports
pipeline: BulkImports::Groups::Pipelines::GroupPipeline,
stage: 0
},
group_attributes: {
pipeline: BulkImports::Groups::Pipelines::GroupAttributesPipeline,
stage: 1
},
subgroups: {
pipeline: BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline,
stage: 1
},
namespace_settings: {
pipeline: BulkImports::Groups::Pipelines::NamespaceSettingsPipeline,
stage: 1
},
members: {
pipeline: BulkImports::Common::Pipelines::MembersPipeline,
stage: 1

View File

@@ -52,6 +52,7 @@ module ContainerRegistry
#
return 25 if Feature.enabled?(:container_registry_migration_phase2_capacity_25)
return 10 if Feature.enabled?(:container_registry_migration_phase2_capacity_10)
return 2 if Feature.enabled?(:container_registry_migration_phase2_capacity_2)
return 1 if Feature.enabled?(:container_registry_migration_phase2_capacity_1)
0

View File

@@ -16,6 +16,7 @@ tree:
- :board
- members:
- :user
- :namespace_settings
included_attributes:
user:
@@ -24,6 +25,8 @@ included_attributes:
- :username
author:
- :name
namespace_settings:
- :prevent_sharing_groups_outside_hierarchy
excluded_attributes:
group:

View File

@@ -30,6 +30,10 @@ module Gitlab
update_group_references
end
def invalid_relation?
@relation_name == :namespace_settings
end
def update_group_references
return unless self.class.existing_object_relations.include?(@relation_name)
return unless @relation_hash['group_id']

View File

@@ -0,0 +1,18 @@
# frozen_string_literal: true
module Gitlab
module Usage
module ServicePing
class LegacyMetricTimingDecorator < SimpleDelegator
attr_reader :duration
delegate :class, :is_a?, :kind_of?, to: :__getobj__
def initialize(value, duration)
@duration = duration
super(value)
end
end
end
end
end
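Because the decorator delegates `class`, `is_a?`, and `kind_of?` back to the wrapped object, a timed metric still behaves like its raw value in comparisons and serialization; a minimal sketch:

```ruby
timed = Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator.new(42, 0.25)

timed == 42           # => true  (SimpleDelegator forwards the comparison)
timed.class           # => Integer (class is delegated to the wrapped value)
timed.is_a?(Integer)  # => true
timed.duration        # => 0.25  (the extra timing information)
```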

View File

@@ -308,7 +308,7 @@ module Gitlab
Settings[component]['object_store']
end
if config
if config.present?
{
enabled: alt_usage_data { Settings[component]['enabled'] },
object_store: {
@@ -684,6 +684,17 @@ module Gitlab
.merge!(ide_monthly_active_users(date_range))
end
def with_duration
return yield unless Feature.enabled?(:measure_service_ping_metric_collection, default_enabled: :yaml)
result = nil
duration = Benchmark.realtime do
result = yield
end
::Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator.new(result, duration)
end
private
def stage_manage_events(time_period)

View File

@@ -5,6 +5,10 @@ module Gitlab
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/41091
class UsageDataQueries < UsageData
class << self
def with_duration
yield
end
def add_metric(metric, time_frame: 'none', options: {})
metric_class = "Gitlab::Usage::Metrics::Instrumentations::#{metric}".constantize

View File

@@ -44,57 +44,64 @@ module Gitlab
DISTRIBUTED_HLL_FALLBACK = -2
MAX_BUCKET_SIZE = 100
def with_duration
yield
end
def add_metric(metric, time_frame: 'none', options: {})
metric_class = "Gitlab::Usage::Metrics::Instrumentations::#{metric}".constantize
metric_class.new(time_frame: time_frame, options: options).value
end
def count(relation, column = nil, batch: true, batch_size: nil, start: nil, finish: nil)
if batch
Gitlab::Database::BatchCount.batch_count(relation, column, batch_size: batch_size, start: start, finish: finish)
else
relation.count
def count(relation, column = nil, batch: true, batch_size: nil, start: nil, finish: nil, start_at: Time.current)
with_duration do
if batch
Gitlab::Database::BatchCount.batch_count(relation, column, batch_size: batch_size, start: start, finish: finish)
else
relation.count
end
rescue ActiveRecord::StatementInvalid => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
FALLBACK
end
rescue ActiveRecord::StatementInvalid => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
FALLBACK
end
def distinct_count(relation, column = nil, batch: true, batch_size: nil, start: nil, finish: nil)
if batch
Gitlab::Database::BatchCount.batch_distinct_count(relation, column, batch_size: batch_size, start: start, finish: finish)
else
relation.distinct_count_by(column)
with_duration do
if batch
Gitlab::Database::BatchCount.batch_distinct_count(relation, column, batch_size: batch_size, start: start, finish: finish)
else
relation.distinct_count_by(column)
end
rescue ActiveRecord::StatementInvalid => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
FALLBACK
end
rescue ActiveRecord::StatementInvalid => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
FALLBACK
end
def estimate_batch_distinct_count(relation, column = nil, batch_size: nil, start: nil, finish: nil)
buckets = Gitlab::Database::PostgresHll::BatchDistinctCounter
.new(relation, column)
.execute(batch_size: batch_size, start: start, finish: finish)
with_duration do
buckets = Gitlab::Database::PostgresHll::BatchDistinctCounter
.new(relation, column)
.execute(batch_size: batch_size, start: start, finish: finish)
yield buckets if block_given?
yield buckets if block_given?
buckets.estimated_distinct_count
rescue ActiveRecord::StatementInvalid => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
FALLBACK
# catch all rescue should be removed as a part of feature flag rollout issue
# https://gitlab.com/gitlab-org/gitlab/-/issues/285485
rescue StandardError => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
DISTRIBUTED_HLL_FALLBACK
buckets.estimated_distinct_count
rescue ActiveRecord::StatementInvalid => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
FALLBACK
end
end
def sum(relation, column, batch_size: nil, start: nil, finish: nil)
Gitlab::Database::BatchCount.batch_sum(relation, column, batch_size: batch_size, start: start, finish: finish)
rescue ActiveRecord::StatementInvalid => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
FALLBACK
with_duration do
Gitlab::Database::BatchCount.batch_sum(relation, column, batch_size: batch_size, start: start, finish: finish)
rescue ActiveRecord::StatementInvalid => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
FALLBACK
end
end
# We don't support batching with histograms.
@@ -103,103 +110,113 @@ module Gitlab
#
# rubocop: disable CodeReuse/ActiveRecord
def histogram(relation, column, buckets:, bucket_size: buckets.size)
# Using lambda to avoid exposing histogram specific methods
parameters_valid = lambda do
error_message =
if buckets.first == buckets.last
'Lower bucket bound cannot equal to upper bucket bound'
elsif bucket_size == 0
'Bucket size cannot be zero'
elsif bucket_size > MAX_BUCKET_SIZE
"Bucket size #{bucket_size} exceeds the limit of #{MAX_BUCKET_SIZE}"
end
with_duration do
# Using lambda to avoid exposing histogram specific methods
parameters_valid = lambda do
error_message =
if buckets.first == buckets.last
'Lower bucket bound cannot equal to upper bucket bound'
elsif bucket_size == 0
'Bucket size cannot be zero'
elsif bucket_size > MAX_BUCKET_SIZE
"Bucket size #{bucket_size} exceeds the limit of #{MAX_BUCKET_SIZE}"
end
return true unless error_message
break true unless error_message
exception = ArgumentError.new(error_message)
exception.set_backtrace(caller)
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(exception)
exception = ArgumentError.new(error_message)
exception.set_backtrace(caller)
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(exception)
false
false
end
break HISTOGRAM_FALLBACK unless parameters_valid.call
count_grouped = relation.group(column).select(Arel.star.count.as('count_grouped'))
cte = Gitlab::SQL::CTE.new(:count_cte, count_grouped)
# For example, 9 segments gives 10 buckets
bucket_segments = bucket_size - 1
width_bucket = Arel::Nodes::NamedFunction
.new('WIDTH_BUCKET', [cte.table[:count_grouped], buckets.first, buckets.last, bucket_segments])
.as('buckets')
query = cte
.table
.project(width_bucket, cte.table[:count])
.group('buckets')
.order('buckets')
.with(cte.to_arel)
# Return the histogram as a Hash because buckets are unique.
relation
.connection
.exec_query(query.to_sql)
.rows
.to_h
# Keys are converted to strings in Usage Ping JSON
.stringify_keys
rescue ActiveRecord::StatementInvalid => e
Gitlab::AppJsonLogger.error(
event: 'histogram',
relation: relation.table_name,
operation: 'histogram',
operation_args: [column, buckets.first, buckets.last, bucket_segments],
query: query.to_sql,
message: e.message
)
# Raises error for dev env
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
HISTOGRAM_FALLBACK
end
return HISTOGRAM_FALLBACK unless parameters_valid.call
count_grouped = relation.group(column).select(Arel.star.count.as('count_grouped'))
cte = Gitlab::SQL::CTE.new(:count_cte, count_grouped)
# For example, 9 segments gives 10 buckets
bucket_segments = bucket_size - 1
width_bucket = Arel::Nodes::NamedFunction
.new('WIDTH_BUCKET', [cte.table[:count_grouped], buckets.first, buckets.last, bucket_segments])
.as('buckets')
query = cte
.table
.project(width_bucket, cte.table[:count])
.group('buckets')
.order('buckets')
.with(cte.to_arel)
# Return the histogram as a Hash because buckets are unique.
relation
.connection
.exec_query(query.to_sql)
.rows
.to_h
# Keys are converted to strings in Usage Ping JSON
.stringify_keys
rescue ActiveRecord::StatementInvalid => e
Gitlab::AppJsonLogger.error(
event: 'histogram',
relation: relation.table_name,
operation: 'histogram',
operation_args: [column, buckets.first, buckets.last, bucket_segments],
query: query.to_sql,
message: e.message
)
# Raises error for dev env
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
HISTOGRAM_FALLBACK
end
# rubocop: enable CodeReuse/ActiveRecord
def add(*args)
return -1 if args.any?(&:negative?)
with_duration do
break -1 if args.any?(&:negative?)
args.sum
rescue StandardError => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
FALLBACK
args.sum
rescue StandardError => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
FALLBACK
end
end
def alt_usage_data(value = nil, fallback: FALLBACK, &block)
if block_given?
yield
else
value
with_duration do
if block_given?
yield
else
value
end
rescue StandardError => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
fallback
end
rescue StandardError => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
fallback
end
def redis_usage_data(counter = nil, &block)
if block_given?
redis_usage_counter(&block)
elsif counter.present?
redis_usage_data_totals(counter)
with_duration do
if block_given?
redis_usage_counter(&block)
elsif counter.present?
redis_usage_data_totals(counter)
end
end
end
def with_prometheus_client(fallback: {}, verify: true)
client = prometheus_client(verify: verify)
return fallback unless client
with_duration do
client = prometheus_client(verify: verify)
break fallback unless client
yield client
rescue StandardError
fallback
yield client
rescue StandardError
fallback
end
end
def measure_duration
@@ -231,25 +248,28 @@ module Gitlab
# rubocop: disable UsageData/LargeTable:
def jira_integration_data
data = {
projects_jira_server_active: 0,
projects_jira_cloud_active: 0
}
with_duration do
data = {
projects_jira_server_active: 0,
projects_jira_cloud_active: 0
}
# rubocop: disable CodeReuse/ActiveRecord
::Integrations::Jira.active.includes(:jira_tracker_data).find_in_batches(batch_size: 100) do |services|
counts = services.group_by do |service|
# TODO: Simplify as part of https://gitlab.com/gitlab-org/gitlab/issues/29404
service_url = service.data_fields&.url || (service.properties && service.properties['url'])
service_url&.include?('.atlassian.net') ? :cloud : :server
# rubocop: disable CodeReuse/ActiveRecord
::Integrations::Jira.active.includes(:jira_tracker_data).find_in_batches(batch_size: 100) do |services|
counts = services.group_by do |service|
# TODO: Simplify as part of https://gitlab.com/gitlab-org/gitlab/issues/29404
service_url = service.data_fields&.url || (service.properties && service.properties['url'])
service_url&.include?('.atlassian.net') ? :cloud : :server
end
data[:projects_jira_server_active] += counts[:server].size if counts[:server]
data[:projects_jira_cloud_active] += counts[:cloud].size if counts[:cloud]
end
data[:projects_jira_server_active] += counts[:server].size if counts[:server]
data[:projects_jira_cloud_active] += counts[:cloud].size if counts[:cloud]
data
end
data
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: enable UsageData/LargeTable:
@@ -263,9 +283,11 @@ module Gitlab
end
def epics_deepest_relationship_level
# rubocop: disable UsageData/LargeTable
{ epics_deepest_relationship_level: ::Epic.deepest_relationship_level.to_i }
# rubocop: enable UsageData/LargeTable
with_duration do
# rubocop: disable UsageData/LargeTable
{ epics_deepest_relationship_level: ::Epic.deepest_relationship_level.to_i }
# rubocop: enable UsageData/LargeTable
end
end
private

View File

@@ -4413,6 +4413,9 @@ msgstr ""
msgid "ApplicationSettings|Email restrictions for sign-ups"
msgstr ""
msgid "ApplicationSettings|Enable Slack application"
msgstr ""
msgid "ApplicationSettings|Enable domain denylist for sign ups"
msgstr ""
@@ -4452,6 +4455,9 @@ msgstr ""
msgid "ApplicationSettings|Sign-up enabled"
msgstr ""
msgid "ApplicationSettings|This option is only available on GitLab.com"
msgstr ""
msgid "ApplicationSettings|Upload denylist file"
msgstr ""
@@ -31659,6 +31665,9 @@ msgstr ""
msgid "Reports|Filename"
msgstr ""
msgid "Reports|Fixed"
msgstr ""
msgid "Reports|Full report"
msgstr ""
@@ -31689,6 +31698,9 @@ msgstr ""
msgid "Reports|Metrics reports: %{strong_start}%{numberOfChanges}%{strong_end} %{changes}"
msgstr ""
msgid "Reports|New"
msgstr ""
msgid "Reports|Scanner"
msgstr ""
@@ -38796,9 +38808,6 @@ msgstr ""
msgid "This only applies to repository indexing operations."
msgstr ""
msgid "This option is only available on GitLab.com"
msgstr ""
msgid "This page is unavailable because you are not allowed to read information across multiple projects."
msgstr ""

View File

@@ -20,6 +20,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
stub_feature_flags(refactor_mr_widgets_extensions: false)
stub_feature_flags(refactor_mr_widgets_extensions_user: false)
stub_feature_flags(refactor_mr_widget_test_summary: false)
end
context 'new merge request', :sidekiq_might_not_need_inline do

View File

@@ -16,6 +16,7 @@ import newErrorsTestReports from 'jest/reports/mock_data/new_errors_report.json'
import newFailedTestReports from 'jest/reports/mock_data/new_failures_report.json';
import successTestReports from 'jest/reports/mock_data/no_failures_report.json';
import resolvedFailures from 'jest/reports/mock_data/resolved_failures.json';
import recentFailures from 'jest/reports/mock_data/recent_failures_report.json';
const reportWithParsingErrors = failedReport;
reportWithParsingErrors.suites[0].suite_errors = {
@@ -101,6 +102,17 @@ describe('Test report extension', () => {
expect(wrapper.text()).toContain(expectedResult);
});
it('displays report level recently failed count', async () => {
mockApi(httpStatusCodes.OK, recentFailures);
createComponent();
await waitForPromises();
expect(wrapper.text()).toContain(
'2 out of 3 failed tests have failed more than once in the last 14 days',
);
});
it('displays a link to the full report', async () => {
mockApi(httpStatusCodes.OK);
createComponent();
@@ -125,10 +137,10 @@ describe('Test report extension', () => {
it('displays summary for each suite', async () => {
await createExpandedWidgetWithData();
expect(trimText(findAllExtensionListItems().at(0).text())).toBe(
expect(trimText(findAllExtensionListItems().at(0).text())).toContain(
'rspec:pg: 1 failed and 2 fixed test results, 8 total tests',
);
expect(trimText(findAllExtensionListItems().at(1).text())).toBe(
expect(trimText(findAllExtensionListItems().at(1).text())).toContain(
'java ant: 1 failed, 3 total tests',
);
});
@@ -145,5 +157,37 @@ describe('Test report extension', () => {
'Base report parsing error: JUnit data parsing failed: string not matched',
);
});
it('displays suite level recently failed count', async () => {
await createExpandedWidgetWithData(recentFailures);
expect(trimText(findAllExtensionListItems().at(0).text())).toContain(
'1 out of 2 failed tests has failed more than once in the last 14 days',
);
expect(trimText(findAllExtensionListItems().at(1).text())).toContain(
'1 out of 1 failed test has failed more than once in the last 14 days',
);
});
it('displays the list of failed and fixed tests', async () => {
await createExpandedWidgetWithData();
const firstSuite = trimText(findAllExtensionListItems().at(0).text());
const secondSuite = trimText(findAllExtensionListItems().at(1).text());
expect(firstSuite).toContain('Test#subtract when a is 2 and b is 1 returns correct result');
expect(firstSuite).toContain('Test#sum when a is 1 and b is 2 returns summary');
expect(firstSuite).toContain('Test#sum when a is 100 and b is 200 returns summary');
expect(secondSuite).toContain('sumTest');
});
it('displays the test level recently failed count', async () => {
await createExpandedWidgetWithData(recentFailures);
expect(trimText(findAllExtensionListItems().at(0).text())).toContain(
'Failed 8 times in main in the last 14 days',
);
});
});
});

View File

@@ -28,7 +28,7 @@ RSpec.describe ::Types::RangeInputType do
values: {},
object: nil
)
instance = described_class[of_integer].new(context: context, defaults_used: [], ruby_kwargs: {})
instance = described_class[of_integer].new({}, context: context, defaults_used: [], ruby_kwargs: {})
expect(instance).to be_a_kind_of(described_class)
expect(instance).to be_a_kind_of(described_class[of_integer])

View File

@@ -0,0 +1,109 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe LazyImageTagHelper do
describe '#image_tag' do
let(:image_src) { '/path/to/image.jpg' }
let(:dark_image_src) { '/path/to/image_dark.jpg' }
context 'when only source passed' do
let(:current_user) { create(:user) }
let(:result) { image_tag(image_src) }
it 'returns a lazy image tag by default' do
expect(result).to eq(
"<img data-src=\"#{image_src}\" class=\"lazy\" src=\"#{placeholder_image}\" />"
)
end
end
context 'when lazy mode is disabled' do
let(:current_user) { create(:user) }
let(:result) { image_tag(image_src, lazy: false) }
it 'returns a normal image tag' do
expect(result).to eq(
"<img src=\"#{image_src}\" />"
)
end
end
context 'when Dark Mode is enabled' do
let(:current_user) { create(:user, theme_id: 11) }
context 'when auto dark enabled' do
let(:result) { image_tag(image_src, auto_dark: true) }
it 'adds an auto dark mode class from gitlab-ui' do
expect(result).to eq(
"<img class=\"gl-dark-invert-keep-hue lazy\" data-src=\"#{image_src}\" src=\"#{placeholder_image}\" />"
)
end
end
context 'when auto dark disabled' do
let(:result) { image_tag(image_src, auto_dark: false) }
it 'does nothing' do
expect(result).to eq(
"<img data-src=\"#{image_src}\" class=\"lazy\" src=\"#{placeholder_image}\" />"
)
end
end
context 'when dark variant is present' do
let(:result) { image_tag(image_src, dark_variant: dark_image_src) }
it 'uses dark variant as a source' do
expect(result).to eq(
"<img data-src=\"#{dark_image_src}\" class=\"lazy\" src=\"#{placeholder_image}\" />"
)
end
end
end
context 'when Dark Mode is disabled' do
let(:current_user) { create(:user, theme_id: 1) }
context 'when auto dark enabled' do
let(:result) { image_tag(image_src, auto_dark: true) }
it 'does not add a dark mode class from gitlab-ui' do
expect(result).to eq(
"<img data-src=\"#{image_src}\" class=\"lazy\" src=\"#{placeholder_image}\" />"
)
end
end
context 'when auto dark disabled' do
let(:result) { image_tag(image_src, auto_dark: true) }
it 'does nothing' do
expect(result).to eq(
"<img data-src=\"#{image_src}\" class=\"lazy\" src=\"#{placeholder_image}\" />"
)
end
end
context 'when dark variant is present' do
let(:result) { image_tag(image_src, dark_variant: dark_image_src) }
it 'uses original image as a source' do
expect(result).to eq(
"<img data-src=\"#{image_src}\" class=\"lazy\" src=\"#{placeholder_image}\" />"
)
end
end
end
context 'when auto_dark and dark_variant are both passed' do
let(:current_user) { create(:user) }
it 'does not add a dark mode class from gitlab-ui' do
expect { image_tag('image.jpg', dark_variant: 'image_dark.jpg', auto_dark: true) }
.to raise_error(ArgumentError, 'dark_variant and auto_dark are mutually exclusive')
end
end
end
end

View File

@@ -150,8 +150,8 @@ RSpec.describe API::Helpers do
context 'when user is authenticated' do
before do
subject.instance_variable_set(:@current_user, user)
subject.instance_variable_set(:@initial_current_user, user)
allow(subject).to receive(:current_user).and_return(user)
allow(subject).to receive(:initial_current_user).and_return(user)
end
context 'public project' do
@@ -167,8 +167,8 @@ RSpec.describe API::Helpers do
context 'when user is not authenticated' do
before do
subject.instance_variable_set(:@current_user, nil)
subject.instance_variable_set(:@initial_current_user, nil)
allow(subject).to receive(:current_user).and_return(nil)
allow(subject).to receive(:initial_current_user).and_return(nil)
end
context 'public project' do
@@ -181,58 +181,213 @@ RSpec.describe API::Helpers do
it_behaves_like 'private project without access'
end
end
context 'support for IDs and paths as argument' do
let_it_be(:project) { create(:project) }
let(:user) { project.first_owner}
before do
allow(subject).to receive(:current_user).and_return(user)
allow(subject).to receive(:authorized_project_scope?).and_return(true)
allow(subject).to receive(:job_token_authentication?).and_return(false)
allow(subject).to receive(:authenticate_non_public?).and_return(false)
end
shared_examples 'project finder' do
context 'when project exists' do
it 'returns requested project' do
expect(subject.find_project!(existing_id)).to eq(project)
end
it 'returns nil' do
expect(subject).to receive(:render_api_error!).with('404 Project Not Found', 404)
expect(subject.find_project!(non_existing_id)).to be_nil
end
end
end
context 'when ID is used as an argument' do
let(:existing_id) { project.id }
let(:non_existing_id) { non_existing_record_id }
it_behaves_like 'project finder'
end
context 'when PATH is used as an argument' do
let(:existing_id) { project.full_path }
let(:non_existing_id) { 'something/else' }
it_behaves_like 'project finder'
context 'with an invalid PATH' do
let(:non_existing_id) { 'undefined' } # path without slash
it_behaves_like 'project finder'
it 'does not hit the database' do
expect(Project).not_to receive(:find_by_full_path)
expect(subject).to receive(:render_api_error!).with('404 Project Not Found', 404)
subject.find_project!(non_existing_id)
end
end
end
end
end
describe '#find_project!' do
let_it_be(:project) { create(:project) }
describe '#find_group!' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:user) { create(:user) }
let(:user) { project.first_owner }
shared_examples 'private group without access' do
before do
group.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value('private'))
allow(subject).to receive(:authenticate_non_public?).and_return(false)
end
before do
allow(subject).to receive(:current_user).and_return(user)
allow(subject).to receive(:authorized_project_scope?).and_return(true)
allow(subject).to receive(:job_token_authentication?).and_return(false)
allow(subject).to receive(:authenticate_non_public?).and_return(false)
it 'returns not found' do
expect(subject).to receive(:not_found!)
subject.find_group!(group.id)
end
end
shared_examples 'project finder' do
context 'when project exists' do
it 'returns requested project' do
expect(subject.find_project!(existing_id)).to eq(project)
end
context 'when user is authenticated' do
before do
allow(subject).to receive(:current_user).and_return(user)
allow(subject).to receive(:initial_current_user).and_return(user)
end
it 'returns nil' do
expect(subject).to receive(:render_api_error!).with('404 Project Not Found', 404)
expect(subject.find_project!(non_existing_id)).to be_nil
context 'public group' do
it 'returns requested group' do
expect(subject.find_group!(group.id)).to eq(group)
end
end
context 'private group' do
it_behaves_like 'private group without access'
end
end
context 'when user is not authenticated' do
before do
allow(subject).to receive(:current_user).and_return(nil)
allow(subject).to receive(:initial_current_user).and_return(nil)
end
context 'public group' do
it 'returns requested group' do
expect(subject.find_group!(group.id)).to eq(group)
end
end
context 'private group' do
it_behaves_like 'private group without access'
end
end
context 'support for IDs and paths as arguments' do
let_it_be(:group) { create(:group) }
let(:user) { group.first_owner }
before do
allow(subject).to receive(:current_user).and_return(user)
allow(subject).to receive(:authorized_project_scope?).and_return(true)
allow(subject).to receive(:job_token_authentication?).and_return(false)
allow(subject).to receive(:authenticate_non_public?).and_return(false)
end
shared_examples 'group finder' do
context 'when group exists' do
it 'returns requested group' do
expect(subject.find_group!(existing_id)).to eq(group)
end
it 'returns nil' do
expect(subject).to receive(:render_api_error!).with('404 Group Not Found', 404)
expect(subject.find_group!(non_existing_id)).to be_nil
end
end
end
context 'when ID is used as an argument' do
let(:existing_id) { group.id }
let(:non_existing_id) { non_existing_record_id }
it_behaves_like 'group finder'
end
context 'when PATH is used as an argument' do
let(:existing_id) { group.full_path }
let(:non_existing_id) { 'something/else' }
it_behaves_like 'group finder'
end
end
end
describe '#find_group_by_full_path!' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:user) { create(:user) }
shared_examples 'private group without access' do
before do
group.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value('private'))
allow(subject).to receive(:authenticate_non_public?).and_return(false)
end
it 'returns not found' do
expect(subject).to receive(:not_found!)
subject.find_group_by_full_path!(group.full_path)
end
end
context 'when user is authenticated' do
before do
allow(subject).to receive(:current_user).and_return(user)
allow(subject).to receive(:initial_current_user).and_return(user)
end
context 'public group' do
it 'returns requested group' do
expect(subject.find_group_by_full_path!(group.full_path)).to eq(group)
end
end
context 'private group' do
it_behaves_like 'private group without access'
context 'with access' do
before do
group.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value('private'))
group.add_developer(user)
end
it 'returns requested group with access' do
expect(subject.find_group_by_full_path!(group.full_path)).to eq(group)
end
end
end
end
context 'when ID is used as an argument' do
let(:existing_id) { project.id }
let(:non_existing_id) { non_existing_record_id }
context 'when user is not authenticated' do
before do
allow(subject).to receive(:current_user).and_return(nil)
allow(subject).to receive(:initial_current_user).and_return(nil)
end
it_behaves_like 'project finder'
end
context 'when PATH is used as an argument' do
let(:existing_id) { project.full_path }
let(:non_existing_id) { 'something/else' }
it_behaves_like 'project finder'
context 'with an invalid PATH' do
let(:non_existing_id) { 'undefined' } # path without slash
it_behaves_like 'project finder'
it 'does not hit the database' do
expect(Project).not_to receive(:find_by_full_path)
expect(subject).to receive(:render_api_error!).with('404 Project Not Found', 404)
subject.find_project!(non_existing_id)
context 'public group' do
it 'returns requested group' do
expect(subject.find_group_by_full_path!(group.full_path)).to eq(group)
end
end
context 'private group' do
it_behaves_like 'private group without access'
end
end
end
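The finder behaviour pinned down above — 404 rendering for unknown IDs or paths, not_found! for inaccessible private groups — suggests roughly the following shape. A hedged sketch, not the actual helper; find_group and can? are assumed to mirror the project finder:
# Sketch: lookup by numeric ID or full path, then an access check.
# not_found! in the specs presumably delegates to this 404 rendering.
def find_group!(id)
  group = find_group(id) # assumed to resolve both IDs and full paths

  if can?(current_user, :read_group, group)
    group
  else
    render_api_error!('404 Group Not Found', 404)
  end
end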

View File

@ -0,0 +1,58 @@
# frozen_string_literal: true
require 'spec_helper'
require 'zlib'
RSpec.describe BulkImports::Common::Extractors::JsonExtractor do
subject { described_class.new(relation: 'self') }
let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:import) { create(:bulk_import) }
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: import) }
let_it_be(:entity) { create(:bulk_import_entity, bulk_import: import) }
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
before do
allow(FileUtils).to receive(:remove_entry).with(any_args).and_call_original
subject.instance_variable_set(:@tmpdir, tmpdir)
end
after(:all) do
FileUtils.remove_entry(tmpdir) if File.directory?(tmpdir)
end
describe '#extract' do
before do
Zlib::GzipWriter.open(File.join(tmpdir, 'self.json.gz')) do |gz|
gz.write '{"name": "Name","description": "Description","avatar":{"url":null}}'
end
expect(BulkImports::FileDownloadService).to receive(:new)
.with(
configuration: context.configuration,
relative_url: entity.relation_download_url_path('self'),
tmpdir: tmpdir,
filename: 'self.json.gz')
.and_return(instance_double(BulkImports::FileDownloadService, execute: nil))
end
it 'returns ExtractedData', :aggregate_failures do
extracted_data = subject.extract(context)
expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
expect(extracted_data.data).to contain_exactly(
{ 'name' => 'Name', 'description' => 'Description', 'avatar' => { 'url' => nil } }
)
end
end
describe '#remove_tmpdir' do
it 'removes tmp dir' do
expect(FileUtils).to receive(:remove_entry).with(tmpdir).once
subject.remove_tmpdir
end
end
end
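From the stubs above, the extractor's job is: download '<relation>.json.gz' into the tmp dir, gunzip it, parse the JSON, and wrap the result in ExtractedData. A minimal sketch of that flow, with the file layout assumed from the spec:
require 'zlib'

# Assumed flow; the download itself is delegated to FileDownloadService.
def extract(context)
  download_service(context).execute # writes "#{relation}.json.gz" to @tmpdir

  file = File.join(@tmpdir, "#{relation}.json.gz")
  attributes = Zlib::GzipReader.open(file) { |gz| Gitlab::Json.parse(gz.read) }

  BulkImports::Pipeline::ExtractedData.new(data: attributes)
end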

View File

@ -1,10 +1,10 @@
# frozen_string_literal: true
require 'spec_helper'
require 'zlib'
RSpec.describe BulkImports::Common::Extractors::NdjsonExtractor do
let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/labels.ndjson.gz' }
let_it_be(:import) { create(:bulk_import) }
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: import) }
let_it_be(:entity) { create(:bulk_import_entity, bulk_import: import) }
@ -25,21 +25,30 @@ RSpec.describe BulkImports::Common::Extractors::NdjsonExtractor do
describe '#extract' do
before do
FileUtils.copy_file(filepath, File.join(tmpdir, 'labels.ndjson.gz'))
allow_next_instance_of(BulkImports::FileDownloadService) do |service|
allow(service).to receive(:execute)
Zlib::GzipWriter.open(File.join(tmpdir, 'labels.ndjson.gz')) do |gz|
gz.write [
'{"title": "Title 1","description": "Description 1","type":"GroupLabel"}',
'{"title": "Title 2","description": "Description 2","type":"GroupLabel"}'
].join("\n")
end
expect(BulkImports::FileDownloadService).to receive(:new)
.with(
configuration: context.configuration,
relative_url: entity.relation_download_url_path('labels'),
tmpdir: tmpdir,
filename: 'labels.ndjson.gz')
.and_return(instance_double(BulkImports::FileDownloadService, execute: nil))
end
it 'returns ExtractedData' do
it 'returns ExtractedData', :aggregate_failures do
extracted_data = subject.extract(context)
label = extracted_data.data.first.first
expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
expect(label['title']).to include('Label')
expect(label['description']).to include('Label')
expect(label['type']).to eq('GroupLabel')
expect(extracted_data.data.to_a).to contain_exactly(
[{ "title" => "Title 1", "description" => "Description 1", "type" => "GroupLabel" }, 0],
[{ "title" => "Title 2", "description" => "Description 2", "type" => "GroupLabel" }, 1]
)
end
end

View File

@ -0,0 +1,76 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::GroupAttributesPipeline do
subject(:pipeline) { described_class.new(context) }
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:entity) { create(:bulk_import_entity, :group_entity, group: group, bulk_import: bulk_import) }
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:group_attributes) do
{
'id' => 1,
'name' => 'Group name',
'path' => 'group-path',
'description' => 'description',
'avatar' => {
'url' => nil
},
'membership_lock' => true,
'traversal_ids' => [
2
]
}
end
describe '#run' do
before do
allow_next_instance_of(BulkImports::Common::Extractors::JsonExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return(
BulkImports::Pipeline::ExtractedData.new(data: group_attributes)
)
end
end
it 'imports allowed group attributes' do
expect(Groups::UpdateService).to receive(:new).with(group, user, { membership_lock: true }).and_call_original
pipeline.run
expect(group).to have_attributes(membership_lock: true)
end
end
describe '#transform' do
it 'fetches only allowed attributes and symbolizes keys' do
transformed_data = pipeline.transform(context, group_attributes)
expect(transformed_data).to eq({ membership_lock: true })
end
context 'when there is no data to transform' do
let(:group_attributes) { nil }
it do
transformed_data = pipeline.transform(context, group_attributes)
expect(transformed_data).to eq(nil)
end
end
end
describe '#after_run' do
it 'calls extractor#remove_tmpdir' do
expect_next_instance_of(BulkImports::Common::Extractors::JsonExtractor) do |extractor|
expect(extractor).to receive(:remove_tmpdir)
end
pipeline.after_run(nil)
end
end
end
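The '#transform' examples fix the whole contract: nil passes through untouched, and only an allow-listed key survives, with symbolized keys. A sketch under that reading (the allow-list constant is assumed):
ALLOWED_ATTRIBUTES = %i[membership_lock].freeze # assumed allow-list

def transform(_context, data)
  return unless data

  data.symbolize_keys.slice(*ALLOWED_ATTRIBUTES)
end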

View File

@ -0,0 +1,75 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::NamespaceSettingsPipeline do
subject(:pipeline) { described_class.new(context) }
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group, namespace_settings: create(:namespace_settings)) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:entity) { create(:bulk_import_entity, :group_entity, group: group, bulk_import: bulk_import) }
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
before do
group.add_owner(user)
end
describe '#run' do
before do
allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
namespace_settings_attributes = {
'namespace_id' => 22,
'prevent_forking_outside_group' => true,
'prevent_sharing_groups_outside_hierarchy' => true
}
allow(extractor).to receive(:extract).and_return(
BulkImports::Pipeline::ExtractedData.new(data: [[namespace_settings_attributes, 0]])
)
end
end
it 'imports allowed namespace settings attributes' do
expect(Groups::UpdateService).to receive(:new).with(
group, user, { prevent_sharing_groups_outside_hierarchy: true }
).and_call_original
pipeline.run
expect(group.namespace_settings).to have_attributes(prevent_sharing_groups_outside_hierarchy: true)
end
end
describe '#transform' do
it 'fetches only allowed attributes and symbolizes keys' do
all_model_attributes = NamespaceSetting.new.attributes
transformed_data = pipeline.transform(context, [all_model_attributes, 0])
expect(transformed_data.keys).to match_array([:prevent_sharing_groups_outside_hierarchy])
end
context 'when there is no data to transform' do
it do
namespace_settings_attributes = nil
transformed_data = pipeline.transform(context, namespace_settings_attributes)
expect(transformed_data).to eq(nil)
end
end
end
describe '#after_run' do
it 'calls extractor#remove_tmpdir' do
expect_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
expect(extractor).to receive(:remove_tmpdir)
end
context = instance_double(BulkImports::Pipeline::Context)
pipeline.after_run(context)
end
end
end
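Same contract as the group-attributes pipeline, except the NDJSON extractor hands each row over as an [attributes, index] tuple, so the transform has to unwrap it first. A sketch under that assumption:
def transform(_context, data)
  return unless data

  data.first.symbolize_keys.slice(:prevent_sharing_groups_outside_hierarchy)
end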

View File

@ -11,7 +11,9 @@ RSpec.describe BulkImports::Groups::Stage do
let(:pipelines) do
[
[0, BulkImports::Groups::Pipelines::GroupPipeline],
[1, BulkImports::Groups::Pipelines::GroupAttributesPipeline],
[1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
[1, BulkImports::Groups::Pipelines::NamespaceSettingsPipeline],
[1, BulkImports::Common::Pipelines::MembersPipeline],
[1, BulkImports::Common::Pipelines::LabelsPipeline],
[1, BulkImports::Common::Pipelines::MilestonesPipeline],

View File

@ -58,21 +58,30 @@ RSpec.describe ContainerRegistry::Migration do
describe '.capacity' do
subject { described_class.capacity }
where(:ff_1_enabled, :ff_10_enabled, :ff_25_enabled, :expected_result) do
false | false | false | 0
true | false | false | 1
true | true | false | 10
true | true | true | 25
false | true | false | 10
false | true | true | 25
false | false | true | 25
true | false | true | 25
where(:ff_1_enabled, :ff_2_enabled, :ff_10_enabled, :ff_25_enabled, :expected_result) do
false | false | false | false | 0
true | false | false | false | 1
false | true | false | false | 2
true | true | false | false | 2
false | true | true | false | 10
false | true | true | true | 25
false | true | false | true | 25
true | true | false | true | 25
true | true | true | true | 25
true | true | true | false | 10
true | false | true | false | 10
true | false | true | true | 25
false | false | true | false | 10
false | false | true | true | 25
false | false | false | true | 25
true | false | false | true | 25
end
with_them do
before do
stub_feature_flags(
container_registry_migration_phase2_capacity_1: ff_1_enabled,
container_registry_migration_phase2_capacity_2: ff_2_enabled,
container_registry_migration_phase2_capacity_10: ff_10_enabled,
container_registry_migration_phase2_capacity_25: ff_25_enabled
)
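The truth table reads as 'the highest enabled capacity flag wins'. A sketch of that resolution order, with flag names taken from the stubs above:
def capacity
  return 25 if Feature.enabled?(:container_registry_migration_phase2_capacity_25)
  return 10 if Feature.enabled?(:container_registry_migration_phase2_capacity_10)
  return 2 if Feature.enabled?(:container_registry_migration_phase2_capacity_2)
  return 1 if Feature.enabled?(:container_registry_migration_phase2_capacity_1)

  0
end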

View File

@ -88,6 +88,21 @@ RSpec.describe Gitlab::ImportExport::Group::RelationFactory do
end
end
context 'when relation is namespace_settings' do
let(:relation_sym) { :namespace_settings }
let(:relation_hash) do
{
'namespace_id' => 1,
'prevent_forking_outside_group' => true,
'prevent_sharing_groups_outside_hierarchy' => true
}
end
it do
expect(created_object).to eq(nil)
end
end
def random_id
rand(1000..10000)
end

View File

@ -0,0 +1,35 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator do
using RSpec::Parameterized::TableSyntax
let(:duration) { 123 }
where(:metric_value, :metric_class) do
1 | Integer
"value" | String
true | TrueClass
false | FalseClass
nil | NilClass
end
with_them do
let(:decorated_object) { described_class.new(metric_value, duration) }
it 'exposes a duration with the correct value' do
expect(decorated_object.duration).to eq(duration)
end
it 'imitates wrapped class', :aggregate_failures do
expect(decorated_object).to eq metric_value
expect(decorated_object.class).to eq metric_class
expect(decorated_object.is_a?(metric_class)).to be_truthy
# rubocop:disable Style/ClassCheck
expect(decorated_object.kind_of?(metric_class)).to be_truthy
# rubocop:enable Style/ClassCheck
expect({ metric: decorated_object }.to_json).to eql({ metric: metric_value }.to_json)
end
end
end
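Everything asserted here — equality with the wrapped value, class imitation, transparent JSON serialization — is what a SimpleDelegator-based decorator provides once #class is overridden. A sketch, assuming that implementation:
require 'delegate'

class LegacyMetricTimingDecorator < SimpleDelegator
  attr_reader :duration

  def initialize(value, duration)
    @duration = duration
    super(value)
  end

  # Makes decorated_object.class report the wrapped class, as the spec expects.
  def class
    __getobj__.class
  end
end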

View File

@ -11,6 +11,12 @@ RSpec.describe Gitlab::UsageDataQueries do
end
end
describe '.with_duration' do
it 'yields passed block' do
expect { |block| described_class.with_duration(&block) }.to yield_with_no_args
end
end
describe '.count' do
it 'returns the raw SQL' do
expect(described_class.count(User)).to start_with('SELECT COUNT("users"."id") FROM "users"')

View File

@ -1470,4 +1470,31 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
end
describe ".with_duration" do
context 'with feature flag measure_service_ping_metric_collection turned off' do
before do
stub_feature_flags(measure_service_ping_metric_collection: false)
end
it 'does NOT record duration and returns the block result' do
expect(::Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator).not_to receive(:new)
expect(described_class.with_duration { 1 + 1 }).to be 2
end
end
context 'with feature flag measure_service_ping_metric_collection turned on' do
before do
stub_feature_flags(measure_service_ping_metric_collection: true)
end
it 'records duration' do
expect(::Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator)
.to receive(:new).with(2, kind_of(Float))
described_class.with_duration { 1 + 1 }
end
end
end
end
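Both branches above pin the behaviour down: with the flag off the block's result is returned untouched; with it on the result is wrapped together with a Float duration. A sketch of with_duration consistent with that (Benchmark is stdlib):
require 'benchmark'

def with_duration
  return yield unless Feature.enabled?(:measure_service_ping_metric_collection)

  result = nil
  duration = Benchmark.realtime { result = yield }

  Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator.new(result, duration)
end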

View File

@ -31,6 +31,12 @@ RSpec.describe Gitlab::Utils::UsageData do
end
end
describe '.with_duration' do
it 'yields passed block' do
expect { |block| described_class.with_duration(&block) }.to yield_with_no_args
end
end
describe '#add_metric' do
let(:metric) { 'UuidMetric' }
@ -48,6 +54,13 @@ RSpec.describe Gitlab::Utils::UsageData do
expect(described_class.count(relation, batch: false)).to eq(1)
end
it 'records duration' do
expect(described_class).to receive(:with_duration)
allow(relation).to receive(:count).and_return(1)
described_class.count(relation, batch: false)
end
context 'when counting fails' do
subject { described_class.count(relation, batch: false) }
@ -68,6 +81,13 @@ RSpec.describe Gitlab::Utils::UsageData do
expect(described_class.distinct_count(relation, batch: false)).to eq(1)
end
it 'records duration' do
expect(described_class).to receive(:with_duration)
allow(relation).to receive(:distinct_count_by).and_return(1)
described_class.distinct_count(relation, batch: false)
end
context 'when counting fails' do
subject { described_class.distinct_count(relation, batch: false) }
@ -206,14 +226,6 @@ RSpec.describe Gitlab::Utils::UsageData do
it_behaves_like 'failing hardening method'
end
it 'logs error and returns DISTRIBUTED_HLL_FALLBACK value when counting raises any error', :aggregate_failures do
error = StandardError.new('')
allow(Gitlab::Database::PostgresHll::BatchDistinctCounter).to receive(:new).and_raise(error)
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(error)
expect(described_class.estimate_batch_distinct_count(relation)).to eq(4)
end
end
end
@ -229,6 +241,13 @@ RSpec.describe Gitlab::Utils::UsageData do
expect(described_class.sum(relation, :column, batch_size: 100, start: 2, finish: 3)).to eq(1)
end
it 'records duration' do
expect(described_class).to receive(:with_duration)
allow(Gitlab::Database::BatchCount).to receive(:batch_sum).and_return(1)
described_class.sum(relation, :column)
end
context 'when counting fails' do
subject { described_class.sum(relation, :column) }
@ -316,6 +335,12 @@ RSpec.describe Gitlab::Utils::UsageData do
expect(histogram).to eq('2' => 1)
end
it 'records duration' do
expect(described_class).to receive(:with_duration)
described_class.histogram(relation, column, buckets: 1..100)
end
context 'when query timeout' do
subject do
with_statement_timeout(0.001) do
@ -368,6 +393,12 @@ RSpec.describe Gitlab::Utils::UsageData do
expect(described_class.add).to eq(0)
end
it 'records duration' do
expect(described_class).to receive(:with_duration)
described_class.add
end
context 'when adding fails' do
subject { described_class.add(nil, 3) }
@ -392,6 +423,12 @@ RSpec.describe Gitlab::Utils::UsageData do
it_behaves_like 'failing hardening method', StandardError
end
it 'records duration' do
expect(described_class).to receive(:with_duration)
described_class.alt_usage_data
end
it 'returns the evaluated block when given' do
expect(described_class.alt_usage_data { Gitlab::CurrentSettings.uuid } ).to eq(Gitlab::CurrentSettings.uuid)
end
@ -402,6 +439,12 @@ RSpec.describe Gitlab::Utils::UsageData do
end
describe '#redis_usage_data' do
it 'records duration' do
expect(described_class).to receive(:with_duration)
described_class.redis_usage_data
end
context 'with block given' do
context 'when method fails' do
subject { described_class.redis_usage_data { raise ::Redis::CommandError } }
@ -445,6 +488,12 @@ RSpec.describe Gitlab::Utils::UsageData do
end
describe '#with_prometheus_client' do
it 'records duration' do
expect(described_class).to receive(:with_duration)
described_class.with_prometheus_client { |client| client }
end
it 'returns fallback with for an exception in yield block' do
allow(described_class).to receive(:prometheus_client).and_return(Gitlab::PrometheusClient.new('http://localhost:9090'))
result = described_class.with_prometheus_client(fallback: -42) { |client| raise StandardError }

View File

@ -354,6 +354,7 @@ RSpec.describe ContainerRepository, :aggregate_failures do
subject { repository.skip_import(reason: :too_many_retries) }
it_behaves_like 'transitioning from allowed states', ContainerRepository::SKIPPABLE_MIGRATION_STATES
it_behaves_like 'queueing the next import'
it 'sets migration_skipped_at and migration_skipped_reason' do
expect { subject }.to change { repository.reload.migration_skipped_at }
@ -1307,6 +1308,38 @@ RSpec.describe ContainerRepository, :aggregate_failures do
end
end
describe '#nearing_or_exceeded_retry_limit?' do
subject { repository.nearing_or_exceeded_retry_limit? }
before do
stub_application_setting(container_registry_import_max_retries: 3)
end
context 'migration_retries_count is 1 less than max_retries' do
before do
repository.update_column(:migration_retries_count, 2)
end
it { is_expected.to eq(true) }
end
context 'migration_retries_count is lower than max_retries' do
before do
repository.update_column(:migration_retries_count, 1)
end
it { is_expected.to eq(false) }
end
context 'migration_retries_count equal to or higher than max_retries' do
before do
repository.update_column(:migration_retries_count, 3)
end
it { is_expected.to eq(true) }
end
end
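The three contexts encode 'true once the count is within one retry of the cap'. A sketch, assuming max_retries reads the container_registry_import_max_retries setting stubbed above:
def nearing_or_exceeded_retry_limit?
  migration_retries_count >= ContainerRegistry::Migration.max_retries - 1
end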
context 'with repositories' do
let_it_be_with_reload(:repository) { create(:container_repository, :cleanup_unscheduled) }
let_it_be(:other_repository) { create(:container_repository, :cleanup_unscheduled) }

View File

@ -126,32 +126,22 @@ RSpec.describe Key, :mailer do
context 'validation of uniqueness (based on fingerprint uniqueness)' do
let(:user) { create(:user) }
shared_examples 'fingerprint uniqueness' do
it 'accepts the key once' do
expect(build(:rsa_key_4096, user: user)).to be_valid
end
it 'does not accept the exact same key twice' do
first_key = create(:rsa_key_4096, user: user)
expect(build(:key, user: user, key: first_key.key)).not_to be_valid
end
it 'does not accept a duplicate key with a different comment' do
first_key = create(:rsa_key_4096, user: user)
duplicate = build(:key, user: user, key: first_key.key)
duplicate.key << ' extra comment'
expect(duplicate).not_to be_valid
end
it 'accepts the key once' do
expect(build(:rsa_key_4096, user: user)).to be_valid
end
context 'with FIPS mode off' do
it_behaves_like 'fingerprint uniqueness'
it 'does not accept the exact same key twice' do
first_key = create(:rsa_key_4096, user: user)
expect(build(:key, user: user, key: first_key.key)).not_to be_valid
end
context 'with FIPS mode', :fips_mode do
it_behaves_like 'fingerprint uniqueness'
it 'does not accept a duplicate key with a different comment' do
first_key = create(:rsa_key_4096, user: user)
duplicate = build(:key, user: user, key: first_key.key)
duplicate.key << ' extra comment'
expect(duplicate).not_to be_valid
end
end

View File

@ -696,6 +696,21 @@ RSpec.describe QuickActions::InterpretService do
expect(message).to eq("Assigned #{developer.to_reference}.")
end
context 'when the reference does not match the exact case' do
let(:user) { create(:user) }
let(:content) { "/assign #{user.to_reference.upcase}" }
it 'assigns to the user' do
issuable.project.add_developer(user)
_, updates, message = service.execute(content, issuable)
expect(content).not_to include(user.to_reference)
expect(updates).to eq(assignee_ids: [user.id])
expect(message).to eq("Assigned #{user.to_reference}.")
end
end
context 'when the user has a private profile' do
let(:user) { create(:user, :private_profile) }
let(:content) { "/assign #{user.to_reference}" }

View File

@ -51,6 +51,9 @@ RSpec.describe ServicePing::SubmitService do
let(:with_dev_ops_score_params) { { dev_ops_score: score_params[:score] } }
let(:with_conv_index_params) { { conv_index: score_params[:score] } }
let(:with_usage_data_id_params) { { conv_index: { usage_data_id: usage_data_id } } }
let(:service_ping_payload_url) { File.join(described_class::STAGING_BASE_URL, described_class::USAGE_DATA_PATH) }
let(:service_ping_errors_url) { File.join(described_class::STAGING_BASE_URL, described_class::ERROR_PATH) }
let(:service_ping_metadata_url) { File.join(described_class::STAGING_BASE_URL, described_class::METADATA_PATH) }
shared_examples 'does not run' do
it do
@ -63,7 +66,7 @@ RSpec.describe ServicePing::SubmitService do
shared_examples 'does not send a blank usage ping payload' do
it do
expect(Gitlab::HTTP).not_to receive(:post).with(subject.url, any_args)
expect(Gitlab::HTTP).not_to receive(:post).with(service_ping_payload_url, any_args)
expect { subject.execute }.to raise_error(described_class::SubmissionError) do |error|
expect(error.message).to include('Usage data is blank')
@ -117,6 +120,7 @@ RSpec.describe ServicePing::SubmitService do
it 'generates service ping' do
stub_response(body: with_dev_ops_score_params)
stub_response(body: nil, url: service_ping_metadata_url, status: 201)
expect(Gitlab::Usage::ServicePingReport).to receive(:for).with(output: :all_metrics_values).and_call_original
@ -129,7 +133,8 @@ RSpec.describe ServicePing::SubmitService do
stub_usage_data_connections
stub_database_flavor_check
stub_application_setting(usage_ping_enabled: true)
stub_response(body: nil, url: subject.error_url, status: 201)
stub_response(body: nil, url: service_ping_errors_url, status: 201)
stub_response(body: nil, url: service_ping_metadata_url, status: 201)
end
context 'and user requires usage stats consent' do
@ -141,6 +146,7 @@ RSpec.describe ServicePing::SubmitService do
end
it 'sends a POST request' do
stub_response(body: nil, url: service_ping_metadata_url, status: 201)
response = stub_response(body: with_dev_ops_score_params)
subject.execute
@ -278,6 +284,7 @@ RSpec.describe ServicePing::SubmitService do
context 'if payload service fails' do
before do
stub_response(body: with_dev_ops_score_params)
allow(ServicePing::BuildPayloadService).to receive_message_chain(:new, :execute)
.and_raise(described_class::SubmissionError, 'SubmissionError')
end
@ -291,9 +298,11 @@ RSpec.describe ServicePing::SubmitService do
end
it 'submits error' do
expect(Gitlab::HTTP).to receive(:post).with(subject.url, any_args)
expect(Gitlab::HTTP).to receive(:post).with(URI.join(service_ping_payload_url), any_args)
.and_call_original
expect(Gitlab::HTTP).to receive(:post).with(subject.error_url, any_args)
expect(Gitlab::HTTP).to receive(:post).with(URI.join(service_ping_errors_url), any_args)
.and_call_original
expect(Gitlab::HTTP).to receive(:post).with(URI.join(service_ping_metadata_url), any_args)
.and_call_original
subject.execute
@ -356,31 +365,72 @@ RSpec.describe ServicePing::SubmitService do
end
end
describe '#url' do
let(:url) { subject.url.to_s }
context 'metadata reporting' do
before do
stub_usage_data_connections
stub_database_flavor_check
stub_application_setting(usage_ping_enabled: true)
stub_response(body: with_conv_index_params)
end
context 'when Rails.env is production' do
before do
stub_rails_env('production')
context 'with feature flag measure_service_ping_metric_collection turned on' do
let(:metric_double) { instance_double(Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator, duration: 123) }
let(:payload) do
{
metric_a: metric_double,
metric_group: {
metric_b: metric_double
},
metric_without_timing: "value",
recorded_at: Time.current
}
end
it 'points to the production Version app' do
expect(url).to eq("#{described_class::PRODUCTION_BASE_URL}/#{described_class::USAGE_DATA_PATH}")
let(:metadata_payload) do
{
metadata: {
metrics: [
{ name: 'metric_a', time_elapsed: 123 },
{ name: 'metric_group.metric_b', time_elapsed: 123 }
]
}
}
end
before do
stub_feature_flags(measure_service_ping_metric_collection: true)
allow_next_instance_of(ServicePing::BuildPayloadService) do |service|
allow(service).to receive(:execute).and_return(payload)
end
end
it 'submits metadata' do
response = stub_full_request(service_ping_metadata_url, method: :post)
.with(body: metadata_payload)
subject.execute
expect(response).to have_been_requested
end
end
context 'when Rails.env is not production' do
context 'with feature flag measure_service_ping_metric_collection turned off' do
before do
stub_rails_env('development')
stub_feature_flags(measure_service_ping_metric_collection: false)
end
it 'points to the staging Version app' do
expect(url).to eq("#{described_class::STAGING_BASE_URL}/#{described_class::USAGE_DATA_PATH}")
it 'does NOT submit metadata' do
response = stub_full_request(service_ping_metadata_url, method: :post)
subject.execute
expect(response).not_to have_been_requested
end
end
end
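The metadata_payload above flattens nested metric names with dots and keeps only values that carry a duration. A recursive sketch of that flattening (helper name hypothetical):
# Hypothetical helper: collect { name, time_elapsed } for every timed value,
# joining nested keys with '.'; untimed values are dropped.
def metric_timings(payload, prefix = nil)
  payload.flat_map do |key, value|
    name = [prefix, key].compact.join('.')

    if value.is_a?(Hash)
      metric_timings(value, name)
    elsif value.respond_to?(:duration)
      [{ name: name, time_elapsed: value.duration }]
    else
      []
    end
  end
end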
def stub_response(url: subject.url, body:, status: 201)
def stub_response(url: service_ping_payload_url, body:, status: 201)
stub_full_request(url, method: :post)
.to_return(
headers: { 'Content-Type' => 'application/json' },

View File

@ -211,18 +211,13 @@ end
RSpec::Matchers.define :have_graphql_resolver do |expected|
match do |field|
case expected
when Method
expect(field.type_class.resolve_proc).to eq(expected)
else
expect(field.type_class.resolver).to eq(expected)
end
expect(field.resolver).to eq(expected)
end
end
RSpec::Matchers.define :have_graphql_extension do |expected|
match do |field|
expect(field.type_class.extensions).to include(expected)
expect(field.extensions).to include(expected)
end
end

View File

@ -202,7 +202,13 @@ RSpec.shared_examples 'reconciling migration_state' do
context "#{canceled_status} response" do
let(:status) { canceled_status }
it_behaves_like 'enforcing states coherence to', 'import_skipped'
it_behaves_like 'enforcing states coherence to', 'import_skipped' do
it 'skips with migration_canceled_by_registry' do
subject
expect(repository.reload.migration_skipped_reason).to eq('migration_canceled_by_registry')
end
end
end
end
end

View File

@ -251,7 +251,19 @@ RSpec.describe ContainerRegistry::Migration::EnqueuerWorker, :aggregate_failures
expect(container_repository.migration_skipped_at).not_to be_nil
end
it_behaves_like 're-enqueuing based on capacity', capacity_limit: 3
context 're-enqueuing' do
before do
# skipping will also re-enqueue, so we isolate the capacity behavior here
method = worker.method(:next_repository)
allow(worker).to receive(:next_repository) do
next_qualified_repository = method.call
allow(next_qualified_repository).to receive(:skip_import).and_return(true)
next_qualified_repository
end
end
it_behaves_like 're-enqueuing based on capacity', capacity_limit: 3
end
end
context 'when an error occurs' do

View File

@ -32,50 +32,59 @@ RSpec.describe ContainerRegistry::Migration::GuardWorker, :aggregate_failures do
end
end
context 'migration is canceled' do
let(:migration_cancel_response) { { status: :ok } }
it 'will not abort the migration' do
shared_examples 'aborting the migration' do
it 'will abort the migration' do
expect(worker).to receive(:log_extra_metadata_on_done).with(:stale_migrations_count, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_stale_migrations_count, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_long_running_migration_ids, [stale_migration.id])
expect { subject }
.to change(import_skipped_migrations, :count)
.to change(import_aborted_migrations, :count).by(1)
.and change { stale_migration.reload.migration_state }.to('import_aborted')
.and not_change { ongoing_migration.migration_state }
end
end
expect(stale_migration.reload.migration_state).to eq('import_skipped')
expect(stale_migration.reload.migration_skipped_reason).to eq('migration_canceled')
context 'migration is canceled' do
let(:migration_cancel_response) { { status: :ok } }
before do
stub_application_setting(container_registry_import_max_retries: 3)
end
context 'when the retry limit has been reached' do
before do
stale_migration.update_column(:migration_retries_count, 2)
end
it 'will not abort the migration' do
expect(worker).to receive(:log_extra_metadata_on_done).with(:stale_migrations_count, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_stale_migrations_count, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_long_running_migration_ids, [stale_migration.id])
expect { subject }
.to change(import_skipped_migrations, :count)
expect(stale_migration.reload.migration_state).to eq('import_skipped')
expect(stale_migration.reload.migration_skipped_reason).to eq('migration_canceled')
end
end
context 'when the retry limit has not been reached' do
it_behaves_like 'aborting the migration'
end
end
context 'migration cancelation fails with an error' do
let(:migration_cancel_response) { { status: :error } }
it 'will abort the migration' do
expect(worker).to receive(:log_extra_metadata_on_done).with(:stale_migrations_count, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_stale_migrations_count, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_long_running_migration_ids, [stale_migration.id])
expect { subject }
.to change(import_aborted_migrations, :count).by(1)
.and change { stale_migration.reload.migration_state }.to('import_aborted')
.and not_change { ongoing_migration.migration_state }
end
it_behaves_like 'aborting the migration'
end
context 'migration receives bad request with a new status' do
let(:migration_cancel_response) { { status: :bad_request, migration_state: :import_done } }
it 'will abort the migration' do
expect(worker).to receive(:log_extra_metadata_on_done).with(:stale_migrations_count, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_stale_migrations_count, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_long_running_migration_ids, [stale_migration.id])
expect { subject }
.to change(import_aborted_migrations, :count).by(1)
.and change { stale_migration.reload.migration_state }.to('import_aborted')
.and not_change { ongoing_migration.migration_state }
end
it_behaves_like 'aborting the migration'
end
end
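Read together, the cancel contexts describe one decision: a successful cancel downgrades to skip_import only when the repository is near its retry cap; every other outcome aborts. A sketch, with migration_cancel standing in for the registry client call:
def cancel_long_running_migration(repository)
  result = repository.migration_cancel # assumed registry client call

  case result[:status]
  when :ok
    if repository.nearing_or_exceeded_retry_limit?
      repository.skip_import(reason: :migration_canceled)
    else
      repository.abort_import
    end
  else
    repository.abort_import
  end
end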
@ -96,7 +105,7 @@ RSpec.describe ContainerRegistry::Migration::GuardWorker, :aggregate_failures do
context 'with pre_importing stale migrations' do
let(:ongoing_migration) { create(:container_repository, :pre_importing) }
let(:stale_migration) { create(:container_repository, :pre_importing, migration_pre_import_started_at: 35.minutes.ago) }
let(:stale_migration) { create(:container_repository, :pre_importing, migration_pre_import_started_at: 11.minutes.ago) }
let(:import_status) { 'test' }
before do
@ -128,7 +137,7 @@ RSpec.describe ContainerRegistry::Migration::GuardWorker, :aggregate_failures do
context 'with pre_import_done stale migrations' do
let(:ongoing_migration) { create(:container_repository, :pre_import_done) }
let(:stale_migration) { create(:container_repository, :pre_import_done, migration_pre_import_done_at: 35.minutes.ago) }
let(:stale_migration) { create(:container_repository, :pre_import_done, migration_pre_import_done_at: 11.minutes.ago) }
before do
allow(::ContainerRegistry::Migration).to receive(:max_step_duration).and_return(5.minutes)
@ -151,7 +160,7 @@ RSpec.describe ContainerRegistry::Migration::GuardWorker, :aggregate_failures do
context 'with importing stale migrations' do
let(:ongoing_migration) { create(:container_repository, :importing) }
let(:stale_migration) { create(:container_repository, :importing, migration_import_started_at: 35.minutes.ago) }
let(:stale_migration) { create(:container_repository, :importing, migration_import_started_at: 11.minutes.ago) }
let(:import_status) { 'test' }
before do