Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-04-09 00:09:46 +00:00
parent 81c0f29ad9
commit b3c9b2468d
29 changed files with 353 additions and 103 deletions

View file

@ -12,6 +12,7 @@ import { redirectTo } from '~/lib/utils/url_utility';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import WebIdeLink from '~/vue_shared/components/web_ide_link.vue';
import CodeIntelligence from '~/code_navigation/components/app.vue';
import LineHighlighter from '~/blob/line_highlighter';
import getRefMixin from '../mixins/get_ref';
import blobInfoQuery from '../queries/blob_info.query.graphql';
import userInfoQuery from '../queries/user_info.query.graphql';
@ -192,6 +193,7 @@ export default {
window.requestIdleCallback(() => {
this.isRenderingLegacyTextViewer = false;
new LineHighlighter(); // eslint-disable-line no-new
});
} else {
this.legacyRichViewer = html;

View file

@ -1,7 +1,5 @@
<script>
import { GlIcon, GlSafeHtmlDirective } from '@gitlab/ui';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import LineHighlighter from '~/blob/line_highlighter';
import { HIGHLIGHT_CLASS_NAME } from './constants';
import ViewerMixin from './mixins';
@ -13,7 +11,7 @@ export default {
directives: {
SafeHtml: GlSafeHtmlDirective,
},
mixins: [ViewerMixin, glFeatureFlagsMixin()],
mixins: [ViewerMixin],
inject: ['blobHash'],
data() {
return {
@ -21,21 +19,14 @@ export default {
};
},
computed: {
refactorBlobViewerEnabled() {
return this.glFeatures.refactorBlobViewer;
},
lineNumbers() {
return this.content.split('\n').length;
},
},
mounted() {
if (this.refactorBlobViewerEnabled) {
// This line will be removed once we start using highlight.js on the frontend (https://gitlab.com/groups/gitlab-org/-/epics/7146)
new LineHighlighter(); // eslint-disable-line no-new
} else {
const { hash } = window.location;
if (hash) this.scrollToLine(hash, true);
const { hash } = window.location;
if (hash) {
this.scrollToLine(hash, true);
}
},
methods: {

View file

@ -27,7 +27,11 @@ class UsersController < ApplicationController
check_rate_limit!(:username_exists, scope: request.ip)
end
feature_category :users
feature_category :users, [:show, :activity, :groups, :projects, :contributed, :starred,
:followers, :following, :calendar, :calendar_activities,
:exists, :activity, :follow, :unfollow, :ssh_keys, :gpg_keys]
feature_category :snippets, [:snippets]
def show
respond_to do |format|
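
The change above splits UsersController's feature categorization per action: the user-facing actions stay under :users, while the snippets action now reports under :snippets. As a rough illustration of the general pattern only (a hedged sketch, not GitLab's actual controller concern), a class-level DSL like this records an action-to-category map:

module FeatureCategorizable
  # Action => category map; falls back to the default category when one is set.
  def feature_categories
    @feature_categories ||= Hash.new(@default_category)
  end

  def feature_category(category, actions = nil)
    if actions
      actions.each { |action| feature_categories[action.to_sym] = category }
    else
      @default_category = category
      feature_categories.default = category
    end
  end
end

class UsersControllerSketch
  extend FeatureCategorizable

  feature_category :users, [:show, :activity, :followers, :following]
  feature_category :snippets, [:snippets]
end

UsersControllerSketch.feature_categories[:snippets] # => :snippets
UsersControllerSketch.feature_categories[:show]     # => :users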

View file

@ -17,6 +17,7 @@ class ContainerRepository < ApplicationRecord
SKIPPABLE_MIGRATION_STATES = (ABORTABLE_MIGRATION_STATES + %w[import_aborted]).freeze
MIGRATION_PHASE_1_STARTED_AT = Date.new(2021, 11, 4).freeze
MIGRATION_PHASE_1_ENDED_AT = Date.new(2022, 01, 23).freeze
TooManyImportsError = Class.new(StandardError)
@ -58,8 +59,8 @@ class ContainerRepository < ApplicationRecord
scope :import_in_process, -> { where(migration_state: %w[pre_importing pre_import_done importing]) }
scope :recently_done_migration_step, -> do
where(migration_state: %w[import_done pre_import_done import_aborted])
.order(Arel.sql('GREATEST(migration_pre_import_done_at, migration_import_done_at, migration_aborted_at) DESC'))
where(migration_state: %w[import_done pre_import_done import_aborted import_skipped])
.order(Arel.sql('GREATEST(migration_pre_import_done_at, migration_import_done_at, migration_aborted_at, migration_skipped_at) DESC'))
end
scope :ready_for_import, -> do
@ -160,7 +161,7 @@ class ContainerRepository < ApplicationRecord
end
end
before_transition %i[pre_importing import_aborted] => :pre_import_done do |container_repository|
before_transition any => :pre_import_done do |container_repository|
container_repository.migration_pre_import_done_at = Time.zone.now
end
@ -217,6 +218,13 @@ class ContainerRepository < ApplicationRecord
).exists?
end
def self.all_migrated?
# check that the set of non migrated repositories is empty
where(created_at: ...MIGRATION_PHASE_1_ENDED_AT)
.where.not(migration_state: 'import_done')
.empty?
end
def self.with_enabled_policy
joins('INNER JOIN container_expiration_policies ON container_repositories.project_id = container_expiration_policies.project_id')
.where(container_expiration_policies: { enabled: true })
@ -359,7 +367,7 @@ class ContainerRepository < ApplicationRecord
end
def last_import_step_done_at
[migration_pre_import_done_at, migration_import_done_at, migration_aborted_at].compact.max
[migration_pre_import_done_at, migration_import_done_at, migration_aborted_at, migration_skipped_at].compact.max
end
def external_import_status
@ -456,7 +464,7 @@ class ContainerRepository < ApplicationRecord
next if self.created_at.before?(MIGRATION_PHASE_1_STARTED_AT)
next unless gitlab_api_client.supports_gitlab_api?
gitlab_api_client.repository_details(self.path, with_size: true)['size_bytes']
gitlab_api_client.repository_details(self.path, sizing: :self)['size_bytes']
end
end
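
The new ContainerRepository.all_migrated? uses a beginless range, so where(created_at: ...MIGRATION_PHASE_1_ENDED_AT) means created_at < '2022-01-23', and empty? on the unloaded relation becomes an existence check. A minimal sketch of what the helper asks the database, assuming standard ActiveRecord behaviour:

# Project#container_repositories_size (below) calls this through the
# project-scoped relation: project.container_repositories.all_migrated?
non_migrated = ContainerRepository
  .where(created_at: ...ContainerRepository::MIGRATION_PHASE_1_ENDED_AT) # created_at < 2022-01-23
  .where.not(migration_state: 'import_done')

# On an unloaded relation, empty? issues an EXISTS-style SELECT ... LIMIT 1,
# which the partial index tmp_idx_container_repos_on_non_migrated (added by
# the migration further down) is shaped to answer cheaply.
non_migrated.empty?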

View file

@ -1063,6 +1063,17 @@ class Project < ApplicationRecord
end
end
def container_repositories_size
strong_memoize(:container_repositories_size) do
next unless Gitlab.com?
next 0 if container_repositories.empty?
next unless container_repositories.all_migrated?
next unless ContainerRegistry::GitlabApiClient.supports_gitlab_api?
ContainerRegistry::GitlabApiClient.deduplicated_size(full_path)
end
end
def has_container_registry_tags?
return @images if defined?(@images)
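
Project#container_repositories_size is new in this commit and short-circuits through several guards before asking the registry for a deduplicated size. An outcome sketch, illustrative only, mirroring the guards above and the spec table added further down:

project.container_repositories_size
# => nil  outside GitLab.com
# => 0    on GitLab.com when the project has no container repositories
# => nil  when any repository created before 2022-01-23 is not yet migrated
# => nil  when the registry does not expose the GitLab API
# => ContainerRegistry::GitlabApiClient.deduplicated_size(project.full_path)
#    otherwise (deduplicated size in bytes, which may itself be nil);
#    the result is memoized per project instance via strong_memoize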

View file

@ -9,14 +9,12 @@
- link_start = '<a href="%{url}">'.html_safe % { url: help_page_path('development/snowplow/index') }
= html_escape(_('Configure %{link} to track events. %{link_start}Learn more.%{link_end}')) % { link: link_to('Snowplow', 'https://snowplowanalytics.com/', target: '_blank', rel: 'noopener noreferrer').html_safe, link_start: link_start, link_end: '</a>'.html_safe }
.settings-content
= form_for @application_setting, url: general_admin_application_settings_path(anchor: 'js-snowplow-settings'), html: { class: 'fieldset-form', id: 'snowplow-settings' } do |f|
= gitlab_ui_form_for @application_setting, url: general_admin_application_settings_path(anchor: 'js-snowplow-settings'), html: { class: 'fieldset-form', id: 'snowplow-settings' } do |f|
= form_errors(@application_setting) if expanded
%fieldset
.form-group
.form-check
= f.check_box :snowplow_enabled, class: 'form-check-input', data: { qa_selector: 'snowplow_enabled_checkbox' }
= f.label :snowplow_enabled, _('Enable Snowplow tracking'), class: 'form-check-label'
= f.gitlab_ui_checkbox_component :snowplow_enabled, _('Enable Snowplow tracking'), checkbox_options: { data: { qa_selector: 'snowplow_enabled_checkbox' } }
.form-group
= f.label :snowplow_collector_hostname, _('Collector hostname'), class: 'label-light'
= f.text_field :snowplow_collector_hostname, class: 'form-control gl-form-input', placeholder: 'snowplow.example.com'

View file

@ -16,20 +16,14 @@
.settings-content
= form_for @application_setting, url: general_admin_application_settings_path(anchor: 'js-sourcegraph-settings'), html: { class: 'fieldset-form', id: 'sourcegraph-settings' } do |f|
= gitlab_ui_form_for @application_setting, url: general_admin_application_settings_path(anchor: 'js-sourcegraph-settings'), html: { class: 'fieldset-form', id: 'sourcegraph-settings' } do |f|
= form_errors(@application_setting)
%fieldset
.form-group
.form-check
= f.check_box :sourcegraph_enabled, class: 'form-check-input'
= f.label :sourcegraph_enabled, s_('SourcegraphAdmin|Enable Sourcegraph'), class: 'form-check-label'
= f.gitlab_ui_checkbox_component :sourcegraph_enabled, s_('SourcegraphAdmin|Enable Sourcegraph')
.form-group
.form-check
= f.check_box :sourcegraph_public_only, class: 'form-check-input'
= f.label :sourcegraph_public_only, s_('SourcegraphAdmin|Block on private and internal projects'), class: 'form-check-label'
.form-text.text-muted
= s_('SourcegraphAdmin|Only public projects have code intelligence enabled and communicate with Sourcegraph.')
= f.gitlab_ui_checkbox_component :sourcegraph_public_only, s_('SourcegraphAdmin|Block on private and internal projects'), help_text: s_('SourcegraphAdmin|Only public projects have code intelligence enabled and communicate with Sourcegraph.')
.form-group
= f.label :sourcegraph_url, s_('SourcegraphAdmin|Sourcegraph URL'), class: 'label-bold'
= f.text_field :sourcegraph_url, class: 'form-control gl-form-input', placeholder: s_('SourcegraphAdmin|https://sourcegraph.example.com')

View file

@ -23,9 +23,10 @@
- else
= link_to 'discussion', target_url
- if include_stylesheet_link && discussion&.diff_discussion? && discussion.on_text?
= content_for :head do
= stylesheet_link_tag 'mailers/highlighted_diff_email'
- if discussion&.diff_discussion? && discussion.on_text?
- if include_stylesheet_link
= content_for :head do
= stylesheet_link_tag 'mailers/highlighted_diff_email'
%table.code.gl-mb-5
= render partial: "projects/diffs/email_line",

View file

@ -2,8 +2,7 @@
.row
.col-lg-12
.gl-alert.gl-alert-info{ role: 'alert' }
= sprite_icon('information-o', css_class: 'gl-icon gl-alert-icon gl-alert-icon-no-title')
= render Pajamas::AlertComponent.new(dismissible: false) do
.gl-alert-body
= s_('AlertSettings|You can now set up alert endpoints for manually configured Prometheus instances in the Alerts section on the Operations settings page. Alert endpoint fields on this page have been deprecated.')
.gl-alert-actions

View file

@ -14,7 +14,9 @@ module Packages
artifact.destroy!
rescue StandardError
artifact&.update_column(:status, :error)
unless artifact&.destroyed?
artifact&.update_column(:status, :error)
end
end
after_destroy
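
The added `unless artifact&.destroyed?` guard covers destroy! raising after the row has already been deleted (for example from a callback), a case the new CleanupPackageFileWorker spec at the end of this commit exercises; update_column would otherwise raise on a destroyed record. A simplified sketch, with destroy_package_file as a hypothetical method name rather than the worker's real one:

def destroy_package_file(artifact)
  artifact.destroy!
rescue StandardError
  # Only flag :error while the record still exists; Rails refuses to
  # update_column on a destroyed record.
  artifact&.update_column(:status, :error) unless artifact&.destroyed?
end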

View file

@ -82,7 +82,7 @@ module ContainerRegistry
def waiting_time_passed?
delay = migration.enqueue_waiting_time
return true if delay == 0
return true unless last_step_completed_repository
return true unless last_step_completed_repository&.last_import_step_done_at
last_step_completed_repository.last_import_step_done_at < Time.zone.now - delay
end
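
waiting_time_passed? now also returns early when the most recently completed repository has no completion timestamp at all, a case the EnqueuerWorker spec below adds explicitly; without the `&.last_import_step_done_at` guard, comparing nil against a Time would raise. A minimal standalone sketch of the guard, with plain Time.now standing in for Time.zone.now:

def waiting_time_passed_sketch?(last_done_at, delay)
  return true if delay == 0
  return true unless last_done_at # nil timestamp: treat the wait as already over

  last_done_at < Time.now - delay
end

waiting_time_passed_sketch?(nil, 3600)           # => true, instead of a NoMethodError
waiting_time_passed_sketch?(Time.now - 60, 3600) # => false, still inside the waiting window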

View file

@ -0,0 +1,20 @@
# frozen_string_literal: true
class AddNonMigratedIndexToContainerRepositories < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
# follow up issue: https://gitlab.com/gitlab-org/gitlab/-/issues/358407
INDEX_NAME = 'tmp_idx_container_repos_on_non_migrated'
MIGRATION_PHASE_1_ENDED_AT = '2022-01-23'
def up
add_concurrent_index :container_repositories,
[:project_id, :id],
name: INDEX_NAME,
where: "migration_state != 'import_done' AND created_at < '#{MIGRATION_PHASE_1_ENDED_AT}'"
end
def down
remove_concurrent_index_by_name :container_repositories, INDEX_NAME
end
end

View file

@ -0,0 +1,24 @@
# frozen_string_literal: true
class UpdateIndexOnGreatedDoneAtOnContainerRepositories < Gitlab::Database::Migration[1.0]
OLD_INDEX_NAME = 'index_container_repositories_on_greatest_done_at'
NEW_INDEX_NAME = 'index_container_repositories_on_greatest_completed_at'
disable_ddl_transaction!
def up
add_concurrent_index :container_repositories,
'GREATEST(migration_pre_import_done_at, migration_import_done_at, migration_aborted_at, migration_skipped_at)',
where: "migration_state IN ('import_done', 'pre_import_done', 'import_aborted', 'import_skipped')",
name: NEW_INDEX_NAME
remove_concurrent_index_by_name :container_repositories, OLD_INDEX_NAME
end
def down
add_concurrent_index :container_repositories,
'GREATEST(migration_pre_import_done_at, migration_import_done_at, migration_aborted_at)',
where: "migration_state IN ('import_done', 'pre_import_done', 'import_aborted')",
name: OLD_INDEX_NAME
remove_concurrent_index_by_name :container_repositories, NEW_INDEX_NAME
end
end

View file

@ -0,0 +1 @@
c4dcb2b2e1262d63c56e171796f1cb6fb76d4b7dc090cf585f17a451c2fa784f

View file

@ -0,0 +1 @@
01d8ab924e8c76b54d316ba94089eabea28999e4ce747e6c51803e1ea97b37df

View file

@ -27308,7 +27308,7 @@ CREATE INDEX index_composer_cache_files_where_namespace_id_is_null ON packages_c
CREATE INDEX index_container_expiration_policies_on_next_run_at_and_enabled ON container_expiration_policies USING btree (next_run_at, enabled);
CREATE INDEX index_container_repositories_on_greatest_done_at ON container_repositories USING btree (GREATEST(migration_pre_import_done_at, migration_import_done_at, migration_aborted_at)) WHERE (migration_state = ANY (ARRAY['import_done'::text, 'pre_import_done'::text, 'import_aborted'::text]));
CREATE INDEX index_container_repositories_on_greatest_completed_at ON container_repositories USING btree (GREATEST(migration_pre_import_done_at, migration_import_done_at, migration_aborted_at, migration_skipped_at)) WHERE (migration_state = ANY (ARRAY['import_done'::text, 'pre_import_done'::text, 'import_aborted'::text, 'import_skipped'::text]));
CREATE INDEX index_container_repositories_on_migration_state_import_done_at ON container_repositories USING btree (migration_state, migration_import_done_at);
@ -29646,6 +29646,8 @@ CREATE INDEX tmp_gitlab_subscriptions_max_seats_used_migration ON gitlab_subscri
CREATE INDEX tmp_gitlab_subscriptions_max_seats_used_migration_2 ON gitlab_subscriptions USING btree (id) WHERE ((start_date < '2021-08-02'::date) AND (max_seats_used <> 0) AND (max_seats_used > seats_in_use) AND (max_seats_used > seats));
CREATE INDEX tmp_idx_container_repos_on_non_migrated ON container_repositories USING btree (project_id, id) WHERE ((migration_state <> 'import_done'::text) AND (created_at < '2022-01-23 00:00:00'::timestamp without time zone));
CREATE INDEX tmp_index_ci_job_artifacts_on_id_where_trace_and_expire_at ON ci_job_artifacts USING btree (id) WHERE ((file_type = 3) AND (expire_at = ANY (ARRAY['2021-04-22 00:00:00+00'::timestamp with time zone, '2021-05-22 00:00:00+00'::timestamp with time zone, '2021-06-22 00:00:00+00'::timestamp with time zone, '2022-01-22 00:00:00+00'::timestamp with time zone, '2022-02-22 00:00:00+00'::timestamp with time zone, '2022-03-22 00:00:00+00'::timestamp with time zone, '2022-04-22 00:00:00+00'::timestamp with time zone])));
CREATE INDEX tmp_index_container_repositories_on_id_migration_state ON container_repositories USING btree (id, migration_state);

View file

@ -76,7 +76,14 @@ To avoid this scenario:
1. Select the **Skip outdated deployment jobs** checkbox.
1. Select **Save changes**.
Older deployment jobs are skipped when a new deployment starts.
When a new deployment starts, older deployment jobs are skipped. Skipped jobs are labeled:
- `forward deployment failure` in the pipeline view.
- `The deployment job is older than the previously succeeded deployment job, and therefore cannot be run`
when viewing the completed job.
Job age is determined by the job start time, not the commit time, so a newer commit
can be skipped in some circumstances.
For more information, see [Deployment safety](../environments/deployment_safety.md).

View file

@ -37,14 +37,24 @@ module ContainerRegistry
class << self
private
def with_dummy_client(return_value_if_disabled: nil)
def with_dummy_client(return_value_if_disabled: nil, token_config: { type: :full_access_token, path: nil })
registry_config = Gitlab.config.registry
unless registry_config.enabled && registry_config.api_url.present?
return return_value_if_disabled
end
token = Auth::ContainerRegistryAuthenticationService.access_token([], [])
yield new(registry_config.api_url, token: token)
yield new(registry_config.api_url, token: token_from(token_config))
end
def token_from(config)
case config[:type]
when :full_access_token
Auth::ContainerRegistryAuthenticationService.access_token([], [])
when :nested_repositories_token
return unless config[:path]
Auth::ContainerRegistryAuthenticationService.pull_nested_repositories_access_token(config[:path])
end
end
end

View file

@ -27,6 +27,12 @@ module ContainerRegistry
end
end
def self.deduplicated_size(path)
with_dummy_client(token_config: { type: :nested_repositories_token, path: path }) do |client|
client.repository_details(path, sizing: :self_with_descendants)['size_bytes']
end
end
# https://gitlab.com/gitlab-org/container-registry/-/blob/master/docs-gitlab/api.md#compliance-check
def supports_gitlab_api?
strong_memoize(:supports_gitlab_api) do
@ -78,10 +84,10 @@ module ContainerRegistry
end
end
def repository_details(path, with_size: false)
def repository_details(path, sizing: nil)
with_token_faraday do |faraday_client|
req = faraday_client.get("/gitlab/v1/repositories/#{path}/") do |req|
req.params['size'] = 'self' if with_size
req.params['size'] = sizing if sizing
end
break {} unless req.success?
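
repository_details swaps the boolean with_size: for a sizing: keyword, so the same endpoint can return either the repository's own size or the deduplicated size including descendant repositories. A sketch of the requests each call produces (client assumed to be a ContainerRegistry::GitlabApiClient instance; the shapes match the URL built above and the spec stubs further down):

client.repository_details('group/project')
# GET /gitlab/v1/repositories/group/project/

client.repository_details('group/project', sizing: :self)
# GET /gitlab/v1/repositories/group/project/?size=self

client.repository_details('group/project', sizing: :self_with_descendants)
# GET /gitlab/v1/repositories/group/project/?size=self_with_descendants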

View file

@ -39298,6 +39298,9 @@ msgstr ""
msgid "To access this domain create a new DNS record"
msgstr ""
msgid "To activate your trial, we need additional details from you."
msgstr ""
msgid "To add a custom suffix, set up a Service Desk email address. %{linkStart}Learn more.%{linkEnd}"
msgstr ""
@ -39307,6 +39310,9 @@ msgstr ""
msgid "To ask someone to look at a merge request, select %{strongStart}Request attention%{strongEnd}. Select again to remove the request."
msgstr ""
msgid "To complete registration, we need additional details from you."
msgstr ""
msgid "To confirm, type %{phrase_code}"
msgstr ""

View file

@ -25,6 +25,7 @@ import { redirectTo } from '~/lib/utils/url_utility';
import { isLoggedIn } from '~/lib/utils/common_utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import httpStatusCodes from '~/lib/utils/http_status';
import LineHighlighter from '~/blob/line_highlighter';
import {
simpleViewerMock,
richViewerMock,
@ -39,6 +40,7 @@ import {
jest.mock('~/repository/components/blob_viewers');
jest.mock('~/lib/utils/url_utility');
jest.mock('~/lib/utils/common_utils');
jest.mock('~/blob/line_highlighter');
let wrapper;
let mockResolver;
@ -173,20 +175,30 @@ describe('Blob content viewer component', () => {
});
describe('legacy viewers', () => {
const legacyViewerUrl = 'some_file.js?format=json&viewer=simple';
const fileType = 'text';
const highlightJs = false;
it('loads a legacy viewer when the fileType is text and the highlightJs feature is turned off', async () => {
await createComponent({
blob: { ...simpleViewerMock, fileType: 'text', highlightJs: false },
blob: { ...simpleViewerMock, fileType, highlightJs },
});
expect(mockAxios.history.get).toHaveLength(1);
expect(mockAxios.history.get[0].url).toEqual('some_file.js?format=json&viewer=simple');
expect(mockAxios.history.get[0].url).toBe(legacyViewerUrl);
});
it('loads a legacy viewer when a viewer component is not available', async () => {
await createComponent({ blob: { ...simpleViewerMock, fileType: 'unknown' } });
expect(mockAxios.history.get).toHaveLength(1);
expect(mockAxios.history.get[0].url).toEqual('some_file.js?format=json&viewer=simple');
expect(mockAxios.history.get[0].url).toBe(legacyViewerUrl);
});
it('loads the LineHighlighter', async () => {
mockAxios.onGet(legacyViewerUrl).replyOnce(httpStatusCodes.OK, 'test');
await createComponent({ blob: { ...simpleViewerMock, fileType, highlightJs } });
expect(LineHighlighter).toHaveBeenCalled();
});
});
});

View file

@ -2,9 +2,6 @@ import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { HIGHLIGHT_CLASS_NAME } from '~/vue_shared/components/blob_viewers/constants';
import SimpleViewer from '~/vue_shared/components/blob_viewers/simple_viewer.vue';
import LineHighlighter from '~/blob/line_highlighter';
jest.mock('~/blob/line_highlighter');
describe('Blob Simple Viewer component', () => {
let wrapper;
@ -30,20 +27,6 @@ describe('Blob Simple Viewer component', () => {
wrapper.destroy();
});
describe('refactorBlobViewer feature flag', () => {
it('loads the LineHighlighter if refactorBlobViewer is enabled', () => {
createComponent('', false, { refactorBlobViewer: true });
expect(LineHighlighter).toHaveBeenCalled();
});
it('does not load the LineHighlighter if refactorBlobViewer is disabled', () => {
createComponent('', false, { refactorBlobViewer: false });
expect(LineHighlighter).not.toHaveBeenCalled();
});
});
it('does not fail if content is empty', () => {
const spy = jest.spyOn(window.console, 'error');
createComponent('');

View file

@ -174,31 +174,26 @@ RSpec.describe ContainerRegistry::GitlabApiClient do
describe '#repository_details' do
let(:path) { 'namespace/path/to/repository' }
let(:response) { { foo: :bar, this: :is_a_test } }
let(:with_size) { true }
subject { client.repository_details(path, with_size: with_size) }
subject { client.repository_details(path, sizing: sizing) }
context 'with size' do
before do
stub_repository_details(path, with_size: with_size, respond_with: response)
[:self, :self_with_descendants, nil].each do |size_type|
context "with sizing #{size_type}" do
let(:sizing) { size_type }
before do
stub_repository_details(path, sizing: sizing, respond_with: response)
end
it { is_expected.to eq(response.stringify_keys.deep_transform_values(&:to_s)) }
end
it { is_expected.to eq(response.stringify_keys.deep_transform_values(&:to_s)) }
end
context 'without_size' do
let(:with_size) { false }
before do
stub_repository_details(path, with_size: with_size, respond_with: response)
end
it { is_expected.to eq(response.stringify_keys.deep_transform_values(&:to_s)) }
end
context 'with non successful response' do
let(:sizing) { nil }
before do
stub_repository_details(path, with_size: with_size, status_code: 404)
stub_repository_details(path, sizing: sizing, status_code: 404)
end
it { is_expected.to eq({}) }
@ -263,6 +258,54 @@ RSpec.describe ContainerRegistry::GitlabApiClient do
end
end
describe '.deduplicated_size' do
let(:path) { 'foo/bar' }
let(:response) { { 'size_bytes': 555 } }
let(:registry_enabled) { true }
subject { described_class.deduplicated_size(path) }
before do
stub_container_registry_config(enabled: registry_enabled, api_url: registry_api_url, key: 'spec/fixtures/x509_certificate_pk.key')
end
context 'with successful response' do
before do
expect(Auth::ContainerRegistryAuthenticationService).to receive(:pull_nested_repositories_access_token).with(path).and_return(token)
stub_repository_details(path, sizing: :self_with_descendants, status_code: 200, respond_with: response)
end
it { is_expected.to eq(555) }
end
context 'with unsuccessful response' do
before do
expect(Auth::ContainerRegistryAuthenticationService).to receive(:pull_nested_repositories_access_token).with(path).and_return(token)
stub_repository_details(path, sizing: :self_with_descendants, status_code: 404, respond_with: response)
end
it { is_expected.to eq(nil) }
end
context 'with the registry disabled' do
let(:registry_enabled) { false }
it { is_expected.to eq(nil) }
end
context 'with a nil path' do
let(:path) { nil }
let(:token) { nil }
before do
expect(Auth::ContainerRegistryAuthenticationService).not_to receive(:pull_nested_repositories_access_token)
stub_repository_details(path, sizing: :self_with_descendants, status_code: 401, respond_with: response)
end
it { is_expected.to eq(nil) }
end
end
def stub_pre_import(path, status_code, pre:)
import_type = pre ? 'pre' : 'final'
stub_request(:put, "#{registry_api_url}/gitlab/v1/import/#{path}/?import_type=#{import_type}")
@ -303,11 +346,15 @@ RSpec.describe ContainerRegistry::GitlabApiClient do
)
end
def stub_repository_details(path, with_size: true, status_code: 200, respond_with: {})
def stub_repository_details(path, sizing: nil, status_code: 200, respond_with: {})
url = "#{registry_api_url}/gitlab/v1/repositories/#{path}/"
url += "?size=self" if with_size
url += "?size=#{sizing}" if sizing
headers = { 'Accept' => described_class::JSON_TYPE }
headers['Authorization'] = "bearer #{token}" if token
stub_request(:get, url)
.with(headers: { 'Accept' => described_class::JSON_TYPE, 'Authorization' => "bearer #{token}" })
.with(headers: headers)
.to_return(status: status_code, body: respond_with.to_json, headers: { 'Content-Type' => described_class::JSON_TYPE })
end
end

View file

@ -652,7 +652,7 @@ RSpec.describe ContainerRepository, :aggregate_failures do
context 'supports gitlab api on .com with a recent repository' do
before do
expect(repository.gitlab_api_client).to receive(:supports_gitlab_api?).and_return(true)
expect(repository.gitlab_api_client).to receive(:repository_details).with(repository.path, with_size: true).and_return(response)
expect(repository.gitlab_api_client).to receive(:repository_details).with(repository.path, sizing: :self).and_return(response)
end
context 'with a size_bytes field' do
@ -1076,6 +1076,43 @@ RSpec.describe ContainerRepository, :aggregate_failures do
end
end
describe '.all_migrated?' do
let_it_be(:project) { create(:project) }
subject { project.container_repositories.all_migrated? }
context 'with no repositories' do
it { is_expected.to be_truthy }
end
context 'with only recent repositories' do
let_it_be(:container_repository1) { create(:container_repository, project: project) }
let_it_be_with_reload(:container_repository2) { create(:container_repository, project: project) }
it { is_expected.to be_truthy }
context 'with one old non migrated repository' do
before do
container_repository2.update!(created_at: described_class::MIGRATION_PHASE_1_ENDED_AT - 3.months)
end
it { is_expected.to be_falsey }
end
context 'with one old migrated repository' do
before do
container_repository2.update!(
created_at: described_class::MIGRATION_PHASE_1_ENDED_AT - 3.months,
migration_state: 'import_done',
migration_import_done_at: Time.zone.now
)
end
it { is_expected.to be_truthy }
end
end
end
describe '.with_enabled_policy' do
let_it_be(:repository) { create(:container_repository) }
let_it_be(:repository2) { create(:container_repository) }
@ -1271,11 +1308,12 @@ RSpec.describe ContainerRepository, :aggregate_failures do
let_it_be(:import_done_repository) { create(:container_repository, :import_done, migration_pre_import_done_at: 3.days.ago, migration_import_done_at: 2.days.ago) }
let_it_be(:import_aborted_repository) { create(:container_repository, :import_aborted, migration_pre_import_done_at: 5.days.ago, migration_aborted_at: 1.day.ago) }
let_it_be(:pre_import_done_repository) { create(:container_repository, :pre_import_done, migration_pre_import_done_at: 1.hour.ago) }
let_it_be(:import_skipped_repository) { create(:container_repository, :import_skipped, migration_skipped_at: 90.minutes.ago) }
subject { described_class.recently_done_migration_step }
it 'returns completed imports by done_at date' do
expect(subject.to_a).to eq([pre_import_done_repository, import_aborted_repository, import_done_repository])
expect(subject.to_a).to eq([pre_import_done_repository, import_skipped_repository, import_aborted_repository, import_done_repository])
end
end
@ -1296,13 +1334,15 @@ RSpec.describe ContainerRepository, :aggregate_failures do
describe '#last_import_step_done_at' do
let_it_be(:aborted_at) { Time.zone.now - 1.hour }
let_it_be(:pre_import_done_at) { Time.zone.now - 2.hours }
let_it_be(:skipped_at) { Time.zone.now - 3.hours }
subject { repository.last_import_step_done_at }
before do
repository.update_columns(
migration_pre_import_done_at: pre_import_done_at,
migration_aborted_at: aborted_at
migration_aborted_at: aborted_at,
migration_skipped_at: skipped_at
)
end

View file

@ -2715,6 +2715,39 @@ RSpec.describe Project, factory_default: :keep do
end
end
describe '#container_repositories_size' do
let(:project) { build(:project) }
subject { project.container_repositories_size }
context 'on gitlab.com' do
where(:no_container_repositories, :all_migrated, :gitlab_api_supported, :returned_size, :expected_result) do
true | nil | nil | nil | 0
false | false | nil | nil | nil
false | true | false | nil | nil
false | true | true | 555 | 555
false | true | true | nil | nil
end
with_them do
before do
stub_container_registry_config(enabled: true, api_url: 'http://container-registry', key: 'spec/fixtures/x509_certificate_pk.key')
allow(Gitlab).to receive(:com?).and_return(true)
allow(project.container_repositories).to receive(:empty?).and_return(no_container_repositories)
allow(project.container_repositories).to receive(:all_migrated?).and_return(all_migrated)
allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(gitlab_api_supported)
allow(ContainerRegistry::GitlabApiClient).to receive(:deduplicated_size).with(project.full_path).and_return(returned_size)
end
it { is_expected.to eq(expected_result) }
end
end
context 'not on gitlab.com' do
it { is_expected.to eq(nil) }
end
end
describe '#container_registry_enabled=' do
let_it_be_with_reload(:project) { create(:project) }

View file

@ -86,6 +86,13 @@ module CycleAnalyticsHelpers
wait_for_stages_to_load(ready_selector)
end
def select_value_stream(value_stream_name)
toggle_value_stream_dropdown
page.find('[data-testid="dropdown-value-streams"]').all('li button').find { |item| item.text == value_stream_name.to_s }.click
wait_for_requests
end
def toggle_dropdown(field)
page.within("[data-testid*='#{field}']") do
find('.dropdown-toggle').click
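
The new select_value_stream helper wraps the dropdown interaction for switching value streams. A hypothetical usage in a feature spec that already includes CycleAnalyticsHelpers (the value stream name is illustrative):

it 'switches to another value stream' do
  select_value_stream('New value stream')

  expect(page).to have_text('New value stream')
end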

View file

@ -8,8 +8,8 @@ RSpec.shared_context 'container registry client stubs' do
end
end
def stub_container_registry_gitlab_api_repository_details(client, path:, size_bytes:)
allow(client).to receive(:repository_details).with(path, with_size: true).and_return('size_bytes' => size_bytes)
def stub_container_registry_gitlab_api_repository_details(client, path:, size_bytes:, sizing: :self)
allow(client).to receive(:repository_details).with(path, sizing: sizing).and_return('size_bytes' => size_bytes)
end
def stub_container_registry_gitlab_api_network_error(client_method: :supports_gitlab_api?)

View file

@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe ContainerRegistry::Migration::EnqueuerWorker, :aggregate_failures, :clean_gitlab_redis_shared_state do
using RSpec::Parameterized::TableSyntax
include ExclusiveLeaseHelpers
let_it_be_with_reload(:container_repository) { create(:container_repository, created_at: 2.days.ago) }
@ -131,14 +132,34 @@ RSpec.describe ContainerRegistry::Migration::EnqueuerWorker, :aggregate_failures
end
context 'too soon before previous completed import step' do
before do
create(:container_repository, :import_done, migration_import_done_at: 1.minute.ago)
allow(ContainerRegistry::Migration).to receive(:enqueue_waiting_time).and_return(1.hour)
where(:state, :timestamp) do
:import_done | :migration_import_done_at
:pre_import_done | :migration_pre_import_done_at
:import_aborted | :migration_aborted_at
:import_skipped | :migration_skipped_at
end
it_behaves_like 'no action' do
with_them do
before do
expect_log_extra_metadata(waiting_time_passed: false, current_waiting_time_setting: 1.hour)
allow(ContainerRegistry::Migration).to receive(:enqueue_waiting_time).and_return(1.hour)
create(:container_repository, state, timestamp => 1.minute.ago)
end
it_behaves_like 'no action' do
before do
expect_log_extra_metadata(waiting_time_passed: false, current_waiting_time_setting: 1.hour)
end
end
end
context 'when last completed repository has nil timestamps' do
before do
allow(ContainerRegistry::Migration).to receive(:enqueue_waiting_time).and_return(1.hour)
create(:container_repository, migration_state: 'import_done')
end
it 'continues to try the next import' do
expect { subject }.to change { container_repository.reload.migration_state }
end
end
end

View file

@ -43,16 +43,36 @@ RSpec.describe Packages::CleanupPackageFileWorker do
end
end
context 'with an error during the destroy' do
context 'with a package file to destroy' do
let_it_be(:package_file) { create(:package_file, :pending_destruction) }
before do
expect(worker).to receive(:log_metadata).and_raise('Error!')
context 'with an error during the destroy' do
before do
expect(worker).to receive(:log_metadata).and_raise('Error!')
end
it 'handles the error' do
expect { subject }.to change { Packages::PackageFile.error.count }.from(0).to(1)
expect(package_file.reload).to be_error
end
end
it 'handles the error' do
expect { subject }.to change { Packages::PackageFile.error.count }.from(0).to(1)
expect(package_file.reload).to be_error
context 'when trying to destroy a destroyed record' do
before do
allow_next_found_instance_of(Packages::PackageFile) do |package_file|
destroy_method = package_file.method(:destroy!)
allow(package_file).to receive(:destroy!) do
destroy_method.call
raise 'Error!'
end
end
end
it 'handles the error' do
expect { subject }.to change { Packages::PackageFile.count }.by(-1)
end
end
end