Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-04-07 09:08:40 +00:00
parent c793bb6303
commit f6f4bc2bc0
20 changed files with 315 additions and 46 deletions


@@ -53,6 +53,7 @@ export default {
   <gl-modal hide-footer modal-id="work-item-detail-modal" :visible="visible" @hide="closeModal">
     <template #modal-header>
       <div class="gl-w-full gl-display-flex gl-align-items-center gl-justify-content-end">
+        <h2 class="modal-title gl-mr-auto">{{ s__('WorkItem|Work Item') }}</h2>
         <work-item-actions
           :work-item-id="workItemId"
           :can-update="canUpdate"


@@ -22,11 +22,11 @@ module Packages
     def packages_for_group_projects(installable_only: false)
       packages = ::Packages::Package
-        .preload_pipelines
         .including_project_route
         .including_tags
         .for_projects(group_projects_visible_to_current_user.select(:id))
         .sort_by_attribute("#{params[:order_by]}_#{params[:sort]}")

+      packages = packages.preload_pipelines if preload_pipelines
       packages = filter_with_version(packages)
       packages = filter_by_package_type(packages)
@@ -59,5 +59,9 @@ module Packages
     def exclude_subgroups?
       params[:exclude_subgroups]
     end
+
+    def preload_pipelines
+      params.fetch(:preload_pipelines, true)
+    end
   end
 end
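
The `preload_pipelines` parameter added above defaults to `true`, so existing callers keep eager-loading pipelines while callers that do not render pipeline data can opt out. A minimal sketch of opting out, assuming the finder's usual `(current_user, group, params)` signature and an `execute` entry point; the ordering params are illustrative:

```ruby
# Hypothetical caller that skips the pipelines preload.
finder = ::Packages::GroupPackagesFinder.new(
  current_user,
  group,
  { preload_pipelines: false, order_by: 'created_at', sort: 'desc' }
)

packages = finder.execute # pipelines are not eager-loaded for this result set
```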


@@ -14,9 +14,9 @@ module Packages
     def execute
       packages = project.packages
-        .preload_pipelines
         .including_project_route
         .including_tags

+      packages = packages.preload_pipelines if preload_pipelines
       packages = filter_with_version(packages)
       packages = filter_by_package_type(packages)
@@ -32,5 +32,9 @@ module Packages
     def order_packages(packages)
       packages.sort_by_attribute("#{params[:order_by]}_#{params[:sort]}")
     end
+
+    def preload_pipelines
+      params.fetch(:preload_pipelines, true)
+    end
   end
 end


@@ -12,7 +12,7 @@ module Projects
       if batch.any?
         # We are doing the sum in ruby because the query takes too long when done in SQL
-        total_artifacts_size = batch.sum(&:size)
+        total_artifacts_size = batch.sum { |artifact| artifact.size.to_i }

         Projects::BuildArtifactsSizeRefresh.transaction do
           # Mark the refresh ready for another worker to pick up and process the next batch
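
Replacing `batch.sum(&:size)` with a block that calls `to_i` makes the Ruby-side sum tolerant of artifacts whose `size` is `NULL` (as the updated spec exercises with `size: nil`): `nil.to_i` is `0`, whereas summing a raw `nil` raises. A standalone sketch of the difference:

```ruby
# Plain-Ruby illustration, not the service itself.
Artifact = Struct.new(:size)

batch = [Artifact.new(1), Artifact.new(nil), Artifact.new(5)]

# batch.sum(&:size)                          # => TypeError: nil can't be coerced into Integer
batch.sum { |artifact| artifact.size.to_i }  # => 6, a nil size counts as 0
```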


@@ -8,7 +8,7 @@ module ContainerRegistry
     include Gitlab::Utils::StrongMemoize
     include ExclusiveLeaseGuard

-    DEFAULT_LEASE_TIMEOUT = 1.hour.to_i.freeze
+    DEFAULT_LEASE_TIMEOUT = 30.minutes.to_i.freeze

     data_consistency :always
     feature_category :container_registry


@@ -1801,8 +1801,8 @@ def ready?(**args)
 end
 ```

-In the future this may be able to be done using `InputUnions` if
-[this RFC](https://github.com/graphql/graphql-spec/blob/master/rfcs/InputUnion.md)
+In the future this may be able to be done using `OneOf Input Objects` if
+[this RFC](https://github.com/graphql/graphql-spec/pull/825)
 is merged.

 ## GitLab custom scalars
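
For context, the documentation section touched above describes validating mutually exclusive mutation arguments in `#ready?` until OneOf input objects are available. A minimal sketch of that pattern, with hypothetical argument names:

```ruby
# Hypothetical mutation: exactly one of these arguments must be given.
def ready?(**args)
  if args.slice(:group_path, :project_path).size != 1
    raise Gitlab::Graphql::Errors::ArgumentError,
          'Exactly one of group_path or project_path is required'
  end

  super
end
```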


@@ -307,7 +307,7 @@ reduce the repository size for another import attempt:
 #### Workaround option 2

 NOTE:
-This workaround requires access to the rails console, which isn't available to end-users on GitLab.com.
+This workaround does not account for LFS objects.

 Rather than attempting to push all changes at once, this workaround:


@@ -37,13 +37,13 @@ module Gitlab
           allow_webpack_dev_server(directives)
           allow_letter_opener(directives)
           allow_snowplow_micro(directives) if Gitlab::Tracking.snowplow_micro_enabled?
-          allow_customersdot(directives) if ENV['CUSTOMER_PORTAL_URL'].present?
         end

         allow_websocket_connections(directives)
         allow_cdn(directives, Settings.gitlab.cdn_host) if Settings.gitlab.cdn_host.present?
         allow_sentry(directives) if Gitlab.config.sentry&.enabled && Gitlab.config.sentry&.clientside_dsn
         allow_framed_gitlab_paths(directives)
+        allow_customersdot(directives) if ENV['CUSTOMER_PORTAL_URL'].present?

         # The follow section contains workarounds to patch Safari's lack of support for CSP Level 3
         # See https://gitlab.com/gitlab-org/gitlab/-/issues/343579


@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+# Connection for an array of Active Record instances.
+# Resolvers needs to handle cursors (before and after).
+# This connection will handle (first and last).
+# Supports batch loaded items.
+# Expects the array to use a fixed DESC order. This is similar to
+# ExternallyPaginatedArrayConnection.
+module Gitlab
+  module Graphql
+    module Pagination
+      class ActiveRecordArrayConnection < GraphQL::Pagination::ArrayConnection
+        include ::Gitlab::Graphql::ConnectionCollectionMethods
+        prepend ::Gitlab::Graphql::ConnectionRedaction
+
+        delegate :<<, to: :items
+
+        def nodes
+          load_nodes
+          @nodes
+        end
+
+        def next_page?
+          load_nodes
+
+          if before
+            true
+          elsif first
+            limit_value < items.size
+          else
+            false
+          end
+        end
+
+        def previous_page?
+          load_nodes
+
+          if after
+            true
+          elsif last
+            limit_value < items.size
+          else
+            false
+          end
+        end
+
+        # see https://graphql-ruby.org/pagination/custom_connections#connection-wrapper
+        alias_method :has_next_page, :next_page?
+        alias_method :has_previous_page, :previous_page?
+
+        def cursor_for(item)
+          # item could be a batch loaded item. Sync it to have the id.
+          cursor = { 'id' => Gitlab::Graphql::Lazy.force(item).id.to_s }
+          encode(cursor.to_json)
+        end
+
+        # Part of the implied interface for default objects for BatchLoader: objects must be clonable
+        def dup
+          self.class.new(
+            items.dup,
+            first: first,
+            after: after,
+            max_page_size: max_page_size,
+            last: last,
+            before: before
+          )
+        end
+
+        private
+
+        def limit_value
+          # note: only first _or_ last can be specified, not both
+          @limit_value ||= [first, last, max_page_size].compact.min
+        end
+
+        def load_nodes
+          @nodes ||= begin
+            limited_nodes = items
+
+            limited_nodes = limited_nodes.first(first) if first
+            limited_nodes = limited_nodes.last(last) if last
+
+            limited_nodes
+          end
+        end
+      end
+    end
+  end
+end
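
A sketch of how a resolver might use the new connection to paginate a fixed, DESC-ordered array (the association and limit are hypothetical; the keyword arguments mirror the class above and its spec):

```ruby
# Hypothetical resolver body: wrap a bounded, already-ordered array so
# first/last pagination, redaction, and cursors work as usual.
recent_build_infos = package.build_infos.order(id: :desc).limit(100).to_a

Gitlab::Graphql::Pagination::ActiveRecordArrayConnection.new(
  recent_build_infos,
  context: context,
  first: args[:first],
  last: args[:last],
  max_page_size: 20
)
```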


@@ -50,7 +50,7 @@ module Gitlab
       def alt_usage_data(value = nil, fallback: FALLBACK, &block)
         if block_given?
-          { alt_usage_data_block: block.to_s }
+          { alt_usage_data_block: "non-SQL usage data block" }
         else
           { alt_usage_data_value: value }
         end
@@ -58,9 +58,9 @@ module Gitlab
       def redis_usage_data(counter = nil, &block)
         if block_given?
-          { redis_usage_data_block: block.to_s }
+          { redis_usage_data_block: "non-SQL usage data block" }
         elsif counter.present?
-          { redis_usage_data_counter: counter }
+          { redis_usage_data_counter: counter.to_s }
         end
       end
@@ -74,6 +74,13 @@ module Gitlab
       def epics_deepest_relationship_level
         { epics_deepest_relationship_level: 0 }
       end
+
+      def topology_usage_data
+        {
+          duration_s: 0,
+          failures: []
+        }
+      end
     end
   end
 end
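
With these changes the queries-only variant of usage data reporting no longer serializes Procs or class objects: blocks collapse to a fixed placeholder string and counters are reported by name. Expected shapes, as exercised by the spec changes further below:

```ruby
Gitlab::UsageDataQueries.redis_usage_data(Gitlab::UsageDataCounters::WikiPageCounter)
# => { redis_usage_data_counter: "Gitlab::UsageDataCounters::WikiPageCounter" }

Gitlab::UsageDataQueries.redis_usage_data { 42 }
# => { redis_usage_data_block: "non-SQL usage data block" }

Gitlab::UsageDataQueries.alt_usage_data(1)
# => { alt_usage_data_value: 1 }

Gitlab::UsageDataQueries.alt_usage_data { 42 }
# => { alt_usage_data_block: "non-SQL usage data block" }
```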


@@ -24714,19 +24714,19 @@ msgstr ""
 msgid "NamespaceStorageSize|push to your repository, create pipelines, create issues or add comments. To reduce storage capacity, delete unused repositories, artifacts, wikis, issues, and pipelines."
 msgstr ""

-msgid "NamespaceStorage|%{name_with_link} namespace has %{percent} or less namespace storage space remaining."
+msgid "NamespaceStorage|%{name_with_link} namespace has approximately %{percent} namespace storage space remaining."
 msgstr ""

 msgid "NamespaceStorage|%{name_with_link} namespace has exceeded its namespace storage limit."
 msgstr ""

-msgid "NamespaceStorage|%{name}(%{url}) namespace has %{percent} or less namespace storage space remaining."
+msgid "NamespaceStorage|%{name}(%{url}) namespace has approximately %{percent} namespace storage space remaining."
 msgstr ""

 msgid "NamespaceStorage|%{name}(%{url}) namespace has exceeded its namespace storage limit."
 msgstr ""

-msgid "NamespaceStorage|Action required: Less than %{percentage_of_available_storage}%% of namespace storage remains for %{namespace_name}"
+msgid "NamespaceStorage|Action required: Approximately %{percentage_of_available_storage}%% of namespace storage remains for %{namespace_name}"
 msgstr ""

 msgid "NamespaceStorage|Action required: Storage has been exceeded for %{namespace_name}"
@@ -42696,6 +42696,9 @@ msgstr ""
 msgid "WorkItem|Type"
 msgstr ""

+msgid "WorkItem|Work Item"
+msgstr ""
+
 msgid "WorkItem|Work Items"
 msgstr ""


@@ -247,6 +247,8 @@ module QA
         else
           find_element(name, **original_kwargs).disabled? == disabled
         end
+      rescue Capybara::ElementNotFound
+        false
       end

       # Check for the element before waiting for requests, just in case unrelated requests are in progress.


@@ -149,6 +149,22 @@ RSpec.describe Packages::GroupPackagesFinder do
       it { is_expected.to match_array([package1, package2]) }
     end

+    context 'preload_pipelines' do
+      it 'preloads pipelines by default' do
+        expect(Packages::Package).to receive(:preload_pipelines).and_call_original
+        expect(subject).to match_array([package1, package2])
+      end
+
+      context 'set to false' do
+        let(:params) { { preload_pipelines: false } }
+
+        it 'does not preload pipelines' do
+          expect(Packages::Package).not_to receive(:preload_pipelines)
+          expect(subject).to match_array([package1, package2])
+        end
+      end
+    end
+
     context 'with package_name' do
       let_it_be(:named_package) { create(:maven_package, project: project, name: 'maven') }


@@ -81,6 +81,22 @@ RSpec.describe ::Packages::PackagesFinder do
       it { is_expected.to match_array([conan_package, maven_package]) }
     end

+    context 'preload_pipelines' do
+      it 'preloads pipelines by default' do
+        expect(Packages::Package).to receive(:preload_pipelines).and_call_original
+        expect(subject).to match_array([maven_package, conan_package])
+      end
+
+      context 'set to false' do
+        let(:params) { { preload_pipelines: false } }
+
+        it 'does not preload pipelines' do
+          expect(Packages::Package).not_to receive(:preload_pipelines)
+          expect(subject).to match_array([maven_package, conan_package])
+        end
+      end
+    end
+
     it_behaves_like 'concerning versionless param'
     it_behaves_like 'concerning package statuses'
   end


@@ -36,6 +36,12 @@ describe('WorkItemDetailModal component', () => {
     });
   });

+  it('renders heading', () => {
+    createComponent();
+
+    expect(wrapper.find('h2').text()).toBe('Work Item');
+  });
+
   it('renders WorkItemDetail', () => {
     createComponent();


@@ -107,24 +107,8 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
       stub_env('CUSTOMER_PORTAL_URL', customer_portal_url)
     end

-    context 'when in production' do
-      before do
-        allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('production'))
-      end
-
-      it 'does not add CUSTOMER_PORTAL_URL to CSP' do
-        expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html http://localhost/-/sandbox/mermaid")
-      end
-    end
-
-    context 'when in development' do
-      before do
-        allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development'))
-      end
-
     it 'adds CUSTOMER_PORTAL_URL to CSP' do
-      expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/rails/letter_opener/ https://customers.example.com http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html http://localhost/-/sandbox/mermaid")
+      expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html http://localhost/-/sandbox/mermaid #{customer_portal_url}")
     end
-    end
   end
 end


@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Graphql::Pagination::ActiveRecordArrayConnection do
+  using RSpec::Parameterized::TableSyntax
+
+  let_it_be(:items) { create_list(:package_build_info, 3) }
+
+  let_it_be(:context) do
+    GraphQL::Query::Context.new(
+      query: GraphQL::Query.new(GitlabSchema, document: nil, context: {}, variables: {}),
+      values: {},
+      object: nil
+    )
+  end
+
+  let(:first) { nil }
+  let(:last) { nil }
+  let(:after) { nil }
+  let(:before) { nil }
+  let(:max_page_size) { nil }
+
+  let(:connection) do
+    described_class.new(
+      items,
+      context: context,
+      first: first,
+      last: last,
+      after: after,
+      before: before,
+      max_page_size: max_page_size
+    )
+  end
+
+  it_behaves_like 'a connection with collection methods'
+
+  it_behaves_like 'a redactable connection' do
+    let(:unwanted) { items[1] }
+  end
+
+  describe '#nodes' do
+    subject { connection.nodes }
+
+    it { is_expected.to match_array(items) }
+
+    context 'with first set' do
+      let(:first) { 2 }
+
+      it { is_expected.to match_array([items[0], items[1]]) }
+    end
+
+    context 'with last set' do
+      let(:last) { 2 }
+
+      it { is_expected.to match_array([items[1], items[2]]) }
+    end
+  end
+
+  describe '#next_page?' do
+    subject { connection.next_page? }
+
+    where(:before, :first, :max_page_size, :result) do
+      nil | nil | nil | false
+      1   | nil | nil | true
+      nil | 1   | nil | true
+      nil | 10  | nil | false
+      nil | 1   | 1   | true
+      nil | 1   | 10  | true
+      nil | 10  | 10  | false
+    end
+
+    with_them do
+      it { is_expected.to eq(result) }
+    end
+  end
+
+  describe '#previous_page?' do
+    subject { connection.previous_page? }
+
+    where(:after, :last, :max_page_size, :result) do
+      nil | nil | nil | false
+      1   | nil | nil | true
+      nil | 1   | nil | true
+      nil | 10  | nil | false
+      nil | 1   | 1   | true
+      nil | 1   | 10  | true
+      nil | 10  | 10  | false
+    end
+
+    with_them do
+      it { is_expected.to eq(result) }
+    end
+  end
+
+  describe '#cursor_for' do
+    let(:item) { items[0] }
+    let(:expected_result) do
+      GitlabSchema.cursor_encoder.encode(
+        Gitlab::Json.dump(id: item.id.to_s),
+        nonce: true
+      )
+    end
+
+    subject { connection.cursor_for(item) }
+
+    it { is_expected.to eq(expected_result) }
+
+    context 'with a BatchLoader::GraphQL item' do
+      let_it_be(:user) { create(:user) }
+
+      let(:item) { ::Gitlab::Graphql::Loaders::BatchModelLoader.new(::User, user.id).find }
+      let(:expected_result) do
+        GitlabSchema.cursor_encoder.encode(
+          Gitlab::Json.dump(id: user.id.to_s),
+          nonce: true
+        )
+      end
+
+      it { is_expected.to eq(expected_result) }
+    end
+  end
+
+  describe '#dup' do
+    subject { connection.dup }
+
+    it 'properly handles items duplication' do
+      connection2 = subject
+
+      connection2 << create(:package_build_info)
+
+      expect(connection.items).not_to eq(connection2.items)
+    end
+  end
+end


@@ -34,14 +34,14 @@ RSpec.describe Gitlab::UsageDataQueries do
   describe '.redis_usage_data' do
     subject(:redis_usage_data) { described_class.redis_usage_data { 42 } }

-    it 'returns a class for redis_usage_data with a counter call' do
+    it 'returns a stringified class for redis_usage_data with a counter call' do
       expect(described_class.redis_usage_data(Gitlab::UsageDataCounters::WikiPageCounter))
-        .to eq(redis_usage_data_counter: Gitlab::UsageDataCounters::WikiPageCounter)
+        .to eq(redis_usage_data_counter: "Gitlab::UsageDataCounters::WikiPageCounter")
     end

-    it 'returns a stringified block for redis_usage_data with a block' do
+    it 'returns a placeholder string for redis_usage_data with a block' do
       is_expected.to include(:redis_usage_data_block)
-      expect(redis_usage_data[:redis_usage_data_block]).to start_with('#<Proc:')
+      expect(redis_usage_data[:redis_usage_data_block]).to eq('non-SQL usage data block')
     end
   end
@@ -53,8 +53,8 @@ RSpec.describe Gitlab::UsageDataQueries do
         .to eq(alt_usage_data_value: 1)
     end

-    it 'returns a stringified block for alt_usage_data with a block' do
-      expect(alt_usage_data[:alt_usage_data_block]).to start_with('#<Proc:')
+    it 'returns a placeholder string for alt_usage_data with a block' do
+      expect(alt_usage_data[:alt_usage_data_block]).to eq('non-SQL usage data block')
     end
   end


@@ -10,7 +10,8 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
   let_it_be(:artifact_1) { create(:ci_job_artifact, project: project, size: 1, created_at: 14.days.ago) }
   let_it_be(:artifact_2) { create(:ci_job_artifact, project: project, size: 2, created_at: 13.days.ago) }
-  let_it_be(:artifact_3) { create(:ci_job_artifact, project: project, size: 5, created_at: 12.days.ago) }
+  let_it_be(:artifact_3) { create(:ci_job_artifact, project: project, size: nil, created_at: 13.days.ago) }
+  let_it_be(:artifact_4) { create(:ci_job_artifact, project: project, size: 5, created_at: 12.days.ago) }

   # This should not be included in the recalculation as it is created later than the refresh start time
   let_it_be(:future_artifact) { create(:ci_job_artifact, project: project, size: 8, created_at: 2.days.from_now) }
@@ -33,7 +34,7 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
   end

   before do
-    stub_const("#{described_class}::BATCH_SIZE", 2)
+    stub_const("#{described_class}::BATCH_SIZE", 3)

     stats = create(:project_statistics, project: project, build_artifacts_size: 120)
     stats.increment_counter(:build_artifacts_size, 30)
@@ -48,7 +49,7 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
     end

     it 'updates the last_job_artifact_id to the ID of the last artifact from the batch' do
-      expect { service.execute }.to change { refresh.reload.last_job_artifact_id.to_i }.to(artifact_2.id)
+      expect { service.execute }.to change { refresh.reload.last_job_artifact_id.to_i }.to(artifact_3.id)
     end

     it 'requeues the refresh job' do
@@ -62,7 +63,7 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
         :project_build_artifacts_size_refresh,
         :pending,
         project: project,
-        last_job_artifact_id: artifact_2.id
+        last_job_artifact_id: artifact_3.id
       )
     end
@@ -73,7 +74,7 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
     end

     it 'keeps the last_job_artifact_id unchanged' do
-      expect(refresh.reload.last_job_artifact_id).to eq(artifact_2.id)
+      expect(refresh.reload.last_job_artifact_id).to eq(artifact_3.id)
     end

     it 'keeps the state of the refresh record at running' do
@@ -89,7 +90,7 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
         project: project,
         updated_at: 2.days.ago,
         refresh_started_at: now,
-        last_job_artifact_id: artifact_3.id
+        last_job_artifact_id: artifact_4.id
       )
     end


@@ -261,7 +261,7 @@ RSpec.describe ContainerRegistry::Migration::EnqueuerWorker, :aggregate_failures
     let(:lease_key) { worker.send(:lease_key) }

     before do
-      stub_exclusive_lease_taken(lease_key, timeout: 1.hour)
+      stub_exclusive_lease_taken(lease_key, timeout: 30.minutes)
     end

     it 'does not perform' do