Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-03-16 09:11:17 +00:00
parent 93c27b216a
commit 67cdffe4de
49 changed files with 738 additions and 931 deletions

View file

@@ -1,13 +1,11 @@
<script>
import { GlLoadingIcon } from '@gitlab/ui';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import { GlIcon } from '@gitlab/ui';
import STATUS_MAP from '../constants';
export default {
name: 'ImportStatus',
components: {
CiIcon,
GlLoadingIcon,
GlIcon,
},
props: {
status: {
@@ -20,28 +18,13 @@ export default {
mappedStatus() {
return STATUS_MAP[this.status];
},
ciIconStatus() {
const { icon } = this.mappedStatus;
return {
icon: `status_${icon}`,
group: icon,
};
},
},
};
</script>
<template>
<div class="gl-display-flex gl-h-7 gl-align-items-center">
<gl-loading-icon
v-if="mappedStatus.loadingIcon"
:inline="true"
:class="mappedStatus.textClass"
class="align-middle mr-2"
/>
<ci-icon v-else css-classes="align-middle mr-2" :status="ciIconStatus" />
<span :class="mappedStatus.textClass">{{ mappedStatus.text }}</span>
<div>
<gl-icon :name="mappedStatus.icon" :class="mappedStatus.iconClass" :size="12" class="gl-mr-2" />
<span>{{ mappedStatus.text }}</span>
</div>
</template>

View file

@@ -11,43 +11,43 @@ export const STATUSES = {
STARTED: 'started',
NONE: 'none',
SCHEDULING: 'scheduling',
CANCELLED: 'cancelled',
};
const SCHEDULED_STATUS = {
icon: 'status-scheduled',
text: __('Pending'),
iconClass: 'gl-text-orange-400',
};
const STATUS_MAP = {
[STATUSES.NONE]: {
icon: 'status-waiting',
text: __('Not started'),
iconClass: 'gl-text-gray-400',
},
[STATUSES.SCHEDULING]: SCHEDULED_STATUS,
[STATUSES.SCHEDULED]: SCHEDULED_STATUS,
[STATUSES.CREATED]: SCHEDULED_STATUS,
[STATUSES.STARTED]: {
icon: 'status-running',
text: __('Importing...'),
iconClass: 'gl-text-blue-400',
},
[STATUSES.FINISHED]: {
icon: 'success',
text: __('Done'),
textClass: 'text-success',
icon: 'status-success',
text: __('Complete'),
iconClass: 'gl-text-green-400',
},
[STATUSES.FAILED]: {
icon: 'failed',
icon: 'status-failed',
text: __('Failed'),
textClass: 'text-danger',
iconClass: 'gl-text-red-600',
},
[STATUSES.CREATED]: {
icon: 'pending',
text: __('Scheduled'),
textClass: 'text-warning',
},
[STATUSES.SCHEDULED]: {
icon: 'pending',
text: __('Scheduled'),
textClass: 'text-warning',
},
[STATUSES.STARTED]: {
icon: 'running',
text: __('Running…'),
textClass: 'text-info',
},
[STATUSES.NONE]: {
icon: 'created',
text: __('Not started'),
textClass: 'text-muted',
},
[STATUSES.SCHEDULING]: {
loadingIcon: true,
text: __('Scheduling'),
textClass: 'text-warning',
[STATUSES.CANCELLED]: {
icon: 'status-stopped',
text: __('Cancelled'),
iconClass: 'gl-text-red-600',
},
};

View file

@@ -8,6 +8,7 @@ import cancelPipelineMutation from '../graphql/mutations/cancel_pipeline.mutatio
import deletePipelineMutation from '../graphql/mutations/delete_pipeline.mutation.graphql';
import retryPipelineMutation from '../graphql/mutations/retry_pipeline.mutation.graphql';
import getPipelineQuery from '../graphql/queries/get_pipeline_header_data.query.graphql';
import { getQueryHeaders } from './graph/utils';
const DELETE_MODAL_ID = 'pipeline-delete-modal';
const POLL_INTERVAL = 10000;
@@ -34,7 +35,9 @@ export default {
[DEFAULT]: __('An unknown error occurred.'),
},
inject: {
// Receive `fullProject` and `pipelinesPath`
graphqlResourceEtag: {
default: '',
},
paths: {
default: {},
},
@@ -47,6 +50,9 @@
},
apollo: {
pipeline: {
context() {
return getQueryHeaders(this.graphqlResourceEtag);
},
query: getPipelineQuery,
variables() {
return {

View file

@@ -7,6 +7,7 @@ import { reportToSentry } from './components/graph/utils';
import TestReports from './components/test_reports/test_reports.vue';
import GraphBundleMixin from './mixins/graph_pipeline_bundle_mixin';
import createDagApp from './pipeline_details_dag';
import { apolloProvider } from './pipeline_shared_client';
import createTestReportsStore from './stores/test_reports';
Vue.use(Translate);
@@ -80,7 +81,7 @@ const createTestDetails = () => {
export default async function initPipelineDetailsBundle() {
createTestDetails();
createDagApp();
createDagApp(apolloProvider);
const canShowNewPipelineDetails =
gon.features.graphqlPipelineDetails || gon.features.graphqlPipelineDetailsUsers;
@@ -93,7 +94,7 @@ export default async function initPipelineDetailsBundle() {
/* webpackChunkName: 'createPipelinesDetailApp' */ './pipeline_details_graph'
);
createPipelinesDetailApp(SELECTORS.PIPELINE_GRAPH, dataset);
createPipelinesDetailApp(SELECTORS.PIPELINE_GRAPH, apolloProvider, dataset);
} catch {
Flash(__('An error occurred while loading the pipeline.'));
}
@@ -111,7 +112,7 @@ export default async function initPipelineDetailsBundle() {
const { createPipelineHeaderApp } = await import(
/* webpackChunkName: 'createPipelineHeaderApp' */ './pipeline_details_header'
);
createPipelineHeaderApp(SELECTORS.PIPELINE_HEADER);
createPipelineHeaderApp(SELECTORS.PIPELINE_HEADER, apolloProvider, dataset.graphqlResourceEtag);
} catch {
Flash(__('An error occurred while loading a section of this page.'));
}

View file

@@ -1,15 +1,10 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import Dag from './components/dag/dag.vue';
Vue.use(VueApollo);
const apolloProvider = new VueApollo({
defaultClient: createDefaultClient(),
});
const createDagApp = () => {
const createDagApp = (apolloProvider) => {
const el = document.querySelector('#js-pipeline-dag-vue');
if (!el) {

View file

@@ -1,23 +1,14 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import { GRAPHQL } from './components/graph/constants';
import PipelineGraphWrapper from './components/graph/graph_component_wrapper.vue';
import { reportToSentry } from './components/graph/utils';
Vue.use(VueApollo);
const apolloProvider = new VueApollo({
defaultClient: createDefaultClient(
{},
{
useGet: true,
},
),
});
const createPipelinesDetailApp = (
selector,
apolloProvider,
{ pipelineProjectPath, pipelineIid, metricsPath, graphqlResourceEtag } = {},
) => {
// eslint-disable-next-line no-new

View file

@@ -1,15 +1,10 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import pipelineHeader from './components/header_component.vue';
Vue.use(VueApollo);
const apolloProvider = new VueApollo({
defaultClient: createDefaultClient(),
});
export const createPipelineHeaderApp = (elSelector) => {
export const createPipelineHeaderApp = (elSelector, apolloProvider, graphqlResourceEtag) => {
const el = document.querySelector(elSelector);
if (!el) {
@@ -27,6 +22,7 @@ export const createPipelineHeaderApp = (elSelector) => {
provide: {
paths: {
fullProject: fullPath,
graphqlResourceEtag,
pipelinesPath,
},
pipelineId,

View file

@@ -0,0 +1,11 @@
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
export const apolloProvider = new VueApollo({
defaultClient: createDefaultClient(
{},
{
useGet: true,
},
),
});

View file

@@ -532,10 +532,6 @@ table.code {
&.parallel {
display: table-cell;
width: 46%;
span {
word-break: break-all;
}
}
&.old {

View file

@@ -1,172 +0,0 @@
# frozen_string_literal: true
module Ci
class CreateJobArtifactsService < ::BaseService
include Gitlab::Utils::UsageData
ArtifactsExistError = Class.new(StandardError)
LSIF_ARTIFACT_TYPE = 'lsif'
METRICS_REPORT_UPLOAD_EVENT_NAME = 'i_testing_metrics_report_artifact_uploaders'
OBJECT_STORAGE_ERRORS = [
Errno::EIO,
Google::Apis::ServerError,
Signet::RemoteServerError
].freeze
def initialize(job)
@job = job
@project = job.project
end
def authorize(artifact_type:, filesize: nil)
result = validate_requirements(artifact_type: artifact_type, filesize: filesize)
return result unless result[:status] == :success
headers = JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_size(artifact_type))
if lsif?(artifact_type)
headers[:ProcessLsif] = true
track_usage_event('i_source_code_code_intelligence', project.id)
end
success(headers: headers)
end
def execute(artifacts_file, params, metadata_file: nil)
result = validate_requirements(artifact_type: params[:artifact_type], filesize: artifacts_file.size)
return result unless result[:status] == :success
return success if sha256_matches_existing_artifact?(params[:artifact_type], artifacts_file)
artifact, artifact_metadata = build_artifact(artifacts_file, params, metadata_file)
result = parse_artifact(artifact)
track_artifact_uploader(artifact)
return result unless result[:status] == :success
persist_artifact(artifact, artifact_metadata, params)
end
private
attr_reader :job, :project
def validate_requirements(artifact_type:, filesize:)
return too_large_error if too_large?(artifact_type, filesize)
success
end
def too_large?(type, size)
size > max_size(type) if size
end
def lsif?(type)
type == LSIF_ARTIFACT_TYPE
end
def max_size(type)
Ci::JobArtifact.max_artifact_size(type: type, project: project)
end
def forbidden_type_error(type)
error("#{type} artifacts are forbidden", :forbidden)
end
def too_large_error
error('file size has reached maximum size limit', :payload_too_large)
end
def build_artifact(artifacts_file, params, metadata_file)
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
artifact = Ci::JobArtifact.new(
job_id: job.id,
project: project,
file: artifacts_file,
file_type: params[:artifact_type],
file_format: params[:artifact_format],
file_sha256: artifacts_file.sha256,
expire_in: expire_in)
artifact_metadata = if metadata_file
Ci::JobArtifact.new(
job_id: job.id,
project: project,
file: metadata_file,
file_type: :metadata,
file_format: :gzip,
file_sha256: metadata_file.sha256,
expire_in: expire_in)
end
[artifact, artifact_metadata]
end
def parse_artifact(artifact)
unless Feature.enabled?(:ci_synchronous_artifact_parsing, project, default_enabled: true)
return success
end
case artifact.file_type
when 'dotenv' then parse_dotenv_artifact(artifact)
when 'cluster_applications' then parse_cluster_applications_artifact(artifact)
else success
end
end
def persist_artifact(artifact, artifact_metadata, params)
Ci::JobArtifact.transaction do
artifact.save!
artifact_metadata&.save!
# NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
job.update_column(:artifacts_expire_at, artifact.expire_at)
end
success
rescue ActiveRecord::RecordNotUnique => error
track_exception(error, params)
error('another artifact of the same type already exists', :bad_request)
rescue *OBJECT_STORAGE_ERRORS => error
track_exception(error, params)
error(error.message, :service_unavailable)
rescue => error
track_exception(error, params)
error(error.message, :bad_request)
end
def sha256_matches_existing_artifact?(artifact_type, artifacts_file)
existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
return false unless existing_artifact
existing_artifact.file_sha256 == artifacts_file.sha256
end
def track_exception(error, params)
Gitlab::ErrorTracking.track_exception(error,
job_id: job.id,
project_id: job.project_id,
uploading_type: params[:artifact_type]
)
end
def track_artifact_uploader(artifact)
return unless artifact.file_type == 'metrics'
track_usage_event(METRICS_REPORT_UPLOAD_EVENT_NAME, @job.user_id)
end
def parse_dotenv_artifact(artifact)
Ci::ParseDotenvArtifactService.new(project, current_user).execute(artifact)
end
def parse_cluster_applications_artifact(artifact)
Clusters::ParseClusterApplicationsArtifactService.new(job, job.user).execute(artifact)
end
end
end

View file

@@ -1,56 +0,0 @@
# frozen_string_literal: true
module Ci
class DestroyExpiredJobArtifactsService
include ::Gitlab::ExclusiveLeaseHelpers
include ::Gitlab::LoopHelpers
BATCH_SIZE = 100
LOOP_TIMEOUT = 5.minutes
LOOP_LIMIT = 1000
EXCLUSIVE_LOCK_KEY = 'expired_job_artifacts:destroy:lock'
LOCK_TIMEOUT = 6.minutes
def initialize
@removed_artifacts_count = 0
end
##
# Destroy expired job artifacts on GitLab instance
#
# This destroy process cannot run for more than 6 minutes. This prevents
# multiple `ExpireBuildArtifactsWorker` cron jobs, which are scheduled every
# 7 minutes, from running concurrently.
def execute
in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
destroy_job_artifacts_with_slow_iteration(Time.current)
end
@removed_artifacts_count
end
private
def destroy_job_artifacts_with_slow_iteration(start_at)
Ci::JobArtifact.expired_before(start_at).each_batch(of: BATCH_SIZE, column: :expire_at, order: :desc) do |relation, index|
# For performance reasons, join with ci_pipelines after the batch is queried.
# See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47496
artifacts = relation.unlocked
service_response = destroy_batch_async(artifacts)
@removed_artifacts_count += service_response[:destroyed_artifacts_count]
break if loop_timeout?(start_at)
break if index >= LOOP_LIMIT
end
end
def destroy_batch_async(artifacts)
Ci::JobArtifactsDestroyBatchService.new(artifacts).execute
end
def loop_timeout?(start_at)
Time.current > start_at + LOOP_TIMEOUT
end
end
end

View file

@@ -0,0 +1,174 @@
# frozen_string_literal: true
module Ci
module JobArtifacts
class CreateService < ::BaseService
include Gitlab::Utils::UsageData
ArtifactsExistError = Class.new(StandardError)
LSIF_ARTIFACT_TYPE = 'lsif'
METRICS_REPORT_UPLOAD_EVENT_NAME = 'i_testing_metrics_report_artifact_uploaders'
OBJECT_STORAGE_ERRORS = [
Errno::EIO,
Google::Apis::ServerError,
Signet::RemoteServerError
].freeze
def initialize(job)
@job = job
@project = job.project
end
def authorize(artifact_type:, filesize: nil)
result = validate_requirements(artifact_type: artifact_type, filesize: filesize)
return result unless result[:status] == :success
headers = JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_size(artifact_type))
if lsif?(artifact_type)
headers[:ProcessLsif] = true
track_usage_event('i_source_code_code_intelligence', project.id)
end
success(headers: headers)
end
def execute(artifacts_file, params, metadata_file: nil)
result = validate_requirements(artifact_type: params[:artifact_type], filesize: artifacts_file.size)
return result unless result[:status] == :success
return success if sha256_matches_existing_artifact?(params[:artifact_type], artifacts_file)
artifact, artifact_metadata = build_artifact(artifacts_file, params, metadata_file)
result = parse_artifact(artifact)
track_artifact_uploader(artifact)
return result unless result[:status] == :success
persist_artifact(artifact, artifact_metadata, params)
end
private
attr_reader :job, :project
def validate_requirements(artifact_type:, filesize:)
return too_large_error if too_large?(artifact_type, filesize)
success
end
def too_large?(type, size)
size > max_size(type) if size
end
def lsif?(type)
type == LSIF_ARTIFACT_TYPE
end
def max_size(type)
Ci::JobArtifact.max_artifact_size(type: type, project: project)
end
def forbidden_type_error(type)
error("#{type} artifacts are forbidden", :forbidden)
end
def too_large_error
error('file size has reached maximum size limit', :payload_too_large)
end
def build_artifact(artifacts_file, params, metadata_file)
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
artifact = Ci::JobArtifact.new(
job_id: job.id,
project: project,
file: artifacts_file,
file_type: params[:artifact_type],
file_format: params[:artifact_format],
file_sha256: artifacts_file.sha256,
expire_in: expire_in)
artifact_metadata = if metadata_file
Ci::JobArtifact.new(
job_id: job.id,
project: project,
file: metadata_file,
file_type: :metadata,
file_format: :gzip,
file_sha256: metadata_file.sha256,
expire_in: expire_in)
end
[artifact, artifact_metadata]
end
def parse_artifact(artifact)
unless Feature.enabled?(:ci_synchronous_artifact_parsing, project, default_enabled: true)
return success
end
case artifact.file_type
when 'dotenv' then parse_dotenv_artifact(artifact)
when 'cluster_applications' then parse_cluster_applications_artifact(artifact)
else success
end
end
def persist_artifact(artifact, artifact_metadata, params)
Ci::JobArtifact.transaction do
artifact.save!
artifact_metadata&.save!
# NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
job.update_column(:artifacts_expire_at, artifact.expire_at)
end
success
rescue ActiveRecord::RecordNotUnique => error
track_exception(error, params)
error('another artifact of the same type already exists', :bad_request)
rescue *OBJECT_STORAGE_ERRORS => error
track_exception(error, params)
error(error.message, :service_unavailable)
rescue => error
track_exception(error, params)
error(error.message, :bad_request)
end
def sha256_matches_existing_artifact?(artifact_type, artifacts_file)
existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
return false unless existing_artifact
existing_artifact.file_sha256 == artifacts_file.sha256
end
def track_exception(error, params)
Gitlab::ErrorTracking.track_exception(error,
job_id: job.id,
project_id: job.project_id,
uploading_type: params[:artifact_type]
)
end
def track_artifact_uploader(artifact)
return unless artifact.file_type == 'metrics'
track_usage_event(METRICS_REPORT_UPLOAD_EVENT_NAME, @job.user_id)
end
def parse_dotenv_artifact(artifact)
Ci::ParseDotenvArtifactService.new(project, current_user).execute(artifact)
end
def parse_cluster_applications_artifact(artifact)
Clusters::ParseClusterApplicationsArtifactService.new(job, job.user).execute(artifact)
end
end
end
end
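As context for the `lib/api` changes further down, a minimal sketch of the two-step flow the runner endpoints drive against the relocated service. The `job`, `filesize`, `artifacts_file`, and `metadata_file` names are illustrative, not part of this commit:

```ruby
# Hypothetical caller; the real ones are the runner API endpoints below.
service = Ci::JobArtifacts::CreateService.new(job)

# Step 1: pre-upload authorization, returning Workhorse upload headers.
service.authorize(artifact_type: 'archive', filesize: filesize)
# => { status: :success, headers: { ... } } while under the size limit

# Step 2: persist the uploaded artifact and its optional gzipped metadata.
service.execute(artifacts_file,
                { artifact_type: 'archive', artifact_format: 'zip' },
                metadata_file: metadata_file)
```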

View file

@@ -0,0 +1,58 @@
# frozen_string_literal: true
module Ci
module JobArtifacts
class DestroyAllExpiredService
include ::Gitlab::ExclusiveLeaseHelpers
include ::Gitlab::LoopHelpers
BATCH_SIZE = 100
LOOP_TIMEOUT = 5.minutes
LOOP_LIMIT = 1000
EXCLUSIVE_LOCK_KEY = 'expired_job_artifacts:destroy:lock'
LOCK_TIMEOUT = 6.minutes
def initialize
@removed_artifacts_count = 0
end
##
# Destroy expired job artifacts on GitLab instance
#
# This destroy process cannot run for more than 6 minutes. This prevents
# multiple `ExpireBuildArtifactsWorker` cron jobs, which are scheduled every
# 7 minutes, from running concurrently.
def execute
in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
destroy_job_artifacts_with_slow_iteration(Time.current)
end
@removed_artifacts_count
end
private
def destroy_job_artifacts_with_slow_iteration(start_at)
Ci::JobArtifact.expired_before(start_at).each_batch(of: BATCH_SIZE, column: :expire_at, order: :desc) do |relation, index|
# For performance reasons, join with ci_pipelines after the batch is queried.
# See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47496
artifacts = relation.unlocked
service_response = destroy_batch_async(artifacts)
@removed_artifacts_count += service_response[:destroyed_artifacts_count]
break if loop_timeout?(start_at)
break if index >= LOOP_LIMIT
end
end
def destroy_batch_async(artifacts)
Ci::JobArtifacts::DestroyBatchService.new(artifacts).execute
end
def loop_timeout?(start_at)
Time.current > start_at + LOOP_TIMEOUT
end
end
end
end
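The comment above carries the key invariant: the cron worker fires every 7 minutes, so a run capped at 6 minutes plus an exclusive lease means two runs never overlap. A sketch of the guarantee, assuming `in_lock` behaves as `Gitlab::ExclusiveLeaseHelpers` documents:

```ruby
# Only one process cluster-wide can hold the lease during the 6-minute TTL;
# a second concurrent run fails to take it and raises
# Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError after its one retry.
in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
  destroy_job_artifacts_with_slow_iteration(Time.current)
end
```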

View file

@@ -0,0 +1,74 @@
# frozen_string_literal: true
module Ci
module JobArtifacts
class DestroyBatchService
include BaseServiceUtility
include ::Gitlab::Utils::StrongMemoize
# Danger: Private - Should only be called in Ci Services that pass a batch of job artifacts
# Not for use outside of the Ci:: namespace
# Adds the passed batch of job artifacts to the `ci_deleted_objects` table
# for asynchronous destruction of the objects in Object Storage via the `Ci::DeleteObjectsService`
# and then deletes the batch of related `ci_job_artifacts` records.
# Params:
# +job_artifacts+:: A relation of job artifacts to destroy (fewer than MAX_JOB_ARTIFACT_BATCH_SIZE)
# +pick_up_at+:: When to pick up the files for deletion
# Returns:
# +Hash+:: A hash with status and destroyed_artifacts_count keys
def initialize(job_artifacts, pick_up_at: nil)
@job_artifacts = job_artifacts.with_destroy_preloads.to_a
@pick_up_at = pick_up_at
end
# rubocop: disable CodeReuse/ActiveRecord
def execute
return success(destroyed_artifacts_count: artifacts_count) if @job_artifacts.empty?
Ci::DeletedObject.transaction do
Ci::DeletedObject.bulk_import(@job_artifacts, @pick_up_at)
Ci::JobArtifact.id_in(@job_artifacts.map(&:id)).delete_all
destroy_related_records(@job_artifacts)
end
# This is executed outside of the transaction because it depends on Redis
update_project_statistics
increment_monitoring_statistics(artifacts_count)
success(destroyed_artifacts_count: artifacts_count)
end
# rubocop: enable CodeReuse/ActiveRecord
private
# This method is implemented in EE and it must do only database work
def destroy_related_records(artifacts); end
def update_project_statistics
artifacts_by_project = @job_artifacts.group_by(&:project)
artifacts_by_project.each do |project, artifacts|
delta = -artifacts.sum { |artifact| artifact.size.to_i }
ProjectStatistics.increment_statistic(
project, Ci::JobArtifact.project_statistics_name, delta)
end
end
def increment_monitoring_statistics(size)
metrics.increment_destroyed_artifacts(size)
end
def metrics
@metrics ||= ::Gitlab::Ci::Artifacts::Metrics.new
end
def artifacts_count
strong_memoize(:artifacts_count) do
@job_artifacts.count
end
end
end
end
end
Ci::JobArtifacts::DestroyBatchService.prepend_if_ee('EE::Ci::JobArtifacts::DestroyBatchService')
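A minimal usage sketch, assuming the caller already holds a relation of unlocked artifacts the way `DestroyAllExpiredService` above does (the scopes and values are illustrative):

```ruby
# Inside another Ci:: service; the relation must respond to #with_destroy_preloads.
artifacts = Ci::JobArtifact.expired_before(Time.current).unlocked.limit(100)

result = Ci::JobArtifacts::DestroyBatchService.new(artifacts, pick_up_at: 1.hour.from_now).execute
result[:status]                    # => :success
result[:destroyed_artifacts_count] # => rows moved to ci_deleted_objects and deleted
```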

View file

@@ -1,72 +0,0 @@
# frozen_string_literal: true
module Ci
class JobArtifactsDestroyBatchService
include BaseServiceUtility
include ::Gitlab::Utils::StrongMemoize
# Danger: Private - Should only be called in Ci Services that pass a batch of job artifacts
# Not for use outside of the ci namespace
# Adds the passed batch of job artifacts to the `ci_deleted_objects` table
# for asynchronous destruction of the objects in Object Storage via the `Ci::DeleteObjectsService`
# and then deletes the batch of related `ci_job_artifacts` records.
# Params:
# +job_artifacts+:: A relation of job artifacts to destroy (fewer than MAX_JOB_ARTIFACT_BATCH_SIZE)
# +pick_up_at+:: When to pick up the files for deletion
# Returns:
# +Hash+:: A hash with status and destroyed_artifacts_count keys
def initialize(job_artifacts, pick_up_at: nil)
@job_artifacts = job_artifacts.with_destroy_preloads.to_a
@pick_up_at = pick_up_at
end
# rubocop: disable CodeReuse/ActiveRecord
def execute
return success(destroyed_artifacts_count: artifacts_count) if @job_artifacts.empty?
Ci::DeletedObject.transaction do
Ci::DeletedObject.bulk_import(@job_artifacts, @pick_up_at)
Ci::JobArtifact.id_in(@job_artifacts.map(&:id)).delete_all
destroy_related_records(@job_artifacts)
end
# This is executed outside of the transaction because it depends on Redis
update_project_statistics
increment_monitoring_statistics(artifacts_count)
success(destroyed_artifacts_count: artifacts_count)
end
# rubocop: enable CodeReuse/ActiveRecord
private
# This method is implemented in EE and it must do only database work
def destroy_related_records(artifacts); end
def update_project_statistics
artifacts_by_project = @job_artifacts.group_by(&:project)
artifacts_by_project.each do |project, artifacts|
delta = -artifacts.sum { |artifact| artifact.size.to_i }
ProjectStatistics.increment_statistic(
project, Ci::JobArtifact.project_statistics_name, delta)
end
end
def increment_monitoring_statistics(size)
metrics.increment_destroyed_artifacts(size)
end
def metrics
@metrics ||= ::Gitlab::Ci::Artifacts::Metrics.new
end
def artifacts_count
strong_memoize(:artifacts_count) do
@job_artifacts.count
end
end
end
end
Ci::JobArtifactsDestroyBatchService.prepend_if_ee('EE::Ci::JobArtifactsDestroyBatchService')

View file

@@ -2,7 +2,7 @@
module Ci
module PipelineArtifacts
class DestroyExpiredArtifactsService
class DestroyAllExpiredService
include ::Gitlab::LoopHelpers
include ::Gitlab::Utils::StrongMemoize

View file

@@ -14,12 +14,13 @@ module Issuable
end
def execute
if assignee_ids.blank?
updated_new_assignees = new_assignee_ids
updated_new_assignees = new_assignee_ids
if add_assignee_ids.blank? && remove_assignee_ids.blank?
updated_new_assignees = assignee_ids if assignee_ids
else
updated_new_assignees |= add_assignee_ids if add_assignee_ids
updated_new_assignees -= remove_assignee_ids if remove_assignee_ids
else
updated_new_assignees = assignee_ids
end
updated_new_assignees.uniq
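The net effect of the reworked branch, shown with the same values the updated specs further down use (a sketch, not part of the commit): when add or remove ids are present, `assignee_ids` is ignored and the result is `((existing + extra) | add) - remove`, deduplicated.

```ruby
Issuable::ProcessAssignees.new(assignee_ids: %w(5 7 9),
                               add_assignee_ids: %w(2 4 6),
                               remove_assignee_ids: %w(4 7 11),
                               existing_assignee_ids: %w(1 3 11),
                               extra_assignee_ids: %w(2 5 12)).execute.sort
# => ["1", "12", "2", "3", "5", "6"]  (string sort of 1 2 3 5 6 12)
```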

View file

@@ -29,32 +29,48 @@ class IssuableBaseService < BaseService
params.delete(:label_ids)
params.delete(:assignee_ids)
params.delete(:assignee_id)
params.delete(:add_assignee_ids)
params.delete(:remove_assignee_ids)
params.delete(:due_date)
params.delete(:canonical_issue_id)
params.delete(:project)
params.delete(:discussion_locked)
end
filter_assignee(issuable)
filter_assignees(issuable)
filter_milestone
filter_labels
end
def filter_assignee(issuable)
return if params[:assignee_ids].blank?
def filter_assignees(issuable)
filter_assignees_with_key(issuable, :assignee_ids, :assignees)
filter_assignees_with_key(issuable, :add_assignee_ids, :add_assignees)
filter_assignees_with_key(issuable, :remove_assignee_ids, :remove_assignees)
end
unless issuable.allows_multiple_assignees?
params[:assignee_ids] = params[:assignee_ids].first(1)
def filter_assignees_with_key(issuable, id_key, key)
if params[key] && params[id_key].blank?
params[id_key] = params[key].map(&:id)
end
assignee_ids = params[:assignee_ids].select { |assignee_id| user_can_read?(issuable, assignee_id) }
return if params[id_key].blank?
if params[:assignee_ids].map(&:to_s) == [IssuableFinder::Params::NONE]
params[:assignee_ids] = []
filter_assignees_using_checks(issuable, id_key)
end
def filter_assignees_using_checks(issuable, id_key)
unless issuable.allows_multiple_assignees?
params[id_key] = params[id_key].first(1)
end
assignee_ids = params[id_key].select { |assignee_id| user_can_read?(issuable, assignee_id) }
if params[id_key].map(&:to_s) == [IssuableFinder::Params::NONE]
params[id_key] = []
elsif assignee_ids.any?
params[:assignee_ids] = assignee_ids
params[id_key] = assignee_ids
else
params.delete(:assignee_ids)
params.delete(id_key)
end
end
@@ -116,6 +132,15 @@ class IssuableBaseService < BaseService
new_label_ids.uniq
end
def process_assignee_ids(attributes, existing_assignee_ids: nil, extra_assignee_ids: [])
process = Issuable::ProcessAssignees.new(assignee_ids: attributes.delete(:assignee_ids),
add_assignee_ids: attributes.delete(:add_assignee_ids),
remove_assignee_ids: attributes.delete(:remove_assignee_ids),
existing_assignee_ids: existing_assignee_ids,
extra_assignee_ids: extra_assignee_ids)
process.execute
end
def handle_quick_actions(issuable)
merge_quick_actions_into_params!(issuable)
end
@@ -145,6 +170,10 @@ class IssuableBaseService < BaseService
params[:author] ||= current_user
params[:label_ids] = process_label_ids(params, extra_label_ids: issuable.label_ids.to_a)
if issuable.respond_to?(:assignee_ids)
params[:assignee_ids] = process_assignee_ids(params, extra_assignee_ids: issuable.assignee_ids.to_a)
end
issuable.assign_attributes(params)
before_create(issuable)
@@ -191,6 +220,7 @@ class IssuableBaseService < BaseService
old_associations = associations_before_update(issuable)
assign_requested_labels(issuable)
assign_requested_assignees(issuable)
if issuable.changed? || params.present?
issuable.assign_attributes(params)
@@ -354,6 +384,16 @@ class IssuableBaseService < BaseService
issuable.touch
end
def assign_requested_assignees(issuable)
return if issuable.is_a?(Epic)
assignee_ids = process_assignee_ids(params, existing_assignee_ids: issuable.assignee_ids)
if ids_changing?(issuable.assignee_ids, assignee_ids)
params[:assignee_ids] = assignee_ids
issuable.touch
end
end
# Arrays of ids are used, but we should really use sets of ids, so
# let's have a helper to properly check if some ids are changing
def ids_changing?(old_array, new_array)

View file

@@ -16,17 +16,7 @@ module MergeRequests
merge_request.source_project = find_source_project
merge_request.target_project = find_target_project
# Force remove the source branch?
merge_request.merge_params['force_remove_source_branch'] = force_remove_source_branch
# Only assign merge requests params that are allowed
self.params = assign_allowed_merge_params(merge_request, params)
# Filter out params that are either not allowed or invalid
filter_params(merge_request)
# Filter out :add_label_ids and :remove_label_ids params
filter_label_id_params
process_params
merge_request.compare_commits = []
set_merge_request_target_branch
@@ -70,21 +60,41 @@
end
end
def filter_label_id_params
def filter_id_params
# merge_request.assign_attributes(...) below is a Rails
# method that only works if all the params it is passed have
# corresponding fields in the database. As there are no fields
# in the database for :add_label_ids and :remove_label_ids, we
# in the database for :add_label_ids, :remove_label_ids,
# :add_assignee_ids and :remove_assignee_ids, we
# need to remove them from the params before the call to
# merge_request.assign_attributes(...)
#
# IssuableBaseService#process_label_ids takes care
# IssuableBaseService#process_label_ids and
# IssuableBaseService#process_assignee_ids take care
# of the removal.
params[:label_ids] = process_label_ids(params, extra_label_ids: merge_request.label_ids.to_a)
params[:assignee_ids] = process_assignee_ids(params, extra_assignee_ids: merge_request.assignee_ids.to_a)
merge_request.assign_attributes(params.to_h.compact)
end
def process_params
# Force remove the source branch?
merge_request.merge_params['force_remove_source_branch'] = force_remove_source_branch
# Only assign merge requests params that are allowed
self.params = assign_allowed_merge_params(merge_request, params)
# Filter out params that are either not allowed or invalid
filter_params(merge_request)
# Filter out the following from params:
# - :add_label_ids and :remove_label_ids
# - :add_assignee_ids and :remove_assignee_ids
filter_id_params
end
def find_source_project
source_project = project_from_params(:source_project)
return source_project if source_project.present? && can?(current_user, :create_merge_request_from, source_project)

View file

@@ -129,7 +129,9 @@ module MergeRequests
target_branch: push_options[:target],
force_remove_source_branch: push_options[:remove_source_branch],
label: push_options[:label],
unlabel: push_options[:unlabel]
unlabel: push_options[:unlabel],
assign: push_options[:assign],
unassign: push_options[:unassign]
}
params.compact!
@@ -137,6 +139,9 @@
params[:add_labels] = params.delete(:label).keys if params.has_key?(:label)
params[:remove_labels] = params.delete(:unlabel).keys if params.has_key?(:unlabel)
params[:add_assignee_ids] = params.delete(:assign).keys if params.has_key?(:assign)
params[:remove_assignee_ids] = params.delete(:unassign).keys if params.has_key?(:unassign)
params
end
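For context: multi-value push options are parsed by `Gitlab::PushOptions` into hashes keyed by the supplied values (see the `MULTI_VALUE_OPTIONS` change below), which is why `.keys` recovers the user list here. A sketch under that assumption:

```ruby
# Assumed parsed shape (hash values are occurrence counts and unused here) for:
#   git push -o merge_request.assign="user1" -o merge_request.assign="user2"
params = { assign: { 'user1' => 1, 'user2' => 1 } }

params[:add_assignee_ids] = params.delete(:assign).keys if params.has_key?(:assign)
params[:add_assignee_ids] # => ["user1", "user2"]
```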

View file

@@ -13,11 +13,14 @@ class JsonSchemaValidator < ActiveModel::EachValidator
FILENAME_ALLOWED = /\A[a-z0-9_-]*\Z/.freeze
FilenameError = Class.new(StandardError)
JSON_VALIDATOR_MAX_DRAFT_VERSION = 4
BASE_DIRECTORY = %w(app validators json_schemas).freeze
def initialize(options)
raise ArgumentError, "Expected 'filename' as an argument" unless options[:filename]
raise FilenameError, "Must be a valid 'filename'" unless options[:filename].match?(FILENAME_ALLOWED)
@base_directory = options.delete(:base_directory) || BASE_DIRECTORY
super(options)
end
@@ -29,6 +32,8 @@ class JsonSchemaValidator < ActiveModel::EachValidator
private
attr_reader :base_directory
def valid_schema?(value)
if draft_version > JSON_VALIDATOR_MAX_DRAFT_VERSION
JSONSchemer.schema(Pathname.new(schema_path)).valid?(value)
@@ -38,10 +43,16 @@
end
def schema_path
Rails.root.join('app', 'validators', 'json_schemas', "#{options[:filename]}.json").to_s
@schema_path ||= Rails.root.join(*base_directory, filename_with_extension).to_s
end
def filename_with_extension
"#{options[:filename]}.json"
end
def draft_version
options[:draft] || JSON_VALIDATOR_MAX_DRAFT_VERSION
end
end
JsonSchemaValidator.prepend_ee_mod
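A sketch of how a model can point the validator at a non-default schema directory through the new option; the model, attribute, and EE path are illustrative only:

```ruby
class SomeModel < ApplicationRecord
  # Resolves to Rails.root.join('ee', 'app', 'validators', 'json_schemas', 'report.json')
  validates :report, json_schema: { filename: 'report',
                                    base_directory: %w(ee app validators json_schemas) }
end
```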

View file

@@ -1,28 +0,0 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"title": "Security::Scan#info schema",
"description": "The schema validates the content of the Security::Scan#info attribute",
"additionalProperties": false,
"properties": {
"errors": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": false,
"properties": {
"type": {
"type": "string"
},
"message": {
"type": "string"
}
},
"required": [
"type",
"message"
]
}
}
}
}

View file

@@ -1,294 +0,0 @@
{
"type": "object",
"description": "The schema for vulnerability finding details",
"additionalProperties": false,
"patternProperties": {
"^.*$": {
"allOf": [
{ "$ref": "#/definitions/named_field" },
{ "$ref": "#/definitions/detail_type" }
]
}
},
"definitions": {
"detail_type": {
"oneOf": [
{ "$ref": "#/definitions/named_list" },
{ "$ref": "#/definitions/list" },
{ "$ref": "#/definitions/table" },
{ "$ref": "#/definitions/text" },
{ "$ref": "#/definitions/url" },
{ "$ref": "#/definitions/code" },
{ "$ref": "#/definitions/value" },
{ "$ref": "#/definitions/diff" },
{ "$ref": "#/definitions/markdown" },
{ "$ref": "#/definitions/commit" },
{ "$ref": "#/definitions/file_location" },
{ "$ref": "#/definitions/module_location" }
]
},
"text_value": {
"type": "string"
},
"named_field": {
"type": "object",
"required": [
"name"
],
"properties": {
"name": {
"$ref": "#/definitions/text_value",
"minLength": 1
},
"description": {
"$ref": "#/definitions/text_value"
}
}
},
"named_list": {
"type": "object",
"description": "An object with named and typed fields",
"required": [
"type",
"items"
],
"properties": {
"type": {
"const": "named-list"
},
"items": {
"type": "object",
"patternProperties": {
"^.*$": {
"allOf": [
{
"$ref": "#/definitions/named_field"
},
{
"$ref": "#/definitions/detail_type"
}
]
}
}
}
}
},
"list": {
"type": "object",
"description": "A list of typed fields",
"required": [
"type",
"items"
],
"properties": {
"type": {
"const": "list"
},
"items": {
"type": "array",
"items": {
"$ref": "#/definitions/detail_type"
}
}
}
},
"table": {
"type": "object",
"description": "A table of typed fields",
"required": [
"type",
"rows"
],
"properties": {
"type": {
"const": "table"
},
"header": {
"type": "array",
"items": {
"$ref": "#/definitions/detail_type"
}
},
"rows": {
"type": "array",
"items": {
"type": "array",
"items": {
"$ref": "#/definitions/detail_type"
}
}
}
}
},
"text": {
"type": "object",
"description": "Raw text",
"required": [
"type",
"value"
],
"properties": {
"type": {
"const": "text"
},
"value": {
"$ref": "#/definitions/text_value"
}
}
},
"url": {
"type": "object",
"description": "A single URL",
"required": [
"type",
"href"
],
"properties": {
"type": {
"const": "url"
},
"text": {
"$ref": "#/definitions/text_value"
},
"href": {
"type": "string",
"minLength": 1,
"examples": ["http://mysite.com"]
}
}
},
"code": {
"type": "object",
"description": "A codeblock",
"required": [
"type",
"value"
],
"properties": {
"type": {
"const": "code"
},
"value": {
"type": "string"
},
"lang": {
"type": "string",
"description": "A programming language"
}
}
},
"value": {
"type": "object",
"description": "A field that can store a range of types of value",
"required": ["type", "value"],
"properties": {
"type": { "const": "value" },
"value": {
"type": ["number", "string", "boolean"]
}
}
},
"diff": {
"type": "object",
"description": "A diff",
"required": [
"type",
"before",
"after"
],
"properties": {
"type": {
"const": "diff"
},
"before": {
"type": "string"
},
"after": {
"type": "string"
}
}
},
"markdown": {
"type": "object",
"description": "GitLab flavoured markdown, see https://docs.gitlab.com/ee/user/markdown.html",
"required": [
"type",
"value"
],
"properties": {
"type": {
"const": "markdown"
},
"value": {
"$ref": "#/definitions/text_value",
"examples": ["Here is markdown `inline code` #1 [test](gitlab.com)\n\n![GitLab Logo](https://about.gitlab.com/images/press/logo/preview/gitlab-logo-white-preview.png)"]
}
}
},
"commit": {
"type": "object",
"description": "A commit/tag/branch within the GitLab project",
"required": [
"type",
"value"
],
"properties": {
"type": {
"const": "commit"
},
"value": {
"type": "string",
"description": "The commit SHA",
"minLength": 1
}
}
},
"file_location": {
"type": "object",
"description": "A location within a file in the project",
"required": [
"type",
"file_name",
"line_start"
],
"properties": {
"type": {
"const": "file-location"
},
"file_name": {
"type": "string",
"minLength": 1
},
"line_start": {
"type": "integer"
},
"line_end": {
"type": "integer"
}
}
},
"module_location": {
"type": "object",
"description": "A location within a binary module of the form module+relative_offset",
"required": [
"type",
"module_name",
"offset"
],
"properties": {
"type": {
"const": "module-location"
},
"module_name": {
"type": "string",
"minLength": 1,
"examples": ["compiled_binary"]
},
"offset": {
"type": "integer",
"examples": [100]
}
}
}
}
}

View file

@@ -14,7 +14,7 @@ module Ci
feature_category :continuous_integration
def perform
service = ::Ci::PipelineArtifacts::DestroyExpiredArtifactsService.new
service = ::Ci::PipelineArtifacts::DestroyAllExpiredService.new
artifacts_count = service.execute
log_extra_metadata_on_done(:destroyed_pipeline_artifacts_count, artifacts_count)
end

View file

@@ -10,7 +10,7 @@ class ExpireBuildArtifactsWorker # rubocop:disable Scalability/IdempotentWorker
feature_category :continuous_integration
def perform
service = Ci::DestroyExpiredJobArtifactsService.new
service = Ci::JobArtifacts::DestroyAllExpiredService.new
artifacts_count = service.execute
log_extra_metadata_on_done(:destroyed_job_artifacts_count, artifacts_count)
end

View file

@@ -0,0 +1,5 @@
---
title: Update import statuses texts and icons
merge_request: 54957
author:
type: changed

View file

@@ -0,0 +1,5 @@
---
title: Update Jira issues list to use new UI components
merge_request: 56465
author:
type: changed

View file

@@ -0,0 +1,5 @@
---
title: Fix word wrapping in parallel diffs
merge_request: 56713
author:
type: fixed

View file

@@ -0,0 +1,5 @@
---
title: Support adding and removing assignees w/ push opts
merge_request: 25904
author:
type: added

View file

@@ -1,8 +0,0 @@
---
name: jira_issues_list
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/45678
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/273726
milestone: '13.6'
type: development
group: group::ecosystem
default_enabled: false

View file

@@ -3595,6 +3595,16 @@ An edge in a connection.
| `cursor` | [`String!`](#string) | A cursor for use in pagination. |
| `node` | [`IterationCadence`](#iterationcadence) | The item at the end of the edge. |
### `IterationCadenceUpdatePayload`
Autogenerated return type of IterationCadenceUpdate.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| `errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| `iterationCadence` | [`IterationCadence`](#iterationcadence) | The updated iteration cadence. |
### `IterationConnection`
The connection type for Iteration.

View file

@@ -2325,8 +2325,8 @@ The valid values of `when` are:
1. `delayed` - [Delay the execution of a job](#whendelayed) for a specified duration.
Added in GitLab 11.4.
1. `never`:
- With [`rules`](#rules), don't execute job.
- With [`workflow`](#workflow), don't run pipeline.
- With job [`rules`](#rules), don't execute job.
- With [`workflow:rules`](#workflow), don't run pipeline.
In the following example, the script:

View file

@@ -808,8 +808,8 @@ Alternatively, you can use the CI/CD variable `SECURE_ANALYZERS_PREFIX` to overr
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/218465) in GitLab 13.2.
> - [Improved](https://gitlab.com/gitlab-org/gitlab/-/issues/218465) in GitLab 13.3.
> - The saved scans feature was [added](https://gitlab.com/groups/gitlab-org/-/epics/5100) in
> GitLab 13.9.
> - The saved scans feature was [introduced](https://gitlab.com/groups/gitlab-org/-/epics/5100) in GitLab 13.9.
> - The option to select a branch was [introduced](https://gitlab.com/groups/gitlab-org/-/epics/4847) in GitLab 13.10.
An on-demand DAST scan runs outside the DevOps life cycle. Changes in your repository don't trigger
the scan. You must start it manually.
@@ -821,6 +821,11 @@ An on-demand DAST scan:
- Is associated with your project's default branch.
- Is saved on creation so it can be run later.
In GitLab 13.10 and later, you can select to run an on-demand scan against a specific branch. This
feature is [deployed behind a feature flag](../../feature_flags.md), enabled by default. It's
enabled on GitLab.com and recommended for production use. [GitLab administrators with access to the GitLab Rails console](../../../administration/feature_flags.md)
can opt to disable it with `Feature.disable(:dast_branch_selection)`.
### On-demand scan modes
An on-demand scan can be run in active or passive mode:
@@ -853,6 +858,7 @@ To run an on-demand scan, either:
1. From your project's home page, go to **Security & Compliance > On-demand Scans** in the left
sidebar.
1. Complete the **Scan name** and **Description** fields.
1. In GitLab 13.10 and later, select the desired branch from the **Branch** dropdown.
1. In **Scanner profile**, select a scanner profile from the dropdown.
1. In **Site profile**, select a site profile from the dropdown.
1. To run the on-demand scan now, select **Save and run scan**. Otherwise select **Save scan** to
@@ -887,6 +893,9 @@ To run a saved on-demand scan:
1. Select the **Saved Scans** tab.
1. In the scan's row select **Run scan**.
If the branch saved in the scan no longer exists, you must first
[edit the scan](#edit-an-on-demand-scan), select a new branch, and save the edited scan.
The on-demand DAST scan runs and the project's dashboard shows the results.
### Edit an on-demand scan

View file

@@ -36,6 +36,7 @@ From the Vulnerability Report you can:
- [Filter the list of vulnerabilities](#filter-the-list-of-vulnerabilities).
- [View more details about a vulnerability](#view-details-of-a-vulnerability).
- [View vulnerable source location](#view-vulnerable-source-location) (if available).
- [View an issue raised for a vulnerability](#view-issues-raised-for-a-vulnerability).
- [Change the status of vulnerabilities](#change-status-of-vulnerabilities).
- [Export details of vulnerabilities](#export-vulnerability-details).
@@ -99,6 +100,16 @@ Selection behavior when using the Activity filter:
To view more details of a vulnerability, select the vulnerability's **Description**. The
[vulnerability's details](../vulnerabilities) page is opened.
## View vulnerable source location
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/267509) in GitLab 13.10.
Some security scanners output the filename and line number of a potential vulnerability. When
that information is available, the vulnerability's details include a link to the relevant file
in the default branch.
To view the relevant file, select the filename in the vulnerability's details.
## View issues raised for a vulnerability
The **Activity** column indicates the number of issues that have been created for the vulnerability.

View file

@@ -68,6 +68,8 @@ time as pushing changes:
| `merge_request.description="<description>"` | Set the description of the merge request. Ex: `git push -o merge_request.description="The description I want"`. | [12.2](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/64320) |
| `merge_request.label="<label>"` | Add labels to the merge request. If the label does not exist, it is created. For example, for two labels: `git push -o merge_request.label="label1" -o merge_request.label="label2"`. | [12.3](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/31831) |
| `merge_request.unlabel="<label>"` | Remove labels from the merge request. For example, for two labels: `git push -o merge_request.unlabel="label1" -o merge_request.unlabel="label2"`. | [12.3](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/31831) |
| `merge_request.assign="<user>"` | Assign users to the merge request. For example, for two users: `git push -o merge_request.assign="user1" -o merge_request.assign="user2"`. | [12.9](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/XXXXX) |
| `merge_request.unassign="<user>"` | Remove assigned users from the merge request. For example, for two users: `git push -o merge_request.unassign="user1" -o merge_request.unassign="user2"`. | [12.9](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/XXXXX) |
If you use a push option that requires text with spaces in it, you need to enclose it
in quotes (`"`). You can omit the quotes if there are no spaces. Some examples:

View file

@@ -245,7 +245,7 @@ module API
job = authenticate_job!
result = ::Ci::CreateJobArtifactsService.new(job).authorize(artifact_type: params[:artifact_type], filesize: params[:filesize])
result = ::Ci::JobArtifacts::CreateService.new(job).authorize(artifact_type: params[:artifact_type], filesize: params[:filesize])
if result[:status] == :success
content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
@@ -284,7 +284,7 @@ module API
artifacts = params[:file]
metadata = params[:metadata]
result = ::Ci::CreateJobArtifactsService.new(job).execute(artifacts, params, metadata_file: metadata)
result = ::Ci::JobArtifacts::CreateService.new(job).execute(artifacts, params, metadata_file: metadata)
if result[:status] == :success
status :created

View file

@@ -5,6 +5,7 @@ module Gitlab
VALID_OPTIONS = HashWithIndifferentAccess.new({
merge_request: {
keys: [
:assign,
:create,
:description,
:label,
@@ -12,6 +13,7 @@ module Gitlab
:remove_source_branch,
:target,
:title,
:unassign,
:unlabel
]
},
@@ -23,7 +25,9 @@
MULTI_VALUE_OPTIONS = [
%w[ci variable],
%w[merge_request label],
%w[merge_request unlabel]
%w[merge_request unlabel],
%w[merge_request assign],
%w[merge_request unassign]
].freeze
NAMESPACE_ALIASES = HashWithIndifferentAccess.new({

View file

@@ -5519,6 +5519,9 @@ msgstr ""
msgid "Canceled deployment to"
msgstr ""
msgid "Cancelled"
msgstr ""
msgid "Cancelling Preview"
msgstr ""
@@ -8844,9 +8847,6 @@ msgstr ""
msgid "Create new file or directory"
msgstr ""
msgid "Create new issue in Jira"
msgstr ""
msgid "Create new label"
msgstr ""
@@ -15739,6 +15739,9 @@ msgid_plural "Importing %d repositories"
msgstr[0] ""
msgstr[1] ""
msgid "Importing..."
msgstr ""
msgid "Improve customer support with Service Desk"
msgstr ""
@@ -26322,9 +26325,6 @@ msgstr ""
msgid "Running"
msgstr ""
msgid "Running…"
msgstr ""
msgid "Runs a number of housekeeping tasks within the current repository, such as compressing file revisions and removing unreachable objects."
msgstr ""
@@ -26466,9 +26466,6 @@ msgstr ""
msgid "Schedules to merge this merge request (%{strategy})."
msgstr ""
msgid "Scheduling"
msgstr ""
msgid "Scheduling Pipelines"
msgstr ""

View file

@@ -37,7 +37,7 @@ RSpec.describe 'Import multiple repositories by uploading a manifest file', :js
wait_for_requests
page.within(second_row) do
expect(page).to have_content 'Done'
expect(page).to have_content 'Complete'
expect(page).to have_content("#{group.full_path}/build/blueprint")
end
end

spec/fixtures/ce_sample_schema.json (new vendored file; 0 lines of diff shown)

View file

@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Ci::CreateJobArtifactsService do
RSpec.describe Ci::JobArtifacts::CreateService do
let_it_be(:project) { create(:project) }
let(:service) { described_class.new(job) }
let(:job) { create(:ci_build, project: project) }

View file

@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared_state do
RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_shared_state do
include ExclusiveLeaseHelpers
let(:service) { described_class.new }
@@ -24,7 +24,7 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
job = create(:ci_build, pipeline: artifact.job.pipeline)
create(:ci_job_artifact, :archive, :expired, job: job)
stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_LIMIT', 1)
stub_const("#{described_class}::LOOP_LIMIT", 1)
end
it 'performs the smallest number of queries for job_artifacts' do
@@ -113,7 +113,7 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
context 'when failed to destroy artifact' do
before do
stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_LIMIT', 10)
stub_const("#{described_class}::LOOP_LIMIT", 10)
end
context 'when the import fails' do
@@ -159,8 +159,8 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
before do
stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_TIMEOUT', 0.seconds)
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
stub_const("#{described_class}::LOOP_TIMEOUT", 0.seconds)
stub_const("#{described_class}::BATCH_SIZE", 1)
second_artifact.job.pipeline.unlocked!
end
@@ -176,8 +176,8 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
context 'when loop reached loop limit' do
before do
stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_LIMIT', 1)
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
stub_const("#{described_class}::LOOP_LIMIT", 1)
stub_const("#{described_class}::BATCH_SIZE", 1)
second_artifact.job.pipeline.unlocked!
end
@@ -209,7 +209,7 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
context 'when there are artifacts more than batch sizes' do
before do
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
stub_const("#{described_class}::BATCH_SIZE", 1)
second_artifact.job.pipeline.unlocked!
end

View file

@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Ci::JobArtifactsDestroyBatchService do
RSpec.describe Ci::JobArtifacts::DestroyBatchService do
include ExclusiveLeaseHelpers
let(:artifacts) { Ci::JobArtifact.all }

View file

@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Ci::PipelineArtifacts::DestroyExpiredArtifactsService do
RSpec.describe Ci::PipelineArtifacts::DestroyAllExpiredService do
let(:service) { described_class.new }
describe '.execute' do
@@ -10,7 +10,7 @@ RSpec.describe Ci::PipelineArtifacts::DestroyExpiredArtifactsService do
context 'when timeout happens' do
before do
stub_const('Ci::PipelineArtifacts::DestroyExpiredArtifactsService::LOOP_TIMEOUT', 0.1.seconds)
stub_const('Ci::PipelineArtifacts::DestroyAllExpiredService::LOOP_TIMEOUT', 0.1.seconds)
allow(service).to receive(:destroy_artifacts_batch) { true }
end
@@ -27,8 +27,8 @@ RSpec.describe Ci::PipelineArtifacts::DestroyExpiredArtifactsService do
context 'when the loop limit is reached' do
before do
stub_const('::Ci::PipelineArtifacts::DestroyExpiredArtifactsService::LOOP_LIMIT', 1)
stub_const('::Ci::PipelineArtifacts::DestroyExpiredArtifactsService::BATCH_SIZE', 1)
stub_const('::Ci::PipelineArtifacts::DestroyAllExpiredService::LOOP_LIMIT', 1)
stub_const('::Ci::PipelineArtifacts::DestroyAllExpiredService::BATCH_SIZE', 1)
create_list(:ci_pipeline_artifact, 2, expire_at: 1.week.ago)
end
@@ -44,7 +44,7 @@ RSpec.describe Ci::PipelineArtifacts::DestroyExpiredArtifactsService do
context 'when there are artifacts more than batch sizes' do
before do
stub_const('Ci::PipelineArtifacts::DestroyExpiredArtifactsService::BATCH_SIZE', 1)
stub_const('Ci::PipelineArtifacts::DestroyAllExpiredService::BATCH_SIZE', 1)
create_list(:ci_pipeline_artifact, 2, expire_at: 1.week.ago)
end

View file

@@ -4,10 +4,10 @@ require 'spec_helper'
RSpec.describe Issuable::ProcessAssignees do
describe '#execute' do
it 'returns assignee_ids when assignee_ids are specified' do
it 'returns assignee_ids when add_assignee_ids and remove_assignee_ids are not specified' do
process = Issuable::ProcessAssignees.new(assignee_ids: %w(5 7 9),
add_assignee_ids: %w(2 4 6),
remove_assignee_ids: %w(4 7 11),
add_assignee_ids: nil,
remove_assignee_ids: nil,
existing_assignee_ids: %w(1 3 9),
extra_assignee_ids: %w(2 5 12))
result = process.execute
@@ -15,19 +15,19 @@ RSpec.describe Issuable::ProcessAssignees do
expect(result.sort).to eq(%w(5 7 9).sort)
end
it 'combines other ids when assignee_ids is empty' do
process = Issuable::ProcessAssignees.new(assignee_ids: [],
add_assignee_ids: %w(2 4 6),
remove_assignee_ids: %w(4 7 11),
existing_assignee_ids: %w(1 3 11),
extra_assignee_ids: %w(2 5 12))
result = process.execute
expect(result.sort).to eq(%w(1 2 3 5 6 12).sort)
end
it 'combines other ids when assignee_ids is nil' do
process = Issuable::ProcessAssignees.new(assignee_ids: nil,
add_assignee_ids: nil,
remove_assignee_ids: nil,
existing_assignee_ids: %w(1 3 11),
extra_assignee_ids: %w(2 5 12))
result = process.execute
expect(result.sort).to eq(%w(1 2 3 5 11 12).sort)
end
it 'combines other ids when both add_assignee_ids and remove_assignee_ids are not empty' do
process = Issuable::ProcessAssignees.new(assignee_ids: %w(5 7 9),
add_assignee_ids: %w(2 4 6),
remove_assignee_ids: %w(4 7 11),
existing_assignee_ids: %w(1 3 11),
@@ -37,8 +37,8 @@ RSpec.describe Issuable::ProcessAssignees do
expect(result.sort).to eq(%w(1 2 3 5 6 12).sort)
end
it 'combines other ids when assignee_ids and add_assignee_ids are nil' do
process = Issuable::ProcessAssignees.new(assignee_ids: nil,
it 'combines other ids when remove_assignee_ids is not empty' do
process = Issuable::ProcessAssignees.new(assignee_ids: %w(5 7 9),
add_assignee_ids: nil,
remove_assignee_ids: %w(4 7 11),
existing_assignee_ids: %w(1 3 11),
@@ -48,8 +48,8 @@ RSpec.describe Issuable::ProcessAssignees do
expect(result.sort).to eq(%w(1 2 3 5 12).sort)
end
it 'combines other ids when assignee_ids and remove_assignee_ids are nil' do
process = Issuable::ProcessAssignees.new(assignee_ids: nil,
it 'combines other ids when add_assignee_ids is not empty' do
process = Issuable::ProcessAssignees.new(assignee_ids: %w(5 7 9),
add_assignee_ids: %w(2 4 6),
remove_assignee_ids: nil,
existing_assignee_ids: %w(1 3 11),
@@ -59,8 +59,8 @@ RSpec.describe Issuable::ProcessAssignees do
expect(result.sort).to eq(%w(1 2 4 3 5 6 11 12).sort)
end
it 'combines ids when only add_assignee_ids and remove_assignee_ids are passed' do
process = Issuable::ProcessAssignees.new(assignee_ids: nil,
it 'combines ids when existing_assignee_ids and extra_assignee_ids are omitted' do
process = Issuable::ProcessAssignees.new(assignee_ids: %w(5 7 9),
add_assignee_ids: %w(2 4 6),
remove_assignee_ids: %w(4 7 11))
result = process.execute

View file

@@ -6,10 +6,12 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
include ProjectForksHelper
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:user) { create(:user, developer_projects: [project]) }
let_it_be(:forked_project) { fork_project(project, user, repository: true) }
let_it_be(:user1) { create(:user, developer_projects: [project]) }
let_it_be(:user2) { create(:user, developer_projects: [project]) }
let_it_be(:user3) { create(:user, developer_projects: [project]) }
let_it_be(:forked_project) { fork_project(project, user1, repository: true) }
let(:service) { described_class.new(project, user, changes, push_options) }
let(:service) { described_class.new(project, user1, changes, push_options) }
let(:source_branch) { 'fix' }
let(:target_branch) { 'feature' }
let(:title) { 'my title' }
@@ -23,32 +25,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
let(:default_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{project.default_branch}" }
let(:error_mr_required) { "A merge_request.create push option is required to create a merge request for branch #{source_branch}" }
shared_examples_for 'a service that can create a merge request' do
subject(:last_mr) { MergeRequest.last }
it 'creates a merge request with the correct target branch and assigned user' do
branch = push_options[:target] || project.default_branch
expect { service.execute }.to change { MergeRequest.count }.by(1)
expect(last_mr.target_branch).to eq(branch)
expect(last_mr.assignees).to contain_exactly(user)
end
context 'when project has been forked', :sidekiq_might_not_need_inline do
let(:forked_project) { fork_project(project, user, repository: true) }
let(:service) { described_class.new(forked_project, user, changes, push_options) }
before do
allow(forked_project).to receive(:empty_repo?).and_return(false)
end
it 'sets the correct source and target project' do
service.execute
expect(last_mr.source_project).to eq(forked_project)
expect(last_mr.target_project).to eq(project)
end
end
before do
stub_licensed_features(multiple_merge_request_assignees: false)
end
shared_examples_for 'a service that can set the target of a merge request' do
@ -91,7 +69,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
expect(last_mr.auto_merge_enabled).to eq(true)
expect(last_mr.auto_merge_strategy).to eq(AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
- expect(last_mr.merge_user).to eq(user)
+ expect(last_mr.merge_user).to eq(user1)
expect(last_mr.merge_params['sha']).to eq(change[:newrev])
end
end
@ -116,12 +94,6 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
end
end
shared_examples_for 'a service that does not create a merge request' do
it do
expect { service.execute }.not_to change { MergeRequest.count }
end
end
shared_examples_for 'a service that does not update a merge request' do
it do
expect { service.execute }.not_to change { MergeRequest.maximum(:updated_at) }
@ -133,6 +105,18 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
include_examples 'a service that does not update a merge request'
end
shared_examples 'with a deleted branch' do
let(:changes) { deleted_branch_changes }
it_behaves_like 'a service that does nothing'
end
shared_examples 'with the project default branch' do
let(:changes) { default_branch_changes }
it_behaves_like 'a service that does nothing'
end
describe '`create` push option' do
let(:push_options) { { create: true } }
@ -155,17 +139,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
- it_behaves_like 'a service that does nothing'
- end
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
end
describe '`merge_when_pipeline_succeeds` push option' do
@ -217,17 +192,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that can set the merge request to merge when pipeline succeeds'
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
- it_behaves_like 'a service that does nothing'
- end
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
end
describe '`remove_source_branch` push option' do
@ -239,11 +205,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
service.execute
expect(service.errors).to include(error)
expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@ -281,17 +245,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that can remove the source branch when it is merged'
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
- it_behaves_like 'a service that does nothing'
- end
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
end
describe '`target` push option' do
@ -343,17 +298,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that can set the target of a merge request'
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
- it_behaves_like 'a service that does nothing'
- end
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
end
describe '`title` push option' do
@ -405,17 +351,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that can set the title of a merge request'
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
- it_behaves_like 'a service that does nothing'
- end
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
end
describe '`description` push option' do
@ -467,17 +404,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that can set the description of a merge request'
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
- it_behaves_like 'a service that does nothing'
- end
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
end
describe '`label` push option' do
@ -529,17 +457,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that can change labels of a merge request', 2
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
- it_behaves_like 'a service that does nothing'
- end
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
end
describe '`unlabel` push option' do
@ -551,11 +470,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
service.execute
expect(service.errors).to include(error)
expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@ -572,11 +489,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
service.execute
expect(service.errors).to include(error)
expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@ -595,17 +510,42 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that can change labels of a merge request', 1
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
+ end
- it_behaves_like 'a service that does nothing'
- end
+ shared_examples 'with an existing branch that has a merge request open in foss' do
+ let(:changes) { existing_branch_changes }
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
+ it_behaves_like 'a service that does not create a merge request'
+ it_behaves_like 'a service that can change assignees of a merge request', 1
+ end
- it_behaves_like 'a service that does nothing'
- end
describe '`assign` push option' do
let(:assigned) { { user2.id => 1, user3.id => 1 } }
let(:unassigned) { nil }
let(:push_options) { { assign: assigned, unassign: unassigned } }
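# The assign/unassign options are exercised here as hashes of user id => 1;
# presumably this mirrors the parsed form of `merge_request.assign`-style
# push options supplied on `git push`.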
it_behaves_like 'with a new branch', 1
it_behaves_like 'with an existing branch but no open MR', 1
it_behaves_like 'with an existing branch that has a merge request open in foss'
it_behaves_like 'with a deleted branch'
it_behaves_like 'with the project default branch'
end
describe '`unassign` push option' do
let(:assigned) { { user2.id => 1, user3.id => 1 } }
let(:unassigned) { { user1.id => 1, user3.id => 1 } }
let(:push_options) { { assign: assigned, unassign: unassigned } }
it_behaves_like 'with a new branch', 1
it_behaves_like 'with an existing branch but no open MR', 1
it_behaves_like 'with an existing branch that has a merge request open in foss'
it_behaves_like 'with a deleted branch'
it_behaves_like 'with the project default branch'
end
describe 'multiple pushed branches' do
@ -645,7 +585,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
end
describe 'no user' do
- let(:user) { nil }
+ let(:user1) { nil }
+ let(:user2) { nil }
+ let(:user3) { nil }
let(:push_options) { { create: true } }
let(:changes) { new_branch_changes }
@ -661,7 +603,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
let(:changes) { new_branch_changes }
it 'records an error' do
- Members::DestroyService.new(user).execute(ProjectMember.find_by!(user_id: user.id))
+ Members::DestroyService.new(user1).execute(ProjectMember.find_by!(user_id: user1.id))
service.execute
@ -707,7 +649,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
end
describe 'when MRs are not enabled' do
- let(:project) { create(:project, :public, :repository).tap { |pr| pr.add_developer(user) } }
+ let(:project) { create(:project, :public, :repository).tap { |pr| pr.add_developer(user1) } }
let(:push_options) { { create: true } }
let(:changes) { new_branch_changes }

View file

@ -73,3 +73,93 @@ RSpec.shared_examples 'merge request reviewers cache counters invalidator' do
described_class.new(project, user, {}).execute(merge_request)
end
end
RSpec.shared_examples_for 'a service that can create a merge request' do
subject(:last_mr) { MergeRequest.last }
it 'creates a merge request with the correct target branch' do
branch = push_options[:target] || project.default_branch
expect { service.execute }.to change { MergeRequest.count }.by(1)
expect(last_mr.target_branch).to eq(branch)
end
context 'when project has been forked', :sidekiq_might_not_need_inline do
let(:forked_project) { fork_project(project, user1, repository: true) }
let(:service) { described_class.new(forked_project, user1, changes, push_options) }
before do
allow(forked_project).to receive(:empty_repo?).and_return(false)
end
it 'sets the correct source and target project' do
service.execute
expect(last_mr.source_project).to eq(forked_project)
expect(last_mr.target_project).to eq(project)
end
end
end
RSpec.shared_examples_for 'a service that does not create a merge request' do
it do
expect { service.execute }.not_to change { MergeRequest.count }
end
end
# In the non-foss version of GitLab there can be many assignees, so
# 'count' can be something other than 0 or 1. In the foss version of
# GitLab there can be only one assignee, so 'count' can only be 0 or 1.
RSpec.shared_examples_for 'a service that can change assignees of a merge request' do |count|
subject(:last_mr) { MergeRequest.last }
it 'changes assignee count' do
service.execute
expect(last_mr.assignees.count).to eq(count)
end
end
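# Illustrative usage, assuming a foss context where at most one assignee
# applies (as in the `assign` examples above):
#   it_behaves_like 'a service that can change assignees of a merge request', 1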
RSpec.shared_examples 'with an existing branch that has a merge request open' do |count|
let(:changes) { existing_branch_changes }
let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
it_behaves_like 'a service that does not create a merge request'
it_behaves_like 'a service that can change assignees of a merge request', count
end
RSpec.shared_examples 'when coupled with the `create` push option' do |count|
let(:push_options) { { create: true, assign: assigned, unassign: unassigned } }
it_behaves_like 'a service that can create a merge request'
it_behaves_like 'a service that can change assignees of a merge request', count
end
RSpec.shared_examples 'with a new branch' do |count|
let(:changes) { new_branch_changes }
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
service.execute
expect(service.errors).to include(error_mr_required)
end
it_behaves_like 'when coupled with the `create` push option', count
end
RSpec.shared_examples 'with an existing branch but no open MR' do |count|
let(:changes) { existing_branch_changes }
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
service.execute
expect(service.errors).to include(error_mr_required)
end
it_behaves_like 'when coupled with the `create` push option', count
end

View file

@ -11,7 +11,7 @@ RSpec.describe Ci::PipelineArtifacts::ExpireArtifactsWorker do
end
it 'executes a service' do
- expect_next_instance_of(::Ci::PipelineArtifacts::DestroyExpiredArtifactsService) do |instance|
+ expect_next_instance_of(::Ci::PipelineArtifacts::DestroyAllExpiredService) do |instance|
expect(instance).to receive(:execute)
end

View file

@ -7,7 +7,7 @@ RSpec.describe ExpireBuildArtifactsWorker do
describe '#perform' do
it 'executes a service' do
- expect_next_instance_of(Ci::DestroyExpiredJobArtifactsService) do |instance|
+ expect_next_instance_of(Ci::JobArtifacts::DestroyAllExpiredService) do |instance|
expect(instance).to receive(:execute).and_call_original
end