Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent
a00537e412
commit
bd5ff5cb65
70 changed files with 822 additions and 704 deletions
@@ -1,10 +1,10 @@
<!-- Title suggestion: [Experiment Rollout] experiment-key - description of experiment -->
<!-- Title suggestion: [Experiment Rollout] feature-flag-name - description of experiment -->

## Summary

This issue tracks the rollout and status of an experiment through to removal.

1. Experiment key / feature flag name: `<experiment-key>`
1. Feature flag name: `<feature-flag-name>`
1. Epic or issue link: `<issue or epic link>`

This is an experiment rollout issue

@@ -55,7 +55,7 @@ Note: you can use the [CXL calculator](https://cxl.com/ab-test-calculator/) to d
- Runtime in days, or until we expect to reach statistical significance: `30`
- We will roll this out behind a feature flag and expose this to `<rollout-percentage>`% of actors to start then ramp it up from there.

`/chatops run feature set <experiment-key> <rollout-percentage> --actors`
`/chatops run feature set <feature-flag-name> <rollout-percentage> --actors`

### Status

@@ -83,14 +83,14 @@ In this rollout issue, ensure the scoped `experiment::` label is kept accurate.
## Roll Out Steps

- [ ] [Confirm that end-to-end tests pass with the feature flag enabled](https://docs.gitlab.com/ee/development/testing_guide/end_to_end/feature_flags.html#confirming-that-end-to-end-tests-pass-with-a-feature-flag-enabled). If there are failing tests, contact the relevant [stable counterpart in the Quality department](https://about.gitlab.com/handbook/engineering/quality/#individual-contributors) to collaborate in updating the tests or confirming that the failing tests are not caused by the changes behind the enabled feature flag.
- [ ] Enable on staging (`/chatops run feature set <experiment-key> true --staging`)
- [ ] Enable on staging (`/chatops run feature set <feature-flag-name> true --staging`)
- [ ] Test on staging
- [ ] Ensure that documentation has been updated
- [ ] Enable on GitLab.com for individual groups/projects listed above and verify behaviour (`/chatops run feature set --project=gitlab-org/gitlab feature_name true`)
- [ ] Enable on GitLab.com for individual groups/projects listed above and verify behaviour (`/chatops run feature set --project=gitlab-org/gitlab <feature-flag-name> true`)
- [ ] Coordinate a time to enable the flag with the SRE oncall and release managers
  - In `#production` mention `@sre-oncall` and `@release-managers`. Once an SRE on call and Release Manager on call confirm, you can proceed with the rollout
- [ ] Announce on the issue an estimated time this will be enabled on GitLab.com
- [ ] Enable on GitLab.com by running chatops command in `#production` (`/chatops run feature set feature_name true`)
- [ ] Enable on GitLab.com by running chatops command in `#production` (`/chatops run feature set <feature-flag-name> true`)
- [ ] Cross post chatops Slack command to `#support_gitlab-com` ([more guidance when this is necessary in the dev docs](https://docs.gitlab.com/ee/development/feature_flags/controls.html#where-to-run-commands)) and in your team channel
- [ ] Announce on the issue that the flag has been enabled
- [ ] Remove experiment code and feature flag and add changelog entry - a separate [cleanup issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new?issuable_template=Experiment%20Successful%20Cleanup) might be required

@@ -102,7 +102,7 @@ In this rollout issue, ensure the scoped `experiment::` label is kept accurate.
- [ ] This feature can be disabled by running the following Chatops command:

```
/chatops run feature set <experiment-key> false
/chatops run feature set <feature-flag-name> false
```

## Experiment Successful Cleanup Concerns
Gemfile
@@ -194,7 +194,7 @@ end
# State machine
gem 'state_machines-activerecord', '~> 0.8.0'

# Issue tags
# CI domain tags
gem 'acts-as-taggable-on', '~> 9.0'

# Background jobs
@@ -15,12 +15,10 @@ export default {
onCiConfigUpdate(content) {
this.$emit('updateCiConfig', content);
},
registerCiSchema() {
registerCiSchema({ detail: { instance } }) {
if (this.glFeatures.schemaLinting) {
const editorInstance = this.$refs.editor.getEditor();

editorInstance.use({ definition: CiSchemaExtension });
editorInstance.registerCiSchema();
instance.use({ definition: CiSchemaExtension });
instance.registerCiSchema();
}
},
},

@@ -33,7 +31,7 @@ export default {
ref="editor"
:file-name="ciConfigPath"
v-bind="$attrs"
@[$options.readyEvent]="registerCiSchema"
@[$options.readyEvent]="registerCiSchema($event)"
@input="onCiConfigUpdate"
v-on="$listeners"
/>
@@ -97,7 +97,7 @@ export default {
ref="editor"
data-editor-loading
data-qa-selector="source_editor_container"
@[$options.readyEvent]="$emit($options.readyEvent)"
@[$options.readyEvent]="$emit($options.readyEvent, $event)"
>
<pre class="editor-loading-content">{{ value }}</pre>
</div>
@@ -56,9 +56,7 @@ class Projects::MergeRequests::CreationsController < Projects::MergeRequests::Ap
@diff_notes_disabled = true

@environment = @merge_request.environments_for(current_user, latest: true).last

render json: { html: view_to_html_string('projects/merge_requests/creations/_diffs', diffs: @diffs, environment: @environment) }
render json: { html: view_to_html_string('projects/merge_requests/creations/_diffs', diffs: @diffs) }
end

def diff_for_path
@@ -35,13 +35,11 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic
diffs = @compare.diffs_in_batch(params[:page], params[:per_page], diff_options: diff_options_hash)
unfoldable_positions = @merge_request.note_positions_for_paths(diffs.diff_file_paths, current_user).unfoldable
environment = @merge_request.environments_for(current_user, latest: true).last

diffs.unfold_diff_files(unfoldable_positions)
diffs.write_cache

options = {
environment: environment,
merge_request: @merge_request,
commit: commit,
diff_view: diff_view,

@@ -54,7 +52,6 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic
# NOTE: Any variables that would affect the resulting json needs to be added to the cache_context to avoid stale cache issues.
cache_context = [
current_user&.cache_key,
environment&.cache_key,
unfoldable_positions.map(&:to_h),
diff_view,
params[:w],

@@ -98,7 +95,6 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic
# Deprecated: https://gitlab.com/gitlab-org/gitlab/issues/37735
def render_diffs
diffs = @compare.diffs(diff_options)
@environment = @merge_request.environments_for(current_user, latest: true).last

diffs.unfold_diff_files(note_positions.unfoldable)
diffs.write_cache

@@ -175,7 +171,6 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic
def additional_attributes
{
environment: @environment,
merge_request: @merge_request,
merge_request_diff: @merge_request_diff,
merge_request_diffs: @merge_request_diffs,
@@ -62,7 +62,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
feature_category :code_testing, [:test_reports, :coverage_reports]
feature_category :code_quality, [:codequality_reports, :codequality_mr_diff_reports]
feature_category :accessibility_testing, [:accessibility_reports]
feature_category :code_testing, [:accessibility_reports]
feature_category :infrastructure_as_code, [:terraform_reports]
feature_category :continuous_integration, [:pipeline_status, :pipelines, :exposed_artifacts]
@@ -14,8 +14,7 @@ module Environments
def execute
deployments =
if ref
deployments_query = params[:with_tags] ? 'ref = :ref OR tag IS TRUE' : 'ref = :ref'
Deployment.where(deployments_query, ref: ref.to_s)
Deployment.where(ref: ref.to_s)
elsif commit
Deployment.where(sha: commit.sha)
else
@@ -420,6 +420,10 @@ module Ci
true
end

def save_tags
super unless Thread.current['ci_bulk_insert_tags']
end

def archived?
return true if degenerated?
@@ -181,9 +181,7 @@ module Ci
end

scope :erasable, -> do
types = self.file_types.reject { |file_type| NON_ERASABLE_FILE_TYPES.include?(file_type) }.values

where(file_type: types)
where(file_type: self.erasable_file_types)
end

scope :downloadable, -> { where(file_type: DOWNLOADABLE_TYPES) }

@@ -263,6 +261,10 @@ module Ci
[file_type]
end

def self.erasable_file_types
self.file_types.keys - NON_ERASABLE_FILE_TYPES
end

def self.total_size
self.sum(:size)
end
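The new class method gives other callers the same list the `erasable` scope uses. A minimal sketch of how it can be exercised, assuming a Rails console for this codebase (both names come from the hunks above):

```ruby
# Sketch, not part of the commit: the scope and the project-wide expiry service
# introduced later in this diff both derive the erasable set from this method.
Ci::JobArtifact.erasable_file_types  # => file type names minus NON_ERASABLE_FILE_TYPES
Ci::JobArtifact.erasable.limit(10)   # scope now defined as where(file_type: erasable_file_types)
```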
@@ -221,8 +221,8 @@ class CommitStatus < Ci::ApplicationRecord
false
end

def self.bulk_insert_tags!(statuses, tag_list_by_build)
Gitlab::Ci::Tags::BulkInsert.new(statuses, tag_list_by_build).insert!
def self.bulk_insert_tags!(statuses)
Gitlab::Ci::Tags::BulkInsert.new(statuses).insert!
end

def locking_enabled?
@@ -1395,20 +1395,6 @@ class MergeRequest < ApplicationRecord
actual_head_pipeline.success?
end

def environments_for(current_user, latest: false)
return [] unless diff_head_commit

envs = Environments::EnvironmentsByDeploymentsFinder.new(target_project, current_user,
ref: target_branch, commit: diff_head_commit, with_tags: true, find_latest: latest).execute

if source_project
envs.concat Environments::EnvironmentsByDeploymentsFinder.new(source_project, current_user,
ref: source_branch, commit: diff_head_commit, find_latest: latest).execute
end

envs.uniq
end

##
# This method is for looking for active environments which created via pipelines for merge requests.
# Since deployments run on a merge request ref (e.g. `refs/merge-requests/:iid/head`),
@@ -0,0 +1,11 @@
# frozen_string_literal: true

module Ci
module JobArtifacts
class DeleteProjectArtifactsService < BaseProjectService
def execute
ExpireProjectBuildArtifactsWorker.perform_async(project.id)
end
end
end
end
@@ -0,0 +1,35 @@
# frozen_string_literal: true

module Ci
module JobArtifacts
class ExpireProjectBuildArtifactsService
BATCH_SIZE = 1000

def initialize(project_id, expiry_time)
@project_id = project_id
@expiry_time = expiry_time
end

# rubocop:disable CodeReuse/ActiveRecord
def execute
scope = Ci::JobArtifact.for_project(project_id).order(:id)
file_type_values = Ci::JobArtifact.erasable_file_types.map { |file_type| [Ci::JobArtifact.file_types[file_type]] }
from_sql = Arel::Nodes::Grouping.new(Arel::Nodes::ValuesList.new(file_type_values)).as('file_types (file_type)').to_sql
array_scope = Ci::JobArtifact.from(from_sql).select(:file_type)
array_mapping_scope = -> (file_type_expression) { Ci::JobArtifact.where(Ci::JobArtifact.arel_table[:file_type].eq(file_type_expression)) }

Gitlab::Pagination::Keyset::Iterator
.new(scope: scope, in_operator_optimization_options: { array_scope: array_scope, array_mapping_scope: array_mapping_scope })
.each_batch(of: BATCH_SIZE) do |batch|
ids = batch.reselect!(:id).to_a.map(&:id)
Ci::JobArtifact.unlocked.where(id: ids).update_all(locked: Ci::JobArtifact.lockeds[:unlocked], expire_at: expiry_time)
end
end
# rubocop:enable CodeReuse/ActiveRecord

private

attr_reader :project_id, :expiry_time
end
end
end
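The `execute` method above batches by file type through keyset pagination's in-operator optimization. Roughly, and as an assumption for illustration only, the pieces map to this query shape:

```ruby
# Sketch of what the Arel above produces (simplified, not code from the commit):
# array_scope:          SELECT file_type FROM (VALUES (1), (2), ...) AS file_types (file_type)
# array_mapping_scope:  ci_job_artifacts rows whose file_type equals one of those values
# iterator:             walks the project's artifacts in id order, BATCH_SIZE at a time;
#                       update_all then sets expire_at = expiry_time (and locked = unlocked)
#                       only for rows that are currently unlocked.
```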
@@ -9,17 +9,18 @@
= group_icon(@group, class: 'avatar avatar-tile s64', width: 64, height: 64, itemprop: 'logo')
.d-flex.flex-column.flex-wrap.align-items-baseline
.d-inline-flex.align-items-baseline
%h1.home-panel-title.gl-mt-3.gl-mb-2.gl-ml-3{ itemprop: 'name' }
%h1.home-panel-title.gl-mt-3.gl-mb-2{ itemprop: 'name' }
= @group.name
%span.visibility-icon.text-secondary.gl-ml-2.has-tooltip{ data: { container: 'body' }, title: visibility_icon_description(@group) }
= visibility_level_icon(@group.visibility_level, options: {class: 'icon'})
.home-panel-metadata.text-secondary.gl-font-base.gl-font-weight-normal.gl-line-height-normal
.home-panel-metadata.text-secondary.gl-font-base.gl-font-weight-normal.gl-line-height-normal{ data: { qa_selector: 'group_id_content' }, itemprop: 'identifier' }
- if can?(current_user, :read_group, @group)
- button_class = "btn gl-button btn-sm btn-tertiary btn-default-tertiary home-panel-metadata"
- button_text = s_('GroupPage|Group ID: %{group_id}') % { group_id: @group.id }
= clipboard_button(title: s_('GroupPage|Copy group ID'), text: @group.id, hide_button_icon: true, button_text: button_text, class: button_class, qa_selector: 'group_id_content', itemprop: 'identifier')
%span.gl-display-inline-block.gl-vertical-align-middle
= s_("GroupPage|Group ID: %{group_id}") % { group_id: @group.id }
- button_class = "btn gl-button btn-sm btn-tertiary btn-default-tertiary home-panel-metadata"
= clipboard_button(title: s_('GroupPage|Copy group ID'), text: @group.id, class: button_class)
- if current_user
%span.gl-ml-3
%span.gl-ml-3.gl-mb-3
= render 'shared/members/access_request_links', source: @group

.home-panel-buttons.col-md-12.col-lg-6
@@ -10,18 +10,19 @@
= project_icon(@project, alt: @project.name, class: 'avatar avatar-tile s64', width: 64, height: 64, itemprop: 'image')
.d-flex.flex-column.flex-wrap.align-items-baseline
.d-inline-flex.align-items-baseline
%h1.home-panel-title.gl-mt-3.gl-mb-2.gl-font-size-h1.gl-line-height-24.gl-font-weight-bold.gl-ml-3{ data: { qa_selector: 'project_name_content' }, itemprop: 'name' }
%h1.home-panel-title.gl-mt-3.gl-mb-2.gl-font-size-h1.gl-line-height-24.gl-font-weight-bold{ data: { qa_selector: 'project_name_content' }, itemprop: 'name' }
= @project.name
%span.visibility-icon.text-secondary.gl-ml-2.has-tooltip{ data: { container: 'body' }, title: visibility_icon_description(@project) }
= visibility_level_icon(@project.visibility_level, options: { class: 'icon' })
= render_if_exists 'compliance_management/compliance_framework/compliance_framework_badge', project: @project
.home-panel-metadata.text-secondary.gl-font-base.gl-font-weight-normal.gl-line-height-normal
.home-panel-metadata.text-secondary.gl-font-base.gl-font-weight-normal.gl-line-height-normal{ data: { qa_selector: 'project_id_content' }, itemprop: 'identifier' }
- if can?(current_user, :read_project, @project)
- button_class = "btn gl-button btn-sm btn-tertiary btn-default-tertiary home-panel-metadata"
- button_text = s_('ProjectPage|Project ID: %{project_id}') % { project_id: @project.id }
= clipboard_button(title: s_('ProjectPage|Copy project ID'), text: @project.id, hide_button_icon: true, button_text: button_text, class: button_class, qa_selector: 'project_id_content', itemprop: 'identifier')
%span.gl-display-inline-block.gl-vertical-align-middle
= s_('ProjectPage|Project ID: %{project_id}') % { project_id: @project.id }
- button_class = "btn gl-button btn-sm btn-tertiary btn-default-tertiary home-panel-metadata"
= clipboard_button(title: s_('ProjectPage|Copy project ID'), text: @project.id, class: button_class)
- if current_user
%span.gl-display-inline-block.gl-vertical-align-middle.gl-ml-3
%span.gl-ml-3.gl-mb-3
= render 'shared/members/access_request_links', source: @project

.gl-mt-3.gl-pl-3.gl-w-full
@@ -1987,6 +1987,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: ci_job_artifacts_expire_project_build_artifacts
:worker_name: Ci::JobArtifacts::ExpireProjectBuildArtifactsWorker
:feature_category: :build_artifacts
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: create_commit_signature
:worker_name: CreateCommitSignatureWorker
:feature_category: :source_code_management
@@ -0,0 +1,20 @@
# frozen_string_literal: true

module Ci
module JobArtifacts
class ExpireProjectBuildArtifactsWorker
include ApplicationWorker

data_consistency :always

feature_category :build_artifacts
idempotent!

def perform(project_id)
return unless Project.id_in(project_id).exists?

ExpireProjectBuildArtifactsService.new(project_id, Time.current).execute
end
end
end
end
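Taken together, the service, worker, and the API endpoint added later in this diff form one chain. An annotated recap, using only names that appear in this commit:

```ruby
# DELETE /projects/:id/artifacts (see the API hunk further down) runs synchronously:
Ci::JobArtifacts::DeleteProjectArtifactsService.new(project: user_project).execute
#   └─ enqueues Ci::JobArtifacts::ExpireProjectBuildArtifactsWorker.perform_async(project.id)
#        └─ which calls ExpireProjectBuildArtifactsService.new(project_id, Time.current).execute,
#           expiring the project's erasable, unlocked artifacts in batches of 1000.
```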
@@ -7,7 +7,6 @@
# PLEASE DO NOT EDIT THIS FILE MANUALLY.
#
---
- accessibility_testing
- advanced_deployments
- api_security
- attack_emulation

@@ -104,6 +103,8 @@
- review_apps
- runbooks
- runner
- runner_fleet
- runner_saas
- scalability
- secret_detection
- secrets_management

@@ -121,7 +122,6 @@
- synthetic_monitoring
- team_planning
- tracing
- usability_testing
- usage_ping
- users
- utilization
@@ -0,0 +1,8 @@
---
name: bulk_expire_project_artifacts
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75488
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/347405
milestone: '14.6'
type: development
group: group::testing
default_enabled: false
@@ -2,6 +2,7 @@
name: lfs_link_existing_object
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/41770
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/249246
milestone: '13.4'
group: group::source code
type: development
default_enabled: false
@@ -73,6 +73,8 @@
- 1
- - ci_delete_objects
- 1
- - ci_job_artifacts_expire_project_build_artifacts
- 1
- - ci_upstream_projects_subscriptions_cleanup
- 1
- - container_repository
@@ -0,0 +1,15 @@
# frozen_string_literal: true

class AddIndexCiJobArtifactsProjectIdFileType < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!

INDEX_NAME = 'index_ci_job_artifacts_on_id_project_id_and_file_type'

def up
add_concurrent_index :ci_job_artifacts, [:project_id, :file_type, :id], name: INDEX_NAME
end

def down
remove_concurrent_index_by_name :ci_job_artifacts, INDEX_NAME
end
end
@@ -0,0 +1,15 @@
# frozen_string_literal: true

class RemoveProjectsCiUnitTestsProjectIdFk < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!

def up
with_lock_retries do
remove_foreign_key_if_exists(:ci_unit_tests, :projects, name: "fk_7a8fabf0a8")
end
end

def down
add_concurrent_foreign_key(:ci_unit_tests, :projects, name: "fk_7a8fabf0a8", column: :project_id, target_column: :id, on_delete: "cascade")
end
end
@@ -0,0 +1,15 @@
# frozen_string_literal: true

class RemoveProjectsCiBuildReportResultsProjectIdFk < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!

def up
with_lock_retries do
remove_foreign_key_if_exists(:ci_build_report_results, :projects, name: "fk_rails_056d298d48")
end
end

def down
add_concurrent_foreign_key(:ci_build_report_results, :projects, name: "fk_rails_056d298d48", column: :project_id, target_column: :id, on_delete: "cascade")
end
end
db/schema_migrations/20211207081708 (new file)
@@ -0,0 +1 @@
e26065e63eca51e4138b6e9f07e9ec1ee45838afa82c5832849e360375beeae2

db/schema_migrations/20220112230642 (new file)
@@ -0,0 +1 @@
c528730414c1dcda5d312f03d4e37a0dbb51ebb0b0b87ada786cf686c358daa7

db/schema_migrations/20220113015830 (new file)
@@ -0,0 +1 @@
774a5ff616663d6d0e002bd04d33747982de10b02cbb9ad7d8abfe0b26a2b441
@@ -25465,6 +25465,8 @@ CREATE INDEX index_ci_job_artifacts_on_file_store ON ci_job_artifacts USING btre
CREATE INDEX index_ci_job_artifacts_on_file_type_for_devops_adoption ON ci_job_artifacts USING btree (file_type, project_id, created_at) WHERE (file_type = ANY (ARRAY[5, 6, 8, 23]));

CREATE INDEX index_ci_job_artifacts_on_id_project_id_and_file_type ON ci_job_artifacts USING btree (project_id, file_type, id);

CREATE UNIQUE INDEX index_ci_job_artifacts_on_job_id_and_file_type ON ci_job_artifacts USING btree (job_id, file_type);

CREATE INDEX index_ci_job_artifacts_on_project_id ON ci_job_artifacts USING btree (project_id);

@@ -29378,9 +29380,6 @@ ALTER TABLE ONLY analytics_devops_adoption_snapshots
ALTER TABLE ONLY lists
ADD CONSTRAINT fk_7a5553d60f FOREIGN KEY (label_id) REFERENCES labels(id) ON DELETE CASCADE;

ALTER TABLE ONLY ci_unit_tests
ADD CONSTRAINT fk_7a8fabf0a8 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;

ALTER TABLE ONLY protected_branches
ADD CONSTRAINT fk_7a9c6d93e7 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;

@@ -29942,9 +29941,6 @@ ALTER TABLE ONLY ip_restrictions
ALTER TABLE ONLY terraform_state_versions
ADD CONSTRAINT fk_rails_04f176e239 FOREIGN KEY (terraform_state_id) REFERENCES terraform_states(id) ON DELETE CASCADE;

ALTER TABLE ONLY ci_build_report_results
ADD CONSTRAINT fk_rails_056d298d48 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;

ALTER TABLE ONLY ci_daily_build_group_report_results
ADD CONSTRAINT fk_rails_0667f7608c FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
@@ -259,7 +259,7 @@ Example response:
}
```

## Delete artifacts
## Delete job artifacts

> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/25522) in GitLab 11.9.
@@ -284,3 +284,34 @@ NOTE:
At least Maintainer role is required to delete artifacts.

If the artifacts were deleted successfully, a response with status `204 No Content` is returned.

## Delete project artifacts

> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/223793) in GitLab 14.7 [with a flag](../administration/feature_flags.md) named `bulk_expire_project_artifacts`. Disabled by default.

FLAG:
On self-managed GitLab, by default this feature is not available. To make it
available, ask an administrator to [enable the `bulk_expire_project_artifacts` flag](../administration/feature_flags.md).
On GitLab.com, this feature is not available.

[Expire artifacts of a project that can be deleted](https://gitlab.com/gitlab-org/gitlab/-/issues/223793) but that don't have an expiry time.

```plaintext
DELETE /projects/:id/artifacts
```

| Attribute | Type | Required | Description |
|-----------|----------------|----------|-----------------------------------------------------------------------------|
| `id` | integer/string | yes | ID or [URL-encoded path of the project](index.md#namespaced-path-encoding) |

Example request:

```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/artifacts"
```

NOTE:
At least Maintainer role is required to delete artifacts.

Schedules a worker to update to the current time the expiry of all artifacts that can be deleted.
A response with status `202 Accepted` is returned.
@@ -181,7 +181,7 @@ After you have the route mapping set up, it takes effect in the following locati
![View app file list in merge request widget](img/view_on_mr_widget.png)

- In the diff for a merge request, comparison, or commit.
- In the diff for a comparison or commit.

![View on environment button in merge request diff](img/view_on_env_mr.png)
@@ -1,402 +0,0 @@
---
stage: Growth
group: Activation
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---

# Create an A/B test with `Experimentation Module`

NOTE:
We recommend using [GLEX](gitlab_experiment.md) for new experiments.

## Implement the experiment

1. Add the experiment to the `Gitlab::Experimentation::EXPERIMENTS` hash in
[`experimentation.rb`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib%2Fgitlab%2Fexperimentation.rb):

```ruby
EXPERIMENTS = {
other_experiment: {
#...
},
# Add your experiment here:
signup_flow: {
tracking_category: 'Growth::Activation::Experiment::SignUpFlow' # Used for providing the category when setting up tracking data
}
}.freeze
```

1. Use the experiment in the code.

Experiments can be performed on a `subject`. The provided `subject` should
respond to `to_global_id` or `to_s`.
The resulting string is bucketed and assigned to either the control or the
experimental group, so you must always provide the same `subject`
for an experiment to have the same experience.

1. Use this standard for the experiment in a controller:

- Experiment run for a user:

```ruby
class ProjectController < ApplicationController
def show
# experiment_enabled?(:experiment_key) is also available in views and helpers
if experiment_enabled?(:signup_flow, subject: current_user)
# render the experiment
else
# render the original version
end
end
end
```

- Experiment run for a namespace:

```ruby
if experiment_enabled?(:signup_flow, subject: namespace)
# experiment code
else
# control code
end
```

When no subject is given, it falls back to a cookie that gets set and is consistent until
the cookie gets deleted.

```ruby
class RegistrationController < ApplicationController
def show
# falls back to a cookie
if experiment_enabled?(:signup_flow)
# render the experiment
else
# render the original version
end
end
end
```

1. Make the experiment available to the frontend in a controller. This example
checks whether the experiment is enabled and pushes the result to the frontend:

```ruby
before_action do
push_frontend_experiment(:signup_flow, subject: current_user)
end
```

You can check the state of the feature flag in JavaScript:

```javascript
import { isExperimentEnabled } from '~/experimentation';

if ( isExperimentEnabled('signupFlow') ) {
// ...
}
```

You can also run an experiment outside of the controller scope, such as in a worker:

```ruby
class SomeWorker
def perform
# Check if the experiment is active at all (the percentage_of_time_value > 0)
return unless Gitlab::Experimentation.active?(:experiment_key)

# Since we cannot access cookies in a worker, we need to bucket models
# based on a unique, unchanging attribute instead.
# It is therefore necessary to always provide the same subject.
if Gitlab::Experimentation.in_experiment_group?(:experiment_key, subject: user)
# execute experimental code
else
# execute control code
end
end
end
```

## Implement tracking events

To determine whether the experiment is a success or not, we must implement tracking events
to acquire data for analyzing. We can send events to Snowplow via either the backend or frontend.
Read the [product intelligence guide](https://about.gitlab.com/handbook/product/product-intelligence-guide/) for more details.

### Track backend events

The framework provides a helper method that is available in controllers:

```ruby
before_action do
track_experiment_event(:signup_flow, 'action', 'value', subject: current_user)
end
```

To test it:

```ruby
context 'when the experiment is active and the user is in the experimental group' do
before do
stub_experiment(signup_flow: true)
stub_experiment_for_subject(signup_flow: true)
end

it 'tracks an event', :snowplow do
subject

expect_snowplow_event(
category: 'Growth::Activation::Experiment::SignUpFlow',
action: 'action',
value: 'value',
label: 'experimentation_subject_id',
property: 'experimental_group'
)
end
end
```

### Track frontend events

The framework provides a helper method that is available in controllers:

```ruby
before_action do
push_frontend_experiment(:signup_flow, subject: current_user)
frontend_experimentation_tracking_data(:signup_flow, 'action', 'value', subject: current_user)
end
```

This pushes tracking data to `gon.experiments` and `gon.tracking_data`.

```ruby
expect(Gon.experiments['signupFlow']).to eq(true)

expect(Gon.tracking_data).to eq(
{
category: 'Growth::Activation::Experiment::SignUpFlow',
action: 'action',
value: 'value',
label: 'experimentation_subject_id',
property: 'experimental_group'
}
)
```

To track it:

```javascript
import { isExperimentEnabled } from '~/lib/utils/experimentation';
import Tracking from '~/tracking';

document.addEventListener('DOMContentLoaded', () => {
const signupFlowExperimentEnabled = isExperimentEnabled('signupFlow');

if (signupFlowExperimentEnabled && gon.tracking_data) {
const { category, action, ...data } = gon.tracking_data;

Tracking.event(category, action, data);
}
}
```

To test it in Jest:

```javascript
import { withGonExperiment } from 'helpers/experimentation_helper';
import Tracking from '~/tracking';

describe('event tracking', () => {
describe('with tracking data', () => {
withGonExperiment('signupFlow');

beforeEach(() => {
jest.spyOn(Tracking, 'event').mockImplementation(() => {});

gon.tracking_data = {
category: 'Growth::Activation::Experiment::SignUpFlow',
action: 'action',
value: 'value',
label: 'experimentation_subject_id',
property: 'experimental_group'
};
});

it('should track data', () => {
performAction()

expect(Tracking.event).toHaveBeenCalledWith(
'Growth::Activation::Experiment::SignUpFlow',
'action',
{
value: 'value',
label: 'experimentation_subject_id',
property: 'experimental_group'
},
);
});
});
});
```

## Record experiment user

In addition to the anonymous tracking of events, we can also record which users
have participated in which experiments, and whether they were given the control
experience or the experimental experience.

The `record_experiment_user` helper method is available to all controllers, and it
enables you to record these experiment participants (the current user) and which
experience they were given:

```ruby
before_action do
record_experiment_user(:signup_flow)
end
```

Subsequent calls to this method for the same experiment and the same user have no
effect unless the user is then enrolled into a different experience. This happens
when we roll out the experimental experience to a greater percentage of users.

This data is completely separate from the [events tracking data](#implement-tracking-events).
They are not linked together in any way.

### Add context

You can add arbitrary context data in a hash which gets stored as part of the experiment
user record. New calls to the `record_experiment_user` with newer contexts are merged
deeply into the existing context.

This data can then be used by data analytics dashboards.

```ruby
before_action do
record_experiment_user(:signup_flow, foo: 42, bar: { a: 22})
# context is { "foo" => 42, "bar" => { "a" => 22 }}
end

# Additional contexts for newer record calls are merged deeply
record_experiment_user(:signup_flow, foo: 40, bar: { b: 2 }, thor: 3)
# context becomes { "foo" => 40, "bar" => { "a" => 22, "b" => 2 }, "thor" => 3}
```

## Record experiment conversion event

Along with the tracking of backend and frontend events and the
[recording of experiment participants](#record-experiment-user), we can also record
when a user performs the desired conversion event action. For example:

- **Experimental experience:** Show an in-product nudge to test if the change causes more
people to sign up for trials.
- **Conversion event:** The user starts a trial.

The `record_experiment_conversion_event` helper method is available to all controllers.
Use it to record the conversion event for the current user, regardless of whether
the user is in the control or experimental group:

```ruby
before_action do
record_experiment_conversion_event(:signup_flow)
end
```

Note that the use of this method requires that we have first
[recorded the user](#record-experiment-user) as being part of the experiment.

## Enable the experiment

After all merge requests have been merged, use [ChatOps](../../ci/chatops/index.md) in the
[appropriate channel](../feature_flags/controls.md#communicate-the-change) to start the experiment for 10% of the users.
The feature flag should have the name of the experiment with the `_experiment_percentage` suffix appended.
For visibility, share any commands run against production in the `#s_growth` channel:

```shell
/chatops run feature set signup_flow_experiment_percentage 10
```

If you notice issues with the experiment, you can disable the experiment by removing the feature flag:

```shell
/chatops run feature delete signup_flow_experiment_percentage
```

## Add user to experiment group manually

To force the application to add your current user into the experiment group,
add a query string parameter to the path where the experiment runs. If you add the
query string parameter, the experiment works only for this request, and doesn't work
after following links or submitting forms.

For example, to forcibly enable the `EXPERIMENT_KEY` experiment, add `force_experiment=EXPERIMENT_KEY`
to the URL:

```shell
https://gitlab.com/<EXPERIMENT_ENTRY_URL>?force_experiment=<EXPERIMENT_KEY>
```

## Add user to experiment group with a cookie

You can force the current user into the experiment group for `<EXPERIMENT_KEY>`
during the browser session by using your browser's developer tools:

```javascript
document.cookie = "force_experiment=<EXPERIMENT_KEY>; path=/";
```

Use a comma to list more than one experiment to be forced:

```javascript
document.cookie = "force_experiment=<EXPERIMENT_KEY>,<ANOTHER_EXPERIMENT_KEY>; path=/";
```

To clear the experiments, unset the `force_experiment` cookie:

```javascript
document.cookie = "force_experiment=; path=/";
```

## Testing and test helpers

### RSpec

Use the following in RSpec to mock the experiment:

```ruby
context 'when the experiment is active' do
before do
stub_experiment(signup_flow: true)
end

context 'when the user is in the experimental group' do
before do
stub_experiment_for_subject(signup_flow: true)
end

it { is_expected.to do_experimental_thing }
end

context 'when the user is in the control group' do
before do
stub_experiment_for_subject(signup_flow: false)
end

it { is_expected.to do_control_thing }
end
end
```

### Jest

Use the following in Jest to mock the experiment:

```javascript
import { withGonExperiment } from 'helpers/experimentation_helper';

describe('given experiment is enabled', () => {
withGonExperiment('signupFlow');

it('should do the experimental thing', () => {
expect(wrapper.find('.js-some-experiment-triggered-element')).toEqual(expect.any(Element));
});
});
```
@@ -71,6 +71,8 @@ class Cached cached
## Implement an experiment

[Examples](https://gitlab.com/gitlab-org/growth/growth/-/wikis/GLEX-Framework-code-examples)

Start by generating a feature flag using the `bin/feature-flag` command as you
normally would for a development feature flag, making sure to use `experiment` for
the type. For the sake of documentation let's name our feature flag (and experiment)
@@ -48,10 +48,7 @@ If the experiment is successful and becomes part of the product, any items that
For more information, see [Implementing an A/B/n experiment using GLEX](gitlab_experiment.md).

There are still some longer running experiments using the [`Experimentation Module`](experimentation.md).

Both approaches use [experiment](../feature_flags/index.md#experiment-type) feature flags.
`GLEX` is the preferred option for new experiments.
This uses [experiment](../feature_flags/index.md#experiment-type) feature flags.

### Add new icons and illustrations for experiments
@@ -416,6 +416,7 @@ applications.
| `AUTO_DEVOPS_ALLOW_TO_FORCE_DEPLOY_V<N>` | From [auto-deploy-image](https://gitlab.com/gitlab-org/cluster-integration/auto-deploy-image) v1.0.0, if this variable is present, a new major version of chart is forcibly deployed. For more information, see [Ignore warnings and continue deploying](upgrading_auto_deploy_dependencies.md#ignore-warnings-and-continue-deploying). |
| `BUILDPACK_URL` | Buildpack's full URL. [Must point to a URL supported by Pack or Herokuish](#custom-buildpacks). |
| `CANARY_ENABLED` | Used to define a [deploy policy for canary environments](#deploy-policy-for-canary-environments). |
| `BUILDPACK_VOLUMES` | Specify one or more [Buildpack volumes to mount](stages.md#mount-volumes-into-the-build-container). Use a pipe `|` as list separator. |
| `CANARY_PRODUCTION_REPLICAS` | Number of canary replicas to deploy for [Canary Deployments](../../user/project/canary_deployments.md) in the production environment. Takes precedence over `CANARY_REPLICAS`. Defaults to 1. |
| `CANARY_REPLICAS` | Number of canary replicas to deploy for [Canary Deployments](../../user/project/canary_deployments.md). Defaults to 1. |
| `CI_APPLICATION_REPOSITORY` | The repository of container image being built or deployed, `$CI_APPLICATION_REPOSITORY:$CI_APPLICATION_TAG`. For more details, read [Custom container image](#custom-container-image). |
@@ -65,6 +65,30 @@ Auto Test still uses Herokuish, as test suite detection is not
yet part of the Cloud Native Buildpack specification. For more information, see
[this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/212689).

#### Mount volumes into the build container

> - [Introduced](https://gitlab.com/gitlab-org/cluster-integration/auto-build-image/-/merge_requests/65) in GitLab 14.2.
> - Multiple volume support (or `auto-build-image` v1.6.0) [introduced](https://gitlab.com/gitlab-org/cluster-integration/auto-build-image/-/merge_requests/80) in GitLab 14.6.

The variable `BUILDPACK_VOLUMES` can be used to pass volume mount definitions to the
`pack` command. The mounts are passed to `pack build` using `--volume` arguments.
Each volume definition can include any of the capabilities provided by `pack build`
such as the host path, the target path, whether the volume is writable, and
one or more volume options.

Use a pipe `|` character to pass multiple volumes.
Each item from the list is passed to `pack build` using a separate `--volume` argument.

In this example, three volumes are mounted in the container as `/etc/foo`, `/opt/foo`, and `/var/opt/foo`:

```yaml
buildjob:
variables:
BUILDPACK_VOLUMES: /mnt/1:/etc/foo:ro|/mnt/2:/opt/foo:ro|/mnt/3:/var/opt/foo:rw
```

Read more about defining volumes in the [`pack build` documentation](https://buildpacks.io/docs/tools/pack/cli/pack_build/).

### Auto Build using Herokuish

> [Replaced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63351) with Cloud Native Buildpacks in GitLab 14.0.
@@ -9,6 +9,58 @@ info: To determine the technical writer assigned to the Stage/Group associated w
Deploy your application internally or to the public. Use
flags to release features incrementally.

- [Environments and deployments](../ci/environments/index.md)
- [Releases](../user/project/releases/index.md)
- [Feature flags](../operations/feature_flags.md)

## Deployments

Deployment is the step of the software delivery process when your application gets deployed to its
final, target infrastructure.

### Deploy with Auto DevOps

[Auto DevOps](autodevops/index.md) is an automated CI/CD-based workflow that supports the entire software
supply chain: build, test, lint, package, deploy, secure, and monitor applications using GitLab CI/CD.
It provides a set of ready-to-use templates that serve the vast majority of use cases.

[Auto Deploy](autodevops/stages.md#auto-deploy) is the DevOps stage dedicated to software
deployment using GitLab CI/CD.

### Deploy applications to Kubernetes clusters

With the extensive integration between GitLab and Kubernetes, you can safely deploy your applications
to Kubernetes clusters using the [GitLab Agent](../user/clusters/agent/install/index.md).

#### GitOps deployments **(PREMIUM)**

With the [GitLab Agent](../user/clusters/agent/install/index.md), you can perform pull-based
deployments using Kubernetes manifests. This provides a scalable, secure, and cloud-native
approach to manage Kubernetes deployments.

#### Deploy to Kubernetes with the CI/CD Tunnel

With the [GitLab Agent](../user/clusters/agent/install/index.md), you can perform push-based
deployments with the [CI/CD Tunnel](../user/clusters/agent/ci_cd_tunnel.md). It provides
a secure and reliable connection between GitLab and your Kubernetes cluster.

### Deploy to AWS with GitLab CI/CD

GitLab provides Docker images that you can use to run AWS commands from GitLab CI/CD, and a template to
facilitate [deployment to AWS](../ci/cloud_deployment). Moreover, Auto Deploy has built-in support
for EC2 and ECS deployments.

### General software deployment with GitLab CI/CD

You can use GitLab CI/CD to target any type of infrastructure accessible by the GitLab Runner.
[User and pre-defined environment variables](../ci/variables/index.md) and CI/CD templates
support setting up a vast number of deployment strategies.

## Environments

To keep track of your deployments and gain insights into your infrastructure, we recommend
connecting them to [a GitLab Environment](../ci/environments/index.md).

## Releases

Use GitLab [Releases](../user/project/releases/index.md) to plan, build, and deliver your applications.

### Feature flags

Use [feature flags](../operations/feature_flags.md) to control and strategically roll out application deployments.
@@ -201,6 +201,10 @@ For self-managed GitLab instances, go to `https://gitlab.example.com/-/graphql-e
kubectl delete -n gitlab-kubernetes-agent -f ./resources.yml
```

## Migrating to the GitLab Agent from the legacy certificate-based integration

Find out how to [migrate to the GitLab Agent for Kubernetes](../../infrastructure/clusters/migrate_to_gitlab_agent.md) from the certificate-based integration depending on the features you use.

## Troubleshooting

If you face any issues while using the Agent, read the
@@ -30,10 +30,7 @@ To install the [Agent](../index.md) in your cluster:
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/259669) in GitLab 13.7, the Agent manifest configuration can be added to multiple directories (or subdirectories) of its repository.
> - Group authorization was [introduced](https://gitlab.com/groups/gitlab-org/-/epics/5784) in GitLab 14.3.

To create an agent, you need:

1. A GitLab repository to hold the configuration file.
1. Install the Agent in a cluster.
To create an agent, you need a GitLab repository to hold the configuration file.

After it's installed, when you update the configuration file, GitLab transmits the
information to the cluster automatically without downtime.
doc/user/infrastructure/clusters/migrate_to_gitlab_agent.md (new file)
@@ -0,0 +1,88 @@
---
stage: Configure
group: Configure
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---

# Migrate to the GitLab Agent for Kubernetes **(FREE)**

The first integration between GitLab and Kubernetes used cluster certificates
to connect the cluster to GitLab.
This method was [deprecated](https://about.gitlab.com/blog/2021/11/15/deprecating-the-cert-based-kubernetes-integration/)
in GitLab 14.5 in favor of the [GitLab Agent for Kubernetes](../../clusters/agent/index.md).

To make sure your clusters connected to GitLab do not break in the future,
we recommend you migrate to the GitLab Agent as soon as possible by following
the processes described in this document.

The certificate-based integration was used for some popular GitLab features such as
GitLab Managed Apps, GitLab-managed clusters, and Auto DevOps.

As a general rule, migrating clusters that rely on GitLab CI/CD can be
achieved using the [CI/CD Tunnel](../../clusters/agent/ci_cd_tunnel.md)
provided by the Agent.

NOTE:
The GitLab Agent for Kubernetes does not intend to provide feature parity with the
certificate-based cluster integrations. As a result, the Agent doesn't support
all the features available to clusters connected through certificates.

## Migrate cluster application deployments

### Migrate from GitLab-managed clusters

With GitLab-managed clusters, GitLab creates separate service accounts and namespaces
for every branch and deploys using these resources.

To achieve a similar result with the GitLab Agent, you can use [impersonation](../../clusters/agent/repository.md#use-impersonation-to-restrict-project-and-group-access)
strategies to deploy to your cluster with restricted account access. To do so:

1. Choose the impersonation strategy that suits your needs.
1. Use Kubernetes RBAC rules to manage impersonated account permissions in Kubernetes.
1. Use the `access_as` attribute in your Agent’s configuration file to define the impersonation.

### Migrate from Auto DevOps

To configure your Auto DevOps project to use the GitLab Agent:

1. Follow the steps to [install an agent](../../clusters/agent/install/index.md) on your cluster.
1. Go to the project in which you use Auto DevOps.
1. From the sidebar, select **Settings > CI/CD** and expand **Variables**.
1. Select **Add new variable**.
1. Add `KUBE_CONTEXT` as the key, `path/to/agent/project:agent-name` as the value, and select the environment scope of your choice.
1. Select **Add variable**.
1. Repeat the process to add another variable, `KUBE_NAMESPACE`, setting the value for the Kubernetes namespace you want your deployments to target, and set the same environment scope from the previous step.
1. From the sidebar, select **Infrastructure > Kubernetes clusters**.
1. From the certificate-based clusters section, open the cluster that serves the same environment scope.
1. Select the **Details** tab and disable the cluster.
1. To activate the changes, from the project's sidebar, select **CI/CD > Variables > Run pipeline**.

### Migrate generic deployments

When you use Kubernetes contexts to reach the cluster from GitLab, you can use the [CI/CD Tunnel](../../clusters/agent/ci_cd_tunnel.md)
directly. It injects the available contexts into your CI environment automatically:

1. Follow the steps to [install an agent](../../clusters/agent/install/index.md) on your cluster.
1. Go to the project in which you use Auto DevOps.
1. From the sidebar, select **Settings > CI/CD** and expand **Variables**.
1. Select **Add new variable**.
1. Add `KUBE_CONTEXT` as the key, `path/to/agent-configuration-project:your-agent-name` as the value, and select the environment scope of your choice.
1. Edit your `.gitlab-ci.yml` file and set the Kubernetes context to the `KUBE_CONTEXT` you defined in the previous step:

```yaml
<your job name>:
script:
- kubectl config use-context $KUBE_CONTEXT
```

## Migrate from GitLab Managed Applications

Follow the process to [migrate from GitLab Managed Apps to the Cluster Management Project](../../clusters/migrating_from_gma_to_project_template.md).

## Migrating a Cluster Management project

See [how to use a cluster management project with the GitLab Agent](../../clusters/management_project_template.md#use-the-agent-with-the-cluster-management-project-template).

## Migrate cluster monitoring features

Cluster monitoring features are not supported by the GitLab Agent for Kubernetes yet.
@@ -137,6 +137,17 @@ module API
status :no_content
end

desc 'Expire the artifacts files from a project'
delete ':id/artifacts' do
not_found! unless Feature.enabled?(:bulk_expire_project_artifacts, default_enabled: :yaml)

authorize_destroy_artifacts!

::Ci::JobArtifacts::DeleteProjectArtifactsService.new(project: user_project).execute

accepted!
end
end
end
end
@@ -183,7 +183,9 @@ module API
params do
use :project_full_path
end
get ':namespace/:project/pulls' do
# TODO Remove the custom Apdex SLO target `urgency: :low` when this endpoint has been optimised.
# https://gitlab.com/gitlab-org/gitlab/-/issues/337269
get ':namespace/:project/pulls', urgency: :low do
user_project = find_project_with_access(params)

merge_requests = authorized_merge_requests_for_project(user_project)

@@ -236,7 +238,9 @@ module API
use :project_full_path
use :pagination
end
get ':namespace/:project/branches' do
# TODO Remove the custom Apdex SLO target `urgency: :low` when this endpoint has been optimised.
# https://gitlab.com/gitlab-org/gitlab/-/issues/337268
get ':namespace/:project/branches', urgency: :low do
user_project = find_project_with_access(params)

update_project_feature_usage_for(user_project)
@ -11,11 +11,11 @@ module Gitlab
|
|||
def perform!
|
||||
logger.instrument_with_sql(:pipeline_save) do
|
||||
BulkInsertableAssociations.with_bulk_insert do
|
||||
tags = extract_tag_list_by_status
|
||||
|
||||
pipeline.transaction do
|
||||
pipeline.save!
|
||||
CommitStatus.bulk_insert_tags!(statuses, tags) if bulk_insert_tags?
|
||||
with_bulk_insert_tags do
|
||||
pipeline.transaction do
|
||||
pipeline.save!
|
||||
CommitStatus.bulk_insert_tags!(statuses) if bulk_insert_tags?
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -29,34 +29,28 @@ module Gitlab
|
|||
|
||||
private
|
||||
|
||||
def statuses
|
||||
strong_memoize(:statuses) do
|
||||
pipeline.stages.flat_map(&:statuses)
|
||||
end
|
||||
end
|
||||
|
||||
# We call `job.tag_list=` to assign tags to the jobs from the
|
||||
# Chain::Seed step which uses the `@tag_list` instance variable to
|
||||
# store them on the record. We remove them here because we want to
|
||||
# bulk insert them, otherwise they would be inserted and assigned one
|
||||
# by one with callbacks. We must use `remove_instance_variable`
|
||||
# because having the instance variable defined would still run the callbacks
|
||||
def extract_tag_list_by_status
|
||||
return {} unless bulk_insert_tags?
|
||||
|
||||
statuses.each.with_object({}) do |job, acc|
|
||||
tag_list = job.clear_memoization(:tag_list)
|
||||
next unless tag_list
|
||||
|
||||
acc[job.name] = tag_list
|
||||
end
|
||||
end
|
||||
|
||||
def bulk_insert_tags?
|
||||
strong_memoize(:bulk_insert_tags) do
|
||||
::Feature.enabled?(:ci_bulk_insert_tags, project, default_enabled: :yaml)
|
||||
end
|
||||
end
|
||||
|
||||
def with_bulk_insert_tags
|
||||
previous = Thread.current['ci_bulk_insert_tags']
|
||||
Thread.current['ci_bulk_insert_tags'] = bulk_insert_tags?
|
||||
yield
|
||||
ensure
|
||||
Thread.current['ci_bulk_insert_tags'] = previous
|
||||
end
|
||||
|
||||
def statuses
|
||||
strong_memoize(:statuses) do
|
||||
pipeline
|
||||
.stages
|
||||
.flat_map(&:statuses)
|
||||
.select { |status| status.respond_to?(:tag_list) }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
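`with_bulk_insert_tags` hands the decision to skip per-record tag callbacks to downstream code through a thread-local value instead of threading it through every call. A hedged sketch of what a consumer of that flag could look like (the module and variable below are illustrative, not the actual reader):

```ruby
# Hypothetical consumer of the thread-local flag set by #with_bulk_insert_tags;
# the real reader lives in the tagging layer and is not part of this diff.
module Ci
  module BulkInsertTagsFlag
    def self.enabled?
      !!Thread.current['ci_bulk_insert_tags']
    end
  end
end

# A per-record tag-saving callback could then bail out while the flag is set,
# leaving persistence to Gitlab::Ci::Tags::BulkInsert:
skip_individual_tag_saving = Ci::BulkInsertTagsFlag.enabled?
```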
@ -4,12 +4,13 @@ module Gitlab
|
|||
module Ci
|
||||
module Tags
|
||||
class BulkInsert
|
||||
include Gitlab::Utils::StrongMemoize
|
||||
|
||||
TAGGINGS_BATCH_SIZE = 1000
|
||||
TAGS_BATCH_SIZE = 500
|
||||
|
||||
def initialize(statuses, tag_list_by_status)
|
||||
def initialize(statuses)
|
||||
@statuses = statuses
|
||||
@tag_list_by_status = tag_list_by_status
|
||||
end
|
||||
|
||||
def insert!
|
||||
|
@ -20,7 +21,18 @@ module Gitlab
|
|||
|
||||
private
|
||||
|
||||
attr_reader :statuses, :tag_list_by_status
|
||||
attr_reader :statuses
|
||||
|
||||
def tag_list_by_status
|
||||
strong_memoize(:tag_list_by_status) do
|
||||
statuses.each.with_object({}) do |status, acc|
|
||||
tag_list = status.tag_list
|
||||
next unless tag_list
|
||||
|
||||
acc[status] = tag_list
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def persist_build_tags!
|
||||
all_tags = tag_list_by_status.values.flatten.uniq.reject(&:blank?)
|
||||
|
@ -54,7 +66,7 @@ module Gitlab
|
|||
|
||||
def build_taggings_attributes(tag_records_by_name)
|
||||
taggings = statuses.flat_map do |status|
|
||||
tag_list = tag_list_by_status[status.name]
|
||||
tag_list = tag_list_by_status[status]
|
||||
next unless tag_list
|
||||
|
||||
tags = tag_records_by_name.values_at(*tag_list)
|
||||
|
|
|
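With the constructor reduced to the statuses alone, tag lists are read from each status via `tag_list` inside the class, so callers no longer build a separate mapping. A short usage sketch, with `statuses` standing in for the jobs collected by the pipeline chain:

```ruby
# `statuses` stands in for the jobs gathered by the Chain::Create step;
# tag lists are read from each status via #tag_list inside the class itself.
inserter = Gitlab::Ci::Tags::BulkInsert.new(statuses)
inserter.insert! # returns a falsey value when no status carries tags
```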
@ -7,7 +7,7 @@ code_quality:
|
|||
variables:
|
||||
DOCKER_DRIVER: overlay2
|
||||
DOCKER_TLS_CERTDIR: ""
|
||||
CODE_QUALITY_IMAGE: "registry.gitlab.com/gitlab-org/ci-cd/codequality:0.85.24-gitlab.1"
|
||||
CODE_QUALITY_IMAGE: "registry.gitlab.com/gitlab-org/ci-cd/codequality:0.85.26"
|
||||
needs: []
|
||||
script:
|
||||
- export SOURCE_CODE=$PWD
|
||||
|
|
|
@ -58,6 +58,10 @@ ci_namespace_mirrors:
|
|||
- table: namespaces
|
||||
column: namespace_id
|
||||
on_delete: async_delete
|
||||
ci_build_report_results:
|
||||
- table: projects
|
||||
column: project_id
|
||||
on_delete: async_delete
|
||||
ci_builds:
|
||||
- table: users
|
||||
column: user_id
|
||||
|
@ -79,6 +83,10 @@ ci_project_mirrors:
|
|||
- table: namespaces
|
||||
column: namespace_id
|
||||
on_delete: async_delete
|
||||
ci_unit_tests:
|
||||
- table: projects
|
||||
column: project_id
|
||||
on_delete: async_delete
|
||||
merge_requests:
|
||||
- table: ci_pipelines
|
||||
column: head_pipeline_id
|
||||
|
|
|
@ -34297,9 +34297,6 @@ msgstr ""
|
|||
msgid "SubscriptionTable|Trial start date"
|
||||
msgstr ""
|
||||
|
||||
msgid "SubscriptionTable|Upgrade"
|
||||
msgstr ""
|
||||
|
||||
msgid "SubscriptionTable|Usage"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
@ -151,7 +151,7 @@
|
|||
"lowlight": "^1.20.0",
|
||||
"marked": "^0.3.12",
|
||||
"mathjax": "3",
|
||||
"mermaid": "^8.13.4",
|
||||
"mermaid": "^8.13.8",
|
||||
"minimatch": "^3.0.4",
|
||||
"monaco-editor": "^0.25.2",
|
||||
"monaco-editor-webpack-plugin": "^4.0.0",
|
||||
|
|
|
@ -276,7 +276,7 @@ module QA
|
|||
|
||||
all_runs = query_api.query(query: query(reliable)).values
|
||||
all_runs.each_with_object(Hash.new { |hsh, key| hsh[key] = {} }) do |table, result|
|
||||
records = table.records
|
||||
records = table.records.sort_by { |record| record.values["_time"] }
|
||||
# skip specs that have been executing for less time than the full range, or that stopped executing before the report date
|
||||
# offset by 1 day because of how the schedulers are configured; the first run can be 1 day later
|
||||
next if (Date.today - Date.parse(records.first.values["_time"])).to_i < (range - 1)
|
||||
|
|
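Sorting the raw records by their `_time` value makes `records.first` the oldest run, so the age check above becomes deterministic. A small standalone sketch of the same idea, using stand-in rows instead of InfluxDB results:

```ruby
require "date"

# Stand-in rows shaped like the InfluxDB records used above: each exposes
# a `values` hash keyed by "_time".
Row = Struct.new(:values)

records = [
  Row.new({ "_time" => "2021-12-20T00:00:00Z" }),
  Row.new({ "_time" => "2021-12-01T00:00:00Z" }),
]

sorted = records.sort_by { |record| record.values["_time"] }
days_since_first_run = (Date.today - Date.parse(sorted.first.values["_time"])).to_i
```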
|
@ -205,7 +205,6 @@ RSpec.describe Projects::MergeRequests::DiffsController do
|
|||
let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiff }
|
||||
let(:expected_options) do
|
||||
{
|
||||
environment: nil,
|
||||
merge_request: merge_request,
|
||||
merge_request_diff: merge_request.merge_request_diff,
|
||||
merge_request_diffs: merge_request.merge_request_diffs,
|
||||
|
@ -280,7 +279,6 @@ RSpec.describe Projects::MergeRequests::DiffsController do
|
|||
let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiff }
|
||||
let(:expected_options) do
|
||||
{
|
||||
environment: nil,
|
||||
merge_request: merge_request,
|
||||
merge_request_diff: merge_request.merge_request_diff,
|
||||
merge_request_diffs: merge_request.merge_request_diffs,
|
||||
|
@ -303,7 +301,6 @@ RSpec.describe Projects::MergeRequests::DiffsController do
|
|||
let(:collection) { Gitlab::Diff::FileCollection::Commit }
|
||||
let(:expected_options) do
|
||||
{
|
||||
environment: nil,
|
||||
merge_request: merge_request,
|
||||
merge_request_diff: nil,
|
||||
merge_request_diffs: merge_request.merge_request_diffs,
|
||||
|
@ -330,7 +327,6 @@ RSpec.describe Projects::MergeRequests::DiffsController do
|
|||
let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiff }
|
||||
let(:expected_options) do
|
||||
{
|
||||
environment: nil,
|
||||
merge_request: merge_request,
|
||||
merge_request_diff: merge_request.merge_request_diff,
|
||||
merge_request_diffs: merge_request.merge_request_diffs,
|
||||
|
@ -494,7 +490,6 @@ RSpec.describe Projects::MergeRequests::DiffsController do
|
|||
|
||||
def collection_arguments(pagination_data = {})
|
||||
{
|
||||
environment: nil,
|
||||
merge_request: merge_request,
|
||||
commit: nil,
|
||||
diff_view: :inline,
|
||||
|
|
|
@ -10,6 +10,10 @@ FactoryBot.define do
|
|||
expire_at { Date.yesterday }
|
||||
end
|
||||
|
||||
trait :locked do
|
||||
locked { Ci::JobArtifact.lockeds[:artifacts_locked] }
|
||||
end
|
||||
|
||||
trait :remote_store do
|
||||
file_store { JobArtifactUploader::Store::REMOTE }
|
||||
end
|
||||
|
|
|
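The new `:locked` trait stores the `artifacts_locked` enum value on the artifact. A usage sketch matching how the specs added in this commit build such fixtures (the `job` variable is assumed to be an existing `ci_build` factory record):

```ruby
# Builds an artifact whose `locked` column is set to `artifacts_locked`,
# as used by the ExpireProjectBuildArtifactsService spec in this commit.
locked_artifact = create(:ci_job_artifact, :locked, job: job)
locked_artifact.artifact_artifacts_locked? # => true (enum predicate with the `artifact_` prefix)
```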
@ -87,6 +87,10 @@ FactoryBot.define do
|
|||
locked { Ci::Pipeline.lockeds[:unlocked] }
|
||||
end
|
||||
|
||||
trait :artifacts_locked do
|
||||
locked { Ci::Pipeline.lockeds[:artifacts_locked] }
|
||||
end
|
||||
|
||||
trait :protected do
|
||||
add_attribute(:protected) { true }
|
||||
end
|
||||
|
|
|
@ -50,7 +50,7 @@ RSpec.describe "User comments on issue", :js do
|
|||
|
||||
add_note(comment)
|
||||
|
||||
expect(page.find('svg.mermaid')).to have_content html_content
|
||||
expect(page.find('svg.mermaid')).not_to have_content 'javascript'
|
||||
within('svg.mermaid') { expect(page).not_to have_selector('img') }
|
||||
end
|
||||
|
||||
|
|
|
@ -48,26 +48,6 @@ RSpec.describe 'View on environment', :js do
|
|||
let(:environment) { create(:environment, project: project, name: 'review/feature', external_url: 'http://feature.review.example.com') }
|
||||
let!(:deployment) { create(:deployment, :success, environment: environment, ref: branch_name, sha: sha) }
|
||||
|
||||
context 'when visiting the diff of a merge request for the branch' do
|
||||
let(:merge_request) { create(:merge_request, :simple, source_project: project, source_branch: branch_name) }
|
||||
|
||||
before do
|
||||
sign_in(user)
|
||||
|
||||
visit diffs_project_merge_request_path(project, merge_request)
|
||||
|
||||
wait_for_requests
|
||||
end
|
||||
|
||||
it 'has a "View on env" button' do
|
||||
within '.diffs' do
|
||||
text = 'View on feature.review.example.com'
|
||||
url = 'http://feature.review.example.com/ruby/feature'
|
||||
expect(page).to have_selector("a[title='#{text}'][href='#{url}']")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when visiting a comparison for the branch' do
|
||||
before do
|
||||
sign_in(user)
|
||||
|
|
|
@ -22,16 +22,6 @@ RSpec.describe Environments::EnvironmentsByDeploymentsFinder do
|
|||
create(:deployment, :success, environment: environment_two, ref: 'v1.1.0', tag: true, sha: project.commit('HEAD~1').id)
|
||||
end
|
||||
|
||||
it 'returns environment when with_tags is set' do
|
||||
expect(described_class.new(project, user, ref: 'master', commit: commit, with_tags: true).execute)
|
||||
.to contain_exactly(environment, environment_two)
|
||||
end
|
||||
|
||||
it 'does not return environment when no with_tags is set' do
|
||||
expect(described_class.new(project, user, ref: 'master', commit: commit).execute)
|
||||
.to be_empty
|
||||
end
|
||||
|
||||
it 'does not return environment when commit is not part of deployment' do
|
||||
expect(described_class.new(project, user, ref: 'master', commit: project.commit('feature')).execute)
|
||||
.to be_empty
|
||||
|
@ -41,7 +31,7 @@ RSpec.describe Environments::EnvironmentsByDeploymentsFinder do
|
|||
# This tests to ensure we don't call one CommitIsAncestor per environment
|
||||
it 'only calls Gitaly twice when multiple environments are present', :request_store do
|
||||
expect do
|
||||
result = described_class.new(project, user, ref: 'master', commit: commit, with_tags: true, find_latest: true).execute
|
||||
result = described_class.new(project, user, ref: 'v1.1.0', commit: commit, find_latest: true).execute
|
||||
|
||||
expect(result).to contain_exactly(environment_two)
|
||||
end.to change { Gitlab::GitalyClient.get_request_count }.by(2)
|
||||
|
|
|
@ -17,19 +17,12 @@ describe('Pipeline Editor | Text editor component', () => {
|
|||
let editorReadyListener;
|
||||
let mockUse;
|
||||
let mockRegisterCiSchema;
|
||||
let mockEditorInstance;
|
||||
let editorInstanceDetail;
|
||||
|
||||
const MockSourceEditor = {
|
||||
template: '<div/>',
|
||||
props: ['value', 'fileName'],
|
||||
mounted() {
|
||||
this.$emit(EDITOR_READY_EVENT);
|
||||
},
|
||||
methods: {
|
||||
getEditor: () => ({
|
||||
use: mockUse,
|
||||
registerCiSchema: mockRegisterCiSchema,
|
||||
}),
|
||||
},
|
||||
};
|
||||
|
||||
const createComponent = (glFeatures = {}, mountFn = shallowMount) => {
|
||||
|
@ -58,6 +51,21 @@ describe('Pipeline Editor | Text editor component', () => {
|
|||
|
||||
const findEditor = () => wrapper.findComponent(MockSourceEditor);
|
||||
|
||||
beforeEach(() => {
|
||||
editorReadyListener = jest.fn();
|
||||
mockUse = jest.fn();
|
||||
mockRegisterCiSchema = jest.fn();
|
||||
mockEditorInstance = {
|
||||
use: mockUse,
|
||||
registerCiSchema: mockRegisterCiSchema,
|
||||
};
|
||||
editorInstanceDetail = {
|
||||
detail: {
|
||||
instance: mockEditorInstance,
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
wrapper.destroy();
|
||||
|
||||
|
@ -67,10 +75,6 @@ describe('Pipeline Editor | Text editor component', () => {
|
|||
|
||||
describe('template', () => {
|
||||
beforeEach(() => {
|
||||
editorReadyListener = jest.fn();
|
||||
mockUse = jest.fn();
|
||||
mockRegisterCiSchema = jest.fn();
|
||||
|
||||
createComponent();
|
||||
});
|
||||
|
||||
|
@ -87,7 +91,7 @@ describe('Pipeline Editor | Text editor component', () => {
|
|||
});
|
||||
|
||||
it('bubbles up events', () => {
|
||||
findEditor().vm.$emit(EDITOR_READY_EVENT);
|
||||
findEditor().vm.$emit(EDITOR_READY_EVENT, editorInstanceDetail);
|
||||
|
||||
expect(editorReadyListener).toHaveBeenCalled();
|
||||
});
|
||||
|
@ -97,11 +101,7 @@ describe('Pipeline Editor | Text editor component', () => {
|
|||
describe('when `schema_linting` feature flag is on', () => {
|
||||
beforeEach(() => {
|
||||
createComponent({ schemaLinting: true });
|
||||
// Since the editor will have already mounted, the event will have fired.
|
||||
// To ensure we properly test this, we clear the mock and re-emit the event.
|
||||
mockRegisterCiSchema.mockClear();
|
||||
mockUse.mockClear();
|
||||
findEditor().vm.$emit(EDITOR_READY_EVENT);
|
||||
findEditor().vm.$emit(EDITOR_READY_EVENT, editorInstanceDetail);
|
||||
});
|
||||
|
||||
it('configures editor with syntax highlight', () => {
|
||||
|
@ -113,7 +113,7 @@ describe('Pipeline Editor | Text editor component', () => {
|
|||
describe('when `schema_linting` feature flag is off', () => {
|
||||
beforeEach(() => {
|
||||
createComponent();
|
||||
findEditor().vm.$emit(EDITOR_READY_EVENT);
|
||||
findEditor().vm.$emit(EDITOR_READY_EVENT, editorInstanceDetail);
|
||||
});
|
||||
|
||||
it('does not call the register CI schema function', () => {
|
||||
|
|
|
@ -79,12 +79,11 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
|
|||
it 'extracts an empty tag list' do
|
||||
expect(CommitStatus)
|
||||
.to receive(:bulk_insert_tags!)
|
||||
.with(stage.statuses, {})
|
||||
.with([job])
|
||||
.and_call_original
|
||||
|
||||
step.perform!
|
||||
|
||||
expect(job.instance_variable_defined?(:@tag_list)).to be_falsey
|
||||
expect(job).to be_persisted
|
||||
expect(job.tag_list).to eq([])
|
||||
end
|
||||
|
@ -98,14 +97,13 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
|
|||
it 'bulk inserts tags' do
|
||||
expect(CommitStatus)
|
||||
.to receive(:bulk_insert_tags!)
|
||||
.with(stage.statuses, { job.name => %w[tag1 tag2] })
|
||||
.with([job])
|
||||
.and_call_original
|
||||
|
||||
step.perform!
|
||||
|
||||
expect(job.instance_variable_defined?(:@tag_list)).to be_falsey
|
||||
expect(job).to be_persisted
|
||||
expect(job.tag_list).to match_array(%w[tag1 tag2])
|
||||
expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -120,7 +118,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
|
|||
|
||||
step.perform!
|
||||
|
||||
expect(job.instance_variable_defined?(:@tag_list)).to be_truthy
|
||||
expect(job).to be_persisted
|
||||
expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
|
||||
end
|
||||
|
|
|
@ -5,27 +5,37 @@ require 'spec_helper'
|
|||
RSpec.describe Gitlab::Ci::Tags::BulkInsert do
|
||||
let_it_be(:project) { create(:project, :repository) }
|
||||
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
|
||||
let_it_be_with_refind(:job) { create(:ci_build, :unique_name, pipeline: pipeline, project: project) }
|
||||
let_it_be_with_refind(:other_job) { create(:ci_build, :unique_name, pipeline: pipeline, project: project) }
|
||||
let_it_be_with_refind(:bridge) { create(:ci_bridge, pipeline: pipeline, project: project) }
|
||||
let_it_be_with_refind(:job) { create(:ci_build, :unique_name, pipeline: pipeline) }
|
||||
let_it_be_with_refind(:other_job) { create(:ci_build, :unique_name, pipeline: pipeline) }
|
||||
|
||||
let(:statuses) { [job, bridge, other_job] }
|
||||
let(:statuses) { [job, other_job] }
|
||||
|
||||
subject(:service) { described_class.new(statuses, tags_list) }
|
||||
subject(:service) { described_class.new(statuses) }
|
||||
|
||||
describe 'gem version' do
|
||||
let(:acceptable_version) { '9.0.0' }
|
||||
|
||||
let(:error_message) do
|
||||
<<~MESSAGE
|
||||
A mechanism depending on internals of `acts-as-taggable-on` has been designed
|
||||
to bulk insert tags for Ci::Build records.
|
||||
Please review the code carefully before updating the gem version
|
||||
https://gitlab.com/gitlab-org/gitlab/-/issues/350053
|
||||
MESSAGE
|
||||
end
|
||||
|
||||
it { expect(ActsAsTaggableOn::VERSION).to eq(acceptable_version), error_message }
|
||||
end
|
||||
|
||||
describe '#insert!' do
|
||||
context 'without tags' do
|
||||
let(:tags_list) { {} }
|
||||
|
||||
it { expect(service.insert!).to be_falsey }
|
||||
end
|
||||
|
||||
context 'with tags' do
|
||||
let(:tags_list) do
|
||||
{
|
||||
job.name => %w[tag1 tag2],
|
||||
other_job.name => %w[tag2 tag3 tag4]
|
||||
}
|
||||
before do
|
||||
job.tag_list = %w[tag1 tag2]
|
||||
other_job.tag_list = %w[tag2 tag3 tag4]
|
||||
end
|
||||
|
||||
it 'persists tags' do
|
||||
|
@ -35,5 +45,18 @@ RSpec.describe Gitlab::Ci::Tags::BulkInsert do
|
|||
expect(other_job.reload.tag_list).to match_array(%w[tag2 tag3 tag4])
|
||||
end
|
||||
end
|
||||
|
||||
context 'with tags for only one job' do
|
||||
before do
|
||||
job.tag_list = %w[tag1 tag2]
|
||||
end
|
||||
|
||||
it 'persists tags' do
|
||||
expect(service.insert!).to be_truthy
|
||||
|
||||
expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
|
||||
expect(other_job.reload.tag_list).to be_empty
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -5,6 +5,11 @@ require 'spec_helper'
|
|||
RSpec.describe Ci::BuildReportResult do
|
||||
let(:build_report_result) { build(:ci_build_report_result, :with_junit_success) }
|
||||
|
||||
it_behaves_like 'cleanup by a loose foreign key' do
|
||||
let!(:parent) { create(:project) }
|
||||
let!(:model) { create(:ci_build_report_result, project: parent) }
|
||||
end
|
||||
|
||||
describe 'associations' do
|
||||
it { is_expected.to belong_to(:build) }
|
||||
it { is_expected.to belong_to(:project) }
|
||||
|
|
|
@ -143,6 +143,17 @@ RSpec.describe Ci::JobArtifact do
|
|||
end
|
||||
end
|
||||
|
||||
describe '.erasable_file_types' do
|
||||
subject { described_class.erasable_file_types }
|
||||
|
||||
it 'returns a list of erasable file types' do
|
||||
all_types = described_class.file_types.keys
|
||||
erasable_types = all_types - described_class::NON_ERASABLE_FILE_TYPES
|
||||
|
||||
expect(subject).to contain_exactly(*erasable_types)
|
||||
end
|
||||
end
|
||||
|
||||
describe '.erasable' do
|
||||
subject { described_class.erasable }
|
||||
|
||||
|
|
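The new spec pins `.erasable_file_types` to "all file types minus the non-erasable ones", so a plausible model-side definition is plain set subtraction; a sketch inferred from that expectation, not copied from the model:

```ruby
# Sketch only, derived from the expectation in the spec above rather than
# from the model source: every enum key except the non-erasable types.
def self.erasable_file_types
  file_types.keys - NON_ERASABLE_FILE_TYPES
end
```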
|
@ -3,6 +3,11 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Ci::UnitTest do
|
||||
it_behaves_like 'cleanup by a loose foreign key' do
|
||||
let!(:parent) { create(:project) }
|
||||
let!(:model) { create(:ci_unit_test, project: parent) }
|
||||
end
|
||||
|
||||
describe 'relationships' do
|
||||
it { is_expected.to belong_to(:project) }
|
||||
it { is_expected.to have_many(:unit_test_failures) }
|
||||
|
|
|
@ -961,18 +961,17 @@ RSpec.describe CommitStatus do
|
|||
|
||||
describe '.bulk_insert_tags!' do
|
||||
let(:statuses) { double('statuses') }
|
||||
let(:tag_list_by_build) { double('tag list') }
|
||||
let(:inserter) { double('inserter') }
|
||||
|
||||
it 'delegates to bulk insert class' do
|
||||
expect(Gitlab::Ci::Tags::BulkInsert)
|
||||
.to receive(:new)
|
||||
.with(statuses, tag_list_by_build)
|
||||
.with(statuses)
|
||||
.and_return(inserter)
|
||||
|
||||
expect(inserter).to receive(:insert!)
|
||||
|
||||
described_class.bulk_insert_tags!(statuses, tag_list_by_build)
|
||||
described_class.bulk_insert_tags!(statuses)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
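The updated expectation passes only the statuses through, so the class method can stay a thin wrapper around the bulk inserter; a sketch consistent with the spec (the model itself is outside this diff):

```ruby
# Thin delegation consistent with the updated expectation above.
def self.bulk_insert_tags!(statuses)
  ::Gitlab::Ci::Tags::BulkInsert.new(statuses).insert!
end
```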
|
@ -3492,84 +3492,6 @@ RSpec.describe MergeRequest, factory_default: :keep do
|
|||
end
|
||||
end
|
||||
|
||||
describe "#environments_for" do
|
||||
let(:project) { create(:project, :repository) }
|
||||
let(:user) { project.creator }
|
||||
let(:merge_request) { create(:merge_request, source_project: project) }
|
||||
let(:source_branch) { merge_request.source_branch }
|
||||
let(:target_branch) { merge_request.target_branch }
|
||||
let(:source_oid) { project.commit(source_branch).id }
|
||||
let(:target_oid) { project.commit(target_branch).id }
|
||||
|
||||
before do
|
||||
merge_request.source_project.add_maintainer(user)
|
||||
merge_request.target_project.add_maintainer(user)
|
||||
end
|
||||
|
||||
context 'with multiple environments' do
|
||||
let(:environments) { create_list(:environment, 3, project: project) }
|
||||
|
||||
before do
|
||||
create(:deployment, :success, environment: environments.first, ref: source_branch, sha: source_oid)
|
||||
create(:deployment, :success, environment: environments.second, ref: target_branch, sha: target_oid)
|
||||
end
|
||||
|
||||
it 'selects deployed environments' do
|
||||
expect(merge_request.environments_for(user)).to contain_exactly(environments.first)
|
||||
end
|
||||
|
||||
it 'selects latest deployed environment' do
|
||||
latest_environment = create(:environment, project: project)
|
||||
create(:deployment, :success, environment: latest_environment, ref: source_branch, sha: source_oid)
|
||||
|
||||
expect(merge_request.environments_for(user)).to eq([environments.first, latest_environment])
|
||||
expect(merge_request.environments_for(user, latest: true)).to contain_exactly(latest_environment)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with environments on source project' do
|
||||
let(:source_project) { fork_project(project, nil, repository: true) }
|
||||
|
||||
let(:merge_request) do
|
||||
create(:merge_request,
|
||||
source_project: source_project, source_branch: 'feature',
|
||||
target_project: project)
|
||||
end
|
||||
|
||||
let(:source_environment) { create(:environment, project: source_project) }
|
||||
|
||||
before do
|
||||
create(:deployment, :success, environment: source_environment, ref: 'feature', sha: merge_request.diff_head_sha)
|
||||
end
|
||||
|
||||
it 'selects deployed environments', :sidekiq_might_not_need_inline do
|
||||
expect(merge_request.environments_for(user)).to contain_exactly(source_environment)
|
||||
end
|
||||
|
||||
context 'with environments on target project' do
|
||||
let(:target_environment) { create(:environment, project: project) }
|
||||
|
||||
before do
|
||||
create(:deployment, :success, environment: target_environment, tag: true, sha: merge_request.diff_head_sha)
|
||||
end
|
||||
|
||||
it 'selects deployed environments', :sidekiq_might_not_need_inline do
|
||||
expect(merge_request.environments_for(user)).to contain_exactly(source_environment, target_environment)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'without a diff_head_commit' do
|
||||
before do
|
||||
expect(merge_request).to receive(:diff_head_commit).and_return(nil)
|
||||
end
|
||||
|
||||
it 'returns an empty array' do
|
||||
expect(merge_request.environments_for(user)).to be_empty
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "#environments" do
|
||||
subject { merge_request.environments }
|
||||
|
||||
|
|
|
@ -81,6 +81,71 @@ RSpec.describe API::Ci::JobArtifacts do
|
|||
end
|
||||
end
|
||||
|
||||
describe 'DELETE /projects/:id/artifacts' do
|
||||
context 'when feature flag is disabled' do
|
||||
before do
|
||||
stub_feature_flags(bulk_expire_project_artifacts: false)
|
||||
end
|
||||
|
||||
it 'returns 404' do
|
||||
delete api("/projects/#{project.id}/artifacts", api_user)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user is anonymous' do
|
||||
let(:api_user) { nil }
|
||||
|
||||
it 'does not execute Ci::JobArtifacts::DeleteProjectArtifactsService' do
|
||||
expect(Ci::JobArtifacts::DeleteProjectArtifactsService)
|
||||
.not_to receive(:new)
|
||||
|
||||
delete api("/projects/#{project.id}/artifacts", api_user)
|
||||
end
|
||||
|
||||
it 'returns status 401 (unauthorized)' do
|
||||
delete api("/projects/#{project.id}/artifacts", api_user)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:unauthorized)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with developer' do
|
||||
it 'does not execute Ci::JobArtifacts::DeleteProjectArtifactsService' do
|
||||
expect(Ci::JobArtifacts::DeleteProjectArtifactsService)
|
||||
.not_to receive(:new)
|
||||
|
||||
delete api("/projects/#{project.id}/artifacts", api_user)
|
||||
end
|
||||
|
||||
it 'returns status 403 (forbidden)' do
|
||||
delete api("/projects/#{project.id}/artifacts", api_user)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:forbidden)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with authorized user' do
|
||||
let(:maintainer) { create(:project_member, :maintainer, project: project).user }
|
||||
let!(:api_user) { maintainer }
|
||||
|
||||
it 'executes Ci::JobArtifacts::DeleteProjectArtifactsService' do
|
||||
expect_next_instance_of(Ci::JobArtifacts::DeleteProjectArtifactsService, project: project) do |service|
|
||||
expect(service).to receive(:execute).and_call_original
|
||||
end
|
||||
|
||||
delete api("/projects/#{project.id}/artifacts", api_user)
|
||||
end
|
||||
|
||||
it 'returns status 202 (accepted)' do
|
||||
delete api("/projects/#{project.id}/artifacts", api_user)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:accepted)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'GET /projects/:id/jobs/:job_id/artifacts/:artifact_path' do
|
||||
context 'when job has artifacts' do
|
||||
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
|
||||
|
|
|
@ -31,7 +31,6 @@ RSpec.describe 'Merge Requests Context Commit Diffs' do
|
|||
|
||||
def collection_arguments(pagination_data = {})
|
||||
{
|
||||
environment: nil,
|
||||
merge_request: merge_request,
|
||||
commit: nil,
|
||||
diff_view: :inline,
|
||||
|
|
|
@ -29,7 +29,6 @@ RSpec.describe 'Merge Requests Diffs' do
|
|||
|
||||
def collection_arguments(pagination_data = {})
|
||||
{
|
||||
environment: nil,
|
||||
merge_request: merge_request,
|
||||
commit: nil,
|
||||
diff_view: :inline,
|
||||
|
@ -110,21 +109,6 @@ RSpec.describe 'Merge Requests Diffs' do
|
|||
end
|
||||
end
|
||||
|
||||
context 'with a new environment' do
|
||||
let(:environment) do
|
||||
create(:environment, :available, project: project)
|
||||
end
|
||||
|
||||
let!(:deployment) do
|
||||
create(:deployment, :success, environment: environment, ref: merge_request.source_branch)
|
||||
end
|
||||
|
||||
it_behaves_like 'serializes diffs with expected arguments' do
|
||||
let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
|
||||
let(:expected_options) { collection_arguments(total_pages: 20).merge(environment: environment) }
|
||||
end
|
||||
end
|
||||
|
||||
context 'with disabled display_merge_conflicts_in_diff feature' do
|
||||
before do
|
||||
stub_feature_flags(display_merge_conflicts_in_diff: false)
|
||||
|
|
|
@ -0,0 +1,17 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Ci::JobArtifacts::DeleteProjectArtifactsService do
|
||||
let_it_be(:project) { create(:project) }
|
||||
|
||||
subject { described_class.new(project: project) }
|
||||
|
||||
describe '#execute' do
|
||||
it 'enqueues a Ci::ExpireProjectBuildArtifactsWorker' do
|
||||
expect(Ci::JobArtifacts::ExpireProjectBuildArtifactsWorker).to receive(:perform_async).with(project.id).and_call_original
|
||||
|
||||
subject.execute
|
||||
end
|
||||
end
|
||||
end
|
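The spec only demands that the service enqueue the expiration worker for the given project, which keeps the implementation tiny; a sketch under that assumption:

```ruby
# Sketch inferred from the spec: accepts a project and fans out to the
# asynchronous expiration worker.
module Ci
  module JobArtifacts
    class DeleteProjectArtifactsService
      def initialize(project:)
        @project = project
      end

      def execute
        ExpireProjectBuildArtifactsWorker.perform_async(@project.id)
      end
    end
  end
end
```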
|
@ -0,0 +1,157 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Ci::JobArtifacts::ExpireProjectBuildArtifactsService do
|
||||
let_it_be(:project) { create(:project) }
|
||||
let_it_be(:pipeline, reload: true) { create(:ci_pipeline, :unlocked, project: project) }
|
||||
|
||||
let(:expiry_time) { Time.current }
|
||||
|
||||
RSpec::Matchers.define :have_locked_status do |expected_status|
|
||||
match do |job_artifacts|
|
||||
predicate = "#{expected_status}?".to_sym
|
||||
job_artifacts.all? { |artifact| artifact.__send__(predicate) }
|
||||
end
|
||||
end
|
||||
|
||||
RSpec::Matchers.define :expire_at do |expected_expiry|
|
||||
match do |job_artifacts|
|
||||
job_artifacts.all? { |artifact| artifact.expire_at.to_i == expected_expiry.to_i }
|
||||
end
|
||||
end
|
||||
|
||||
RSpec::Matchers.define :have_no_expiry do
|
||||
match do |job_artifacts|
|
||||
job_artifacts.all? { |artifact| artifact.expire_at.nil? }
|
||||
end
|
||||
end
|
||||
|
||||
describe '#execute' do
|
||||
subject(:execute) { described_class.new(project.id, expiry_time).execute }
|
||||
|
||||
context 'with job containing erasable artifacts' do
|
||||
let_it_be(:job, reload: true) { create(:ci_build, :erasable, pipeline: pipeline) }
|
||||
|
||||
it 'unlocks erasable job artifacts' do
|
||||
execute
|
||||
|
||||
expect(job.job_artifacts).to have_locked_status(:artifact_unlocked)
|
||||
end
|
||||
|
||||
it 'expires erasable job artifacts' do
|
||||
execute
|
||||
|
||||
expect(job.job_artifacts).to expire_at(expiry_time)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with job containing trace artifacts' do
|
||||
let_it_be(:job, reload: true) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
|
||||
|
||||
it 'does not unlock trace artifacts' do
|
||||
execute
|
||||
|
||||
expect(job.job_artifacts).to have_locked_status(:artifact_unknown)
|
||||
end
|
||||
|
||||
it 'does not expire trace artifacts' do
|
||||
execute
|
||||
|
||||
expect(job.job_artifacts).to have_no_expiry
|
||||
end
|
||||
end
|
||||
|
||||
context 'with job from artifact locked pipeline' do
|
||||
let_it_be(:job, reload: true) { create(:ci_build, pipeline: pipeline) }
|
||||
let_it_be(:locked_artifact, reload: true) { create(:ci_job_artifact, :locked, job: job) }
|
||||
|
||||
before do
|
||||
pipeline.artifacts_locked!
|
||||
end
|
||||
|
||||
it 'does not unlock locked artifacts' do
|
||||
execute
|
||||
|
||||
expect(job.job_artifacts).to have_locked_status(:artifact_artifacts_locked)
|
||||
end
|
||||
|
||||
it 'does not expire locked artifacts' do
|
||||
execute
|
||||
|
||||
expect(job.job_artifacts).to have_no_expiry
|
||||
end
|
||||
end
|
||||
|
||||
context 'with job containing both erasable and trace artifacts' do
|
||||
let_it_be(:job, reload: true) { create(:ci_build, pipeline: pipeline) }
|
||||
let_it_be(:erasable_artifact, reload: true) { create(:ci_job_artifact, :archive, job: job) }
|
||||
let_it_be(:trace_artifact, reload: true) { create(:ci_job_artifact, :trace, job: job) }
|
||||
|
||||
it 'unlocks erasable artifacts' do
|
||||
execute
|
||||
|
||||
expect(erasable_artifact.artifact_unlocked?).to be_truthy
|
||||
end
|
||||
|
||||
it 'expires erasable artifacts' do
|
||||
execute
|
||||
|
||||
expect(erasable_artifact.expire_at.to_i).to eq(expiry_time.to_i)
|
||||
end
|
||||
|
||||
it 'does not unlock trace artifacts' do
|
||||
execute
|
||||
|
||||
expect(trace_artifact.artifact_unlocked?).to be_falsey
|
||||
end
|
||||
|
||||
it 'does not expire trace artifacts' do
|
||||
execute
|
||||
|
||||
expect(trace_artifact.expire_at).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'with multiple pipelines' do
|
||||
let_it_be(:job, reload: true) { create(:ci_build, :erasable, pipeline: pipeline) }
|
||||
|
||||
let_it_be(:pipeline2, reload: true) { create(:ci_pipeline, :unlocked, project: project) }
|
||||
let_it_be(:job2, reload: true) { create(:ci_build, :erasable, pipeline: pipeline) }
|
||||
|
||||
it 'unlocks artifacts across pipelines' do
|
||||
execute
|
||||
|
||||
expect(job.job_artifacts).to have_locked_status(:artifact_unlocked)
|
||||
expect(job2.job_artifacts).to have_locked_status(:artifact_unlocked)
|
||||
end
|
||||
|
||||
it 'expires artifacts across pipelines' do
|
||||
execute
|
||||
|
||||
expect(job.job_artifacts).to expire_at(expiry_time)
|
||||
expect(job2.job_artifacts).to expire_at(expiry_time)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with artifacts belonging to another project' do
|
||||
let_it_be(:job, reload: true) { create(:ci_build, :erasable, pipeline: pipeline) }
|
||||
|
||||
let_it_be(:another_project, reload: true) { create(:project) }
|
||||
let_it_be(:another_pipeline, reload: true) { create(:ci_pipeline, project: another_project) }
|
||||
let_it_be(:another_job, reload: true) { create(:ci_build, :erasable, pipeline: another_pipeline) }
|
||||
|
||||
it 'does not unlock erasable artifacts in other projects' do
|
||||
execute
|
||||
|
||||
expect(another_job.job_artifacts).to have_locked_status(:artifact_unknown)
|
||||
end
|
||||
|
||||
it 'does not expire erasable artifacts in other projects' do
|
||||
execute
|
||||
|
||||
expect(another_job.job_artifacts).to have_no_expiry
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -0,0 +1,32 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Ci::JobArtifacts::ExpireProjectBuildArtifactsWorker do
|
||||
let(:worker) { described_class.new }
|
||||
let(:current_time) { Time.current }
|
||||
|
||||
let_it_be(:project) { create(:project) }
|
||||
|
||||
around do |example|
|
||||
freeze_time { example.run }
|
||||
end
|
||||
|
||||
describe '#perform' do
|
||||
it 'executes ExpireProjectBuildArtifactsService with the project' do
|
||||
expect_next_instance_of(Ci::JobArtifacts::ExpireProjectBuildArtifactsService, project.id, current_time) do |instance|
|
||||
expect(instance).to receive(:execute).and_call_original
|
||||
end
|
||||
|
||||
worker.perform(project.id)
|
||||
end
|
||||
|
||||
context 'when project does not exist' do
|
||||
it 'does nothing' do
|
||||
expect(Ci::JobArtifacts::ExpireProjectBuildArtifactsService).not_to receive(:new)
|
||||
|
||||
worker.perform(non_existing_record_id)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
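The worker spec fixes the constructor arguments (`project.id` plus the frozen `Time.current`) and requires a silent no-op when the project is gone; a sketch consistent with those expectations (the `ApplicationWorker` include is an assumption about the usual worker mixin):

```ruby
# Sketch inferred from the worker spec: look up the project, then delegate
# to the expiration service with the current time; do nothing if the
# project no longer exists.
module Ci
  module JobArtifacts
    class ExpireProjectBuildArtifactsWorker
      include ApplicationWorker # assumption: the standard worker mixin

      def perform(project_id)
        project = Project.find_by_id(project_id)
        return unless project

        ExpireProjectBuildArtifactsService.new(project.id, Time.current).execute
      end
    end
  end
end
```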
|
@ -4922,12 +4922,7 @@ domhandler@^4.0.0, domhandler@^4.2.0:
|
|||
dependencies:
|
||||
domelementtype "^2.2.0"
|
||||
|
||||
dompurify@2.3.3:
|
||||
version "2.3.3"
|
||||
resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.3.3.tgz#c1af3eb88be47324432964d8abc75cf4b98d634c"
|
||||
integrity sha512-dqnqRkPMAjOZE0FogZ+ceJNM2dZ3V/yNOuFB7+39qpO93hHhfRpHw3heYQC7DPK9FqbQTfBKUJhiSfz4MvXYwg==
|
||||
|
||||
dompurify@^2.3.4:
|
||||
dompurify@2.3.4, dompurify@^2.3.4:
|
||||
version "2.3.4"
|
||||
resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.3.4.tgz#1cf5cf0105ccb4debdf6db162525bd41e6ddacc6"
|
||||
integrity sha512-6BVcgOAVFXjI0JTjEvZy901Rghm+7fDQOrNIcxB4+gdhj6Kwp6T9VBhBY/AbagKHJocRkDYGd6wvI+p4/10xtQ==
|
||||
|
@ -8469,16 +8464,16 @@ merge2@^1.3.0:
|
|||
resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae"
|
||||
integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==
|
||||
|
||||
mermaid@^8.13.4:
|
||||
version "8.13.4"
|
||||
resolved "https://registry.yarnpkg.com/mermaid/-/mermaid-8.13.4.tgz#924cb85f39380285e0a99f245c66cfa61014a2e1"
|
||||
integrity sha512-zdWtsXabVy1PEAE25Jkm4zbTDlQe8rqNlTMq2B3j+D+NxDskJEY5OsgalarvNLsw+b5xFa1a8D1xcm/PijrDow==
|
||||
mermaid@^8.13.8:
|
||||
version "8.13.8"
|
||||
resolved "https://registry.yarnpkg.com/mermaid/-/mermaid-8.13.8.tgz#fc137e2a59df34a3e053712033833ffbbc8d84a9"
|
||||
integrity sha512-Z5v31rvo8P7BPTiGicdJl9BbzyUe9s5sXILK8sM1g7ijkagpfFjPtXZVsq5P1WlN8m/fUp2PPNXVF9SqeTM91w==
|
||||
dependencies:
|
||||
"@braintree/sanitize-url" "^3.1.0"
|
||||
d3 "^7.0.0"
|
||||
dagre "^0.8.5"
|
||||
dagre-d3 "^0.6.4"
|
||||
dompurify "2.3.3"
|
||||
dompurify "2.3.4"
|
||||
graphlib "^2.1.8"
|
||||
khroma "^1.4.1"
|
||||
moment-mini "^2.24.0"
|
||||
|
|