Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-11-03 00:13:30 +00:00
parent 3279435af8
commit 511c2bbab1
13 changed files with 101 additions and 46 deletions

View File

@ -24,7 +24,11 @@ module Ci
# which is scheduled every 7 minutes.
def execute
in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
destroy_job_artifacts_with_slow_iteration(Time.current)
if ::Feature.enabled?(:ci_destroy_unlocked_job_artifacts)
destroy_unlocked_job_artifacts(Time.current)
else
destroy_job_artifacts_with_slow_iteration(Time.current)
end
end
@removed_artifacts_count
@ -32,13 +36,21 @@ module Ci
private
def destroy_unlocked_job_artifacts(start_at)
loop_until(timeout: LOOP_TIMEOUT, limit: LOOP_LIMIT) do
artifacts = Ci::JobArtifact.expired_before(start_at).artifact_unlocked.limit(BATCH_SIZE)
service_response = destroy_batch(artifacts)
@removed_artifacts_count += service_response[:destroyed_artifacts_count]
end
end
def destroy_job_artifacts_with_slow_iteration(start_at)
Ci::JobArtifact.expired_before(start_at).each_batch(of: BATCH_SIZE, column: :expire_at, order: :desc) do |relation, index|
# For performance reasons, join with ci_pipelines after the batch is queried.
# See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47496
artifacts = relation.unlocked
service_response = destroy_batch_async(artifacts)
service_response = destroy_batch(artifacts)
@removed_artifacts_count += service_response[:destroyed_artifacts_count]
break if loop_timeout?(start_at)
@ -46,7 +58,7 @@ module Ci
end
end
def destroy_batch_async(artifacts)
def destroy_batch(artifacts)
Ci::JobArtifacts::DestroyBatchService.new(artifacts).execute
end
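
For context, the new `destroy_unlocked_job_artifacts` path is gated behind the `ci_destroy_unlocked_job_artifacts` feature flag, which ships disabled by default (see the flag definition further down). A minimal sketch of toggling it from a Rails console, assuming the service takes no constructor arguments:

```ruby
# Hypothetical Rails console session on a development instance; assumes
# DestroyAllExpiredService.new takes no arguments.
Feature.enable(:ci_destroy_unlocked_job_artifacts)   # flag ships default-off

removed = Ci::JobArtifacts::DestroyAllExpiredService.new.execute
puts "Removed #{removed} expired, unlocked job artifacts"

Feature.disable(:ci_destroy_unlocked_job_artifacts)  # revert to the slow-iteration path
```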

View File

@ -34,7 +34,7 @@ module Ci
# This is executed outside of the transaction because it depends on Redis
update_project_statistics! if update_stats
increment_monitoring_statistics(artifacts_count)
increment_monitoring_statistics(artifacts_count, artifacts_bytes)
success(destroyed_artifacts_count: artifacts_count,
statistics_updates: affected_project_statistics)
@ -63,8 +63,9 @@ module Ci
end
end
def increment_monitoring_statistics(size)
metrics.increment_destroyed_artifacts(size)
def increment_monitoring_statistics(size, bytes)
metrics.increment_destroyed_artifacts_count(size)
metrics.increment_destroyed_artifacts_bytes(bytes)
end
def metrics
@ -76,6 +77,12 @@ module Ci
@job_artifacts.count
end
end
def artifacts_bytes
strong_memoize(:artifacts_bytes) do
@job_artifacts.sum { |artifact| artifact.try(:size) || 0 }
end
end
end
end
end

View File

@ -10,7 +10,7 @@
.row{ 'v-cloak': true }
#create-group-pane.tab-pane
= form_for @group, html: { class: 'group-form gl-show-field-errors' } do |f|
= form_for @group, html: { class: 'group-form gl-show-field-errors gl-mt-3' } do |f|
= render 'new_group_fields', f: f, group_name_id: 'create-group-name'
#import-group-pane.tab-pane

View File

@ -12,7 +12,7 @@
.row{ 'v-cloak': true }
#blank-project-pane.tab-pane.active
= form_for @project, html: { class: 'new_project' } do |f|
= form_for @project, html: { class: 'new_project gl-mt-3' } do |f|
= render 'new_project_fields', f: f, project_name_id: "blank-project-name"
#create-from-template-pane.tab-pane

View File

@ -1,8 +1,8 @@
---
name: finding_ci_pipeline_disable_joins
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/70216
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/338665
milestone: '14.3'
name: ci_destroy_unlocked_job_artifacts
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/72406
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/338165
milestone: '14.5'
type: development
group: group::threat insights
default_enabled: true
group: group::testing
default_enabled: false

View File

@ -172,7 +172,9 @@ test its execution using `CREATE INDEX CONCURRENTLY` in the `#database-lab` Slac
`gitlab-org/gitlab-foss` (`project_id = 13083`) or the `gitlab-org/gitlab` (`project_id = 278964`)
projects provide enough data to serve as a good example.
- That means that no query plan should return 0 records or fewer records than the provided limit (if a limit is included). If a query is used in batching, a representative example batch with adequate results should be identified and provided.
- If your queries belong to a new feature in GitLab.com and thus they don't return data in production, it's suggested to analyze the query and to provide the plan from a local environment.
- If your queries belong to a new feature in GitLab.com and thus they don't return data in production:
- You may analyze the query and provide the plan from a local environment (see the sketch after this list).
- `#database-lab` and [postgres.ai](https://postgres.ai/) both allow updates to data (`exec UPDATE issues SET ...`) and creation of new tables and columns (`exec ALTER TABLE issues ADD COLUMN ...`).
- More information on how to find the number of actually returned records is in [Understanding EXPLAIN plans](understanding_explain_plans.md).
- For query changes, it is best to provide both the SQL queries and the plans from
_before_ and _after_ the change. This helps spot differences quickly.
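
When the feature has no production data yet, one way to produce a local plan is to build the same relation the service above uses and ask ActiveRecord for it. A minimal sketch, assuming a local instance seeded with a few expired, unlocked artifacts (the scopes are the ones used in the service):

```ruby
# Build the batched artifact query from this change and inspect its plan.
relation = Ci::JobArtifact
  .expired_before(Time.current)
  .artifact_unlocked
  .limit(100)

puts relation.explain   # local EXPLAIN via ActiveRecord
puts relation.to_sql    # paste into #database-lab for a production-sized plan
```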

View File

@ -4,10 +4,11 @@ group:
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Release your application **(FREE)**
# Deploy and release your application **(FREE)**
Release your application internally or to the public. Use
Deploy your application internally or to the public. Use
flags to release features incrementally.
- [Environments and deployments](../ci/environments/index.md)
- [Releases](../user/project/releases/index.md)
- [Feature flags](../operations/feature_flags.md)

View File

@ -206,7 +206,7 @@ required when the latest security report in a merge request:
- Contains vulnerabilities with severity levels (for example, `high`, `critical`, or `unknown`)
matching the rule's severity levels.
- Contains a vulnerability count higher than the rule allows.
- Is not generated during pipeline execution.
- Is not yet generated (until pipeline completion).
An approval is optional when the security report:

View File

@ -549,6 +549,8 @@ NPM_TOKEN=<your_token> npm install
If you get this error, ensure that:
- The Package Registry is enabled in your project settings. Although the Package Registry is enabled
by default, it's possible to [disable it](../package_registry/#disable-the-package-registry).
- Your token is not expired and has appropriate permissions.
- A package with the same name or version doesn't already exist within the given scope.
- Your NPM package name does not contain a dot `.`. This is a [known issue](https://gitlab.com/gitlab-org/gitlab-ee/issues/10248)

View File

@ -6,10 +6,14 @@ module Gitlab
class Metrics
include Gitlab::Utils::StrongMemoize
def increment_destroyed_artifacts(size)
def increment_destroyed_artifacts_count(size)
destroyed_artifacts_counter.increment({}, size.to_i)
end
def increment_destroyed_artifacts_bytes(bytes)
destroyed_artifacts_bytes_counter.increment({}, bytes)
end
private
def destroyed_artifacts_counter
@ -20,6 +24,15 @@ module Gitlab
::Gitlab::Metrics.counter(name, comment)
end
end
def destroyed_artifacts_bytes_counter
strong_memoize(:destroyed_artifacts_bytes_counter) do
name = :destroyed_job_artifacts_bytes_total
comment = 'Counter of bytes of destroyed expired job artifacts'
::Gitlab::Metrics.counter(name, comment)
end
end
end
end
end
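
Both the renamed count counter and the new bytes counter are plain Prometheus counters; a minimal usage sketch, assuming `Metrics.new` takes no arguments (as in the spec further down):

```ruby
# Drive the two counters the same way DestroyBatchService does, then read
# them back via their private, memoized accessors (as the spec does).
metrics = Gitlab::Ci::Artifacts::Metrics.new

metrics.increment_destroyed_artifacts_count(3)        # three artifact rows removed
metrics.increment_destroyed_artifacts_bytes(107_464)  # their combined size in bytes

metrics.send(:destroyed_artifacts_counter).get        # => 3
metrics.send(:destroyed_artifacts_bytes_counter).get  # => 107464
```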

View File

@ -10,9 +10,9 @@ RSpec.describe Gitlab::Ci::Artifacts::Metrics, :prometheus do
let(:counter) { metrics.send(:destroyed_artifacts_counter) }
it 'increments a single counter' do
subject.increment_destroyed_artifacts(10)
subject.increment_destroyed_artifacts(20)
subject.increment_destroyed_artifacts(30)
subject.increment_destroyed_artifacts_count(10)
subject.increment_destroyed_artifacts_count(20)
subject.increment_destroyed_artifacts_count(30)
expect(counter.get).to eq 60
expect(counter.values.count).to eq 1

View File

@ -16,26 +16,43 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
let_it_be(:job) { create(:ci_build, :success, pipeline: pipeline) }
context 'when artifact is expired' do
let!(:artifact) { create(:ci_job_artifact, :expired, job: job) }
let!(:artifact) { create(:ci_job_artifact, :expired, job: job, locked: job.pipeline.locked) }
context 'with preloaded relationships' do
before do
stub_const("#{described_class}::LOOP_LIMIT", 1)
end
it 'performs the smallest number of queries for job_artifacts' do
log = ActiveRecord::QueryRecorder.new { subject }
context 'with ci_destroy_unlocked_job_artifacts feature flag disabled' do
before do
stub_feature_flags(ci_destroy_unlocked_job_artifacts: false)
end
# SELECT expired ci_job_artifacts - 3 queries from each_batch
# PRELOAD projects, routes, project_statistics
# BEGIN
# INSERT into ci_deleted_objects
# DELETE loaded ci_job_artifacts
# DELETE security_findings -- for EE
# COMMIT
# SELECT next expired ci_job_artifacts
it 'performs the smallest number of queries for job_artifacts' do
log = ActiveRecord::QueryRecorder.new { subject }
expect(log.count).to be_within(1).of(10)
# SELECT expired ci_job_artifacts - 3 queries from each_batch
# PRELOAD projects, routes, project_statistics
# BEGIN
# INSERT into ci_deleted_objects
# DELETE loaded ci_job_artifacts
# DELETE security_findings -- for EE
# COMMIT
# SELECT next expired ci_job_artifacts
expect(log.count).to be_within(1).of(10)
end
end
context 'with ci_destroy_unlocked_job_artifacts feature flag enabled' do
before do
stub_feature_flags(ci_destroy_unlocked_job_artifacts: true)
end
it 'performs the smallest number of queries for job_artifacts' do
log = ActiveRecord::QueryRecorder.new { subject }
expect(log.count).to be_within(1).of(8)
end
end
end
@ -53,7 +70,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when the artifact has a file attached to it' do
let!(:artifact) { create(:ci_job_artifact, :expired, :zip, job: job) }
let!(:artifact) { create(:ci_job_artifact, :expired, :zip, job: job, locked: job.pipeline.locked) }
it 'creates a deleted object' do
expect { subject }.to change { Ci::DeletedObject.count }.by(1)
@ -74,7 +91,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when artifact is locked' do
let!(:artifact) { create(:ci_job_artifact, :expired, job: locked_job) }
let!(:artifact) { create(:ci_job_artifact, :expired, job: locked_job, locked: locked_job.pipeline.locked) }
it 'does not destroy job artifact' do
expect { subject }.not_to change { Ci::JobArtifact.count }
@ -83,7 +100,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when artifact is not expired' do
let!(:artifact) { create(:ci_job_artifact, job: job) }
let!(:artifact) { create(:ci_job_artifact, job: job, locked: job.pipeline.locked) }
it 'does not destroy expired job artifacts' do
expect { subject }.not_to change { Ci::JobArtifact.count }
@ -91,7 +108,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when artifact is permanent' do
let!(:artifact) { create(:ci_job_artifact, expire_at: nil, job: job) }
let!(:artifact) { create(:ci_job_artifact, expire_at: nil, job: job, locked: job.pipeline.locked) }
it 'does not destroy expired job artifacts' do
expect { subject }.not_to change { Ci::JobArtifact.count }
@ -99,7 +116,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when failed to destroy artifact' do
let!(:artifact) { create(:ci_job_artifact, :expired, job: job) }
let!(:artifact) { create(:ci_job_artifact, :expired, job: job, locked: job.pipeline.locked) }
before do
stub_const("#{described_class}::LOOP_LIMIT", 10)
@ -135,7 +152,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when exclusive lease has already been taken by the other instance' do
let!(:artifact) { create(:ci_job_artifact, :expired, job: job) }
let!(:artifact) { create(:ci_job_artifact, :expired, job: job, locked: job.pipeline.locked) }
before do
stub_exclusive_lease_taken(described_class::EXCLUSIVE_LOCK_KEY, timeout: described_class::LOCK_TIMEOUT)
@ -149,8 +166,8 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
context 'with a second artifact and batch size of 1' do
let(:second_job) { create(:ci_build, :success, pipeline: pipeline) }
let!(:second_artifact) { create(:ci_job_artifact, :archive, expire_at: 1.day.ago, job: second_job) }
let!(:artifact) { create(:ci_job_artifact, :expired, job: job) }
let!(:second_artifact) { create(:ci_job_artifact, :archive, expire_at: 1.day.ago, job: second_job, locked: job.pipeline.locked) }
let!(:artifact) { create(:ci_job_artifact, :expired, job: job, locked: job.pipeline.locked) }
before do
stub_const("#{described_class}::BATCH_SIZE", 1)
@ -206,8 +223,8 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when some artifacts are locked' do
let!(:artifact) { create(:ci_job_artifact, :expired, job: job) }
let!(:locked_artifact) { create(:ci_job_artifact, :expired, job: locked_job) }
let!(:artifact) { create(:ci_job_artifact, :expired, job: job, locked: job.pipeline.locked) }
let!(:locked_artifact) { create(:ci_job_artifact, :expired, job: locked_job, locked: locked_job.pipeline.locked) }
it 'destroys only unlocked artifacts' do
expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
@ -216,7 +233,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when all artifacts are locked' do
let!(:artifact) { create(:ci_job_artifact, :expired, job: locked_job) }
let!(:artifact) { create(:ci_job_artifact, :expired, job: locked_job, locked: locked_job.pipeline.locked) }
it 'destroys no artifacts' do
expect { subject }.to change { Ci::JobArtifact.count }.by(0)

View File

@ -29,7 +29,8 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
it 'reports metrics for destroyed artifacts' do
expect_next_instance_of(Gitlab::Ci::Artifacts::Metrics) do |metrics|
expect(metrics).to receive(:increment_destroyed_artifacts).with(1).and_call_original
expect(metrics).to receive(:increment_destroyed_artifacts_count).with(1).and_call_original
expect(metrics).to receive(:increment_destroyed_artifacts_bytes).with(107464).and_call_original
end
execute