# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Ci::JobArtifact do
  let(:artifact) { create(:ci_job_artifact, :archive) }

  describe "Associations" do
    it { is_expected.to belong_to(:project) }
    it { is_expected.to belong_to(:job) }
  end

  it { is_expected.to respond_to(:file) }
  it { is_expected.to respond_to(:created_at) }
  it { is_expected.to respond_to(:updated_at) }

  it { is_expected.to delegate_method(:open).to(:file) }
  it { is_expected.to delegate_method(:exists?).to(:file) }

  it_behaves_like 'having unique enum values'

  it_behaves_like 'UpdateProjectStatistics', :with_counter_attribute do
    let_it_be(:job, reload: true) { create(:ci_build) }

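    # 107464 matches the byte size of the :archive fixture; the
    # "sets the size from the file size" example below asserts the same value.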
    subject { build(:ci_job_artifact, :archive, job: job, size: 107464) }
  end

  describe '.not_expired' do
    it 'returns artifacts that have not expired' do
      _expired_artifact = create(:ci_job_artifact, :expired)

      expect(described_class.not_expired).to contain_exactly(artifact)
    end
  end

  describe '.with_reports' do
    let!(:artifact) { create(:ci_job_artifact, :archive) }

    subject { described_class.with_reports }

    it { is_expected.to be_empty }

    context 'when there are reports' do
      let!(:metrics_report) { create(:ci_job_artifact, :junit) }
      let!(:codequality_report) { create(:ci_job_artifact, :codequality) }

      it { is_expected.to match_array([metrics_report, codequality_report]) }
    end
  end

  describe '.test_reports' do
    subject { described_class.test_reports }

    context 'when there is a test report' do
      let!(:artifact) { create(:ci_job_artifact, :junit) }

      it { is_expected.to eq([artifact]) }
    end

    context 'when there are no test reports' do
      let!(:artifact) { create(:ci_job_artifact, :archive) }

      it { is_expected.to be_empty }
    end
  end

  describe '.accessibility_reports' do
    subject { described_class.accessibility_reports }

    context 'when there is an accessibility report' do
      let(:artifact) { create(:ci_job_artifact, :accessibility) }

      it { is_expected.to eq([artifact]) }
    end

    context 'when there are no accessibility reports' do
      let(:artifact) { create(:ci_job_artifact, :archive) }

      it { is_expected.to be_empty }
    end
  end

  describe '.coverage_reports' do
    subject { described_class.coverage_reports }

    context 'when there is a coverage report' do
      let!(:artifact) { create(:ci_job_artifact, :cobertura) }

      it { is_expected.to eq([artifact]) }
    end

    context 'when there are no coverage reports' do
      let!(:artifact) { create(:ci_job_artifact, :archive) }

      it { is_expected.to be_empty }
    end
  end

  describe '.codequality_reports' do
    subject { described_class.codequality_reports }

    context 'when there is a codequality report' do
      let!(:artifact) { create(:ci_job_artifact, :codequality) }

      it { is_expected.to eq([artifact]) }
    end

    context 'when there are no codequality reports' do
      let!(:artifact) { create(:ci_job_artifact, :archive) }

      it { is_expected.to be_empty }
    end
  end

  describe '.terraform_reports' do
    context 'when there is a terraform report' do
      it 'returns the job artifact' do
        artifact = create(:ci_job_artifact, :terraform)

        expect(described_class.terraform_reports).to eq([artifact])
      end
    end

    context 'when there are no terraform reports' do
      it 'returns an empty array' do
        expect(described_class.terraform_reports).to eq([])
      end
    end
  end

  describe '.associated_file_types_for' do
    using RSpec::Parameterized::TableSyntax

    subject { Ci::JobArtifact.associated_file_types_for(file_type) }

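    # 'quality' is not a registered file type, so no associated types are expected for it.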
    where(:file_type, :result) do
      'codequality' | %w(codequality)
      'quality'     | nil
    end

    with_them do
      it { is_expected.to eq result }
    end
  end

  describe '.erasable_file_types' do
    subject { described_class.erasable_file_types }

    it 'returns a list of erasable file types' do
      all_types = described_class.file_types.keys
      erasable_types = all_types - described_class::NON_ERASABLE_FILE_TYPES

      expect(subject).to contain_exactly(*erasable_types)
    end
  end

  describe '.erasable' do
    subject { described_class.erasable }

    context 'when there is an erasable artifact' do
      let!(:artifact) { create(:ci_job_artifact, :junit) }

      it { is_expected.to eq([artifact]) }
    end

    context 'when there are no erasable artifacts' do
      let!(:artifact) { create(:ci_job_artifact, :trace) }

      it { is_expected.to be_empty }
    end
  end

  describe '.downloadable' do
    subject { described_class.downloadable }

    it 'filters for downloadable artifacts' do
      downloadable_artifact = create(:ci_job_artifact, :codequality)
      _not_downloadable_artifact = create(:ci_job_artifact, :trace)

      expect(subject).to contain_exactly(downloadable_artifact)
    end
  end

  describe '.archived_trace_exists_for?' do
    subject { described_class.archived_trace_exists_for?(job_id) }

    let!(:artifact) { create(:ci_job_artifact, :trace, job: job) }
    let(:job) { create(:ci_build) }

    context 'when the specified job_id exists' do
      let(:job_id) { job.id }

      it { is_expected.to be_truthy }

      context 'when the job does not have an archived trace' do
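        # Override the outer `let!` with an empty definition so no trace artifact is created for the job.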
        let!(:artifact) { }

        it { is_expected.to be_falsy }
      end
    end

    context 'when the specified job_id does not exist' do
      let(:job_id) { 10000 }

      it { is_expected.to be_falsy }
    end
  end

  describe '#archived_trace_exists?' do
    subject { artifact.archived_trace_exists? }

    context 'when the file exists' do
      it { is_expected.to be_truthy }
    end

    context 'when the file does not exist' do
      before do
        artifact.file.remove!
      end

      it { is_expected.to be_falsy }
    end
  end

  describe '.for_sha' do
    let(:first_pipeline) { create(:ci_pipeline) }
    let(:second_pipeline) { create(:ci_pipeline, project: first_pipeline.project, sha: Digest::SHA1.hexdigest(SecureRandom.hex)) }
    let!(:first_artifact) { create(:ci_job_artifact, job: create(:ci_build, pipeline: first_pipeline)) }
    let!(:second_artifact) { create(:ci_job_artifact, job: create(:ci_build, pipeline: second_pipeline)) }

    it 'returns job artifacts for a given pipeline sha' do
      expect(described_class.for_sha(first_pipeline.sha, first_pipeline.project.id)).to eq([first_artifact])
      expect(described_class.for_sha(second_pipeline.sha, first_pipeline.project.id)).to eq([second_artifact])
    end
  end

  describe '.for_job_name' do
    it 'returns job artifacts for a given job name' do
      first_job = create(:ci_build, name: 'first')
      second_job = create(:ci_build, name: 'second')
      first_artifact = create(:ci_job_artifact, job: first_job)
      second_artifact = create(:ci_job_artifact, job: second_job)

      expect(described_class.for_job_name(first_job.name)).to eq([first_artifact])
      expect(described_class.for_job_name(second_job.name)).to eq([second_artifact])
    end
  end

  describe '.unlocked' do
    let_it_be(:job_artifact) { create(:ci_job_artifact) }

    context 'with locked pipelines' do
      before do
        job_artifact.job.pipeline.artifacts_locked!
      end

      it 'returns an empty array' do
        expect(described_class.unlocked).to be_empty
      end
    end

    context 'with unlocked pipelines' do
      before do
        job_artifact.job.pipeline.unlocked!
      end

      it 'returns the artifact' do
        expect(described_class.unlocked).to eq([job_artifact])
      end
    end
  end

  describe '.order_expired_desc' do
    let_it_be(:first_artifact) { create(:ci_job_artifact, expire_at: 2.days.ago) }
    let_it_be(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }

    it 'returns ordered artifacts' do
      expect(described_class.order_expired_desc).to eq([second_artifact, first_artifact])
    end
  end

  describe '.for_project' do
    it 'returns artifacts only for given project(s)', :aggregate_failures do
      artifact1 = create(:ci_job_artifact)
      artifact2 = create(:ci_job_artifact)
      create(:ci_job_artifact)

      expect(described_class.for_project(artifact1.project)).to match_array([artifact1])
      expect(described_class.for_project([artifact1.project, artifact2.project])).to match_array([artifact1, artifact2])
    end
  end

  describe '.created_in_time_range' do
    it 'returns artifacts created in given time range', :aggregate_failures do
      artifact1 = create(:ci_job_artifact, created_at: 1.day.ago)
      artifact2 = create(:ci_job_artifact, created_at: 1.month.ago)
      artifact3 = create(:ci_job_artifact, created_at: 1.year.ago)

      expect(described_class.created_in_time_range(from: 1.week.ago)).to match_array([artifact1])
      expect(described_class.created_in_time_range(to: 1.week.ago)).to match_array([artifact2, artifact3])
      expect(described_class.created_in_time_range(from: 2.months.ago, to: 1.week.ago)).to match_array([artifact2])
    end
  end

  describe 'callbacks' do
    describe '#schedule_background_upload' do
      subject { create(:ci_job_artifact, :archive) }

      context 'when object storage is disabled' do
        before do
          stub_artifacts_object_storage(enabled: false)
        end

        it 'does not schedule the migration' do
          expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)

          subject
        end
      end

      context 'when object storage is enabled' do
        context 'when background upload is enabled' do
          before do
            stub_artifacts_object_storage(background_upload: true)
          end

          it 'schedules the model for migration' do
            expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))

            subject
          end
        end

        context 'when background upload is disabled' do
          before do
            stub_artifacts_object_storage(background_upload: false)
          end

          it 'does not schedule the migration' do
            expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)

            subject
          end
        end
      end
    end
  end

  context 'creating the artifact' do
    let(:project) { create(:project) }
    let(:artifact) { create(:ci_job_artifact, :archive, project: project) }

    it 'sets the size from the file size' do
      expect(artifact.size).to eq(107464)
    end
  end

  context 'updating the artifact file' do
    it 'updates the artifact size' do
      artifact.update!(file: fixture_file_upload('spec/fixtures/dk.png'))

      expect(artifact.size).to eq(1062)
    end
  end

  context 'when updating any field except the file' do
    let(:artifact) { create(:ci_job_artifact, :unarchived_trace_artifact, file_store: 2) }

    before do
      stub_artifacts_object_storage(direct_upload: true)
      artifact.file.object_store = 1
    end

    it 'the `after_commit` hook does not update `file_store`' do
      artifact.update!(expire_at: Time.current)

      expect(artifact.file_store).to be(2)
    end
  end

  describe 'validates file format' do
    subject { artifact }

    described_class::TYPE_AND_FORMAT_PAIRS.except(:trace).each do |file_type, file_format|
      context "when #{file_type} type with #{file_format} format" do
        let(:artifact) { build(:ci_job_artifact, file_type: file_type, file_format: file_format) }

        it { is_expected.to be_valid }
      end

      context "when #{file_type} type without format specification" do
        let(:artifact) { build(:ci_job_artifact, file_type: file_type, file_format: nil) }

        it { is_expected.not_to be_valid }
      end

      context "when #{file_type} type with other formats" do
        described_class.file_formats.except(file_format).values.each do |other_format|
          let(:artifact) { build(:ci_job_artifact, file_type: file_type, file_format: other_format) }

          it { is_expected.not_to be_valid }
        end
      end
    end
  end

  describe 'validates DEFAULT_FILE_NAMES' do
    subject { described_class::DEFAULT_FILE_NAMES }

    described_class.file_types.each do |file_type, _|
      it "expects #{file_type} to be included" do
        is_expected.to include(file_type.to_sym)
      end
    end
  end

  describe 'validates TYPE_AND_FORMAT_PAIRS' do
    subject { described_class::TYPE_AND_FORMAT_PAIRS }

    described_class.file_types.each do |file_type, _|
      it "expects #{file_type} to be included" do
        expect(described_class.file_formats).to include(subject[file_type.to_sym])
      end
    end
  end

  describe '#file' do
    subject { artifact.file }

    context 'the uploader api' do
      it { is_expected.to respond_to(:store_dir) }
      it { is_expected.to respond_to(:cache_dir) }
      it { is_expected.to respond_to(:work_dir) }
    end
  end

  describe '#expired?' do
    subject { artifact.expired? }

    context 'when expire_at is nil' do
      let(:artifact) { build(:ci_job_artifact, expire_at: nil) }

      it 'returns false' do
        is_expected.to be_falsy
      end
    end

    context 'when expire_at is in the past' do
      let(:artifact) { build(:ci_job_artifact, expire_at: Date.yesterday) }

      it 'returns true' do
        is_expected.to be_truthy
      end
    end

    context 'when expire_at is in the future' do
      let(:artifact) { build(:ci_job_artifact, expire_at: Date.tomorrow) }

      it 'returns false' do
        is_expected.to be_falsey
      end
    end
  end

  describe '#expiring?' do
    subject { artifact.expiring? }

    context 'when expire_at is nil' do
      let(:artifact) { build(:ci_job_artifact, expire_at: nil) }

      it 'returns false' do
        is_expected.to be_falsy
      end
    end

    context 'when expire_at is in the past' do
      let(:artifact) { build(:ci_job_artifact, expire_at: Date.yesterday) }

      it 'returns false' do
        is_expected.to be_falsy
      end
    end

    context 'when expire_at is in the future' do
      let(:artifact) { build(:ci_job_artifact, expire_at: Date.tomorrow) }

      it 'returns true' do
        is_expected.to be_truthy
      end
    end
  end

  describe '#expire_in' do
    subject { artifact.expire_in }

    it { is_expected.to be_nil }

    context 'when expire_at is specified' do
      let(:expire_at) { Time.current + 7.days }

      before do
        artifact.expire_at = expire_at
      end

      it { is_expected.to be_within(5).of(expire_at - Time.current) }
    end
  end

  describe '#expire_in=' do
    subject { artifact.expire_in }

    it 'when assigning valid duration' do
      artifact.expire_in = '7 days'

      is_expected.to be_within(10).of(7.days.to_i)
    end

    it 'when assigning invalid duration' do
      expect { artifact.expire_in = '7 elephants' }.to raise_error(ChronicDuration::DurationParseError)

      is_expected.to be_nil
    end

    it 'when resetting value' do
      artifact.expire_in = nil

      is_expected.to be_nil
    end

    it 'when setting to 0' do
      artifact.expire_in = '0'

      is_expected.to be_nil
    end
  end

  describe '#store_after_commit?' do
    let(:file_type) { :archive }
    let(:artifact) { build(:ci_job_artifact, file_type) }

    context 'when direct upload is enabled' do
      before do
        stub_artifacts_object_storage(direct_upload: true)
      end

      context 'when the artifact is a trace' do
        let(:file_type) { :trace }

        it 'returns true' do
          expect(artifact.store_after_commit?).to be_truthy
        end
      end

      context 'when the artifact is not a trace' do
        it 'returns false' do
          expect(artifact.store_after_commit?).to be_falsey
        end
      end
    end

    context 'when direct upload is disabled' do
      before do
        stub_artifacts_object_storage(direct_upload: false)
      end

      it 'returns false' do
        expect(artifact.store_after_commit?).to be_falsey
      end
    end
  end

  describe 'file is being stored' do
    subject { create(:ci_job_artifact, :archive) }

    context 'when existing object has local store' do
      it_behaves_like 'mounted file in local store'
    end

    context 'when direct upload is enabled' do
      before do
        stub_artifacts_object_storage(direct_upload: true)
      end

      context 'when file is stored' do
        it_behaves_like 'mounted file in object store'
      end
    end
  end

  describe '.file_types' do
    context 'all file types have corresponding limit' do
      let_it_be(:plan_limits) { create(:plan_limits) }

      where(:file_type) do
        described_class.file_types.keys
      end

      with_them do
        let(:limit_name) { "#{described_class::PLAN_LIMIT_PREFIX}#{file_type}" }

        it { expect(plan_limits.attributes).to include(limit_name), file_type_limit_failure_message(file_type, limit_name) }
      end
    end
  end

  describe '.max_artifact_size' do
    let(:build) { create(:ci_build) }

    subject(:max_size) { described_class.max_artifact_size(type: artifact_type, project: build.project) }

    context 'when file type is supported' do
      let(:project_closest_setting) { 1024 }
      let(:artifact_type) { 'junit' }
      let(:limit_name) { "#{described_class::PLAN_LIMIT_PREFIX}#{artifact_type}" }

      let!(:plan_limits) { create(:plan_limits, :default_plan) }

      shared_examples_for 'basing off the project closest setting' do
        it { is_expected.to eq(project_closest_setting.megabytes.to_i) }
      end

      shared_examples_for 'basing off the plan limit' do
        it { is_expected.to eq(max_size_for_type.megabytes.to_i) }
      end

      before do
        allow(build.project).to receive(:closest_setting).with(:max_artifacts_size).and_return(project_closest_setting)
      end

      context 'and plan limit is disabled for the given artifact type' do
        before do
          plan_limits.update!(limit_name => 0)
        end

        it_behaves_like 'basing off the project closest setting'

        context 'and project closest setting results to zero' do
          let(:project_closest_setting) { 0 }

          it { is_expected.to eq(0) }
        end
      end

      context 'and plan limit is enabled for the given artifact type' do
        before do
          plan_limits.update!(limit_name => max_size_for_type)
        end

        context 'and plan limit is smaller than project setting' do
          let(:max_size_for_type) { project_closest_setting - 1 }

          it_behaves_like 'basing off the plan limit'
        end

        context 'and plan limit is larger than project setting' do
          let(:max_size_for_type) { project_closest_setting + 1 }

          it_behaves_like 'basing off the project closest setting'
        end
      end
    end
  end

  context 'FastDestroyAll' do
    let_it_be(:project) { create(:project) }
    let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
    let_it_be(:job) { create(:ci_build, pipeline: pipeline, project: project) }

    let!(:job_artifact) { create(:ci_job_artifact, :archive, job: job) }
    let(:subjects) { pipeline.job_artifacts }

    describe '.use_fast_destroy' do
      it 'performs cascading delete with fast_destroy_all' do
        expect(Ci::DeletedObject.count).to eq(0)
        expect(subjects.count).to be > 0

        expect { pipeline.destroy! }.not_to raise_error

        expect(subjects.count).to eq(0)
        expect(Ci::DeletedObject.count).to be > 0
      end

      it 'updates project statistics' do
        expect(ProjectStatistics).to receive(:increment_statistic).once
          .with(project, :build_artifacts_size, -job_artifact.file.size)

        pipeline.destroy!
      end
    end
  end

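  # Builds the failure message used by the `.file_types` plan-limit examples above.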
  def file_type_limit_failure_message(type, limit_name)
    <<~MSG
      The artifact type `#{type}` is missing its counterpart plan limit which is expected to be named `#{limit_name}`.

      Please refer to https://docs.gitlab.com/ee/development/application_limits.html on how to add new plan limit columns.

      Take note that while existing max size plan limits default to 0, succeeding new limits are recommended to have
      non-zero default values. Also, remember to update the plan limits documentation (doc/administration/instance_limits.md)
      when changes or new entries are made.
    MSG
  end

  it_behaves_like 'it has loose foreign keys' do
    let(:factory_name) { :ci_job_artifact }
  end

  context 'loose foreign key on ci_job_artifacts.project_id' do
    it_behaves_like 'cleanup by a loose foreign key' do
      let!(:parent) { create(:project) }
      let!(:model) { create(:ci_job_artifact, project: parent) }
    end
  end
end