another round of EE code removal

parent 0d458b96e8
commit 4484587ef8

8 changed files with 75 additions and 269 deletions
@@ -15,9 +15,11 @@ class JobArtifactUploader < GitlabUploader
  end

  def open
    raise 'Only File System is supported' unless file_storage?

    File.open(path, "rb") if path
    if file_storage?
      File.open(path, "rb") if path
    else
      ::Gitlab::Ci::Trace::HttpIO.new(url, size) if url
    end
  end

  private

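Not part of the commit, for orientation only: after this hunk, JobArtifactUploader#open returns either a local File or a Gitlab::Ci::Trace::HttpIO, and both respond to read. A minimal caller sketch under that assumption (the accessor and variable names are illustrative, not taken from the diff):

# Hedged sketch, not GitLab code: both branches of #open yield an IO-like
# object, so a caller can stream a trace without caring about the backend.
uploader = job.job_artifacts_trace.file   # a JobArtifactUploader (assumed accessor)

if (stream = uploader.open)
  puts stream.read(1024)                  # works for File and for HttpIO alike
else
  puts 'no trace stored for this job'
end
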
@@ -290,12 +290,10 @@ ActiveRecord::Schema.define(version: 20180216121030) do
    t.integer "auto_canceled_by_id"
    t.boolean "retried"
    t.integer "stage_id"
    t.integer "artifacts_file_store", default: 1, null: false
    t.integer "artifacts_metadata_store", default: 1, null: false
    t.boolean "protected"
    t.integer "failure_reason"
    t.integer "artifacts_file_store"
    t.integer "artifacts_metadata_store"
    t.boolean "protected"
    t.integer "failure_reason"
  end

  add_index "ci_builds", ["artifacts_expire_at"], name: "index_ci_builds_on_artifacts_expire_at", where: "(artifacts_file <> ''::text)", using: :btree

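Not part of the diff: the schema.rb change above drops default: 1, null: false from artifacts_file_store and artifacts_metadata_store. A migration producing that schema could look roughly like the sketch below; the class name and the reversal details are assumptions, not the commit's actual migration:

# Hypothetical migration matching the schema change above (name assumed).
class RemoveArtifactsStoreDefaultsFromCiBuilds < ActiveRecord::Migration
  def up
    change_column_null :ci_builds, :artifacts_file_store, true
    change_column_null :ci_builds, :artifacts_metadata_store, true
    change_column_default :ci_builds, :artifacts_file_store, nil
    change_column_default :ci_builds, :artifacts_metadata_store, nil
  end

  def down
    change_column_default :ci_builds, :artifacts_file_store, 1
    change_column_default :ci_builds, :artifacts_metadata_store, 1
    change_column_null :ci_builds, :artifacts_file_store, false, 1      # backfills NULLs with 1
    change_column_null :ci_builds, :artifacts_metadata_store, false, 1
  end
end
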
@@ -338,7 +336,6 @@ ActiveRecord::Schema.define(version: 20180216121030) do
    t.datetime_with_timezone "updated_at", null: false
    t.datetime_with_timezone "expire_at"
    t.string "file"
    t.integer "file_store"
  end

  add_index "ci_job_artifacts", ["expire_at", "job_id"], name: "index_ci_job_artifacts_on_expire_at_and_job_id", using: :btree

@@ -1,7 +1,9 @@
# coding: utf-8
require 'spec_helper'

describe Projects::JobsController do
  include ApiHelpers
  include HttpIOHelpers

  let(:project) { create(:project, :public) }
  let(:pipeline) { create(:ci_pipeline, project: project) }

@@ -203,6 +205,41 @@ describe Projects::JobsController do
      end
    end

    context 'when trace artifact is in ObjectStorage' do
      let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }

      before do
        allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
        allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
        allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
      end

      context 'when there are no network issues' do
        before do
          stub_remote_trace_206

          get_trace
        end

        it 'returns a trace' do
          expect(response).to have_gitlab_http_status(:ok)
          expect(json_response['id']).to eq job.id
          expect(json_response['status']).to eq job.status
          expect(json_response['html']).to eq(job.trace.html)
        end
      end

      context 'when there is a network issue' do
        before do
          stub_remote_trace_500
        end

        it 'returns a trace' do
          expect { get_trace }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
        end
      end
    end

    def get_trace
      get :trace, namespace_id: project.namespace,
                  project_id: project,

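HttpIOHelpers and its stub_remote_trace_206 / stub_remote_trace_500 helpers are referenced above but not shown in this commit. A minimal sketch of what such helpers might look like, assuming WebMock is available in the spec suite; the URL and body values are placeholders, not the real module:

# Hypothetical sketch of the HttpIOHelpers used above (not part of this diff).
module HttpIOHelpers
  def remote_trace_url
    'http://object-storage.example.com/trace.log'   # placeholder
  end

  def remote_trace_body
    'BUILD TRACE'
  end

  def remote_trace_size
    remote_trace_body.bytesize
  end

  def stub_remote_trace_206
    # Simulate object storage answering ranged GETs with partial content.
    stub_request(:get, remote_trace_url)
      .to_return(status: 206, body: remote_trace_body,
                 headers: { 'Content-Type' => 'text/plain' })
  end

  def stub_remote_trace_500
    # Simulate a transient failure on the remote storage side.
    stub_request(:get, remote_trace_url).to_return(status: 500)
  end
end
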
@@ -446,14 +483,18 @@ describe Projects::JobsController do
  end

  describe 'GET raw' do
    before do
      get_raw
    subject do
      post :raw, namespace_id: project.namespace,
                 project_id: project,
                 id: job.id
    end

    context 'when job has a trace artifact' do
      let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }

      it 'returns a trace' do
        response = subject

        expect(response).to have_gitlab_http_status(:ok)
        expect(response.content_type).to eq 'text/plain; charset=utf-8'
        expect(response.body).to eq job.job_artifacts_trace.open.read

@@ -464,6 +505,8 @@ describe Projects::JobsController do
      let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }

      it 'send a trace file' do
        response = subject

        expect(response).to have_gitlab_http_status(:ok)
        expect(response.content_type).to eq 'text/plain; charset=utf-8'
        expect(response.body).to eq 'BUILD TRACE'

@@ -474,14 +517,22 @@ describe Projects::JobsController do
      let(:job) { create(:ci_build, pipeline: pipeline) }

      it 'returns not_found' do
        response = subject

        expect(response).to have_gitlab_http_status(:not_found)
      end
    end

    def get_raw
      post :raw, namespace_id: project.namespace,
                 project_id: project,
                 id: job.id
    context 'when the trace artifact is in ObjectStorage' do
      let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }

      before do
        allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
      end

      it 'redirect to the trace file url' do
        expect(subject).to redirect_to(job.job_artifacts_trace.file.url)
      end
    end
  end
end

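Not part of the diff, only a reading aid: the 'GET raw' expectations above describe roughly this branching — stream the artifact when it sits on local disk, redirect to its URL when it lives in object storage. A hedged sketch of that behaviour (method and helper names are assumptions, not the controller's real code; the live-trace and not-found contexts go through paths omitted here):

# Hedged sketch of the branching the spec exercises; not the actual action.
def raw
  trace_artifact = job.job_artifacts_trace&.file   # assumed accessor for the uploader

  if trace_artifact.nil?
    head :not_found
  elsif trace_artifact.file_storage?
    send_file trace_artifact.path, type: 'text/plain; charset=utf-8', disposition: 'inline'
  else
    redirect_to trace_artifact.url                 # object storage case: redirect
  end
end
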
@@ -1,73 +0,0 @@
require 'spec_helper'

describe Projects::JobsController do
  include ApiHelpers
  include HttpIOHelpers

  let(:project) { create(:project, :public) }
  let(:pipeline) { create(:ci_pipeline, project: project) }

  describe 'GET trace.json' do
    context 'when trace artifact is in ObjectStorage' do
      let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }

      before do
        allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
        allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
        allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
      end

      context 'when there are no network issues' do
        before do
          stub_remote_trace_206

          get_trace
        end

        it 'returns a trace' do
          expect(response).to have_gitlab_http_status(:ok)
          expect(json_response['id']).to eq job.id
          expect(json_response['status']).to eq job.status
          expect(json_response['html']).to eq(job.trace.html)
        end
      end

      context 'when there is a network issue' do
        before do
          stub_remote_trace_500
        end

        it 'returns a trace' do
          expect { get_trace }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
        end
      end
    end

    def get_trace
      get :trace, namespace_id: project.namespace,
                  project_id: project,
                  id: job.id,
                  format: :json
    end
  end

  describe 'GET raw' do
    subject do
      post :raw, namespace_id: project.namespace,
                 project_id: project,
                 id: job.id
    end

    context 'when the trace artifact is in ObjectStorage' do
      let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }

      before do
        allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
      end

      it 'redirect to the trace file url' do
        expect(subject).to redirect_to(job.job_artifacts_trace.file.url)
      end
    end
  end
end

@@ -1,110 +0,0 @@
require 'spec_helper'

describe LfsObject do
  describe '#local_store?' do
    it 'returns true when file_store is nil' do
      subject.file_store = nil

      expect(subject.local_store?).to eq true
    end

    it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do
      subject.file_store = LfsObjectUploader::Store::LOCAL

      expect(subject.local_store?).to eq true
    end

    it 'returns false whe file_store is equal to LfsObjectUploader::Store::REMOTE' do
      subject.file_store = LfsObjectUploader::Store::REMOTE

      expect(subject.local_store?).to eq false
    end
  end

  describe '#destroy' do
    subject { create(:lfs_object, :with_file) }

    context 'when running in a Geo primary node' do
      set(:primary) { create(:geo_node, :primary) }
      set(:secondary) { create(:geo_node) }

      it 'logs an event to the Geo event log' do
        expect { subject.destroy }.to change(Geo::LfsObjectDeletedEvent, :count).by(1)
      end
    end
  end

  describe '#schedule_migration_to_object_storage' do
    before do
      stub_lfs_setting(enabled: true)
    end

    subject { create(:lfs_object, :with_file) }

    context 'when object storage is disabled' do
      before do
        stub_lfs_object_storage(enabled: false)
      end

      it 'does not schedule the migration' do
        expect(ObjectStorageUploadWorker).not_to receive(:perform_async)

        subject
      end
    end

    context 'when object storage is enabled' do
      context 'when background upload is enabled' do
        context 'when is licensed' do
          before do
            stub_lfs_object_storage(background_upload: true)
          end

          it 'schedules the model for migration' do
            expect(ObjectStorage::BackgroundMoveWorker)
              .to receive(:perform_async)
              .with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
              .once

            subject
          end

          it 'schedules the model for migration once' do
            expect(ObjectStorage::BackgroundMoveWorker)
              .to receive(:perform_async)
              .with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
              .once

            lfs_object = create(:lfs_object)
            lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png")
            lfs_object.save!
          end
        end

        context 'when is unlicensed' do
          before do
            stub_lfs_object_storage(background_upload: true, licensed: false)
          end

          it 'does not schedule the migration' do
            expect(ObjectStorageUploadWorker).not_to receive(:perform_async)

            subject
          end
        end
      end

      context 'when background upload is disabled' do
        before do
          stub_lfs_object_storage(background_upload: false)
        end

        it 'schedules the model for migration' do
          expect(ObjectStorageUploadWorker).not_to receive(:perform_async)

          subject
        end
      end
    end
  end
end

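Not part of the diff: the deleted spec above asserts that saving an LFS object can enqueue ObjectStorage::BackgroundMoveWorker with ('LfsObjectUploader', class name, :file, id). The wiring it implies looks roughly like the sketch below; the callback and guard names are assumptions, not the removed EE implementation:

# Hedged sketch of the scheduling pattern the deleted spec exercises.
class LfsObject < ActiveRecord::Base
  after_commit :schedule_migration_to_object_storage, on: :create

  def schedule_migration_to_object_storage
    return unless Gitlab.config.lfs.enabled          # mirrors stub_lfs_setting(enabled: true)
    return unless background_upload_enabled?         # assumed guard for the object-storage setting

    ObjectStorage::BackgroundMoveWorker.perform_async(
      'LfsObjectUploader', self.class.name, :file, id  # arguments taken from the spec above
    )
  end
end
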
@@ -1,50 +0,0 @@
require 'spec_helper'

describe Projects::HashedStorage::MigrateAttachmentsService do
  let(:project) { create(:project, storage_version: 1) }
  let(:service) { described_class.new(project) }
  let(:legacy_storage) { Storage::LegacyProject.new(project) }
  let(:hashed_storage) { Storage::HashedProject.new(project) }
  let(:old_attachments_path) { legacy_storage.disk_path }
  let(:new_attachments_path) { hashed_storage.disk_path }

  describe '#execute' do
    set(:primary) { create(:geo_node, :primary) }
    set(:secondary) { create(:geo_node) }

    context 'on success' do
      before do
        TestEnv.clean_test_path
        FileUtils.mkdir_p(File.join(FileUploader.root, old_attachments_path))
      end

      it 'returns true' do
        expect(service.execute).to be_truthy
      end

      it 'creates a Geo::HashedStorageAttachmentsEvent' do
        expect { service.execute }.to change(Geo::EventLog, :count).by(1)

        event = Geo::EventLog.first.event

        expect(event).to be_a(Geo::HashedStorageAttachmentsEvent)
        expect(event).to have_attributes(
          old_attachments_path: old_attachments_path,
          new_attachments_path: new_attachments_path
        )
      end
    end

    context 'on failure' do
      it 'does not create a Geo event when skipped' do
        expect { service.execute }.not_to change { Geo::EventLog.count }
      end

      it 'does not create a Geo event on failure' do
        expect(service).to receive(:move_folder!).and_raise(::Projects::HashedStorage::AttachmentMigrationError)
        expect { service.execute }.to raise_error(::Projects::HashedStorage::AttachmentMigrationError)
        expect(Geo::EventLog.count).to eq(0)
      end
    end
  end
end

@@ -1,22 +0,0 @@
require 'spec_helper'

describe JobArtifactUploader do
  let(:store) { ObjectStorage::Store::LOCAL }
  let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
  let(:uploader) { described_class.new(job_artifact, :file) }

  describe '#open' do
    subject { uploader.open }

    context 'when trace is stored in Object storage' do
      before do
        allow(uploader).to receive(:file_storage?) { false }
        allow(uploader).to receive(:url) { 'http://object_storage.com/trace' }
      end

      it 'returns http io stream' do
        is_expected.to be_a(Gitlab::Ci::Trace::HttpIO)
      end
    end
  end
end

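Not part of the diff: as used in these specs, Gitlab::Ci::Trace::HttpIO is built from a URL plus a size and then read like an ordinary IO, with transient HTTP failures surfacing as FailedToGetChunkError. A short hedged usage sketch (the URL and size are placeholders):

# Hedged usage sketch; values are illustrative only.
io = Gitlab::Ci::Trace::HttpIO.new('http://object_storage.com/trace', 1024)

begin
  puts io.read                                   # streams the remote trace contents
rescue Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError
  puts 'remote storage returned an error chunk'  # what the network-issue specs assert
end
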
@@ -48,6 +48,17 @@ describe JobArtifactUploader do
        end
      end
    end

    context 'when trace is stored in Object storage' do
      before do
        allow(uploader).to receive(:file_storage?) { false }
        allow(uploader).to receive(:url) { 'http://object_storage.com/trace' }
      end

      it 'returns http io stream' do
        is_expected.to be_a(Gitlab::Ci::Trace::HttpIO)
      end
    end
  end

  context 'file is stored in valid local_path' do