# frozen_string_literal: true

require "spec_helper"
RSpec.describe Projects::UpdatePagesService do
  let_it_be(:project, refind: true) { create(:project, :repository) }

  # Two pipelines on the same SHA: `old_pipeline` lets tests model a previous
  # deployment that should be superseded by `pipeline`'s build.
  let_it_be(:old_pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
  let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }

  let(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
  let(:invalid_file) { fixture_file_upload('spec/fixtures/dk.png') }

  let(:file) { fixture_file_upload("spec/fixtures/pages.zip") }
  let(:empty_file) { fixture_file_upload("spec/fixtures/pages_empty.zip") }
  let(:empty_metadata_filename) { "spec/fixtures/pages_empty.zip.meta" }
  let(:metadata_filename) { "spec/fixtures/pages.zip.meta" }
  let(:metadata) { fixture_file_upload(metadata_filename) if File.exist?(metadata_filename) }

  subject { described_class.new(project, build) }

  context 'for new artifacts' do
    context "for a valid job" do
      let!(:artifacts_archive) { create(:ci_job_artifact, :correct_checksum, file: file, job: build) }

      before do
        create(:ci_job_artifact, file_type: :metadata, file_format: :gzip, file: metadata, job: build)

        build.reload
      end

      it "doesn't delete artifacts after deploying" do
        expect(execute).to eq(:success)

        expect(project.pages_metadatum).to be_deployed
        expect(build.artifacts?).to eq(true)
      end

      it 'succeeds' do
        expect(project.pages_deployed?).to be_falsey
        expect(execute).to eq(:success)
        expect(project.pages_metadatum).to be_deployed
        expect(project.pages_deployed?).to be_truthy
      end

      it 'publishes a PageDeployedEvent event with project id and namespace id' do
        expected_data = {
          project_id: project.id,
          namespace_id: project.namespace_id,
          root_namespace_id: project.root_namespace.id
        }

        expect { subject.execute }.to publish_event(Pages::PageDeployedEvent).with(expected_data)
      end

      it 'creates pages_deployment and saves it in the metadata' do
        expect do
          expect(execute).to eq(:success)
        end.to change { project.pages_deployments.count }.by(1)

        deployment = project.pages_deployments.last

        expect(deployment.size).to eq(file.size)
        expect(deployment.file).to be
        expect(deployment.file_count).to eq(3)
        expect(deployment.file_sha256).to eq(artifacts_archive.file_sha256)
        expect(project.pages_metadatum.reload.pages_deployment_id).to eq(deployment.id)
        expect(deployment.ci_build_id).to eq(build.id)
      end

      it 'does not fail if pages_metadata is absent' do
        project.pages_metadatum.destroy!
        project.reload

        expect do
          expect(execute).to eq(:success)
        end.to change { project.pages_deployments.count }.by(1)

        expect(project.pages_metadatum.reload.pages_deployment).to eq(project.pages_deployments.last)
      end

      context 'when there is an old pages deployment' do
        # Deployment in an unrelated project must NOT be touched by cleanup.
        let!(:old_deployment_from_another_project) { create(:pages_deployment) }
        let!(:old_deployment) { create(:pages_deployment, project: project) }

        it 'schedules a destruction of older deployments' do
          expect(DestroyPagesDeploymentsWorker).to(
            receive(:perform_in).with(described_class::OLD_DEPLOYMENTS_DESTRUCTION_DELAY,
                                      project.id,
                                      instance_of(Integer))
          )

          execute
        end

        it 'removes older deployments', :sidekiq_inline do
          expect do
            execute
          end.not_to change { PagesDeployment.count } # it creates one and deletes one

          expect(PagesDeployment.find_by_id(old_deployment.id)).to be_nil
        end
      end

      context 'when archive does not have pages directory' do
        let(:file) { empty_file }
        let(:metadata_filename) { empty_metadata_filename }

        it 'returns an error' do
          expect(execute).not_to eq(:success)

          expect(GenericCommitStatus.last.description).to eq("Error: The `public/` folder is missing, or not declared in `.gitlab-ci.yml`.")
        end
      end

      it 'limits pages size' do
        stub_application_setting(max_pages_size: 1)
        expect(execute).not_to eq(:success)
      end

      it 'limits pages file count' do
        create(:plan_limits, :default_plan, pages_file_entries: 2)

        expect(execute).not_to eq(:success)

        expect(GenericCommitStatus.last.description).to eq("pages site contains 3 file entries, while limit is set to 2")
      end

      context 'when timeout happens by DNS error' do
        before do
          allow_next_instance_of(described_class) do |instance|
            allow(instance).to receive(:create_pages_deployment).and_raise(SocketError)
          end
        end

        it 'raises an error' do
          expect { execute }.to raise_error(SocketError)

          build.reload
          expect(deploy_status).to be_failed
          expect(project.pages_metadatum).not_to be_deployed
        end
      end

      context 'when missing artifacts metadata' do
        before do
          expect(build).to receive(:artifacts_metadata?).and_return(false)
        end

        it 'does not raise an error as failed job' do
          execute

          build.reload
          expect(deploy_status).to be_failed
          expect(project.pages_metadatum).not_to be_deployed
        end
      end

      context 'with background jobs running', :sidekiq_inline do
        it 'succeeds' do
          expect(project.pages_deployed?).to be_falsey
          expect(execute).to eq(:success)
        end
      end

      context "when sha on branch was updated before deployment was uploaded" do
        before do
          # Simulate a race: the ref moves to a different SHA while the
          # deployment is being created.
          expect(subject).to receive(:create_pages_deployment).and_wrap_original do |m, *args|
            build.update!(ref: 'feature')
            m.call(*args)
          end
        end

        shared_examples 'successfully deploys' do
          it 'succeeds' do
            expect do
              expect(execute).to eq(:success)
            end.to change { project.pages_deployments.count }.by(1)

            deployment = project.pages_deployments.last
            expect(deployment.ci_build_id).to eq(build.id)
          end
        end

        include_examples 'successfully deploys'

        context 'when old deployment present' do
          before do
            old_build = create(:ci_build, pipeline: old_pipeline, ref: 'HEAD')
            old_deployment = create(:pages_deployment, ci_build: old_build, project: project)
            project.update_pages_deployment!(old_deployment)
          end

          include_examples 'successfully deploys'
        end

        context 'when newer deployment present' do
          before do
            new_pipeline = create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha)
            new_build = create(:ci_build, pipeline: new_pipeline, ref: 'HEAD')
            new_deployment = create(:pages_deployment, ci_build: new_build, project: project)
            project.update_pages_deployment!(new_deployment)
          end

          it 'fails with outdated reference message' do
            expect(execute).to eq(:error)
            expect(project.reload.pages_metadatum).not_to be_deployed

            expect(deploy_status).to be_failed
            expect(deploy_status.description).to eq('build SHA is outdated for this ref')
          end
        end
      end

      it 'fails when uploaded deployment size is wrong' do
        allow_next_instance_of(PagesDeployment) do |deployment|
          allow(deployment)
            .to receive(:size)
            .and_return(file.size + 1)
        end

        expect(execute).not_to eq(:success)

        expect(GenericCommitStatus.last.description).to eq('The uploaded artifact size does not match the expected value')
        project.pages_metadatum.reload
        expect(project.pages_metadatum).not_to be_deployed
        expect(project.pages_metadatum.pages_deployment).to be_nil
      end
    end
  end

  # this situation should never happen in real life because all new archives have sha256
  # and we only use new archives
  # this test is here just to clarify that this behavior is intentional
  context 'when artifacts archive does not have sha256' do
    let!(:artifacts_archive) { create(:ci_job_artifact, file: file, job: build) }

    before do
      create(:ci_job_artifact, file_type: :metadata, file_format: :gzip, file: metadata, job: build)

      build.reload
    end

    it 'fails with exception raised' do
      expect do
        execute
      end.to raise_error("Validation failed: File sha256 can't be blank")
    end
  end

  it 'fails if no artifacts' do
    expect(execute).not_to eq(:success)
  end

  it 'fails for invalid archive' do
    create(:ci_job_artifact, :archive, file: invalid_file, job: build)
    expect(execute).not_to eq(:success)
  end

  describe 'maximum pages artifacts size' do
    let(:metadata) { spy('metadata') }

    before do
      file = fixture_file_upload('spec/fixtures/pages.zip')
      metafile = fixture_file_upload('spec/fixtures/pages.zip.meta')

      create(:ci_job_artifact, :archive, :correct_checksum, file: file, job: build)
      create(:ci_job_artifact, :metadata, file: metafile, job: build)

      allow(build).to receive(:artifacts_metadata_entry)
        .and_return(metadata)
    end

    context 'when maximum pages size is set to zero' do
      before do
        stub_application_setting(max_pages_size: 0)
      end

      # A zero setting means "unlimited", which is capped by the hard maximum.
      it_behaves_like 'pages size limit is', ::Gitlab::Pages::MAX_SIZE
    end

    context 'when size is limited on the instance level' do
      before do
        stub_application_setting(max_pages_size: 100)
      end

      it_behaves_like 'pages size limit is', 100.megabytes
    end
  end

  context 'when retrying the job' do
    let!(:older_deploy_job) do
      create(:generic_commit_status, :failed, pipeline: pipeline,
                                              ref: build.ref,
                                              stage: 'deploy',
                                              name: 'pages:deploy')
    end

    before do
      create(:ci_job_artifact, :correct_checksum, file: file, job: build)
      create(:ci_job_artifact, file_type: :metadata, file_format: :gzip, file: metadata, job: build)
      build.reload
    end

    it 'marks older pages:deploy jobs retried' do
      expect(execute).to eq(:success)

      expect(older_deploy_job.reload).to be_retried
    end
  end

  private

  # The commit status the service creates for the deploy step.
  def deploy_status
    GenericCommitStatus.find_by(name: 'pages:deploy')
  end

  # Runs the service and returns only the :status key of its result hash.
  def execute
    subject.execute[:status]
  end
end
|