2018-07-17 12:50:37 -04:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2016-01-15 06:21:52 -05:00
|
|
|
module Projects
  # Deploys a CI build's artifacts as the project's GitLab Pages site.
  #
  # Flow (see #execute): record an attempt metric, attach a `pages:deploy`
  # commit status to the build's pipeline, validate the artifacts (presence,
  # metadata, size, entry count, SHA freshness), create a PagesDeployment from
  # the artifacts archive, and schedule destruction of superseded deployments.
  class UpdatePagesService < BaseService
    # Raised when the build/artifacts are not in a deployable state.
    # Rescued in #execute and surfaced through the commit status; unlike
    # other StandardErrors it is NOT re-raised.
    InvalidStateError = Class.new(StandardError)

    BLOCK_SIZE = 32.kilobytes
    PUBLIC_DIR = 'public'

    # old deployment can be cached by pages daemon
    # so we need to give pages daemon some time update cache
    # 10 minutes is enough, but 30 feels safer
    OLD_DEPLOYMENTS_DESTRUCTION_DELAY = 30.minutes.freeze

    attr_reader :build

    # @param project [Project] the project whose Pages site is updated
    # @param build [Ci::Build] the `pages` job whose artifacts are deployed
    def initialize(project, build)
      @project = project
      @build = build
    end

    # Runs the deployment. Returns the result of #success on the happy path.
    # Validation failures (InvalidStateError) are reported via #error and
    # swallowed; any other StandardError is reported and re-raised.
    def execute
      register_attempt

      # Create status notifying the deployment of pages
      @status = build_commit_status
      ::Ci::Pipelines::AddJobService.new(@build.pipeline).execute!(@status) do |job|
        job.enqueue!
        job.run!
      end

      validate_state!
      validate_max_size!
      validate_max_entries!

      build.artifacts_file.use_file do |artifacts_path|
        create_pages_deployment(artifacts_path, build)
        success
      end
    rescue InvalidStateError => e
      error(e.message)
    rescue StandardError => e
      error(e.message)
      raise e
    end

    private

    # Marks the commit status green and the project as deployed, then defers
    # to BaseService#success for the service result payload.
    def success
      @status.success
      @project.mark_pages_as_deployed(artifacts_archive: build.job_artifacts_archive)
      super
    end

    # Records the failure metric, logs, and drops the commit status.
    # The failure is only blocking (allow_failure = false) when this build
    # is still the latest for its ref; an outdated build failing is benign.
    def error(message)
      register_failure
      log_error("Projects::UpdatePagesService: #{message}")
      @status.allow_failure = !latest?
      @status.description = message
      @status.drop(:script_failure)
      super
    end

    # Builds (without saving) the `pages:deploy` commit status attached to
    # the pipeline in #execute.
    def build_commit_status
      GenericCommitStatus.new(
        user: build.user,
        stage: 'deploy',
        name: 'pages:deploy'
      )
    end

    # Creates the PagesDeployment record from the artifacts archive at
    # +artifacts_path+, promotes it to the project's current deployment, and
    # schedules cleanup of older deployments after the daemon-cache delay.
    def create_pages_deployment(artifacts_path, build)
      sha256 = build.job_artifacts_archive.file_sha256

      deployment = nil
      File.open(artifacts_path) do |file|
        deployment = project.pages_deployments.create!(file: file,
                                                       file_count: entries_count,
                                                       file_sha256: sha256,
                                                       ci_build_id: build.id
                                                      )

        # Re-check freshness after the (potentially slow) upload: another
        # pipeline may have deployed while we were writing.
        validate_outdated_sha!

        project.update_pages_deployment!(deployment)
      end

      DestroyPagesDeploymentsWorker.perform_in(
        OLD_DEPLOYMENTS_DESTRUCTION_DELAY,
        project.id,
        deployment.id
      )
    end

    def ref
      build.ref
    end

    def artifacts
      build.artifacts_file.path
    end

    # SHA of the most recent commit on the build's ref, or "" if the ref
    # cannot be resolved.
    def latest_sha
      project.commit(build.ref).try(:sha).to_s
    ensure
      # Close any file descriptors that were opened and free libgit2 buffers
      project.cleanup
    end

    def sha
      build.sha
    end

    def register_attempt
      pages_deployments_total_counter.increment
    end

    def register_failure
      pages_deployments_failed_total_counter.increment
    end

    def pages_deployments_total_counter
      @pages_deployments_total_counter ||= Gitlab::Metrics.counter(:pages_deployments_total, "Counter of GitLab Pages deployments triggered")
    end

    def pages_deployments_failed_total_counter
      @pages_deployments_failed_total_counter ||= Gitlab::Metrics.counter(:pages_deployments_failed_total, "Counter of GitLab Pages deployments which failed")
    end

    # Raises InvalidStateError unless the build carries artifacts plus
    # metadata and is not outdated for its ref.
    def validate_state!
      raise InvalidStateError, 'missing pages artifacts' unless build.artifacts?
      raise InvalidStateError, 'missing artifacts metadata' unless build.artifacts_metadata?

      validate_outdated_sha!
    end

    # Raises InvalidStateError when a newer pipeline has already deployed
    # this ref. Passing when the build's SHA is still the ref head, or when
    # the last deployed pipeline id does not exceed this build's pipeline id.
    def validate_outdated_sha!
      return if latest?

      # use pipeline_id in case the build is retried
      last_deployed_pipeline_id = project.pages_metadatum&.pages_deployment&.ci_build&.pipeline_id

      return unless last_deployed_pipeline_id
      return if last_deployed_pipeline_id <= build.pipeline_id

      raise InvalidStateError, 'build SHA is outdated for this ref'
    end

    def latest?
      # check if sha for the ref is still the most recent one
      # this helps in case when multiple deployments happens
      sha == latest_sha
    end

    def validate_max_size!
      if total_size > max_size
        raise InvalidStateError, "artifacts for pages are too large: #{total_size}"
      end
    end

    # Calculate page size after extract
    def total_size
      @total_size ||= build.artifacts_metadata_entry(PUBLIC_DIR + '/', recursive: true).total_size
    end

    def max_size_from_settings
      Gitlab::CurrentSettings.max_pages_size.megabytes
    end

    # Effective size limit; a setting of 0 means "no explicit limit", capped
    # by the global hard maximum.
    def max_size
      max_pages_size = max_size_from_settings

      return ::Gitlab::Pages::MAX_SIZE if max_pages_size == 0

      max_pages_size
    end

    def validate_max_entries!
      if pages_file_entries_limit > 0 && entries_count > pages_file_entries_limit
        raise InvalidStateError, "pages site contains #{entries_count} file entries, while limit is set to #{pages_file_entries_limit}"
      end
    end

    # we're using the full archive and pages daemon needs to read it
    # so we want the total count from entries, not only "public/" directory
    # because it better approximates work we need to do before we can serve the site
    #
    # Memoized with ||= (was plain assignment, which re-read the artifacts
    # metadata on every call) — this method is called by both
    # #validate_max_entries! and #create_pages_deployment, and the metadata
    # does not change within a single deployment.
    def entries_count
      @entries_count ||= build.artifacts_metadata_entry("", recursive: true).entries.count
    end

    def pages_file_entries_limit
      project.actual_limits.pages_file_entries
    end
  end
end
|
# NOTE(review): prepend_mod_with appears to mix an EE/extension override
# module into this service when one is defined — confirm against the
# GitLab EE extension conventions.
Projects::UpdatePagesService.prepend_mod_with('Projects::UpdatePagesService')