2016-01-15 06:21:52 -05:00
|
|
|
module Projects
  # Deploys a project's GitLab Pages site by extracting the `public/`
  # directory from a CI build's artifacts archive into the project's
  # pages storage. The deployment is surfaced in the pipeline through a
  # GenericCommitStatus named 'pages:deploy', and attempts/failures are
  # tracked with Gitlab::Metrics counters.
  class UpdatePagesService < BaseService
    include Gitlab::CurrentSettings

    # Block size handed to `dd` when capping how much of the tar stream
    # may be extracted.
    BLOCK_SIZE = 32.kilobytes

    # Hard upper bound on the size of an extracted pages site.
    MAX_SIZE = 1.terabyte

    # Only this directory inside the artifacts archive gets deployed.
    SITE_PATH = 'public/'.freeze

    attr_reader :build

    def initialize(project, build)
      @project = project
      @build = build
    end

    # Runs the deployment. Returns the service result hash (`success` /
    # `error`); any exception raised while extracting or deploying is
    # converted into an error result. Artifacts are erased afterwards
    # unless they expire on their own.
    def execute
      # Create status notifying the deployment of pages
      @status = create_status
      @status.enqueue!
      @status.run!

      raise 'missing pages artifacts' unless build.artifacts?
      raise 'pages are outdated' unless latest?

      # Extract into a scratch directory that is removed automatically
      # when the block finishes.
      FileUtils.mkdir_p(tmp_path)
      Dir.mktmpdir(nil, tmp_path) do |archive_path|
        extract_archive!(archive_path)

        # The archive must actually have contained a public directory.
        archive_public_path = File.join(archive_path, 'public')
        raise 'pages miss the public folder' unless Dir.exist?(archive_public_path)

        # Re-check freshness: another deployment may have won the race
        # while we were extracting.
        raise 'pages are outdated' unless latest?

        deploy_page!(archive_public_path)
        success
      end
    rescue => e
      register_failure
      error(e.message)
    ensure
      register_attempt
      build.erase_artifacts! unless build.has_expiring_artifacts?
    end

    private

    def success
      @status.success
      super
    end

    # Drops the commit status with the given message. The failure is
    # allowed (does not fail the pipeline) when a newer deployment has
    # already superseded this one.
    def error(message, http_status = nil)
      log_error("Projects::UpdatePagesService: #{message}")
      @status.allow_failure = !latest?
      @status.description = message
      @status.drop(:script_failure)
      super
    end

    # Builds (without persisting) the commit status that represents this
    # deployment in the build's pipeline.
    def create_status
      GenericCommitStatus.new(
        project: project,
        pipeline: build.pipeline,
        user: build.user,
        ref: build.ref,
        stage: 'deploy',
        name: 'pages:deploy'
      )
    end

    # Dispatches to the extractor matching the artifacts file extension.
    def extract_archive!(temp_path)
      case artifacts
      when /\.tar\.gz\z/, /\.tgz\z/
        extract_tar_archive!(temp_path)
      when /\.zip\z/
        extract_zip_archive!(temp_path)
      else
        raise 'unsupported artifacts format'
      end
    end

    # Streams the gzipped tarball through `dd` (to cap the amount of
    # data at max_size) into `tar`, extracting only SITE_PATH.
    def extract_tar_archive!(temp_path)
      commands = [
        %W(gunzip -c #{artifacts}),
        %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
        %W(tar -x -C #{temp_path} #{SITE_PATH})
      ]
      statuses = Open3.pipeline(*commands, err: '/dev/null')

      raise 'pages failed to extract' unless statuses.compact.all?(&:success?)
    end

    def extract_zip_archive!(temp_path)
      raise 'missing artifacts metadata' unless build.artifacts_metadata?

      # The zip metadata lets us compute the extracted size up front,
      # before touching the archive itself.
      public_entry = build.artifacts_metadata_entry(SITE_PATH, recursive: true)

      if public_entry.total_size > max_size
        raise "artifacts for pages are too large: #{public_entry.total_size}"
      end

      # Requires UnZip at least 6.00 Info-ZIP.
      # -qq be (very) quiet
      # -n never overwrite existing files
      # The trailing * makes unzip extract SITE_PATH together with all
      # of its subdirectories.
      site_path = File.join(SITE_PATH, '*')
      unless system(*%W(unzip -qq -n #{artifacts} #{site_path} -d #{temp_path}))
        raise 'pages failed to extract'
      end
    end

    # Swaps the freshly extracted site into place using renames, which
    # are fast, instead of extracting/removing in place, which is slow:
    #   1. move the live public dir aside (removal is deferred),
    #   2. move the new public dir into place,
    #   3. remove the old dir in the ensure clause.
    # The two moves together are not atomic, but the window is tiny
    # compared to extracting directly into the live directory.
    def deploy_page!(archive_public_path)
      FileUtils.mkdir_p(pages_path)
      begin
        FileUtils.move(public_path, previous_public_path)
      rescue
        # First deployment: no previous public dir exists to move aside.
      end
      FileUtils.move(archive_public_path, public_path)
    ensure
      FileUtils.rm_r(previous_public_path, force: true)
    end

    # True while the build's SHA is still the newest commit on its ref;
    # guards against concurrent deployments clobbering newer content.
    def latest?
      sha == latest_sha
    end

    # Number of BLOCK_SIZE blocks `dd` may pass through before cutting
    # the stream off — the pages size limit expressed in dd's terms.
    def blocks
      1 + max_size / BLOCK_SIZE
    end

    # Effective size limit: the admin-configured max_pages_size (in MB)
    # capped at MAX_SIZE; a configured value of 0 falls back to MAX_SIZE.
    def max_size
      configured_size = current_application_settings.max_pages_size.megabytes

      return MAX_SIZE if configured_size.zero?

      [configured_size, MAX_SIZE].min
    end

    def tmp_path
      @tmp_path ||= File.join(::Settings.pages.path, 'tmp')
    end

    def pages_path
      @pages_path ||= project.pages_path
    end

    def public_path
      @public_path ||= File.join(pages_path, 'public')
    end

    # Randomized name so a crashed deploy never collides with the next.
    def previous_public_path
      @previous_public_path ||= File.join(pages_path, "public.#{SecureRandom.hex}")
    end

    def ref
      build.ref
    end

    def artifacts
      build.artifacts_file.path
    end

    def latest_sha
      project.commit(build.ref).try(:sha).to_s
    end

    def sha
      build.sha
    end

    def register_attempt
      pages_deployments_total_counter.increment
    end

    def register_failure
      pages_deployments_failed_total_counter.increment
    end

    def pages_deployments_total_counter
      @pages_deployments_total_counter ||= Gitlab::Metrics.counter(:pages_deployments_total, "Counter of GitLab Pages deployments triggered")
    end

    def pages_deployments_failed_total_counter
      @pages_deployments_failed_total_counter ||= Gitlab::Metrics.counter(:pages_deployments_failed_total, "Counter of GitLab Pages deployments which failed")
    end
  end
end
|