Move most of PagesWorker logic to UpdatePagesService

Kamil Trzcinski 2016-01-15 12:21:52 +01:00 committed by James Edwards-Jones
parent 1d159ffbf8
commit 6e70870a2e
6 changed files with 152 additions and 144 deletions

@@ -457,7 +457,7 @@ module Ci
       build_data = Gitlab::DataBuilder::Build.build(self)
       project.execute_hooks(build_data.dup, :build_hooks)
       project.execute_services(build_data.dup, :build_hooks)
-      UpdatePagesService.new(build_data).execute
+      PagesService.new(build_data).execute
       project.running_or_pending_build_count(force: true)
     end

@@ -1,4 +1,4 @@
-class UpdatePagesService
+class PagesService
   attr_reader :data

   def initialize(data)
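For context, the renamed PagesService stays a thin dispatcher between build hooks and Sidekiq. A minimal sketch of how the class plausibly continues beyond the truncated fragment above; the guard conditions and the perform_async call are assumptions inferred from the specs further down, not part of this diff:

# Sketch only; guards and enqueue call are assumptions, not shown in this diff.
class PagesService
  attr_reader :data

  def initialize(data)
    @data = data
  end

  def execute
    return unless Gitlab.config.pages.enabled      # stubbed in the spec below
    return unless data[:build_name] == 'pages'     # assumed guard
    return unless data[:build_status] == 'success' # assumed guard

    PagesWorker.perform_async(:deploy, data[:build_id]) # assumed enqueue
  end
end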

@@ -0,0 +1,132 @@
+module Projects
+  class UpdatePagesService < BaseService
+    BLOCK_SIZE = 32.kilobytes
+    MAX_SIZE = 1.terabyte
+
+    attr_reader :build
+
+    def initialize(project, build)
+      @project, @build = project, build
+    end
+
+    def execute
+      # Create status notifying the deployment of pages
+      @status = create_status
+      @status.run!
+
+      raise 'missing pages artifacts' unless build.artifacts_file?
+      raise 'pages are outdated' unless latest?
+
+      # Create temporary directory in which we will extract the artifacts
+      FileUtils.mkdir_p(tmp_path)
+      Dir.mktmpdir(nil, tmp_path) do |archive_path|
+        extract_archive!(archive_path)
+
+        # Check if we did extract public directory
+        archive_public_path = File.join(archive_path, 'public')
+        raise 'pages miss the public folder' unless Dir.exists?(archive_public_path)
+        raise 'pages are outdated' unless latest?
+
+        deploy_page!(archive_public_path)
+        success
+      end
+    rescue => e
+      error(e.message)
+    end
+
+    private
+
+    def success
+      @status.success
+      super
+    end
+
+    def error(message, http_status = nil)
+      @status.allow_failure = !latest?
+      @status.description = message
+      @status.drop
+      super
+    end
+
+    def create_status
+      GenericCommitStatus.new(
+        project: project,
+        commit: build.commit,
+        user: build.user,
+        ref: build.ref,
+        stage: 'deploy',
+        name: 'pages:deploy'
+      )
+    end
+
+    def extract_archive!(temp_path)
+      results = Open3.pipeline(%W(gunzip -c #{artifacts}),
+                               %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
+                               %W(tar -x -C #{temp_path} public/),
+                               err: '/dev/null')
+      raise 'pages failed to extract' unless results.compact.all?(&:success?)
+    end
+
+    def deploy_page!(archive_public_path)
+      # Do an atomic move of pages.
+      # Move and removal may not be atomic, but they are significantly faster than extracting and removal.
+      # 1. We move deployed public to previous public path (file removal is slow)
+      # 2. We move temporary public to be deployed public
+      # 3. We remove previous public path
+      FileUtils.mkdir_p(pages_path)
+      begin
+        FileUtils.move(public_path, previous_public_path)
+      rescue
+      end
+      FileUtils.move(archive_public_path, public_path)
+    ensure
+      FileUtils.rm_r(previous_public_path, force: true)
+    end
+
+    def latest?
+      # Check if the SHA for the ref is still the most recent one;
+      # this helps when multiple deployments happen
+      sha == latest_sha
+    end
+
+    def blocks
+      # Calculate dd parameters: we limit the size of pages
+      max_size = current_application_settings.max_pages_size.megabytes
+      max_size ||= MAX_SIZE
+      blocks = 1 + max_size / BLOCK_SIZE
+      blocks
+    end
+
+    def tmp_path
+      @tmp_path ||= File.join(Settings.pages.path, 'tmp')
+    end
+
+    def pages_path
+      @pages_path ||= project.pages_path
+    end
+
+    def public_path
+      @public_path ||= File.join(pages_path, 'public')
+    end
+
+    def previous_public_path
+      @previous_public_path ||= File.join(pages_path, "public.#{SecureRandom.hex}")
+    end
+
+    def ref
+      build.ref
+    end
+
+    def artifacts
+      build.artifacts_file.path
+    end
+
+    def latest_sha
+      project.commit(build.ref).try(:sha).to_s
+    end
+
+    def sha
+      build.sha
+    end
+  end
+end
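Since the new service inherits from BaseService, execute reports through a result hash instead of raising to its caller. A hypothetical caller-side view (the build lookup and logging are illustrative; the :status and :message keys follow GitLab's BaseService conventions):

build  = Ci::Build.find_by(id: build_id)   # build_id assumed in scope
result = Projects::UpdatePagesService.new(build.project, build).execute

if result[:status] == :success
  Rails.logger.info "pages deployed for #{build.project.path_with_namespace}"
else
  Rails.logger.warn "pages deploy failed: #{result[:message]}"
end

The gunzip | dd | tar pipeline in extract_archive! is what enforces the size limit: gunzip decompresses without bound, so the dd stage caps how much decompressed data tar can ever see. A worked example of the blocks arithmetic, assuming a hypothetical max_pages_size of 100 (MB); the byte helpers come from ActiveSupport:

require 'active_support/core_ext/numeric/bytes'

block_size = 32.kilobytes   # 32_768 bytes, as BLOCK_SIZE above
max_size   = 100.megabytes  # 104_857_600 bytes

blocks = 1 + max_size / block_size
puts blocks                 # => 3201
# dd bs=32768 count=3201 copies at most ~100 MB (plus one block of slack),
# so a decompression bomb is truncated instead of filling the disk; tar then
# fails on the truncated archive and extract_archive! raises.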

@@ -1,9 +1,5 @@
 class PagesWorker
   include Sidekiq::Worker
-  include Gitlab::CurrentSettings
-
-  BLOCK_SIZE = 32.kilobytes
-  MAX_SIZE = 1.terabyte

   sidekiq_options queue: :pages, retry: false
@@ -12,137 +8,12 @@ class PagesWorker
   end

   def deploy(build_id)
-    @build_id = build_id
-    return unless valid?
-
-    # Create status notifying the deployment of pages
-    @status = create_status
-    @status.run!
-
-    raise 'pages are outdated' unless latest?
-
-    # Create temporary directory in which we will extract the artifacts
-    FileUtils.mkdir_p(tmp_path)
-    Dir.mktmpdir(nil, tmp_path) do |archive_path|
-      extract_archive!(archive_path)
-
-      # Check if we did extract public directory
-      archive_public_path = File.join(archive_path, 'public')
-      raise 'pages miss the public folder' unless Dir.exists?(archive_public_path)
-      raise 'pages are outdated' unless latest?
-
-      deploy_page!(archive_public_path)
-      @status.success
-    end
-  rescue => e
-    fail(e.message, !latest?)
-    return false
+    build = Ci::Build.find_by(id: build_id)
+    Projects::UpdatePagesService.new(build.project, build).execute
   end

   def remove(namespace_path, project_path)
     full_path = File.join(Settings.pages.path, namespace_path, project_path)
     FileUtils.rm_r(full_path, force: true)
   end
-
-  private
-
-  def create_status
-    GenericCommitStatus.new(
-      project: project,
-      commit: build.commit,
-      user: build.user,
-      ref: build.ref,
-      stage: 'deploy',
-      name: 'pages:deploy'
-    )
-  end
-
-  def extract_archive!(temp_path)
-    results = Open3.pipeline(%W(gunzip -c #{artifacts}),
-                             %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
-                             %W(tar -x -C #{temp_path} public/),
-                             err: '/dev/null')
-    raise 'pages failed to extract' unless results.compact.all?(&:success?)
-  end
-
-  def deploy_page!(archive_public_path)
-    # Do an atomic move of pages.
-    # Move and removal may not be atomic, but they are significantly faster than extracting and removal.
-    # 1. We move deployed public to previous public path (file removal is slow)
-    # 2. We move temporary public to be deployed public
-    # 3. We remove previous public path
-    FileUtils.mkdir_p(pages_path)
-    begin
-      FileUtils.move(public_path, previous_public_path)
-    rescue
-    end
-    FileUtils.move(archive_public_path, public_path)
-  ensure
-    FileUtils.rm_r(previous_public_path, force: true)
-  end
-
-  def fail(message, allow_failure = true)
-    @status.allow_failure = allow_failure
-    @status.description = message
-    @status.drop
-  end
-
-  def valid?
-    build && build.artifacts_file?
-  end
-
-  def latest?
-    # Check if the SHA for the ref is still the most recent one;
-    # this helps when multiple deployments happen
-    sha == latest_sha
-  end
-
-  def blocks
-    # Calculate dd parameters: we limit the size of pages
-    max_size = current_application_settings.max_pages_size.megabytes
-    max_size ||= MAX_SIZE
-    blocks = 1 + max_size / BLOCK_SIZE
-    blocks
-  end
-
-  def build
-    @build ||= Ci::Build.find_by(id: @build_id)
-  end
-
-  def project
-    @project ||= build.project
-  end
-
-  def tmp_path
-    @tmp_path ||= File.join(Settings.pages.path, 'tmp')
-  end
-
-  def pages_path
-    @pages_path ||= project.pages_path
-  end
-
-  def public_path
-    @public_path ||= File.join(pages_path, 'public')
-  end
-
-  def previous_public_path
-    @previous_public_path ||= File.join(pages_path, "public.#{SecureRandom.hex}")
-  end
-
-  def ref
-    build.ref
-  end
-
-  def artifacts
-    build.artifacts_file.path
-  end
-
-  def latest_sha
-    project.commit(build.ref).try(:sha).to_s
-  end
-
-  def sha
-    build.sha
-  end
 end
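Taken together, the worker that remains after this commit is pure queueing glue, roughly as below; the perform dispatcher is an assumption (only its closing end is visible as context at the top of the second hunk):

class PagesWorker
  include Sidekiq::Worker

  sidekiq_options queue: :pages, retry: false

  # Assumed dispatcher; not shown in the diff above.
  def perform(action, *args)
    send(action, *args)
  end

  def deploy(build_id)
    build = Ci::Build.find_by(id: build_id)
    Projects::UpdatePagesService.new(build.project, build).execute
  end

  def remove(namespace_path, project_path)
    full_path = File.join(Settings.pages.path, namespace_path, project_path)
    FileUtils.rm_r(full_path, force: true)
  end
end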

@@ -1,9 +1,9 @@
 require 'spec_helper'

-describe UpdatePagesService, services: true do
+describe PagesService, services: true do
   let(:build) { create(:ci_build) }
   let(:data) { Gitlab::BuildDataBuilder.build(build) }
-  let(:service) { UpdatePagesService.new(data) }
+  let(:service) { PagesService.new(data) }

   before do
     allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
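The data this spec feeds the service is the build-hook payload built by Gitlab::BuildDataBuilder. A hypothetical minimal payload for exercising the service in a console (field names assumed from GitLab's build webhook format, not shown in this diff):

data = {
  build_id: 42,              # hypothetical id
  build_name: 'pages',
  build_status: 'success'
}

PagesService.new(data).execute # should enqueue PagesWorker when Pages is enabled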

@@ -1,13 +1,14 @@
 require "spec_helper"

-describe PagesWorker do
+describe Projects::UpdatePagesService do
   let(:project) { create :project }
   let(:commit) { create :ci_commit, project: project, sha: project.commit('HEAD').sha }
   let(:build) { create :ci_build, commit: commit, ref: 'HEAD' }
-  let(:worker) { PagesWorker.new }
   let(:file) { fixture_file_upload(Rails.root + 'spec/fixtures/pages.tar.gz', 'application/octet-stream') }
   let(:empty_file) { fixture_file_upload(Rails.root + 'spec/fixtures/pages_empty.tar.gz', 'application/octet-stream') }
   let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'application/octet-stream') }

+  subject { described_class.new(project, build) }
+
   before do
     project.remove_pages
@@ -18,19 +19,19 @@ describe PagesWorker do
   it 'succeeds' do
     expect(project.pages_url).to be_nil
-    expect(worker.deploy(build.id)).to be_truthy
+    expect(execute).to eq(:success)
     expect(project.pages_url).to_not be_nil
   end

   it 'limits pages size' do
     stub_application_setting(max_pages_size: 1)
-    expect(worker.deploy(build.id)).to_not be_truthy
+    expect(execute).to_not eq(:success)
   end

   it 'removes pages after destroy' do
     expect(PagesWorker).to receive(:perform_in)
     expect(project.pages_url).to be_nil
-    expect(worker.deploy(build.id)).to be_truthy
+    expect(execute).to eq(:success)
     expect(project.pages_url).to_not be_nil
     project.destroy
     expect(Dir.exist?(project.public_pages_path)).to be_falsey
@@ -44,22 +45,26 @@ describe PagesWorker do
   end

   it 'fails if no artifacts' do
-    expect(worker.deploy(build.id)).to_not be_truthy
+    expect(execute).to_not eq(:success)
   end

   it 'fails for empty file' do
     build.update_attributes(artifacts_file: empty_file)
-    expect(worker.deploy(build.id)).to_not be_truthy
+    expect(execute).to_not eq(:success)
   end

   it 'fails for invalid archive' do
     build.update_attributes(artifacts_file: invalid_file)
-    expect(worker.deploy(build.id)).to_not be_truthy
+    expect(execute).to_not eq(:success)
   end

   it 'fails if sha on branch is not latest' do
     commit.update_attributes(sha: 'old_sha')
     build.update_attributes(artifacts_file: file)
-    expect(worker.deploy(build.id)).to_not be_truthy
+    expect(execute).to_not eq(:success)
   end
+
+  def execute
+    subject.execute[:status]
+  end
 end
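The execute helper added at the bottom reads the :status key because BaseService-derived services report through result hashes rather than booleans, which is also why the worker's truthy-return assertions above were replaced. A sketch of the helpers assumed here (reached via the super calls in the service's success/error overrides):

# Assumed shape of GitLab's BaseService helpers; not part of this diff.
def success
  { status: :success }
end

def error(message, http_status = nil)
  { status: :error, message: message, http_status: http_status }
end

# hence: subject.execute[:status]  # => :success or :error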