Fix optimistic locking

Kamil Trzcinski 2016-10-26 11:34:40 +02:00
parent 47b2add4f6
commit d8aed6a27b
7 changed files with 27 additions and 16 deletions
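Context for the diff below, inferred from the commit title and the new lock_version attributes: Ci::Pipeline and CommitStatus rows are now protected by Rails' optimistic locking, so concurrent writes to the same row fail loudly instead of silently overwriting each other. A minimal sketch of the mechanism (CommitStatus and the id 42 are used purely as an example):

# Once a table has a lock_version column, ActiveRecord includes it in every
# UPDATE and raises when the in-memory copy has gone stale.
status_a = CommitStatus.find(42)     # lock_version: 0
status_b = CommitStatus.find(42)     # second, soon-to-be-stale copy of the same row

status_a.update(queued_at: Time.now) # UPDATE ... WHERE id = 42 AND lock_version = 0
status_b.update(queued_at: Time.now) # raises ActiveRecord::StaleObjectError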


@@ -73,16 +73,16 @@ class CommitStatus < ActiveRecord::Base
       transition [:created, :pending, :running] => :canceled
     end
-    after_transition created: [:pending, :running] do |commit_status|
-      commit_status.update_attributes queued_at: Time.now
+    before_transition created: [:pending, :running] do |commit_status|
+      commit_status.queued_at = Time.now
     end
-    after_transition [:created, :pending] => :running do |commit_status|
-      commit_status.update_attributes started_at: Time.now
+    before_transition [:created, :pending] => :running do |commit_status|
+      commit_status.started_at = Time.now
     end
-    after_transition any => [:success, :failed, :canceled] do |commit_status|
-      commit_status.update_attributes finished_at: Time.now
+    before_transition any => [:success, :failed, :canceled] do |commit_status|
+      commit_status.finished_at = Time.now
     end
     after_transition do |commit_status, transition|

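Switching from after_transition with update_attributes to before_transition with plain assignment means the timestamps are persisted by the same save that writes the new status, so each transition issues a single UPDATE instead of two. A rough illustration of that behaviour in the state_machine gem, using a hypothetical Job model (not GitLab code):

# before_transition callbacks run before the save performed by the event, so
# anything assigned here is written together with the status change.
class Job < ActiveRecord::Base
  state_machine :status, initial: :created do
    event :run do
      transition created: :running
    end

    before_transition created: :running do |job|
      job.started_at = Time.now   # saved with the status, no extra UPDATE
    end
  end
end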

@@ -40,10 +40,12 @@ module Ci
     def process_build(build, current_status)
       if valid_statuses_for_when(build.when).include?(current_status)
-        build.enqueue
+        r = build.enqueue
+        puts "process_build: #{build.id}: enqueue: #{build.status} => #{r}"
         true
       else
-        build.skip
+        r = build.skip
+        puts "process_build: #{build.id}: skip: #{build.status} => #{r}"
         false
       end
     end

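The captured return value is what makes these puts lines (which look like temporary debugging output) informative: with the state_machine gem, a generated event method such as enqueue or skip returns false rather than raising when no transition from the current state applies. A small sketch of that behaviour, reusing the hypothetical Job model from above:

job = Job.create!
job.run   # => true  (created -> running)
job.run   # => false (no transition from running is defined)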

@@ -178,6 +178,7 @@ Ci::Pipeline:
 - finished_at
 - duration
 - user_id
+- lock_version
 CommitStatus:
 - id
 - project_id
@@ -217,6 +218,7 @@ CommitStatus:
 - yaml_variables
 - queued_at
 - token
+- lock_version
 Ci::Variable:
 - id
 - project_id

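lock_version is the column name ActiveRecord's optimistic locking looks for, and this file appears to be the import/export safe-attributes list, which has to be kept in sync with the schema. The migration adding the columns is not among the seven files in this commit; purely as a sketch (class and table names assumed, not taken from this diff), it would look something like:

# Hypothetical migration, shown only to connect lock_version to the schema;
# the real migration and table names are not part of this commit.
class AddLockVersionToCiTables < ActiveRecord::Migration
  def change
    add_column :ci_builds, :lock_version, :integer
    add_column :ci_pipelines, :lock_version, :integer
  end
end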

@@ -138,9 +138,9 @@ describe Ci::Pipeline, models: true do
   describe 'state machine' do
     let(:current) { Time.now.change(usec: 0) }
-    let(:build) { create_build('build1', current, 10) }
-    let(:build_b) { create_build('build2', current, 20) }
-    let(:build_c) { create_build('build3', current + 50, 10) }
+    let(:build) { create_build('build1', 0) }
+    let(:build_b) { create_build('build2', 0) }
+    let(:build_c) { create_build('build3', 0) }
     describe '#duration' do
       before do
@@ -163,11 +163,12 @@ describe Ci::Pipeline, models: true do
           build_c.success
         end
-        pipeline.drop
+        # We have to reload pipeline, because its status is updated by processing builds
+        pipeline.reload.drop
       end
       it 'matches sum of builds duration' do
         pipeline.reload
+        binding.pry
         expect(pipeline.duration).to eq(40)
       end
@@ -455,7 +456,9 @@ describe Ci::Pipeline, models: true do
     context 'when all builds succeed' do
       before do
         build_a.success
-        build_b.success
+        # We have to reload build_b as this is in next stage and it gets triggered by PipelineProcessWorker
+        build_b.reload.success
       end
       it 'receives a success event once' do

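The reload calls added throughout these specs follow from the locking change: once another process (for example PipelineProcessWorker, as the spec comment above notes) has updated the row, firing an event on the old in-memory copy would save stale attributes and, with lock_version in place, raise ActiveRecord::StaleObjectError. A sketch of the pattern the specs switch to:

build_b.success          # stale copy: may now raise ActiveRecord::StaleObjectError
build_b.reload.success   # refetch current attributes (including lock_version) first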

@@ -277,6 +277,7 @@ describe API::API, api: true do
     context 'with regular branch' do
       before do
+        pipeline.reload
         pipeline.update(ref: 'master',
                         sha: project.commit('master').sha)
@@ -288,6 +289,7 @@ describe API::API, api: true do
     context 'with branch name containing slash' do
       before do
+        pipeline.reload
         pipeline.update(ref: 'improve/awesome',
                         sha: project.commit('improve/awesome').sha)
       end


@@ -101,11 +101,11 @@ module Ci
       it 'equalises number of running builds' do
         # after finishing the first build for project 1, get a second build from the same project
         expect(service.execute(shared_runner)).to eq(build1_project1)
-        build1_project1.success
+        build1_project1.reload.success
         expect(service.execute(shared_runner)).to eq(build2_project1)
         expect(service.execute(shared_runner)).to eq(build1_project2)
-        build1_project2.success
+        build1_project2.reload.success
         expect(service.execute(shared_runner)).to eq(build2_project2)
         expect(service.execute(shared_runner)).to eq(build1_project3)
         expect(service.execute(shared_runner)).to eq(build3_project1)


@@ -147,6 +147,7 @@ describe MergeRequests::MergeWhenBuildSucceedsService do
         expect(MergeWorker).not_to receive(:perform_async)
         build.success
+        test.reload
         test.drop
       end
@@ -154,6 +155,7 @@ describe MergeRequests::MergeWhenBuildSucceedsService do
         expect(MergeWorker).to receive(:perform_async)
         build.success
+        test.reload
         test.success
       end
     end