require 'spec_helper'

describe GitPushService, services: true do
  include RepoHelpers

  let(:user) { create(:user) }
  let(:project) { create(:project, :repository) }
  let(:blankrev) { Gitlab::Git::BLANK_SHA }
  let(:oldrev) { sample_commit.parent_id }
  let(:newrev) { sample_commit.id }
  let(:ref) { 'refs/heads/master' }

  before do
    project.add_master(user)
  end

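  # NOTE: unlike most blocks below, the remote mirror examples build the service
  # directly (instead of using the execute_service helper at the bottom of this
  # file) so message expectations can be set on `project` before `subject.execute`.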
  describe 'with remote mirrors' do
    let(:project) { create(:project, :repository, :remote_mirror) }

    subject do
      described_class.new(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
    end

    context 'when remote mirror feature is enabled' do
      it 'fails stuck remote mirrors' do
        allow(project).to receive(:update_remote_mirrors).and_return(project.remote_mirrors)
        expect(project).to receive(:mark_stuck_remote_mirrors_as_failed!)

        subject.execute
      end

      it 'updates remote mirrors' do
        expect(project).to receive(:update_remote_mirrors)

        subject.execute
      end
    end

    context 'when remote mirror feature is disabled' do
      before do
        stub_application_setting(mirror_available: false)
      end

      context 'with remote mirrors global setting overridden' do
        before do
          project.remote_mirror_available_overridden = true
        end

        it 'fails stuck remote mirrors' do
          allow(project).to receive(:update_remote_mirrors).and_return(project.remote_mirrors)
          expect(project).to receive(:mark_stuck_remote_mirrors_as_failed!)

          subject.execute
        end

        it 'updates remote mirrors' do
          expect(project).to receive(:update_remote_mirrors)

          subject.execute
        end
      end

      context 'without remote mirrors global setting overridden' do
        before do
          project.remote_mirror_available_overridden = false
        end

        it 'does not fail stuck remote mirrors' do
          expect(project).not_to receive(:mark_stuck_remote_mirrors_as_failed!)

          subject.execute
        end

        it 'does not update remote mirrors' do
          expect(project).not_to receive(:update_remote_mirrors)

          subject.execute
        end
      end
    end
  end

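  # The branch examples drive the service through the execute_service helper
  # defined at the bottom of this spec; Gitlab::Git::BLANK_SHA (blankrev) stands
  # in for the missing side of a branch creation or deletion push.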
  describe 'Push branches' do
    subject do
      execute_service(project, user, oldrev, newrev, ref)
    end

    context 'new branch' do
      let(:oldrev) { blankrev }

      it { is_expected.to be_truthy }

      it 'calls the after_push_commit hook' do
        expect(project.repository).to receive(:after_push_commit).with('master')

        subject
      end

      it 'calls the after_create_branch hook' do
        expect(project.repository).to receive(:after_create_branch)

        subject
      end
    end

    context 'existing branch' do
      it { is_expected.to be_truthy }

      it 'calls the after_push_commit hook' do
        expect(project.repository).to receive(:after_push_commit).with('master')

        subject
      end
    end

    context 'rm branch' do
      let(:newrev) { blankrev }

      it { is_expected.to be_truthy }

      it 'calls the after_push_commit hook' do
        expect(project.repository).to receive(:after_push_commit).with('master')

        subject
      end

      it 'calls the after_remove_branch hook' do
        expect(project.repository).to receive(:after_remove_branch)

        subject
      end
    end
  end

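  # push_data_from_service (bottom of this file) exposes the payload the service
  # builds and later hands to hook workers, so these examples assert its
  # structure key by key.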
  describe "Git Push Data" do
    let(:commit) { project.commit(newrev) }

    subject { push_data_from_service(project, user, oldrev, newrev, ref) }

    it { is_expected.to include(object_kind: 'push') }
    it { is_expected.to include(before: oldrev) }
    it { is_expected.to include(after: newrev) }
    it { is_expected.to include(ref: ref) }
    it { is_expected.to include(user_id: user.id) }
    it { is_expected.to include(user_name: user.name) }
    it { is_expected.to include(project_id: project.id) }

    context "with repository data" do
      subject { push_data_from_service(project, user, oldrev, newrev, ref)[:repository] }

      it { is_expected.to include(name: project.name) }
      it { is_expected.to include(url: project.url_to_repo) }
      it { is_expected.to include(description: project.description) }
      it { is_expected.to include(homepage: project.web_url) }
    end

    context "with commits" do
      subject { push_data_from_service(project, user, oldrev, newrev, ref)[:commits] }

      it { is_expected.to be_an(Array) }

      it 'has 1 element' do
        expect(subject.size).to eq(1)
      end

      context "the commit" do
        subject { push_data_from_service(project, user, oldrev, newrev, ref)[:commits].first }

        it { is_expected.to include(id: commit.id) }
        it { is_expected.to include(message: commit.safe_message) }
        it { expect(subject[:timestamp].in_time_zone).to eq(commit.date.in_time_zone) }

        it do
          is_expected.to include(
            url: [
              Gitlab.config.gitlab.url,
              project.namespace.to_param,
              project.to_param,
              'commit',
              commit.id
            ].join('/')
          )
        end

        context "with an author" do
          subject { push_data_from_service(project, user, oldrev, newrev, ref)[:commits].first[:author] }

          it { is_expected.to include(name: commit.author_name) }
          it { is_expected.to include(email: commit.author_email) }
        end
      end
    end
  end

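  # Assumes stub_ci_pipeline_to_return_yaml_file is a spec support helper that
  # stubs the CI config lookup, so pipeline creation does not depend on reading
  # a real .gitlab-ci.yml from the test repository.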
  describe "Pipelines" do
    subject { execute_service(project, user, oldrev, newrev, ref) }

    before do
      stub_ci_pipeline_to_return_yaml_file
    end

    it "creates a new pipeline" do
      expect { subject }.to change { Ci::Pipeline.count }
      expect(Ci::Pipeline.last).to be_push
    end
  end

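  # let! runs the push eagerly before each example, so the Event and its
  # PushEventPayload already exist when the expectations below are evaluated.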
  describe "Push Event" do
    let!(:push_data) { push_data_from_service(project, user, oldrev, newrev, ref) }
    let(:event) { Event.find_by_action(Event::PUSHED) }

    it { expect(event).to be_an_instance_of(PushEvent) }
    it { expect(event.project).to eq(project) }
    it { expect(event.action).to eq(Event::PUSHED) }
    it { expect(event.push_event_payload).to be_an_instance_of(PushEventPayload) }
    it { expect(event.push_event_payload.commit_from).to eq(oldrev) }
    it { expect(event.push_event_payload.commit_to).to eq(newrev) }
    it { expect(event.push_event_payload.ref).to eq('master') }

    context "Updates merge requests" do
      it "when pushing a new branch for the first time" do
        expect(UpdateMergeRequestsWorker).to receive(:perform_async)
          .with(project.id, user.id, blankrev, 'newrev', ref)

        execute_service(project, user, blankrev, 'newrev', ref)
      end
    end

    context "Sends System Push data" do
      it "when pushing on a branch" do
        expect(SystemHookPushWorker).to receive(:perform_async).with(push_data, :push_hooks)

        execute_service(project, user, oldrev, newrev, ref)
      end
    end
  end

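  # .gitattributes are only copied for pushes that touch the default branch;
  # the non-default branch context below stubs default_branch to show that.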
  describe "Updates git attributes" do
    context "for default branch" do
      it "calls the copy attributes method for the first push to the default branch" do
        expect(project.repository).to receive(:copy_gitattributes).with('master')

        execute_service(project, user, blankrev, 'newrev', ref)
      end

      it "calls the copy attributes method for changes to the default branch" do
        expect(project.repository).to receive(:copy_gitattributes).with(ref)

        execute_service(project, user, 'oldrev', 'newrev', ref)
      end
    end

    context "for non-default branch" do
      before do
        # Make sure the "default" branch is different
        allow(project).to receive(:default_branch).and_return('not-master')
      end

      it "does not call copy attributes method" do
        expect(project.repository).not_to receive(:copy_gitattributes)

        execute_service(project, user, oldrev, newrev, ref)
      end
    end
  end

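  # The first push of the default branch also creates its protected-branch
  # records, with access levels driven by the default_branch_protection setting.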
  describe "Webhooks" do
    context "execute webhooks" do
      it "when pushing a branch for the first time" do
        expect(project).to receive(:execute_hooks)
        expect(project.default_branch).to eq("master")

        execute_service(project, user, blankrev, 'newrev', ref)

        expect(project.protected_branches).not_to be_empty
        expect(project.protected_branches.first.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::MASTER])
        expect(project.protected_branches.first.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::MASTER])
      end

      it "when pushing a branch for the first time with default branch protection disabled" do
        stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_NONE)

        expect(project).to receive(:execute_hooks)
        expect(project.default_branch).to eq("master")

        execute_service(project, user, blankrev, 'newrev', ref)

        expect(project.protected_branches).to be_empty
      end

      it "when pushing a branch for the first time with default branch protection set to 'developers can push'" do
        stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_PUSH)

        expect(project).to receive(:execute_hooks)
        expect(project.default_branch).to eq("master")

        execute_service(project, user, blankrev, 'newrev', ref)

        expect(project.protected_branches).not_to be_empty
        expect(project.protected_branches.last.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::DEVELOPER])
        expect(project.protected_branches.last.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::MASTER])
      end

      it "when pushing a branch for the first time with an existing branch permission configured" do
        stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_PUSH)

        create(:protected_branch, :no_one_can_push, :developers_can_merge, project: project, name: 'master')
        expect(project).to receive(:execute_hooks)
        expect(project.default_branch).to eq("master")
        expect_any_instance_of(ProtectedBranches::CreateService).not_to receive(:execute)

        execute_service(project, user, blankrev, 'newrev', ref)

        expect(project.protected_branches).not_to be_empty
        expect(project.protected_branches.last.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::NO_ACCESS])
        expect(project.protected_branches.last.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::DEVELOPER])
      end

      it "when pushing a branch for the first time with default branch protection set to 'developers can merge'" do
        stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_MERGE)

        expect(project).to receive(:execute_hooks)
        expect(project.default_branch).to eq("master")

        execute_service(project, user, blankrev, 'newrev', ref)

        expect(project.protected_branches).not_to be_empty
        expect(project.protected_branches.first.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::MASTER])
        expect(project.protected_branches.first.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::DEVELOPER])
      end

      it "when pushing new commits to existing branch" do
        expect(project).to receive(:execute_hooks)

        execute_service(project, user, 'oldrev', 'newrev', ref)
      end
    end
  end

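  # ProcessCommitWorker#build_commit is stubbed to return the doubled commit so
  # the cross-reference logic sees the stubbed safe_message and references.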
  describe "cross-reference notes" do
    let(:issue) { create :issue, project: project }
    let(:commit_author) { create :user }
    let(:commit) { project.commit }

    before do
      project.add_developer(commit_author)
      project.add_developer(user)

      allow(commit).to receive_messages(
        safe_message: "this commit \n mentions #{issue.to_reference}",
        references: [issue],
        author_name: commit_author.name,
        author_email: commit_author.email
      )

      allow_any_instance_of(ProcessCommitWorker).to receive(:build_commit)
        .and_return(commit)

      allow(project.repository).to receive(:commits_between).and_return([commit])
    end

    it "creates a note if a pushed commit mentions an issue" do
      expect(SystemNoteService).to receive(:cross_reference).with(issue, commit, commit_author)

      execute_service(project, user, oldrev, newrev, ref)
    end

    it "only creates a cross-reference note if one doesn't already exist" do
      SystemNoteService.cross_reference(issue, commit, user)

      expect(SystemNoteService).not_to receive(:cross_reference).with(issue, commit, commit_author)

      execute_service(project, user, oldrev, newrev, ref)
    end

    it "defaults to the pushing user if the commit's author is not known" do
      allow(commit).to receive_messages(
        author_name: 'unknown name',
        author_email: 'unknown@email.com'
      )
      expect(SystemNoteService).to receive(:cross_reference).with(issue, commit, user)

      execute_service(project, user, oldrev, newrev, ref)
    end

    it "finds references in the first push to a non-default branch" do
      allow(project.repository).to receive(:commits_between).with(blankrev, newrev).and_return([])
      allow(project.repository).to receive(:commits_between).with("master", newrev).and_return([commit])

      expect(SystemNoteService).to receive(:cross_reference).with(issue, commit, commit_author)

      execute_service(project, user, blankrev, newrev, 'refs/heads/other')
    end
  end

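  # committed_date is pinned to commit_time so the first_mentioned_in_commit_at
  # metric can be compared against a known timestamp.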
  describe "issue metrics" do
    let(:issue) { create :issue, project: project }
    let(:commit_author) { create :user }
    let(:commit) { project.commit }
    let(:commit_time) { Time.now }

    before do
      project.add_developer(commit_author)
      project.add_developer(user)

      allow(commit).to receive_messages(
        safe_message: "this commit \n mentions #{issue.to_reference}",
        references: [issue],
        author_name: commit_author.name,
        author_email: commit_author.email,
        committed_date: commit_time
      )

      allow_any_instance_of(ProcessCommitWorker).to receive(:build_commit)
        .and_return(commit)

      allow(project.repository).to receive(:commits_between).and_return([commit])
    end

    context "while saving the 'first_mentioned_in_commit_at' metric for an issue" do
      it 'sets the metric for referenced issues' do
        execute_service(project, user, oldrev, newrev, ref)

        expect(issue.reload.metrics.first_mentioned_in_commit_at).to be_like_time(commit_time)
      end

      it 'does not set the metric for non-referenced issues' do
        non_referenced_issue = create(:issue, project: project)
        execute_service(project, user, oldrev, newrev, ref)

        expect(non_referenced_issue.reload.metrics.first_mentioned_in_commit_at).to be_nil
      end
    end
  end

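  # Issue closing is driven by the commit message matching issue_closing_regex,
  # which is stubbed on closing_commit in the before block below.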
  describe "closing issues from pushed commits containing a closing reference" do
    let(:issue) { create :issue, project: project }
    let(:other_issue) { create :issue, project: project }
    let(:commit_author) { create :user }
    let(:closing_commit) { project.commit }

    before do
      allow(closing_commit).to receive_messages(
        issue_closing_regex: /^([Cc]loses|[Ff]ixes) #\d+/,
        safe_message: "this is some work.\n\ncloses ##{issue.iid}",
        author_name: commit_author.name,
        author_email: commit_author.email
      )

      allow(project.repository).to receive(:commits_between)
        .and_return([closing_commit])

      allow_any_instance_of(ProcessCommitWorker).to receive(:build_commit)
        .and_return(closing_commit)

      project.add_master(commit_author)
    end

    context "to default branches" do
      it "closes issues" do
        execute_service(project, commit_author, oldrev, newrev, ref)
        expect(Issue.find(issue.id)).to be_closed
      end

      it "adds a note indicating that the issue is now closed" do
        expect(SystemNoteService).to receive(:change_status).with(issue, project, commit_author, "closed", closing_commit)
        execute_service(project, commit_author, oldrev, newrev, ref)
      end

      it "doesn't create additional cross-reference notes" do
        expect(SystemNoteService).not_to receive(:cross_reference)
        execute_service(project, commit_author, oldrev, newrev, ref)
      end
    end

    context "to non-default branches" do
      before do
        # Make sure the "default" branch is different
        allow(project).to receive(:default_branch).and_return('not-master')
      end

      it "creates cross-reference notes" do
        expect(SystemNoteService).to receive(:cross_reference).with(issue, closing_commit, commit_author)
        execute_service(project, user, oldrev, newrev, ref)
      end

      it "doesn't close issues" do
        execute_service(project, user, oldrev, newrev, ref)
        expect(Issue.find(issue.id)).to be_opened
      end
    end

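    # JIRA calls never leave the test process: stub_jira_urls (presumably from
    # JiraServiceHelper) registers WebMock stubs, and the examples assert
    # against the intercepted requests.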
    context "for jira issue tracker" do
      include JiraServiceHelper

      let(:jira_tracker) { project.create_jira_service if project.jira_service.nil? }

      before do
        # project.create_jira_service doesn't seem to invalidate the cache here
        project.has_external_issue_tracker = true
        jira_service_settings
        stub_jira_urls("JIRA-1")

        allow(closing_commit).to receive_messages({
          issue_closing_regex: Regexp.new(Gitlab.config.gitlab.issue_closing_pattern),
          safe_message: message,
          author_name: commit_author.name,
          author_email: commit_author.email
        })

        allow(JIRA::Resource::Remotelink).to receive(:all).and_return([])

        allow(project.repository).to receive_messages(commits_between: [closing_commit])
      end

      after do
        jira_tracker.destroy!
      end

      context "mentioning an issue" do
        let(:message) { "this is some work.\n\nrelated to JIRA-1" }

        it "initiates one api call to jira server to mention the issue" do
          execute_service(project, user, oldrev, newrev, ref)

          expect(WebMock).to have_requested(:post, jira_api_comment_url('JIRA-1')).with(
            body: /mentioned this issue in/
          ).once
        end
      end

      context "closing an issue" do
        let(:message) { "this is some work.\n\ncloses JIRA-1" }
        let(:comment_body) do
          {
            body: "Issue solved with [#{closing_commit.id}|http://#{Gitlab.config.gitlab.host}/#{project.full_path}/commit/#{closing_commit.id}]."
          }.to_json
        end

        before do
          open_issue = JIRA::Resource::Issue.new(jira_tracker.client, attrs: { "id" => "JIRA-1" })
          closed_issue = open_issue.dup
          allow(open_issue).to receive(:resolution).and_return(false)
          allow(closed_issue).to receive(:resolution).and_return(true)
          allow(JIRA::Resource::Issue).to receive(:find).and_return(open_issue, closed_issue)

          allow_any_instance_of(JIRA::Resource::Issue).to receive(:key).and_return("JIRA-1")
        end

        context "using right markdown" do
          it "initiates one api call to jira server to close the issue" do
            execute_service(project, commit_author, oldrev, newrev, ref)

            expect(WebMock).to have_requested(:post, jira_api_transition_url('JIRA-1')).once
          end

          it "initiates one api call to jira server to comment on the issue" do
            execute_service(project, commit_author, oldrev, newrev, ref)

            expect(WebMock).to have_requested(:post, jira_api_comment_url('JIRA-1')).with(
              body: comment_body
            ).once
          end
        end

        context "using internal issue reference" do
          context 'when internal issues are disabled' do
            before do
              project.issues_enabled = false
              project.save!
            end

            let(:message) { "this is some work.\n\ncloses #1" }

            it "does not initiate an api call to jira server to close the issue" do
              execute_service(project, commit_author, oldrev, newrev, ref)

              expect(WebMock).not_to have_requested(:post, jira_api_transition_url('JIRA-1'))
            end

            it "does not initiate an api call to jira server to comment on the issue" do
              execute_service(project, commit_author, oldrev, newrev, ref)

              expect(WebMock).not_to have_requested(:post, jira_api_comment_url('JIRA-1')).with(
                body: comment_body
              ).once
            end
          end

          context 'when internal issues are enabled' do
            let(:issue) { create(:issue, project: project) }
            let(:message) { "this is some work.\n\ncloses JIRA-1 \n\n closes #{issue.to_reference}" }

            it "initiates one api call to jira server to close the jira issue" do
              execute_service(project, commit_author, oldrev, newrev, ref)

              expect(WebMock).to have_requested(:post, jira_api_transition_url('JIRA-1')).once
            end

            it "initiates one api call to jira server to comment on the jira issue" do
              execute_service(project, commit_author, oldrev, newrev, ref)

              expect(WebMock).to have_requested(:post, jira_api_comment_url('JIRA-1')).with(
                body: comment_body
              ).once
            end

            it "closes the internal issue" do
              execute_service(project, commit_author, oldrev, newrev, ref)
              expect(issue.reload).to be_closed
            end

            it "adds a note indicating that the issue is now closed" do
              expect(SystemNoteService).to receive(:change_status)
                .with(issue, project, commit_author, "closed", closing_commit)
              execute_service(project, commit_author, oldrev, newrev, ref)
            end
          end
        end
      end
    end
  end

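  # For an empty repository the first pushed branch is expected to become HEAD,
  # hence the expectation on project#change_head in the before block.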
  describe "empty project" do
    let(:project) { create(:project_empty_repo) }
    let(:new_ref) { 'refs/heads/feature' }

    before do
      allow(project).to receive(:default_branch).and_return('feature')
      expect(project).to receive(:change_head) { 'feature' }
    end

    it 'push to first branch updates HEAD' do
      execute_service(project, user, blankrev, newrev, new_ref)
    end
  end

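  # Projects::HousekeepingService.new is stubbed to return the shared instance
  # above so expectations can target it; Redis keys are flushed around each
  # example to keep the push counters isolated.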
  describe "housekeeping" do
    let(:housekeeping) { Projects::HousekeepingService.new(project) }

    before do
      # Flush any raw key-value data stored by the housekeeping code.
      Gitlab::Redis::Cache.with { |conn| conn.flushall }
      Gitlab::Redis::Queues.with { |conn| conn.flushall }
      Gitlab::Redis::SharedState.with { |conn| conn.flushall }

      allow(Projects::HousekeepingService).to receive(:new).and_return(housekeeping)
    end

    after do
      Gitlab::Redis::Cache.with { |conn| conn.flushall }
      Gitlab::Redis::Queues.with { |conn| conn.flushall }
      Gitlab::Redis::SharedState.with { |conn| conn.flushall }
    end

    it 'does not perform housekeeping when not needed' do
      expect(housekeeping).not_to receive(:execute)

      execute_service(project, user, oldrev, newrev, ref)
    end

    context 'when housekeeping is needed' do
      before do
        allow(housekeeping).to receive(:needed?).and_return(true)
      end

      it 'performs housekeeping' do
        expect(housekeeping).to receive(:execute)

        execute_service(project, user, oldrev, newrev, ref)
      end

      it 'does not raise an exception' do
        allow(housekeeping).to receive(:try_obtain_lease).and_return(false)

        execute_service(project, user, oldrev, newrev, ref)
      end
    end

    it 'increments the push counter' do
      expect(housekeeping).to receive(:increment!)

      execute_service(project, user, oldrev, newrev, ref)
    end
  end

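  # These examples call #update_caches directly on a hand-built service instead
  # of going through #execute, isolating the cache-flush behaviour.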
  describe '#update_caches' do
    let(:service) do
      described_class.new(project,
                          user,
                          oldrev: oldrev,
                          newrev: newrev,
                          ref: ref)
    end

    context 'on the default branch' do
      before do
        allow(service).to receive(:default_branch?).and_return(true)
      end

      it 'flushes the caches of any special files that have been changed' do
        commit = double(:commit)
        diff = double(:diff, new_path: 'README.md')

        expect(commit).to receive(:raw_deltas)
          .and_return([diff])

        service.push_commits = [commit]

        expect(ProjectCacheWorker).to receive(:perform_async)
          .with(project.id, %i(readme), %i(commit_count repository_size))

        service.update_caches
      end
    end

    context 'on a non-default branch' do
      before do
        allow(service).to receive(:default_branch?).and_return(false)
      end

      it 'does not flush any conditional caches' do
        expect(ProjectCacheWorker).to receive(:perform_async)
          .with(project.id, [], %i(commit_count repository_size))
          .and_call_original

        service.update_caches
      end
    end
  end

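  # Commit message processing is capped: for 1,000 pushed commits only 100
  # ProcessCommitWorker jobs are expected to be scheduled.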
  describe '#process_commit_messages' do
    let(:service) do
      described_class.new(project,
                          user,
                          oldrev: oldrev,
                          newrev: newrev,
                          ref: ref)
    end

    it 'only schedules a limited number of commits' do
      service.push_commits = Array.new(1000, double(:commit, to_hash: {}, matches_cross_reference_regex?: true))

      expect(ProcessCommitWorker).to receive(:perform_async).exactly(100).times

      service.process_commit_messages
    end

    it "skips commits which don't include cross-references" do
      service.push_commits = [double(:commit, to_hash: {}, matches_cross_reference_regex?: false)]

      expect(ProcessCommitWorker).not_to receive(:perform_async)

      service.process_commit_messages
    end
  end

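  # sample_commit (from RepoHelpers) is the pushed commit here (newrev), so GPG
  # signature caching is asserted against sample_commit.id.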
  describe '#update_signatures' do
    let(:service) do
      described_class.new(
        project,
        user,
        oldrev: oldrev,
        newrev: newrev,
        ref: 'refs/heads/master'
      )
    end

    context 'when the commit has a signature' do
      context 'when the signature is already cached' do
        before do
          create(:gpg_signature, commit_sha: sample_commit.id)
        end

        it 'does not queue a CreateGpgSignatureWorker' do
          expect(CreateGpgSignatureWorker).not_to receive(:perform_async).with(sample_commit.id, project.id)

          execute_service(project, user, oldrev, newrev, ref)
        end
      end

      context 'when the signature is not yet cached' do
        it 'queues a CreateGpgSignatureWorker' do
          expect(CreateGpgSignatureWorker).to receive(:perform_async).with(sample_commit.id, project.id)

          execute_service(project, user, oldrev, newrev, ref)
        end
      end
    end

    context 'when the commit does not have a signature' do
      before do
        allow(Gitlab::Git::Commit).to receive(:shas_with_signatures).with(project.repository, [sample_commit.id]).and_return([])
      end

      it 'does not queue a CreateGpgSignatureWorker' do
        expect(CreateGpgSignatureWorker).not_to receive(:perform_async).with(sample_commit.id, project.id)

        execute_service(project, user, oldrev, newrev, ref)
      end
    end
  end

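  # Helpers shared by the examples above: execute_service runs a push through
  # the service and returns it; push_data_from_service returns the resulting
  # hook payload.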
  def execute_service(project, user, oldrev, newrev, ref)
    service = described_class.new(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
    service.execute
    service
  end

  def push_data_from_service(project, user, oldrev, newrev, ref)
    execute_service(project, user, oldrev, newrev, ref).push_data
  end
end