# frozen_string_literal: true

require 'spec_helper'

describe Git::BranchHooksService do
  include RepoHelpers
  include ProjectForksHelper

  let(:project) { create(:project, :repository) }
  let(:user) { project.creator }

  let(:branch) { project.default_branch }
  let(:ref) { "refs/heads/#{branch}" }
  let(:commit) { project.commit(sample_commit.id) }
  let(:oldrev) { commit.parent_id }
  let(:newrev) { commit.id }

  let(:service) do
    described_class.new(project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref })
  end
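
  # `push_data` (private, hence the `send` below) appears to be the payload
  # the service builds for downstream hooks and integrations.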

  describe "Git Push Data" do
    subject(:push_data) { service.send(:push_data) }

    it 'has expected push data attributes' do
      is_expected.to match a_hash_including(
        object_kind: 'push',
        before: oldrev,
        after: newrev,
        ref: ref,
        user_id: user.id,
        user_name: user.name,
        project_id: project.id
      )
    end

    context "with repository data" do
      subject { push_data[:repository] }

      it 'has expected attributes' do
        is_expected.to match a_hash_including(
          name: project.name,
          url: project.url_to_repo,
          description: project.description,
          homepage: project.web_url
        )
      end
    end

    context "with commits" do
      subject { push_data[:commits] }

      it { is_expected.to be_an(Array) }

      it 'has 1 element' do
        expect(subject.size).to eq(1)
      end

      context "the commit" do
        subject { push_data[:commits].first }

        it { expect(subject[:timestamp].in_time_zone).to eq(commit.date.in_time_zone) }

        it 'includes expected commit data' do
          is_expected.to match a_hash_including(
            id: commit.id,
            message: commit.safe_message,
            url: [
              Gitlab.config.gitlab.url,
              project.namespace.to_param,
              project.to_param,
              '-',
              'commit',
              commit.id
            ].join('/')
          )
        end

        context "with an author" do
          subject { push_data[:commits].first[:author] }

          it 'includes expected author data' do
            is_expected.to match a_hash_including(
              name: commit.author_name,
              email: commit.author_email
            )
          end
        end
      end
    end
  end

  describe 'Push Event' do
    let(:event) { Event.find_by_action(Event::PUSHED) }

    before do
      service.execute
    end

    context "with an existing branch" do
      it 'generates a push event with one commit' do
        expect(event).to be_an_instance_of(PushEvent)
        expect(event.project).to eq(project)
        expect(event.action).to eq(Event::PUSHED)
        expect(event.push_event_payload).to be_an_instance_of(PushEventPayload)
        expect(event.push_event_payload.commit_from).to eq(oldrev)
        expect(event.push_event_payload.commit_to).to eq(newrev)
        expect(event.push_event_payload.commit_title).to eq('Change some files')
        expect(event.push_event_payload.ref).to eq('master')
        expect(event.push_event_payload.commit_count).to eq(1)
      end
    end
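
    # In Git's post-receive data, an all-zero "blank" SHA as oldrev means the
    # branch did not exist before the push (creation); as newrev it means the
    # branch was deleted.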

    context "with a new branch" do
      let(:oldrev) { Gitlab::Git::BLANK_SHA }

      it 'generates a push event with more than one commit' do
        expect(event).to be_an_instance_of(PushEvent)
        expect(event.project).to eq(project)
        expect(event.action).to eq(Event::PUSHED)
        expect(event.push_event_payload).to be_an_instance_of(PushEventPayload)
        expect(event.push_event_payload.commit_from).to be_nil
        expect(event.push_event_payload.commit_to).to eq(newrev)
        expect(event.push_event_payload.commit_title).to eq('Initial commit')
        expect(event.push_event_payload.ref).to eq('master')
        expect(event.push_event_payload.commit_count).to be > 1
      end
    end

    context 'removing a branch' do
      let(:newrev) { Gitlab::Git::BLANK_SHA }

      it 'generates a push event with no commits' do
        expect(event).to be_an_instance_of(PushEvent)
        expect(event.project).to eq(project)
        expect(event.action).to eq(Event::PUSHED)
        expect(event.push_event_payload).to be_an_instance_of(PushEventPayload)
        expect(event.push_event_payload.commit_from).to eq(oldrev)
        expect(event.push_event_payload.commit_to).to be_nil
        expect(event.push_event_payload.ref).to eq('master')
        expect(event.push_event_payload.commit_count).to eq(0)
      end
    end
  end

  describe 'Invalidating project cache' do
    let(:commit_id) do
      project.repository.update_file(
        user, 'README.md', '', message: 'Update', branch_name: branch
      )
    end

    let(:commit) { project.repository.commit(commit_id) }
    let(:blank_sha) { Gitlab::Git::BLANK_SHA }
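
    # Asserts that executing the service consults the invalidated file types
    # and, when any `extended` types are given, enqueues a ProjectCacheWorker
    # to refresh them.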
    def clears_cache(extended: [])
      expect(service).to receive(:invalidated_file_types).and_return(extended)

      if extended.present?
        expect(ProjectCacheWorker)
          .to receive(:perform_async)
          .with(project.id, extended, [], false)
      end

      service.execute
    end

    def clears_extended_cache
      clears_cache(extended: %i[readme])
    end

    context 'on default branch' do
      context 'create' do
        # FIXME: When creating the default branch, the cache worker runs twice
        before do
          allow(ProjectCacheWorker).to receive(:perform_async)
        end

        let(:oldrev) { blank_sha }

        it { clears_cache }
      end

      context 'update' do
        it { clears_extended_cache }
      end

      context 'remove' do
        let(:newrev) { blank_sha }

        # TODO: this case should pass, but we only take added files into account
        it { clears_cache }
      end
    end

    context 'on ordinary branch' do
      let(:branch) { 'fix' }

      context 'create' do
        let(:oldrev) { blank_sha }

        it { clears_cache }
      end

      context 'update' do
        it { clears_cache }
      end

      context 'remove' do
        let(:newrev) { blank_sha }

        it { clears_cache }
      end
    end
  end
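
  # Pushing signed commits schedules CreateCommitSignatureWorker to verify and
  # cache their signatures; commits whose signatures are already cached are
  # skipped.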

  describe 'signatures' do
    context 'when the commit has a signature' do
      context 'when the signature is already cached' do
        before do
          create(:gpg_signature, commit_sha: commit.id)
        end

        it 'does not queue a CreateCommitSignatureWorker' do
          expect(CreateCommitSignatureWorker).not_to receive(:perform_async)

          service.execute
        end
      end

      context 'when the signature is not yet cached' do
        it 'queues a CreateCommitSignatureWorker' do
          expect(CreateCommitSignatureWorker).to receive(:perform_async).with([commit.id], project.id)

          service.execute
        end

        it 'can queue several commits to create the gpg signature' do
          allow(Gitlab::Git::Commit)
            .to receive(:shas_with_signatures)
            .and_return([sample_commit.id, another_sample_commit.id])

          expect(CreateCommitSignatureWorker)
            .to receive(:perform_async)
            .with([sample_commit.id, another_sample_commit.id], project.id)

          service.execute
        end
      end
    end

    context 'when the commit does not have a signature' do
      before do
        allow(Gitlab::Git::Commit)
          .to receive(:shas_with_signatures)
          .with(project.repository, [sample_commit.id])
          .and_return([])
      end

      it 'does not queue a CreateCommitSignatureWorker' do
        expect(CreateCommitSignatureWorker)
          .not_to receive(:perform_async)
          .with(sample_commit.id, project.id)

        service.execute
      end
    end
  end

  describe 'Processing commit messages' do
    # Create 6 commits, 3 of which have references. Limiting to 4 commits, we
    # expect to see two commit message processors enqueued.
    let!(:commit_ids) do
      Array.new(6) do |i|
        message = "Issue #{'#' if i.even?}#{i}"
        project.repository.update_file(
          user, 'README.md', '', message: message, branch_name: branch
        )
      end
    end
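
    # Even iterations above produce messages like "Issue #0" (an issue
    # reference); odd iterations produce "Issue 1" (no reference).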

    let(:oldrev) { project.commit(commit_ids.first).parent_id }
    let(:newrev) { commit_ids.last }
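
    # Lower the per-push processing cap so the limit is exercised with only a
    # handful of commits.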
    before do
      stub_const("::Git::BaseHooksService::PROCESS_COMMIT_LIMIT", 4)
    end

    context 'creating the default branch' do
      let(:oldrev) { Gitlab::Git::BLANK_SHA }

      it 'processes a limited number of commit messages' do
        expect(ProcessCommitWorker).to receive(:perform_async).twice

        service.execute
      end
    end

    context 'updating the default branch' do
      it 'processes a limited number of commit messages' do
        expect(ProcessCommitWorker).to receive(:perform_async).twice

        service.execute
      end
    end

    context 'removing the default branch' do
      let(:newrev) { Gitlab::Git::BLANK_SHA }

      it 'does not process commit messages' do
        expect(ProcessCommitWorker).not_to receive(:perform_async)

        service.execute
      end
    end

    context 'creating a normal branch' do
      let(:branch) { 'fix' }
      let(:oldrev) { Gitlab::Git::BLANK_SHA }

      it 'processes a limited number of commit messages' do
        expect(ProcessCommitWorker).to receive(:perform_async).twice

        service.execute
      end
    end

    context 'updating a normal branch' do
      let(:branch) { 'fix' }

      it 'processes a limited number of commit messages' do
        expect(ProcessCommitWorker).to receive(:perform_async).twice

        service.execute
      end
    end

    context 'removing a normal branch' do
      let(:branch) { 'fix' }
      let(:newrev) { Gitlab::Git::BLANK_SHA }

      it 'does not process commit messages' do
        expect(ProcessCommitWorker).not_to receive(:perform_async)

        service.execute
      end
    end

    context 'when the project is forked', :sidekiq_might_not_need_inline do
      let(:upstream_project) { project }
      let(:forked_project) { fork_project(upstream_project, user, repository: true) }

      let!(:forked_service) do
        described_class.new(forked_project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref })
      end
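
      # Commits that already exist in the upstream project were processed when
      # they were pushed there, so the fork skips them.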
      context 'when commits already exist in the upstream project' do
        it 'does not process commit messages' do
          expect(ProcessCommitWorker).not_to receive(:perform_async)

          forked_service.execute
        end
      end

      context 'when a commit does not exist in the upstream repo' do
        # On top of the existing 6 commits, 3 of which have references,
        # create 2 more, 1 of which has a reference. Limiting to 4 commits, we
        # expect to see one commit message processor enqueued.
        let!(:forked_commit_ids) do
          Array.new(2) do |i|
            message = "Issue #{'#' if i.even?}#{i}"
            forked_project.repository.update_file(
              user, 'README.md', '', message: message, branch_name: branch
            )
          end
        end

        let(:newrev) { forked_commit_ids.last }

        it 'processes the commit message' do
          expect(ProcessCommitWorker).to receive(:perform_async).once

          forked_service.execute
        end
      end

      context 'when the upstream project no longer exists' do
        it 'processes the commit messages' do
          upstream_project.destroy!

          expect(ProcessCommitWorker).to receive(:perform_async).twice

          forked_service.execute
        end
      end
    end
  end

  # Background (from the change that introduced these specs): GitLab could
  # miss a PostReceive invocation the first time a branch was pushed, so the
  # "branch created" hooks never ran and various features stayed broken until
  # the branch was deleted and pushed again. Git::BranchPushService therefore
  # checks the cache of existing branch names in addition to the `oldrev`
  # reported for the branch: if the branch name isn't cached, the service
  # probably hasn't run yet (it is what refreshes the cache), so the create
  # hooks run even though `oldrev` is set. If the cache was cleared by some
  # other means in the meantime, hooks can still be missed; fixing that in the
  # general case is a larger problem. The hooks may also run more than once if
  # branch creation races with repeated PostReceive runs, but Sidekiq only
  # guarantees at-least-once execution anyway, so this is safe.
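
  # A minimal sketch of the check being exercised below (assumed shape; the
  # method name is illustrative, not the actual implementation):
  #
  #   def creating_branch?
  #     Gitlab::Git.blank_ref?(oldrev) ||
  #       !project.repository.branch_names.include?(branch_name)
  #   end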

  describe 'New branch detection' do
    let(:branch) { 'fix' }

    context 'oldrev is the blank SHA' do
      let(:oldrev) { Gitlab::Git::BLANK_SHA }

      it 'is treated as a new branch' do
        expect(service).to receive(:branch_create_hooks)

        service.execute
      end
    end

    context 'oldrev is set' do
      context 'Gitaly does not know about the branch' do
        it 'is treated as a new branch' do
          allow(project.repository).to receive(:branch_names) { [] }

          expect(service).to receive(:branch_create_hooks)

          service.execute
        end
      end

      context 'Gitaly knows about the branch' do
        it 'is not treated as a new branch' do
          expect(service).not_to receive(:branch_create_hooks)

          service.execute
        end
      end
    end
  end
end