# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do
  include RepoHelpers
  include ProjectForksHelper

  let(:project) { create(:project, :repository) }
  let(:user) { project.creator }
  let(:branch) { project.default_branch }
  let(:ref) { "refs/heads/#{branch}" }
  let(:commit_id) { sample_commit.id }
  let(:commit) { project.commit(commit_id) }
  let(:oldrev) { commit.parent_id }
  let(:newrev) { commit.id }

  let(:service) do
    described_class.new(project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref })
  end

  describe "Git Push Data" do
    subject(:push_data) { service.send(:push_data) }

    it 'has expected push data attributes' do
      is_expected.to match a_hash_including(
        object_kind: 'push',
        before: oldrev,
        after: newrev,
        ref: ref,
        user_id: user.id,
        user_name: user.name,
        project_id: project.id
      )
    end

    context "with repository data" do
      subject { push_data[:repository] }

      it 'has expected attributes' do
        is_expected.to match a_hash_including(
          name: project.name,
          url: project.url_to_repo,
          description: project.description,
          homepage: project.web_url
        )
      end
    end

    context "with commits" do
      subject { push_data[:commits] }

      it { is_expected.to be_an(Array) }

      it 'has 1 element' do
        expect(subject.size).to eq(1)
      end

      context "the commit" do
        subject { push_data[:commits].first }

        it { expect(subject[:timestamp].in_time_zone).to eq(commit.date.in_time_zone) }

        it 'includes expected commit data' do
          is_expected.to match a_hash_including(
            id: commit.id,
            message: commit.safe_message,
            url: [
              Gitlab.config.gitlab.url,
              project.namespace.to_param,
              project.to_param,
              '-',
              'commit',
              commit.id
            ].join('/')
          )
        end

        context "with a author" do
          subject { push_data[:commits].first[:author] }

          it 'includes expected author data' do
            is_expected.to match a_hash_including(
              name: commit.author_name,
              email: commit.author_email
            )
          end
        end
      end
    end
  end

  describe 'Push Event' do
    let(:event) { Event.pushed_action.take }

    subject(:execute_service) { service.execute }

    context "with an existing branch" do
      it 'generates a push event with one commit' do
        execute_service

        expect(event).to be_an_instance_of(PushEvent)
        expect(event.project).to eq(project)
        expect(event).to be_pushed_action
        expect(event.push_event_payload).to be_an_instance_of(PushEventPayload)
        expect(event.push_event_payload.commit_from).to eq(oldrev)
        expect(event.push_event_payload.commit_to).to eq(newrev)
        expect(event.push_event_payload.commit_title).to eq('Change some files')
        expect(event.push_event_payload.ref).to eq('master')
        expect(event.push_event_payload.commit_count).to eq(1)
      end

      context 'with changing CI config' do
        before do
          allow_next_instance_of(Gitlab::Git::Diff) do |diff|
            allow(diff).to receive(:new_path).and_return('.gitlab-ci.yml')
          end
        end

        let!(:commit_author) { create(:user, email: sample_commit.author_email) }

        let(:tracking_params) do
          ['o_pipeline_authoring_unique_users_committing_ciconfigfile', values: commit_author.id]
        end

        it 'tracks the event' do
          time = Time.zone.now

          execute_service

          expect(Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(event_names: 'o_pipeline_authoring_unique_users_committing_ciconfigfile', start_date: time, end_date: time + 7.days)).to eq(1)
        end

        context 'when usage ping is disabled' do
          before do
            allow(::ServicePing::ServicePingSettings).to receive(:enabled?).and_return(false)
          end

          it 'does not track the event' do
            execute_service

            expect(Gitlab::UsageDataCounters::HLLRedisCounter)
              .not_to receive(:track_event).with(*tracking_params)
          end
        end

        context 'when the branch is not the main branch' do
          let(:branch) { 'feature' }

          it 'does not track the event' do
            execute_service

            expect(Gitlab::UsageDataCounters::HLLRedisCounter)
              .not_to receive(:track_event).with(*tracking_params)
          end
        end

        context 'when the CI config is a different path' do
          before do
            project.ci_config_path = 'config/ci.yml'
          end

          it 'does not track the event' do
            execute_service

            expect(Gitlab::UsageDataCounters::HLLRedisCounter)
              .not_to receive(:track_event).with(*tracking_params)
          end
        end
      end
    end

    context "with a new default branch" do
      let(:oldrev) { Gitlab::Git::BLANK_SHA }

      it 'generates a push event with more than one commit' do
        execute_service

        expect(event).to be_an_instance_of(PushEvent)
        expect(event.project).to eq(project)
        expect(event).to be_pushed_action
        expect(event.push_event_payload).to be_an_instance_of(PushEventPayload)
        expect(event.push_event_payload.commit_from).to be_nil
        expect(event.push_event_payload.commit_to).to eq(newrev)
        expect(event.push_event_payload.commit_title).to eq('Change some files')
        expect(event.push_event_payload.ref).to eq('master')
        expect(event.push_event_payload.commit_count).to be > 1
      end
    end

    context "with a new non-default branch" do
      let(:oldrev) { Gitlab::Git::BLANK_SHA }
      let(:branch) { 'fix' }
      let(:commit_id) { project.commit(branch).id }

      it 'generates a push event with more than one commit' do
        execute_service

        expect(event).to be_an_instance_of(PushEvent)
        expect(event.project).to eq(project)
        expect(event).to be_pushed_action
        expect(event.push_event_payload).to be_an_instance_of(PushEventPayload)
        expect(event.push_event_payload.commit_from).to be_nil
        expect(event.push_event_payload.commit_to).to eq(newrev)
        expect(event.push_event_payload.commit_title).to eq('Test file for directories with a leading dot')
        expect(event.push_event_payload.ref).to eq('fix')
        expect(event.push_event_payload.commit_count).to be > 1
      end
    end

    context 'removing a branch' do
      let(:newrev) { Gitlab::Git::BLANK_SHA }

      it 'generates a push event with no commits' do
        execute_service

        expect(event).to be_an_instance_of(PushEvent)
        expect(event.project).to eq(project)
        expect(event).to be_pushed_action
        expect(event.push_event_payload).to be_an_instance_of(PushEventPayload)
        expect(event.push_event_payload.commit_from).to eq(oldrev)
        expect(event.push_event_payload.commit_to).to be_nil
        expect(event.push_event_payload.ref).to eq('master')
        expect(event.push_event_payload.commit_count).to eq(0)
      end
    end
  end

  describe 'Invalidating project cache' do
    let(:commit_id) do
      project.repository.update_file(
        user, 'README.md', '', message: 'Update', branch_name: branch
      )
    end

    let(:blank_sha) { Gitlab::Git::BLANK_SHA }

    # Stubs the invalidated file types and asserts that ProjectCacheWorker is
    # enqueued (only when an extended cache invalidation is expected), then
    # runs the service.
    def clears_cache(extended: [])
      expect(service).to receive(:invalidated_file_types).and_return(extended)

      if extended.present?
        expect(ProjectCacheWorker)
          .to receive(:perform_async)
          .with(project.id, extended, [], false)
      end

      service.execute
    end

    def clears_extended_cache
      clears_cache(extended: %i[readme])
    end

    context 'on default branch' do
      context 'create' do
        # FIXME: When creating the default branch, the cache worker runs twice
        before do
          allow(ProjectCacheWorker).to receive(:perform_async)
        end

        let(:oldrev) { blank_sha }

        it { clears_cache }
      end

      context 'update' do
        it { clears_extended_cache }
      end

      context 'remove' do
        let(:newrev) { blank_sha }

        # TODO: this case should pass, but we only take account of added files
        it { clears_cache }
      end
    end

    context 'on ordinary branch' do
      let(:branch) { 'fix' }

      context 'create' do
        let(:oldrev) { blank_sha }

        it { clears_cache }
      end

      context 'update' do
        it { clears_cache }
      end

      context 'remove' do
        let(:newrev) { blank_sha }

        it { clears_cache }
      end
    end
  end

  describe 'signatures' do
    context 'when the commit has a signature' do
      context 'when the signature is already cached' do
        before do
          create(:gpg_signature, commit_sha: commit.id)
        end

        it 'does not queue a CreateCommitSignatureWorker' do
          expect(CreateCommitSignatureWorker).not_to receive(:perform_async)

          service.execute
        end
      end

      context 'when the signature is not yet cached' do
        it 'queues a CreateCommitSignatureWorker' do
          expect(CreateCommitSignatureWorker).to receive(:perform_async).with([commit.id], project.id)

          service.execute
        end

        it 'can queue several commits to create the gpg signature' do
          allow(Gitlab::Git::Commit)
            .to receive(:shas_with_signatures)
            .and_return([sample_commit.id, another_sample_commit.id])

          expect(CreateCommitSignatureWorker)
            .to receive(:perform_async)
            .with([sample_commit.id, another_sample_commit.id], project.id)

          service.execute
        end
      end
    end

    context 'when the commit does not have a signature' do
      before do
        allow(Gitlab::Git::Commit)
          .to receive(:shas_with_signatures)
          .with(project.repository, [sample_commit.id])
          .and_return([])
      end

      it 'does not queue a CreateCommitSignatureWorker' do
        expect(CreateCommitSignatureWorker)
          .not_to receive(:perform_async)
          .with(sample_commit.id, project.id)

        service.execute
      end
    end
  end

  describe 'Processing commit messages' do
    # Create 6 commits, 3 of which have references. Limiting to 4 commits, we
    # expect to see two commit message processors enqueued.
    let!(:commit_ids) do
      Array.new(6) do |i|
        message = "Issue #{'#' if i.even?}#{i}"
        project.repository.update_file(
          user, 'README.md', '', message: message, branch_name: branch
        )
      end
    end

    let(:commits_count) { service.send(:commits_count) }
    let(:threshold_limit) { described_class::PROCESS_COMMIT_LIMIT + 1 }

    let(:oldrev) { project.commit(commit_ids.first).parent_id }
    let(:newrev) { commit_ids.last }

    before do
      stub_const("::Git::BaseHooksService::PROCESS_COMMIT_LIMIT", 4)
    end

    context 'creating the default branch' do
      let(:oldrev) { Gitlab::Git::BLANK_SHA }

      it 'processes a limited number of commit messages' do
        expect(project.repository)
          .to receive(:commits)
          .with(newrev, limit: threshold_limit)
          .and_call_original

        expect(ProcessCommitWorker).to receive(:perform_async).twice

        service.execute

        expect(commits_count).to eq(project.repository.commit_count_for_ref(newrev))
      end

      it 'collects the related metrics' do
        expect(Gitlab::Metrics).to receive(:add_event).with(:push_commit, { branch: 'master' })
        expect(Gitlab::Metrics).to receive(:add_event).with(:push_branch, {})
        expect(Gitlab::Metrics).to receive(:add_event).with(:change_default_branch, {})
        expect(Gitlab::Metrics).to receive(:add_event).with(:process_commit_limit_overflow)

        service.execute
      end

      context 'when limit is not hit' do
        before do
          stub_const("::Git::BaseHooksService::PROCESS_COMMIT_LIMIT", 100)
        end

        it 'does not collect the corresponding metric' do
          expect(Gitlab::Metrics).not_to receive(:add_event).with(:process_commit_limit_overflow)

          service.execute
        end
      end
    end

    context 'updating the default branch' do
      it 'processes a limited number of commit messages' do
        expect(project.repository)
          .to receive(:commits_between)
          .with(oldrev, newrev, limit: threshold_limit)
          .and_call_original

        expect(ProcessCommitWorker).to receive(:perform_async).twice

        service.execute

        expect(commits_count).to eq(project.repository.count_commits_between(oldrev, newrev))
      end
    end

    context 'removing the default branch' do
      let(:newrev) { Gitlab::Git::BLANK_SHA }

      it 'does not process commit messages' do
        expect(project.repository).not_to receive(:commits)
        expect(project.repository).not_to receive(:commits_between)
        expect(ProcessCommitWorker).not_to receive(:perform_async)

        service.execute

        expect(commits_count).to eq(0)
      end
    end

    context 'creating a normal branch' do
      let(:branch) { 'fix' }
      let(:oldrev) { Gitlab::Git::BLANK_SHA }

      it 'processes a limited number of commit messages' do
        expect(project.repository)
          .to receive(:commits_between)
          .with(project.default_branch, newrev, limit: threshold_limit)
          .and_call_original

        expect(ProcessCommitWorker).to receive(:perform_async).twice

        service.execute

        expect(commits_count).to eq(project.repository.count_commits_between(project.default_branch, branch))
      end
    end

    context 'updating a normal branch' do
      let(:branch) { 'fix' }

      it 'processes a limited number of commit messages' do
        expect(project.repository)
          .to receive(:commits_between)
          .with(oldrev, newrev, limit: threshold_limit)
          .and_call_original

        expect(ProcessCommitWorker).to receive(:perform_async).twice

        service.execute

        expect(commits_count).to eq(project.repository.count_commits_between(oldrev, newrev))
      end
    end

    context 'removing a normal branch' do
      let(:branch) { 'fix' }
      let(:newrev) { Gitlab::Git::BLANK_SHA }

      it 'does not process commit messages' do
        expect(project.repository).not_to receive(:commits)
        expect(project.repository).not_to receive(:commits_between)
        expect(ProcessCommitWorker).not_to receive(:perform_async)

        service.execute

        expect(commits_count).to eq(0)
      end
    end

    context 'when the project is forked', :sidekiq_might_not_need_inline do
      let(:upstream_project) { project }
      let(:forked_project) { fork_project(upstream_project, user, repository: true, using_service: true) }

      let!(:forked_service) do
        described_class.new(forked_project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref })
      end

      context 'when commits already exists in the upstream project' do
        it 'does not process commit messages' do
          expect(ProcessCommitWorker).not_to receive(:perform_async)

          forked_service.execute
        end
      end

      context 'when a commit does not exist in the upstream repo' do
        # On top of the existing 6 commits, 3 of which have references,
        # create 2 more, 1 of which has a reference. Limiting to 4 commits, we
        # expect to see one commit message processor enqueued.
        let!(:forked_commit_ids) do
          Array.new(2) do |i|
            message = "Issue #{'#' if i.even?}#{i}"
            forked_project.repository.update_file(
              user, 'README.md', '', message: message, branch_name: branch
            )
          end
        end

        let(:newrev) { forked_commit_ids.last }

        it 'processes the commit message' do
          expect(ProcessCommitWorker).to receive(:perform_async).once

          forked_service.execute
        end
      end

      context 'when the upstream project no longer exists' do
        it 'processes the commit messages' do
          upstream_project.destroy!

          expect(ProcessCommitWorker).to receive(:perform_async).twice

          forked_service.execute
        end
      end
    end
  end

  describe 'New branch detection' do
    let(:branch) { 'fix' }

    context 'oldrev is the blank SHA' do
      let(:oldrev) { Gitlab::Git::BLANK_SHA }

      it 'is treated as a new branch' do
        expect(service).to receive(:branch_create_hooks)

        service.execute
      end
    end

    context 'oldrev is set' do
      context 'Gitaly does not know about the branch' do
        it 'is treated as a new branch' do
          allow(project.repository).to receive(:branch_names) { [] }

          expect(service).to receive(:branch_create_hooks)

          service.execute
        end
      end

      context 'Gitaly knows about the branch' do
        it 'is not treated as a new branch' do
          expect(service).not_to receive(:branch_create_hooks)

          service.execute
        end
      end
    end
  end

  describe 'Metrics dashboard sync' do
    shared_examples 'trigger dashboard sync' do
      it 'imports metrics to database' do
        expect(Metrics::Dashboard::SyncDashboardsWorker).to receive(:perform_async)

        service.execute
      end
    end

    shared_examples 'no dashboard sync' do
      it 'does not sync metrics to database' do
        expect(Metrics::Dashboard::SyncDashboardsWorker).not_to receive(:perform_async)

        service.execute
      end
    end

    # Commits a batch of create/update/delete actions to the repository and
    # returns the resulting commit id. `changes` maps an action symbol to one
    # path or an array of paths.
    def change_repository(**changes)
      actions = changes.flat_map do |(action, paths)|
        Array(paths).flat_map do |file_path|
          { action: action, file_path: file_path, content: SecureRandom.hex }
        end
      end

      project.repository.commit_files(
        user, message: 'message', branch_name: branch, actions: actions
      )
    end

    let(:charts) { '.gitlab/dashboards/charts.yml' }
    let(:readme) { 'README.md' }
    let(:commit_id) { change_repository(**commit_changes) }

    context 'with default branch' do
      context 'when adding files' do
        let(:new_file) { 'somenewfile.md' }

        context 'also related' do
          let(:commit_changes) { { create: [charts, new_file] } }

          include_examples 'trigger dashboard sync'
        end

        context 'only unrelated' do
          let(:commit_changes) { { create: new_file } }

          include_examples 'no dashboard sync'
        end
      end

      context 'when deleting files' do
        before do
          change_repository(create: charts)
        end

        context 'also related' do
          let(:commit_changes) { { delete: [charts, readme] } }

          include_examples 'trigger dashboard sync'
        end

        context 'only unrelated' do
          let(:commit_changes) { { delete: readme } }

          include_examples 'no dashboard sync'
        end
      end

      context 'when updating files' do
        before do
          change_repository(create: charts)
        end

        context 'also related' do
          let(:commit_changes) { { update: [charts, readme] } }

          include_examples 'trigger dashboard sync'
        end

        context 'only unrelated' do
          let(:commit_changes) { { update: readme } }

          include_examples 'no dashboard sync'
        end
      end

      context 'without changes' do
        let(:commit_changes) { {} }

        include_examples 'no dashboard sync'
      end
    end

    context 'with other branch' do
      let(:branch) { 'fix' }
      let(:commit_changes) { { create: charts } }

      include_examples 'no dashboard sync'
    end
  end
end