Load and process at most 100 commits when pushing into default branch

Douwe Maan 2017-07-27 14:58:02 +02:00
parent 4d05e85375
commit 0e355e5c92
4 changed files with 29 additions and 16 deletions
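
Context for the diffs below: the service now caps every per-commit loop at PROCESS_COMMIT_LIMIT commits (100, judging by the spec at the bottom of this page), and the initial push into the default branch only loads that many commits in the first place; the true total is threaded through to the push payload separately. A tiny, hedged illustration of the capping primitive, using a plain array in place of commit objects:

    PROCESS_COMMIT_LIMIT = 100

    commits = (1..1_000).to_a              # stand-in for a freshly pushed history
    commits.last(PROCESS_COMMIT_LIMIT)     # => the newest 100 elements (901..1000)
    [1, 2, 3].last(PROCESS_COMMIT_LIMIT)   # => [1, 2, 3]; Array#last never pads or raises
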

View File

@ -45,6 +45,7 @@ class GitPushService < BaseService
elsif push_to_existing_branch?
# Collect data for this git push
@push_commits = @project.repository.commits_between(params[:oldrev], params[:newrev])
process_commit_messages
# Update the bare repositories info/attributes file using the contents of the default branches
@ -64,15 +65,21 @@ class GitPushService < BaseService
   def update_caches
     if is_default_branch?
-      paths = Set.new
-
-      @push_commits.each do |commit|
-        commit.raw_deltas.each do |diff|
-          paths << diff.new_path
-        end
-      end
-
-      types = Gitlab::FileDetector.types_in_paths(paths.to_a)
+      if push_to_new_branch?
+        # If this is the initial push into the default branch, the file type caches
+        # will already be reset as a result of `Project#change_head`.
+        types = []
+      else
+        paths = Set.new
+
+        @push_commits.last(PROCESS_COMMIT_LIMIT).each do |commit|
+          commit.raw_deltas.each do |diff|
+            paths << diff.new_path
+          end
+        end
+
+        types = Gitlab::FileDetector.types_in_paths(paths.to_a)
+      end
     else
       types = []
     end
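
Only the newest PROCESS_COMMIT_LIMIT commits are scanned for changed paths now, and a Set keeps the collected paths deduplicated before they are handed to Gitlab::FileDetector.types_in_paths. A self-contained sketch of that collection step, with Struct stand-ins for the real commit and delta objects (illustrative only, not GitLab code):

    require 'set'

    PROCESS_COMMIT_LIMIT = 100

    FakeDelta  = Struct.new(:new_path)
    FakeCommit = Struct.new(:raw_deltas)

    # 5,000 pushed commits touching the same 10 files over and over.
    push_commits = Array.new(5_000) { |i| FakeCommit.new([FakeDelta.new("file#{i % 10}.rb")]) }

    paths = Set.new
    push_commits.last(PROCESS_COMMIT_LIMIT).each do |commit|
      commit.raw_deltas.each { |diff| paths << diff.new_path }
    end

    paths.size # => 10 unique paths, collected from only the newest 100 commits
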
@ -84,7 +91,7 @@ class GitPushService < BaseService
   def process_commit_messages
     default = is_default_branch?

-    push_commits.last(PROCESS_COMMIT_LIMIT).each do |commit|
+    @push_commits.last(PROCESS_COMMIT_LIMIT).each do |commit|
       if commit.matches_cross_reference_regex?
         ProcessCommitWorker
           .perform_async(project.id, current_user.id, commit.to_hash, default)
@ -103,7 +110,7 @@ class GitPushService < BaseService
EventCreateService.new.push(@project, current_user, build_push_data)
Ci::CreatePipelineService.new(@project, current_user, build_push_data).execute(:push)
SystemHookPushWorker.perform_async(build_push_data.dup, :push_hooks)
@project.execute_hooks(build_push_data.dup, :push_hooks)
@project.execute_services(build_push_data.dup, :push_hooks)
@ -123,7 +130,10 @@ class GitPushService < BaseService
   end

   def process_default_branch
-    @push_commits = project.repository.commits(params[:newrev])
+    @push_commits_count = project.repository.commit_count_for_ref(params[:ref])
+
+    offset = [@push_commits_count - PROCESS_COMMIT_LIMIT, 0].max
+    @push_commits = project.repository.commits(params[:newrev], offset: offset, limit: PROCESS_COMMIT_LIMIT)

     # Ensure HEAD points to the default branch in case it is not master
     project.change_head(branch_name)
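
The offset arithmetic above means a huge initial push only materializes the newest PROCESS_COMMIT_LIMIT commits, while small branches still load everything. A worked example with illustrative commit counts:

    PROCESS_COMMIT_LIMIT = 100

    [1_000, 100, 42].each do |push_commits_count|
      offset = [push_commits_count - PROCESS_COMMIT_LIMIT, 0].max
      loaded = [push_commits_count, PROCESS_COMMIT_LIMIT].min
      puts "#{push_commits_count} commits -> offset #{offset}, load #{loaded}"
    end
    # 1000 commits -> offset 900, load 100
    # 100 commits -> offset 0, load 100
    # 42 commits -> offset 0, load 42
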
@ -152,7 +162,8 @@ class GitPushService < BaseService
       params[:oldrev],
       params[:newrev],
       params[:ref],
-      push_commits)
+      @push_commits,
+      commits_count: @push_commits_count)
   end

   def push_to_existing_branch?

View File

@ -0,0 +1,4 @@
+---
+title: Improve performance of large (initial) push into default branch
+merge_request:
+author:

View File

@ -24,11 +24,11 @@ module Gitlab
       #   total_commits_count: Fixnum
       # }
       #
-      def build(project, user, oldrev, newrev, ref, commits = [], message = nil)
+      def build(project, user, oldrev, newrev, ref, commits = [], message = nil, commits_count: nil)
         commits = Array(commits)

         # Total commits count
-        commits_count = commits.size
+        commits_count ||= commits.size

         # Get latest 20 commits ASC
         commits_limited = commits.last(20)
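
The new keyword argument lets GitPushService report the real number of pushed commits even though it now passes at most 100 commit objects, while callers that do not supply it keep the old behaviour of counting the commits array. A minimal sketch of that fallback (build_stub is a stand-in, not the real Gitlab::DataBuilder::Push.build):

    def build_stub(commits = [], commits_count: nil)
      commits = Array(commits)
      commits_count ||= commits.size

      { commits: commits.last(20), total_commits_count: commits_count }
    end

    build_stub(Array.new(100, :commit), commits_count: 5_000)[:total_commits_count] # => 5000
    build_stub(Array.new(3, :commit))[:total_commits_count]                         # => 3
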

View File

@ -663,8 +663,7 @@ describe GitPushService, services: true do
     end

     it 'only schedules a limited number of commits' do
-      allow(service).to receive(:push_commits)
-        .and_return(Array.new(1000, double(:commit, to_hash: {}, matches_cross_reference_regex?: true)))
+      service.push_commits = Array.new(1000, double(:commit, to_hash: {}, matches_cross_reference_regex?: true))

       expect(ProcessCommitWorker).to receive(:perform_async).exactly(100).times
@ -672,8 +671,7 @@ describe GitPushService, services: true do
     end

     it "skips commits which don't include cross-references" do
-      allow(service).to receive(:push_commits)
-        .and_return([double(:commit, to_hash: {}, matches_cross_reference_regex?: false)])
+      service.push_commits = [double(:commit, to_hash: {}, matches_cross_reference_regex?: false)]

       expect(ProcessCommitWorker).not_to receive(:perform_async)
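
The stubs go away because the limiting code now reads the @push_commits instance variable directly, so stubbing the push_commits reader would no longer affect it; presumably the service exposes a push_commits writer (not shown in this diff), letting the specs assign real state instead. A hedged, self-contained RSpec sketch of that pattern using a stand-in service class:

    require 'rspec/autorun'

    class FakePushService
      PROCESS_COMMIT_LIMIT = 100

      attr_accessor :push_commits

      # Mirrors the shape of process_commit_messages: cap the loop, enqueue per commit.
      def process_commit_messages(worker)
        @push_commits.last(PROCESS_COMMIT_LIMIT).each { |commit| worker.perform_async(commit) }
      end
    end

    RSpec.describe FakePushService do
      it 'only schedules a limited number of commits' do
        service = described_class.new
        service.push_commits = Array.new(1000, :commit)

        worker = double(:worker)
        expect(worker).to receive(:perform_async).exactly(100).times

        service.process_commit_messages(worker)
      end
    end
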