Revert "Trigger iid logic from GitHub importer for milestones."

This reverts commit 358675d09f.
This commit is contained in:
Andreas Brandl 2019-01-02 16:05:40 +01:00
parent fda30abdb2
commit 852e68fd95
No known key found for this signature in database
GPG key ID: F25982B13FEE55DA
5 changed files with 6 additions and 42 deletions

View file

@@ -111,7 +111,7 @@ class InternalId < ActiveRecord::Base
# Generates next internal id and returns it
def generate
InternalId.transaction do
subject.transaction do
# Create a record in internal_ids if one does not yet exist
# and increment its last value
#
@@ -125,7 +125,7 @@ class InternalId < ActiveRecord::Base
#
# Note this will acquire a ROW SHARE lock on the InternalId record
def track_greatest(new_value)
InternalId.transaction do
subject.transaction do
(lookup || create_record).track_greatest_and_save!(new_value)
end
end
@@ -148,7 +148,7 @@ class InternalId < ActiveRecord::Base
# violation. We can safely roll-back the nested transaction and perform
# a lookup instead to retrieve the record.
def create_record
InternalId.transaction(requires_new: true) do
subject.transaction(requires_new: true) do
InternalId.create!(
**scope,
usage: usage_value,

View file

@@ -15,12 +15,10 @@ module Gitlab
end
# Bulk inserts the given rows into the database.
def bulk_insert(model, rows, batch_size: 100, pre_hook: nil)
def bulk_insert(model, rows, batch_size: 100)
rows.each_slice(batch_size) do |slice|
pre_hook.call(slice) if pre_hook
Gitlab::Database.bulk_insert(model.table_name, slice)
end
rows
end
end
end

View file

@@ -19,20 +19,10 @@ module Gitlab
# rubocop: enable CodeReuse/ActiveRecord
def execute
# We insert records in bulk, by-passing any standard model callbacks.
# The pre_hook here makes sure we track internal ids consistently.
# Note this has to be called before performing an insert of a batch
# because we're outside a transaction scope here.
bulk_insert(Milestone, build_milestones, pre_hook: method(:track_greatest_iid))
bulk_insert(Milestone, build_milestones)
build_milestones_cache
end
def track_greatest_iid(slice)
greatest_iid = slice.max { |e| e[:iid] }[:iid]
InternalId.track_greatest(nil, { project: project }, :milestones, greatest_iid, ->(_) { project.milestones.maximum(:iid) })
end
def build_milestones
build_database_rows(each_milestone)
end

View file

@@ -58,17 +58,5 @@ describe Gitlab::GithubImport::BulkImporting do
importer.bulk_insert(model, rows, batch_size: 5)
end
it 'calls pre_hook for each slice if given' do
rows = [{ title: 'Foo' }] * 10
model = double(:model, table_name: 'kittens')
pre_hook = double('pre_hook', call: nil)
allow(Gitlab::Database).to receive(:bulk_insert)
expect(pre_hook).to receive(:call).with(rows[0..4])
expect(pre_hook).to receive(:call).with(rows[5..9])
importer.bulk_insert(model, rows, batch_size: 5, pre_hook: pre_hook)
end
end
end

View file

@@ -29,25 +29,13 @@ describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab_redis_cache do
expect(importer)
.to receive(:bulk_insert)
.with(Milestone, [milestone_hash], any_args)
.with(Milestone, [milestone_hash])
expect(importer)
.to receive(:build_milestones_cache)
importer.execute
end
it 'tracks internal ids' do
milestone_hash = { iid: 1, title: '1.0', project_id: project.id }
allow(importer)
.to receive(:build_milestones)
.and_return([milestone_hash])
expect(InternalId).to receive(:track_greatest)
.with(nil, { project: project }, :milestones, 1, any_args)
importer.execute
end
end
describe '#build_milestones' do