Remove background migrations for old schemas

We assume that any background migration whose specs require a schema older
than 2018 has been made obsolete by this migration squash. We can therefore
remove both the specs and the code for the migrations that fail to run in CI
because the schema at that date no longer exists.

This is true for all but the MigrateStageStatus background migration,
which is also used from the MigrateBuildStage background migration.
This commit is contained in:
Nick Thomas 2019-07-01 14:37:18 +01:00
parent 876d415116
commit 351392f409
No known key found for this signature in database
GPG Key ID: 2A313A47AFADACE9
13 changed files with 0 additions and 1637 deletions

View File

@ -1,85 +0,0 @@
# frozen_string_literal: true
# rubocop:disable Style/Documentation

module Gitlab
  module BackgroundMigration
    # Backfills `fork_network_members` rows for forks-of-forks based on the
    # legacy `forked_project_links` table. A membership can only be inserted
    # once the parent project's membership exists, so the job reschedules
    # itself until every link in the ID range has been processed.
    class CreateForkNetworkMembershipsRange
      # Seconds to wait before re-running when memberships are still missing.
      RESCHEDULE_DELAY = 15

      # Minimal model pinned to the legacy table so the migration does not
      # depend on application models that may change or be removed.
      class ForkedProjectLink < ActiveRecord::Base
        self.table_name = 'forked_project_links'
      end

      # start_id/end_id - inclusive bounds on forked_project_links.id.
      def perform(start_id, end_id)
        log("Creating memberships for forks: #{start_id} - #{end_id}")

        insert_members(start_id, end_id)

        # Deeper fork levels depend on their parents' memberships, which may
        # not exist yet on this pass; requeue until nothing is missing.
        if missing_members?(start_id, end_id)
          BackgroundMigrationWorker.perform_in(RESCHEDULE_DELAY, "CreateForkNetworkMembershipsRange", [start_id, end_id])
        end
      end

      # Inserts a membership for every link in the range whose source project
      # already has a membership and whose fork does not have one yet.
      def insert_members(start_id, end_id)
        ActiveRecord::Base.connection.execute <<~INSERT_MEMBERS
          INSERT INTO fork_network_members (fork_network_id, project_id, forked_from_project_id)
          SELECT fork_network_members.fork_network_id,
                 forked_project_links.forked_to_project_id,
                 forked_project_links.forked_from_project_id
          FROM forked_project_links
          INNER JOIN fork_network_members
              ON forked_project_links.forked_from_project_id = fork_network_members.project_id
          WHERE forked_project_links.id BETWEEN #{start_id} AND #{end_id}
          AND NOT EXISTS (
            SELECT true
            FROM fork_network_members existing_members
            WHERE existing_members.project_id = forked_project_links.forked_to_project_id
          )
        INSERT_MEMBERS
      rescue ActiveRecord::RecordNotUnique => e
        # `fork_network_member` was created concurrently in another migration
        log(e.message)
      end

      # True while some link in the range still lacks a membership row. Links
      # whose source project is gone (and whose parent link's source is also
      # gone) are excluded by the EXISTS clauses: no membership can be built
      # for them.
      def missing_members?(start_id, end_id)
        count_sql = <<~MISSING_MEMBERS
          SELECT COUNT(*)
          FROM forked_project_links
          WHERE NOT EXISTS (
            SELECT true
            FROM fork_network_members
            WHERE fork_network_members.project_id = forked_project_links.forked_to_project_id
          )
          AND EXISTS (
            SELECT true
            FROM projects
            WHERE forked_project_links.forked_from_project_id = projects.id
          )
          AND NOT EXISTS (
            SELECT true
            FROM forked_project_links AS parent_links
            WHERE parent_links.forked_to_project_id = forked_project_links.forked_from_project_id
            AND NOT EXISTS (
              SELECT true
              FROM projects
              WHERE parent_links.forked_from_project_id = projects.id
            )
          )
          AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
        MISSING_MEMBERS

        ForkedProjectLink.count_by_sql(count_sql) > 0
      end

      def log(message)
        Rails.logger.info("#{self.class.name} - #{message}")
      end
    end
  end
end

View File

@ -1,13 +0,0 @@
# frozen_string_literal: true
# rubocop:disable Style/Documentation

module Gitlab
  module BackgroundMigration
    # Intentionally a no-op: the original clean-up was cancelled, but the
    # class is kept so that any jobs still queued with this name complete
    # harmlessly instead of raising.
    # See https://gitlab.com/gitlab-com/infrastructure/issues/3460#note_53223252
    class DeleteConflictingRedirectRoutesRange
      def perform(start_id, end_id)
        # Nothing to do for this range.
      end
    end
  end
end

View File

@ -1,179 +0,0 @@
# frozen_string_literal: true
# rubocop:disable Style/Documentation

module Gitlab
  module BackgroundMigration
    # Class that migrates events for the new push event payloads setup. All
    # events are copied to a shadow table, and push events will also have a row
    # created in the push_event_payloads table.
    class MigrateEventsToPushEventPayloads
      # Wrapper around a legacy `events` row, with helpers that decode the
      # serialized `data` payload of push events.
      class Event < ActiveRecord::Base
        self.table_name = 'events'

        serialize :data

        # An all-zero SHA in `data[:before]`/`data[:after]` marks a ref being
        # created or removed.
        BLANK_REF = ('0' * 40).freeze
        TAG_REF_PREFIX = 'refs/tags/'.freeze
        MAX_INDEX = 69
        PUSHED = 5

        def push_event?
          action == PUSHED && data.present?
        end

        # First line of the last commit's message, truncated to 70 characters;
        # nil when there is no commit or it has no message.
        def commit_title
          commit = commits.last

          return unless commit && commit[:message]

          index = commit[:message].index("\n")
          # [0..index] keeps the newline; strip removes it below.
          message = index ? commit[:message][0..index] : commit[:message]

          message.strip.truncate(70)
        end

        # SHA the push started from, or nil when the ref was newly created.
        def commit_from_sha
          if create?
            nil
          else
            data[:before]
          end
        end

        # SHA the push ended at, or nil when the ref was removed.
        def commit_to_sha
          if remove?
            nil
          else
            data[:after]
          end
        end

        # Deserialized payload; defaults to an empty Hash so callers need no
        # nil checks.
        def data
          super || {}
        end

        def commits
          data[:commits] || []
        end

        def commit_count
          data[:total_commits_count] || 0
        end

        def ref
          data[:ref]
        end

        # Ref name without its 'refs/tags/' (10 chars) or 'refs/heads/'
        # (11 chars) prefix.
        def trimmed_ref_name
          if ref_type == :tag
            ref[10..-1]
          else
            ref[11..-1]
          end
        end

        def create?
          data[:before] == BLANK_REF
        end

        def remove?
          data[:after] == BLANK_REF
        end

        def push_action
          if create?
            :created
          elsif remove?
            :removed
          else
            :pushed
          end
        end

        def ref_type
          if ref.start_with?(TAG_REF_PREFIX)
            :tag
          else
            :branch
          end
        end
      end

      # Shadow table that every event is replicated into.
      class EventForMigration < ActiveRecord::Base
        self.table_name = 'events_for_migration'
      end

      class PushEventPayload < ActiveRecord::Base
        self.table_name = 'push_event_payloads'

        enum action: {
          created: 0,
          removed: 1,
          pushed: 2
        }

        enum ref_type: {
          branch: 0,
          tag: 1
        }
      end

      # start_id - The start ID of the range of events to process
      # end_id - The end ID of the range to process.
      def perform(start_id, end_id)
        return unless migrate?

        find_events(start_id, end_id).each { |event| process_event(event) }
      end

      # Replicates one event (and its payload, for push events) atomically.
      def process_event(event)
        ActiveRecord::Base.transaction do
          replicate_event(event)
          create_push_event_payload(event) if event.push_event?
        end
      rescue ActiveRecord::InvalidForeignKey => e
        # A foreign key error means the associated event was removed. In this
        # case we'll just skip migrating the event.
        Rails.logger.error("Unable to migrate event #{event.id}: #{e}")
      end

      # Copies the event into the shadow table, dropping the columns the new
      # schema no longer carries.
      def replicate_event(event)
        new_attributes = event.attributes
          .with_indifferent_access.except(:title, :data)

        EventForMigration.create!(new_attributes)
      end

      def create_push_event_payload(event)
        commit_from = pack(event.commit_from_sha)
        commit_to = pack(event.commit_to_sha)

        PushEventPayload.create!(
          event_id: event.id,
          commit_count: event.commit_count,
          ref_type: event.ref_type,
          action: event.push_action,
          commit_from: commit_from,
          commit_to: commit_to,
          ref: event.trimmed_ref_name,
          commit_title: event.commit_title
        )
      end

      # Events in the range not yet copied to the shadow table, which makes
      # re-running the migration idempotent.
      def find_events(start_id, end_id)
        Event
          .where('NOT EXISTS (SELECT true FROM events_for_migration WHERE events_for_migration.id = events.id)')
          .where(id: start_id..end_id)
      end

      # Only run while all involved tables still exist.
      def migrate?
        Event.table_exists? && PushEventPayload.table_exists? &&
          EventForMigration.table_exists?
      end

      # Converts a hex SHA string to its binary form for storage; nil in,
      # nil out.
      def pack(value)
        value ? [value].pack('H*') : nil
      end
    end
  end
end

View File

@ -1,319 +0,0 @@
# frozen_string_literal: true
# rubocop:disable Metrics/MethodLength
# rubocop:disable Metrics/ClassLength
# rubocop:disable Metrics/BlockLength
# rubocop:disable Style/Documentation

module Gitlab
  module BackgroundMigration
    # Normalizes `identities.extern_uid` for LDAP identities in the given ID
    # range by parsing each DN and re-serializing it in a canonical
    # (escaped, downcased) form.
    class NormalizeLdapExternUidsRange
      class Identity < ActiveRecord::Base
        self.table_name = 'identities'
      end

      # Copied this class to make this migration resilient to future code changes.
      # And if the normalize behavior is changed in the future, it must be
      # accompanied by another migration.
      module Gitlab
        module Auth
          module LDAP
            class DN
              FormatError = Class.new(StandardError)
              MalformedError = Class.new(FormatError)
              UnsupportedError = Class.new(FormatError)

              # Normalizes a single attribute value by wrapping it in a dummy
              # one-pair DN, normalizing that, and stripping the placeholder.
              def self.normalize_value(given_value)
                dummy_dn = "placeholder=#{given_value}"
                normalized_dn = new(*dummy_dn).to_normalized_s
                normalized_dn.sub(/\Aplaceholder=/, '')
              end

              ##
              # Initialize a DN, escaping as required. Pass in attributes in name/value
              # pairs. If there is a left over argument, it will be appended to the dn
              # without escaping (useful for a base string).
              #
              # Most uses of this class will be to escape a DN, rather than to parse it,
              # so storing the dn as an escaped String and parsing parts as required
              # with a state machine seems sensible.
              def initialize(*args)
                if args.length > 1
                  initialize_array(args)
                else
                  initialize_string(args[0])
                end
              end

              ##
              # Parse a DN into key value pairs using ASN from
              # http://tools.ietf.org/html/rfc2253 section 3.
              # rubocop:disable Metrics/AbcSize
              # rubocop:disable Metrics/CyclomaticComplexity
              # rubocop:disable Metrics/PerceivedComplexity
              def each_pair
                state = :key
                key = StringIO.new
                value = StringIO.new
                hex_buffer = ""

                @dn.each_char.with_index do |char, dn_index|
                  case state
                  when :key then
                    case char
                    when 'a'..'z', 'A'..'Z' then
                      state = :key_normal
                      key << char
                    when '0'..'9' then
                      state = :key_oid
                      key << char
                    when ' ' then state = :key
                    else raise(MalformedError, "Unrecognized first character of an RDN attribute type name \"#{char}\"")
                    end
                  when :key_normal then
                    case char
                    when '=' then state = :value
                    when 'a'..'z', 'A'..'Z', '0'..'9', '-', ' ' then key << char
                    else raise(MalformedError, "Unrecognized RDN attribute type name character \"#{char}\"")
                    end
                  when :key_oid then
                    case char
                    when '=' then state = :value
                    when '0'..'9', '.', ' ' then key << char
                    else raise(MalformedError, "Unrecognized RDN OID attribute type name character \"#{char}\"")
                    end
                  when :value then
                    case char
                    when '\\' then state = :value_normal_escape
                    when '"' then state = :value_quoted
                    when ' ' then state = :value
                    when '#' then
                      state = :value_hexstring
                      value << char
                    when ',' then
                      # End of this RDN: emit the pair and reset for the next.
                      state = :key
                      yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
                      key = StringIO.new
                      value = StringIO.new
                    else
                      state = :value_normal
                      value << char
                    end
                  when :value_normal then
                    case char
                    when '\\' then state = :value_normal_escape
                    when ',' then
                      state = :key
                      yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
                      key = StringIO.new
                      value = StringIO.new
                    when '+' then raise(UnsupportedError, "Multivalued RDNs are not supported")
                    else value << char
                    end
                  when :value_normal_escape then
                    case char
                    when '0'..'9', 'a'..'f', 'A'..'F' then
                      state = :value_normal_escape_hex
                      hex_buffer = char
                    else
                      # A backslash followed by a non-hex char escapes that
                      # char literally.
                      state = :value_normal
                      value << char
                    end
                  when :value_normal_escape_hex then
                    case char
                    when '0'..'9', 'a'..'f', 'A'..'F' then
                      state = :value_normal
                      value << "#{hex_buffer}#{char}".to_i(16).chr
                    else raise(MalformedError, "Invalid escaped hex code \"\\#{hex_buffer}#{char}\"")
                    end
                  when :value_quoted then
                    case char
                    when '\\' then state = :value_quoted_escape
                    when '"' then state = :value_end
                    else value << char
                    end
                  when :value_quoted_escape then
                    case char
                    when '0'..'9', 'a'..'f', 'A'..'F' then
                      state = :value_quoted_escape_hex
                      hex_buffer = char
                    else
                      state = :value_quoted
                      value << char
                    end
                  when :value_quoted_escape_hex then
                    case char
                    when '0'..'9', 'a'..'f', 'A'..'F' then
                      state = :value_quoted
                      value << "#{hex_buffer}#{char}".to_i(16).chr
                    else raise(MalformedError, "Expected the second character of a hex pair inside a double quoted value, but got \"#{char}\"")
                    end
                  when :value_hexstring then
                    case char
                    when '0'..'9', 'a'..'f', 'A'..'F' then
                      state = :value_hexstring_hex
                      value << char
                    when ' ' then state = :value_end
                    when ',' then
                      state = :key
                      yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
                      key = StringIO.new
                      value = StringIO.new
                    else raise(MalformedError, "Expected the first character of a hex pair, but got \"#{char}\"")
                    end
                  when :value_hexstring_hex then
                    case char
                    when '0'..'9', 'a'..'f', 'A'..'F' then
                      state = :value_hexstring
                      value << char
                    else raise(MalformedError, "Expected the second character of a hex pair, but got \"#{char}\"")
                    end
                  when :value_end then
                    case char
                    when ' ' then state = :value_end
                    when ',' then
                      state = :key
                      yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
                      key = StringIO.new
                      value = StringIO.new
                    else raise(MalformedError, "Expected the end of an attribute value, but got \"#{char}\"")
                    end
                  else raise "Fell out of state machine"
                  end
                end

                # Last pair
                raise(MalformedError, 'DN string ended unexpectedly') unless
                  [:value, :value_normal, :value_hexstring, :value_end].include? state

                yield key.string.strip, rstrip_except_escaped(value.string, @dn.length)
              end

              # Strips trailing whitespace from a parsed value while keeping
              # whitespace that was escaped in the raw DN up to dn_index.
              def rstrip_except_escaped(str, dn_index)
                str_ends_with_whitespace = str.match(/\s\z/)

                if str_ends_with_whitespace
                  dn_part_ends_with_escaped_whitespace = @dn[0, dn_index].match(/\\(\s+)\z/)

                  if dn_part_ends_with_escaped_whitespace
                    dn_part_rwhitespace = dn_part_ends_with_escaped_whitespace[1]

                    num_chars_to_remove = dn_part_rwhitespace.length - 1

                    str = str[0, str.length - num_chars_to_remove]
                  else
                    str.rstrip!
                  end
                end

                str
              end

              ##
              # Returns the DN as an array in the form expected by the constructor.
              def to_a
                a = []
                self.each_pair { |key, value| a << key << value } unless @dn.empty?
                a
              end

              ##
              # Return the DN as an escaped string.
              def to_s
                @dn
              end

              ##
              # Return the DN as an escaped and normalized string.
              def to_normalized_s
                self.class.new(*to_a).to_s.downcase
              end

              # https://tools.ietf.org/html/rfc4514 section 2.4 lists these exceptions
              # for DN values. All of the following must be escaped in any normal string
              # using a single backslash ('\') as escape. The space character is left
              # out here because in a "normalized" string, spaces should only be escaped
              # if necessary (i.e. leading or trailing space).
              NORMAL_ESCAPES = [',', '+', '"', '\\', '<', '>', ';', '='].freeze

              # The following must be represented as escaped hex
              HEX_ESCAPES = {
                "\n" => '\0a',
                "\r" => '\0d'
              }.freeze

              # Compiled character class regexp using the keys from the above hash, and
              # checking for a space or # at the start, or space at the end, of the
              # string.
              ESCAPE_RE = Regexp.new("(^ |^#| $|[" +
                                     NORMAL_ESCAPES.map { |e| Regexp.escape(e) }.join +
                                     "])")

              HEX_ESCAPE_RE = Regexp.new("([" +
                                         HEX_ESCAPES.keys.map { |e| Regexp.escape(e) }.join +
                                         "])")

              ##
              # Escape a string for use in a DN value
              def self.escape(string)
                escaped = string.gsub(ESCAPE_RE) { |char| "\\" + char }
                escaped.gsub(HEX_ESCAPE_RE) { |char| HEX_ESCAPES[char] }
              end

              private

              # Builds @dn from alternating key/value arguments, escaping each
              # value (keys are appended verbatim).
              def initialize_array(args)
                buffer = StringIO.new

                args.each_with_index do |arg, index|
                  if index.even? # key
                    buffer << "," if index > 0
                    buffer << arg
                  else # value
                    buffer << "="
                    buffer << self.class.escape(arg)
                  end
                end

                @dn = buffer.string
              end

              def initialize_string(arg)
                @dn = arg.to_s
              end

              ##
              # Proxy all other requests to the string object, because a DN is mainly
              # used within the library as a string
              # rubocop:disable GitlabSecurity/PublicSend
              def method_missing(method, *args, &block)
                @dn.send(method, *args, &block)
              end

              ##
              # Redefined to be consistent with redefined `method_missing` behavior
              def respond_to?(sym, include_private = false)
                @dn.respond_to?(sym, include_private)
              end
            end
          end
        end
      end

      # start_id/end_id - inclusive bounds on identities.id. Identities whose
      # DN fails to parse are logged and skipped.
      def perform(start_id, end_id)
        return unless migrate?

        ldap_identities = Identity.where("provider like 'ldap%'").where(id: start_id..end_id)

        ldap_identities.each do |identity|
          identity.extern_uid = Gitlab::Auth::LDAP::DN.new(identity.extern_uid).to_normalized_s
          unless identity.save
            Rails.logger.info "Unable to normalize \"#{identity.extern_uid}\". Skipping."
          end
        rescue Gitlab::Auth::LDAP::DN::FormatError => e
          Rails.logger.info "Unable to normalize \"#{identity.extern_uid}\" due to \"#{e.message}\". Skipping."
        end
      end

      def migrate?
        Identity.table_exists?
      end
    end
  end
end

View File

@ -1,128 +0,0 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # This background migration is going to create all `fork_networks` and
    # the `fork_network_members` for the roots of fork networks based on the
    # existing `forked_project_links`.
    #
    # When the source of a fork is deleted, we will create the fork with the
    # target project as the root. This way, when there are forks of the target
    # project, they will be joined into the same fork network.
    #
    # When the `fork_networks` and memberships for the root projects are created
    # the `CreateForkNetworkMembershipsRange` migration is scheduled. This
    # migration will create the memberships for all remaining forks-of-forks
    class PopulateForkNetworksRange
      # start_id/end_id - inclusive bounds on forked_project_links.id.
      def perform(start_id, end_id)
        create_fork_networks_for_existing_projects(start_id, end_id)
        create_fork_networks_for_missing_projects(start_id, end_id)
        create_fork_networks_memberships_for_root_projects(start_id, end_id)

        # Hand the same range to the follow-up migration that creates the
        # memberships for forks-of-forks.
        delay = BackgroundMigration::CreateForkNetworkMembershipsRange::RESCHEDULE_DELAY
        BackgroundMigrationWorker.perform_in(
          delay, "CreateForkNetworkMembershipsRange", [start_id, end_id]
        )
      end

      # Creates a fork network rooted at each still-existing source project of
      # a first-level fork link in the range.
      def create_fork_networks_for_existing_projects(start_id, end_id)
        log("Creating fork networks: #{start_id} - #{end_id}")
        ActiveRecord::Base.connection.execute <<~INSERT_NETWORKS
          INSERT INTO fork_networks (root_project_id)
            SELECT DISTINCT forked_project_links.forked_from_project_id

            FROM forked_project_links

            -- Exclude the forks that are not the first level fork of a project
            WHERE NOT EXISTS (
              SELECT true
              FROM forked_project_links inner_links
              WHERE inner_links.forked_to_project_id = forked_project_links.forked_from_project_id
            )

            /* Exclude the ones that are already created, in case the fork network
               was already created for another fork of the project.
            */
            AND NOT EXISTS (
              SELECT true
              FROM fork_networks
              WHERE forked_project_links.forked_from_project_id = fork_networks.root_project_id
            )

            -- Only create a fork network for a root project that still exists
            AND EXISTS (
              SELECT true
              FROM projects
              WHERE projects.id = forked_project_links.forked_from_project_id
            )

            AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
        INSERT_NETWORKS
      end

      # When the root project of a fork was deleted, roots the network at the
      # fork (target project) instead.
      def create_fork_networks_for_missing_projects(start_id, end_id)
        log("Creating fork networks with missing root: #{start_id} - #{end_id}")
        ActiveRecord::Base.connection.execute <<~INSERT_NETWORKS
          INSERT INTO fork_networks (root_project_id)
            SELECT DISTINCT forked_project_links.forked_to_project_id

            FROM forked_project_links

            -- Exclude forks that are not the root forks
            WHERE NOT EXISTS (
              SELECT true
              FROM forked_project_links inner_links
              WHERE inner_links.forked_to_project_id = forked_project_links.forked_from_project_id
            )

            /* Exclude the ones that are already created, in case this migration is
               re-run
            */
            AND NOT EXISTS (
              SELECT true
              FROM fork_networks
              WHERE forked_project_links.forked_to_project_id = fork_networks.root_project_id
            )

            /* Exclude projects for which the project still exists, those are
               Processed in the previous step of this migration
            */
            AND NOT EXISTS (
              SELECT true
              FROM projects
              WHERE projects.id = forked_project_links.forked_from_project_id
            )

            AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
        INSERT_NETWORKS
      end

      # Adds the root project of each new network as its first member.
      def create_fork_networks_memberships_for_root_projects(start_id, end_id)
        log("Creating memberships for root projects: #{start_id} - #{end_id}")

        ActiveRecord::Base.connection.execute <<~INSERT_ROOT
          INSERT INTO fork_network_members (fork_network_id, project_id)
            SELECT DISTINCT fork_networks.id, fork_networks.root_project_id

            FROM fork_networks

            /* Joining both on forked_from- and forked_to- so we could create the
               memberships for forks for which the source was deleted
            */
            INNER JOIN forked_project_links
                ON forked_project_links.forked_from_project_id = fork_networks.root_project_id
                OR forked_project_links.forked_to_project_id = fork_networks.root_project_id

            WHERE NOT EXISTS (
              SELECT true
              FROM fork_network_members
              WHERE fork_network_members.project_id = fork_networks.root_project_id
            )

            AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
        INSERT_ROOT
      end

      def log(message)
        Rails.logger.info("#{self.class.name} - #{message}")
      end
    end
  end
end

View File

@ -1,33 +0,0 @@
# frozen_string_literal: true
# rubocop:disable Style/Documentation

module Gitlab
  module BackgroundMigration
    # Backfills merge_requests.latest_merge_request_diff_id with the highest
    # diff ID belonging to each merge request in the given ID range.
    class PopulateMergeRequestsLatestMergeRequestDiffId
      # Rows updated per UPDATE statement, to keep each statement small.
      BATCH_SIZE = 1_000

      class MergeRequest < ActiveRecord::Base
        self.table_name = 'merge_requests'

        include ::EachBatch
      end

      # start_id/stop_id - inclusive bounds on merge_requests.id. Only rows
      # where the column is still NULL are touched, so re-running is safe.
      def perform(start_id, stop_id)
        update = '
          latest_merge_request_diff_id = (
            SELECT MAX(id)
            FROM merge_request_diffs
            WHERE merge_requests.id = merge_request_diffs.merge_request_id
          )'.squish

        MergeRequest
          .where(id: start_id..stop_id)
          .where(latest_merge_request_diff_id: nil)
          .each_batch(of: BATCH_SIZE) do |relation|
            relation.update_all(update)
          end
      end
    end
  end
end

View File

@ -1,125 +0,0 @@
require 'spec_helper'

# Spec for the background migration that backfills fork_network_members for
# forks-of-forks. Pinned to a 2017 schema where forked_project_links exists.
describe Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange, :migration, schema: 20170929131201 do
  let(:migration) { described_class.new }
  let(:projects) { table(:projects) }

  # Two independent fork trees rooted at base1 and base2.
  let(:base1) { projects.create }
  let(:base1_fork1) { projects.create }
  let(:base1_fork2) { projects.create }

  let(:base2) { projects.create }
  let(:base2_fork1) { projects.create }
  let(:base2_fork2) { projects.create }

  let(:fork_of_fork) { projects.create }
  let(:fork_of_fork2) { projects.create }

  let(:second_level_fork) { projects.create }
  let(:third_level_fork) { projects.create }

  let(:fork_network1) { fork_networks.find_by(root_project_id: base1.id) }
  let(:fork_network2) { fork_networks.find_by(root_project_id: base2.id) }

  let!(:forked_project_links) { table(:forked_project_links) }
  let!(:fork_networks) { table(:fork_networks) }
  let!(:fork_network_members) { table(:fork_network_members) }

  before do
    # The fork-network relation created for the forked project
    fork_networks.create(id: 1, root_project_id: base1.id)
    fork_network_members.create(project_id: base1.id, fork_network_id: 1)

    fork_networks.create(id: 2, root_project_id: base2.id)
    fork_network_members.create(project_id: base2.id, fork_network_id: 2)

    # Normal fork links
    forked_project_links.create(id: 1, forked_from_project_id: base1.id, forked_to_project_id: base1_fork1.id)
    forked_project_links.create(id: 2, forked_from_project_id: base1.id, forked_to_project_id: base1_fork2.id)
    forked_project_links.create(id: 3, forked_from_project_id: base2.id, forked_to_project_id: base2_fork1.id)
    forked_project_links.create(id: 4, forked_from_project_id: base2.id, forked_to_project_id: base2_fork2.id)

    # Fork links
    forked_project_links.create(id: 5, forked_from_project_id: base1_fork1.id, forked_to_project_id: fork_of_fork.id)
    forked_project_links.create(id: 6, forked_from_project_id: base1_fork1.id, forked_to_project_id: fork_of_fork2.id)

    # Forks 3 levels down
    forked_project_links.create(id: 7, forked_from_project_id: fork_of_fork.id, forked_to_project_id: second_level_fork.id)
    forked_project_links.create(id: 8, forked_from_project_id: second_level_fork.id, forked_to_project_id: third_level_fork.id)

    migration.perform(1, 8)
  end

  it 'creates a memberships for the direct forks' do
    base1_fork1_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
                                                          project_id: base1_fork1.id)
    base1_fork2_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
                                                          project_id: base1_fork2.id)
    base2_fork1_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
                                                          project_id: base2_fork1.id)
    base2_fork2_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
                                                          project_id: base2_fork2.id)

    expect(base1_fork1_membership.forked_from_project_id).to eq(base1.id)
    expect(base1_fork2_membership.forked_from_project_id).to eq(base1.id)
    expect(base2_fork1_membership.forked_from_project_id).to eq(base2.id)
    expect(base2_fork2_membership.forked_from_project_id).to eq(base2.id)
  end

  it 'adds the fork network members for forks of forks' do
    fork_of_fork_membership = fork_network_members.find_by(project_id: fork_of_fork.id,
                                                           fork_network_id: fork_network1.id)
    fork_of_fork2_membership = fork_network_members.find_by(project_id: fork_of_fork2.id,
                                                            fork_network_id: fork_network1.id)
    second_level_fork_membership = fork_network_members.find_by(project_id: second_level_fork.id,
                                                                fork_network_id: fork_network1.id)
    third_level_fork_membership = fork_network_members.find_by(project_id: third_level_fork.id,
                                                               fork_network_id: fork_network1.id)

    expect(fork_of_fork_membership.forked_from_project_id).to eq(base1_fork1.id)
    expect(fork_of_fork2_membership.forked_from_project_id).to eq(base1_fork1.id)
    expect(second_level_fork_membership.forked_from_project_id).to eq(fork_of_fork.id)
    expect(third_level_fork_membership.forked_from_project_id).to eq(second_level_fork.id)
  end

  it 'reschedules itself when there are missing members' do
    allow(migration).to receive(:missing_members?).and_return(true)

    expect(BackgroundMigrationWorker)
      .to receive(:perform_in).with(described_class::RESCHEDULE_DELAY, "CreateForkNetworkMembershipsRange", [1, 3])

    migration.perform(1, 3)
  end

  it 'can be repeated without effect' do
    expect { fork_network_members.count }.not_to change { migration.perform(1, 7) }
  end

  it 'knows it is finished for this range' do
    expect(migration.missing_members?(1, 8)).to be_falsy
  end

  it 'does not miss members for forks of forks for which the root was deleted' do
    forked_project_links.create(id: 9, forked_from_project_id: base1_fork1.id, forked_to_project_id: projects.create.id)
    base1.destroy

    expect(migration.missing_members?(7, 10)).to be_falsy
  end

  context 'with more forks' do
    before do
      forked_project_links.create(id: 9, forked_from_project_id: fork_of_fork.id, forked_to_project_id: projects.create.id)
      forked_project_links.create(id: 10, forked_from_project_id: fork_of_fork.id, forked_to_project_id: projects.create.id)
    end

    it 'only processes a single batch of links at a time' do
      expect(fork_network_members.count).to eq(10)

      migration.perform(8, 10)

      expect(fork_network_members.count).to eq(12)
    end

    it 'knows when not all memberships within a batch have been created' do
      expect(migration.missing_members?(8, 10)).to be_truthy
    end
  end
end

View File

@ -1,35 +0,0 @@
require 'spec_helper'

# Spec documenting that this migration was turned into a no-op: the
# conflicting redirect routes set up below must all survive a perform call.
describe Gitlab::BackgroundMigration::DeleteConflictingRedirectRoutesRange, :migration, schema: 20170907170235 do
  let!(:redirect_routes) { table(:redirect_routes) }
  let!(:routes) { table(:routes) }

  before do
    routes.create!(id: 1, source_id: 1, source_type: 'Namespace', path: 'foo1')
    routes.create!(id: 2, source_id: 2, source_type: 'Namespace', path: 'foo2')
    routes.create!(id: 3, source_id: 3, source_type: 'Namespace', path: 'foo3')
    routes.create!(id: 4, source_id: 4, source_type: 'Namespace', path: 'foo4')
    routes.create!(id: 5, source_id: 5, source_type: 'Namespace', path: 'foo5')

    # Valid redirects
    redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'bar')
    redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'bar2')
    redirect_routes.create!(source_id: 2, source_type: 'Namespace', path: 'bar3')

    # Conflicting redirects
    redirect_routes.create!(source_id: 2, source_type: 'Namespace', path: 'foo1')
    redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo2')
    redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo3')
    redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo4')
    redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo5')
  end

  # No-op. See https://gitlab.com/gitlab-com/infrastructure/issues/3460#note_53223252
  it 'NO-OP: does not delete any redirect_routes' do
    expect(redirect_routes.count).to eq(8)

    described_class.new.perform(1, 5)

    expect(redirect_routes.count).to eq(8)
  end
end

View File

@ -1,433 +0,0 @@
require 'spec_helper'

# rubocop:disable RSpec/FactoriesInMigrationSpecs

# Unit-level spec for the Event wrapper's payload-decoding helpers; each
# example builds a bare Event and stubs only the data it needs.
describe Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads::Event, :migration, schema: 20170608152748 do
  describe '#commit_title' do
    it 'returns nil when there are no commits' do
      expect(described_class.new.commit_title).to be_nil
    end

    it 'returns nil when there are commits without commit messages' do
      event = described_class.new

      allow(event).to receive(:commits).and_return([{ id: '123' }])

      expect(event.commit_title).to be_nil
    end

    it 'returns the commit message when it is less than 70 characters long' do
      event = described_class.new

      allow(event).to receive(:commits).and_return([{ message: 'Hello world' }])

      expect(event.commit_title).to eq('Hello world')
    end

    it 'returns the first line of a commit message if multiple lines are present' do
      event = described_class.new

      allow(event).to receive(:commits).and_return([{ message: "Hello\n\nworld" }])

      expect(event.commit_title).to eq('Hello')
    end

    it 'truncates the commit to 70 characters when it is too long' do
      event = described_class.new

      allow(event).to receive(:commits).and_return([{ message: 'a' * 100 }])

      # truncate(70) keeps 67 chars and appends '...'
      expect(event.commit_title).to eq(('a' * 67) + '...')
    end
  end

  describe '#commit_from_sha' do
    it 'returns nil when pushing to a new ref' do
      event = described_class.new

      allow(event).to receive(:create?).and_return(true)

      expect(event.commit_from_sha).to be_nil
    end

    it 'returns the ID of the first commit when pushing to an existing ref' do
      event = described_class.new

      allow(event).to receive(:create?).and_return(false)
      allow(event).to receive(:data).and_return(before: '123')

      expect(event.commit_from_sha).to eq('123')
    end
  end

  describe '#commit_to_sha' do
    it 'returns nil when removing an existing ref' do
      event = described_class.new

      allow(event).to receive(:remove?).and_return(true)

      expect(event.commit_to_sha).to be_nil
    end

    it 'returns the ID of the last commit when pushing to an existing ref' do
      event = described_class.new

      allow(event).to receive(:remove?).and_return(false)
      allow(event).to receive(:data).and_return(after: '123')

      expect(event.commit_to_sha).to eq('123')
    end
  end

  describe '#data' do
    it 'returns the deserialized data' do
      event = described_class.new(data: { before: '123' })

      expect(event.data).to eq(before: '123')
    end

    it 'returns an empty hash when no data is present' do
      event = described_class.new

      expect(event.data).to eq({})
    end
  end

  describe '#commits' do
    it 'returns an Array of commits' do
      event = described_class.new(data: { commits: [{ id: '123' }] })

      expect(event.commits).to eq([{ id: '123' }])
    end

    it 'returns an empty array when no data is present' do
      event = described_class.new

      expect(event.commits).to eq([])
    end
  end

  describe '#commit_count' do
    it 'returns the number of commits' do
      event = described_class.new(data: { total_commits_count: 2 })

      expect(event.commit_count).to eq(2)
    end

    it 'returns 0 when no data is present' do
      event = described_class.new

      expect(event.commit_count).to eq(0)
    end
  end

  describe '#ref' do
    it 'returns the name of the ref' do
      event = described_class.new(data: { ref: 'refs/heads/master' })

      expect(event.ref).to eq('refs/heads/master')
    end
  end

  describe '#trimmed_ref_name' do
    it 'returns the trimmed ref name for a branch' do
      event = described_class.new(data: { ref: 'refs/heads/master' })

      expect(event.trimmed_ref_name).to eq('master')
    end

    it 'returns the trimmed ref name for a tag' do
      event = described_class.new(data: { ref: 'refs/tags/v1.2' })

      expect(event.trimmed_ref_name).to eq('v1.2')
    end
  end

  describe '#create?' do
    it 'returns true when creating a new ref' do
      event = described_class.new(data: { before: described_class::BLANK_REF })

      expect(event.create?).to eq(true)
    end

    it 'returns false when pushing to an existing ref' do
      event = described_class.new(data: { before: '123' })

      expect(event.create?).to eq(false)
    end
  end

  describe '#remove?' do
    it 'returns true when removing an existing ref' do
      event = described_class.new(data: { after: described_class::BLANK_REF })

      expect(event.remove?).to eq(true)
    end

    it 'returns false when pushing to an existing ref' do
      event = described_class.new(data: { after: '123' })

      expect(event.remove?).to eq(false)
    end
  end

  describe '#push_action' do
    let(:event) { described_class.new }

    it 'returns :created when creating a new ref' do
      allow(event).to receive(:create?).and_return(true)

      expect(event.push_action).to eq(:created)
    end

    it 'returns :removed when removing an existing ref' do
      allow(event).to receive(:create?).and_return(false)
      allow(event).to receive(:remove?).and_return(true)

      expect(event.push_action).to eq(:removed)
    end

    it 'returns :pushed when pushing to an existing ref' do
      allow(event).to receive(:create?).and_return(false)
      allow(event).to receive(:remove?).and_return(false)

      expect(event.push_action).to eq(:pushed)
    end
  end

  describe '#ref_type' do
    let(:event) { described_class.new }

    it 'returns :tag for a tag' do
      allow(event).to receive(:ref).and_return('refs/tags/1.2')

      expect(event.ref_type).to eq(:tag)
    end

    it 'returns :branch for a branch' do
      allow(event).to receive(:ref).and_return('refs/heads/1.2')

      expect(event.ref_type).to eq(:branch)
    end
  end
end
##
# The background migration relies on a temporary table, hence we're migrating
# to a specific version of the database where said table is still present.
#
describe Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads, :migration, schema: 20170825154015 do
  let(:migration) { described_class.new }

  # NOTE(review): a duplicate `let(:user_class)` that built an anonymous
  # `ActiveRecord::Base` subclass was removed here. RSpec lets a later `let`
  # silently override an earlier one with the same name, so only this
  # definition was ever in effect — the earlier one was dead code.
  let(:user_class) { table(:users) }

  let(:author) { build(:user).becomes(user_class).tap(&:save!).becomes(User) }
  let(:namespace) { create(:namespace, owner: author) }
  let(:projects) { table(:projects) }
  let(:project) { projects.create(namespace_id: namespace.id, creator_id: author.id) }

  # We can not rely on FactoryBot as the state of Event may change in ways that
  # the background migration does not expect, hence we use the Event class of
  # the migration itself.
  def create_push_event(project, author, data = nil)
    klass = Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads::Event

    klass.create!(
      action: klass::PUSHED,
      project_id: project.id,
      author_id: author.id,
      data: data
    )
  end

  describe '#perform' do
    it 'returns if data should not be migrated' do
      allow(migration).to receive(:migrate?).and_return(false)

      expect(migration).not_to receive(:find_events)

      migration.perform(1, 10)
    end

    it 'migrates the range of events if data is to be migrated' do
      event1 = create_push_event(project, author, { commits: [] })
      event2 = create_push_event(project, author, { commits: [] })

      allow(migration).to receive(:migrate?).and_return(true)

      expect(migration).to receive(:process_event).twice

      migration.perform(event1.id, event2.id)
    end
  end

  describe '#process_event' do
    it 'processes a regular event' do
      event = double(:event, push_event?: false)

      expect(migration).to receive(:replicate_event)
      expect(migration).not_to receive(:create_push_event_payload)

      migration.process_event(event)
    end

    it 'processes a push event' do
      event = double(:event, push_event?: true)

      expect(migration).to receive(:replicate_event)
      expect(migration).to receive(:create_push_event_payload)

      migration.process_event(event)
    end

    it 'handles an error gracefully' do
      event1 = create_push_event(project, author, { commits: [] })

      expect(migration).to receive(:replicate_event).and_call_original
      expect(migration).to receive(:create_push_event_payload).and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')

      migration.process_event(event1)

      # A foreign-key failure must roll back the replicated row as well.
      expect(described_class::EventForMigration.all.count).to eq(0)
    end
  end

  describe '#replicate_event' do
    it 'replicates the event to the "events_for_migration" table' do
      event = create_push_event(
        project,
        author,
        data: { commits: [] },
        title: 'bla'
      )

      # `title` and `data` are not replicated, hence they are stripped from
      # the attribute hash before the copy is created.
      attributes = event
        .attributes.with_indifferent_access.except(:title, :data)

      expect(described_class::EventForMigration)
        .to receive(:create!)
        .with(attributes)

      migration.replicate_event(event)
    end
  end

  describe '#create_push_event_payload' do
    let(:push_data) do
      {
        commits: [],
        ref: 'refs/heads/master',
        before: '156e0e9adc587a383a7eeb5b21ddecb9044768a8',
        after: '0' * 40,
        total_commits_count: 1
      }
    end

    let(:event) do
      create_push_event(project, author, push_data)
    end

    before do
      # The foreign key in push_event_payloads at this point points to the
      # "events_for_migration" table so we need to make sure a row exists in
      # said table.
      migration.replicate_event(event)
    end

    it 'creates a push event payload for an event' do
      payload = migration.create_push_event_payload(event)

      expect(PushEventPayload.count).to eq(1)
      expect(payload.valid?).to eq(true)
    end

    it 'does not create push event payloads for removed events' do
      allow(event).to receive(:id).and_return(-1)

      expect { migration.create_push_event_payload(event) }.to raise_error(ActiveRecord::InvalidForeignKey)

      expect(PushEventPayload.count).to eq(0)
    end

    it 'encodes and decodes the commit IDs from and to binary data' do
      payload = migration.create_push_event_payload(event)
      packed = migration.pack(push_data[:before])

      expect(payload.commit_from).to eq(packed)
      # The all-zero "after" SHA is stored as NULL rather than packed bytes.
      expect(payload.commit_to).to be_nil
    end
  end

  describe '#find_events' do
    it 'returns the events for the given ID range' do
      event1 = create_push_event(project, author, { commits: [] })
      event2 = create_push_event(project, author, { commits: [] })
      event3 = create_push_event(project, author, { commits: [] })
      events = migration.find_events(event1.id, event2.id)

      expect(events.length).to eq(2)
      expect(events.pluck(:id)).not_to include(event3.id)
    end
  end

  describe '#migrate?' do
    # Migration only proceeds when all three tables involved exist.
    it 'returns true when data should be migrated' do
      allow(described_class::Event)
        .to receive(:table_exists?).and_return(true)

      allow(described_class::PushEventPayload)
        .to receive(:table_exists?).and_return(true)

      allow(described_class::EventForMigration)
        .to receive(:table_exists?).and_return(true)

      expect(migration.migrate?).to eq(true)
    end

    it 'returns false if the "events" table does not exist' do
      allow(described_class::Event)
        .to receive(:table_exists?).and_return(false)

      expect(migration.migrate?).to eq(false)
    end

    it 'returns false if the "push_event_payloads" table does not exist' do
      allow(described_class::Event)
        .to receive(:table_exists?).and_return(true)

      allow(described_class::PushEventPayload)
        .to receive(:table_exists?).and_return(false)

      expect(migration.migrate?).to eq(false)
    end

    it 'returns false when the "events_for_migration" table does not exist' do
      allow(described_class::Event)
        .to receive(:table_exists?).and_return(true)

      allow(described_class::PushEventPayload)
        .to receive(:table_exists?).and_return(true)

      allow(described_class::EventForMigration)
        .to receive(:table_exists?).and_return(false)

      expect(migration.migrate?).to eq(false)
    end
  end

  describe '#pack' do
    it 'packs a SHA1 into a 20 byte binary string' do
      packed = migration.pack('156e0e9adc587a383a7eeb5b21ddecb9044768a8')

      expect(packed.bytesize).to eq(20)
    end

    it 'returns nil if the input value is nil' do
      expect(migration.pack(nil)).to be_nil
    end
  end
end
# rubocop:enable RSpec/FactoriesInMigrationSpecs

View File

@ -1,92 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::BackgroundMigration::MigrateStageStatus, :migration, schema: 20170711145320 do
  let(:projects) { table(:projects) }
  let(:pipelines) { table(:ci_pipelines) }
  let(:stages) { table(:ci_stages) }
  let(:jobs) { table(:ci_builds) }

  # Integer status values as persisted by the migration.
  let(:statuses) do
    {
      created: 0,
      pending: 1,
      running: 2,
      success: 3,
      failed: 4,
      canceled: 5,
      skipped: 6,
      manual: 7
    }
  end

  before do
    projects.create!(id: 1, name: 'gitlab1', path: 'gitlab1')
    pipelines.create!(id: 1, project_id: 1, ref: 'master', sha: 'adf43c3a')
    # Stages start with a NULL status; the migration is expected to fill it in.
    stages.create!(id: 1, pipeline_id: 1, project_id: 1, name: 'test', status: nil)
    stages.create!(id: 2, pipeline_id: 1, project_id: 1, name: 'deploy', status: nil)
  end

  context 'when stage status is known' do
    before do
      create_job(project: 1, pipeline: 1, stage: 'test', status: 'success')
      create_job(project: 1, pipeline: 1, stage: 'test', status: 'running')
      create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'failed')
    end

    it 'sets a correct stage status' do
      described_class.new.perform(1, 2)

      expect(stages.first.status).to eq statuses[:running]
      expect(stages.second.status).to eq statuses[:failed]
    end
  end

  context 'when stage status is not known' do
    it 'sets a skipped stage status' do
      described_class.new.perform(1, 2)

      expect(stages.first.status).to eq statuses[:skipped]
      expect(stages.second.status).to eq statuses[:skipped]
    end
  end

  context 'when stage status includes status of a retried job' do
    before do
      create_job(project: 1, pipeline: 1, stage: 'test', status: 'canceled')
      create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'failed', retried: true)
      create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'success')
    end

    it 'sets a correct stage status' do
      described_class.new.perform(1, 2)

      # Retried jobs must be ignored when computing the stage status.
      expect(stages.first.status).to eq statuses[:canceled]
      expect(stages.second.status).to eq statuses[:success]
    end
  end

  context 'when some job in the stage is blocked / manual' do
    before do
      create_job(project: 1, pipeline: 1, stage: 'test', status: 'failed')
      create_job(project: 1, pipeline: 1, stage: 'test', status: 'manual')
      create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'success', when: 'manual')
    end

    it 'sets a correct stage status' do
      described_class.new.perform(1, 2)

      expect(stages.first.status).to eq statuses[:manual]
      expect(stages.second.status).to eq statuses[:success]
    end
  end

  # Inserts a ci_builds row for the given stage.
  # The local was renamed from `stages` to `stage_indexes` — the original
  # name shadowed the `stages` table helper defined via `let` above.
  def create_job(project:, pipeline:, stage:, status:, **opts)
    stage_indexes = { test: 1, build: 2, deploy: 3 }

    jobs.create!(project_id: project, commit_id: pipeline,
                 stage_idx: stage_indexes[stage.to_sym], stage: stage,
                 status: status, **opts)
  end
end

View File

@ -1,36 +0,0 @@
require 'spec_helper'
describe Gitlab::BackgroundMigration::NormalizeLdapExternUidsRange, :migration, schema: 20170921101004 do
  let!(:identities) { table(:identities) }

  before do
    # LDAP identities
    (1..4).each do |i|
      identities.create!(id: i, provider: 'ldapmain', extern_uid: " uid = foo #{i}, ou = People, dc = example, dc = com ", user_id: i)
    end

    # Non-LDAP identity
    identities.create!(id: 5, provider: 'foo', extern_uid: " uid = foo 5, ou = People, dc = example, dc = com ", user_id: 5)

    # Another LDAP identity
    identities.create!(id: 6, provider: 'ldapmain', extern_uid: " uid = foo 6, ou = People, dc = example, dc = com ", user_id: 6)
  end

  it 'normalizes the LDAP identities in the range' do
    described_class.new.perform(1, 3)

    # Only IDs 1..3 are normalized by the first batch.
    after_first_batch = {
      1 => "uid=foo 1,ou=people,dc=example,dc=com",
      2 => "uid=foo 2,ou=people,dc=example,dc=com",
      3 => "uid=foo 3,ou=people,dc=example,dc=com",
      4 => " uid = foo 4, ou = People, dc = example, dc = com ",
      5 => " uid = foo 5, ou = People, dc = example, dc = com ",
      6 => " uid = foo 6, ou = People, dc = example, dc = com "
    }

    after_first_batch.each do |id, extern_uid|
      expect(identities.find(id).extern_uid).to eq(extern_uid)
    end

    described_class.new.perform(4, 6)

    # The second batch normalizes the remaining LDAP identities, leaving the
    # non-LDAP identity (ID 5) untouched.
    after_second_batch = {
      1 => "uid=foo 1,ou=people,dc=example,dc=com",
      2 => "uid=foo 2,ou=people,dc=example,dc=com",
      3 => "uid=foo 3,ou=people,dc=example,dc=com",
      4 => "uid=foo 4,ou=people,dc=example,dc=com",
      5 => " uid = foo 5, ou = People, dc = example, dc = com ",
      6 => "uid=foo 6,ou=people,dc=example,dc=com"
    }

    after_second_batch.each do |id, extern_uid|
      expect(identities.find(id).extern_uid).to eq(extern_uid)
    end
  end
end

View File

@ -1,97 +0,0 @@
require 'spec_helper'
describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, schema: 20170929131201 do
  let(:migration) { described_class.new }
  let(:projects) { table(:projects) }

  let(:base1) { projects.create }
  let(:base2) { projects.create }
  let(:base2_fork1) { projects.create }

  let!(:forked_project_links) { table(:forked_project_links) }
  let!(:fork_networks) { table(:fork_networks) }
  let!(:fork_network_members) { table(:fork_network_members) }

  # These lookups are lazy: the networks only exist once `migration.perform`
  # has run in the `before` block below.
  let(:fork_network1) { fork_networks.find_by(root_project_id: base1.id) }
  let(:fork_network2) { fork_networks.find_by(root_project_id: base2.id) }

  before do
    # A normal fork link
    forked_project_links.create(id: 1,
                                forked_from_project_id: base1.id,
                                forked_to_project_id: projects.create.id)
    forked_project_links.create(id: 2,
                                forked_from_project_id: base1.id,
                                forked_to_project_id: projects.create.id)
    forked_project_links.create(id: 3,
                                forked_from_project_id: base2.id,
                                forked_to_project_id: base2_fork1.id)

    # create a fork of a fork
    forked_project_links.create(id: 4,
                                forked_from_project_id: base2_fork1.id,
                                forked_to_project_id: projects.create.id)
    forked_project_links.create(id: 5,
                                forked_from_project_id: projects.create.id,
                                forked_to_project_id: projects.create.id)

    # Stub out the calls to the other migrations
    allow(BackgroundMigrationWorker).to receive(:perform_in)

    # Process only links 1-3; several examples below re-run perform with
    # other ranges to exercise batching behaviour.
    migration.perform(1, 3)
  end

  it 'creates the fork network' do
    expect(fork_network1).not_to be_nil
    expect(fork_network2).not_to be_nil
  end

  it 'does not create a fork network for a fork-of-fork' do
    # perform the entire batch
    migration.perform(1, 5)

    expect(fork_networks.find_by(root_project_id: base2_fork1.id)).to be_nil
  end

  it 'creates memberships for the root of fork networks' do
    base1_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
                                                    project_id: base1.id)
    base2_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
                                                    project_id: base2.id)

    expect(base1_membership).not_to be_nil
    expect(base2_membership).not_to be_nil
  end

  it 'creates a fork network for the fork of which the source was deleted' do
    fork = projects.create
    # 99999 deliberately references a project that does not exist.
    forked_project_links.create(id: 6, forked_from_project_id: 99999, forked_to_project_id: fork.id)

    migration.perform(5, 8)

    expect(fork_networks.find_by(root_project_id: 99999)).to be_nil
    expect(fork_networks.find_by(root_project_id: fork.id)).not_to be_nil
    expect(fork_network_members.find_by(project_id: fork.id)).not_to be_nil
  end

  it 'schedules a job for inserting memberships for forks-of-forks' do
    delay = Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange::RESCHEDULE_DELAY

    expect(BackgroundMigrationWorker)
      .to receive(:perform_in).with(delay, "CreateForkNetworkMembershipsRange", [1, 3])

    migration.perform(1, 3)
  end

  it 'only processes a single batch of links at a time' do
    expect(fork_networks.count).to eq(2)

    migration.perform(3, 5)

    expect(fork_networks.count).to eq(3)
  end

  it 'can be repeated without effect' do
    expect { migration.perform(1, 3) }.not_to change { fork_network_members.count }
  end
end

View File

@ -1,62 +0,0 @@
require 'spec_helper'
describe Gitlab::BackgroundMigration::PopulateMergeRequestsLatestMergeRequestDiffId, :migration, schema: 20171026082505 do
  let(:projects_table) { table(:projects) }
  let(:merge_requests_table) { table(:merge_requests) }
  let(:merge_request_diffs_table) { table(:merge_request_diffs) }

  let(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce') }

  # Creates a merge request titled `name` with `diffs` empty diff rows.
  def create_mr!(name, diffs: 0)
    merge_request =
      merge_requests_table.create!(target_project_id: project.id,
                                   target_branch: 'master',
                                   source_project_id: project.id,
                                   source_branch: name,
                                   title: name)

    diffs.times do
      merge_request_diffs_table.create!(merge_request_id: merge_request.id)
    end

    merge_request
  end

  # All diff rows belonging to the given merge request.
  def diffs_for(merge_request)
    merge_request_diffs_table.where(merge_request_id: merge_request.id)
  end

  describe '#perform' do
    it 'ignores MRs without diffs' do
      merge_request_without_diff = create_mr!('without_diff')
      mr_id = merge_request_without_diff.id

      expect(merge_request_without_diff.latest_merge_request_diff_id).to be_nil

      expect { subject.perform(mr_id, mr_id) }
        .not_to change { merge_request_without_diff.reload.latest_merge_request_diff_id }
    end

    it 'ignores MRs that have a diff ID already set' do
      merge_request_with_multiple_diffs = create_mr!('with_multiple_diffs', diffs: 3)
      diff_id = diffs_for(merge_request_with_multiple_diffs).minimum(:id)
      mr_id = merge_request_with_multiple_diffs.id

      merge_request_with_multiple_diffs.update!(latest_merge_request_diff_id: diff_id)

      expect { subject.perform(mr_id, mr_id) }
        .not_to change { merge_request_with_multiple_diffs.reload.latest_merge_request_diff_id }
    end

    it 'migrates multiple MR diffs to the correct values' do
      # `Array.new(3) { |i| ... }` yields each index directly; the previous
      # `Array.new(3).map.with_index` first built a throwaway array of nils.
      merge_requests = Array.new(3) { |i| create_mr!(i, diffs: 3) }

      subject.perform(merge_requests.first.id, merge_requests.last.id)

      merge_requests.each do |merge_request|
        expect(merge_request.reload.latest_merge_request_diff_id)
          .to eq(diffs_for(merge_request).maximum(:id))
      end
    end
  end
end