Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-04-05 03:08:30 +00:00
parent 4fb352155b
commit 5a9763751b
39 changed files with 1391 additions and 91 deletions

View File

@ -7,7 +7,6 @@ import {
GlDatepicker,
GlLink,
GlSprintf,
GlButton,
GlFormInput,
} from '@gitlab/ui';
import { sprintf } from '~/locale';
@ -41,7 +40,6 @@ export default {
GlDropdown,
GlDropdownItem,
GlSprintf,
GlButton,
GlFormInput,
ContentTransition,
},
@ -104,6 +102,11 @@ export default {
required: false,
default: INVITE_BUTTON_TEXT,
},
cancelButtonText: {
type: String,
required: false,
default: CANCEL_BUTTON_TEXT,
},
currentSlot: {
type: String,
required: false,
@ -114,6 +117,11 @@ export default {
required: false,
default: () => [],
},
preventCancelDefault: {
type: Boolean,
required: false,
default: false,
},
},
data() {
// Be sure to check out reset!
@ -141,6 +149,22 @@ export default {
contentSlots() {
return [...DEFAULT_SLOTS, ...(this.extraSlots || [])];
},
actionPrimary() {
return {
text: this.submitButtonText,
attributes: {
variant: 'confirm',
disabled: this.submitDisabled,
loading: this.isLoading,
'data-qa-selector': 'invite_button',
},
};
},
actionCancel() {
return {
text: this.cancelButtonText,
};
},
},
watch: {
selectedAccessLevel: {
@ -151,7 +175,7 @@ export default {
},
},
methods: {
reset() {
onReset() {
// This component isn't necessarily disposed,
// so we might need to reset its state.
this.selectedAccessLevel = this.defaultAccessLevel;
@ -159,14 +183,23 @@ export default {
this.$emit('reset');
},
closeModal() {
this.reset();
this.$refs.modal.hide();
onCloseModal(e) {
if (this.preventCancelDefault) {
e.preventDefault();
} else {
this.onReset();
this.$refs.modal.hide();
}
this.$emit('cancel');
},
changeSelectedItem(item) {
this.selectedAccessLevel = item;
},
submit() {
onSubmit(e) {
// We never want to hide when submitting
e.preventDefault();
this.$emit('submit', {
accessLevel: this.selectedAccessLevel,
expiresAt: this.selectedDate,
@ -192,9 +225,11 @@ export default {
size="sm"
:title="modalTitle"
:header-close-label="$options.HEADER_CLOSE_LABEL"
@hidden="reset"
@close="reset"
@hide="reset"
:action-primary="actionPrimary"
:action-cancel="actionCancel"
@primary="onSubmit"
@cancel="onCloseModal"
@hidden="onReset"
>
<content-transition
class="gl-display-grid"
@ -282,22 +317,5 @@ export default {
<slot :name="key"></slot>
</template>
</content-transition>
<template #modal-footer>
<slot name="cancel-button">
<gl-button data-testid="cancel-button" @click="closeModal">
{{ $options.CANCEL_BUTTON_TEXT }}
</gl-button>
</slot>
<gl-button
:disabled="submitDisabled"
:loading="isLoading"
variant="confirm"
data-qa-selector="invite_button"
data-testid="invite-button"
@click="submit"
>
{{ submitButtonText }}
</gl-button>
</template>
</gl-modal>
</template>

View File

@ -1484,7 +1484,7 @@ class Project < ApplicationRecord
end
def find_or_initialize_integration(name)
return if disabled_integrations.include?(name)
return if disabled_integrations.include?(name) || Integration.available_integration_names.exclude?(name)
find_integration(integrations, name) || build_from_instance(name) || build_integration(name)
end

View File

@ -0,0 +1,109 @@
# frozen_string_literal: true

module Database
  # This class takes two ActiveRecord models, and compares the selected columns
  # of the two models tables, for the purposes of checking the consistency of
  # mirroring of tables. For example Namespace and Ci::NamespaceMirror
  #
  # It compares up to 25 batches (1000 records / batch), or up to 30 seconds
  # for all the batches in total.
  #
  # It saves the cursor of the next start_id (cursor) in Redis. If the start_id
  # wasn't saved in Redis, for example, in the first run, it will choose some random start_id
  #
  # Example:
  #   service = Database::ConsistencyCheckService.new(
  #     source_model: Namespace,
  #     target_model: Ci::NamespaceMirror,
  #     source_columns: %w[id traversal_ids],
  #     target_columns: %w[namespace_id traversal_ids],
  #   )
  #   result = service.execute
  #
  # result is a hash that has the following fields:
  # - batches: Number of batches checked
  # - matches: The number of matched records
  # - mismatches: The number of mismatched records
  # - mismatches_details: It's an array that contains details about the mismatched records.
  #     each record in this array is a hash of format {id: ID, source_table: [...], target_table: [...]}
  #     Each record represents the attributes of the records in the two tables.
  # - start_id: The start id cursor of the current batch. <nil> means no records.
  # - next_start_id: The ID that can be used for the next batch iteration check. <nil> means no records
  class ConsistencyCheckService
    CURSOR_REDIS_KEY_TTL = 7.days
    EMPTY_RESULT = { matches: 0, mismatches: 0, batches: 0, mismatches_details: [] }.freeze

    # @param source_model [Class] model for the authoritative table
    # @param target_model [Class] model for the mirrored table
    # @param source_columns [Array<String>] columns to compare; the first is the sort key
    # @param target_columns [Array<String>] columns to compare; the first is the sort key
    def initialize(source_model:, target_model:, source_columns:, target_columns:)
      @source_model = source_model
      @target_model = target_model
      @source_columns = source_columns
      @target_columns = target_columns
      @source_sort_column = source_columns.first
      @target_sort_column = target_columns.first
    end

    # Runs one bounded consistency-check pass starting from the cursor stored
    # in Redis (or a random id on the first run) and advances the cursor.
    def execute
      start_id = next_start_id
      return EMPTY_RESULT if start_id.nil?

      result = consistency_checker.execute(start_id: start_id)
      result[:start_id] = start_id
      save_next_start_id(result[:next_start_id])
      result
    end

    private

    attr_reader :source_model, :target_model, :source_columns, :target_columns,
      :source_sort_column, :target_sort_column

    def consistency_checker
      @consistency_checker ||= Gitlab::Database::ConsistencyChecker.new(
        source_model: source_model,
        target_model: target_model,
        source_columns: source_columns,
        target_columns: target_columns
      )
    end

    def next_start_id
      return if min_id.nil?

      fetch_next_start_id || random_start_id
    end

    # rubocop: disable CodeReuse/ActiveRecord
    def min_id
      @min_id ||= source_model.minimum(source_sort_column)
    end

    def max_id
      # Fix: this must be the *maximum* of the sort column. It previously called
      # `minimum`, which made `range_end` in #random_start_id collapse to min_id,
      # so the "random" start id was always the first id of the table.
      @max_id ||= source_model.maximum(source_sort_column)
    end
    # rubocop: enable CodeReuse/ActiveRecord

    def fetch_next_start_id
      Gitlab::Redis::SharedState.with { |redis| redis.get(cursor_redis_shared_state_key)&.to_i }
    end

    # This returns some random start_id, so that we don't always start checking
    # from the start of the table, in case we lose the cursor in Redis.
    def random_start_id
      range_start = min_id
      range_end = [min_id, max_id - Gitlab::Database::ConsistencyChecker::BATCH_SIZE].max
      rand(range_start..range_end)
    end

    def save_next_start_id(start_id)
      Gitlab::Redis::SharedState.with do |redis|
        redis.set(cursor_redis_shared_state_key, start_id, ex: CURSOR_REDIS_KEY_TTL)
      end
    end

    def cursor_redis_shared_state_key
      "consistency_check_cursor:#{source_model.table_name}:#{target_model.table_name}"
    end
  end
end

View File

@ -327,6 +327,24 @@
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:database_ci_namespace_mirrors_consistency_check
:worker_name: Database::CiNamespaceMirrorsConsistencyCheckWorker
:feature_category: :sharding
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:database_ci_project_mirrors_consistency_check
:worker_name: Database::CiProjectMirrorsConsistencyCheckWorker
:feature_category: :sharding
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:database_drop_detached_partitions
:worker_name: Database::DropDetachedPartitionsWorker
:feature_category: :database

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true

module Database
  # Cron worker that runs one bounded consistency-check pass comparing the
  # namespaces table against its CI-decomposition mirror (Ci::NamespaceMirror)
  # and records the result hash in the Sidekiq job's done-log metadata.
  class CiNamespaceMirrorsConsistencyCheckWorker
    include ApplicationWorker
    include CronjobQueue # rubocop: disable Scalability/CronWorkerContext

    sidekiq_options retry: false
    feature_category :sharding
    data_consistency :sticky
    idempotent!
    version 1

    def perform
      # Guarded by a feature flag so the check can be turned off quickly.
      return if Feature.disabled?(:ci_namespace_mirrors_consistency_check, default_enabled: :yaml)

      log_extra_metadata_on_done(:results, check_consistency)
    end

    private

    # Compares id/traversal_ids between namespaces and ci_namespace_mirrors,
    # returning the service's result hash (matches, mismatches, details, cursor).
    def check_consistency
      ConsistencyCheckService.new(
        source_model: Namespace,
        target_model: Ci::NamespaceMirror,
        source_columns: %w[id traversal_ids],
        target_columns: %w[namespace_id traversal_ids]
      ).execute
    end
  end
end

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true

module Database
  # Cron worker that runs one bounded consistency-check pass comparing the
  # projects table against its CI-decomposition mirror (Ci::ProjectMirror)
  # and records the result hash in the Sidekiq job's done-log metadata.
  class CiProjectMirrorsConsistencyCheckWorker
    include ApplicationWorker
    include CronjobQueue # rubocop: disable Scalability/CronWorkerContext

    sidekiq_options retry: false
    feature_category :sharding
    data_consistency :sticky
    idempotent!
    version 1

    def perform
      # Guarded by a feature flag so the check can be turned off quickly.
      return if Feature.disabled?(:ci_project_mirrors_consistency_check, default_enabled: :yaml)

      log_extra_metadata_on_done(:results, check_consistency)
    end

    private

    # Compares id/namespace_id between projects and ci_project_mirrors,
    # returning the service's result hash (matches, mismatches, details, cursor).
    def check_consistency
      ConsistencyCheckService.new(
        source_model: Project,
        target_model: Ci::ProjectMirror,
        source_columns: %w[id namespace_id],
        target_columns: %w[project_id namespace_id]
      ).execute
    end
  end
end

View File

@ -0,0 +1,8 @@
---
name: ci_namespace_mirrors_consistency_check
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/81836
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/356577
milestone: '14.10'
type: development
group: group::sharding
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: ci_project_mirrors_consistency_check
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/81836
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/356583
milestone: '14.10'
type: development
group: group::sharding
default_enabled: false

View File

@ -763,6 +763,12 @@ Gitlab.ee do
Settings.cron_jobs['loose_foreign_keys_cleanup_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['loose_foreign_keys_cleanup_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['loose_foreign_keys_cleanup_worker']['job_class'] = 'LooseForeignKeys::CleanupWorker'
Settings.cron_jobs['ci_namespace_mirrors_consistency_check_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ci_namespace_mirrors_consistency_check_worker']['cron'] ||= '*/4 * * * *'
Settings.cron_jobs['ci_namespace_mirrors_consistency_check_worker']['job_class'] = 'Database::CiNamespaceMirrorsConsistencyCheckWorker'
Settings.cron_jobs['ci_project_mirrors_consistency_check_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ci_project_mirrors_consistency_check_worker']['cron'] ||= '2-58/4 * * * *'
Settings.cron_jobs['ci_project_mirrors_consistency_check_worker']['job_class'] = 'Database::CiProjectMirrorsConsistencyCheckWorker'
end
#

View File

@ -167,6 +167,7 @@ options:
- p_ci_templates_implicit_security_cluster_image_scanning
- p_ci_templates_kaniko
- p_ci_templates_qualys_iac_security
- p_ci_templates_database_liquibase
distribution:
- ce
- ee

View File

@ -0,0 +1,25 @@
---
key_path: redis_hll_counters.ci_templates.p_ci_templates_database_liquibase_monthly
description: ""
product_section: ""
product_stage: ""
product_group: ""
product_category: ""
value_type: number
status: active
milestone: "14.9"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/81817
time_frame: 28d
data_source: redis_hll
data_category: optional
instrumentation_class: RedisHLLMetric
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
options:
events:
- p_ci_templates_database_liquibase

View File

@ -167,6 +167,7 @@ options:
- p_ci_templates_implicit_security_cluster_image_scanning
- p_ci_templates_kaniko
- p_ci_templates_qualys_iac_security
- p_ci_templates_database_liquibase
distribution:
- ce
- ee

View File

@ -0,0 +1,25 @@
---
key_path: redis_hll_counters.ci_templates.p_ci_templates_database_liquibase_weekly
description: ""
product_section: ""
product_stage: ""
product_group: ""
product_category: ""
value_type: number
status: active
milestone: "14.9"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/81817
time_frame: 7d
data_source: redis_hll
data_category: optional
instrumentation_class: RedisHLLMetric
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
options:
events:
- p_ci_templates_database_liquibase

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
# Post-deploy migration that schedules the MergeTopicsWithSameName background
# migration for every group of project topics whose names differ only in case.
# NOTE(review): this migration has shipped (digest recorded under
# db/schema_migrations), so its logic must not be altered after the fact.
class ScheduleMergeTopicsWithSameName < Gitlab::Database::Migration[1.0]
MIGRATION = 'MergeTopicsWithSameName'
# Number of lower-cased duplicate topic names handed to a single job.
BATCH_SIZE = 100
disable_ddl_transaction!
# Minimal AR model pinned to the topics table so the migration does not
# depend on the application's Projects::Topic model.
class Topic < ActiveRecord::Base
self.table_name = 'topics'
end
# Finds every case-insensitive duplicate topic name and schedules one
# background job per BATCH_SIZE group, staggered 2 minutes apart.
def up
Topic.select('LOWER(name) as name').group('LOWER(name)').having('COUNT(*) > 1').order('LOWER(name)')
.in_groups_of(BATCH_SIZE, false).each_with_index do |group, i|
migrate_in((i + 1) * 2.minutes, MIGRATION, [group.map(&:name)])
end
end
# The data merge performed by the background migration cannot be reversed.
def down
# no-op
end
end

View File

@ -0,0 +1 @@
8fb72b15bfaa1b58f87cb3f1836df1e8bfa1a5ddec4e480a2cb6a3c9fafe3bda

View File

@ -170,5 +170,5 @@ This parameter is used for filtering by attributes, such as `environment_scope`.
Example usage:
```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/variables/VARIABLE_1?filter[environment_scope]=production"
curl --request DELETE --globoff --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/variables/VARIABLE_1?filter[environment_scope]=production"
```

View File

@ -0,0 +1,76 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# The class to merge project topics with the same case insensitive name
class MergeTopicsWithSameName
# Temporary AR model for topics
class Topic < ActiveRecord::Base
self.table_name = 'topics'
end
# Temporary AR model for project topic assignment
class ProjectTopic < ActiveRecord::Base
self.table_name = 'project_topics'
end
# @param topic_names [Array<String>] lower-cased topic names that have
#   case-insensitive duplicates. For each name, the topic with the highest
#   total_projects_count (ties broken by non_private_projects_count, then
#   lowest id) is kept and the rest are merged into it.
def perform(topic_names)
topic_names.each do |topic_name|
topics = Topic.where('LOWER(name) = ?', topic_name)
.order(total_projects_count: :desc, non_private_projects_count: :desc, id: :asc)
.to_a
# First (highest-ranked) topic survives; all others are merged into it.
topic_to_keep = topics.shift
merge_topics(topic_to_keep, topics) if topics.any?
end
end
private
# Moves project assignments, description and avatar from topics_to_remove
# onto topic_to_keep, deletes the removed topics, then recomputes the
# kept topic's project counters from the merged assignments.
def merge_topics(topic_to_keep, topics_to_remove)
# Keep the survivor's description if present, otherwise adopt the first
# non-blank description among the removed topics.
description = topic_to_keep.description
topics_to_remove.each do |topic|
description ||= topic.description if topic.description.present?
process_avatar(topic_to_keep, topic) if topic.avatar.present?
ProjectTopic.transaction do
# Re-point assignments, skipping projects already tagged with the kept
# topic to avoid violating the (project_id, topic_id) uniqueness.
# NOTE(review): `where(topic_id: topic_to_keep)` relies on AR extracting
# the id from the record; `topic_to_keep.id` would be more explicit.
ProjectTopic.where(topic_id: topic.id)
.where.not(project_id: ProjectTopic.where(topic_id: topic_to_keep).select(:project_id))
.update_all(topic_id: topic_to_keep.id)
# Whatever assignments remain are duplicates — drop them.
ProjectTopic.where(topic_id: topic.id).delete_all
end
end
Topic.where(id: topics_to_remove).delete_all
topic_to_keep.update(
description: description,
total_projects_count: total_projects_count(topic_to_keep.id),
non_private_projects_count: non_private_projects_count(topic_to_keep.id)
)
end
# We intentionally use application code here because we need to copy/remove avatar files
def process_avatar(topic_to_keep, topic_to_remove)
topic_to_remove = ::Projects::Topic.find(topic_to_remove.id)
topic_to_keep = ::Projects::Topic.find(topic_to_keep.id)
unless topic_to_keep.avatar.present?
topic_to_keep.avatar = topic_to_remove.avatar
topic_to_keep.save!
end
topic_to_remove.remove_avatar!
topic_to_remove.save!
end
# Current number of projects assigned to the topic.
def total_projects_count(topic_id)
ProjectTopic.where(topic_id: topic_id).count
end
# Number of assigned projects with internal (10) or public (20) visibility.
def non_private_projects_count(topic_id)
ProjectTopic.joins('INNER JOIN projects ON project_topics.project_id = projects.id')
.where(project_topics: { topic_id: topic_id }).where('projects.visibility_level in (10, 20)').count
end
end
end
end

View File

@ -0,0 +1,149 @@
# This file is a template, and might need editing before it works on your project.
# Here is a live project example that is using this template:
# https://gitlab.com/szandany/h2
# To contribute improvements to CI/CD templates, please follow the Development guide at:
# https://docs.gitlab.com/ee/development/cicd/templates.html
# This specific template is located at:
# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Database/liquibase.gitlab-ci.yml
# This template must be configured with CI/CD variables before it will work.
# See https://www.liquibase.com/blog/secure-database-developer-flow-using-gitlab-pipelines
# to learn how to configure the Liquibase template by using variables.
# Be sure to add the variables before running pipelines with this template.
# You may not want to run all the jobs in this template. You can comment out or delete the jobs you don't wish to use.
# List of stages for jobs and their order of execution.
stages:
- build
- test
- deploy
- compare
# Helper functions to determine if the database is ready for deployments (function isUpToDate) or rollbacks (function isRollback) when tag is applied.
.functions: &functions |
function isUpToDate(){
status=$(liquibase status --verbose)
if [[ $status == *'is up to date'* ]]; then
echo "database is already up to date" & exit 0
fi;
}
function isRollback(){
if [ -z "$TAG" ]; then
echo "No TAG provided, running any pending changes"
elif [[ "$(liquibase rollbackSQL $TAG)" ]]; then
liquibase --logLevel=info --logFile=${CI_JOB_NAME}_${CI_PIPELINE_ID}.log rollback $TAG && exit 0
else exit 0
fi;
}
# This is a series of Liquibase commands that can be run while doing database migrations from Liquibase docs at https://docs.liquibase.com/commands/home.html
.liquibase_job:
image: liquibase/liquibase:latest # Using the Liquibase Docker Image at - https://hub.docker.com/r/liquibase/liquibase
before_script:
- liquibase --version
- *functions
- isRollback
- isUpToDate
- liquibase checks run
- liquibase update
- liquibase rollbackOneUpdate --force # This is a Pro command. Try Pro free trial here - https://liquibase.org/try-liquibase-pro-free
- liquibase tag $CI_PIPELINE_ID
- liquibase --logFile=${CI_JOB_NAME}_${CI_PIPELINE_ID}.log --logLevel=info update
- liquibase history
artifacts:
paths:
- ${CI_JOB_NAME}_${CI_PIPELINE_ID}.log
expire_in: 1 week
# This job runs in the build stage, which runs first.
build-job:
extends: .liquibase_job
stage: build
environment:
name: DEV
script:
- echo "This job tested successfully with liquibase in DEV environment"
rules:
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
# This job runs in the test stage. It only starts when the job in the build stage completes successfully.
test-job:
  extends: .liquibase_job
  stage: test
  environment:
    name: TEST
  script:
    # Fixed typo in the echoed message: "testsed" -> "tested".
    - echo "This job tested successfully with liquibase in TEST environment"
  rules:
    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
# This job runs in the deploy stage. It only starts when the jobs in the test stage completes successfully.
deploy-prod:
extends: .liquibase_job
stage: deploy
environment:
name: PROD
script:
- echo "This job deployed successfully Liquibase in a production environment from the $CI_COMMIT_BRANCH branch."
rules:
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
# This job compares dev database with test database to detect any drifts in the pipeline. Learn more about comparing database with Liquibase here https://docs.liquibase.com/commands/diff.html
DEV->TEST:
image: liquibase/liquibase:latest # Using the Liquibase Docker Image
stage: compare
environment:
name: TEST
script:
- echo "Comparing databases DEV --> TEST"
- liquibase diff
- liquibase --outputFile=diff_between_DEV_TEST.json diff --format=json
rules:
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
artifacts:
paths:
- diff_between_DEV_TEST.json
expire_in: 1 week
# This job compares test database with prod database to detect any drifts in the pipeline.
TEST->PROD:
image: liquibase/liquibase:latest # Using the Liquibase Docker Image
stage: compare
environment:
name: PROD
script:
- echo "Comparing databases TEST --> PROD"
- liquibase diff
- liquibase --outputFile=diff_between_TEST_PROD.json diff --format=json
rules:
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
artifacts:
paths:
- diff_between_TEST_PROD.json
expire_in: 1 week
# This job creates a snapshot of prod database. You can use the snapshot file to run comparisons with the production database to investigate for any potential issues. https://www.liquibase.com/devsecops
snapshot PROD:
image: liquibase/liquibase:latest # Using the Liquibase Docker Image
stage: .post
environment:
name: PROD
script:
- echo "Snapshotting database PROD"
- liquibase --outputFile=snapshot_PROD_${CI_PIPELINE_ID}.json snapshot --snapshotFormat=json --log-level debug
rules:
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
artifacts:
paths:
- snapshot_PROD_${CI_PIPELINE_ID}.json
expire_in: 1 week

View File

@ -0,0 +1,122 @@
# frozen_string_literal: true
module Gitlab
module Database
# Compares the selected columns of two tables batch by batch, counting how
# many rows match and collecting the details of every mismatch. Used to
# verify that mirror tables (e.g. ci_namespace_mirrors) stay in sync with
# their source tables.
class ConsistencyChecker
BATCH_SIZE = 1000
MAX_BATCHES = 25
MAX_RUNTIME = 30.seconds # must be less than the scheduling frequency of the ConsistencyCheck jobs
delegate :monotonic_time, to: :'Gitlab::Metrics::System'
# @param source_model [Class] model for the authoritative table
# @param target_model [Class] model for the mirrored table
# @param source_columns [Array<String>] columns to compare; the first is the sort key
# @param target_columns [Array<String>] columns to compare; the first is the sort key
def initialize(source_model:, target_model:, source_columns:, target_columns:)
@source_model = source_model
@target_model = target_model
@source_columns = source_columns
@target_columns = target_columns
@source_sort_column = source_columns.first
@target_sort_column = target_columns.first
@result = { matches: 0, mismatches: 0, batches: 0, mismatches_details: [] }
end
# Checks up to MAX_BATCHES ranges of BATCH_SIZE ids starting at +start_id+,
# stopping early once MAX_RUNTIME has elapsed. Returns the result hash plus
# :next_start_id, which wraps back to min_id when the end of the table is
# passed (nil when the source table is empty).
# rubocop:disable Metrics/AbcSize
def execute(start_id:)
current_start_id = start_id
# Empty source table: nothing to compare.
return build_result(next_start_id: nil) if max_id.nil?
# Cursor ran past the end of the table: restart from the beginning.
return build_result(next_start_id: min_id) if current_start_id > max_id
@start_time = monotonic_time
MAX_BATCHES.times do
if (current_start_id <= max_id) && !over_time_limit?
ids_range = current_start_id...(current_start_id + BATCH_SIZE)
# rubocop: disable CodeReuse/ActiveRecord
source_data = source_model.where(source_sort_column => ids_range)
.order(source_sort_column => :asc).pluck(*source_columns)
target_data = target_model.where(target_sort_column => ids_range)
.order(target_sort_column => :asc).pluck(*target_columns)
# rubocop: enable CodeReuse/ActiveRecord
current_start_id += BATCH_SIZE
result[:matches] += append_mismatches_details(source_data, target_data)
result[:batches] += 1
else
break
end
end
result[:mismatches] = result[:mismatches_details].length
metrics_counter.increment({ source_table: source_model.table_name, result: "match" }, result[:matches])
metrics_counter.increment({ source_table: source_model.table_name, result: "mismatch" }, result[:mismatches])
build_result(next_start_id: current_start_id > max_id ? min_id : current_start_id)
end
# rubocop:enable Metrics/AbcSize
private
attr_reader :source_model, :target_model, :source_columns, :target_columns,
:source_sort_column, :target_sort_column, :start_time, :result
def build_result(next_start_id:)
{ next_start_id: next_start_id }.merge(result)
end
def over_time_limit?
(monotonic_time - start_time) >= MAX_RUNTIME
end
# This is where the items are compared and the diff log is built.
# It returns the number of matching elements.
def append_mismatches_details(source_data, target_data)
# Index each differing row by its sort key (shifted off the row), leaving
# the remaining column values as the hash value.
# source - target
source_diff_hash = (source_data - target_data).index_by { |item| item.shift }
# target - source
target_diff_hash = (target_data - source_data).index_by { |item| item.shift }
matches = source_data.length - source_diff_hash.length
# Items that exist in the first table + Different items
source_diff_hash.each do |id, values|
result[:mismatches_details] << {
id: id,
source_table: values,
target_table: target_diff_hash[id]
}
end
# Only the items that exist in the target table
target_diff_hash.each do |id, values|
next if source_diff_hash[id] # It's already added
result[:mismatches_details] << {
id: id,
source_table: source_diff_hash[id],
target_table: values
}
end
matches
end
# rubocop: disable CodeReuse/ActiveRecord
def min_id
@min_id ||= source_model.minimum(source_sort_column)
end
def max_id
@max_id ||= source_model.maximum(source_sort_column)
end
# rubocop: enable CodeReuse/ActiveRecord
def metrics_counter
@metrics_counter ||= Gitlab::Metrics.counter(
:consistency_checks,
"Consistency Check Results"
)
end
end
end
end

View File

@ -615,3 +615,7 @@
category: ci_templates
redis_slot: ci_templates
aggregation: weekly
- name: p_ci_templates_database_liquibase
category: ci_templates
redis_slot: ci_templates
aggregation: weekly

View File

@ -93,6 +93,7 @@ describe QA::Support::Formatters::TestStatsFormatter do
before do
allow(InfluxDB2::Client).to receive(:new).with(url, token, **influx_client_args) { influx_client }
allow(QA::Tools::TestResourceDataProcessor).to receive(:resources) { fabrication_resources }
allow_any_instance_of(RSpec::Core::Example::ExecutionResult).to receive(:run_time).and_return(0) # rubocop:disable RSpec/AnyInstanceOf
end
context "without influxdb variables configured" do

BIN
spec/fixtures/avatars/avatar1.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

BIN
spec/fixtures/avatars/avatar2.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

BIN
spec/fixtures/avatars/avatar3.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

BIN
spec/fixtures/avatars/avatar4.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

BIN
spec/fixtures/avatars/avatar5.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

View File

@ -42,18 +42,19 @@ describe('InviteGroupsModal', () => {
wrapper = null;
});
const findModal = () => wrapper.findComponent(GlModal);
const findGroupSelect = () => wrapper.findComponent(GroupSelect);
const findIntroText = () => wrapper.findByTestId('modal-base-intro-text').text();
const findCancelButton = () => wrapper.findByTestId('cancel-button');
const findInviteButton = () => wrapper.findByTestId('invite-button');
const findMembersFormGroup = () => wrapper.findByTestId('members-form-group');
const membersFormGroupInvalidFeedback = () =>
findMembersFormGroup().attributes('invalid-feedback');
const clickInviteButton = () => findInviteButton().vm.$emit('click');
const clickCancelButton = () => findCancelButton().vm.$emit('click');
const triggerGroupSelect = (val) => findGroupSelect().vm.$emit('input', val);
const findBase = () => wrapper.findComponent(InviteModalBase);
const hideModal = () => wrapper.findComponent(GlModal).vm.$emit('hide');
const triggerGroupSelect = (val) => findGroupSelect().vm.$emit('input', val);
const emitEventFromModal = (eventName) => () =>
findModal().vm.$emit(eventName, { preventDefault: jest.fn() });
const hideModal = emitEventFromModal('hidden');
const clickInviteButton = emitEventFromModal('primary');
const clickCancelButton = emitEventFromModal('cancel');
describe('displaying the correct introText and form group description', () => {
describe('when inviting to a project', () => {

View File

@ -85,12 +85,13 @@ describe('InviteMembersModal', () => {
mock.restore();
});
const findModal = () => wrapper.findComponent(GlModal);
const findBase = () => wrapper.findComponent(InviteModalBase);
const findIntroText = () => wrapper.findByTestId('modal-base-intro-text').text();
const findCancelButton = () => wrapper.findByTestId('cancel-button');
const findInviteButton = () => wrapper.findByTestId('invite-button');
const clickInviteButton = () => findInviteButton().vm.$emit('click');
const clickCancelButton = () => findCancelButton().vm.$emit('click');
const emitEventFromModal = (eventName) => () =>
findModal().vm.$emit(eventName, { preventDefault: jest.fn() });
const clickInviteButton = emitEventFromModal('primary');
const clickCancelButton = emitEventFromModal('cancel');
const findMembersFormGroup = () => wrapper.findByTestId('members-form-group');
const membersFormGroupInvalidFeedback = () =>
findMembersFormGroup().attributes('invalid-feedback');
@ -276,7 +277,7 @@ describe('InviteMembersModal', () => {
});
it('renders the modal with the correct title', () => {
expect(wrapper.findComponent(GlModal).props('title')).toBe(MEMBERS_MODAL_CELEBRATE_TITLE);
expect(findModal().props('title')).toBe(MEMBERS_MODAL_CELEBRATE_TITLE);
});
it('includes the correct celebration text and emoji', () => {
@ -337,7 +338,7 @@ describe('InviteMembersModal', () => {
});
it('sets isLoading on the Invite button when it is clicked', () => {
expect(findInviteButton().props('loading')).toBe(true);
expect(findModal().props('actionPrimary').attributes.loading).toBe(true);
});
it('calls Api addGroupMembersByUserId with the correct params', () => {
@ -380,7 +381,7 @@ describe('InviteMembersModal', () => {
expect(membersFormGroupInvalidFeedback()).toBe('Member already exists');
expect(findMembersSelect().props('validationState')).toBe(false);
expect(findInviteButton().props('loading')).toBe(false);
expect(findModal().props('actionPrimary').attributes.loading).toBe(false);
});
describe('clearing the invalid state and message', () => {
@ -414,7 +415,7 @@ describe('InviteMembersModal', () => {
});
it('clears the error when the modal is hidden', async () => {
wrapper.findComponent(GlModal).vm.$emit('hide');
findModal().vm.$emit('hidden');
await nextTick();
@ -432,7 +433,7 @@ describe('InviteMembersModal', () => {
expect(membersFormGroupInvalidFeedback()).toBe('Member already exists');
expect(findMembersSelect().props('validationState')).toBe(false);
expect(findInviteButton().props('loading')).toBe(false);
expect(findModal().props('actionPrimary').attributes.loading).toBe(false);
findMembersSelect().vm.$emit('clear');
@ -440,7 +441,7 @@ describe('InviteMembersModal', () => {
expect(membersFormGroupInvalidFeedback()).toBe('');
expect(findMembersSelect().props('validationState')).toBe(null);
expect(findInviteButton().props('loading')).toBe(false);
expect(findModal().props('actionPrimary').attributes.loading).toBe(false);
});
it('displays the generic error for http server error', async () => {
@ -542,7 +543,7 @@ describe('InviteMembersModal', () => {
expect(membersFormGroupInvalidFeedback()).toBe(expectedSyntaxError);
expect(findMembersSelect().props('validationState')).toBe(false);
expect(findInviteButton().props('loading')).toBe(false);
expect(findModal().props('actionPrimary').attributes.loading).toBe(false);
});
it('displays the restricted email error when restricted email is invited', async () => {
@ -554,7 +555,7 @@ describe('InviteMembersModal', () => {
expect(membersFormGroupInvalidFeedback()).toContain(expectedEmailRestrictedError);
expect(findMembersSelect().props('validationState')).toBe(false);
expect(findInviteButton().props('loading')).toBe(false);
expect(findModal().props('actionPrimary').attributes.loading).toBe(false);
});
it('displays the successful toast message when email has already been invited', async () => {

View File

@ -49,8 +49,6 @@ describe('InviteModalBase', () => {
const findDatepicker = () => wrapper.findComponent(GlDatepicker);
const findLink = () => wrapper.findComponent(GlLink);
const findIntroText = () => wrapper.findByTestId('modal-base-intro-text').text();
const findCancelButton = () => wrapper.findByTestId('cancel-button');
const findInviteButton = () => wrapper.findByTestId('invite-button');
const findMembersFormGroup = () => wrapper.findByTestId('members-form-group');
describe('rendering the modal', () => {
@ -67,15 +65,21 @@ describe('InviteModalBase', () => {
});
it('renders the Cancel button text correctly', () => {
expect(findCancelButton().text()).toBe(CANCEL_BUTTON_TEXT);
expect(wrapper.findComponent(GlModal).props('actionCancel')).toMatchObject({
text: CANCEL_BUTTON_TEXT,
});
});
it('renders the Invite button text correctly', () => {
expect(findInviteButton().text()).toBe(INVITE_BUTTON_TEXT);
});
it('renders the Invite button modal without isLoading', () => {
expect(findInviteButton().props('loading')).toBe(false);
it('renders the Invite button correctly', () => {
expect(wrapper.findComponent(GlModal).props('actionPrimary')).toMatchObject({
text: INVITE_BUTTON_TEXT,
attributes: {
variant: 'confirm',
disabled: false,
loading: false,
'data-qa-selector': 'invite_button',
},
});
});
describe('rendering the access levels dropdown', () => {
@ -114,7 +118,7 @@ describe('InviteModalBase', () => {
isLoading: true,
});
expect(findInviteButton().props('loading')).toBe(true);
expect(wrapper.findComponent(GlModal).props('actionPrimary').attributes.loading).toBe(true);
});
it('with invalidFeedbackMessage, set members form group validation state', () => {

View File

@ -149,7 +149,7 @@ RSpec.describe ApplicationSettingsHelper do
end
end
describe '.storage_weights' do
describe '#storage_weights' do
let(:application_setting) { build(:application_setting) }
before do
@ -158,12 +158,13 @@ RSpec.describe ApplicationSettingsHelper do
stub_application_setting(repository_storages_weighted: { 'default' => 100, 'storage_1' => 50, 'storage_2' => nil })
end
it 'returns storages correctly' do
expect(helper.storage_weights).to eq(OpenStruct.new(
default: 100,
storage_1: 50,
storage_2: 0
))
it 'returns storage objects with assigned weights' do
expect(helper.storage_weights)
.to have_attributes(
default: 100,
storage_1: 50,
storage_2: 0
)
end
end

View File

@ -0,0 +1,135 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::MergeTopicsWithSameName, schema: 20220223124428 do
  # Attaches +avatar+ to the topic identified by +topic_id+ and returns the
  # absolute path of the stored file, so the example can later assert the
  # file was deleted together with the merged (removed) topic.
  def set_avatar(topic_id, avatar)
    topic = ::Projects::Topic.find(topic_id)
    topic.avatar = avatar
    topic.save!
    topic.avatar.absolute_path
  end

  it 'merges project topics with same case insensitive name' do
    namespaces = table(:namespaces)
    projects = table(:projects)
    topics = table(:topics)
    project_topics = table(:project_topics)

    group = namespaces.create!(name: 'group', path: 'group')
    # Three projects with distinct visibility levels (public/internal/private),
    # used to verify the total / non-private project counters after the merge.
    project_1 = projects.create!(namespace_id: group.id, visibility_level: 20)
    project_2 = projects.create!(namespace_id: group.id, visibility_level: 10)
    project_3 = projects.create!(namespace_id: group.id, visibility_level: 0)
    # topic1: lower-case variant wins (higher non_private_projects_count).
    topic_1_keep = topics.create!(
      name: 'topic1',
      description: 'description 1 to keep',
      total_projects_count: 2,
      non_private_projects_count: 2
    )
    topic_1_remove = topics.create!(
      name: 'TOPIC1',
      description: 'description 1 to remove',
      total_projects_count: 2,
      non_private_projects_count: 1
    )
    # topic2: upper-case variant wins (higher total_projects_count).
    topic_2_remove = topics.create!(
      name: 'topic2',
      total_projects_count: 0
    )
    topic_2_keep = topics.create!(
      name: 'TOPIC2',
      description: 'description 2 to keep',
      total_projects_count: 1
    )
    # topic3: three case variants; the mixed-case one is kept, but the
    # description is taken from a removed variant (the kept one has none).
    topic_3_remove_1 = topics.create!(
      name: 'topic3',
      total_projects_count: 2,
      non_private_projects_count: 1
    )
    topic_3_keep = topics.create!(
      name: 'Topic3',
      total_projects_count: 2,
      non_private_projects_count: 2
    )
    topic_3_remove_2 = topics.create!(
      name: 'TOPIC3',
      description: 'description 3 to keep',
      total_projects_count: 2,
      non_private_projects_count: 1
    )
    # topic4: no case-insensitive duplicates — must survive unchanged.
    topic_4_keep = topics.create!(
      name: 'topic4'
    )

    project_topics_1 = []
    project_topics_3 = []
    project_topics_removed = []

    # Assignments expected to end up on the kept topics; duplicates
    # (same project already assigned to the kept topic) must be removed.
    project_topics_1 << project_topics.create!(topic_id: topic_1_keep.id, project_id: project_1.id)
    project_topics_1 << project_topics.create!(topic_id: topic_1_keep.id, project_id: project_2.id)
    project_topics_removed << project_topics.create!(topic_id: topic_1_remove.id, project_id: project_2.id)
    project_topics_1 << project_topics.create!(topic_id: topic_1_remove.id, project_id: project_3.id)
    project_topics_3 << project_topics.create!(topic_id: topic_3_keep.id, project_id: project_1.id)
    project_topics_3 << project_topics.create!(topic_id: topic_3_keep.id, project_id: project_2.id)
    project_topics_removed << project_topics.create!(topic_id: topic_3_remove_1.id, project_id: project_1.id)
    project_topics_3 << project_topics.create!(topic_id: topic_3_remove_1.id, project_id: project_3.id)
    project_topics_removed << project_topics.create!(topic_id: topic_3_remove_2.id, project_id: project_1.id)
    project_topics_removed << project_topics.create!(topic_id: topic_3_remove_2.id, project_id: project_3.id)

    avatar_paths = {
      topic_1_keep: set_avatar(topic_1_keep.id, fixture_file_upload('spec/fixtures/avatars/avatar1.png')),
      topic_1_remove: set_avatar(topic_1_remove.id, fixture_file_upload('spec/fixtures/avatars/avatar2.png')),
      topic_2_remove: set_avatar(topic_2_remove.id, fixture_file_upload('spec/fixtures/avatars/avatar3.png')),
      topic_3_remove_1: set_avatar(topic_3_remove_1.id, fixture_file_upload('spec/fixtures/avatars/avatar4.png')),
      topic_3_remove_2: set_avatar(topic_3_remove_2.id, fixture_file_upload('spec/fixtures/avatars/avatar5.png'))
    }

    subject.perform(%w[topic1 topic2 topic3 topic4])

    # Topics
    [topic_1_keep, topic_2_keep, topic_3_keep, topic_4_keep].each(&:reload)

    expect(topic_1_keep.name).to eq('topic1')
    expect(topic_1_keep.description).to eq('description 1 to keep')
    expect(topic_1_keep.total_projects_count).to eq(3)
    expect(topic_1_keep.non_private_projects_count).to eq(2)
    expect(topic_2_keep.name).to eq('TOPIC2')
    expect(topic_2_keep.description).to eq('description 2 to keep')
    expect(topic_2_keep.total_projects_count).to eq(0)
    expect(topic_2_keep.non_private_projects_count).to eq(0)
    expect(topic_3_keep.name).to eq('Topic3')
    expect(topic_3_keep.description).to eq('description 3 to keep')
    expect(topic_3_keep.total_projects_count).to eq(3)
    expect(topic_3_keep.non_private_projects_count).to eq(2)
    expect(topic_4_keep.reload.name).to eq('topic4')

    # Merged duplicates must have been destroyed.
    [topic_1_remove, topic_2_remove, topic_3_remove_1, topic_3_remove_2].each do |topic|
      expect { topic.reload }.to raise_error(ActiveRecord::RecordNotFound)
    end

    # Topic avatars: a kept topic without an avatar inherits one from a
    # removed duplicate; avatar files of removed topics are deleted.
    expect(topic_1_keep.avatar).to eq('avatar1.png')
    expect(File.exist?(::Projects::Topic.find(topic_1_keep.id).avatar.absolute_path)).to be_truthy
    expect(topic_2_keep.avatar).to eq('avatar3.png')
    expect(File.exist?(::Projects::Topic.find(topic_2_keep.id).avatar.absolute_path)).to be_truthy
    expect(topic_3_keep.avatar).to eq('avatar4.png')
    expect(File.exist?(::Projects::Topic.find(topic_3_keep.id).avatar.absolute_path)).to be_truthy

    [:topic_1_remove, :topic_2_remove, :topic_3_remove_1, :topic_3_remove_2].each do |topic|
      expect(File.exist?(avatar_paths[topic])).to be_falsey
    end

    # Project Topic assignments
    project_topics_1.each do |project_topic|
      expect(project_topic.reload.topic_id).to eq(topic_1_keep.id)
    end

    project_topics_3.each do |project_topic|
      expect(project_topic.reload.topic_id).to eq(topic_3_keep.id)
    end

    project_topics_removed.each do |project_topic|
      expect { project_topic.reload }.to raise_error(ActiveRecord::RecordNotFound)
    end
  end
end

View File

@ -0,0 +1,189 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for the batched source/target table consistency checker
# (Namespace vs Ci::NamespaceMirror on the `traversal_ids` column).
RSpec.describe Gitlab::Database::ConsistencyChecker do
  let(:batch_size) { 10 }
  let(:max_batches) { 4 }
  let(:max_runtime) { described_class::MAX_RUNTIME }
  let(:metrics_counter) { Gitlab::Metrics.registry.get(:consistency_checks) }

  subject(:consistency_checker) do
    described_class.new(
      source_model: Namespace,
      target_model: Ci::NamespaceMirror,
      source_columns: %w[id traversal_ids],
      target_columns: %w[namespace_id traversal_ids]
    )
  end

  before do
    # Shrink the batching constants so the examples stay fast.
    stub_const("#{described_class.name}::BATCH_SIZE", batch_size)
    stub_const("#{described_class.name}::MAX_BATCHES", max_batches)
    redis_shared_state_cleanup! # For Prometheus Counters
  end

  after do
    Gitlab::Metrics.reset_registry!
  end

  describe '#over_time_limit?' do
    before do
      allow(consistency_checker).to receive(:start_time).and_return(0)
    end

    it 'returns true only if the running time has exceeded MAX_RUNTIME' do
      # Consecutive monotonic_time readings: the first is consumed below,
      # the next two by over_time_limit? (one under, one over the limit).
      allow(consistency_checker).to receive(:monotonic_time).and_return(0, max_runtime - 1, max_runtime + 1)
      expect(consistency_checker.monotonic_time).to eq(0)
      expect(consistency_checker.send(:over_time_limit?)).to eq(false)
      expect(consistency_checker.send(:over_time_limit?)).to eq(true)
    end
  end

  describe '#execute' do
    context 'when empty tables' do
      it 'returns an empty response' do
        expected_result = { matches: 0, mismatches: 0, batches: 0, mismatches_details: [], next_start_id: nil }
        expect(consistency_checker.execute(start_id: 1)).to eq(expected_result)
      end
    end

    context 'when the tables contain matching items' do
      before do
        create_list(:namespace, 50) # This will also create Ci::NamespaceMirror objects
      end

      it 'does not process more than MAX_BATCHES' do
        max_batches = 3
        stub_const("#{described_class.name}::MAX_BATCHES", max_batches)
        result = consistency_checker.execute(start_id: Namespace.minimum(:id))
        expect(result[:batches]).to eq(max_batches)
        expect(result[:matches]).to eq(max_batches * batch_size)
      end

      it 'does not exceed the MAX_RUNTIME' do
        allow(consistency_checker).to receive(:monotonic_time).and_return(0, max_runtime - 1, max_runtime + 1)
        result = consistency_checker.execute(start_id: Namespace.minimum(:id))
        expect(result[:batches]).to eq(1)
        expect(result[:matches]).to eq(1 * batch_size)
      end

      it 'returns the correct number of matches and batches checked' do
        expected_result = {
          next_start_id: Namespace.minimum(:id) + described_class::MAX_BATCHES * described_class::BATCH_SIZE,
          batches: max_batches,
          matches: max_batches * batch_size,
          mismatches: 0,
          mismatches_details: []
        }
        expect(consistency_checker.execute(start_id: Namespace.minimum(:id))).to eq(expected_result)
      end

      it 'returns the min_id as the next_start_id if the check reaches the last element' do
        expect(Gitlab::Metrics).to receive(:counter).at_most(:once)
          .with(:consistency_checks, "Consistency Check Results")
          .and_call_original

        # Starting from the 5th last element
        start_id = Namespace.all.order(id: :desc).limit(5).pluck(:id).last
        expected_result = {
          next_start_id: Namespace.first.id,
          batches: 1,
          matches: 5,
          mismatches: 0,
          mismatches_details: []
        }
        expect(consistency_checker.execute(start_id: start_id)).to eq(expected_result)

        expect(metrics_counter.get(source_table: "namespaces", result: "mismatch")).to eq(0)
        expect(metrics_counter.get(source_table: "namespaces", result: "match")).to eq(5)
      end
    end

    context 'when some items are missing from the first table' do
      let(:missing_namespace) { Namespace.all.order(:id).limit(2).last }

      before do
        create_list(:namespace, 50) # This will also create Ci::NamespaceMirror objects
        missing_namespace.delete
      end

      it 'reports the missing elements' do
        expected_result = {
          next_start_id: Namespace.first.id + described_class::MAX_BATCHES * described_class::BATCH_SIZE,
          batches: max_batches,
          matches: 39,
          mismatches: 1,
          mismatches_details: [{
            id: missing_namespace.id,
            source_table: nil,
            target_table: [missing_namespace.traversal_ids]
          }]
        }
        expect(consistency_checker.execute(start_id: Namespace.first.id)).to eq(expected_result)

        expect(metrics_counter.get(source_table: "namespaces", result: "mismatch")).to eq(1)
        expect(metrics_counter.get(source_table: "namespaces", result: "match")).to eq(39)
      end
    end

    context 'when some items are missing from the second table' do
      let(:missing_ci_namespace_mirror) { Ci::NamespaceMirror.all.order(:id).limit(2).last }

      before do
        create_list(:namespace, 50) # This will also create Ci::NamespaceMirror objects
        missing_ci_namespace_mirror.delete
      end

      it 'reports the missing elements' do
        expected_result = {
          next_start_id: Namespace.first.id + described_class::MAX_BATCHES * described_class::BATCH_SIZE,
          batches: 4,
          matches: 39,
          mismatches: 1,
          mismatches_details: [{
            id: missing_ci_namespace_mirror.namespace_id,
            source_table: [missing_ci_namespace_mirror.traversal_ids],
            target_table: nil
          }]
        }
        expect(consistency_checker.execute(start_id: Namespace.first.id)).to eq(expected_result)

        expect(metrics_counter.get(source_table: "namespaces", result: "mismatch")).to eq(1)
        expect(metrics_counter.get(source_table: "namespaces", result: "match")).to eq(39)
      end
    end

    context 'when elements are different between the two tables' do
      let(:different_namespaces) { Namespace.order(:id).limit(max_batches * batch_size).sample(3).sort_by(&:id) }

      before do
        create_list(:namespace, 50) # This will also create Ci::NamespaceMirror objects
        different_namespaces.each do |namespace|
          namespace.update_attribute(:traversal_ids, [])
        end
      end

      it 'reports the difference between the two tables' do
        expected_result = {
          next_start_id: Namespace.first.id + described_class::MAX_BATCHES * described_class::BATCH_SIZE,
          batches: 4,
          matches: 37,
          mismatches: 3,
          mismatches_details: different_namespaces.map do |namespace|
            {
              id: namespace.id,
              source_table: [[]],
              target_table: [[namespace.id]] # old traversal_ids of the namespace
            }
          end
        }
        expect(consistency_checker.execute(start_id: Namespace.first.id)).to eq(expected_result)

        expect(metrics_counter.get(source_table: "namespaces", result: "mismatch")).to eq(3)
        expect(metrics_counter.get(source_table: "namespaces", result: "match")).to eq(37)
      end
    end
  end
end

View File

@ -0,0 +1,36 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
# Migration spec: verifies that MergeTopicsWithSameName background jobs are
# scheduled in batches of BATCH_SIZE distinct (case-insensitive) topic names.
RSpec.describe ScheduleMergeTopicsWithSameName do
  let(:topics) { table(:topics) }

  describe '#up' do
    before do
      stub_const("#{described_class}::BATCH_SIZE", 2)

      # Seed topics whose names only differ in case, in the same order as
      # before, so the contents of the scheduled batches stay deterministic.
      %w[topic1 Topic2 Topic3 Topic4 topic2 topic3 topic4 TOPIC2 topic5].each do |name|
        topics.create!(name: name)
      end
    end

    it 'schedules MergeTopicsWithSameName background jobs', :aggregate_failures do
      Sidekiq::Testing.fake! do
        freeze_time do
          migrate!

          expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, %w[topic2 topic3])
          expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, %w[topic4])
          expect(BackgroundMigrationWorker.jobs.size).to eq(2)
        end
      end
    end
  end
end

View File

@ -6288,6 +6288,10 @@ RSpec.describe Project, factory_default: :keep do
expect(subject.find_or_initialize_integration('prometheus')).to be_nil
end
it 'returns nil if integration does not exist' do
expect(subject.find_or_initialize_integration('non-existing')).to be_nil
end
context 'with an existing integration' do
subject { create(:project) }

View File

@ -0,0 +1,154 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for the service that wraps Gitlab::Database::ConsistencyChecker,
# picking a random start id on the first run and persisting the cursor
# (next_start_id) between runs.
RSpec.describe Database::ConsistencyCheckService do
  let(:batch_size) { 5 }
  let(:max_batches) { 2 }

  before do
    # Shrink the batching constants so the examples stay fast.
    stub_const("Gitlab::Database::ConsistencyChecker::BATCH_SIZE", batch_size)
    stub_const("Gitlab::Database::ConsistencyChecker::MAX_BATCHES", max_batches)
  end

  after do
    redis_shared_state_cleanup!
  end

  subject(:consistency_check_service) do
    described_class.new(
      source_model: Namespace,
      target_model: Ci::NamespaceMirror,
      source_columns: %w[id traversal_ids],
      target_columns: %w[namespace_id traversal_ids]
    )
  end

  describe '#random_start_id' do
    let(:batch_size) { 5 }

    before do
      create_list(:namespace, 50) # This will also create Ci::NamespaceMirror objects
    end

    it 'generates a random start_id within the records ids' do
      10.times do
        start_id = subject.send(:random_start_id)

        expect(start_id).to be_between(Namespace.first.id, Namespace.last.id).inclusive
      end
    end
  end

  describe '#execute' do
    let(:empty_results) do
      { batches: 0, matches: 0, mismatches: 0, mismatches_details: [] }
    end

    context 'when empty tables' do
      it 'returns results with zero counters' do
        result = consistency_check_service.execute

        expect(result).to eq(empty_results)
      end

      it 'does not call the ConsistencyCheckService' do
        expect(Gitlab::Database::ConsistencyChecker).not_to receive(:new)
        consistency_check_service.execute
      end
    end

    context 'no cursor has been saved before' do
      let(:selected_start_id) { Namespace.order(:id).limit(5).pluck(:id).last }
      let(:expected_next_start_id) { selected_start_id + batch_size * max_batches }

      before do
        create_list(:namespace, 50) # This will also create Ci::NamespaceMirror objects
        expect(consistency_check_service).to receive(:random_start_id).and_return(selected_start_id)
      end

      it 'picks a random start_id' do
        expected_result = {
          batches: 2,
          matches: 10,
          mismatches: 0,
          mismatches_details: [],
          start_id: selected_start_id,
          next_start_id: expected_next_start_id
        }
        expect(consistency_check_service.execute).to eq(expected_result)
      end

      it 'calls the ConsistencyCheckService with the expected parameters' do
        allow_next_instance_of(Gitlab::Database::ConsistencyChecker) do |instance|
          expect(instance).to receive(:execute).with(start_id: selected_start_id).and_return({
            batches: 2,
            next_start_id: expected_next_start_id,
            matches: 10,
            mismatches: 0,
            mismatches_details: []
          })
        end

        expect(Gitlab::Database::ConsistencyChecker).to receive(:new).with(
          source_model: Namespace,
          target_model: Ci::NamespaceMirror,
          source_columns: %w[id traversal_ids],
          target_columns: %w[namespace_id traversal_ids]
        ).and_call_original

        expected_result = {
          batches: 2,
          start_id: selected_start_id,
          next_start_id: expected_next_start_id,
          matches: 10,
          mismatches: 0,
          mismatches_details: []
        }
        expect(consistency_check_service.execute).to eq(expected_result)
      end

      it 'saves the next_start_id in Redis for the next iteration' do
        expect(consistency_check_service).to receive(:save_next_start_id).with(expected_next_start_id).and_call_original
        consistency_check_service.execute
      end
    end

    context 'cursor saved in Redis and moving' do
      let(:first_namespace_id) { Namespace.order(:id).first.id }
      let(:second_namespace_id) { Namespace.order(:id).second.id }

      before do
        create_list(:namespace, 30) # This will also create Ci::NamespaceMirror objects
      end

      it "keeps moving the cursor with each call to the service" do
        expect(consistency_check_service).to receive(:random_start_id).at_most(:once).and_return(first_namespace_id)

        allow_next_instance_of(Gitlab::Database::ConsistencyChecker) do |instance|
          expect(instance).to receive(:execute).ordered.with(start_id: first_namespace_id).and_call_original
          expect(instance).to receive(:execute).ordered.with(start_id: first_namespace_id + 10).and_call_original
          expect(instance).to receive(:execute).ordered.with(start_id: first_namespace_id + 20).and_call_original
          # Gets back to the start of the table
          expect(instance).to receive(:execute).ordered.with(start_id: first_namespace_id).and_call_original
        end

        4.times do
          consistency_check_service.execute
        end
      end

      it "keeps moving the cursor from any start point" do
        expect(consistency_check_service).to receive(:random_start_id).at_most(:once).and_return(second_namespace_id)

        allow_next_instance_of(Gitlab::Database::ConsistencyChecker) do |instance|
          expect(instance).to receive(:execute).ordered.with(start_id: second_namespace_id).and_call_original
          expect(instance).to receive(:execute).ordered.with(start_id: second_namespace_id + 10).and_call_original
        end

        2.times do
          consistency_check_service.execute
        end
      end
    end
  end
end

View File

@ -10,26 +10,7 @@ RSpec.describe 'admin/application_settings/_repository_storage.html.haml' do
assign(:application_setting, app_settings)
end
context 'additional storage config' do
let(:repository_storages_weighted) do
{
'default' => 100,
'mepmep' => 50
}
end
it 'lists them all' do
render
Gitlab.config.repositories.storages.keys.each do |storage_name|
expect(rendered).to have_content(storage_name)
end
expect(rendered).to have_content('foobar')
end
end
context 'fewer storage configs' do
context 'with storage weights configured' do
let(:repository_storages_weighted) do
{
'default' => 100,
@ -38,14 +19,23 @@ RSpec.describe 'admin/application_settings/_repository_storage.html.haml' do
}
end
it 'lists storages with weight', :aggregate_failures do
render
expect(rendered).to have_field('default', with: 100)
expect(rendered).to have_field('mepmep', with: 50)
end
it 'lists storages without weight' do
render
expect(rendered).to have_field('foobar', with: 0)
end
it 'lists only configured storages' do
render
Gitlab.config.repositories.storages.keys.each do |storage_name|
expect(rendered).to have_content(storage_name)
end
expect(rendered).not_to have_content('something_old')
expect(rendered).not_to have_field('something_old')
end
end
end

View File

@ -0,0 +1,67 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Database::CiNamespaceMirrorsConsistencyCheckWorker do
  let(:worker) { described_class.new }

  describe '#perform' do
    context 'feature flag is disabled' do
      before do
        stub_feature_flags(ci_namespace_mirrors_consistency_check: false)
      end

      it 'does not perform the consistency check on namespaces' do
        expect(Database::ConsistencyCheckService).not_to receive(:new)
        expect(worker).not_to receive(:log_extra_metadata_on_done)
        worker.perform
      end
    end

    context 'feature flag is enabled' do
      before do
        stub_feature_flags(ci_namespace_mirrors_consistency_check: true)
      end

      it 'executes the consistency check on namespaces' do
        expect(Database::ConsistencyCheckService).to receive(:new).and_call_original
        # With no namespaces seeded, the check reports zero counters.
        expected_result = { batches: 0, matches: 0, mismatches: 0, mismatches_details: [] }
        expect(worker).to receive(:log_extra_metadata_on_done).with(:results, expected_result)
        worker.perform
      end
    end

    context 'logs should contain the detailed mismatches' do
      let(:first_namespace) { Namespace.all.order(:id).limit(1).first }
      let(:missing_namespace) { Namespace.all.order(:id).limit(2).last }

      before do
        redis_shared_state_cleanup!
        stub_feature_flags(ci_namespace_mirrors_consistency_check: true)
        create_list(:namespace, 10) # This will also create Ci::NamespaceMirror objects
        # Deleting a namespace row (without callbacks) leaves an orphaned
        # mirror row behind, producing exactly one mismatch.
        missing_namespace.delete

        allow_next_instance_of(Database::ConsistencyCheckService) do |instance|
          allow(instance).to receive(:random_start_id).and_return(Namespace.first.id)
        end
      end

      it 'reports the differences to the logs' do
        expected_result = {
          batches: 1,
          matches: 9,
          mismatches: 1,
          mismatches_details: [{
            id: missing_namespace.id,
            source_table: nil,
            target_table: [missing_namespace.traversal_ids]
          }],
          start_id: first_namespace.id,
          next_start_id: first_namespace.id # The batch size > number of namespaces
        }
        expect(worker).to receive(:log_extra_metadata_on_done).with(:results, expected_result)
        worker.perform
      end
    end
  end
end

View File

@ -0,0 +1,67 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Database::CiProjectMirrorsConsistencyCheckWorker do
  let(:worker) { described_class.new }

  describe '#perform' do
    context 'feature flag is disabled' do
      before do
        stub_feature_flags(ci_project_mirrors_consistency_check: false)
      end

      it 'does not perform the consistency check on projects' do
        expect(Database::ConsistencyCheckService).not_to receive(:new)
        expect(worker).not_to receive(:log_extra_metadata_on_done)
        worker.perform
      end
    end

    context 'feature flag is enabled' do
      before do
        stub_feature_flags(ci_project_mirrors_consistency_check: true)
      end

      it 'executes the consistency check on projects' do
        expect(Database::ConsistencyCheckService).to receive(:new).and_call_original
        # With no projects seeded, the check reports zero counters.
        expected_result = { batches: 0, matches: 0, mismatches: 0, mismatches_details: [] }
        expect(worker).to receive(:log_extra_metadata_on_done).with(:results, expected_result)
        worker.perform
      end
    end

    context 'logs should contain the detailed mismatches' do
      let(:first_project) { Project.all.order(:id).limit(1).first }
      let(:missing_project) { Project.all.order(:id).limit(2).last }

      before do
        redis_shared_state_cleanup!
        stub_feature_flags(ci_project_mirrors_consistency_check: true)
        # Presumably also creates matching Ci::ProjectMirror rows (the
        # original comment said NameSpaceMirror — a copy-paste slip).
        create_list(:project, 10)
        # Deleting a project row (without callbacks) leaves an orphaned
        # mirror row behind, producing exactly one mismatch.
        missing_project.delete

        allow_next_instance_of(Database::ConsistencyCheckService) do |instance|
          allow(instance).to receive(:random_start_id).and_return(Project.first.id)
        end
      end

      it 'reports the differences to the logs' do
        expected_result = {
          batches: 1,
          matches: 9,
          mismatches: 1,
          mismatches_details: [{
            id: missing_project.id,
            source_table: nil,
            target_table: [missing_project.namespace_id]
          }],
          start_id: first_project.id,
          next_start_id: first_project.id # The batch size > number of projects
        }
        expect(worker).to receive(:log_extra_metadata_on_done).with(:results, expected_result)
        worker.perform
      end
    end
  end
end

View File

@ -8,9 +8,9 @@ module Tooling
REVIEW_LABEL = 'product intelligence::review pending'
CHANGED_FILES_MESSAGE = <<~MSG
For the following files, a review from the [Data team and Product Intelligence team](https://gitlab.com/groups/gitlab-org/growth/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) is recommended
Please check the ~"product intelligence" [guide](https://docs.gitlab.com/ee/development/usage_ping.html).
Please check the ~"product intelligence" [Service Ping guide](https://docs.gitlab.com/ee/development/service_ping/) or the [Snowplow guide](https://docs.gitlab.com/ee/development/snowplow/).
For MR review guidelines, see the [Service Ping review guidelines](https://docs.gitlab.com/ee/development/usage_ping/review_guidelines.html) or the [Snowplow review guidelines](https://docs.gitlab.com/ee/development/snowplow/review_guidelines.html).
For MR review guidelines, see the [Service Ping review guidelines](https://docs.gitlab.com/ee/development/service_ping/review_guidelines.html) or the [Snowplow review guidelines](https://docs.gitlab.com/ee/development/snowplow/review_guidelines.html).
%<changed_files>s