Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-02-10 06:17:41 +00:00
parent 8e6b1dc7f7
commit b4ea95860f
28 changed files with 290 additions and 176 deletions

View File

@ -54,7 +54,6 @@
- knapsack/
- rspec/
- tmp/capybara/
- tmp/memory_test/
- log/*.log
reports:
junit: ${JUNIT_RESULT_FILE}
@ -312,6 +311,8 @@ rspec system pg12:
- .rspec-base-pg12
- .rails:rules:ee-and-foss-system
- .rspec-system-parallel
variables:
DEBUG_GITLAB_TRANSACTION_STACK: "true"
rspec system pg12 minimal:
extends:
@ -571,7 +572,6 @@ rspec:coverage:
- rspec system pg12-as-if-foss decomposed
script:
- run_timed_command "bundle exec scripts/merge-simplecov"
- run_timed_command "bundle exec scripts/gather-test-memory-data"
coverage: '/LOC \((\d+\.\d+%)\) covered.$/'
artifacts:
name: coverage
@ -580,7 +580,6 @@ rspec:coverage:
- coverage/index.html
- coverage/assets/
- coverage/lcov/
- tmp/memory_test/
reports:
cobertura: coverage/coverage.xml

View File

@ -41,7 +41,6 @@ import {
i18n,
MAX_LIST_SIZE,
PAGE_SIZE,
PARAM_DUE_DATE,
PARAM_STATE,
RELATIVE_POSITION_ASC,
TOKEN_TYPE_ASSIGNEE,
@ -65,7 +64,6 @@ import {
convertToApiParams,
convertToSearchQuery,
convertToUrlParams,
getDueDateValue,
getFilterTokens,
getInitialPageParams,
getSortKey,
@ -161,7 +159,6 @@ export default {
}
return {
dueDateFilter: getDueDateValue(getParameterByName(PARAM_DUE_DATE)),
exportCsvPathWithQuery: this.getExportCsvPathWithQuery(),
filterTokens: isSearchDisabled ? [] : getFilterTokens(window.location.search),
issues: [],
@ -391,7 +388,6 @@ export default {
},
urlParams() {
return {
due_date: this.dueDateFilter,
search: this.searchQuery,
sort: urlSortParams[this.sortKey],
state: this.state,

View File

@ -55,8 +55,6 @@ export const i18n = {
export const MAX_LIST_SIZE = 10;
export const PAGE_SIZE = 20;
export const PAGE_SIZE_MANUAL = 100;
export const PARAM_DUE_DATE = 'due_date';
export const PARAM_SORT = 'sort';
export const PARAM_STATE = 'state';
export const RELATIVE_POSITION = 'relative_position';
@ -68,21 +66,6 @@ export const largePageSizeParams = {
firstPageSize: PAGE_SIZE_MANUAL,
};
export const DUE_DATE_NONE = '0';
export const DUE_DATE_ANY = '';
export const DUE_DATE_OVERDUE = 'overdue';
export const DUE_DATE_WEEK = 'week';
export const DUE_DATE_MONTH = 'month';
export const DUE_DATE_NEXT_MONTH_AND_PREVIOUS_TWO_WEEKS = 'next_month_and_previous_two_weeks';
export const DUE_DATE_VALUES = [
DUE_DATE_NONE,
DUE_DATE_ANY,
DUE_DATE_OVERDUE,
DUE_DATE_WEEK,
DUE_DATE_MONTH,
DUE_DATE_NEXT_MONTH_AND_PREVIOUS_TWO_WEEKS,
];
export const BLOCKING_ISSUES_ASC = 'BLOCKING_ISSUES_ASC';
export const BLOCKING_ISSUES_DESC = 'BLOCKING_ISSUES_DESC';
export const CREATED_ASC = 'CREATED_ASC';

View File

@ -13,7 +13,6 @@ import {
defaultPageSizeParams,
DUE_DATE_ASC,
DUE_DATE_DESC,
DUE_DATE_VALUES,
filters,
LABEL_PRIORITY_ASC,
LABEL_PRIORITY_DESC,
@ -52,8 +51,6 @@ export const getSortKey = (sort) =>
export const isSortKey = (sort) => Object.keys(urlSortParams).includes(sort);
export const getDueDateValue = (value) => (DUE_DATE_VALUES.includes(value) ? value : undefined);
export const getSortOptions = (hasIssueWeightsFeature, hasBlockedIssuesFeature) => {
const sortOptions = [
{

View File

@ -38,19 +38,17 @@ class ReleasesFinder
if parent.is_a?(Project)
Ability.allowed?(current_user, :read_release, parent) ? [parent] : []
elsif parent.is_a?(Group)
accessible_projects
Ability.allowed?(current_user, :read_release, parent) ? accessible_projects : []
end
end
end
def accessible_projects
projects = if include_subgroups?
Project.for_group_and_its_subgroups(parent)
else
parent.projects
end
projects.select { |project| Ability.allowed?(current_user, :read_release, project) }
if include_subgroups?
Project.for_group_and_its_subgroups(parent)
else
parent.projects
end
end
# rubocop: disable CodeReuse/ActiveRecord
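
The `ReleasesFinder` change above gates group-level release listing on the caller's `:read_release` permission for the group itself, rather than relying only on the per-project filter. A minimal plain-Ruby sketch of that pattern, with stand-in classes instead of GitLab's real `Ability`, `Group`, and `Project`:

```ruby
# Simplified model of the change: deny group-level listing outright unless the
# caller holds :read_release on the group, and only then look at its projects.
class FakeAbility
  def initialize(allowed)
    @allowed = allowed
  end

  # Stand-in for Ability.allowed?(current_user, :read_release, subject).
  def allowed?(_user, _permission, _subject)
    @allowed
  end
end

def release_source_projects(ability, user, group)
  return [] unless ability.allowed?(user, :read_release, group)

  group[:projects]
end

group = { name: 'top-group', projects: %w[project-a project-b] }

p release_source_projects(FakeAbility.new(false), :guest, group) # => []
p release_source_projects(FakeAbility.new(true), :guest, group)  # => ["project-a", "project-b"]
```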

View File

@ -4,15 +4,45 @@ module CrossDatabaseModification
extend ActiveSupport::Concern
class TransactionStackTrackRecord
DEBUG_STACK = Rails.env.test? && ENV['DEBUG_GITLAB_TRANSACTION_STACK']
LOG_FILENAME = Rails.root.join("log", "gitlab_transaction_stack.log")
def self.logger
@logger ||= Logger.new(LOG_FILENAME, formatter: ->(_, _, _, msg) { Gitlab::Json.dump(msg) + "\n" })
end
def self.log_gitlab_transactions_stack(action: nil, example: nil)
return unless DEBUG_STACK
message = "gitlab_transactions_stack performing #{action}"
message += " in example #{example}" if example
cleaned_backtrace = Gitlab::BacktraceCleaner.clean_backtrace(caller)
.reject { |line| line.include?('lib/gitlab/database/query_analyzer') }
.first(5)
logger.warn({
message: message,
action: action,
gitlab_transactions_stack: ::ApplicationRecord.gitlab_transactions_stack,
caller: cleaned_backtrace,
thread: Thread.current.object_id
})
end
def initialize(subject, gitlab_schema)
@subject = subject
@gitlab_schema = gitlab_schema
@subject.gitlab_transactions_stack.push(gitlab_schema)
self.class.log_gitlab_transactions_stack(action: :after_push)
end
def done!
unless @done
@done = true
self.class.log_gitlab_transactions_stack(action: :before_pop)
@subject.gitlab_transactions_stack.pop
end
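
The logger added to `TransactionStackTrackRecord` writes one JSON document per line so the transaction stack can be grepped after a test run. A standalone sketch of that formatter, using the stdlib `Logger` and `JSON` in place of `Gitlab::Json` (the payload below is illustrative, not the exact production output):

```ruby
require 'json'
require 'logger'

# One JSON object per line: the formatter ignores severity, time, and progname
# and serialises whatever structured message was passed to the logger.
logger = Logger.new($stdout, formatter: ->(_severity, _time, _progname, msg) { JSON.dump(msg) + "\n" })

logger.warn(
  message: 'gitlab_transactions_stack performing after_push',
  action: :after_push,
  gitlab_transactions_stack: [:gitlab_main, :gitlab_ci],
  caller: caller.first(5),
  thread: Thread.current.object_id
)
# => {"message":"gitlab_transactions_stack performing after_push","action":"after_push",...}
```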

View File

@ -29,6 +29,10 @@ class NamespaceStatistics < ApplicationRecord # rubocop:disable Gitlab/Namespace
end
def update_storage_size
# This prevents failures with older database schemas, such as those
# in migration specs.
return unless self.class.database.cached_column_exists?(:dependency_proxy_size)
self.storage_size = dependency_proxy_size
end

View File

@ -100,6 +100,7 @@ class GroupPolicy < Namespaces::GroupProjectNamespaceSharedPolicy
enable :read_group
enable :upload_file
enable :guest_access
enable :read_release
end
rule { admin }.policy do

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class AddTmpIndexRoutesIdForNamespaces < Gitlab::Database::Migration[1.0]
INDEX_NAME = 'tmp_index_for_namespace_id_migration_on_routes'
disable_ddl_transaction!
def up
# Temporary index to be removed in 14.9
# https://gitlab.com/gitlab-org/gitlab/-/issues/352353
add_concurrent_index :routes, :id, where: "routes.namespace_id is null and routes.source_type = 'Namespace'", name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :routes, INDEX_NAME
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
class BackfillNamespaceIdForNamespaceRoutes < Gitlab::Database::Migration[1.0]
MIGRATION = 'BackfillNamespaceIdForNamespaceRoute'
INTERVAL = 2.minutes
BATCH_SIZE = 1_000
MAX_BATCH_SIZE = 10_000
SUB_BATCH_SIZE = 200
def up
queue_batched_background_migration(
MIGRATION,
:routes,
:id,
job_interval: INTERVAL,
batch_size: BATCH_SIZE,
max_batch_size: MAX_BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
Gitlab::Database::BackgroundMigration::BatchedMigration
.for_configuration(MIGRATION, :routes, :id, [])
.delete_all
end
end

View File

@ -0,0 +1 @@
9e274eae18520821dd890a11c8c6192da82a6051dce9ec2934b1365e57a10fdb

View File

@ -0,0 +1 @@
9d2f0b0d2cb1a5844bdca5bcb4b82fa5cc6b465fa19177f9f6ca16574128fdc8

View File

@ -28311,6 +28311,8 @@ CREATE INDEX tmp_idx_deduplicate_vulnerability_occurrences ON vulnerability_occu
CREATE INDEX tmp_idx_vulnerability_occurrences_on_id_where_report_type_7_99 ON vulnerability_occurrences USING btree (id) WHERE (report_type = ANY (ARRAY[7, 99]));
CREATE INDEX tmp_index_for_namespace_id_migration_on_routes ON routes USING btree (id) WHERE ((namespace_id IS NULL) AND ((source_type)::text = 'Namespace'::text));
CREATE INDEX tmp_index_members_on_state ON members USING btree (state) WHERE (state = 2);
CREATE INDEX tmp_index_namespaces_empty_traversal_ids_with_child_namespaces ON namespaces USING btree (id) WHERE ((parent_id IS NOT NULL) AND (traversal_ids = '{}'::integer[]));

View File

@ -974,7 +974,7 @@ failure.
- `on_success` (default): Upload artifacts only when the job succeeds.
- `on_failure`: Upload artifacts only when the job fails.
- `always`: Always upload artifacts. For example, when
- `always`: Always upload artifacts (except when jobs time out). For example, when
[uploading artifacts](../unit_test_reports.md#viewing-junit-screenshots-on-gitlab)
required to troubleshoot failing tests.

View File

@ -570,6 +570,32 @@ The following variables are used for configuring specific analyzers (used for a
| `RETIREJS_NODE_ADVISORY_DB` | `retire.js` | `https://raw.githubusercontent.com/RetireJS/retire.js/master/repository/npmrepository.json` | Path or URL to `retire.js` node vulnerability data file. Note that if the URL hosting the data file uses a custom SSL certificate, for example in an offline installation, you can pass the certificate in the `ADDITIONAL_CA_CERT_BUNDLE` variable. |
| `RETIREJS_ADVISORY_DB_INSECURE` | `retire.js` | `false` | Enable fetching remote JS and Node vulnerability data files (defined by the `RETIREJS_JS_ADVISORY_DB` and `RETIREJS_NODE_ADVISORY_DB` variables) from hosts using an insecure or self-signed SSL (TLS) certificate. |
#### Other variables
The previous tables are not an exhaustive list of all variables that can be used; they contain only the GitLab-specific and analyzer-specific variables we support and test. You can pass in many other variables, such as environment variables, and they will work. Because that list is large, and we may be unaware of many of the variables in it, it is not documented.
For example, to pass the non-GitLab environment variable `HTTPS_PROXY` to all Dependency Scanning jobs,
set it as a [custom CI/CD variable in your `.gitlab-ci.yml`](../../../ci/variables/#create-a-custom-cicd-variable-in-the-gitlab-ciyml-file)
file like this:
```yaml
variables:
HTTPS_PROXY: "https://squid-proxy:3128"
```
Alternatively, you can set it in specific jobs, like Dependency Scanning:
```yaml
dependency_scanning:
variables:
HTTPS_PROXY: $HTTPS_PROXY
```
Because we have not tested all variables, you may find that some work and others do not.
If one you need does not work, we suggest
[submitting a feature request](https://gitlab.com/gitlab-org/gitlab/-/issues/new?issuable_template=Feature%20proposal%20-%20detailed&issue[title]=Docs%20feedback%20-%20feature%20proposal:%20Write%20your%20title)
or [contributing to the code](../../../development/index.md) to enable it.
### Using a custom SSL CA certificate authority
You can use the `ADDITIONAL_CA_CERT_BUNDLE` CI/CD variable to configure a custom SSL CA certificate authority. The `ADDITIONAL_CA_CERT_BUNDLE` value should contain the [text representation of the X.509 PEM public-key certificate](https://tools.ietf.org/html/rfc7468#section-5.1). For example, to configure this value in the `.gitlab-ci.yml` file, use the following:

View File

@ -0,0 +1,38 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Backfills the `routes.namespace_id` column, by copying source_id value
# (for groups and user namespaces source_id == namespace_id)
class BackfillNamespaceIdForNamespaceRoute
include Gitlab::Database::DynamicModelHelpers
def perform(start_id, end_id, batch_table, batch_column, sub_batch_size, pause_ms)
parent_batch_relation = relation_scoped_to_range(batch_table, batch_column, start_id, end_id)
parent_batch_relation.each_batch(column: batch_column, of: sub_batch_size) do |sub_batch|
batch_metrics.time_operation(:update_all) do
sub_batch.update_all('namespace_id=source_id')
end
pause_ms = [0, pause_ms].max
sleep(pause_ms * 0.001)
end
end
def batch_metrics
@batch_metrics ||= Gitlab::Database::BackgroundMigration::BatchMetrics.new
end
private
def relation_scoped_to_range(source_table, source_key_column, start_id, stop_id)
define_batchable_model(source_table, connection: ActiveRecord::Base.connection)
.joins('inner join namespaces on routes.source_id = namespaces.id')
.where(source_key_column => start_id..stop_id)
.where(namespace_id: nil)
.where(source_type: 'Namespace')
end
end
end
end
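
`BackfillNamespaceIdForNamespaceRoute#perform` walks the parent batch in sub-batches and sleeps `pause_ms` milliseconds between them to throttle database load. A minimal plain-Ruby sketch of that loop shape, with an array standing in for the ActiveRecord relation and a no-op in place of `update_all` (the helper name is hypothetical, not GitLab's API):

```ruby
# Throttled sub-batch processing: do the work for each slice, then pause so a
# long-running backfill does not saturate the database.
def backfill_in_sub_batches(ids, sub_batch_size:, pause_ms:)
  pause_ms = [0, pause_ms].max # never sleep for a negative duration

  ids.each_slice(sub_batch_size) do |sub_batch|
    started = Process.clock_gettime(Process::CLOCK_MONOTONIC)

    sub_batch.each { |_id| } # stand-in for sub_batch.update_all('namespace_id=source_id')

    elapsed = Process.clock_gettime(Process::CLOCK_MONOTONIC) - started
    puts format('updated %d rows in %.4fs', sub_batch.size, elapsed)

    sleep(pause_ms * 0.001) # pause_ms is in milliseconds
  end
end

backfill_in_sub_batches((1..1_000).to_a, sub_batch_size: 200, pause_ms: 100)
```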

View File

@ -56,6 +56,9 @@ module Gitlab
context[:transaction_depth_by_db][database] -= 1
if context[:transaction_depth_by_db][database] == 0
context[:modified_tables_by_db][database].clear
# Attempt to troubleshoot https://gitlab.com/gitlab-org/gitlab/-/issues/351531
::CrossDatabaseModification::TransactionStackTrackRecord.log_gitlab_transactions_stack(action: :end_of_transaction)
elsif context[:transaction_depth_by_db][database] < 0
context[:transaction_depth_by_db][database] = 0
raise CrossDatabaseModificationAcrossUnsupportedTablesError, "Misaligned cross-DB transactions discovered at query #{sql}. This could be a bug in #{self.class} or a valid issue to investigate. Read more at https://docs.gitlab.com/ee/development/database/multiple_databases.html#removing-cross-database-transactions ."
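
The query-analyzer hunk above decrements a per-database transaction depth, clears the set of modified tables once the outermost transaction ends, and treats a negative depth as a misaligned transaction. A simplified, self-contained Ruby sketch of that bookkeeping, using a plain context hash rather than the analyzer's real interface:

```ruby
require 'set'

# COMMIT at depth 1 ends the outermost transaction, so the modified-table set
# is cleared; a COMMIT with no matching BEGIN leaves a negative depth, which
# is reset and reported as an error.
def end_transaction(context, database)
  context[:transaction_depth_by_db][database] -= 1

  if context[:transaction_depth_by_db][database] == 0
    context[:modified_tables_by_db][database].clear
  elsif context[:transaction_depth_by_db][database] < 0
    context[:transaction_depth_by_db][database] = 0
    raise "Misaligned cross-DB transaction on #{database}"
  end
end

context = {
  transaction_depth_by_db: Hash.new(0),
  modified_tables_by_db: Hash.new { |hash, key| hash[key] = Set.new }
}

context[:transaction_depth_by_db][:main] = 2
context[:modified_tables_by_db][:main] << 'projects'

end_transaction(context, :main) # depth 2 -> 1, modified tables kept
end_transaction(context, :main) # depth 1 -> 0, modified tables cleared
p context[:modified_tables_by_db][:main].empty? # => true
```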

View File

@ -1,22 +0,0 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
require 'csv'
def join_csv_files(output_path, input_paths)
return if input_paths.empty?
input_csvs = input_paths.map do |input_path|
CSV.read(input_path, headers: true)
end
CSV.open(output_path, "w", headers: input_csvs.first.headers, write_headers: true) do |output_csv|
input_csvs.each do |input_csv|
input_csv.each do |line|
output_csv << line
end
end
end
end
join_csv_files('tmp/memory_test/report.csv', Dir['tmp/memory_test/*.csv'].sort)

View File

@ -268,10 +268,6 @@ function rspec_paralellized_job() {
debug_rspec_variables
mkdir -p tmp/memory_test
export MEMORY_TEST_PATH="tmp/memory_test/${report_name}_memory.csv"
if [[ -n $RSPEC_TESTS_MAPPING_ENABLED ]]; then
tooling/bin/parallel_rspec --rspec_args "$(rspec_args "${rspec_opts}")" --filter "tmp/matching_tests.txt" || rspec_run_status=$?
else

View File

@ -6,14 +6,14 @@ RSpec.describe Groups::ReleasesController do
let(:group) { create(:group) }
let!(:project) { create(:project, :repository, :public, namespace: group) }
let!(:private_project) { create(:project, :repository, :private, namespace: group) }
let(:developer) { create(:user) }
let(:guest) { create(:user) }
let!(:release_1) { create(:release, project: project, tag: 'v1', released_at: Time.zone.parse('2020-02-15')) }
let!(:release_2) { create(:release, project: project, tag: 'v2', released_at: Time.zone.parse('2020-02-20')) }
let!(:private_release_1) { create(:release, project: private_project, tag: 'p1', released_at: Time.zone.parse('2020-03-01')) }
let!(:private_release_2) { create(:release, project: private_project, tag: 'p2', released_at: Time.zone.parse('2020-03-05')) }
before do
private_project.add_developer(developer)
group.add_guest(guest)
end
describe 'GET #index' do
@ -42,7 +42,7 @@ RSpec.describe Groups::ReleasesController do
end
it 'does not return any releases' do
expect(json_response.map {|r| r['tag'] } ).to match_array(%w(v2 v1))
expect(json_response.map {|r| r['tag'] } ).to be_empty
end
it 'returns OK' do
@ -52,7 +52,7 @@ RSpec.describe Groups::ReleasesController do
context 'the user is authorized' do
it "returns all group's public and private project's releases as JSON, ordered by released_at" do
sign_in(developer)
sign_in(guest)
subject

View File

@ -23,6 +23,16 @@ RSpec.describe ReleasesFinder do
end
end
shared_examples_for 'when the user is not part of the group' do
before do
allow(Ability).to receive(:allowed?).with(user, :read_release, group).and_return(false)
end
it 'returns no releases' do
is_expected.to be_empty
end
end
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/27716
shared_examples_for 'when tag is nil' do
before do
@ -66,9 +76,9 @@ RSpec.describe ReleasesFinder do
it_behaves_like 'when the user is not part of the project'
context 'when the user is a project developer' do
context 'when the user is a project guest' do
before do
project.add_developer(user)
project.add_guest(user)
end
it 'sorts by release date' do
@ -118,25 +128,24 @@ RSpec.describe ReleasesFinder do
subject { described_class.new(group, user, params).execute(**args) }
it_behaves_like 'when the user is not part of the project'
it_behaves_like 'when the user is not part of the group'
context 'when the user is a project developer on one sibling project' do
context 'when the user is a project guest on one sibling project' do
before do
project.add_developer(user)
project.add_guest(user)
v1_0_0.update_attribute(:released_at, 3.days.ago)
v1_1_0.update_attribute(:released_at, 1.day.ago)
end
it 'sorts by release date' do
expect(subject.size).to eq(2)
expect(subject).to eq([v1_1_0, v1_0_0])
it 'does not return any releases' do
expect(subject.size).to eq(0)
expect(subject).to eq([])
end
end
context 'when the user is a project developer on all projects' do
context 'when the user is a guest on the group' do
before do
project.add_developer(user)
project2.add_developer(user)
group.add_guest(user)
v1_0_0.update_attribute(:released_at, 3.days.ago)
v6.update_attribute(:released_at, 2.days.ago)
v1_1_0.update_attribute(:released_at, 1.day.ago)
@ -161,22 +170,21 @@ RSpec.describe ReleasesFinder do
let(:project2) { create(:project, :repository, namespace: subgroup) }
let!(:v6) { create(:release, project: project2, tag: 'v6') }
it_behaves_like 'when the user is not part of the project'
it_behaves_like 'when the user is not part of the group'
context 'when the user is a project developer in the subgroup project' do
context 'when the user is a project guest in the subgroup project' do
before do
project2.add_developer(user)
project2.add_guest(user)
end
it 'returns only the subgroup releases' do
expect(subject).to match_array([v6])
it 'does not return any releases' do
expect(subject).to match_array([])
end
end
context 'when the user is a project developer in both projects' do
context 'when the user is a guest on the group' do
before do
project.add_developer(user)
project2.add_developer(user)
group.add_guest(user)
v6.update_attribute(:released_at, 2.days.ago)
end
@ -201,34 +209,32 @@ RSpec.describe ReleasesFinder do
p3.update_attribute(:released_at, 3.days.ago)
end
it_behaves_like 'when the user is not part of the project'
it_behaves_like 'when the user is not part of the group'
context 'when the user is a project developer in the subgroup and subsubgroup project' do
context 'when the user is a project guest in the subgroup and subsubgroup project' do
before do
project2.add_developer(user)
project3.add_developer(user)
project2.add_guest(user)
project3.add_guest(user)
end
it 'returns only the subgroup and subsubgroup releases' do
expect(subject).to match_array([v6, p3])
it 'does not return any releases' do
expect(subject).to match_array([])
end
end
context 'when the user is a project developer in the subsubgroup project' do
context 'when the user is a project guest in the subsubgroup project' do
before do
project3.add_developer(user)
project3.add_guest(user)
end
it 'returns only the subsubgroup releases' do
expect(subject).to match_array([p3])
it 'does not return any releases' do
expect(subject).to match_array([])
end
end
context 'when the user is a project developer in all projects' do
context 'when the user is a guest on the group' do
before do
project.add_developer(user)
project2.add_developer(user)
project3.add_developer(user)
group.add_guest(user)
end
it 'returns all releases' do

View File

@ -30,8 +30,6 @@ import IssuesListApp from '~/issues/list/components/issues_list_app.vue';
import NewIssueDropdown from '~/issues/list/components/new_issue_dropdown.vue';
import {
CREATED_DESC,
DUE_DATE_OVERDUE,
PARAM_DUE_DATE,
RELATIVE_POSITION,
RELATIVE_POSITION_ASC,
TOKEN_TYPE_ASSIGNEE,
@ -296,16 +294,6 @@ describe('CE IssuesListApp component', () => {
});
describe('initial url params', () => {
describe('due_date', () => {
it('is set from the url params', () => {
setWindowLocation(`?${PARAM_DUE_DATE}=${DUE_DATE_OVERDUE}`);
wrapper = mountComponent();
expect(findIssuableList().props('urlParams')).toMatchObject({ due_date: DUE_DATE_OVERDUE });
});
});
describe('search', () => {
it('is set from the url params', () => {
setWindowLocation(locationSearch);

View File

@ -10,7 +10,6 @@ import {
} from 'jest/issues/list/mock_data';
import {
defaultPageSizeParams,
DUE_DATE_VALUES,
largePageSizeParams,
RELATIVE_POSITION_ASC,
urlSortParams,
@ -19,7 +18,6 @@ import {
convertToApiParams,
convertToSearchQuery,
convertToUrlParams,
getDueDateValue,
getFilterTokens,
getInitialPageParams,
getSortKey,
@ -56,16 +54,6 @@ describe('isSortKey', () => {
});
});
describe('getDueDateValue', () => {
it.each(DUE_DATE_VALUES)('returns the argument when it is `%s`', (value) => {
expect(getDueDateValue(value)).toBe(value);
});
it('returns undefined when the argument is invalid', () => {
expect(getDueDateValue('invalid value')).toBeUndefined();
});
});
describe('getSortOptions', () => {
describe.each`
hasIssueWeightsFeature | hasBlockedIssuesFeature | length | containsWeight | containsBlocking

View File

@ -0,0 +1,50 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdForNamespaceRoute, :migration, schema: 20220120123800 do
let(:migration) { described_class.new }
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
let(:routes_table) { table(:routes) }
let(:table_name) { 'routes' }
let(:batch_column) { :id }
let(:sub_batch_size) { 200 }
let(:pause_ms) { 0 }
let(:namespace1) { namespaces_table.create!(name: 'namespace1', path: 'namespace1', type: 'User') }
let(:namespace2) { namespaces_table.create!(name: 'namespace2', path: 'namespace2', type: 'Group') }
let(:namespace3) { namespaces_table.create!(name: 'namespace3', path: 'namespace3', type: 'Group') }
let(:namespace4) { namespaces_table.create!(name: 'namespace4', path: 'namespace4', type: 'Group') }
let(:project1) { projects_table.create!(name: 'project1', namespace_id: namespace1.id) }
subject(:perform_migration) { migration.perform(1, 10, table_name, batch_column, sub_batch_size, pause_ms) }
before do
routes_table.create!(id: 1, name: 'test1', path: 'test1', source_id: namespace1.id,
source_type: namespace1.class.sti_name)
routes_table.create!(id: 2, name: 'test2', path: 'test2', source_id: namespace2.id,
source_type: namespace2.class.sti_name)
routes_table.create!(id: 5, name: 'test3', path: 'test3', source_id: project1.id,
source_type: project1.class.sti_name) # should be ignored - project route
routes_table.create!(id: 6, name: 'test4', path: 'test4', source_id: non_existing_record_id,
source_type: namespace3.class.sti_name) # should be ignored - invalid source_id
routes_table.create!(id: 10, name: 'test5', path: 'test5', source_id: namespace3.id,
source_type: namespace3.class.sti_name)
routes_table.create!(id: 11, name: 'test6', path: 'test6', source_id: namespace4.id,
source_type: namespace4.class.sti_name) # should be ignored - outside the scope
end
it 'backfills `namespace_id` for the selected records', :aggregate_failures do
perform_migration
expect(routes_table.where.not(namespace_id: nil).pluck(:id)).to match_array([1, 2, 10])
end
it 'tracks timings of queries' do
expect(migration.batch_metrics.timings).to be_empty
expect { perform_migration }.to change { migration.batch_metrics.timings }
end
end

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe BackfillNamespaceIdForNamespaceRoutes do
let_it_be(:migration) { described_class::MIGRATION }
describe '#up' do
it 'schedules background jobs for each batch of routes' do
migrate!
expect(migration).to have_scheduled_batched_migration(
table_name: :routes,
column_name: :id,
interval: described_class::INTERVAL
)
end
end
describe '#down' do
it 'deletes all batched migration records' do
migrate!
schema_migrate_down!
expect(migration).not_to have_scheduled_batched_migration
end
end
end

View File

@ -114,6 +114,15 @@ RSpec.configure do |config|
config.run_all_when_everything_filtered = true
end
# Attempt to troubleshoot https://gitlab.com/gitlab-org/gitlab/-/issues/351531
config.after do |example|
if example.exception.is_a?(Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification::CrossDatabaseModificationAcrossUnsupportedTablesError)
::CrossDatabaseModification::TransactionStackTrackRecord.log_gitlab_transactions_stack(action: :after_failure, example: example.description)
else
::CrossDatabaseModification::TransactionStackTrackRecord.log_gitlab_transactions_stack(action: :after_example, example: example.description)
end
end
# Re-run failures locally with `--only-failures`
config.example_status_persistence_file_path = ENV.fetch('RSPEC_LAST_RUN_RESULTS_FILE', './spec/examples.txt')
@ -184,7 +193,6 @@ RSpec.configure do |config|
config.include RedisHelpers
config.include Rails.application.routes.url_helpers, type: :routing
config.include PolicyHelpers, type: :policy
config.include MemoryUsageHelper
config.include ExpectRequestWithStatus, type: :request
config.include IdempotentWorkerHelper, type: :worker
config.include RailsHelpers
@ -244,10 +252,6 @@ RSpec.configure do |config|
::Ci::ApplicationRecord.set_open_transactions_baseline
end
config.append_before do
Thread.current[:current_example_group] = ::RSpec.current_example.metadata[:example_group]
end
config.append_after do
ApplicationRecord.reset_open_transactions_baseline
::Ci::ApplicationRecord.reset_open_transactions_baseline

View File

@ -1,37 +0,0 @@
# frozen_string_literal: true
module MemoryUsageHelper
extend ActiveSupport::Concern
def gather_memory_data(csv_path)
write_csv_entry(csv_path,
{
example_group_path: TestEnv.topmost_example_group[:location],
example_group_description: TestEnv.topmost_example_group[:description],
time: Time.current,
job_name: ENV['CI_JOB_NAME']
}.merge(get_memory_usage))
end
def write_csv_entry(path, entry)
CSV.open(path, "a", headers: entry.keys, write_headers: !File.exist?(path)) do |file|
file << entry.values
end
end
def get_memory_usage
output, status = Gitlab::Popen.popen(%w(free -m))
abort "`free -m` return code is #{status}: #{output}" unless status == 0
result = output.split("\n")[1].split(" ")[1..]
attrs = %i(m_total m_used m_free m_shared m_buffers_cache m_available).freeze
attrs.zip(result).to_h
end
included do |config|
config.after(:all) do
gather_memory_data(ENV['MEMORY_TEST_PATH']) if ENV['MEMORY_TEST_PATH']
end
end
end

View File

@ -371,17 +371,6 @@ module TestEnv
FileUtils.rm_rf(path)
end
def current_example_group
Thread.current[:current_example_group]
end
# looking for a top-level `describe`
def topmost_example_group
example_group = current_example_group
example_group = example_group[:parent_example_group] until example_group[:parent_example_group].nil?
example_group
end
def seed_db
Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.import
end