Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-05-25 21:10:26 +00:00
parent a880341a7b
commit c0bc55ffe1
38 changed files with 447 additions and 79 deletions

View file

@ -11,12 +11,4 @@ fragment AlertListItem on AlertManagementAlert {
title
webUrl
}
assignees {
nodes {
name
username
avatarUrl
webUrl
}
}
}

View file

@ -1,4 +1,5 @@
#import "~/graphql_shared/fragments/alert.fragment.graphql"
#import "~/graphql_shared/fragments/user.fragment.graphql"
query getAlerts(
$projectPath: ID!
@ -26,6 +27,11 @@ query getAlerts(
) {
nodes {
...AlertListItem
assignees {
nodes {
...User
}
}
}
pageInfo {
hasNextPage

View file

@ -16,6 +16,7 @@ export const IssuableType = {
Issue: 'issue',
Epic: 'epic',
MergeRequest: 'merge_request',
Alert: 'alert',
};
export const IssueStateEvent = {

View file

@ -61,7 +61,7 @@ export default {
required: false,
default: IssuableType.Issue,
validator(value) {
return [IssuableType.Issue, IssuableType.MergeRequest].includes(value);
return [IssuableType.Issue, IssuableType.MergeRequest, IssuableType.Alert].includes(value);
},
},
issuableId: {

View file

@ -19,6 +19,8 @@ import updateIssueConfidentialMutation from '~/sidebar/queries/update_issue_conf
import updateIssueDueDateMutation from '~/sidebar/queries/update_issue_due_date.mutation.graphql';
import updateIssueSubscriptionMutation from '~/sidebar/queries/update_issue_subscription.mutation.graphql';
import updateMergeRequestSubscriptionMutation from '~/sidebar/queries/update_merge_request_subscription.mutation.graphql';
import updateAlertAssigneesMutation from '~/vue_shared/alert_details/graphql/mutations/alert_set_assignees.mutation.graphql';
import getAlertAssignees from '~/vue_shared/components/sidebar/queries/get_alert_assignees.query.graphql';
import getIssueAssignees from '~/vue_shared/components/sidebar/queries/get_issue_assignees.query.graphql';
import issueParticipantsQuery from '~/vue_shared/components/sidebar/queries/get_issue_participants.query.graphql';
import getIssueTimelogsQuery from '~/vue_shared/components/sidebar/queries/get_issue_timelogs.query.graphql';
@ -40,6 +42,10 @@ export const assigneesQueries = {
query: getMergeRequestAssignees,
mutation: updateMergeRequestAssigneesMutation,
},
[IssuableType.Alert]: {
query: getAlertAssignees,
mutation: updateAlertAssigneesMutation,
},
};
export const participantsQueries = {
@ -52,6 +58,10 @@ export const participantsQueries = {
[IssuableType.Epic]: {
query: epicParticipantsQuery,
},
[IssuableType.Alert]: {
query: '',
skipQuery: true,
},
};
export const confidentialityQueries = {

View file

@ -24,7 +24,7 @@ import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import { PAGE_CONFIG, SEVERITY_LEVELS } from '../constants';
import createIssueMutation from '../graphql/mutations/alert_issue_create.mutation.graphql';
import toggleSidebarStatusMutation from '../graphql/mutations/alert_sidebar_status.mutation.graphql';
import alertQuery from '../graphql/queries/alert_details.query.graphql';
import alertQuery from '../graphql/queries/alert_sidebar_details.query.graphql';
import sidebarStatusQuery from '../graphql/queries/alert_sidebar_status.query.graphql';
import AlertMetrics from './alert_metrics.vue';
import AlertSidebar from './alert_sidebar.vue';

View file

@ -167,10 +167,10 @@ export default {
variables: {
iid: this.alert.iid,
assigneeUsernames: [this.isActive(assignees) ? '' : assignees],
projectPath: this.projectPath,
fullPath: this.projectPath,
},
})
.then(({ data: { alertSetAssignees: { errors } = [] } = {} } = {}) => {
.then(({ data: { issuableSetAssignees: { errors } = [] } = {} } = {}) => {
this.hideDropdown();
if (errors[0]) {

View file

@ -4,7 +4,7 @@ import todoMarkDoneMutation from '~/graphql_shared/mutations/todo_mark_done.muta
import { s__ } from '~/locale';
import Todo from '~/sidebar/components/todo_toggle/todo.vue';
import createAlertTodoMutation from '../../graphql/mutations/alert_todo_create.mutation.graphql';
import alertQuery from '../../graphql/queries/alert_details.query.graphql';
import alertQuery from '../../graphql/queries/alert_sidebar_details.query.graphql';
export default {
i18n: {

View file

@ -1,18 +1,18 @@
#import "~/graphql_shared/fragments/alert_note.fragment.graphql"
#import "~/graphql_shared/fragments/user.fragment.graphql"
#import "~/graphql_shared/fragments/user_availability.fragment.graphql"
mutation alertSetAssignees($projectPath: ID!, $assigneeUsernames: [String!]!, $iid: String!) {
alertSetAssignees(
input: { iid: $iid, assigneeUsernames: $assigneeUsernames, projectPath: $projectPath }
mutation alertSetAssignees($fullPath: ID!, $assigneeUsernames: [String!]!, $iid: String!) {
issuableSetAssignees: alertSetAssignees(
input: { iid: $iid, assigneeUsernames: $assigneeUsernames, projectPath: $fullPath }
) {
errors
alert {
issuable: alert {
iid
assignees {
nodes {
username
name
avatarUrl
webUrl
...User
...UserAvailability
}
}
notes {

View file

@ -1,10 +1,16 @@
#import "../fragments/alert_detail_item.fragment.graphql"
#import "~/graphql_shared/fragments/alert_detail_item.fragment.graphql"
#import "~/graphql_shared/fragments/user.fragment.graphql"
mutation alertTodoCreate($projectPath: ID!, $iid: String!) {
alertTodoCreate(input: { iid: $iid, projectPath: $projectPath }) {
errors
alert {
...AlertDetailItem
assignees {
nodes {
...User
}
}
}
}
}

View file

@ -0,0 +1,17 @@
#import "~/graphql_shared/fragments/alert_detail_item.fragment.graphql"
#import "~/graphql_shared/fragments/user.fragment.graphql"
query alertDetails($fullPath: ID!, $alertId: String) {
project(fullPath: $fullPath) {
alertManagementAlerts(iid: $alertId) {
nodes {
...AlertDetailItem
assignees {
nodes {
...User
}
}
}
}
}
}

View file

@ -0,0 +1,20 @@
#import "~/graphql_shared/fragments/user.fragment.graphql"
#import "~/graphql_shared/fragments/user_availability.fragment.graphql"
query alertAssignees(
$domain: AlertManagementDomainFilter = threat_monitoring
$fullPath: ID!
$iid: String!
) {
workspace: project(fullPath: $fullPath) {
issuable: alertManagementAlert(domain: $domain, iid: $iid) {
iid
assignees {
nodes {
...User
...UserAvailability
}
}
}
}
}

View file

@ -74,6 +74,9 @@ export default {
query() {
return participantsQueries[this.issuableType].query;
},
skip() {
return Boolean(participantsQueries[this.issuableType].skipQuery);
},
variables() {
return {
iid: this.iid,

View file

@ -363,11 +363,7 @@ module Ci
end
def pick_build!(build)
if Feature.enabled?(:ci_reduce_queries_when_ticking_runner_queue, self, default_enabled: :yaml)
tick_runner_queue if matches_build?(build)
else
tick_runner_queue if can_pick?(build)
end
tick_runner_queue if matches_build?(build)
end
def uncached_contacted_at

View file

@ -1,8 +0,0 @@
---
name: ci_reduce_queries_when_ticking_runner_queue
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/55496
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/323328
milestone: '13.10'
type: development
group: group::continuous integration
default_enabled: true

View file

@ -9,3 +9,7 @@ ActsAsTaggableOn.tags_counter = false
# validate that counter cache is disabled
raise "Counter cache is not disabled" if
ActsAsTaggableOn::Tagging.reflections["tag"].options[:counter_cache]
ActsAsTaggableOn::Tagging.include IgnorableColumns
ActsAsTaggableOn::Tagging.ignore_column :id_convert_to_bigint, remove_with: '14.2', remove_after: '2021-08-22'
ActsAsTaggableOn::Tagging.ignore_column :taggable_id_convert_to_bigint, remove_with: '14.2', remove_after: '2021-08-22'
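For context, a minimal sketch of what the IgnorableColumns concern buys during the conversion window; the console lines below are illustrative, not captured from a real session.

# With the two ignore_column calls above in place, ActiveRecord stops seeing
# the temporary bigint columns entirely:
ActsAsTaggableOn::Tagging.column_names.include?('id_convert_to_bigint')          # => false
ActsAsTaggableOn::Tagging.column_names.include?('taggable_id_convert_to_bigint') # => false
# Application code therefore never reads or writes them; the database trigger
# added later in this commit keeps them in sync until the columns are dropped
# (scheduled via remove_with: '14.2' / remove_after: '2021-08-22').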

View file

@ -5,4 +5,13 @@ Gitlab::Experiment.configure do |config|
config.cache = Gitlab::Experiment::Cache::RedisHashStore.new(
pool: ->(&block) { Gitlab::Redis::SharedState.with { |redis| block.call(redis) } }
)
# TODO: This will be deprecated as of v0.6.0, but needs to stay intact for
# actively running experiments until a versioning concept is put in place to
# enable migrating into the new SHA2 strategy.
config.context_hash_strategy = lambda do |source, seed|
source = source.keys + source.values if source.is_a?(Hash)
data = Array(source).map { |v| (v.respond_to?(:to_global_id) ? v.to_global_id : v).to_s }
Digest::MD5.hexdigest(data.unshift(seed).join('|'))
end
end
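To make the legacy strategy concrete, here is the same MD5 computation as a standalone sketch that runs outside Rails; the context and seed values are invented, not taken from a real experiment.

require 'digest'

# Same logic as the lambda above, extracted so it can run in plain Ruby.
legacy_context_hash = lambda do |source, seed|
  source = source.keys + source.values if source.is_a?(Hash)
  data = Array(source).map { |v| (v.respond_to?(:to_global_id) ? v.to_global_id : v).to_s }
  Digest::MD5.hexdigest(data.unshift(seed).join('|'))
end

# For a plain hash context the key is MD5 over "seed|keys...|values...":
puts legacy_context_hash.call({ actor: 'user-42' }, 'example_experiment')
# same digest as Digest::MD5.hexdigest('example_experiment|actor|user-42')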

View file

@ -0,0 +1,29 @@
# frozen_string_literal: true
class CreateVulnerabilityFindingEvidenceHeaders < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
create_table_with_constraints :vulnerability_finding_evidence_headers do |t|
t.timestamps_with_timezone null: false
t.references :vulnerability_finding_evidence_request, index: { name: 'finding_evidence_header_on_finding_evidence_request_id' }, null: true, foreign_key: { on_delete: :cascade }
t.references :vulnerability_finding_evidence_response, index: { name: 'finding_evidence_header_on_finding_evidence_response_id' }, null: true, foreign_key: { on_delete: :cascade }
t.text :name, null: false
t.text :value, null: false
t.text_limit :name, 255
t.text_limit :value, 8192
end
end
def down
with_lock_retries do
drop_table :vulnerability_finding_evidence_headers
end
end
end

View file

@ -0,0 +1,16 @@
# frozen_string_literal: true
class InitializeConversionOfTaggingsToBigint < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
TABLE = :taggings
COLUMNS = %i(id taggable_id)
def up
initialize_conversion_of_integer_to_bigint(TABLE, COLUMNS)
end
def down
revert_initialize_conversion_of_integer_to_bigint(TABLE, COLUMNS)
end
end
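For readers unfamiliar with the helper, a rough, hand-written approximation of what initialize_conversion_of_integer_to_bigint produces for taggings, reconstructed from the structure.sql changes later in this commit; the function and trigger names below are made up, and the helper's real internals may differ.

# Approximation only, not GitLab's actual helper code.
class InitializeConversionOfTaggingsToBigintByHand < ActiveRecord::Migration[6.0]
  def up
    # Shadow columns, matching the structure.sql diff further down.
    add_column :taggings, :id_convert_to_bigint, :bigint, default: 0, null: false
    add_column :taggings, :taggable_id_convert_to_bigint, :bigint

    # Keep the shadow columns in sync on every write. In this commit the real
    # trigger is trigger_aebe8b822ad3; the name here is illustrative.
    execute(<<~SQL)
      CREATE FUNCTION sync_taggings_bigint_columns() RETURNS trigger
        LANGUAGE plpgsql
      AS $$
      BEGIN
        NEW."id_convert_to_bigint" := NEW."id";
        NEW."taggable_id_convert_to_bigint" := NEW."taggable_id";
        RETURN NEW;
      END;
      $$;

      CREATE TRIGGER sync_taggings_bigint_columns
        BEFORE INSERT OR UPDATE ON taggings
        FOR EACH ROW EXECUTE FUNCTION sync_taggings_bigint_columns();
    SQL
  end

  def down
    execute('DROP TRIGGER IF EXISTS sync_taggings_bigint_columns ON taggings')
    execute('DROP FUNCTION IF EXISTS sync_taggings_bigint_columns()')
    remove_column :taggings, :taggable_id_convert_to_bigint
    remove_column :taggings, :id_convert_to_bigint
  end
end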

View file

@ -0,0 +1,16 @@
# frozen_string_literal: true
class BackfillTaggingsForBigintConversion < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
TABLE = :taggings
COLUMNS = %i(id taggable_id)
def up
backfill_conversion_of_integer_to_bigint TABLE, COLUMNS, batch_size: 15000, sub_batch_size: 100
end
def down
revert_backfill_conversion_of_integer_to_bigint TABLE, COLUMNS
end
end

View file

@ -0,0 +1,37 @@
# frozen_string_literal: true
class ScheduleRecalculateUuidOnVulnerabilitiesOccurrences2 < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
MIGRATION = 'RecalculateVulnerabilitiesOccurrencesUuid'
DELAY_INTERVAL = 2.minutes.to_i
BATCH_SIZE = 2_500
disable_ddl_transaction!
class VulnerabilitiesFinding < ActiveRecord::Base
include ::EachBatch
self.inheritance_column = :_type_disabled
self.table_name = "vulnerability_occurrences"
end
def up
# Make sure that RemoveDuplicateVulnerabilitiesFindings has finished running
# so that we don't run into duplicate UUID issues
Gitlab::BackgroundMigration.steal('RemoveDuplicateVulnerabilitiesFindings')
say "Scheduling #{MIGRATION} jobs"
queue_background_migration_jobs_by_range_at_intervals(
VulnerabilitiesFinding,
MIGRATION,
DELAY_INTERVAL,
batch_size: BATCH_SIZE,
track_jobs: true
)
end
def down
# no-op
end
end
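As a rough illustration of how the scheduling parameters combine (the finding count is hypothetical, and because jobs are queued per id range, real batches can be smaller than BATCH_SIZE when ids are sparse):

# Sketch only: with BATCH_SIZE = 2_500 and DELAY_INTERVAL = 2 minutes, a table
# of N findings yields roughly ceil(N / 2_500) jobs, spaced two minutes apart.
batch_size     = 2_500
delay_interval = 2 * 60        # seconds
finding_count  = 10_000        # hypothetical row count

job_count = (finding_count / batch_size.to_f).ceil
job_count.times do |i|
  puts "job #{i + 1}: runs in #{(i + 1) * delay_interval / 60} minutes"
end
# => 4 jobs, due at 2, 4, 6 and 8 minutes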

View file

@ -0,0 +1 @@
50d370d2465fa4c0d3c2bd963d5745474ca35a43609d0e754f3fe69eb7a7179f

View file

@ -0,0 +1 @@
18fdca797ea7f3a60ce5b421bec7af1ea0b0b73fbf6e1c23592acbc9d13a0a52

View file

@ -0,0 +1 @@
eddbcd18c17f9017a2cdfb6fc0144dcfcb539d3617271722b2918bdbe48c481a

View file

@ -0,0 +1 @@
3ee15db28406522a5fb591395dd3d4a46b10e958339dc60ded3751e23096864d

View file

@ -170,6 +170,16 @@ BEGIN
END;
$$;
CREATE FUNCTION trigger_aebe8b822ad3() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
NEW."id_convert_to_bigint" := NEW."id";
NEW."taggable_id_convert_to_bigint" := NEW."taggable_id";
RETURN NEW;
END;
$$;
CREATE FUNCTION trigger_be1804f21693() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -18218,7 +18228,9 @@ CREATE TABLE taggings (
tagger_id integer,
tagger_type character varying,
context character varying,
created_at timestamp without time zone
created_at timestamp without time zone,
id_convert_to_bigint bigint DEFAULT 0 NOT NULL,
taggable_id_convert_to_bigint bigint
);
CREATE SEQUENCE taggings_id_seq
@ -18924,6 +18936,27 @@ CREATE SEQUENCE vulnerability_feedback_id_seq
ALTER SEQUENCE vulnerability_feedback_id_seq OWNED BY vulnerability_feedback.id;
CREATE TABLE vulnerability_finding_evidence_headers (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
vulnerability_finding_evidence_request_id bigint,
vulnerability_finding_evidence_response_id bigint,
name text NOT NULL,
value text NOT NULL,
CONSTRAINT check_01d21e8d92 CHECK ((char_length(name) <= 255)),
CONSTRAINT check_3f9011f903 CHECK ((char_length(value) <= 8192))
);
CREATE SEQUENCE vulnerability_finding_evidence_headers_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE vulnerability_finding_evidence_headers_id_seq OWNED BY vulnerability_finding_evidence_headers.id;
CREATE TABLE vulnerability_finding_evidence_requests (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@ -20299,6 +20332,8 @@ ALTER TABLE ONLY vulnerability_external_issue_links ALTER COLUMN id SET DEFAULT
ALTER TABLE ONLY vulnerability_feedback ALTER COLUMN id SET DEFAULT nextval('vulnerability_feedback_id_seq'::regclass);
ALTER TABLE ONLY vulnerability_finding_evidence_headers ALTER COLUMN id SET DEFAULT nextval('vulnerability_finding_evidence_headers_id_seq'::regclass);
ALTER TABLE ONLY vulnerability_finding_evidence_requests ALTER COLUMN id SET DEFAULT nextval('vulnerability_finding_evidence_requests_id_seq'::regclass);
ALTER TABLE ONLY vulnerability_finding_evidence_responses ALTER COLUMN id SET DEFAULT nextval('vulnerability_finding_evidence_responses_id_seq'::regclass);
@ -21981,6 +22016,9 @@ ALTER TABLE ONLY vulnerability_external_issue_links
ALTER TABLE ONLY vulnerability_feedback
ADD CONSTRAINT vulnerability_feedback_pkey PRIMARY KEY (id);
ALTER TABLE ONLY vulnerability_finding_evidence_headers
ADD CONSTRAINT vulnerability_finding_evidence_headers_pkey PRIMARY KEY (id);
ALTER TABLE ONLY vulnerability_finding_evidence_requests
ADD CONSTRAINT vulnerability_finding_evidence_requests_pkey PRIMARY KEY (id);
@ -22221,6 +22259,10 @@ CREATE UNIQUE INDEX epic_user_mentions_on_epic_id_and_note_id_index ON epic_user
CREATE UNIQUE INDEX epic_user_mentions_on_epic_id_index ON epic_user_mentions USING btree (epic_id) WHERE (note_id IS NULL);
CREATE INDEX finding_evidence_header_on_finding_evidence_request_id ON vulnerability_finding_evidence_headers USING btree (vulnerability_finding_evidence_request_id);
CREATE INDEX finding_evidence_header_on_finding_evidence_response_id ON vulnerability_finding_evidence_headers USING btree (vulnerability_finding_evidence_response_id);
CREATE INDEX finding_evidence_requests_on_finding_evidence_id ON vulnerability_finding_evidence_requests USING btree (vulnerability_finding_evidence_id);
CREATE INDEX finding_evidence_responses_on_finding_evidences_id ON vulnerability_finding_evidence_responses USING btree (vulnerability_finding_evidence_id);
@ -25263,6 +25305,8 @@ CREATE TRIGGER trigger_8487d4de3e7b BEFORE INSERT OR UPDATE ON ci_builds_metadat
CREATE TRIGGER trigger_91dc388a5fe6 BEFORE INSERT OR UPDATE ON ci_build_trace_sections FOR EACH ROW EXECUTE FUNCTION trigger_91dc388a5fe6();
CREATE TRIGGER trigger_aebe8b822ad3 BEFORE INSERT OR UPDATE ON taggings FOR EACH ROW EXECUTE FUNCTION trigger_aebe8b822ad3();
CREATE TRIGGER trigger_be1804f21693 BEFORE INSERT OR UPDATE ON ci_job_artifacts FOR EACH ROW EXECUTE FUNCTION trigger_be1804f21693();
CREATE TRIGGER trigger_cf2f9e35f002 BEFORE INSERT OR UPDATE ON ci_build_trace_chunks FOR EACH ROW EXECUTE FUNCTION trigger_cf2f9e35f002();
@ -26710,6 +26754,9 @@ ALTER TABLE ONLY vulnerability_findings_remediations
ALTER TABLE ONLY resource_iteration_events
ADD CONSTRAINT fk_rails_6830c13ac1 FOREIGN KEY (merge_request_id) REFERENCES merge_requests(id) ON DELETE CASCADE;
ALTER TABLE ONLY vulnerability_finding_evidence_headers
ADD CONSTRAINT fk_rails_683b8e000c FOREIGN KEY (vulnerability_finding_evidence_response_id) REFERENCES vulnerability_finding_evidence_responses(id) ON DELETE CASCADE;
ALTER TABLE ONLY geo_hashed_storage_migrated_events
ADD CONSTRAINT fk_rails_687ed7d7c5 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
@ -27280,6 +27327,9 @@ ALTER TABLE ONLY operations_strategies_user_lists
ALTER TABLE ONLY issue_tracker_data
ADD CONSTRAINT fk_rails_ccc0840427 FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE;
ALTER TABLE ONLY vulnerability_finding_evidence_headers
ADD CONSTRAINT fk_rails_ce7f121a03 FOREIGN KEY (vulnerability_finding_evidence_request_id) REFERENCES vulnerability_finding_evidence_requests(id) ON DELETE CASCADE;
ALTER TABLE ONLY resource_milestone_events
ADD CONSTRAINT fk_rails_cedf8cce4d FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL;

View file

@ -58,6 +58,13 @@ class Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid
end
::Gitlab::Database::BulkUpdate.execute(%i[uuid], mappings)
logger.info(message: 'RecalculateVulnerabilitiesOccurrencesUuid Migration: recalculation is done for:',
finding_ids: mappings.keys.pluck(:id))
mark_job_as_succeeded(start_id, end_id)
rescue StandardError => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
end
private
@ -76,4 +83,15 @@ class Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid
CalculateFindingUUID.call(name)
end
def logger
@logger ||= Gitlab::BackgroundMigration::Logger.build
end
def mark_job_as_succeeded(*arguments)
Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
'RecalculateVulnerabilitiesOccurrencesUuid',
arguments
)
end
end

View file

@ -14,7 +14,6 @@ module Gitlab
'directives' => {
'default_src' => "'self'",
'base_uri' => "'self'",
'child_src' => "'none'",
'connect_src' => "'self'",
'font_src' => "'self'",
'form_action' => "'self' https: http:",
@ -31,6 +30,11 @@ module Gitlab
}
}
# frame-src was deprecated in CSP level 2 in favor of child-src
# CSP level 3 "undeprecated" frame-src and browsers fall back on child-src if it's missing
# However Safari seems to read child-src first so we'll just keep both equal
settings_hash['directives']['child_src'] = settings_hash['directives']['frame_src']
allow_webpack_dev_server(settings_hash) if Rails.env.development?
allow_cdn(settings_hash) if ENV['GITLAB_CDN_HOST'].present?
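A small sketch of the net effect on the emitted policy; the directive values are invented, and the real header is assembled by Rails' content security policy DSL rather than the string building shown here.

# After this change child-src is no longer configured on its own: whatever
# frame-src resolves to is copied into child-src, so the two stay equal.
directives = {
  'default_src' => "'self'",
  'frame_src'   => "'self' https://www.example.com" # invented value
}
directives['child_src'] = directives['frame_src']

puts directives.map { |name, value| "#{name.tr('_', '-')} #{value}" }.join('; ')
# => default-src 'self'; frame-src 'self' https://www.example.com; child-src 'self' https://www.example.com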

View file

@ -112,13 +112,12 @@ namespace :gitlab do
# The remove_date of redirects.yaml should be nine months in the future.
# To not be confused with the remove_date of the Markdown page.
#
if remove_date < today
File.delete(filename) if File.exist?(filename)
next unless remove_date < today
puts " - from: #{old_path}"
puts " to: #{new_path(frontmatter['redirect_to'], filename)}"
puts " remove_date: #{remove_date >> 9}"
end
File.delete(filename) if File.exist?(filename)
puts " - from: #{old_path}"
puts " to: #{new_path(frontmatter['redirect_to'], filename)}"
puts " remove_date: #{remove_date >> 9}"
end
end
end

View file

@ -16295,6 +16295,9 @@ msgstr ""
msgid "Hashed storage can't be disabled anymore for new projects"
msgstr ""
msgid "Header cannot be associated with both a request and a response"
msgstr ""
msgid "Header logo"
msgstr ""
@ -16307,6 +16310,9 @@ msgstr ""
msgid "Header message"
msgstr ""
msgid "Header must be associated with a request or response"
msgstr ""
msgid "Headings"
msgstr ""

View file

@ -21,6 +21,7 @@ describe('Alert Details Sidebar Assignees', () => {
id: 1,
name: 'User 1',
username: 'root',
webUrl: 'https://gitlab:3443/root',
},
{
avatar_url:
@ -28,6 +29,7 @@ describe('Alert Details Sidebar Assignees', () => {
id: 2,
name: 'User 2',
username: 'not-root',
webUrl: 'https://gitlab:3443/non-root',
},
];
@ -128,7 +130,7 @@ describe('Alert Details Sidebar Assignees', () => {
variables: {
iid: '1527542',
assigneeUsernames: ['root'],
projectPath: 'projectPath',
fullPath: 'projectPath',
},
});
});
@ -137,7 +139,7 @@ describe('Alert Details Sidebar Assignees', () => {
wrapper.setData({ isDropdownSearching: false });
const errorMutationResult = {
data: {
alertSetAssignees: {
issuableSetAssignees: {
errors: ['There was a problem for sure.'],
alert: {},
},

View file

@ -73,6 +73,14 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence
expect(vulnerabilities_findings.pluck(:uuid)).to eq([desired_uuid_v5])
end
it 'logs recalculation' do
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
expect(instance).to receive(:info).once
end
subject
end
end
context "when finding has a UUIDv5" do
@ -99,6 +107,32 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence
end
end
context 'when recalculation fails' do
before do
@uuid_v4 = create_finding!(
vulnerability_id: vulnerability_for_uuidv4.id,
project_id: project.id,
scanner_id: different_scanner.id,
primary_identifier_id: different_vulnerability_identifier.id,
report_type: 0, # "sast"
location_fingerprint: "fa18f432f1d56675f4098d318739c3cd5b14eb3e",
uuid: known_uuid_v4
)
allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
allow(::Gitlab::Database::BulkUpdate).to receive(:execute).and_raise(expected_error)
end
let(:finding) { @uuid_v4 }
let(:expected_error) { RuntimeError.new }
it 'captures the errors and does not crash entirely' do
expect { subject }.not_to raise_error
expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception).with(expected_error).once
end
end
private
def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)

View file

@ -35,6 +35,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
expect(directives.has_key?('report_uri')).to be_truthy
expect(directives['report_uri']).to be_nil
expect(directives['child_src']).to eq(directives['frame_src'])
end
context 'when GITLAB_CDN_HOST is set' do

View file

@ -0,0 +1,127 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe ScheduleRecalculateUuidOnVulnerabilitiesOccurrences2 do
let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let(:users) { table(:users) }
let(:user) { create_user! }
let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
let(:scanners) { table(:vulnerability_scanners) }
let(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
let(:different_scanner) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
let(:vulnerabilities) { table(:vulnerabilities) }
let(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
let(:vulnerability_identifier) do
vulnerability_identifiers.create!(
project_id: project.id,
external_type: 'uuid-v5',
external_id: 'uuid-v5',
fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
name: 'Identifier for UUIDv5')
end
let(:different_vulnerability_identifier) do
vulnerability_identifiers.create!(
project_id: project.id,
external_type: 'uuid-v4',
external_id: 'uuid-v4',
fingerprint: '772da93d34a1ba010bcb5efa9fb6f8e01bafcc89',
name: 'Identifier for UUIDv4')
end
let(:vulnerability_for_uuidv4) do
create_vulnerability!(
project_id: project.id,
author_id: user.id
)
end
let(:vulnerability_for_uuidv5) do
create_vulnerability!(
project_id: project.id,
author_id: user.id
)
end
let!(:finding1) do
create_finding!(
vulnerability_id: vulnerability_for_uuidv4.id,
project_id: project.id,
scanner_id: different_scanner.id,
primary_identifier_id: different_vulnerability_identifier.id,
location_fingerprint: 'fa18f432f1d56675f4098d318739c3cd5b14eb3e',
uuid: 'b3cc2518-5446-4dea-871c-89d5e999c1ac'
)
end
let!(:finding2) do
create_finding!(
vulnerability_id: vulnerability_for_uuidv5.id,
project_id: project.id,
scanner_id: scanner.id,
primary_identifier_id: vulnerability_identifier.id,
location_fingerprint: '838574be0210968bf6b9f569df9c2576242cbf0a',
uuid: '77211ed6-7dff-5f6b-8c9a-da89ad0a9b60'
)
end
before do
stub_const("#{described_class}::BATCH_SIZE", 1)
end
around do |example|
freeze_time { Sidekiq::Testing.fake! { example.run } }
end
it 'schedules background migrations', :aggregate_failures do
migrate!
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, finding1.id, finding1.id)
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, finding2.id, finding2.id)
end
private
def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
vulnerabilities.create!(
project_id: project_id,
author_id: author_id,
title: title,
severity: severity,
confidence: confidence,
report_type: report_type
)
end
def create_finding!(
vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, location_fingerprint:, uuid:)
vulnerabilities_findings.create!(
vulnerability_id: vulnerability_id,
project_id: project_id,
name: 'test',
severity: 7,
confidence: 7,
report_type: 0,
project_fingerprint: '123qweasdzxc',
scanner_id: scanner_id,
primary_identifier_id: primary_identifier_id,
location_fingerprint: location_fingerprint,
metadata_version: 'test',
raw_metadata: 'test',
uuid: uuid
)
end
def create_user!(name: "Example User", email: "user@example.com", user_type: nil)
users.create!(
name: name,
email: email,
username: name,
projects_limit: 0
)
end
end

View file

@ -919,29 +919,13 @@ RSpec.describe Ci::Runner do
end
end
context 'build picking improvement enabled' do
before do
stub_feature_flags(ci_reduce_queries_when_ticking_runner_queue: true)
end
context 'build picking improvement' do
it 'does not check if the build is assignable to a runner' do
expect(runner).not_to receive(:can_pick?)
runner.pick_build!(build)
end
end
context 'build picking improvement disabled' do
before do
stub_feature_flags(ci_reduce_queries_when_ticking_runner_queue: false)
end
it 'checks if the build is assignable to a runner' do
expect(runner).to receive(:can_pick?).and_call_original
runner.pick_build!(build)
end
end
end
describe 'project runner without projects is destroyable' do

View file

@ -129,19 +129,6 @@ RSpec.describe Ci::UpdateBuildQueueService do
subject.tick(build)
end
context 'when feature flag ci_reduce_queries_when_ticking_runner_queue is disabled' do
before do
stub_feature_flags(ci_reduce_queries_when_ticking_runner_queue: false)
stub_feature_flags(ci_runners_short_circuit_assignable_for: false)
end
it 'runs redundant queries using `owned_or_instance_wide` scope' do
expect(Ci::Runner).to receive(:owned_or_instance_wide).and_call_original
subject.tick(build)
end
end
end
end
@ -218,10 +205,9 @@ RSpec.describe Ci::UpdateBuildQueueService do
let!(:build) { create(:ci_build, pipeline: pipeline, tag_list: %w[a b]) }
let!(:project_runner) { create(:ci_runner, :project, :online, projects: [project], tag_list: %w[a b c]) }
context 'when ci_preload_runner_tags and ci_reduce_queries_when_ticking_runner_queue are enabled' do
context 'when ci_preload_runner_tags is enabled' do
before do
stub_feature_flags(
ci_reduce_queries_when_ticking_runner_queue: true,
ci_preload_runner_tags: true
)
end
@ -235,10 +221,9 @@ RSpec.describe Ci::UpdateBuildQueueService do
end
end
context 'when ci_preload_runner_tags and ci_reduce_queries_when_ticking_runner_queue are disabled' do
context 'when ci_preload_runner_tags is disabled' do
before do
stub_feature_flags(
ci_reduce_queries_when_ticking_runner_queue: false,
ci_preload_runner_tags: false
)
end