Add latest changes from gitlab-org/gitlab@master
Parent: 91264a6752
Commit: a807e50ade
39 changed files with 523 additions and 211 deletions
@@ -8,7 +8,7 @@ include:
   - local: .gitlab/ci/package-and-test/rules.gitlab-ci.yml
   - local: .gitlab/ci/package-and-test/variables.gitlab-ci.yml
   - project: gitlab-org/quality/pipeline-common
-    ref: 1.2.1
+    ref: 1.3.0
     file:
       - /ci/base.gitlab-ci.yml
       - /ci/allure-report.yml
@@ -623,6 +623,7 @@ notify-slack:
     SLACK_ICON_EMOJI: ci_failing
     STATUS_SYM: ☠️
     STATUS: failed
+    TYPE: (package-and-test)
   when: on_failure
   script:
     - bundle exec gitlab-qa-report --prepare-stage-reports "$CI_PROJECT_DIR/gitlab-qa-run-*/**/rspec-*.xml" # generate summary
@@ -1,7 +1,8 @@
 include:
   - project: gitlab-org/quality/pipeline-common
-    ref: 1.2.2
+    ref: 1.3.0
     file:
       - /ci/base.gitlab-ci.yml
       - /ci/allure-report.yml
       - /ci/knapsack-report.yml
 
@@ -172,3 +173,21 @@ delete-test-resources:
     - bundle exec rake "test_resources:delete[$QA_TEST_RESOURCES_FILE_PATTERN]"
   allow_failure: true
   when: always
+
+notify-slack:
+  extends:
+    - .notify-slack-qa
+    - .qa-cache
+    - .rules:notify-slack
+  stage: post-qa
+  variables:
+    RUN_WITH_BUNDLE: "true"
+    QA_PATH: qa
+    ALLURE_JOB_NAME: e2e-review-qa
+    SLACK_ICON_EMOJI: ci_failing
+    STATUS_SYM: ☠️
+    STATUS: failed
+    TYPE: (review-app)
+  script:
+    - bundle exec gitlab-qa-report --prepare-stage-reports "$CI_PROJECT_DIR/qa/tmp/rspec-*.xml" # generate summary
+    - !reference [.notify-slack-qa, script]
@@ -19,6 +19,9 @@
 .qa-framework-changes: &qa-framework-changes
   if: $QA_FRAMEWORK_CHANGES == "true"
 
+.default-branch: &default-branch
+  if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH
+
 .qa-manual: &qa-manual
   when: manual
   allow_failure: true
@@ -94,3 +97,8 @@
 .rules:prepare-report:
   rules:
     - when: always
+
+.rules:notify-slack:
+  rules:
+    - <<: *default-branch
+      when: on_failure
@@ -48,26 +48,6 @@ Layout/FirstArrayElementIndentation:
     - 'spec/requests/api/issues/put_projects_issues_spec.rb'
     - 'spec/requests/api/merge_requests_spec.rb'
     - 'spec/requests/api/task_completion_status_spec.rb'
-    - 'spec/requests/projects/ci/prometheus_metrics/histograms_controller_spec.rb'
-    - 'spec/requests/projects/issues_controller_spec.rb'
-    - 'spec/requests/projects/merge_requests_controller_spec.rb'
-    - 'spec/serializers/build_trace_entity_spec.rb'
-    - 'spec/serializers/ci/daily_build_group_report_result_serializer_spec.rb'
-    - 'spec/serializers/merge_request_poll_widget_entity_spec.rb'
-    - 'spec/services/award_emojis/copy_service_spec.rb'
-    - 'spec/services/bulk_update_integration_service_spec.rb'
-    - 'spec/services/ci/compare_test_reports_service_spec.rb'
-    - 'spec/services/ci/create_pipeline_service/rules_spec.rb'
-    - 'spec/services/ci/find_exposed_artifacts_service_spec.rb'
-    - 'spec/services/design_management/move_designs_service_spec.rb'
-    - 'spec/services/git/tag_hooks_service_spec.rb'
-    - 'spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb'
-    - 'spec/services/import/gitlab_projects/create_project_service_spec.rb'
-    - 'spec/services/jira_connect/sync_service_spec.rb'
-    - 'spec/services/merge_requests/link_lfs_objects_service_spec.rb'
-    - 'spec/services/merge_requests/refresh_service_spec.rb'
-    - 'spec/services/repositories/changelog_service_spec.rb'
-    - 'spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb'
     - 'spec/services/security/merge_reports_service_spec.rb'
     - 'spec/simplecov_env.rb'
     - 'spec/support/atlassian/jira_connect/schemata.rb'
@@ -108,6 +108,9 @@ export default {
         ? this.propsSource.fields.filter((field) => !field.section)
         : this.propsSource.fields;
     },
+    hasFieldsWithoutSection() {
+      return this.fieldsWithoutSection.length;
+    },
   },
   methods: {
     ...mapActions(['setOverride', 'requestJiraIssueTypes']),
@@ -227,6 +230,27 @@ export default {
       @change="setOverride"
     />
 
+    <div v-if="!hasSections" class="row">
+      <div class="col-lg-4"></div>
+
+      <div class="col-lg-8">
+        <!-- helpHtml is trusted input -->
+        <div v-if="helpHtml" v-safe-html:[$options.helpHtmlConfig]="helpHtml"></div>
+
+        <active-checkbox
+          v-if="propsSource.showActive"
+          :key="`${currentKey}-active-checkbox`"
+          @toggle-integration-active="onToggleIntegrationState"
+        />
+        <trigger-fields
+          v-if="propsSource.triggerEvents.length"
+          :key="`${currentKey}-trigger-fields`"
+          :events="propsSource.triggerEvents"
+          :type="propsSource.type"
+        />
+      </div>
+    </div>
+
     <template v-if="hasSections">
       <div
         v-for="(section, index) in customState.sections"
@@ -266,24 +290,10 @@ export default {
       </div>
     </template>
 
-    <div class="row">
+    <div v-if="hasFieldsWithoutSection" class="row">
       <div class="col-lg-4"></div>
 
       <div class="col-lg-8">
-        <!-- helpHtml is trusted input -->
-        <div v-if="helpHtml && !hasSections" v-safe-html:[$options.helpHtmlConfig]="helpHtml"></div>
-
-        <active-checkbox
-          v-if="propsSource.showActive && !hasSections"
-          :key="`${currentKey}-active-checkbox`"
-          @toggle-integration-active="onToggleIntegrationState"
-        />
-        <trigger-fields
-          v-if="propsSource.triggerEvents.length && !hasSections"
-          :key="`${currentKey}-trigger-fields`"
-          :events="propsSource.triggerEvents"
-          :type="propsSource.type"
-        />
         <dynamic-field
           v-for="field in fieldsWithoutSection"
           :key="`${currentKey}-${field.name}`"
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+class AddMigratedToStateTransitionToVulnerabilityFeedback < Gitlab::Database::Migration[2.0]
+  def change
+    add_column :vulnerability_feedback, :migrated_to_state_transition, :boolean, default: false
+  end
+end
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+class AddTmpIndexVulnsOnReportType < Gitlab::Database::Migration[2.0]
+  # Temporary index to perform migration removing invalid vulnerabilities
+  INDEX_NAME = 'tmp_idx_vulnerabilities_on_id_where_report_type_7_99'
+
+  REPORT_TYPES = {
+    cluster_image_scanning: 7,
+    custom: 99
+  }
+
+  disable_ddl_transaction!
+
+  def up
+    add_concurrent_index :vulnerabilities, :id,
+      where: "report_type IN (#{REPORT_TYPES.values.join(', ')})",
+      name: INDEX_NAME
+  end
+
+  def down
+    remove_concurrent_index_by_name :vulnerabilities, INDEX_NAME
+  end
+end
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+class ScheduleDeleteOrphanedOperationalVulnerabilities < Gitlab::Database::Migration[2.0]
+  MIGRATION = 'DeleteOrphanedOperationalVulnerabilities'
+  INTERVAL = 2.minutes
+  BATCH_SIZE = 10_000
+
+  disable_ddl_transaction!
+
+  restrict_gitlab_migration gitlab_schema: :gitlab_main
+
+  def up
+    queue_batched_background_migration(
+      MIGRATION,
+      :vulnerabilities,
+      :id,
+      job_interval: INTERVAL,
+      batch_size: BATCH_SIZE
+    )
+  end
+
+  def down
+    delete_batched_background_migration(MIGRATION, :vulnerabilities, :id, [])
+  end
+end
db/schema_migrations/20220929091500 (new file)
@@ -0,0 +1 @@
+c636f10079d7b718b777b78c22b66ca8828518d9dc8a3f6abe47e25af14ae20b

db/schema_migrations/20220929213730 (new file)
@@ -0,0 +1 @@
+f74b8524fcdabd1646bbb47a32f8e3a29a848eb457f465d33a9e81bf4462e2ea

db/schema_migrations/20220930125609 (new file)
@@ -0,0 +1 @@
+cba5fad6379efe681dfc19a02522de7c904705790de588d07ed12f032fab760e
@@ -22579,7 +22579,8 @@ CREATE TABLE vulnerability_feedback (
     comment text,
     comment_timestamp timestamp with time zone,
     finding_uuid uuid,
-    dismissal_reason smallint
+    dismissal_reason smallint,
+    migrated_to_state_transition boolean DEFAULT false
 );
 
 CREATE SEQUENCE vulnerability_feedback_id_seq
@@ -30964,6 +30965,8 @@ CREATE UNIQUE INDEX taggings_idx ON taggings USING btree (tag_id, taggable_id, t
 
 CREATE UNIQUE INDEX term_agreements_unique_index ON term_agreements USING btree (user_id, term_id);
 
+CREATE INDEX tmp_idx_vulnerabilities_on_id_where_report_type_7_99 ON vulnerabilities USING btree (id) WHERE (report_type = ANY (ARRAY[7, 99]));
+
 CREATE INDEX tmp_index_approval_merge_request_rules_on_report_type_equal_one ON approval_merge_request_rules USING btree (id, report_type) WHERE (report_type = 1);
 
 CREATE INDEX tmp_index_ci_job_artifacts_on_expire_at_where_locked_unknown ON ci_job_artifacts USING btree (expire_at, job_id) WHERE ((locked = 2) AND (expire_at IS NOT NULL));
@@ -257,14 +257,10 @@ Configure the group-level protected environments by using the [REST API](../../a
 
 Protected environments can also be used to require manual approvals before deployments. See [Deployment approvals](deployment_approvals.md) for more information.
 
-<!-- ## Troubleshooting
+## Troubleshooting
 
-Include any troubleshooting steps that you can foresee. If you know beforehand what issues
-one might have when setting this up, or when something is changed, or on upgrading, it's
-important to describe those, too. Think of things that may go wrong and include them here.
-This is important to minimize requests for support, and to avoid doc comments with
-questions that you know someone might ask.
+### Reporter can't run a trigger job that deploys to a protected environment in downstream pipeline
 
-Each scenario can be a third-level heading, e.g. `### Getting error message X`.
-If you have none to add when creating a doc, leave this section in place
-but commented out to help encourage others to add to it in the future. -->
+A user who has [deployment-only access to protected environments](#deployment-only-access-to-protected-environments) might **not** be able to run a job if it's with a [`trigger`](../yaml/index.md#trigger) keyword. This is because the job is missing the [`environment`](../yaml/index.md#environment) keyword definition to associate the job with the protected environment, therefore the job is recognized as a normal job that uses [regular CI/CD permission model](../../user/permissions.md#gitlab-cicd-permissions).
+
+Please see [this issue](https://gitlab.com/groups/gitlab-org/-/epics/8483) for more information about supporting `environment` keyword with `trigger` keyword.
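For illustration only, not part of the commit: a minimal sketch of the failing pattern described in the troubleshooting entry above, assuming a hypothetical downstream project path `my-group/deployment-project`. Because the trigger job carries no `environment:` definition, GitLab evaluates it under the regular CI/CD permission model rather than the protected-environment rules.

```yaml
# Hypothetical trigger job illustrating the troubleshooting entry above.
deploy-via-downstream:
  stage: deploy
  trigger:
    project: my-group/deployment-project  # hypothetical downstream project path
    branch: main
  # No `environment:` keyword is defined here, so the job is treated as a normal
  # job under the regular CI/CD permission model, not as a protected-environment
  # deployment, and a user with deployment-only access cannot run it.
```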
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Gitlab
+  module BackgroundMigration
+    # Background migration for deleting orphaned operational vulnerabilities (without findings)
+    class DeleteOrphanedOperationalVulnerabilities < ::Gitlab::BackgroundMigration::BatchedMigrationJob
+      REPORT_TYPES = {
+        cluster_image_scanning: 7,
+        custom: 99
+      }.freeze
+
+      NOT_EXISTS_SQL = <<-SQL
+        NOT EXISTS (
+          SELECT FROM vulnerability_occurrences
+          WHERE "vulnerability_occurrences"."vulnerability_id" = "vulnerabilities"."id"
+        )
+      SQL
+
+      scope_to ->(relation) do
+        relation
+          .where(report_type: [REPORT_TYPES[:cluster_image_scanning], REPORT_TYPES[:custom]])
+      end
+
+      def perform
+        each_sub_batch(operation_name: :delete_orphaned_operational_vulnerabilities) do |sub_batch|
+          sub_batch
+            .where(NOT_EXISTS_SQL)
+            .delete_all
+        end
+      end
+    end
+  end
+end
@@ -6,8 +6,6 @@ module Gitlab
       class ConvertTableToFirstListPartition
        UnableToPartition = Class.new(StandardError)
 
-        include Gitlab::Database::MigrationHelpers
-
         SQL_STATEMENT_SEPARATOR = ";\n\n"
 
         attr_reader :partitioning_column, :table_name, :parent_table_name, :zero_partition_value
@@ -71,12 +71,17 @@ module Gitlab
         "#{type}_#{hashed_identifier}"
       end
 
-      def with_lock_retries(&block)
-        Gitlab::Database::WithLockRetries.new(
+      def with_lock_retries(*args, **kwargs, &block)
+        raise_on_exhaustion = !!kwargs.delete(:raise_on_exhaustion)
+        merged_args = {
           connection: connection,
           klass: self.class,
-          logger: Gitlab::BackgroundMigration::Logger
-        ).run(&block)
+          logger: Gitlab::BackgroundMigration::Logger,
+          allow_savepoints: true
+        }.merge(kwargs)
+
+        Gitlab::Database::WithLockRetries.new(**merged_args)
+          .run(raise_on_exhaustion: raise_on_exhaustion, &block)
       end
 
       def assert_not_in_transaction_block(scope:)
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedOperationalVulnerabilities, :migration do
+  include MigrationHelpers::VulnerabilitiesHelper
+
+  let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+  let_it_be(:users) { table(:users) }
+  let_it_be(:user) do
+    users.create!(
+      name: "Example User",
+      email: "user@example.com",
+      username: "Example User",
+      projects_limit: 0,
+      confirmed_at: Time.current
+    )
+  end
+
+  let_it_be(:project) do
+    table(:projects).create!(
+      id: 123,
+      namespace_id: namespace.id,
+      project_namespace_id: namespace.id
+    )
+  end
+
+  let_it_be(:scanners) { table(:vulnerability_scanners) }
+  let_it_be(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
+  let_it_be(:different_scanner) do
+    scanners.create!(
+      project_id: project.id,
+      external_id: 'test 2',
+      name: 'test scanner 2'
+    )
+  end
+
+  let_it_be(:vulnerabilities) { table(:vulnerabilities) }
+  let_it_be(:vulnerability_with_finding) do
+    create_vulnerability!(
+      project_id: project.id,
+      author_id: user.id
+    )
+  end
+
+  let_it_be(:vulnerability_without_finding) do
+    create_vulnerability!(
+      project_id: project.id,
+      author_id: user.id
+    )
+  end
+
+  let_it_be(:cis_vulnerability_without_finding) do
+    create_vulnerability!(
+      project_id: project.id,
+      author_id: user.id,
+      report_type: 7
+    )
+  end
+
+  let_it_be(:custom_vulnerability_without_finding) do
+    create_vulnerability!(
+      project_id: project.id,
+      author_id: user.id,
+      report_type: 99
+    )
+  end
+
+  let_it_be(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+  let_it_be(:primary_identifier) do
+    vulnerability_identifiers.create!(
+      project_id: project.id,
+      external_type: 'uuid-v5',
+      external_id: 'uuid-v5',
+      fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
+      name: 'Identifier for UUIDv5')
+  end
+
+  let_it_be(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
+  let_it_be(:finding) do
+    create_finding!(
+      vulnerability_id: vulnerability_with_finding.id,
+      project_id: project.id,
+      scanner_id: scanner.id,
+      primary_identifier_id: primary_identifier.id
+    )
+  end
+
+  subject(:background_migration) do
+    described_class.new(start_id: vulnerabilities.minimum(:id),
+                        end_id: vulnerabilities.maximum(:id),
+                        batch_table: :vulnerabilities,
+                        batch_column: :id,
+                        sub_batch_size: 2,
+                        pause_ms: 0,
+                        connection: ActiveRecord::Base.connection)
+  end
+
+  it 'drops Cluster Image Scanning and Custom Vulnerabilities without any Findings' do
+    expect(vulnerabilities.pluck(:id)).to match_array([
+      vulnerability_with_finding.id,
+      vulnerability_without_finding.id,
+      cis_vulnerability_without_finding.id,
+      custom_vulnerability_without_finding.id
+    ])
+
+    expect { background_migration.perform }.to change(vulnerabilities, :count).by(-2)
+
+    expect(vulnerabilities.pluck(:id)).to match_array([vulnerability_with_finding.id, vulnerability_without_finding.id])
+  end
+end
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleDeleteOrphanedOperationalVulnerabilities do
+  let_it_be(:migration) { described_class.new }
+  let_it_be(:post_migration) { described_class::MIGRATION }
+
+  describe '#up' do
+    it 'schedules background jobs for each batch of vulnerabilities' do
+      migration.up
+
+      expect(post_migration).to(
+        have_scheduled_batched_migration(
+          table_name: :vulnerabilities,
+          column_name: :id,
+          interval: described_class::INTERVAL,
+          batch_size: described_class::BATCH_SIZE
+        )
+      )
+    end
+  end
+
+  describe '#down' do
+    it 'deletes all batched migration records' do
+      migration.down
+
+      expect(post_migration).not_to have_scheduled_batched_migration
+    end
+  end
+end
@@ -8,10 +8,11 @@ RSpec.describe 'Projects::Ci::PrometheusMetrics::HistogramsController' do
   describe 'POST /*namespace_id/:project_id/-/ci/prometheus_metrics/histograms' do
     context 'with known histograms' do
      it 'returns 201 Created' do
-        post histograms_route(histograms: [
-          { name: :pipeline_graph_link_calculation_duration_seconds, value: 1 },
-          { name: :pipeline_graph_links_total, value: 10 }
-        ])
+        post histograms_route(histograms:
+          [
+            { name: :pipeline_graph_link_calculation_duration_seconds, value: 1 },
+            { name: :pipeline_graph_links_total, value: 10 }
+          ])
 
         expect(response).to have_gitlab_http_status(:created)
       end
@@ -31,11 +31,12 @@ RSpec.describe Projects::IssuesController do
       notes = discussions.flat_map { |d| d['notes'] }
 
       expect(discussions.count).to eq(2)
-      expect(notes).to match([
-        a_hash_including('id' => discussion.id.to_s),
-        a_hash_including('id' => discussion_reply.id.to_s),
-        a_hash_including('type' => 'StateNote')
-      ])
+      expect(notes).to match(
+        [
+          a_hash_including('id' => discussion.id.to_s),
+          a_hash_including('id' => discussion_reply.id.to_s),
+          a_hash_including('type' => 'StateNote')
+        ])
 
       cursor = response.header['X-Next-Page-Cursor']
       expect(cursor).to be_present
@@ -46,9 +47,7 @@ RSpec.describe Projects::IssuesController do
       notes = discussions.flat_map { |d| d['notes'] }
 
       expect(discussions.count).to eq(1)
-      expect(notes).to match([
-        a_hash_including('id' => discussion_2.id.to_s)
-      ])
+      expect(notes).to match([a_hash_including('id' => discussion_2.id.to_s)])
     end
   end
 end
|
@ -29,11 +29,12 @@ RSpec.describe Projects::MergeRequestsController do
|
|||
notes = discussions.flat_map { |d| d['notes'] }
|
||||
|
||||
expect(discussions.count).to eq(2)
|
||||
expect(notes).to match([
|
||||
a_hash_including('id' => discussion.id.to_s),
|
||||
a_hash_including('id' => discussion_reply.id.to_s),
|
||||
a_hash_including('type' => 'StateNote')
|
||||
])
|
||||
expect(notes).to match(
|
||||
[
|
||||
a_hash_including('id' => discussion.id.to_s),
|
||||
a_hash_including('id' => discussion_reply.id.to_s),
|
||||
a_hash_including('type' => 'StateNote')
|
||||
])
|
||||
|
||||
cursor = response.header['X-Next-Page-Cursor']
|
||||
expect(cursor).to be_present
|
||||
|
@ -44,9 +45,7 @@ RSpec.describe Projects::MergeRequestsController do
|
|||
notes = discussions.flat_map { |d| d['notes'] }
|
||||
|
||||
expect(discussions.count).to eq(1)
|
||||
expect(notes).to match([
|
||||
a_hash_including('id' => discussion_2.id.to_s)
|
||||
])
|
||||
expect(notes).to match([a_hash_including('id' => discussion_2.id.to_s)])
|
||||
end
|
||||
|
||||
context 'when paginated_mr_discussions is disabled' do
|
||||
|
|
|
@@ -38,8 +38,9 @@ RSpec.describe BuildTraceEntity do
   end
 
   it 'includes the trace content in json' do
-    expect(subject[:lines]).to eq([
-      { offset: 0, content: [{ text: 'the-trace' }] }
-    ])
+    expect(subject[:lines]).to eq(
+      [
+        { offset: 0, content: [{ text: 'the-trace' }] }
+      ])
   end
 end
@@ -18,22 +18,23 @@ RSpec.describe Ci::DailyBuildGroupReportResultSerializer do
     let(:json) { Gitlab::Json.parse(serializer.to_json) }
 
     it 'returns an array of group results' do
-      expect(json).to eq([
-        {
-          'group_name' => 'rspec',
-          'data' => [
-            { 'date' => '2020-05-20', 'coverage' => 79.1 },
-            { 'date' => '2020-05-19', 'coverage' => 77.1 }
-          ]
-        },
-        {
-          'group_name' => 'karma',
-          'data' => [
-            { 'date' => '2020-05-20', 'coverage' => 90.1 },
-            { 'date' => '2020-05-19', 'coverage' => 89.1 }
-          ]
-        }
-      ])
+      expect(json).to eq(
+        [
+          {
+            'group_name' => 'rspec',
+            'data' => [
+              { 'date' => '2020-05-20', 'coverage' => 79.1 },
+              { 'date' => '2020-05-19', 'coverage' => 77.1 }
+            ]
+          },
+          {
+            'group_name' => 'karma',
+            'data' => [
+              { 'date' => '2020-05-20', 'coverage' => 90.1 },
+              { 'date' => '2020-05-19', 'coverage' => 89.1 }
+            ]
+          }
+        ])
     end
   end
 end
@@ -152,17 +152,19 @@ RSpec.describe MergeRequestPollWidgetEntity do
 
   describe '#builds_with_coverage' do
     it 'serializes the builds with coverage' do
-      allow(resource).to receive(:head_pipeline_builds_with_coverage).and_return([
-        double(name: 'rspec', coverage: 91.5),
-        double(name: 'jest', coverage: 94.1)
-      ])
+      allow(resource).to receive(:head_pipeline_builds_with_coverage).and_return(
+        [
+          double(name: 'rspec', coverage: 91.5),
+          double(name: 'jest', coverage: 94.1)
+        ])
 
       result = subject[:builds_with_coverage]
 
-      expect(result).to eq([
-        { name: 'rspec', coverage: 91.5 },
-        { name: 'jest', coverage: 94.1 }
-      ])
+      expect(result).to eq(
+        [
+          { name: 'rspec', coverage: 91.5 },
+          { name: 'jest', coverage: 94.1 }
+        ])
     end
   end
 
@@ -4,10 +4,12 @@ require 'spec_helper'
 
 RSpec.describe AwardEmojis::CopyService do
   let_it_be(:from_awardable) do
-    create(:issue, award_emoji: [
-      build(:award_emoji, name: 'thumbsup'),
-      build(:award_emoji, name: 'thumbsdown')
-    ])
+    create(
+      :issue,
+      award_emoji: [
+        build(:award_emoji, name: 'thumbsup'),
+        build(:award_emoji, name: 'thumbsdown')
+      ])
   end
 
   describe '#initialize' do
@@ -11,8 +11,8 @@ RSpec.describe BulkUpdateIntegrationService do
 
   let(:excluded_attributes) do
     %w[
-      id project_id group_id inherit_from_id instance template
-      created_at updated_at encrypted_properties encrypted_properties_iv
+      id project_id group_id inherit_from_id instance template
+      created_at updated_at encrypted_properties encrypted_properties_iv
     ]
   end
 
@@ -72,10 +72,11 @@ RSpec.describe Ci::CompareTestReportsService do
 
     it 'loads recent failures on limited test cases to avoid building up a huge DB query', :aggregate_failures do
       expect(comparison[:data]).to match_schema('entities/test_reports_comparer')
-      expect(recent_failures_per_test_case).to eq([
-        { 'count' => 1, 'base_branch' => 'master' },
-        { 'count' => 1, 'base_branch' => 'master' }
-      ])
+      expect(recent_failures_per_test_case).to eq(
+        [
+          { 'count' => 1, 'base_branch' => 'master' },
+          { 'count' => 1, 'base_branch' => 'master' }
+        ])
       expect(new_failures.count).to eq(2)
     end
   end
@@ -540,9 +540,10 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
       let(:compare_to) { 'invalid-branch' }
 
       it 'returns an error' do
-        expect(pipeline.errors.full_messages).to eq([
-          'Failed to parse rule for job1: rules:changes:compare_to is not a valid ref'
-        ])
+        expect(pipeline.errors.full_messages).to eq(
+          [
+            'Failed to parse rule for job1: rules:changes:compare_to is not a valid ref'
+          ])
       end
     end
 
@@ -157,20 +157,21 @@ RSpec.describe Ci::FindExposedArtifactsService do
     subject { described_class.new(project, user).for_pipeline(pipeline, limit: 2) }
 
     it 'returns first 2 results' do
-      expect(subject).to eq([
-        {
-          text: 'artifact 1',
-          url: file_project_job_artifacts_path(project, job1, 'ci_artifacts.txt'),
-          job_name: job1.name,
-          job_path: project_job_path(project, job1)
-        },
-        {
-          text: 'artifact 2',
-          url: browse_project_job_artifacts_path(project, job2),
-          job_name: job2.name,
-          job_path: project_job_path(project, job2)
-        }
-      ])
+      expect(subject).to eq(
+        [
+          {
+            text: 'artifact 1',
+            url: file_project_job_artifacts_path(project, job1, 'ci_artifacts.txt'),
+            job_name: job1.name,
+            job_path: project_job_path(project, job1)
+          },
+          {
+            text: 'artifact 2',
+            url: browse_project_job_artifacts_path(project, job2),
+            job_name: job2.name,
+            job_path: project_job_path(project, job2)
+          }
+        ])
     end
   end
 
@@ -199,20 +200,21 @@
     subject { described_class.new(project, user).for_pipeline(pipeline, limit: 2) }
 
     it 'returns the correct path for cross-project MRs' do
-      expect(subject).to eq([
-        {
-          text: 'file artifact',
-          url: file_project_job_artifacts_path(foreign_project, job_show, 'ci_artifacts.txt'),
-          job_name: job_show.name,
-          job_path: project_job_path(foreign_project, job_show)
-        },
-        {
-          text: 'directory artifact',
-          url: browse_project_job_artifacts_path(foreign_project, job_browse),
-          job_name: job_browse.name,
-          job_path: project_job_path(foreign_project, job_browse)
-        }
-      ])
+      expect(subject).to eq(
+        [
+          {
+            text: 'file artifact',
+            url: file_project_job_artifacts_path(foreign_project, job_show, 'ci_artifacts.txt'),
+            job_name: job_show.name,
+            job_path: project_job_path(foreign_project, job_show)
+          },
+          {
+            text: 'directory artifact',
+            url: browse_project_job_artifacts_path(foreign_project, job_browse),
+            job_name: job_browse.name,
+            job_path: project_job_path(foreign_project, job_browse)
+          }
+        ])
     end
   end
 end
@@ -88,23 +88,24 @@ RSpec.describe DesignManagement::MoveDesignsService do
 
       expect(subject).to be_success
 
-      expect(issue.designs.ordered).to eq([
-        # Existing designs which already had a relative_position set.
-        # These should stay at the beginning, in the same order.
-        other_design1,
-        other_design2,
-
-        # The designs we're passing into the service.
-        # These should be placed between the existing designs, in the correct order.
-        previous_design,
-        current_design,
-        next_design,
-
-        # Existing designs which didn't have a relative_position set.
-        # These should be placed at the end, in the order of their IDs.
-        other_design3,
-        other_design4
-      ])
+      expect(issue.designs.ordered).to eq(
+        [
+          # Existing designs which already had a relative_position set.
+          # These should stay at the beginning, in the same order.
+          other_design1,
+          other_design2,
+
+          # The designs we're passing into the service.
+          # These should be placed between the existing designs, in the correct order.
+          previous_design,
+          current_design,
+          next_design,
+
+          # Existing designs which didn't have a relative_position set.
+          # These should be placed at the end, in the order of their IDs.
+          other_design3,
+          other_design4
+        ])
     end
   end
 end
@@ -104,12 +104,12 @@ RSpec.describe Git::TagHooksService, :service do
         id: commit.id,
         message: commit.safe_message,
         url: [
-          Gitlab.config.gitlab.url,
-          project.namespace.to_param,
-          project.to_param,
-          '-',
-          'commit',
-          commit.id
+          Gitlab.config.gitlab.url,
+          project.namespace.to_param,
+          project.to_param,
+          '-',
+          'commit',
+          commit.id
         ].join('/')
       )
     end
@@ -8,17 +8,19 @@ RSpec.describe GoogleCloud::SetupCloudsqlInstanceService do
   let(:list_databases_empty) { Google::Apis::SqladminV1beta4::ListDatabasesResponse.new(items: []) }
   let(:list_users_empty) { Google::Apis::SqladminV1beta4::ListUsersResponse.new(items: []) }
   let(:list_databases) do
-    Google::Apis::SqladminV1beta4::ListDatabasesResponse.new(items: [
-      Google::Apis::SqladminV1beta4::Database.new(name: 'postgres'),
-      Google::Apis::SqladminV1beta4::Database.new(name: 'main_db')
-    ])
+    Google::Apis::SqladminV1beta4::ListDatabasesResponse.new(
+      items: [
+        Google::Apis::SqladminV1beta4::Database.new(name: 'postgres'),
+        Google::Apis::SqladminV1beta4::Database.new(name: 'main_db')
+      ])
   end
 
   let(:list_users) do
-    Google::Apis::SqladminV1beta4::ListUsersResponse.new(items: [
-      Google::Apis::SqladminV1beta4::User.new(name: 'postgres'),
-      Google::Apis::SqladminV1beta4::User.new(name: 'main_user')
-    ])
+    Google::Apis::SqladminV1beta4::ListUsersResponse.new(
+      items: [
+        Google::Apis::SqladminV1beta4::User.new(name: 'postgres'),
+        Google::Apis::SqladminV1beta4::User.new(name: 'main_user')
+      ])
   end
 
   context 'when unauthorized user triggers worker' do
@@ -139,10 +139,11 @@ RSpec.describe ::Import::GitlabProjects::CreateProjectService, :aggregate_failur
       expect(response.http_status).to eq(:bad_request)
       expect(response.message)
         .to eq(%{Project namespace path can contain only letters, digits, '_', '-' and '.'. Cannot start with '-', end in '.git' or end in '.atom'})
-      expect(response.payload).to eq(other_errors: [
-        %{Path can contain only letters, digits, '_', '-' and '.'. Cannot start with '-', end in '.git' or end in '.atom'},
-        %{Path must not start or end with a special character and must not contain consecutive special characters.}
-      ])
+      expect(response.payload).to eq(
+        other_errors: [
+          %{Path can contain only letters, digits, '_', '-' and '.'. Cannot start with '-', end in '.git' or end in '.atom'},
+          %{Path must not start or end with a special character and must not contain consecutive special characters.}
+        ])
     end
   end
 end
@@ -46,10 +46,11 @@ RSpec.describe JiraConnect::SyncService do
 
     context 'when a request returns an error' do
      it 'logs the response as an error' do
-        expect_next(client).to store_info([
-          { 'errorMessages' => ['some error message'] },
-          { 'errorMessages' => ['x'] }
-        ])
+        expect_next(client).to store_info(
+          [
+            { 'errorMessages' => ['some error message'] },
+            { 'errorMessages' => ['x'] }
+          ])
 
         expect_log(:error, { 'errorMessages' => ['some error message'] })
         expect_log(:error, { 'errorMessages' => ['x'] })
@@ -52,10 +52,11 @@ RSpec.describe MergeRequests::LinkLfsObjectsService, :sidekiq_inline do
 
     it 'calls Projects::LfsPointers::LfsLinkService#execute with OIDs of LFS objects in merge request' do
       expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service|
-        expect(service).to receive(:execute).with(%w[
-          8b12507783d5becacbf2ebe5b01a60024d8728a8f86dcc818bce699e8b3320bc
-          94a72c074cfe574742c9e99e863322f73feff82981d065ff65a0308f44f19f62
-        ])
+        expect(service).to receive(:execute).with(
+          %w[
+            8b12507783d5becacbf2ebe5b01a60024d8728a8f86dcc818bce699e8b3320bc
+            94a72c074cfe574742c9e99e863322f73feff82981d065ff65a0308f44f19f62
+          ])
       end
 
       execute
@@ -799,23 +799,24 @@ RSpec.describe MergeRequests::RefreshService do
       it 'does not mark as draft based on commits that do not belong to an MR' do
         allow(refresh_service).to receive(:find_new_commits)
 
-        refresh_service.instance_variable_set("@commits", [
-          double(
-            id: 'aaaaaaa',
-            sha: 'aaaaaaa',
-            short_id: 'aaaaaaa',
-            title: 'Fix issue',
-            draft?: false
-          ),
-          double(
-            id: 'bbbbbbb',
-            sha: 'bbbbbbbb',
-            short_id: 'bbbbbbb',
-            title: 'fixup! Fix issue',
-            draft?: true,
-            to_reference: 'bbbbbbb'
-          )
-        ])
+        refresh_service.instance_variable_set("@commits",
+          [
+            double(
+              id: 'aaaaaaa',
+              sha: 'aaaaaaa',
+              short_id: 'aaaaaaa',
+              title: 'Fix issue',
+              draft?: false
+            ),
+            double(
+              id: 'bbbbbbb',
+              sha: 'bbbbbbbb',
+              short_id: 'bbbbbbb',
+              title: 'fixup! Fix issue',
+              draft?: true,
+              to_reference: 'bbbbbbb'
+            )
+          ])
 
         refresh_service.execute(@oldrev, @newrev, 'refs/heads/master')
         reload_mrs
@@ -67,10 +67,11 @@ RSpec.describe Repositories::ChangelogService do
       allow(MergeRequestDiffCommit)
         .to receive(:oldest_merge_request_id_per_commit)
         .with(project.id, [commit2.id, commit1.id])
-        .and_return([
-          { sha: sha2, merge_request_id: mr1.id },
-          { sha: sha3, merge_request_id: mr2.id }
-        ])
+        .and_return(
+          [
+            { sha: sha2, merge_request_id: mr1.id },
+            { sha: sha3, merge_request_id: mr2.id }
+          ])
 
       service = described_class
         .new(project, creator, version: '1.0.0', from: sha1, to: sha3)
@@ -135,10 +136,11 @@ RSpec.describe Repositories::ChangelogService do
       allow(MergeRequestDiffCommit)
        .to receive(:oldest_merge_request_id_per_commit)
         .with(project.id, [commit2.id, commit1.id])
-        .and_return([
-          { sha: sha2, merge_request_id: mr1.id },
-          { sha: sha3, merge_request_id: mr2.id }
-        ])
+        .and_return(
+          [
+            { sha: sha2, merge_request_id: mr1.id },
+            { sha: sha3, merge_request_id: mr2.id }
+          ])
 
       service = described_class
         .new(project, creator, version: '1.0.0', from: sha1, to: sha3)
@@ -19,10 +19,11 @@ RSpec.describe ResourceEvents::SyntheticMilestoneNotesBuilderService do
       notes = described_class.new(issue, user).execute
 
       expect(notes.map(&:created_at)).to eq(events.map(&:created_at))
-      expect(notes.map(&:note)).to eq([
-        "changed milestone to %#{milestone.iid}",
-        'removed milestone'
-      ])
+      expect(notes.map(&:note)).to eq(
+        [
+          "changed milestone to %#{milestone.iid}",
+          'removed milestone'
+        ])
     end
 
     it_behaves_like 'filters by paginated notes', :resource_milestone_event
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module MigrationHelpers
+  module VulnerabilitiesHelper
+    # rubocop:disable Metrics/ParameterLists
+    def create_finding!(
+      vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:,
+      name: "test", severity: 7, confidence: 7, report_type: 0,
+      project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
+      metadata_version: 'test', raw_metadata: 'test', uuid: 'b1cee17e-3d7a-11ed-b878-0242ac120002')
+      table(:vulnerability_occurrences).create!(
+        vulnerability_id: vulnerability_id,
+        project_id: project_id,
+        name: name,
+        severity: severity,
+        confidence: confidence,
+        report_type: report_type,
+        project_fingerprint: project_fingerprint,
+        scanner_id: scanner_id,
+        primary_identifier_id: primary_identifier_id,
+        location_fingerprint: location_fingerprint,
+        metadata_version: metadata_version,
+        raw_metadata: raw_metadata,
+        uuid: uuid
+      )
+    end
+    # rubocop:enable Metrics/ParameterLists
+
+    def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
+      table(:vulnerabilities).create!(
+        project_id: project_id,
+        author_id: author_id,
+        title: title,
+        severity: severity,
+        confidence: confidence,
+        report_type: report_type
+      )
+    end
+  end
+end