Add latest changes from gitlab-org/gitlab@master

GitLab Bot committed 2020-03-27 09:08:28 +00:00
commit 6ac4a6713e (parent d2b64c37bd)
33 changed files with 302 additions and 37 deletions

View file

@@ -16,6 +16,8 @@ stages:
default:
tags:
- gitlab-org
# All jobs are interruptible by default
interruptible: true
workflow:
rules:

View file

@@ -16,6 +16,9 @@
.if-master-refs: &if-master-refs
if: '$CI_COMMIT_REF_NAME == "master"'
.if-auto-deploy-branches: &if-auto-deploy-branches
if: '$CI_COMMIT_BRANCH =~ /^\d+-\d+-auto-deploy-\d+$/'
.if-master-or-tag: &if-master-or-tag
if: '$CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_TAG'
@@ -509,6 +512,14 @@
changes: *code-backstage-qa-patterns
when: on_success
.setup:rules:dont-interrupt-me:
rules:
- <<: *if-master-or-tag
when: on_success
- <<: *if-auto-deploy-branches
when: on_success
- when: manual
.setup:rules:gitlab_git_test:
rules:
- <<: *if-default-refs

View file

@@ -23,6 +23,18 @@ cache gems:
- .default-retry
needs: []
dont-interrupt-me:
extends: .setup:rules:dont-interrupt-me
stage: prepare
image: alpine:edge
interruptible: false
allow_failure: true
variables:
GIT_STRATEGY: none
dependencies: []
script:
- echo "This jobs makes sure this pipeline won't be interrupted! See https://docs.gitlab.com/ee/ci/yaml/#interruptible."
gitlab_git_test:
extends:
- .minimal-job

View file

@@ -46,7 +46,7 @@ class KeysFinder
return keys unless params[:fingerprint].present?
raise InvalidFingerprint unless valid_fingerprint_param?
keys.where(fingerprint_query).first # rubocop: disable CodeReuse/ActiveRecord
keys.find_by(fingerprint_query) # rubocop: disable CodeReuse/ActiveRecord
end
def valid_fingerprint_param?
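
Most of the Ruby changes in this commit apply the same refactor: `.where(...).first` becomes `.find_by(...)`. A minimal sketch of the equivalence, using the `Key` model purely as an illustration (the fingerprint value is made up):

```ruby
fingerprint = "aa:bb:cc:dd" # illustrative value only

# Both return the first matching record, or nil when nothing matches.
key_via_where   = Key.where(fingerprint: fingerprint).first
key_via_find_by = Key.find_by(fingerprint: fingerprint)

# Caveat: `first` sorts by primary key before taking one row, while `find_by`
# uses `take` and adds no ordering, so results can differ when several rows
# match. For unique lookups such as a fingerprint, the two are interchangeable.
```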

View file

@@ -74,10 +74,12 @@ class NotificationRecipient
end
def unsubscribed?
return false unless @target
return false unless @target.respond_to?(:subscriptions)
subscribable_target = @target.is_a?(Note) ? @target.noteable : @target
subscription = @target.subscriptions.find { |subscription| subscription.user_id == @user.id }
return false unless subscribable_target
return false unless subscribable_target.respond_to?(:subscriptions)
subscription = subscribable_target.subscriptions.find { |subscription| subscription.user_id == @user.id }
subscription && !subscription.subscribed
end
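
The `unsubscribed?` change above resolves the subscription against the note's noteable when the target is a `Note`, so a user who unsubscribed from an issue is also excluded from notifications about new notes on it. A hedged sketch of that behaviour; the constructor arguments are assumptions for illustration, not taken from this diff:

```ruby
# `user` has explicitly unsubscribed from `issue`; `note` is a note on it.
note = issue.notes.last
recipient = NotificationRecipient.new(user, :watch, target: note, project: note.project)

# With the change, the subscription lookup is delegated to note.noteable,
# so the recipient is considered unsubscribed and can be filtered out.
recipient.unsubscribed? # => true
```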

View file

@@ -19,7 +19,7 @@ class NotePolicy < BasePolicy
condition(:confidential, scope: :subject) { @subject.confidential? }
condition(:can_read_confidential) do
access_level >= Gitlab::Access::REPORTER || @subject.noteable_assignee_or_author?(@user)
access_level >= Gitlab::Access::REPORTER || @subject.noteable_assignee_or_author?(@user) || admin?
end
rule { ~editable }.prevent :admin_note

View file

@@ -23,6 +23,11 @@ module NotificationRecipients
raise 'abstract'
end
# override if needed
def recipients_target
target
end
def project
target.project
end
@@ -59,7 +64,7 @@ module NotificationRecipients
project: project,
group: group,
custom_action: custom_action,
target: target,
target: recipients_target,
acting_user: acting_user
)
end

View file

@@ -12,6 +12,10 @@ module NotificationRecipients
note.noteable
end
def recipients_target
note
end
# NOTE: may be nil, in the case of a PersonalSnippet
#
# (this is okay because NotificationRecipient is written

View file

@@ -0,0 +1,5 @@
---
title: Complete the migration of Job Artifact to Security Scan
merge_request: 24244
author:
type: other

View file

@@ -6,14 +6,28 @@ review from the Data team and Telemetry team is recommended.
@gitlab-org/growth/telemetry group is mentioned in order to notify team members.
MSG
USAGE_DATA_FILES_MESSAGE = <<~MSG
For the following files, a review from the [Data team and Telemetry team](https://gitlab.com/groups/gitlab-org/growth/telemetry/-/group_members?with_inherited_permissions=exclude) is recommended:
MSG
usage_data_changed_files = git.modified_files.grep(%r{usage_data})
def has_label?(label)
gitlab.mr_labels.include?(label)
end
def labels_for_merge_request(labels)
labels_list = labels.map { |label| %Q{~"#{label}"} }.join(' ')
"/label #{labels_list}"
end
if usage_data_changed_files.any?
warn format(TELEMETRY_CHANGED_FILES_MESSAGE)
USAGE_DATA_FILES_MESSAGE = <<~MSG
For the following files, a review from the [Data team and Telemetry team](https://gitlab.com/groups/gitlab-org/growth/telemetry/-/group_members?with_inherited_permissions=exclude) is recommended:
MSG
markdown(USAGE_DATA_FILES_MESSAGE + helper.markdown_list(usage_data_changed_files))
telemetry_labels = ['telemetry']
telemetry_labels << 'telemetry::review pending' unless has_label?('telemetry::reviewed')
markdown(labels_for_merge_request(telemetry_labels))
end
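
For reference, `labels_for_merge_request` turns the label list into a single `/label` quick action, which Danger posts in its merge request comment so GitLab applies the labels. Evaluating it with the values used in the rule above:

```ruby
labels_for_merge_request(['telemetry', 'telemetry::review pending'])
# => "/label ~\"telemetry\" ~\"telemetry::review pending\""
```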

View file

@@ -0,0 +1,13 @@
# frozen_string_literal: true
class CompleteMigrateSecurityScans < ActiveRecord::Migration[6.0]
disable_ddl_transaction!
def up
Gitlab::BackgroundMigration.steal('MigrateSecurityScans')
end
def down
# intentionally blank
end
end

View file

@@ -0,0 +1,24 @@
# frozen_string_literal: true
class RemoveIndexUsedForScanMigration < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'job_artifacts_secure_reports_temp_index'
COLUMNS = [:id, :file_type, :job_id, :created_at, :updated_at]
disable_ddl_transaction!
def up
if index_exists?(:ci_job_artifacts, COLUMNS, name: INDEX_NAME)
remove_concurrent_index(:ci_job_artifacts, COLUMNS, name: INDEX_NAME)
end
end
def down
add_concurrent_index(:ci_job_artifacts,
COLUMNS,
name: INDEX_NAME,
where: 'file_type BETWEEN 5 AND 8')
end
end

View file

@@ -10214,8 +10214,6 @@ CREATE UNIQUE INDEX issue_user_mentions_on_issue_id_and_note_id_index ON public.
CREATE UNIQUE INDEX issue_user_mentions_on_issue_id_index ON public.issue_user_mentions USING btree (issue_id) WHERE (note_id IS NULL);
CREATE INDEX job_artifacts_secure_reports_temp_index ON public.ci_job_artifacts USING btree (id, file_type, job_id, created_at, updated_at) WHERE ((file_type >= 5) AND (file_type <= 8));
CREATE UNIQUE INDEX kubernetes_namespaces_cluster_and_namespace ON public.clusters_kubernetes_namespaces USING btree (cluster_id, namespace);
CREATE INDEX merge_request_mentions_temp_index ON public.merge_requests USING btree (id) WHERE ((description ~~ '%@%'::text) OR ((title)::text ~~ '%@%'::text));
@@ -12800,6 +12798,8 @@ COPY "schema_migrations" (version) FROM STDIN;
20200319203901
20200320112455
20200320123839
20200323011225
20200323011955
20200323071918
20200323074147
20200323075043

View file

@@ -6401,6 +6401,26 @@ type Project {
Returns the last _n_ elements from the list.
"""
last: Int
"""
Filter vulnerabilities by project
"""
projectId: [ID!]
"""
Filter vulnerabilities by report type
"""
reportType: [VulnerabilityReportType!]
"""
Filter vulnerabilities by severity
"""
severity: [VulnerabilitySeverity!]
"""
Filter vulnerabilities by state
"""
state: [VulnerabilityState!]
): VulnerabilityConnection
"""

View file

@@ -19018,6 +19018,78 @@
"name": "vulnerabilities",
"description": "Vulnerabilities reported on the project. Available only when feature flag `first_class_vulnerabilities` is enabled",
"args": [
{
"name": "projectId",
"description": "Filter vulnerabilities by project",
"type": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ID",
"ofType": null
}
}
},
"defaultValue": null
},
{
"name": "reportType",
"description": "Filter vulnerabilities by report type",
"type": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "ENUM",
"name": "VulnerabilityReportType",
"ofType": null
}
}
},
"defaultValue": null
},
{
"name": "severity",
"description": "Filter vulnerabilities by severity",
"type": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "ENUM",
"name": "VulnerabilitySeverity",
"ofType": null
}
}
},
"defaultValue": null
},
{
"name": "state",
"description": "Filter vulnerabilities by state",
"type": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "ENUM",
"name": "VulnerabilityState",
"ofType": null
}
}
},
"defaultValue": null
},
{
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",

View file

@@ -142,6 +142,15 @@ and included in `rules` definitions via [YAML anchors](../ci/yaml/README.md#anch
| `code-qa-patterns` | Combination of `code-patterns` and `qa-patterns`. |
| `code-backstage-qa-patterns` | Combination of `code-patterns`, `backstage-patterns`, and `qa-patterns`. |
## Interruptible pipelines
By default, all jobs are [interruptible](../ci/yaml/README.md#interruptible), except the
`dont-interrupt-me` job, which runs automatically on `master` and is `manual`
otherwise.
If you want a running pipeline to finish even if you push new commits to a merge
request, be sure to start the `dont-interrupt-me` job before pushing.
## Directed acyclic graph
We're using the [`needs:`](../ci/yaml/README.md#needs) keyword to

View file

@@ -45,7 +45,7 @@ module Gitlab
reverts_for_type('namespace') do |path_before_rename, current_path|
matches_path = MigrationClasses::Route.arel_table[:path].matches(current_path)
namespace = MigrationClasses::Namespace.joins(:route)
.where(matches_path).first&.becomes(MigrationClasses::Namespace)
.find_by(matches_path)&.becomes(MigrationClasses::Namespace)
if namespace
perform_rename(namespace, current_path, path_before_rename)

View file

@@ -37,7 +37,7 @@ module Gitlab
reverts_for_type('project') do |path_before_rename, current_path|
matches_path = MigrationClasses::Route.arel_table[:path].matches(current_path)
project = MigrationClasses::Project.joins(:route)
.where(matches_path).first
.find_by(matches_path)
if project
perform_rename(project, current_path, path_before_rename)

View file

@@ -62,7 +62,7 @@ module Gitlab
end
def find_object
klass.where(where_clause).first
klass.find_by(where_clause)
end
def where_clause

View file

@@ -19,7 +19,7 @@ module Gitlab
@exported_members.inject(missing_keys_tracking_hash) do |hash, member|
if member['user']
old_user_id = member['user']['id']
existing_user = User.where(find_user_query(member)).first
existing_user = User.find_by(find_user_query(member))
hash[old_user_id] = existing_user.id if existing_user && add_team_member(member, existing_user)
else
add_team_member(member)

View file

@@ -9144,6 +9144,9 @@ msgstr ""
msgid "Geo Nodes"
msgstr ""
msgid "Geo Nodes|Cannot remove a primary node if there is a secondary node"
msgstr ""
msgid "Geo Settings"
msgstr ""

View file

@@ -67,7 +67,7 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, schema: 201802081
it 'does not add hashed files to the untracked_files_for_uploads table' do
described_class.new.perform
hashed_file_path = get_uploads(project2, 'Project').where(uploader: 'FileUploader').first.path
hashed_file_path = get_uploads(project2, 'Project').find_by(uploader: 'FileUploader').path
expect(untracked_files_for_uploads.where("path like '%#{hashed_file_path}%'").exists?).to be_falsey
end

View file

@@ -123,7 +123,7 @@ describe Gitlab::ImportExport::Project::TreeRestorer do
end
it 'preserves updated_at on issues' do
issue = Issue.where(description: 'Aliquam enim illo et possimus.').first
issue = Issue.find_by(description: 'Aliquam enim illo et possimus.')
expect(issue.reload.updated_at.to_s).to eq('2016-06-14 15:02:47 UTC')
end
@@ -170,7 +170,7 @@ describe Gitlab::ImportExport::Project::TreeRestorer do
end
context 'event at forth level of the tree' do
let(:event) { Event.where(action: 6).first }
let(:event) { Event.find_by(action: 6) }
it 'restores the event' do
expect(event).not_to be_nil
@@ -440,7 +440,7 @@ describe Gitlab::ImportExport::Project::TreeRestorer do
end
it 'restores external pull request for the restored pipeline' do
pipeline_with_external_pr = @project.ci_pipelines.where(source: 'external_pull_request_event').first
pipeline_with_external_pr = @project.ci_pipelines.find_by(source: 'external_pull_request_event')
expect(pipeline_with_external_pr.external_pull_request).to be_persisted
end

View file

@@ -26,7 +26,7 @@ describe GenerateMissingRoutes do
described_class.new.up
route = routes.where(source_type: 'Project').take
route = routes.find_by(source_type: 'Project')
expect(route.source_id).to eq(project.id)
expect(route.path).to eq("gitlab/gitlab-ce-#{project.id}")
@@ -37,7 +37,7 @@ describe GenerateMissingRoutes do
described_class.new.up
route = routes.where(source_type: 'Namespace').take
route = routes.find_by(source_type: 'Namespace')
expect(route.source_id).to eq(namespace.id)
expect(route.path).to eq("gitlab-#{namespace.id}")

View file

@@ -89,11 +89,11 @@ describe MigrateAutoDevOpsDomainToClusterDomain do
end
def find_cluster_project(project_id)
cluster_projects_table.where(project_id: project_id).first
cluster_projects_table.find_by(project_id: project_id)
end
def find_cluster(cluster_id)
clusters_table.where(id: cluster_id).first
clusters_table.find_by(id: cluster_id)
end
def project_auto_devops_with_domain

View file

@@ -18,16 +18,16 @@ describe NullifyUsersRole do
it 'nullifies the role of the user with updated_at < 2019-11-05 12:08:00 and a role of 0' do
expect(users.where(role: nil).count).to eq(1)
expect(users.where(role: nil).first.email).to eq('1')
expect(users.find_by(role: nil).email).to eq('1')
end
it 'leaves the user with role of 1' do
expect(users.where(role: 1).count).to eq(1)
expect(users.where(role: 1).first.email).to eq('2')
expect(users.find_by(role: 1).email).to eq('2')
end
it 'leaves the user with updated_at > 2019-11-05 12:08:00' do
expect(users.where(role: 0).count).to eq(1)
expect(users.where(role: 0).first.email).to eq('3')
expect(users.find_by(role: 0).email).to eq('3')
end
end

View file

@@ -39,9 +39,9 @@ describe ScheduleToArchiveLegacyTraces do
expect(File.exist?(legacy_trace_path(@build_failed))).to be_falsy
expect(File.exist?(legacy_trace_path(@builds_canceled))).to be_falsy
expect(File.exist?(legacy_trace_path(@build_running))).to be_truthy
expect(File.exist?(archived_trace_path(job_artifacts.where(job_id: @build_success.id).first))).to be_truthy
expect(File.exist?(archived_trace_path(job_artifacts.where(job_id: @build_failed.id).first))).to be_truthy
expect(File.exist?(archived_trace_path(job_artifacts.where(job_id: @builds_canceled.id).first))).to be_truthy
expect(File.exist?(archived_trace_path(job_artifacts.find_by(job_id: @build_success.id)))).to be_truthy
expect(File.exist?(archived_trace_path(job_artifacts.find_by(job_id: @build_failed.id)))).to be_truthy
expect(File.exist?(archived_trace_path(job_artifacts.find_by(job_id: @builds_canceled.id)))).to be_truthy
expect(job_artifacts.where(job_id: @build_running.id)).not_to be_exist
end
end

View file

@@ -263,6 +263,7 @@ describe NotePolicy do
let(:non_member) { create(:user) }
let(:author) { create(:user) }
let(:assignee) { create(:user) }
let(:admin) { create(:admin) }
before do
project.add_reporter(reporter)
@@ -294,6 +295,10 @@
expect(permissions(maintainer, confidential_note)).to be_allowed(:read_note, :admin_note, :resolve_note, :award_emoji)
end
it 'allows admins to read all notes and admin them' do
expect(permissions(admin, confidential_note)).to be_allowed(:read_note, :admin_note, :resolve_note, :award_emoji)
end
it 'allows noteable author to read and resolve all notes' do
expect(permissions(author, confidential_note)).to be_allowed(:read_note, :resolve_note, :award_emoji)
expect(permissions(author, confidential_note)).to be_disallowed(:admin_note)

View file

@@ -23,9 +23,9 @@ describe API::Jobs do
json_job['artifacts'].each do |artifact|
expect(artifact).not_to be_nil
file_type = Ci::JobArtifact.file_types[artifact['file_type']]
expect(artifact['size']).to eq(second_job.job_artifacts.where(file_type: file_type).first.size)
expect(artifact['filename']).to eq(second_job.job_artifacts.where(file_type: file_type).first.filename)
expect(artifact['file_format']).to eq(second_job.job_artifacts.where(file_type: file_type).first.file_format)
expect(artifact['size']).to eq(second_job.job_artifacts.find_by(file_type: file_type).size)
expect(artifact['filename']).to eq(second_job.job_artifacts.find_by(file_type: file_type).filename)
expect(artifact['file_format']).to eq(second_job.job_artifacts.find_by(file_type: file_type).file_format)
end
end
end

View file

@@ -16,7 +16,7 @@ describe Emails::CreateService do
it 'creates an email with additional attributes' do
expect { service.execute(confirmation_token: 'abc') }.to change { Email.count }.by(1)
expect(Email.where(opts).first.confirmation_token).to eq 'abc'
expect(Email.find_by(opts).confirmation_token).to eq 'abc'
end
it 'has the right user association' do

View file

@@ -100,7 +100,7 @@ describe Issues::MoveService do
context 'when issue has notes with mentions' do
it 'saves user mentions with actual mentions for new issue' do
expect(new_issue.user_mentions.where(note_id: nil).first.mentioned_users_ids).to match_array([user.id])
expect(new_issue.user_mentions.find_by(note_id: nil).mentioned_users_ids).to match_array([user.id])
expect(new_issue.user_mentions.where.not(note_id: nil).first.mentioned_users_ids).to match_array([user.id])
expect(new_issue.user_mentions.where.not(note_id: nil).count).to eq 1
expect(new_issue.user_mentions.count).to eq 2

View file

@@ -0,0 +1,64 @@
# frozen_string_literal: true
require 'spec_helper'
describe NotificationRecipients::Builder::NewNote do
describe '#notification_recipients' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, group: group) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:other_user) { create(:user) }
let_it_be(:participant) { create(:user) }
let_it_be(:non_member_participant) { create(:user) }
let_it_be(:group_watcher) { create(:user) }
let_it_be(:project_watcher) { create(:user) }
let_it_be(:guest_project_watcher) { create(:user) }
let_it_be(:subscriber) { create(:user) }
let_it_be(:unsubscribed_user) { create(:user) }
let_it_be(:non_member_subscriber) { create(:user) }
let_it_be(:notification_setting_project_w) { create(:notification_setting, source: project, user: project_watcher, level: 2) }
let_it_be(:notification_setting_guest_w) { create(:notification_setting, source: project, user: guest_project_watcher, level: 2) }
let_it_be(:notification_setting_group_w) { create(:notification_setting, source: group, user: group_watcher, level: 2) }
let_it_be(:subscriptions) do
[
create(:subscription, project: project, user: subscriber, subscribable: issue, subscribed: true),
create(:subscription, project: project, user: unsubscribed_user, subscribable: issue, subscribed: false),
create(:subscription, project: project, user: non_member_subscriber, subscribable: issue, subscribed: true)
]
end
subject { described_class.new(note) }
before do
project.add_developer(participant)
project.add_developer(project_watcher)
project.add_guest(guest_project_watcher)
project.add_developer(subscriber)
group.add_developer(group_watcher)
expect(issue).to receive(:participants).and_return([participant, non_member_participant])
end
context 'for public notes' do
let_it_be(:note) { create(:note, noteable: issue, project: project) }
it 'adds all participants, watchers and subscribers' do
expect(subject.notification_recipients.map(&:user)).to contain_exactly(
participant, non_member_participant, project_watcher, group_watcher, guest_project_watcher, subscriber, non_member_subscriber
)
end
end
context 'for confidential notes' do
let_it_be(:note) { create(:note, :confidential, noteable: issue, project: project) }
it 'adds all participants, watchers and subscribers that are project members' do
expect(subject.notification_recipients.map(&:user)).to contain_exactly(
participant, project_watcher, group_watcher, subscriber
)
end
end
end
end

View file

@@ -321,9 +321,9 @@ describe Projects::ForkService do
Projects::UpdateRepositoryStorageService.new(project).execute('test_second_storage')
fork_after_move = fork_project(project)
pool_repository_before_move = PoolRepository.joins(:shard)
.where(source_project: project, shards: { name: 'default' }).first
.find_by(source_project: project, shards: { name: 'default' })
pool_repository_after_move = PoolRepository.joins(:shard)
.where(source_project: project, shards: { name: 'test_second_storage' }).first
.find_by(source_project: project, shards: { name: 'test_second_storage' })
expect(fork_before_move.pool_repository).to eq(pool_repository_before_move)
expect(fork_after_move.pool_repository).to eq(pool_repository_after_move)