Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-10-11 00:10:39 +00:00
parent b520023279
commit 67d19cc004
18 changed files with 228 additions and 76 deletions

View file

@ -0,0 +1,21 @@
# frozen_string_literal: true
module Mutations
  module Ci
    module PipelineSchedule
      # Shared base class for pipeline-schedule GraphQL mutations.
      # Declares the required global-ID `id` argument and resolves it to the
      # record that concrete mutations (e.g. Delete) authorize against.
      class Base < BaseMutation
        # Scoped global-ID type so only Ci::PipelineSchedule GIDs are accepted.
        PipelineScheduleID = ::Types::GlobalIDType[::Ci::PipelineSchedule]
        argument :id, PipelineScheduleID,
          required: true,
          description: 'ID of the pipeline schedule to mutate.'
        private
        # Locates the schedule for `authorized_find!` from the supplied
        # global ID. Raises if the GID cannot be resolved to a record.
        def find_object(id:)
          GlobalID::Locator.locate(id)
        end
      end
    end
  end
end

View file

@ -0,0 +1,27 @@
# frozen_string_literal: true
module Mutations
  module Ci
    module PipelineSchedule
      # GraphQL mutation that removes a single pipeline schedule,
      # identified by its global ID.
      class Delete < Base
        graphql_name 'PipelineScheduleDelete'

        authorize :admin_pipeline_schedule

        # Destroys the schedule the current user is authorized to administer.
        # Returns an empty error list on success, or a single error message
        # when the record could not be destroyed.
        def resolve(id:)
          schedule = authorized_find!(id: id)

          errors = schedule.destroy ? [] : ['Failed to remove the pipeline schedule']

          { errors: errors }
        end
      end
    end
  end
end

View file

@ -114,6 +114,7 @@ module Types
mount_mutation Mutations::Ci::Pipeline::Cancel
mount_mutation Mutations::Ci::Pipeline::Destroy
mount_mutation Mutations::Ci::Pipeline::Retry
mount_mutation Mutations::Ci::PipelineSchedule::Delete
mount_mutation Mutations::Ci::CiCdSettingsUpdate, deprecated: {
reason: :renamed,
replacement: 'ProjectCiCdSettingsUpdate',

View file

@ -1738,7 +1738,7 @@ class User < ApplicationRecord
end
def authorized_project_mirrors(level)
projects = Ci::ProjectMirror.by_project_id(ci_project_mirrors_for_project_members(level))
projects = Ci::ProjectMirror.by_project_id(ci_project_ids_for_project_members(level))
namespace_projects = Ci::ProjectMirror.by_namespace_id(ci_namespace_mirrors_for_group_members(level).select(:namespace_id))
@ -2210,7 +2210,7 @@ class User < ApplicationRecord
end
# rubocop: enable CodeReuse/ServiceClass
def ci_project_mirrors_for_project_members(level)
def ci_project_ids_for_project_members(level)
project_members.where('access_level >= ?', level).pluck(:source_id)
end
@ -2364,7 +2364,7 @@ class User < ApplicationRecord
end
def ci_owned_project_runners_from_project_members
project_ids = ci_project_mirrors_for_project_members(Gitlab::Access::MAINTAINER)
project_ids = ci_project_ids_for_project_members(Gitlab::Access::MAINTAINER)
Ci::Runner
.joins(:runner_projects)

View file

@ -26,7 +26,6 @@ module Gitlab
RefreshImportJidWorker.perform_in_the_future(project.id, jid)
info(project.id, message: "starting importer", importer: 'Importer::RepositoryImporter')
importer = Importer::RepositoryImporter.new(project, client)
importer.execute

View file

@ -18,9 +18,13 @@ class MergeRequests::DeleteSourceBranchWorker
# Source branch changed while it's being removed
return if merge_request.source_branch_sha != source_branch_sha
::Branches::DeleteService.new(merge_request.source_project, user)
delete_service_result = ::Branches::DeleteService.new(merge_request.source_project, user)
.execute(merge_request.source_branch)
if Feature.enabled?(:track_delete_source_errors, merge_request.source_project)
delete_service_result.track_exception if delete_service_result&.error?
end
::MergeRequests::RetargetChainService.new(project: merge_request.source_project, current_user: user)
.execute(merge_request)
rescue ActiveRecord::RecordNotFound

View file

@ -0,0 +1,8 @@
---
# Feature-flag definition guarding error tracking when source-branch
# deletion fails in MergeRequests::DeleteSourceBranchWorker.
name: track_delete_source_errors
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/99028
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/377258
milestone: '15.5'
type: development
group: group::code review
default_enabled: false

View file

@ -4158,6 +4158,24 @@ Input type: `PipelineRetryInput`
| <a id="mutationpipelineretryerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationpipelineretrypipeline"></a>`pipeline` | [`Pipeline`](#pipeline) | Pipeline after mutation. |
### `Mutation.pipelineScheduleDelete`
Input type: `PipelineScheduleDeleteInput`
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationpipelinescheduledeleteclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationpipelinescheduledeleteid"></a>`id` | [`CiPipelineScheduleID!`](#cipipelinescheduleid) | ID of the pipeline schedule to mutate. |
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationpipelinescheduledeleteclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationpipelinescheduledeleteerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
### `Mutation.projectCiCdSettingsUpdate`
Input type: `ProjectCiCdSettingsUpdateInput`
@ -21810,6 +21828,12 @@ A `CiPipelineID` is a global ID. It is encoded as a string.
An example `CiPipelineID` is: `"gid://gitlab/Ci::Pipeline/1"`.
### `CiPipelineScheduleID`
A `CiPipelineScheduleID` is a global ID. It is encoded as a string.
An example `CiPipelineScheduleID` is: `"gid://gitlab/Ci::PipelineSchedule/1"`.
### `CiRunnerID`
A `CiRunnerID` is a global ID. It is encoded as a string.

View file

@ -158,6 +158,10 @@ You can sort members by **Account**, **Access granted**, **Max role**, or **Last
You can give a user access to all projects in a group.
Prerequisites:
- You must have the Owner role.
1. On the top bar, select **Main menu > Groups** and find your group.
1. On the left sidebar, select **Group information > Members**.
1. Select **Invite members**.

View file

@ -60,10 +60,6 @@ module Gitlab
work_item_type_id: issue.work_item_type_id
}
Issue.with_project_iid_supply(project) do |supply|
attributes[:iid] = supply.next_value
end
insert_and_return_id(attributes, project.issues)
rescue ActiveRecord::InvalidForeignKey
# It's possible the project has been deleted since scheduling this

View file

@ -48,7 +48,7 @@ module Gitlab
end
def schedule_issue_import_workers(issues)
next_iid = project.issues.maximum(:iid).to_i + 1
next_iid = Issue.with_project_iid_supply(project, &:next_value)
issues.each do |jira_issue|
# Technically it's possible that the same work is performed multiple
@ -70,7 +70,8 @@ module Gitlab
Gitlab::JiraImport::ImportIssueWorker.perform_async(project.id, jira_issue.id, issue_attrs, job_waiter.key)
job_waiter.jobs_remaining += 1
next_iid += 1
next_iid = Issue.with_project_iid_supply(project, &:next_value)
# Mark the issue as imported immediately so we don't end up
# importing it multiple times within same import.

View file

@ -19,7 +19,6 @@ module Gitlab
ALLOWED_AGGREGATIONS = %i(daily weekly).freeze
CATEGORIES_FOR_TOTALS = %w[
analytics
compliance
error_tracking
ide_edit
@ -27,6 +26,7 @@ module Gitlab
].freeze
CATEGORIES_COLLECTED_FROM_METRICS_DEFINITIONS = %w[
analytics
ci_users
deploy_token_packages
code_review

View file

@ -10,54 +10,18 @@
category: analytics
redis_slot: analytics
aggregation: weekly
- name: p_analytics_merge_request
category: analytics
redis_slot: analytics
aggregation: weekly
- name: i_analytics_instance_statistics
category: analytics
redis_slot: analytics
aggregation: weekly
- name: g_analytics_contribution
category: analytics
redis_slot: analytics
aggregation: weekly
- name: g_analytics_insights
category: analytics
redis_slot: analytics
aggregation: weekly
- name: g_analytics_issues
category: analytics
redis_slot: analytics
aggregation: weekly
- name: g_analytics_productivity
category: analytics
redis_slot: analytics
aggregation: weekly
- name: g_analytics_valuestream
category: analytics
redis_slot: analytics
aggregation: weekly
- name: p_analytics_pipelines
category: analytics
redis_slot: analytics
aggregation: weekly
- name: p_analytics_code_reviews
category: analytics
redis_slot: analytics
aggregation: weekly
- name: p_analytics_valuestream
category: analytics
redis_slot: analytics
aggregation: weekly
- name: p_analytics_insights
category: analytics
redis_slot: analytics
aggregation: weekly
- name: p_analytics_issues
category: analytics
redis_slot: analytics
aggregation: weekly
- name: p_analytics_repo
category: analytics
redis_slot: analytics
@ -86,23 +50,3 @@
category: analytics
redis_slot: analytics
aggregation: weekly
- name: g_analytics_ci_cd_release_statistics
category: analytics
redis_slot: analytics
aggregation: weekly
- name: g_analytics_ci_cd_deployment_frequency
category: analytics
redis_slot: analytics
aggregation: weekly
- name: g_analytics_ci_cd_lead_time
category: analytics
redis_slot: analytics
aggregation: weekly
- name: g_analytics_ci_cd_time_to_restore_service
category: analytics
redis_slot: analytics
aggregation: weekly
- name: g_analytics_ci_cd_change_failure_rate
category: analytics
redis_slot: analytics
aggregation: weekly

View file

@ -20,7 +20,7 @@ require_relative 'shared'
# Factorybot factory methods to create persisted model objects with stable
# and consistent data values, to ensure consistent example snapshot HTML
# across various machines and environments. RSpec also makes it easy to invoke
# the API # and obtain the response.
# the API and obtain the response.
#
# It is intended to be invoked as a helper subprocess from the `update_example_snapshots.rb`
# script class. It's not intended to be run or used directly. This usage is also reinforced
@ -32,7 +32,7 @@ RSpec.describe 'Render Static HTML', :api, type: :request do # rubocop:disable R
# noinspection RailsParamDefResolve (RubyMine can't find the shared context from this file location)
include_context 'with GLFM example snapshot fixtures'
it 'can create a project dependency graph using factories' do
it do
markdown_hash = YAML.safe_load(File.open(ENV.fetch('INPUT_MARKDOWN_YML_PATH')), symbolize_names: true)
metadata_hash = YAML.safe_load(File.open(ENV.fetch('INPUT_METADATA_YML_PATH')), symbolize_names: true) || {}

View file

@ -141,7 +141,7 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redi
.to receive(:insert_and_return_id)
.with(
{
iid: 1,
iid: 42,
title: 'My Issue',
author_id: user.id,
project_id: project.id,
@ -172,7 +172,7 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redi
.to receive(:insert_and_return_id)
.with(
{
iid: 1,
iid: 42,
title: 'My Issue',
author_id: project.creator_id,
project_id: project.id,

View file

@ -44,7 +44,7 @@ RSpec.describe Gitlab::JiraImport::IssuesImporter do
def mock_issue_serializer(count, raise_exception_on_even_mocks: false)
serializer = instance_double(Gitlab::JiraImport::IssueSerializer, execute: { key: 'data' })
next_iid = project.issues.maximum(:iid).to_i
allow(Issue).to receive(:with_project_iid_supply).and_return('issue_iid')
count.times do |i|
if raise_exception_on_even_mocks && i.even?
@ -53,16 +53,15 @@ RSpec.describe Gitlab::JiraImport::IssuesImporter do
jira_issues[i],
current_user.id,
default_issue_type_id,
{ iid: next_iid + 1 }
{ iid: 'issue_iid' }
).and_raise('Some error')
else
next_iid += 1
expect(Gitlab::JiraImport::IssueSerializer).to receive(:new).with(
project,
jira_issues[i],
current_user.id,
default_issue_type_id,
{ iid: next_iid }
{ iid: 'issue_iid' }
).and_return(serializer)
end
end

View file

@ -0,0 +1,82 @@
# frozen_string_literal: true

require 'spec_helper'

# Request spec for the PipelineScheduleDelete GraphQL mutation:
# covers the unauthorized path, successful deletion, destroy failure,
# and a missing-record lookup.
RSpec.describe 'PipelineScheduleDelete' do
  include GraphqlHelpers

  let_it_be(:user) { create(:user) }
  let_it_be(:project) { create(:project) }
  let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: user) }

  let(:mutation) do
    graphql_mutation(
      :pipeline_schedule_delete,
      { id: pipeline_schedule_id },
      <<-QL
        errors
      QL
    )
  end

  let(:pipeline_schedule_id) { pipeline_schedule.to_global_id.to_s }
  let(:mutation_response) { graphql_mutation_response(:pipeline_schedule_delete) }

  context 'when unauthorized' do
    it 'returns an error' do
      # A user with no project membership must not see whether the schedule exists.
      post_graphql_mutation(mutation, current_user: create(:user))

      expect(graphql_errors).not_to be_empty
      expect(graphql_errors[0]['message'])
        .to eq(
          "The resource that you are attempting to access does not exist " \
          "or you don't have permission to perform this action"
        )
    end
  end

  context 'when authorized' do
    before do
      project.add_maintainer(user)
    end

    context 'when success' do
      # Named description (instead of a bare `it do`) so failures identify the example.
      it 'deletes the pipeline schedule without errors' do
        post_graphql_mutation(mutation, current_user: user)

        expect(response).to have_gitlab_http_status(:success)
        expect(mutation_response['errors']).to eq([])
      end
    end

    context 'when failure' do
      context 'when destroy fails' do
        before do
          allow_next_found_instance_of(Ci::PipelineSchedule) do |pipeline_schedule|
            allow(pipeline_schedule).to receive(:destroy).and_return(false)
          end
        end

        it 'returns the destroy failure message in the errors field' do
          post_graphql_mutation(mutation, current_user: user)

          expect(response).to have_gitlab_http_status(:success)
          expect(mutation_response['errors']).to match_array(['Failed to remove the pipeline schedule'])
        end
      end

      context 'when pipeline schedule not found' do
        let(:pipeline_schedule_id) { 'gid://gitlab/Ci::PipelineSchedule/0' }

        it 'returns a top-level GraphQL error' do
          post_graphql_mutation(mutation, current_user: user)

          expect(graphql_errors).not_to be_empty
          expect(graphql_errors[0]['message'])
            .to eq("Internal server error: Couldn't find Ci::PipelineSchedule with 'id'=0")
        end
      end
    end
  end
end

View file

@ -53,6 +53,48 @@ RSpec.describe MergeRequests::DeleteSourceBranchWorker do
worker.perform(merge_request.id, 'new-source-branch-sha', user.id)
end
end
context 'when delete service returns an error' do
let(:service_result) { ServiceResponse.error(message: 'placeholder') }
it 'tracks the exception' do
expect_next_instance_of(::Branches::DeleteService) do |instance|
expect(instance).to receive(:execute).with(merge_request.source_branch).and_return(service_result)
end
expect(service_result).to receive(:track_exception).and_call_original
worker.perform(merge_request.id, sha, user.id)
end
context 'when track_delete_source_errors is disabled' do
before do
stub_feature_flags(track_delete_source_errors: false)
end
it 'does not track the exception' do
expect_next_instance_of(::Branches::DeleteService) do |instance|
expect(instance).to receive(:execute).with(merge_request.source_branch).and_return(service_result)
end
expect(service_result).not_to receive(:track_exception)
worker.perform(merge_request.id, sha, user.id)
end
end
it 'still retargets the merge request' do
expect_next_instance_of(::Branches::DeleteService) do |instance|
expect(instance).to receive(:execute).with(merge_request.source_branch).and_return(service_result)
end
expect_next_instance_of(::MergeRequests::RetargetChainService) do |instance|
expect(instance).to receive(:execute).with(merge_request)
end
worker.perform(merge_request.id, sha, user.id)
end
end
end
it_behaves_like 'an idempotent worker' do