Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-09-14 12:12:34 +00:00
parent 3cb798d80b
commit 16daf112d6
90 changed files with 1829 additions and 267 deletions

View File

@ -81,12 +81,14 @@ export default {
this.infoAlert = createAlert({ message: this.alertInfoMessage, variant: VARIANT_INFO });
// Reset all input fields except the datepicker.
this.form.querySelectorAll('input:not([id$=expires_at])').forEach((el) => {
// The form token creation is not controlled by Vue.
el.checked = false;
// Selectively reset all input fields except for the date picker and submit.
// The form token creation is not controlled by Vue.
this.form.querySelectorAll('input[type=text]:not([id$=expires_at])').forEach((el) => {
el.value = '';
});
this.form.querySelectorAll('input[type=checkbox]').forEach((el) => {
el.checked = false;
});
},
},
};

View File

@ -47,6 +47,7 @@ const Api = {
projectSharePath: '/api/:version/projects/:id/share',
projectMilestonesPath: '/api/:version/projects/:id/milestones',
projectIssuePath: '/api/:version/projects/:id/issues/:issue_iid',
projectCreateIssuePath: '/api/:version/projects/:id/issues',
mergeRequestsPath: '/api/:version/merge_requests',
groupLabelsPath: '/api/:version/groups/:namespace_path/labels',
issuableTemplatePath: '/:namespace_path/:project_path/templates/:type/:key',

View File

@ -38,11 +38,18 @@ export default {
expandDetailsTooltip: __('Expand merge details'),
collapseDetailsTooltip: __('Collapse merge details'),
},
computed: {
wrapperClasses() {
if (this.status === 'merged') return 'gl-bg-blue-50';
if (this.status === 'closed') return 'gl-bg-red-50';
return null;
},
},
};
</script>
<template>
<div class="mr-widget-body media" v-on="$listeners">
<div class="mr-widget-body media" :class="wrapperClasses" v-on="$listeners">
<div v-if="isLoading" class="gl-w-full mr-conflict-loader">
<slot name="loading">
<div class="gl-display-flex">

View File

@ -1,16 +1,14 @@
<script>
import MrWidgetAuthorTime from '../mr_widget_author_time.vue';
import StatusIcon from '../mr_widget_status_icon.vue';
import StateContainer from '../state_container.vue';
export default {
name: 'MRWidgetClosed',
components: {
MrWidgetAuthorTime,
StatusIcon,
StateContainer,
},
props: {
/* TODO: This is providing all store and service down when it
only needs metrics and targetBranch */
mr: {
type: Object,
required: true,
@ -19,15 +17,12 @@ export default {
};
</script>
<template>
<div class="mr-widget-body media">
<status-icon status="closed" />
<div class="media-body">
<mr-widget-author-time
:action-text="s__('mrWidget|Closed by')"
:author="mr.metrics.closedBy"
:date-title="mr.metrics.closedAt"
:date-readable="mr.metrics.readableClosedAt"
/>
</div>
</div>
<state-container :mr="mr" status="closed">
<mr-widget-author-time
:action-text="s__('mrWidget|Closed by')"
:author="mr.metrics.closedBy"
:date-title="mr.metrics.closedAt"
:date-readable="mr.metrics.readableClosedAt"
/>
</state-container>
</template>

View File

@ -2,13 +2,16 @@
module Ci
class PipelineVariable < Ci::ApplicationRecord
include Ci::Partitionable
include Ci::HasVariable
belongs_to :pipeline
partitionable scope: :pipeline
alias_attribute :secret_value, :value
validates :key, presence: true
validates :key, :pipeline, presence: true
def hook_attrs
{ key: key, value: value }

View File

@ -133,6 +133,22 @@ class Commit
def parent_class
::Project
end
def build_from_sidekiq_hash(project, hash)
hash = hash.dup
date_suffix = '_date'
# When processing Sidekiq payloads various timestamps are stored as Strings.
# Commit in turn expects Time-like instances upon input, so we have to
# manually parse these values.
hash.each do |key, value|
if key.to_s.end_with?(date_suffix) && value.is_a?(String)
hash[key] = Time.zone.parse(value)
end
end
from_hash(hash, project)
end
end
attr_accessor :raw
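A minimal usage sketch of the new class method (the payload shape and the `authored_date` key are illustrative; any `*_date` key with a String value is parsed the same way):

```ruby
# Hypothetical Sidekiq-style payload: timestamps arrive as Strings.
hash = { 'id' => 'abc123', 'authored_date' => '2022-09-14 12:12:34 UTC' }

commit = Commit.build_from_sidekiq_hash(project, hash)
commit.authored_date # => a Time-like instance via Time.zone.parse, not a String
```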

View File

@ -179,18 +179,16 @@ module MergeRequests
old_title_draft = MergeRequest.draft?(old_title)
new_title_draft = MergeRequest.draft?(new_title)
# notify the draft status changed. Added/removed message is handled in the
# email template itself, see `change_in_merge_request_draft_status_email` template.
notify_draft_status_changed(merge_request) if old_title_draft || new_title_draft
if !old_title_draft && new_title_draft
# Marked as Draft
#
merge_request_activity_counter
.track_marked_as_draft_action(user: current_user)
merge_request_activity_counter.track_marked_as_draft_action(user: current_user)
elsif old_title_draft && !new_title_draft
# Unmarked as Draft
#
notify_draft_status_changed(merge_request)
merge_request_activity_counter
.track_unmarked_as_draft_action(user: current_user)
merge_request_activity_counter.track_unmarked_as_draft_action(user: current_user)
end
end

View File

@ -65,11 +65,20 @@ module Projects
def build_commit_status
GenericCommitStatus.new(
user: build.user,
stage: 'deploy',
ci_stage: stage,
name: 'pages:deploy'
)
end
# rubocop: disable Performance/ActiveRecordSubtransactionMethods
def stage
build.pipeline.stages.safe_find_or_create_by(name: 'deploy', pipeline_id: build.pipeline.id) do |stage|
stage.position = GenericCommitStatus::EXTERNAL_STAGE_IDX
stage.project = build.project
end
end
# rubocop: enable Performance/ActiveRecordSubtransactionMethods
def create_pages_deployment(artifacts_path, build)
sha256 = build.job_artifacts_archive.file_sha256
File.open(artifacts_path) do |file|

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
module Users
class MigrateRecordsToGhostUserInBatchesService
def initialize
@execution_tracker = Gitlab::Utils::ExecutionTracker.new
end
def execute
Users::GhostUserMigration.find_each do |user_to_migrate|
break if execution_tracker.over_limit?
service = Users::MigrateRecordsToGhostUserService.new(user_to_migrate.user,
user_to_migrate.initiator_user,
execution_tracker)
service.execute(hard_delete: user_to_migrate.hard_delete)
end
rescue Gitlab::Utils::ExecutionTracker::ExecutionTimeOutError
# no-op
end
private
attr_reader :execution_tracker
end
end
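A sketch of how this service is driven; the cron worker added later in this commit wraps the same call in an exclusive lease:

```ruby
# Runs for at most Gitlab::Utils::ExecutionTracker::MAX_RUNTIME. The rescue in
# #execute turns a mid-migration timeout into a clean no-op, so the next
# scheduled run resumes with whatever Users::GhostUserMigration rows remain.
Users::MigrateRecordsToGhostUserInBatchesService.new.execute
```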

View File

@ -14,9 +14,10 @@ module Users
attr_reader :ghost_user, :user, :initiator_user, :hard_delete
def initialize(user, initiator_user)
def initialize(user, initiator_user, execution_tracker)
@user = user
@initiator_user = initiator_user
@execution_tracker = execution_tracker
@ghost_user = User.ghost
end
@ -29,6 +30,8 @@ module Users
private
attr_reader :execution_tracker
def migrate_records
return if hard_delete
@ -98,6 +101,7 @@ module Users
loop do
update_count = base_scope.where(column => user.id).limit(batch_size).update_all(column => ghost_user.id)
break if update_count == 0
raise Gitlab::Utils::ExecutionTracker::ExecutionTimeOutError if execution_tracker.over_limit?
end
end
# rubocop:enable CodeReuse/ActiveRecord

View File

@ -1,2 +1,6 @@
%p= html_escape(_('%{username} changed the draft status of merge request %{mr_link}')) % { username: link_to(@updated_by_user.name, user_url(@updated_by_user)),
mr_link: merge_request_reference_link(@merge_request) }
- if @merge_request.draft?
%p= html_escape(_('%{username} marked merge request %{mr_link} as draft')) % { username: link_to(@updated_by_user.name, user_url(@updated_by_user)),
mr_link: merge_request_reference_link(@merge_request) }
- else
%p= html_escape(_('%{username} marked merge request %{mr_link} as ready')) % { username: link_to(@updated_by_user.name, user_url(@updated_by_user)),
mr_link: merge_request_reference_link(@merge_request) }

View File

@ -1 +1,5 @@
<%= "#{sanitize_name(@updated_by_user.name)} changed the draft status of merge request #{@merge_request.to_reference}" %>
<% if @merge_request.draft? %>
<%= _("#{sanitize_name(@updated_by_user.name)} marked merge request #{@merge_request.to_reference} as draft") %>
<% else %>
<%= _("#{sanitize_name(@updated_by_user.name)} marked merge request #{@merge_request.to_reference} as ready") %>
<% end %>

View File

@ -786,6 +786,15 @@
:weight: 1
:idempotent: false
:tags: []
- :name: cronjob:users_migrate_records_to_ghost_user_in_batches
:worker_name: Users::MigrateRecordsToGhostUserInBatchesWorker
:feature_category: :users
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:x509_issuer_crl_check
:worker_name: X509IssuerCrlCheckWorker
:feature_category: :source_code_management
@ -1056,6 +1065,15 @@
:weight: 1
:idempotent: false
:tags: []
- :name: github_importer:github_import_import_protected_branch
:worker_name: Gitlab::GithubImport::ImportProtectedBranchWorker
:feature_category: :importers
:has_external_dependencies: true
:urgency: :low
:resource_boundary: :cpu
:weight: 1
:idempotent: false
:tags: []
- :name: github_importer:github_import_import_pull_request
:worker_name: Gitlab::GithubImport::ImportPullRequestWorker
:feature_category: :importers
@ -1164,6 +1182,15 @@
:weight: 1
:idempotent: false
:tags: []
- :name: github_importer:github_import_stage_import_protected_branches
:worker_name: Gitlab::GithubImport::Stage::ImportProtectedBranchesWorker
:feature_category: :importers
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: false
:tags: []
- :name: github_importer:github_import_stage_import_pull_requests
:worker_name: Gitlab::GithubImport::Stage::ImportPullRequestsWorker
:feature_category: :importers
@ -2532,6 +2559,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: issues_close
:worker_name: Issues::CloseWorker
:feature_category: :source_code_management
:has_external_dependencies: false
:urgency: :high
:resource_boundary: :unknown
:weight: 2
:idempotent: true
:tags: []
- :name: issues_placement
:worker_name: Issues::PlacementWorker
:feature_category: :team_planning

View File

@ -26,6 +26,7 @@ module Gitlab
issue_events: Stage::ImportIssueEventsWorker,
notes: Stage::ImportNotesWorker,
attachments: Stage::ImportAttachmentsWorker,
protected_branches: Stage::ImportProtectedBranchesWorker,
lfs_objects: Stage::ImportLfsObjectsWorker,
finish: Stage::FinishImportWorker
}.freeze

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
module Gitlab
module GithubImport
class ImportProtectedBranchWorker # rubocop:disable Scalability/IdempotentWorker
include ObjectImporter
worker_resource_boundary :cpu
def representation_class
Gitlab::GithubImport::Representation::ProtectedBranch
end
def importer_class
Importer::ProtectedBranchImporter
end
def object_type
:protected_branch
end
end
end
end

View File

@ -42,7 +42,11 @@ module Gitlab
end
def move_to_next_stage(project, waiters = {})
AdvanceStageWorker.perform_async(project.id, waiters, :lfs_objects)
AdvanceStageWorker.perform_async(
project.id,
waiters,
:protected_branches
)
end
def feature_disabled?(project)

View File

@ -0,0 +1,45 @@
# frozen_string_literal: true
module Gitlab
module GithubImport
module Stage
class ImportProtectedBranchesWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
data_consistency :always
sidekiq_options retry: 3
include GithubImport::Queue
include StageMethods
# client - An instance of Gitlab::GithubImport::Client.
# project - An instance of Project.
def import(client, project)
info(project.id,
message: "starting importer",
importer: 'Importer::ProtectedBranchesImporter')
waiter = Importer::ProtectedBranchesImporter
.new(project, client)
.execute
project.import_state.refresh_jid_expiration
AdvanceStageWorker.perform_async(
project.id,
{ waiter.key => waiter.jobs_remaining },
:lfs_objects
)
rescue StandardError => e
Gitlab::Import::ImportFailureService.track(
project_id: project.id,
error_source: self.class.name,
exception: e,
metrics: true
)
raise(e)
end
end
end
end
end

View File

@ -0,0 +1,50 @@
# frozen_string_literal: true
module Issues
class CloseWorker
include ApplicationWorker
data_consistency :always
sidekiq_options retry: 3
idempotent!
deduplicate :until_executed, including_scheduled: true
feature_category :source_code_management
urgency :high
weight 2
def perform(project_id, issue_id, issue_type, params = {})
project = Project.find_by_id(project_id)
unless project
logger.info(structured_payload(message: "Project not found.", project_id: project_id))
return
end
issue = case issue_type
when "ExternalIssue"
ExternalIssue.new(issue_id, project)
else
Issue.find_by_id(issue_id)
end
unless issue
logger.info(structured_payload(message: "Issue not found.", issue_id: issue_id))
return
end
author = User.find_by_id(params["closed_by"])
unless author
logger.info(structured_payload(message: "User not found.", user_id: params["closed_by"]))
return
end
commit = Commit.build_from_sidekiq_hash(project, params["commit_hash"])
service = Issues::CloseService.new(project: project, current_user: author)
service.execute(issue, commit: commit)
end
end
end
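For reference, a sketch of how this worker gets enqueued; the argument shapes mirror the `ProcessCommitWorker` call site later in this diff (Sidekiq stringifies hash keys in transit, which is why `perform` reads `params["closed_by"]`):

```ruby
Issues::CloseWorker.perform_async(
  project.id,
  issue.id,
  issue.class.to_s, # "Issue" or "ExternalIssue"
  { closed_by: author.id, commit_hash: commit.to_hash }
)
```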

View File

@ -34,7 +34,7 @@ class ProcessCommitWorker
return unless user
commit = build_commit(project, commit_hash)
commit = Commit.build_from_sidekiq_hash(project, commit_hash)
author = commit.author || user
process_commit_message(project, commit, user, author, default)
@ -51,12 +51,22 @@ class ProcessCommitWorker
end
def close_issues(project, user, author, commit, issues)
# We don't want to run permission related queries for every single issue,
# therefore we use IssueCollection here and skip the authorization check in
# Issues::CloseService#execute.
IssueCollection.new(issues).updatable_by_user(user).each do |issue|
Issues::CloseService.new(project: project, current_user: author)
.close_issue(issue, closed_via: commit)
if Feature.enabled?(:process_issue_closure_in_background, project)
Issues::CloseWorker.bulk_perform_async_with_contexts(
issues,
arguments_proc: -> (issue) {
[project.id, issue.id, issue.class.to_s, { closed_by: author.id, commit_hash: commit.to_hash }]
},
context_proc: -> (issue) { { project: project } }
)
else
# We don't want to run permission related queries for every single issue,
# therefore we use IssueCollection here and skip the authorization check in
# Issues::CloseService#execute.
IssueCollection.new(issues).updatable_by_user(user).each do |issue|
Issues::CloseService.new(project: project, current_user: author)
.close_issue(issue, closed_via: commit)
end
end
end
@ -75,19 +85,4 @@ class ProcessCommitWorker
.with_first_mention_not_earlier_than(commit.committed_date)
.update_all(first_mentioned_in_commit_at: commit.committed_date)
end
def build_commit(project, hash)
date_suffix = '_date'
# When processing Sidekiq payloads various timestamps are stored as Strings.
# Commit in turn expects Time-like instances upon input, so we have to
# manually parse these values.
hash.each do |key, value|
if key.to_s.end_with?(date_suffix) && value.is_a?(String)
hash[key] = Time.zone.parse(value)
end
end
Commit.from_hash(hash, project)
end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
module Users
class MigrateRecordsToGhostUserInBatchesWorker
include ApplicationWorker
include Gitlab::ExclusiveLeaseHelpers
include CronjobQueue # rubocop: disable Scalability/CronWorkerContext
sidekiq_options retry: false
feature_category :users
data_consistency :always
idempotent!
def perform
return unless Feature.enabled?(:user_destroy_with_limited_execution_time_worker)
in_lock(self.class.name.underscore, ttl: Gitlab::Utils::ExecutionTracker::MAX_RUNTIME, retries: 0) do
Users::MigrateRecordsToGhostUserInBatchesService.new.execute
end
end
end
end

View File

@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/367683
milestone: '15.3'
type: development
group: group::workspace
default_enabled: false
default_enabled: true

View File

@ -0,0 +1,8 @@
---
name: process_issue_closure_in_background
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/94981
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/371024
milestone: '15.4'
type: development
group: group::source code
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: user_destroy_with_limited_execution_time_worker
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97141
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/373138
milestone: '15.4'
type: development
group: group::authentication and authorization
default_enabled: false

View File

@ -632,6 +632,9 @@ Settings.cron_jobs['loose_foreign_keys_cleanup_worker']['job_class'] = 'LooseFor
Settings.cron_jobs['ci_runner_versions_reconciliation_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ci_runner_versions_reconciliation_worker']['cron'] ||= '@daily'
Settings.cron_jobs['ci_runner_versions_reconciliation_worker']['job_class'] = 'Ci::Runners::ReconcileExistingRunnerVersionsCronWorker'
Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker']['job_class'] = 'Users::MigrateRecordsToGhostUserInBatchesWorker'
Gitlab.ee do
Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker'] ||= Settingslogic.new({})
@ -787,7 +790,7 @@ end
#
Settings['sidekiq'] ||= Settingslogic.new({})
Settings['sidekiq']['log_format'] ||= 'default'
Settings['sidekiq']['routing_rules'] ||= []
Settings['sidekiq']['routing_rules'] = Settings.__send__(:build_sidekiq_routing_rules, Settings['sidekiq']['routing_rules'])
#
# GitLab Shell

View File

@ -0,0 +1,24 @@
---
key_path: usage_activity_by_stage_monthly.manage.count_user_auth
description: Number of unique user logins
product_section: dev
product_stage: manage
product_group: authentication_and_authorization
product_category: system_access
value_type: number
status: active
milestone: "15.4"
introduced_by_url: "https://gitlab.com/gitlab-org/gitlab/-/merge_requests/96321"
time_frame: 28d
data_source: database
instrumentation_class: CountUserAuthMetric
data_category: optional
performance_indicator_type:
- smau
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate

View File

@ -13,8 +13,7 @@ time_frame: all
data_source: database
instrumentation_class: CountUserAuthMetric
data_category: optional
performance_indicator_type:
- smau
performance_indicator_type: []
distribution:
- ce
- ee

View File

@ -204,5 +204,11 @@ class Settings < Settingslogic
"#{minute} #{hour} * * #{day_of_week}"
end
# Route all jobs to the 'default' queue. This setting is meant for self-managed instances, to keep things simple.
# See https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1491
def build_sidekiq_routing_rules(rules)
rules.nil? || rules&.empty? ? [['*', 'default']] : rules
end
end
end
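The behaviour this pins down, as exercised by the new spec further below (the method is private, hence `send`):

```ruby
Settings.send(:build_sidekiq_routing_rules, nil) # => [['*', 'default']]
Settings.send(:build_sidekiq_routing_rules, [])  # => [['*', 'default']]
Settings.send(:build_sidekiq_routing_rules, [['name=foobar', 'foobar']])
# => [['name=foobar', 'foobar']] -- explicit rules are kept as-is
```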

View File

@ -261,6 +261,8 @@
- 1
- - issuables_clear_groups_issue_counter
- 1
- - issues_close
- 2
- - issues_placement
- 2
- - issues_rebalancing

View File

@ -154,6 +154,8 @@ class Gitlab::Seeder::CycleAnalytics
@developers << user
end
AuthorizedProjectUpdate::ProjectRecalculateService.new(project).execute
end
def create_new_vsm_project

View File

@ -24,6 +24,8 @@ Sidekiq::Testing.inline! do
# the `after_commit` queue to ensure the job is run now.
fork_project.send(:_run_after_commit_queue)
fork_project.import_state.send(:_run_after_commit_queue)
# We should also force-run the project authorizations refresh job for the created project.
AuthorizedProjectUpdate::ProjectRecalculateService.new(fork_project).execute
# Expire repository cache after import to ensure
# valid_repo? call below returns a correct answer

View File

@ -1,7 +1,10 @@
# frozen_string_literal: true
response = Sidekiq::Worker.skipping_transaction_check do
::Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService.new.execute
result = ::Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService.new.execute
AuthorizedProjectUpdate::ProjectRecalculateService.new(result[:project]).execute
result
end
if response[:status] == :success

View File

@ -59,12 +59,12 @@ METHOD /endpoint
Supported attributes:
| Attribute | Type | Required | Description |
|:-------------------------|:---------|:-----------------------|:----------------------|
| `attribute` | datatype | **{check-circle}** Yes | Detailed description. |
| `attribute` **(<tier>)** | datatype | **{dotted-circle}** No | Detailed description. |
| `attribute` | datatype | **{dotted-circle}** No | Detailed description. |
| `attribute` | datatype | **{dotted-circle}** No | Detailed description. |
| Attribute | Type | Required | Description |
|:-------------------------|:---------|:---------|:----------------------|
| `attribute` | datatype | Yes | Detailed description. |
| `attribute` **(<tier>)** | datatype | No | Detailed description. |
| `attribute` | datatype | No | Detailed description. |
| `attribute` | datatype | No | Detailed description. |
If successful, returns [`<status_code>`](../../api/index.md#status-codes) and the following
response attributes:
@ -123,9 +123,9 @@ To deprecate an attribute:
1. Add inline deprecation text to the description.
```markdown
| Attribute | Type | Required | Description |
|:--------------|:-------|:-----------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------|
| `widget_name` | string | **{dotted-circle}** No | [Deprecated](<link-to-issue>) in GitLab 14.7 and is planned for removal in 15.4. Use `widget_id` instead. The name of the widget. |
| Attribute | Type | Required | Description |
|:--------------|:-------|:---------|:----------------------------------------------------------------------------------------------------------------------------------------------------|
| `widget_name` | string | No | [Deprecated](<link-to-issue>) in GitLab 14.7 and is planned for removal in 15.4. Use `widget_id` instead. The name of the widget. |
```
To widely announce a deprecation, or if it's a breaking change,
@ -139,20 +139,20 @@ always be in code blocks using backticks (`` ` ``).
Sort the table by required attributes first, then alphabetically.
```markdown
| Attribute | Type | Required | Description |
|:-----------------------------|:--------------|:-----------------------|:----------------------------------------------------|
| `title` | string | **{check-circle}** Yes | Title of the issue. |
| `assignee_ids` **(PREMIUM)** | integer array | **{dotted-circle}** No | IDs of the users to assign the issue to. |
| `confidential` | boolean | **{dotted-circle}** No | Sets the issue to confidential. Default is `false`. |
| Attribute | Type | Required | Description |
|:-----------------------------|:--------------|:---------|:----------------------------------------------------|
| `title` | string | Yes | Title of the issue. |
| `assignee_ids` **(PREMIUM)** | integer array | No | IDs of the users to assign the issue to. |
| `confidential` | boolean | No | Sets the issue to confidential. Default is `false`. |
```
Rendered example:
| Attribute | Type | Required | Description |
|:-----------------------------|:--------------|:-----------------------|:----------------------------------------------------|
| `title` | string | **{check-circle}** Yes | Title of the issue. |
| `assignee_ids` **(PREMIUM)** | integer array | **{dotted-circle}** No | IDs of the users to assign the issue to. |
| `confidential` | boolean | **{dotted-circle}** No | Sets the issue to confidential. Default is `false`. |
| Attribute | Type | Required | Description |
|:-----------------------------|:--------------|:---------|:----------------------------------------------------|
| `title` | string | Yes | Title of the issue. |
| `assignee_ids` **(PREMIUM)** | integer array | No | IDs of the users to assign the issue to. |
| `confidential` | boolean | No | Sets the issue to confidential. Default is `false`. |
For information about writing attribute descriptions, see the [GraphQL API description style guide](../api_graphql_styleguide.md#description-style-guide).

View File

@ -137,7 +137,16 @@ Each job:
1. Downloads the attachment.
1. Replaces the old link with a newly-generated link to GitLab.
### 11. Stage::FinishImportWorker
### 11. Stage::ImportProtectedBranchesWorker
This worker imports protected branch rules.
For every protected branch rule that exists on GitHub, we schedule a
`Gitlab::GithubImport::ImportProtectedBranchWorker` job.
Each job compares the branch protection rules from GitHub and GitLab and applies
the strictest of the rules to the branches in GitLab.
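Schematically, the stage fans out one job per protected branch. This is a sketch only: the real code goes through `ParallelScheduling`, and `waiter_key` here stands in for the `Gitlab::JobWaiter` key:

```ruby
branches = client.branches(project.import_source)
branches.select { |branch| branch.protection.enabled }.each do |branch|
  rule = client.branch_protection(project.import_source, branch.name)
  representation = Gitlab::GithubImport::Representation::ProtectedBranch
    .from_api_response(rule)
  Gitlab::GithubImport::ImportProtectedBranchWorker
    .perform_async(project.id, representation.to_hash, waiter_key)
end
```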
### 12. Stage::FinishImportWorker
This worker completes the import process by performing some housekeeping
(such as flushing any caches) and by marking the import as completed.

View File

@ -68,7 +68,7 @@ merge requests to be deployed to a production environment. This chart is availab
- For time periods in which no merge requests were deployed, the charts render a
red, dashed line.
lead time for changes is one of the four [DORA metrics](index.md#devops-research-and-assessment-dora-key-metrics) that DevOps teams use for measuring excellence in software delivery.
Lead time for changes is one of the four [DORA metrics](index.md#devops-research-and-assessment-dora-key-metrics) that DevOps teams use for measuring excellence in software delivery.
To view the lead time for changes chart:

View File

@ -171,6 +171,7 @@ The following items of a project are imported:
- Repository description.
- Git repository data.
- Branch protection rules. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/22650) in GitLab 15.4.
- Issues.
- Pull requests.
- Wiki pages.

View File

@ -0,0 +1,122 @@
# frozen_string_literal: true
module Gitlab
module Audit
module Type
class Definition
include ActiveModel::Validations
attr_reader :path
attr_reader :attributes
validate :validate_schema
validate :validate_file_name
InvalidAuditEventTypeError = Class.new(StandardError)
AUDIT_EVENT_TYPE_SCHEMA_PATH = Rails.root.join('config', 'audit_events', 'types', 'type_schema.json')
AUDIT_EVENT_TYPE_SCHEMA = JSONSchemer.schema(AUDIT_EVENT_TYPE_SCHEMA_PATH)
# PARAMS mirrors the properties defined in config/audit_events/types/type_schema.json
PARAMS = %i[
name
description
introduced_by_issue
introduced_by_mr
group
milestone
saved_to_database
streamed
].freeze
PARAMS.each do |param|
define_method(param) do
attributes[param]
end
end
def initialize(path, opts = {})
@path = path
@attributes = {}
# assign nil for all unknown opts
PARAMS.each do |param|
@attributes[param] = opts[param]
end
end
def key
name.to_sym
end
private
def validate_schema
schema_errors = AUDIT_EVENT_TYPE_SCHEMA
.validate(attributes.to_h.deep_stringify_keys)
.map { |error| JSONSchemer::Errors.pretty(error) }
errors.add(:base, schema_errors) if schema_errors.present?
end
def validate_file_name
# ignoring Style/GuardClause because if we move this into one line, we cause Layout/LineLength errors
# rubocop:disable Style/GuardClause
unless File.basename(path, ".yml") == name
errors.add(:base, "Audit event type '#{name}' has an invalid path: '#{path}'. " \
"'#{name}' must match the filename")
end
# rubocop:enable Style/GuardClause
end
class << self
def paths
@paths ||= [Rails.root.join('config', 'audit_events', 'types', '*.yml')]
end
def definitions
# We lazily load all definitions
@definitions ||= load_all!
end
def get(key)
definitions[key.to_sym]
end
private
def load_all!
paths.each_with_object({}) do |glob_path, definitions|
load_all_from_path!(definitions, glob_path)
end
end
def load_all_from_path!(definitions, glob_path)
Dir.glob(glob_path).each do |path|
definition = load_from_file(path)
if previous = definitions[definition.key]
raise InvalidAuditEventTypeError, "Audit event type '#{definition.key}' " \
"is already defined in '#{previous.path}'"
end
definitions[definition.key] = definition
end
end
def load_from_file(path)
definition = File.read(path)
definition = YAML.safe_load(definition)
definition.deep_symbolize_keys!
new(path, definition).tap(&:validate!)
rescue StandardError => e
raise InvalidAuditEventTypeError, "Invalid definition for `#{path}`: #{e.message}"
end
end
end
end
end
end
Gitlab::Audit::Type::Definition.prepend_mod

View File

@ -99,6 +99,14 @@ module Gitlab
each_object(:releases, *args)
end
def branches(*args)
each_object(:branches, *args)
end
def branch_protection(repo_name, branch_name)
with_rate_limit { octokit.branch_protection(repo_name, branch_name) }
end
# Fetches data from the GitHub API and yields a Page object for every page
# of data, without loading all of them into memory.
#

View File

@ -0,0 +1,48 @@
# frozen_string_literal: true
module Gitlab
module GithubImport
module Importer
class ProtectedBranchImporter
attr_reader :protected_branch, :project, :client
# protected_branch - An instance of
# `Gitlab::GithubImport::Representation::ProtectedBranch`.
# project - An instance of `Project`
# client - An instance of `Gitlab::GithubImport::Client`
def initialize(protected_branch, project, client)
@protected_branch = protected_branch
@project = project
@client = client
end
def execute
# The creator of the project is always allowed to create protected
# branches, so we skip the authorization check in this service class.
ProtectedBranches::CreateService
.new(project, project.creator, params)
.execute(skip_authorization: true)
end
private
def params
{
name: protected_branch.id,
push_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
merge_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
allow_force_push: allow_force_push?
}
end
def allow_force_push?
if ProtectedBranch.protected?(project, protected_branch.id)
ProtectedBranch.allow_force_push?(project, protected_branch.id) && protected_branch.allow_force_pushes
else
protected_branch.allow_force_pushes
end
end
end
end
end
end
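When the branch is already protected on GitLab, the `allow_force_push?` merge reduces to a logical AND, so the stricter side always wins; when it is not protected, the GitHub value is used directly. A sketch of the outcomes, matching the importer spec later in this diff:

```ruby
# GitHub allows force push | GitLab allows force push | imported value
#          true            |          false           |     false
#          false           |          true            |     false
#          true            |          true            |     true
# Branch not protected on GitLab: imported value == GitHub value.
```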

View File

@ -0,0 +1,52 @@
# frozen_string_literal: true
module Gitlab
module GithubImport
module Importer
class ProtectedBranchesImporter
include ParallelScheduling
# The method that will be called for traversing through all the objects to
# import, yielding them to the supplied block.
def each_object_to_import
repo = project.import_source
protected_branches = client.branches(repo).select { |branch| branch.protection.enabled }
protected_branches.each do |protected_branch|
object = client.branch_protection(repo, protected_branch.name)
next if object.nil? || already_imported?(object)
yield object
Gitlab::GithubImport::ObjectCounter.increment(project, object_type, :fetched)
mark_as_imported(object)
end
end
def importer_class
ProtectedBranchImporter
end
def representation_class
Gitlab::GithubImport::Representation::ProtectedBranch
end
def sidekiq_worker_class
ImportProtectedBranchWorker
end
def object_type
:protected_branch
end
def collection_method
:protected_branches
end
def id_for_already_imported_cache(protected_branch)
protected_branch.name
end
end
end
end
end

View File

@ -0,0 +1,46 @@
# frozen_string_literal: true
module Gitlab
module GithubImport
module Representation
class ProtectedBranch
include ToHash
include ExposeAttribute
attr_reader :attributes
expose_attribute :id, :allow_force_pushes
# Builds a Branch Protection info from a GitHub API response.
# Resource structure details:
# https://docs.github.com/en/rest/branches/branch-protection#get-branch-protection
# branch_protection - An instance of `Sawyer::Resource` containing the protection details.
def self.from_api_response(branch_protection, _additional_object_data = {})
branch_name = branch_protection.url.match(%r{/branches/(\S{1,255})/protection$})[1]
hash = {
id: branch_name,
allow_force_pushes: branch_protection.allow_force_pushes.enabled
}
new(hash)
end
# Builds a new Protection using a Hash that was built from a JSON payload.
def self.from_json_hash(raw_hash)
new(Representation.symbolize_hash(raw_hash))
end
# attributes - A Hash containing the raw Protection details. The keys of this
# Hash (and any nested hashes) must be symbols.
def initialize(attributes)
@attributes = attributes
end
def github_identifiers
{ id: id }
end
end
end
end
end
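A sketch of the branch-name extraction in `.from_api_response`, using the URL shape from the importer spec below:

```ruby
url = 'https://example.com/branches/main/protection'
url.match(%r{/branches/(\S{1,255})/protection$})[1] # => "main"
```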

View File

@ -16,6 +16,7 @@ module Gitlab
].freeze
PARALLEL_IMPORTERS = [
Importer::ProtectedBranchesImporter,
Importer::PullRequestsImporter,
Importer::IssuesImporter,
Importer::DiffNotesImporter,

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
module Gitlab
module Utils
class ExecutionTracker
MAX_RUNTIME = 30.seconds
ExecutionTimeOutError = Class.new(StandardError)
delegate :monotonic_time, to: :'Gitlab::Metrics::System'
def initialize
@start_time = monotonic_time
end
def over_limit?
monotonic_time - start_time >= MAX_RUNTIME
end
private
attr_reader :start_time
end
end
end
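A minimal usage sketch: the limit is 30 seconds of monotonic time, so wall-clock adjustments don't affect it:

```ruby
tracker = Gitlab::Utils::ExecutionTracker.new
tracker.over_limit? # => false right after creation
sleep 31            # illustrative only
tracker.over_limit? # => true once MAX_RUNTIME has elapsed
```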

View File

@ -1153,10 +1153,13 @@ msgstr ""
msgid "%{user_name} profile page"
msgstr ""
msgid "%{username} changed the draft status of merge request %{mr_link}"
msgid "%{username} has asked for a GitLab account on your instance %{host}:"
msgstr ""
msgid "%{username} has asked for a GitLab account on your instance %{host}:"
msgid "%{username} marked merge request %{mr_link} as draft"
msgstr ""
msgid "%{username} marked merge request %{mr_link} as ready"
msgstr ""
msgid "%{username}'s avatar"

View File

@ -150,4 +150,18 @@ RSpec.describe Settings do
expect(Settings.encrypted('tmp/tests/test.enc').read).to be_empty
end
end
describe '.build_sidekiq_routing_rules' do
[
[nil, [['*', 'default']]],
[[], [['*', 'default']]],
[[['name=foobar', 'foobar']], [['name=foobar', 'foobar']]]
].each do |input_rules, output_rules|
context "Given input routing_rules #{input_rules}" do
it "returns output routing_rules #{output_rules}" do
expect(described_class.send(:build_sidekiq_routing_rules, input_rules)).to eq(output_rules)
end
end
end
end
end

View File

@ -11,8 +11,9 @@ RSpec.shared_examples "no multiple viewers" do |commit_ref|
end
RSpec.describe 'Multiple view Diffs', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :repository, visibility_level: Gitlab::VisibilityLevel::PUBLIC) }
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, visibility_level: Gitlab::VisibilityLevel::PUBLIC) }
let(:ref) { '5d6ed1503801ca9dc28e95eeb85a7cf863527aee' }
let(:path) { project_commit_path(project, ref) }
let(:feature_flag_on) { false }

View File

@ -41,7 +41,7 @@ describe('~/access_tokens/components/new_access_token_app', () => {
<input type="text" id="expires_at" value="2022-01-01"/>
<input type="text" value='1'/>
<input type="checkbox" checked/>
<input type="submit"/>
<input type="submit" value="Create"/>
</form>`,
);
@ -102,15 +102,29 @@ describe('~/access_tokens/components/new_access_token_app', () => {
});
});
it('should reset all input fields except the date', async () => {
expect(document.querySelector('input[type=text][id$=expires_at]').value).toBe('2022-01-01');
expect(document.querySelector('input[type=text]:not([id$=expires_at])').value).toBe('1');
expect(document.querySelector('input[type=checkbox]').checked).toBe(true);
await triggerSuccess();
describe('when resetting the form', () => {
it('should reset selectively some input fields', async () => {
expect(document.querySelector('input[type=text]:not([id$=expires_at])').value).toBe('1');
expect(document.querySelector('input[type=checkbox]').checked).toBe(true);
await triggerSuccess();
expect(document.querySelector('input[type=text][id$=expires_at]').value).toBe('2022-01-01');
expect(document.querySelector('input[type=text]:not([id$=expires_at])').value).toBe('');
expect(document.querySelector('input[type=checkbox]').checked).toBe(false);
expect(document.querySelector('input[type=text]:not([id$=expires_at])').value).toBe('');
expect(document.querySelector('input[type=checkbox]').checked).toBe(false);
});
it('should not reset the date field', async () => {
expect(document.querySelector('input[type=text][id$=expires_at]').value).toBe('2022-01-01');
await triggerSuccess();
expect(document.querySelector('input[type=text][id$=expires_at]').value).toBe('2022-01-01');
});
it('should not reset the submit button value', async () => {
expect(document.querySelector('input[type=submit]').value).toBe('Create');
await triggerSuccess();
expect(document.querySelector('input[type=submit]').value).toBe('Create');
});
});
});

View File

@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import closedComponent from '~/vue_merge_request_widget/components/states/mr_widget_closed.vue';
import MrWidgetAuthorTime from '~/vue_merge_request_widget/components/mr_widget_author_time.vue';
import StatusIcon from '~/vue_merge_request_widget/components/mr_widget_status_icon.vue';
import StateContainer from '~/vue_merge_request_widget/components/state_container.vue';
const MOCK_DATA = {
metrics: {
@ -38,8 +38,8 @@ describe('MRWidgetClosed', () => {
});
it('renders closed icon', () => {
expect(wrapper.findComponent(StatusIcon).exists()).toBe(true);
expect(wrapper.findComponent(StatusIcon).props().status).toBe('closed');
expect(wrapper.findComponent(StateContainer).exists()).toBe(true);
expect(wrapper.findComponent(StateContainer).props().status).toBe('closed');
});
it('renders mr widget author time', () => {

View File

@ -0,0 +1,219 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Audit::Type::Definition do
let(:attributes) do
{ name: 'group_deploy_token_destroyed',
description: 'Group deploy token is deleted',
introduced_by_issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/1',
introduced_by_mr: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1',
group: 'govern::compliance',
milestone: '15.4',
saved_to_database: true,
streamed: true }
end
let(:path) { File.join('types', 'group_deploy_token_destroyed.yml') }
let(:definition) { described_class.new(path, attributes) }
let(:yaml_content) { attributes.deep_stringify_keys.to_yaml }
describe '#key' do
subject { definition.key }
it 'returns a symbol from name' do
is_expected.to eq(:group_deploy_token_destroyed)
end
end
describe '#validate!', :aggregate_failures do
using RSpec::Parameterized::TableSyntax
# rubocop:disable Layout/LineLength
where(:param, :value, :result) do
:path | 'audit_event/types/invalid.yml' | /Audit event type 'group_deploy_token_destroyed' has an invalid path/
:name | nil | %r{property '/name' is not of type: string}
:description | nil | %r{property '/description' is not of type: string}
:introduced_by_issue | nil | %r{property '/introduced_by_issue' is not of type: string}
:introduced_by_mr | nil | %r{property '/introduced_by_mr' is not of type: string}
:group | nil | %r{property '/group' is not of type: string}
:milestone | nil | %r{property '/milestone' is not of type: string}
end
# rubocop:enable Layout/LineLength
with_them do
let(:params) { attributes.merge(path: path) }
before do
params[param] = value
end
it do
expect do
described_class.new(
params[:path], params.except(:path)
).validate!
end.to raise_error(result)
end
end
context 'when both saved_to_database and streamed are false' do
let(:params) { attributes.merge({ path: path, saved_to_database: false, streamed: false }) }
it 'raises an exception' do
expect do
described_class.new(
params[:path], params.except(:path)
).validate!
end.to raise_error(/root is invalid: error_type=not/)
end
end
end
describe '.paths' do
it 'returns at least one path' do
expect(described_class.paths).not_to be_empty
end
end
describe '.get' do
before do
allow(described_class).to receive(:definitions) do
{ definition.key => definition }
end
end
context 'when audit event type is not defined' do
let(:undefined_audit_event_type) { 'undefined_audit_event_type' }
it 'returns nil' do
expect(described_class.get(undefined_audit_event_type)).to be nil
end
end
context 'when audit event type is defined' do
let(:audit_event_type) { 'group_deploy_token_destroyed' }
it 'returns an instance of Gitlab::Audit::Type::Definition' do
expect(described_class.get(audit_event_type)).to be_an_instance_of(described_class)
end
it 'returns the properties as defined for that audit event type', :aggregate_failures do
audit_event_type_definition = described_class.get(audit_event_type)
expect(audit_event_type_definition.name).to eq "group_deploy_token_destroyed"
expect(audit_event_type_definition.description).to eq "Group deploy token is deleted"
expect(audit_event_type_definition.group).to eq "govern::compliance"
expect(audit_event_type_definition.milestone).to eq "15.4"
expect(audit_event_type_definition.saved_to_database).to be true
expect(audit_event_type_definition.streamed).to be true
end
end
end
describe '.load_from_file' do
it 'properly loads a definition from file' do
expect_file_read(path, content: yaml_content)
expect(described_class.send(:load_from_file, path).attributes)
.to eq(definition.attributes)
end
context 'for missing file' do
let(:path) { 'missing/audit_events/type/file.yml' }
it 'raises exception' do
expect do
described_class.send(:load_from_file, path)
end.to raise_error(/Invalid definition for/)
end
end
context 'for invalid definition' do
it 'raises exception' do
expect_file_read(path, content: '{}')
expect do
described_class.send(:load_from_file, path)
end.to raise_error(%r{property '/name' is not of type: string})
end
end
end
describe '.load_all!' do
let(:store1) { Dir.mktmpdir('path1') }
let(:store2) { Dir.mktmpdir('path2') }
let(:definitions) { {} }
before do
allow(described_class).to receive(:paths).and_return(
[
File.join(store1, '**', '*.yml'),
File.join(store2, '**', '*.yml')
]
)
end
subject { described_class.send(:load_all!) }
after do
FileUtils.rm_rf(store1)
FileUtils.rm_rf(store2)
end
it "when there are no audit event types a list of definitions is empty" do
is_expected.to be_empty
end
it "when there's a single audit event type it properly loads them" do
write_audit_event_type(store1, path, yaml_content)
is_expected.to be_one
end
it "when the same audit event type is stored multiple times raises exception" do
write_audit_event_type(store1, path, yaml_content)
write_audit_event_type(store2, path, yaml_content)
expect { subject }
.to raise_error(/Audit event type 'group_deploy_token_destroyed' is already defined/)
end
it "when one of the YAMLs is invalid it does raise exception" do
write_audit_event_type(store1, path, '{}')
expect { subject }.to raise_error(/Invalid definition for .* '' must match the filename/)
end
end
describe '.definitions' do
let(:store1) { Dir.mktmpdir('path1') }
before do
allow(described_class).to receive(:paths).and_return(
[
File.join(store1, '**', '*.yml')
]
)
end
subject { described_class.definitions }
after do
FileUtils.rm_rf(store1)
end
it "loads the definitions for all the audit event types" do
write_audit_event_type(store1, path, yaml_content)
is_expected.to be_one
end
end
def write_audit_event_type(store, path, content)
path = File.join(store, path)
dir = File.dirname(path)
FileUtils.mkdir_p(dir)
File.write(path, content)
end
end

View File

@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
include RepoHelpers
let(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let(:commit) { project.commit("5d6ed1503801ca9dc28e95eeb85a7cf863527aee") }
let(:diffs) { commit.raw_diffs.to_a }
let(:diff) { diffs.first }

View File

@ -2,17 +2,19 @@
require "spec_helper"
RSpec.describe Gitlab::Git::CommitStats, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
let(:commit) { Gitlab::Git::Commit.find(repository, SeedRepo::Commit::ID) }
RSpec.describe Gitlab::Git::CommitStats do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:repository) { project.repository.raw }
let(:commit) { Gitlab::Git::Commit.find(repository, TestEnv::BRANCH_SHA['feature']) }
def verify_stats!
stats = described_class.new(repository, commit)
expect(stats).to have_attributes(
additions: eq(11),
deletions: eq(6),
total: eq(17)
additions: eq(5),
deletions: eq(0),
total: eq(5)
)
end
@ -21,7 +23,7 @@ RSpec.describe Gitlab::Git::CommitStats, :seed_helper do
verify_stats!
expect(Rails.cache.fetch("commit_stats:group/project:#{commit.id}")).to eq([11, 6])
expect(Rails.cache.fetch("commit_stats:#{repository.gl_project_path}:#{commit.id}")).to eq([5, 0])
expect(repository.gitaly_commit_client).not_to receive(:commit_stats)

View File

@ -2,8 +2,9 @@
require "spec_helper"
RSpec.describe Gitlab::Git::Compare, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
RSpec.describe Gitlab::Git::Compare do
let_it_be(:repository) { create(:project, :repository).repository.raw }
let(:compare) { Gitlab::Git::Compare.new(repository, SeedRepo::BigCommit::ID, SeedRepo::Commit::ID, straight: false) }
let(:compare_straight) { Gitlab::Git::Compare.new(repository, SeedRepo::BigCommit::ID, SeedRepo::Commit::ID, straight: true) }

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
RSpec.describe Gitlab::Git::DiffCollection do
before do
stub_const('MutatingConstantIterator', Class.new)

View File

@ -4,7 +4,7 @@ require 'spec_helper'
require 'json'
require 'tempfile'
RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
RSpec.describe Gitlab::Git::RuggedImpl::UseRugged do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:feature_flag_name) { wrapper.rugged_feature_keys.first }

View File

@ -98,6 +98,30 @@ RSpec.describe Gitlab::GithubImport::Client do
end
end
describe '#branches' do
it 'returns the branches' do
client = described_class.new('foo')
expect(client)
.to receive(:each_object)
.with(:branches, 'foo/bar')
client.branches('foo/bar')
end
end
describe '#branch_protection' do
it 'returns the protection details for the given branch' do
client = described_class.new('foo')
expect(client.octokit)
.to receive(:branch_protection).with('org/repo', 'bar')
expect(client).to receive(:with_rate_limit).and_yield
client.branch_protection('org/repo', 'bar')
end
end
describe '#each_page' do
let(:client) { described_class.new('foo') }
let(:object1) { double(:object1) }

View File

@ -0,0 +1,91 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
subject(:importer) { described_class.new(github_protected_branch, project, client) }
let(:allow_force_pushes_on_github) { true }
let(:github_protected_branch) do
Gitlab::GithubImport::Representation::ProtectedBranch.new(
id: 'protection',
allow_force_pushes: allow_force_pushes_on_github
)
end
let(:project) { create(:project, :repository) }
let(:client) { instance_double('Gitlab::GithubImport::Client') }
describe '#execute' do
let(:create_service) { instance_double('ProtectedBranches::CreateService') }
shared_examples 'create branch protection by the strictest ruleset' do
let(:expected_ruleset) do
{
name: 'protection',
push_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
merge_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
allow_force_push: expected_allow_force_push
}
end
it 'calls service with the correct arguments' do
expect(ProtectedBranches::CreateService).to receive(:new).with(
project,
project.creator,
expected_ruleset
).and_return(create_service)
expect(create_service).to receive(:execute).with(skip_authorization: true)
importer.execute
end
it 'creates protected branch and access levels for given github rule' do
expect { importer.execute }.to change(ProtectedBranch, :count).by(1)
.and change(ProtectedBranch::PushAccessLevel, :count).by(1)
.and change(ProtectedBranch::MergeAccessLevel, :count).by(1)
end
end
context 'when branch is protected on GitLab' do
before do
create(
:protected_branch,
project: project,
name: 'protect*',
allow_force_push: allow_force_pushes_on_gitlab
)
end
context 'when branch protection rule on Gitlab is stricter than on Github' do
let(:allow_force_pushes_on_github) { true }
let(:allow_force_pushes_on_gitlab) { false }
let(:expected_allow_force_push) { false }
it_behaves_like 'create branch protection by the strictest ruleset'
end
context 'when branch protection rule on Github is stricter than on Gitlab' do
let(:allow_force_pushes_on_github) { false }
let(:allow_force_pushes_on_gitlab) { true }
let(:expected_allow_force_push) { false }
it_behaves_like 'create branch protection by the strictest ruleset'
end
context 'when branch protection rules on Github and Gitlab are the same' do
let(:allow_force_pushes_on_github) { true }
let(:allow_force_pushes_on_gitlab) { true }
let(:expected_allow_force_push) { true }
it_behaves_like 'create branch protection by the strictest ruleset'
end
end
context 'when branch is not protected on GitLab' do
let(:expected_allow_force_push) { true }
it_behaves_like 'create branch protection by the strictest ruleset'
end
end
end

View File

@ -0,0 +1,222 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter do
subject(:importer) { described_class.new(project, client, parallel: parallel) }
let(:project) { instance_double('Project', id: 4, import_source: 'foo/bar') }
let(:client) { instance_double('Gitlab::GithubImport::Client') }
let(:parallel) { true }
let(:branches) do
branch = Struct.new(:name, :protection, keyword_init: true)
protection = Struct.new(:enabled, keyword_init: true)
[
branch.new(name: 'main', protection: protection.new(enabled: false)),
branch.new(name: 'staging', protection: protection.new(enabled: true))
]
end
let(:github_protection_rule) do
response = Struct.new(:name, :url, :required_signatures, :enforce_admins, :required_linear_history,
:allow_force_pushes, :allow_deletion, :block_creations, :required_conversation_resolution,
keyword_init: true
)
required_signatures = Struct.new(:url, :enabled, keyword_init: true)
enforce_admins = Struct.new(:url, :enabled, keyword_init: true)
allow_option = Struct.new(:enabled, keyword_init: true)
response.new(
name: 'main',
url: 'https://example.com/branches/main/protection',
required_signatures: required_signatures.new(
url: 'https://example.com/branches/main/protection/required_signatures',
enabled: false
),
enforce_admins: enforce_admins.new(
url: 'https://example.com/branches/main/protection/enforce_admins',
enabled: false
),
required_linear_history: allow_option.new(
enabled: false
),
allow_force_pushes: allow_option.new(
enabled: false
),
allow_deletion: allow_option.new(
enabled: false
),
block_creations: allow_option.new(
enabled: true
),
required_conversation_resolution: allow_option.new(
enabled: false
)
)
end
describe '#parallel?' do
context 'when running in parallel mode' do
it { expect(importer).to be_parallel }
end
context 'when running in sequential mode' do
let(:parallel) { false }
it { expect(importer).not_to be_parallel }
end
end
describe '#execute' do
context 'when running in parallel mode' do
it 'imports protected branches in parallel' do
expect(importer).to receive(:parallel_import)
importer.execute
end
end
context 'when running in sequential mode' do
let(:parallel) { false }
it 'imports protected branches in sequence' do
expect(importer).to receive(:sequential_import)
importer.execute
end
end
end
describe '#sequential_import', :clean_gitlab_redis_cache do
let(:parallel) { false }
before do
allow(client).to receive(:branches).and_return(branches)
allow(client)
.to receive(:branch_protection)
.with(project.import_source, 'staging')
.and_return(github_protection_rule)
.once
end
it 'imports each protected branch in sequence' do
protected_branch_importer = instance_double('Gitlab::GithubImport::Importer::ProtectedBranchImporter')
expect(Gitlab::GithubImport::Importer::ProtectedBranchImporter)
.to receive(:new)
.with(
an_instance_of(Gitlab::GithubImport::Representation::ProtectedBranch),
project,
client
)
.and_return(protected_branch_importer)
expect(protected_branch_importer).to receive(:execute)
expect(Gitlab::GithubImport::ObjectCounter)
.to receive(:increment).with(project, :protected_branch, :fetched)
importer.sequential_import
end
end
describe '#parallel_import', :clean_gitlab_redis_cache do
before do
allow(client).to receive(:branches).and_return(branches)
allow(client)
.to receive(:branch_protection)
.with(project.import_source, 'staging')
.and_return(github_protection_rule)
.once
end
it 'imports each protected branch in parallel' do
expect(Gitlab::GithubImport::ImportProtectedBranchWorker)
.to receive(:bulk_perform_in)
.with(
1.second,
[[project.id, an_instance_of(Hash), an_instance_of(String)]],
batch_delay: 1.minute,
batch_size: 1000
)
expect(Gitlab::GithubImport::ObjectCounter)
.to receive(:increment).with(project, :protected_branch, :fetched)
waiter = importer.parallel_import
expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
expect(waiter.jobs_remaining).to eq(1)
end
end
describe '#each_object_to_import', :clean_gitlab_redis_cache do
let(:branch_struct) { Struct.new(:protection, :name, :url, keyword_init: true) }
let(:protection_struct) { Struct.new(:enabled, keyword_init: true) }
let(:protected_branch) { branch_struct.new(name: 'main', protection: protection_struct.new(enabled: true)) }
let(:unprotected_branch) { branch_struct.new(name: 'staging', protection: protection_struct.new(enabled: false)) }
let(:page_counter) { instance_double(Gitlab::GithubImport::PageCounter) }
before do
allow(client).to receive(:branches).with(project.import_source)
.and_return([protected_branch, unprotected_branch])
allow(client).to receive(:branch_protection)
.with(project.import_source, protected_branch.name).once
.and_return(github_protection_rule)
allow(Gitlab::GithubImport::ObjectCounter).to receive(:increment)
.with(project, :protected_branch, :fetched)
end
it 'imports each protected branch page by page' do
subject.each_object_to_import do |object|
expect(object).to eq github_protection_rule
end
expect(Gitlab::GithubImport::ObjectCounter).to have_received(:increment).once
end
context 'when protected branch is already processed' do
it "doesn't process this branch" do
subject.mark_as_imported(protected_branch)
subject.each_object_to_import {}
expect(Gitlab::GithubImport::ObjectCounter).not_to have_received(:increment)
end
end
end
describe '#importer_class' do
it { expect(importer.importer_class).to eq Gitlab::GithubImport::Importer::ProtectedBranchImporter }
end
describe '#representation_class' do
it { expect(importer.representation_class).to eq Gitlab::GithubImport::Representation::ProtectedBranch }
end
describe '#sidekiq_worker_class' do
it { expect(importer.sidekiq_worker_class).to eq Gitlab::GithubImport::ImportProtectedBranchWorker }
end
describe '#object_type' do
it { expect(importer.object_type).to eq :protected_branch }
end
describe '#collection_method' do
it { expect(importer.collection_method).to eq :protected_branches }
end
describe '#id_for_already_imported_cache' do
it 'returns the ID of the given protected branch' do
expect(importer.id_for_already_imported_cache(github_protection_rule)).to eq('main')
end
end
describe '#collection_options' do
it 'returns an empty Hash' do
# For large projects (e.g. kubernetes/kubernetes) GitHub's API may produce
# HTTP 500 errors when using explicit sorting options, regardless of what
# order you sort in. Not using any sorting options at all allows us to
# work around this.
expect(importer.collection_options).to eq({})
end
end
end

View File

@ -0,0 +1,51 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Representation::ProtectedBranch do
shared_examples 'a ProtectedBranch rule' do
it 'returns an instance of ProtectedBranch' do
expect(protected_branch).to be_an_instance_of(described_class)
end
context 'with ProtectedBranch' do
it 'includes the protected branch ID (name)' do
expect(protected_branch.id).to eq 'main'
end
it 'includes the protected branch allow_force_pushes' do
expect(protected_branch.allow_force_pushes).to eq true
end
end
end
describe '.from_api_response' do
let(:response) do
response = Struct.new(:url, :allow_force_pushes, keyword_init: true)
allow_force_pushes = Struct.new(:enabled, keyword_init: true)
response.new(
url: 'https://example.com/branches/main/protection',
allow_force_pushes: allow_force_pushes.new(
enabled: true
)
)
end
it_behaves_like 'a ProtectedBranch rule' do
let(:protected_branch) { described_class.from_api_response(response) }
end
end
describe '.from_json_hash' do
it_behaves_like 'a ProtectedBranch rule' do
let(:hash) do
{
'id' => 'main',
'allow_force_pushes' => true
}
end
let(:protected_branch) { described_class.from_json_hash(hash) }
end
end
end

View File

@ -41,10 +41,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Server, :clean_gitlab_r
describe '#call' do
it 'removes the stored job from redis before execution' do
bare_job = { 'class' => 'TestDeduplicationWorker', 'args' => ['hello'] }
job_definition = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(bare_job.dup, 'test_deduplication')
job_definition = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(bare_job.dup, 'default')
expect(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob)
.to receive(:new).with(a_hash_including(bare_job), 'test_deduplication')
.to receive(:new).with(a_hash_including(bare_job), 'default')
.and_return(job_definition).twice # once in client middleware
expect(job_definition).to receive(:delete!).ordered.and_call_original
@ -60,10 +60,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Server, :clean_gitlab_r
it 'removes the stored job from redis after execution' do
bare_job = { 'class' => 'TestDeduplicationWorker', 'args' => ['hello'] }
job_definition = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(bare_job.dup, 'test_deduplication')
job_definition = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(bare_job.dup, 'default')
expect(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob)
.to receive(:new).with(a_hash_including(bare_job), 'test_deduplication')
.to receive(:new).with(a_hash_including(bare_job), 'default')
.and_return(job_definition).twice # once in client middleware
expect(TestDeduplicationWorker).to receive(:work).ordered.and_call_original

View File

@ -46,7 +46,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
expect(migrator.execute('PostReceive' => 'new_queue')).to eq(scanned: 3, migrated: 0)
expect(set_after.length).to eq(3)
expect(set_after.map(&:first)).to all(include('queue' => 'authorized_projects',
expect(set_after.map(&:first)).to all(include('queue' => 'default',
'class' => 'AuthorizedProjectsWorker'))
end
end
@ -62,7 +62,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
if item['class'] == 'AuthorizedProjectsWorker'
expect(item).to include('queue' => 'new_queue', 'args' => [i])
else
expect(item).to include('queue' => 'post_receive', 'args' => [i])
expect(item).to include('queue' => 'default', 'args' => [i])
end
expect(score).to be_within(schedule_jitter).of(i.succ.hours.from_now.to_i)
@ -116,7 +116,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
expect(migrator.execute('PostReceive' => 'new_queue')).to eq(scanned: 4, migrated: 0)
expect(set_after.length).to eq(3)
expect(set_after.map(&:first)).to all(include('queue' => 'authorized_projects'))
expect(set_after.map(&:first)).to all(include('queue' => 'default'))
end
end
@ -138,7 +138,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
expect(migrator.execute('PostReceive' => 'new_queue')).to eq(scanned: 4, migrated: 1)
expect(set_after.group_by { |job| job.first['queue'] }.transform_values(&:count))
.to eq('authorized_projects' => 6, 'new_queue' => 1)
.to eq('default' => 6, 'new_queue' => 1)
end
it 'iterates through the entire set of jobs' do

View File

@ -4,15 +4,15 @@ require 'spec_helper'
RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues do
around do |example|
Sidekiq::Queue.new('default').clear
Sidekiq::Queue.new('foobar').clear
Sidekiq::Testing.disable!(&example)
Sidekiq::Queue.new('default').clear
Sidekiq::Queue.new('foobar').clear
end
def add_job(args, user:, klass: 'AuthorizedProjectsWorker')
Sidekiq::Client.push(
'class' => klass,
'queue' => 'default',
'queue' => 'foobar',
'args' => args,
'meta.user' => user.username
)
@ -20,7 +20,7 @@ RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues do
describe '#drop_jobs!' do
shared_examples 'queue processing' do
let(:sidekiq_queue) { described_class.new('default') }
let(:sidekiq_queue) { described_class.new('foobar') }
let_it_be(:sidekiq_queue_user) { create(:user) }
before do
@ -80,7 +80,7 @@ RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues do
it 'raises NoMetadataError' do
add_job([1], user: create(:user))
expect { described_class.new('default').drop_jobs!({ username: 'sidekiq_queue_user' }, timeout: 1) }
expect { described_class.new('foobar').drop_jobs!({ username: 'sidekiq_queue_user' }, timeout: 1) }
.to raise_error(described_class::NoMetadataError)
end
end

View File

@ -0,0 +1,24 @@
# frozen_string_literal: true
require 'fast_spec_helper'
RSpec.describe Gitlab::Utils::ExecutionTracker do
subject(:tracker) { described_class.new }
describe '#over_limit?' do
it 'is true when max runtime is exceeded' do
monotonic_time_before = 1 # this will be the start time
monotonic_time_after = described_class::MAX_RUNTIME.to_i + 1 # this will be returned when over_limit? is called
allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
tracker # instantiate the lazy subject so it captures the stubbed start time
expect(tracker).to be_over_limit
end
it 'is false when max runtime is not exceeded' do
expect(tracker).not_to be_over_limit
end
end
end
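
The class under test is small enough to sketch from the expectations alone; the exact MAX_RUNTIME value below is an assumption, the spec only relies on it existing.

module Gitlab
  module Utils
    class ExecutionTracker
      MAX_RUNTIME = 30 # seconds; assumed limit

      def initialize
        # The first monotonic_time reading becomes the start time.
        @start_time = ::Gitlab::Metrics::System.monotonic_time
      end

      def over_limit?
        elapsed = ::Gitlab::Metrics::System.monotonic_time - @start_time
        elapsed >= MAX_RUNTIME
      end
    end
  end
end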

View File

@ -17,4 +17,25 @@ RSpec.describe Ci::PipelineVariable do
it { is_expected.to be_a(Hash) }
it { is_expected.to eq({ key: 'foo', value: 'bar' }) }
end
describe 'partitioning' do
context 'with pipeline' do
let(:pipeline) { build(:ci_pipeline, partition_id: 123) }
let(:variable) { build(:ci_pipeline_variable, pipeline: pipeline, partition_id: nil) }
it 'copies the partition_id from pipeline' do
expect { variable.valid? }.to change(variable, :partition_id).from(nil).to(123)
end
end
context 'without pipeline' do
subject(:variable) { build(:ci_pipeline_variable, pipeline: nil, partition_id: nil) }
it { is_expected.to validate_presence_of(:partition_id) }
it 'does not change the partition_id value' do
expect { variable.valid? }.not_to change(variable, :partition_id)
end
end
end
end
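
A hedged illustration of the partitioning behaviour these examples describe: the copy could be as simple as a before_validation hook on the owning association. The real Ci::Partitionable concern is more general (it takes a configurable scope); this is illustration only.

module Ci
  module Partitionable
    extend ActiveSupport::Concern

    included do
      before_validation :set_partition_id, on: :create
      validates :partition_id, presence: true
    end

    private

    # Inherit the partition from the owning pipeline when none is set.
    def set_partition_id
      self.partition_id ||= pipeline&.partition_id
    end
  end
end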

View File

@ -84,6 +84,22 @@ RSpec.describe Commit do
end
end
describe '.build_from_sidekiq_hash' do
it 'returns a Commit' do
commit = described_class.build_from_sidekiq_hash(project, id: '123')
expect(commit).to be_an_instance_of(Commit)
end
it 'parses date strings into Time instances' do
commit = described_class.build_from_sidekiq_hash(project,
id: '123',
authored_date: Time.current.to_s)
expect(commit.authored_date).to be_a_kind_of(Time)
end
end
describe '#diff_refs' do
it 'is equal to itself' do
expect(commit.diff_refs).to eq(commit.diff_refs)

View File

@ -5,11 +5,11 @@ require 'spec_helper'
RSpec.describe Blobs::NotebookPresenter do
include RepoHelpers
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:blob) { repository.blob_at('HEAD', 'files/ruby/regex.rb') }
let(:user) { project.first_owner }
let(:git_blob) { blob.__getobj__ }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:repository) { project.repository }
let_it_be(:blob) { repository.blob_at('HEAD', 'files/ruby/regex.rb') }
let_it_be(:user) { project.first_owner }
let_it_be(:git_blob) { blob.__getobj__ }
subject(:presenter) { described_class.new(blob, current_user: user) }

View File

@ -1,4 +1,5 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness, :aggregate_failures do
@ -69,6 +70,34 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
expect(metadata.expanded_environment_name).to eq('review/deploy')
end
context 'with pipeline variables' do
let(:variables_attributes) do
[
{ key: 'SOME_VARIABLE', secret_value: 'SOME_VAL' },
{ key: 'OTHER_VARIABLE', secret_value: 'OTHER_VAL' }
]
end
let(:service) do
described_class.new(
project,
user,
{ ref: 'master', variables_attributes: variables_attributes })
end
it 'assigns partition_id to pipeline' do
expect(pipeline).to be_created_successfully
expect(pipeline.partition_id).to eq(current_partition_id)
end
it 'assigns partition_id to variables' do
variables_partition_ids = pipeline.variables.map(&:partition_id).uniq
expect(pipeline.variables.size).to eq(2)
expect(variables_partition_ids).to eq([current_partition_id])
end
end
def find_metadata(name)
pipeline
.processables

View File

@ -285,7 +285,7 @@ RSpec.describe Git::BranchPushService, :use_clean_rails_redis_caching, services:
author_email: commit_author.email
)
allow_any_instance_of(ProcessCommitWorker).to receive(:build_commit)
allow(Commit).to receive(:build_from_sidekiq_hash)
.and_return(commit)
allow(project.repository).to receive(:commits_between).and_return([commit])
@ -348,7 +348,7 @@ RSpec.describe Git::BranchPushService, :use_clean_rails_redis_caching, services:
committed_date: commit_time
)
allow_any_instance_of(ProcessCommitWorker).to receive(:build_commit)
allow(Commit).to receive(:build_from_sidekiq_hash)
.and_return(commit)
allow(project.repository).to receive(:commits_between).and_return([commit])
@ -387,7 +387,7 @@ RSpec.describe Git::BranchPushService, :use_clean_rails_redis_caching, services:
allow(project.repository).to receive(:commits_between)
.and_return([closing_commit])
allow_any_instance_of(ProcessCommitWorker).to receive(:build_commit)
allow(Commit).to receive(:build_from_sidekiq_hash)
.and_return(closing_commit)
project.add_maintainer(commit_author)

View File

@ -844,7 +844,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
@merge_request = described_class.new(project: project, current_user: user, params: opts).execute(merge_request)
end
should_not_email(subscriber)
should_email(subscriber)
should_not_email(non_subscriber)
end

View File

@ -19,6 +19,25 @@ RSpec.describe Projects::UpdatePagesService do
subject { described_class.new(project, build) }
context 'when a deploy stage already exists' do
let!(:stage) { create(:ci_stage, name: 'deploy', pipeline: pipeline) }
it 'assigns the deploy stage' do
subject.execute
expect(GenericCommitStatus.last.ci_stage).to eq(stage)
expect(GenericCommitStatus.last.ci_stage.name).to eq('deploy')
end
end
context 'when a deploy stage does not exist' do
it 'assigns the deploy stage' do
subject.execute
expect(GenericCommitStatus.last.ci_stage.name).to eq('deploy')
end
end
context 'for new artifacts' do
context "for a valid job" do
let!(:artifacts_archive) { create(:ci_job_artifact, :correct_checksum, file: file, job: build) }
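
Both deploy-stage contexts above are satisfied by a single find-or-create lookup; a plausible sketch, assuming GitLab's race-safe safe_find_or_create_by helper is used here:

def stage
  # Reuse an existing "deploy" stage on the build's pipeline, or create one.
  @stage ||= build.pipeline.stages.safe_find_or_create_by(name: 'deploy')
end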

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Users::MigrateRecordsToGhostUserInBatchesService do
let(:service) { described_class.new }
let_it_be(:ghost_user_migration) { create(:ghost_user_migration) }
describe '#execute' do
it 'stops when the execution time limit is reached' do
expect_next_instance_of(::Gitlab::Utils::ExecutionTracker) do |tracker|
expect(tracker).to receive(:over_limit?).and_return(true)
end
expect(Users::MigrateRecordsToGhostUserService).not_to receive(:new)
service.execute
end
it 'calls Users::MigrateRecordsToGhostUserService' do
expect_next_instance_of(Users::MigrateRecordsToGhostUserService) do |service|
expect(service).to(
receive(:execute)
.with(hard_delete: ghost_user_migration.hard_delete))
end
service.execute
end
end
end
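
Pieced together from both examples, the service presumably loops over pending migrations and bails out once the tracker trips; the model name and iteration scope here are assumptions.

module Users
  class MigrateRecordsToGhostUserInBatchesService
    def execute
      tracker = ::Gitlab::Utils::ExecutionTracker.new

      Users::GhostUserMigration.find_each do |migration|
        # Check the time budget before doing any work on this record.
        break if tracker.over_limit?

        Users::MigrateRecordsToGhostUserService
          .new(migration.user, migration.initiator_user, tracker)
          .execute(hard_delete: migration.hard_delete)
      end
    end
  end
end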

View File

@ -4,7 +4,8 @@ require 'spec_helper'
RSpec.describe Users::MigrateRecordsToGhostUserService do
let!(:user) { create(:user) }
let(:service) { described_class.new(user, admin) }
let(:service) { described_class.new(user, admin, execution_tracker) }
let(:execution_tracker) { instance_double(::Gitlab::Utils::ExecutionTracker, over_limit?: false) }
let_it_be(:admin) { create(:admin) }
let_it_be(:project) { create(:project, :repository) }

View File

@ -3448,7 +3448,6 @@
- './ee/spec/workers/concerns/elastic/indexing_control_spec.rb'
- './ee/spec/workers/concerns/elastic/migration_obsolete_spec.rb'
- './ee/spec/workers/concerns/elastic/migration_options_spec.rb'
- './ee/spec/workers/concerns/geo_queue_spec.rb'
- './ee/spec/workers/concerns/update_orchestration_policy_configuration_spec.rb'
- './ee/spec/workers/create_github_webhook_worker_spec.rb'
- './ee/spec/workers/deployments/auto_rollback_worker_spec.rb'
@ -10874,18 +10873,14 @@
- './spec/workers/cluster_wait_for_ingress_ip_address_worker_spec.rb'
- './spec/workers/concerns/application_worker_spec.rb'
- './spec/workers/concerns/cluster_agent_queue_spec.rb'
- './spec/workers/concerns/cluster_queue_spec.rb'
- './spec/workers/concerns/cronjob_queue_spec.rb'
- './spec/workers/concerns/gitlab/github_import/object_importer_spec.rb'
- './spec/workers/concerns/gitlab/github_import/queue_spec.rb'
- './spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb'
- './spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb'
- './spec/workers/concerns/gitlab/notify_upon_death_spec.rb'
- './spec/workers/concerns/limited_capacity/job_tracker_spec.rb'
- './spec/workers/concerns/limited_capacity/worker_spec.rb'
- './spec/workers/concerns/packages/cleanup_artifact_worker_spec.rb'
- './spec/workers/concerns/pipeline_background_queue_spec.rb'
- './spec/workers/concerns/pipeline_queue_spec.rb'
- './spec/workers/concerns/project_import_options_spec.rb'
- './spec/workers/concerns/reenqueuer_spec.rb'
- './spec/workers/concerns/repository_check_queue_spec.rb'

View File

@ -12,10 +12,25 @@ RSpec.describe 'notify/change_in_merge_request_draft_status_email.html.haml' do
assign(:merge_request, merge_request)
end
it 'shows user added draft status on email' do
merge_request.update!(title: merge_request.draft_title)
render
expect(merge_request.draft).to be_truthy
expect(rendered).to have_content("#{user.name} marked merge request #{merge_request.to_reference} as draft")
end
it 'shows user removed draft status on email' do
render
expect(merge_request.draft).to be_falsy
expect(rendered).to have_content("#{user.name} marked merge request #{merge_request.to_reference} as ready")
end
it 'renders the email correctly' do
render
expect(rendered).to have_content("#{user.name} changed the draft status of merge request #{merge_request.to_reference}")
expect(rendered).to have_link(user.name, href: user_url(user))
expect(rendered).to have_link(merge_request.to_reference, href: merge_request_link)
end

View File

@ -12,9 +12,19 @@ RSpec.describe 'notify/change_in_merge_request_draft_status_email.text.erb' do
it_behaves_like 'renders plain text email correctly'
it 'renders the email correctly' do
it 'shows user added draft status on email' do
merge_request.update!(title: merge_request.draft_title)
render
expect(rendered).to have_content("#{user.name} changed the draft status of merge request #{merge_request.to_reference}")
expect(merge_request.draft).to be_truthy
expect(rendered).to have_content("#{user.name} marked merge request #{merge_request.to_reference} as draft")
end
it 'shows user removed draft status on email' do
render
expect(merge_request.draft).to be_falsy
expect(rendered).to have_content("#{user.name} marked merge request #{merge_request.to_reference} as ready")
end
end

View File

@ -14,6 +14,5 @@ RSpec.describe ClusterAgentQueue do
end
end
it { expect(worker.queue).to eq('cluster_agent:example') }
it { expect(worker.get_feature_category).to eq(:kubernetes_management) }
end

View File

@ -1,21 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ClusterQueue do
let(:worker) do
Class.new do
def self.name
'DummyWorker'
end
include ApplicationWorker
include ClusterQueue
end
end
it 'sets a default pipelines queue automatically' do
expect(worker.sidekiq_options['queue'])
.to eq 'gcp_cluster:dummy'
end
end

View File

@ -40,10 +40,6 @@ RSpec.describe CronjobQueue do
stub_const("AnotherWorker", another_worker)
end
it 'sets the queue name of a worker' do
expect(worker.sidekiq_options['queue'].to_s).to eq('cronjob:dummy')
end
it 'disables retrying of failed jobs' do
expect(worker.sidekiq_options['retry']).to eq(false)
end

View File

@ -1,18 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Queue do
it 'sets the Sidekiq options for the worker' do
worker = Class.new do
def self.name
'DummyWorker'
end
include ApplicationWorker
include Gitlab::GithubImport::Queue
end
expect(worker.sidekiq_options['queue']).to eq('github_importer:dummy')
end
end

View File

@ -1,21 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe PipelineBackgroundQueue do
let(:worker) do
Class.new do
def self.name
'DummyWorker'
end
include ApplicationWorker
include PipelineBackgroundQueue
end
end
it 'sets a default object storage queue automatically' do
expect(worker.sidekiq_options['queue'])
.to eq 'pipeline_background:dummy'
end
end

View File

@ -1,21 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe PipelineQueue do
let(:worker) do
Class.new do
def self.name
'DummyWorker'
end
include ApplicationWorker
include PipelineQueue
end
end
it 'sets a default pipelines queue automatically' do
expect(worker.sidekiq_options['queue'])
.to eq 'pipeline_default:dummy'
end
end

View File

@ -14,10 +14,6 @@ RSpec.describe RepositoryCheckQueue do
end
end
it 'sets the queue name of a worker' do
expect(worker.sidekiq_options['queue'].to_s).to eq('repository_check:dummy')
end
it 'disables retrying of failed jobs' do
expect(worker.sidekiq_options['retry']).to eq(false)
end

View File

@ -49,8 +49,7 @@ RSpec.describe WaitableWorker do
expect(Gitlab::AppJsonLogger).to(
receive(:info).with(a_hash_including('message' => 'running inline',
'class' => 'Gitlab::Foo::Bar::DummyWorker',
'job_status' => 'running',
'queue' => 'foo_bar_dummy'))
'job_status' => 'running'))
.once)
worker.bulk_perform_and_wait(args_list)

View File

@ -263,6 +263,7 @@ RSpec.describe 'Every Sidekiq worker' do
'Gitlab::GithubImport::ImportIssueEventWorker' => 5,
'Gitlab::GithubImport::ImportLfsObjectWorker' => 5,
'Gitlab::GithubImport::ImportNoteWorker' => 5,
'Gitlab::GithubImport::ImportProtectedBranchWorker' => 5,
'Gitlab::GithubImport::ImportPullRequestMergedByWorker' => 5,
'Gitlab::GithubImport::ImportPullRequestReviewWorker' => 5,
'Gitlab::GithubImport::ImportPullRequestWorker' => 5,
@ -273,6 +274,7 @@ RSpec.describe 'Every Sidekiq worker' do
'Gitlab::GithubImport::Stage::ImportIssueEventsWorker' => 5,
'Gitlab::GithubImport::Stage::ImportLfsObjectsWorker' => 5,
'Gitlab::GithubImport::Stage::ImportAttachmentsWorker' => 5,
'Gitlab::GithubImport::Stage::ImportProtectedBranchesWorker' => 5,
'Gitlab::GithubImport::Stage::ImportNotesWorker' => 5,
'Gitlab::GithubImport::Stage::ImportPullRequestsMergedByWorker' => 5,
'Gitlab::GithubImport::Stage::ImportPullRequestsReviewsWorker' => 5,
@ -313,6 +315,7 @@ RSpec.describe 'Every Sidekiq worker' do
'Integrations::IrkerWorker' => 3,
'InvalidGpgSignatureUpdateWorker' => 3,
'IssuableExportCsvWorker' => 3,
'Issues::CloseWorker' => 3,
'Issues::PlacementWorker' => 3,
'Issues::RebalancingWorker' => 3,
'IterationsUpdateStatusWorker' => 3,

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::ImportProtectedBranchWorker do
let(:worker) { described_class.new }
let(:import_state) { build_stubbed(:import_state, :started) }
let(:project) { instance_double('Project', full_path: 'foo/bar', id: 1, import_state: import_state) }
let(:client) { instance_double('Gitlab::GithubImport::Client') }
let(:importer) { instance_double('Gitlab::GithubImport::Importer::ProtectedBranchImporter') }
describe '#import' do
let(:json_hash) do
{
id: 'main',
allow_force_pushes: true
}
end
it 'imports protected branch rule' do
expect(Gitlab::GithubImport::Importer::ProtectedBranchImporter)
.to receive(:new)
.with(
an_instance_of(Gitlab::GithubImport::Representation::ProtectedBranch),
project,
client
)
.and_return(importer)
expect(importer).to receive(:execute)
expect(Gitlab::GithubImport::ObjectCounter)
.to receive(:increment)
.with(project, :protected_branch, :imported)
worker.import(project, client, json_hash)
end
end
end
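
The import hook being exercised can be reconstructed almost mechanically from the expectations; a sketch:

def import(project, client, hash)
  rule = Gitlab::GithubImport::Representation::ProtectedBranch.from_json_hash(hash)

  Gitlab::GithubImport::Importer::ProtectedBranchImporter
    .new(rule, project, client)
    .execute

  # Record the successful import against the project's counters.
  Gitlab::GithubImport::ObjectCounter.increment(project, :protected_branch, :imported)
end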

View File

@ -29,7 +29,7 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportAttachmentsWorker do
expect(Gitlab::GithubImport::AdvanceStageWorker)
.to receive(:perform_async)
.with(project.id, { '123' => 2 }, :lfs_objects)
.with(project.id, { '123' => 2 }, :protected_branches)
worker.import(client, project)
end
@ -39,7 +39,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportAttachmentsWorker do
it 'skips release attachments import and calls next stage' do
expect(Gitlab::GithubImport::Importer::ReleasesAttachmentsImporter).not_to receive(:new)
expect(Gitlab::GithubImport::AdvanceStageWorker).to receive(:perform_async).with(project.id, {}, :lfs_objects)
expect(Gitlab::GithubImport::AdvanceStageWorker)
.to receive(:perform_async).with(project.id, {}, :protected_branches)
worker.import(client, project)
end

View File

@ -0,0 +1,58 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportProtectedBranchesWorker do
let_it_be(:project) { create(:project) }
let_it_be(:import_state) { create(:import_state, project: project) }
let(:worker) { described_class.new }
let(:importer) { instance_double('Gitlab::GithubImport::Importer::ProtectedBranchImporter') }
let(:client) { instance_double('Gitlab::GithubImport::Client') }
describe '#import' do
it 'imports all the protected branches' do
waiter = Gitlab::JobWaiter.new(2, '123')
expect(Gitlab::GithubImport::Importer::ProtectedBranchesImporter)
.to receive(:new)
.with(project, client)
.and_return(importer)
expect(importer)
.to receive(:execute)
.and_return(waiter)
expect(import_state)
.to receive(:refresh_jid_expiration)
expect(Gitlab::GithubImport::AdvanceStageWorker)
.to receive(:perform_async)
.with(project.id, { '123' => 2 }, :lfs_objects)
worker.import(client, project)
end
context 'when an error is raised' do
let(:exception) { StandardError.new('_some_error_') }
before do
allow_next_instance_of(Gitlab::GithubImport::Importer::ProtectedBranchesImporter) do |importer|
allow(importer).to receive(:execute).and_raise(exception)
end
end
it 'raises an error' do
expect(Gitlab::Import::ImportFailureService).to receive(:track)
.with(
project_id: project.id,
exception: exception,
error_source: described_class.name,
metrics: true
).and_call_original
expect { worker.import(client, project) }.to raise_error(StandardError)
end
end
end
end
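
For reference, the stage hook implied above: run the importer, refresh the JID, then advance past this stage. The rescue/re-raise pairing is inferred from the failure context; everything else is pinned by the expectations.

def import(client, project)
  waiter = Gitlab::GithubImport::Importer::ProtectedBranchesImporter
    .new(project, client)
    .execute

  project.import_state.refresh_jid_expiration

  Gitlab::GithubImport::AdvanceStageWorker
    .perform_async(project.id, { waiter.key => waiter.jobs_remaining }, :lfs_objects)
rescue StandardError => e
  Gitlab::Import::ImportFailureService.track(
    project_id: project.id,
    exception: e,
    error_source: self.class.name,
    metrics: true
  )
  raise
end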

View File

@ -0,0 +1,86 @@
# frozen_string_literal: true
require "spec_helper"
RSpec.describe Issues::CloseWorker do
describe "#perform" do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:issue) { create(:issue, project: project, author: user) }
let(:commit) { project.commit }
let(:opts) do
{ "closed_by" => user&.id, "commit_hash" => commit.to_hash }
end
subject(:worker) { described_class.new }
describe "#perform" do
context "when the user can update the issues" do
it "closes the issues" do
worker.perform(project.id, issue.id, issue.class.to_s, opts)
issue.reload
expect(issue.closed?).to eq(true)
end
it "closes external issues" do
external_issue = ExternalIssue.new("foo", project)
closer = instance_double(Issues::CloseService, execute: true)
expect(Issues::CloseService).to receive(:new).with(project: project, current_user: user).and_return(closer)
expect(closer).to receive(:execute).with(external_issue, commit: commit)
worker.perform(project.id, external_issue.id, external_issue.class.to_s, opts)
end
end
context "when the user can not update the issues" do
it "does not close the issues" do
other_user = create(:user)
opts = { "closed_by" => other_user.id, "commit_hash" => commit.to_hash }
worker.perform(project.id, issue.id, issue.class.to_s, opts)
issue.reload
expect(issue.closed?).to eq(false)
end
end
end
shared_examples "when object does not exist" do
it "does not call the close issue service" do
expect(Issues::CloseService).not_to receive(:new)
expect { worker.perform(project.id, issue.id, issue.class.to_s, opts) }
.not_to raise_exception
end
end
context "when the project does not exist" do
before do
allow(Project).to receive(:find_by_id).with(project.id).and_return(nil)
end
it_behaves_like "when object does not exist"
end
context "when the user does not exist" do
before do
allow(User).to receive(:find_by_id).with(user.id).and_return(nil)
end
it_behaves_like "when object does not exist"
end
context "when the issue does not exist" do
before do
allow(Issue).to receive(:find_by_id).with(issue.id).and_return(nil)
end
it_behaves_like "when object does not exist"
end
end
end
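
A rough reconstruction of the perform under test, hedged: the guard clauses mirror the "object does not exist" contexts, and the ExternalIssue branch is inferred from the external-issue example.

def perform(project_id, issue_id, issue_type, opts = {})
  project = Project.find_by_id(project_id)
  return unless project

  user = User.find_by_id(opts['closed_by'])
  return unless user

  issue =
    if issue_type == 'ExternalIssue'
      ExternalIssue.new(issue_id, project)
    else
      Issue.find_by_id(issue_id)
    end
  return unless issue

  commit = Commit.build_from_sidekiq_hash(project, opts['commit_hash'])

  Issues::CloseService
    .new(project: project, current_user: user)
    .execute(issue, commit: commit)
end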

View File

@ -115,25 +115,37 @@ RSpec.describe ProcessCommitWorker do
end
describe '#close_issues' do
context 'when the user can update the issues' do
it 'closes the issues' do
it 'creates Issue::CloseWorker jobs' do
expect do
worker.close_issues(project, user, user, commit, [issue])
issue.reload
expect(issue.closed?).to eq(true)
end
end.to change(Issues::CloseWorker.jobs, :size).by(1)
end
context 'when the user can not update the issues' do
it 'does not close the issues' do
other_user = create(:user)
context 'when process_issue_closure_in_background flag is disabled' do
before do
stub_feature_flags(process_issue_closure_in_background: false)
end
worker.close_issues(project, other_user, other_user, commit, [issue])
context 'when the user can update the issues' do
it 'closes the issues' do
worker.close_issues(project, user, user, commit, [issue])
issue.reload
issue.reload
expect(issue.closed?).to eq(false)
expect(issue.closed?).to eq(true)
end
end
context 'when the user can not update the issues' do
it 'does not close the issues' do
other_user = create(:user)
worker.close_issues(project, other_user, other_user, commit, [issue])
issue.reload
expect(issue.closed?).to eq(false)
end
end
end
end
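
The behaviour change reads naturally as a flag branch inside close_issues; a sketch, with the argument layout assumed and permission checks assumed to live inside Issues::CloseService:

def close_issues(project, user, author, commit, issues)
  if Feature.enabled?(:process_issue_closure_in_background, project)
    # New path: push closure into background jobs, one per issue.
    issues.each do |issue|
      Issues::CloseWorker.perform_async(
        project.id, issue.id, issue.class.to_s,
        { 'closed_by' => user.id, 'commit_hash' => commit.to_hash }
      )
    end
  else
    # Old path: close inline; the service enforces permissions itself.
    issues.each do |issue|
      Issues::CloseService
        .new(project: project, current_user: user)
        .close_issue(issue, closed_via: commit)
    end
  end
end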
@ -189,20 +201,4 @@ RSpec.describe ProcessCommitWorker do
end
end
end
describe '#build_commit' do
it 'returns a Commit' do
commit = worker.build_commit(project, id: '123')
expect(commit).to be_an_instance_of(Commit)
end
it 'parses date strings into Time instances' do
commit = worker.build_commit(project,
id: '123',
authored_date: Time.current.to_s)
expect(commit.authored_date).to be_a_kind_of(Time)
end
end
end

View File

@ -7,7 +7,6 @@ RSpec.describe SshKeys::ExpiredNotificationWorker, type: :worker do
it 'uses a cronjob queue' do
expect(worker.sidekiq_options_hash).to include(
'queue' => 'cronjob:ssh_keys_expired_notification',
'queue_namespace' => :cronjob
)
end

View File

@ -7,7 +7,6 @@ RSpec.describe SshKeys::ExpiringSoonNotificationWorker, type: :worker do
it 'uses a cronjob queue' do
expect(worker.sidekiq_options_hash).to include(
'queue' => 'cronjob:ssh_keys_expiring_soon_notification',
'queue_namespace' => :cronjob
)
end

View File

@ -0,0 +1,53 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Users::MigrateRecordsToGhostUserInBatchesWorker do
include ExclusiveLeaseHelpers
let(:worker) { described_class.new }
describe '#perform', :clean_gitlab_redis_shared_state do
it 'executes service with lease' do
lease_key = described_class.name.underscore
expect_to_obtain_exclusive_lease(lease_key, 'uuid')
expect_next_instance_of(Users::MigrateRecordsToGhostUserInBatchesService) do |service|
expect(service).to receive(:execute).and_return(true)
end
worker.perform
end
end
include_examples 'an idempotent worker' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, namespace: create(:group)) }
let_it_be(:issue) { create(:issue, project: project, author: user, last_edited_by: user) }
subject { worker.perform }
before do
create(:ghost_user_migration, user: user, initiator_user: user)
end
it 'migrates issue to ghost user' do
subject
expect(issue.reload.author).to eq(User.ghost)
expect(issue.last_edited_by).to eq(User.ghost)
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it 'does not execute the service' do
expect(Users::MigrateRecordsToGhostUserInBatchesService).not_to receive(:new)
worker.perform
end
end
end
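
Putting the three examples together, the worker presumably looks like the sketch below; in_lock from Gitlab::ExclusiveLeaseHelpers and the TTL value are assumptions.

module Users
  class MigrateRecordsToGhostUserInBatchesWorker
    include ApplicationWorker
    include ::Gitlab::ExclusiveLeaseHelpers

    idempotent!

    def perform
      return unless Feature.enabled?(:user_destroy_with_limited_execution_time_worker)

      # Only one batch runs at a time across the fleet.
      in_lock(self.class.name.underscore, ttl: 1.minute) do
        Users::MigrateRecordsToGhostUserInBatchesService.new.execute
      end
    end
  end
end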