Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-07-13 12:09:54 +00:00
parent 152d3b652d
commit affec3ced2
69 changed files with 1003 additions and 162 deletions

View file

@ -23,3 +23,9 @@ workhorse:verify:
workhorse:test using go 1.17:
extends: .workhorse:test
image: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images:debian-${DEBIAN_VERSION}-ruby-${RUBY_VERSION}-golang-1.17-git-2.31
workhorse:test using go 1.17 with FIPS:
extends: .workhorse:test
variables:
WORKHORSE_TEST_FIPS_ENABLED: 1
image: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images:debian-${DEBIAN_VERSION}-ruby-${RUBY_VERSION}-golang-1.17-git-2.31

View file

@ -1 +1 @@
8e3eafce11e3b48177872c28c58614226ae18602
7e0dccaed3f2ee60d0b03c0e2382c912ea59c729

View file

@ -896,9 +896,9 @@ GEM
jwt (>= 2.0)
omniauth (>= 1.1.1)
omniauth-oauth2 (>= 1.5)
omniauth-oauth (1.1.0)
omniauth-oauth (1.2.0)
oauth
omniauth (~> 1.0)
omniauth (>= 1.0, < 3)
omniauth-oauth2 (1.7.3)
oauth2 (>= 1.4, < 3)
omniauth (>= 1.9, < 3)

View file

@ -24,6 +24,8 @@ import { visitParents, SKIP } from 'unist-util-visit-parents';
import { toString } from 'hast-util-to-string';
import { isFunction, isString, noop } from 'lodash';
const NO_ATTRIBUTES = {};
/**
* Merges two ProseMirror text nodes if both text nodes
* have the same set of marks.
@ -269,7 +271,8 @@ const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source)
root: {
selector: 'root',
wrapInParagraph: true,
handle: (state, hastNode) => state.openNode(schema.topNodeType, hastNode, {}, {}),
handle: (state, hastNode) =>
state.openNode(schema.topNodeType, hastNode, NO_ATTRIBUTES, factories.root),
},
text: {
selector: 'text',
@ -287,11 +290,7 @@ const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source)
};
for (const [proseMirrorName, factorySpec] of Object.entries(proseMirrorFactorySpecs)) {
const factory = {
selector: factorySpec.selector,
skipChildren: factorySpec.skipChildren,
processText: factorySpec.processText,
parent: factorySpec.parent,
wrapInParagraph: factorySpec.wrapInParagraph,
...factorySpec,
};
if (factorySpec.type === 'block') {
@ -299,48 +298,35 @@ const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source)
const nodeType = schema.nodeType(proseMirrorName);
state.closeUntil(parent);
state.openNode(
nodeType,
hastNode,
getAttrs(factorySpec, hastNode, parent, source),
factorySpec,
);
state.openNode(nodeType, hastNode, getAttrs(factory, hastNode, parent, source), factory);
/**
* If a getContent function is provided, we immediately close
* the node to delegate content processing to this function.
* */
if (isFunction(factorySpec.getContent)) {
state.addText(
schema,
factorySpec.getContent({ hastNode, hastNodeText: toString(hastNode) }),
);
if (isFunction(factory.getContent)) {
state.addText(schema, factory.getContent({ hastNode, hastNodeText: toString(hastNode) }));
state.closeNode();
}
};
} else if (factorySpec.type === 'inline') {
} else if (factory.type === 'inline') {
const nodeType = schema.nodeType(proseMirrorName);
factory.handle = (state, hastNode, parent) => {
state.closeUntil(parent);
state.openNode(
nodeType,
hastNode,
getAttrs(factorySpec, hastNode, parent, source),
factorySpec,
);
state.openNode(nodeType, hastNode, getAttrs(factory, hastNode, parent, source), factory);
// Inline nodes do not have children therefore they are immediately closed
state.closeNode();
};
} else if (factorySpec.type === 'mark') {
} else if (factory.type === 'mark') {
const markType = schema.marks[proseMirrorName];
factory.handle = (state, hastNode, parent) => {
state.openMark(markType, getAttrs(factorySpec, hastNode, parent, source));
state.openMark(markType, getAttrs(factory, hastNode, parent, source));
if (factorySpec.inlineContent) {
if (factory.inlineContent) {
state.addText(schema, hastNode.value);
}
};
} else if (factorySpec.type === 'ignore') {
} else if (factory.type === 'ignore') {
factory.handle = noop;
} else {
throw new RangeError(

View file

@ -22,7 +22,7 @@ class Projects::IssuesController < Projects::ApplicationController
before_action :issue, unless: ->(c) { ISSUES_EXCEPT_ACTIONS.include?(c.action_name.to_sym) }
before_action :redirect_if_task, unless: ->(c) { ISSUES_EXCEPT_ACTIONS.include?(c.action_name.to_sym) }
after_action :log_issue_show, unless: ->(c) { ISSUES_EXCEPT_ACTIONS.include?(c.action_name.to_sym) }
after_action :log_issue_show, only: :show
before_action :set_issuables_index, if: ->(c) {
SET_ISSUABLES_INDEX_ONLY_ACTIONS.include?(c.action_name.to_sym) && !index_html_request?

View file

@ -27,6 +27,7 @@ module Types
def coerce_input(string, ctx)
gid = super
return if gid.nil?
# Always return a WorkItemID even if an Issue Global ID is provided as input
return work_item_gid(gid) if suitable?(gid)

View file

@ -8,8 +8,8 @@ module Types
argument :parent_id, ::Types::GlobalIDType[::WorkItem],
required: false,
description: 'Global ID of the parent work item.',
prepare: ->(id, _) { id&.model_id }
loads: ::Types::WorkItemType,
description: 'Global ID of the parent work item.'
end
end
end

View file

@ -8,8 +8,8 @@ module Types
argument :parent_id, ::Types::GlobalIDType[::WorkItem],
required: false,
description: 'Global ID of the parent work item.',
prepare: ->(id, _) { id&.model_id }
loads: ::Types::WorkItemType,
description: 'Global ID of the parent work item. Use `null` to remove the association.'
argument :children_ids, [::Types::GlobalIDType[::WorkItem]],
required: false,

View file

@ -228,6 +228,7 @@ module DiffHelper
def conflicts(allow_tree_conflicts: false)
return unless options[:merge_ref_head_diff]
return unless merge_request.cannot_be_merged?
conflicts_service = MergeRequests::Conflicts::ListService.new(merge_request, allow_tree_conflicts: allow_tree_conflicts) # rubocop:disable CodeReuse/ServiceClass

View file

@ -4,7 +4,6 @@ module Ci
class RunnerVersion < Ci::ApplicationRecord
include EachBatch
include EnumWithNil
include BulkInsertSafe # include this last (see https://docs.gitlab.com/ee/development/insert_into_tables_in_batches.html#prepare-applicationrecords-for-bulk-insertion)
enum_with_nil status: {
not_processed: nil,

View file

@ -4,6 +4,8 @@ module Operations
class FeatureFlagsClient < ApplicationRecord
include TokenAuthenticatable
DEFAULT_UNLEASH_API_VERSION = 1
self.table_name = 'operations_feature_flags_clients'
belongs_to :project
@ -13,6 +15,8 @@ module Operations
add_authentication_token_field :token, encrypted: :required
attr_accessor :unleash_app_name
before_validation :ensure_token!
def self.find_for_project_and_token(project, token)
@ -21,5 +25,25 @@ module Operations
where(project_id: project).find_by_token(token)
end
def self.update_last_feature_flag_updated_at!(project)
where(project: project).update_all(last_feature_flag_updated_at: Time.current)
end
def unleash_api_version
DEFAULT_UNLEASH_API_VERSION
end
def unleash_api_features
return [] unless unleash_app_name.present?
Operations::FeatureFlag.for_unleash_client(project, unleash_app_name)
end
def unleash_api_cache_key
"api_version:#{unleash_api_version}:" \
"app_name:#{unleash_app_name}:" \
"updated_at:#{last_feature_flag_updated_at.to_i}"
end
end
end

View file

@ -30,13 +30,19 @@ module Ci
versions_from_runners = Set[]
new_record_count = 0
Ci::Runner.distinct_each_batch(column: :version, of: VERSION_BATCH_SIZE) do |version_batch|
batch_versions = version_batch.pluck(:version)
batch_versions = version_batch.pluck(:version).to_set
versions_from_runners += batch_versions
new_record_count += Ci::RunnerVersion.bulk_insert!(
version_batch,
returns: :ids,
skip_duplicates: true,
validate: false).count
# Avoid hitting primary DB
already_existing_versions = Ci::RunnerVersion.where(version: batch_versions).pluck(:version)
new_versions = batch_versions - already_existing_versions
if new_versions.any?
new_record_count += Ci::RunnerVersion.insert_all(
new_versions.map { |v| { version: v } },
returning: :version,
unique_by: :version).count
end
end
{ versions_from_runners: versions_from_runners, new_record_count: new_record_count }

View file

@ -15,6 +15,10 @@ module FeatureFlags
protected
def update_last_feature_flag_updated_at!
Operations::FeatureFlagsClient.update_last_feature_flag_updated_at!(project)
end
def audit_event(feature_flag)
message = audit_message(feature_flag)

View file

@ -10,6 +10,8 @@ module FeatureFlags
feature_flag = project.operations_feature_flags.new(params)
if feature_flag.save
update_last_feature_flag_updated_at!
success(feature_flag: feature_flag)
else
error(feature_flag.errors.full_messages, 400)

View file

@ -13,6 +13,8 @@ module FeatureFlags
ApplicationRecord.transaction do
if feature_flag.destroy
update_last_feature_flag_updated_at!
success(feature_flag: feature_flag)
else
error(feature_flag.errors.full_messages)

View file

@ -29,6 +29,8 @@ module FeatureFlags
audit_event = audit_event(feature_flag)
if feature_flag.save
update_last_feature_flag_updated_at!
success(feature_flag: feature_flag, audit_event: audit_event)
else
error(feature_flag.errors.full_messages, :bad_request)

View file

@ -0,0 +1,32 @@
# frozen_string_literal: true
module WorkItems
module ParentLinks
class DestroyService < IssuableLinks::DestroyService
attr_reader :link, :current_user, :parent, :child
def initialize(link, user)
@link = link
@current_user = user
@parent = link.work_item_parent
@child = link.work_item
end
private
# TODO: Create system notes when work item's parent or children are removed
# See https://gitlab.com/gitlab-org/gitlab/-/issues/362213
def create_notes
# no-op
end
def not_found_message
_('No Work Item Link found')
end
def permission_to_remove_relation?
can?(current_user, :update_work_item, child) && can?(current_user, :update_work_item, parent)
end
end
end
end

View file

@ -10,16 +10,42 @@ module WorkItems
return feature_flag_error unless feature_flag_enabled?
return incompatible_args_error if incompatible_args?(params)
update_hierarchy(params)
if params.key?(:parent)
update_work_item_parent(params.delete(:parent))
elsif params.key?(:children_ids)
update_work_item_children(params.delete(:children_ids))
else
invalid_args_error
end
end
def update_hierarchy(params)
parent_id = params.delete(:parent_id)
children_ids = params.delete(:children_ids)
def update_work_item_parent(parent)
if parent.nil?
remove_parent
else
set_parent(parent)
end
end
return update_work_item_parent(parent_id) if parent_id
def set_parent(parent)
::WorkItems::ParentLinks::CreateService
.new(parent, current_user, { target_issuable: widget.work_item })
.execute
end
update_work_item_children(children_ids) if children_ids
# rubocop: disable CodeReuse/ActiveRecord
def remove_parent
link = ::WorkItems::ParentLink.find_by(work_item: widget.work_item)
return success unless link.present?
::WorkItems::ParentLinks::DestroyService.new(link, current_user).execute
end
# rubocop: enable CodeReuse/ActiveRecord
def update_work_item_children(children_ids)
::WorkItems::ParentLinks::CreateService
.new(widget.work_item, current_user, { issuable_references: children_ids })
.execute
end
def feature_flag_enabled?
@ -27,7 +53,7 @@ module WorkItems
end
def incompatible_args?(params)
params[:parent_id] && params[:children_ids]
params[:children_ids] && params[:parent]
end
def feature_flag_error
@ -38,26 +64,14 @@ module WorkItems
error(_('A Work Item can be a parent or a child, but not both.'))
end
def update_work_item_parent(parent_id)
begin
parent = ::WorkItem.find(parent_id)
rescue ActiveRecord::RecordNotFound
return parent_not_found_error(parent_id)
end
::WorkItems::ParentLinks::CreateService
.new(parent, current_user, { target_issuable: widget.work_item })
.execute
def invalid_args_error
error(_("One or more arguments are invalid: %{args}." % { args: params.keys.to_sentence } ))
end
def update_work_item_children(children_ids)
::WorkItems::ParentLinks::CreateService
.new(widget.work_item, current_user, { issuable_references: children_ids })
.execute
end
def service_response!(result)
return result unless result[:status] == :error
def parent_not_found_error(id)
error(_('No Work Item found with ID: %{id}.' % { id: id }))
raise WidgetError, result[:message]
end
end
end

View file

@ -7,9 +7,7 @@ module WorkItems
def after_create_in_transaction(params:)
return unless params.present?
result = handle_hierarchy_changes(params)
raise WidgetError, result[:message] if result[:status] == :error
service_response!(handle_hierarchy_changes(params))
end
end
end

View file

@ -7,9 +7,7 @@ module WorkItems
def before_update_in_transaction(params:)
return unless params.present?
result = handle_hierarchy_changes(params)
raise WidgetError, result[:message] if result[:status] == :error
service_response!(handle_hierarchy_changes(params))
end
end
end

View file

@ -0,0 +1,8 @@
---
name: cache_unleash_client_api
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/90490
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/365575
milestone: '15.2'
type: development
group: group::release
default_enabled: false

View file

@ -0,0 +1,9 @@
---
table_name: vulnerability_merge_request_links
classes:
- Vulnerabilities::MergeRequestLink
feature_categories:
- vulnerability_management
description: Join table between Vulnerabilities and Merge Requests
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/92096
milestone: '15.2'

View file

@ -0,0 +1,7 @@
# frozen_string_literal: true
class AddLastFeatureFlagUpdatedAtToOperationsFeatureFlagsClients < Gitlab::Database::Migration[2.0]
def change
add_column :operations_feature_flags_clients, :last_feature_flag_updated_at, :datetime_with_timezone
end
end

View file

@ -0,0 +1,29 @@
# frozen_string_literal: true
class CreateVulnerabilityMergeRequestLinks < Gitlab::Database::Migration[2.0]
INDEX_NAME = "unique_vuln_merge_request_link_vuln_id_and_mr_id"
def up
create_table :vulnerability_merge_request_links do |t|
t.bigint :vulnerability_id, null: false
t.integer :merge_request_id, null: false
t.index :merge_request_id
t.timestamps_with_timezone null: false
end
add_index(
:vulnerability_merge_request_links,
%i[vulnerability_id merge_request_id],
unique: true,
name: INDEX_NAME
)
end
def down
drop_table(
:vulnerability_merge_request_links,
if_exists: true
)
end
end

View file

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddVulnerabilitiesForeignKeyToVulnerabilityMergeRequestLinks < Gitlab::Database::Migration[2.0]
disable_ddl_transaction!
def up
add_concurrent_foreign_key :vulnerability_merge_request_links, :vulnerabilities, column: :vulnerability_id,
on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :vulnerability_merge_request_links, column: :vulnerability_id
end
end
end

View file

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddMergeRequestsForeignKeyToVulnerabilityMergeRequestLinks < Gitlab::Database::Migration[2.0]
disable_ddl_transaction!
def up
add_concurrent_foreign_key :vulnerability_merge_request_links, :merge_requests, column: :merge_request_id,
on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :vulnerability_merge_request_links, column: :merge_request_id
end
end
end

View file

@ -0,0 +1 @@
0511a510621fec3b4b22ac55f151ec3fd83206cc39e97ac3b93a61a80e7a43f8

View file

@ -0,0 +1 @@
a91b2e3c9f89c6b7a0e4330fe617b22ee3b22100fc868ef13b5c656580175816

View file

@ -0,0 +1 @@
925069c0dd5058e38da16496b140ea4139318a40c8207fcd7116d76562b0e959

View file

@ -0,0 +1 @@
9a41920cb988c3c5459e33c143f4bb97d8d6cf4fc691aa87f3fd7ef9f2a726f8

View file

@ -17905,7 +17905,8 @@ CREATE TABLE operations_feature_flags (
CREATE TABLE operations_feature_flags_clients (
id bigint NOT NULL,
project_id integer NOT NULL,
token_encrypted character varying
token_encrypted character varying,
last_feature_flag_updated_at timestamp with time zone
);
CREATE SEQUENCE operations_feature_flags_clients_id_seq
@ -22208,6 +22209,23 @@ CREATE SEQUENCE vulnerability_issue_links_id_seq
ALTER SEQUENCE vulnerability_issue_links_id_seq OWNED BY vulnerability_issue_links.id;
CREATE TABLE vulnerability_merge_request_links (
id bigint NOT NULL,
vulnerability_id bigint NOT NULL,
merge_request_id integer NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL
);
CREATE SEQUENCE vulnerability_merge_request_links_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE vulnerability_merge_request_links_id_seq OWNED BY vulnerability_merge_request_links.id;
CREATE TABLE vulnerability_occurrence_identifiers (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@ -23629,6 +23647,8 @@ ALTER TABLE ONLY vulnerability_identifiers ALTER COLUMN id SET DEFAULT nextval('
ALTER TABLE ONLY vulnerability_issue_links ALTER COLUMN id SET DEFAULT nextval('vulnerability_issue_links_id_seq'::regclass);
ALTER TABLE ONLY vulnerability_merge_request_links ALTER COLUMN id SET DEFAULT nextval('vulnerability_merge_request_links_id_seq'::regclass);
ALTER TABLE ONLY vulnerability_occurrence_identifiers ALTER COLUMN id SET DEFAULT nextval('vulnerability_occurrence_identifiers_id_seq'::regclass);
ALTER TABLE ONLY vulnerability_occurrence_pipelines ALTER COLUMN id SET DEFAULT nextval('vulnerability_occurrence_pipelines_id_seq'::regclass);
@ -25936,6 +25956,9 @@ ALTER TABLE ONLY vulnerability_identifiers
ALTER TABLE ONLY vulnerability_issue_links
ADD CONSTRAINT vulnerability_issue_links_pkey PRIMARY KEY (id);
ALTER TABLE ONLY vulnerability_merge_request_links
ADD CONSTRAINT vulnerability_merge_request_links_pkey PRIMARY KEY (id);
ALTER TABLE ONLY vulnerability_occurrence_identifiers
ADD CONSTRAINT vulnerability_occurrence_identifiers_pkey PRIMARY KEY (id);
@ -30053,6 +30076,8 @@ CREATE UNIQUE INDEX index_vulnerability_identifiers_on_project_id_and_fingerprin
CREATE INDEX index_vulnerability_issue_links_on_issue_id ON vulnerability_issue_links USING btree (issue_id);
CREATE INDEX index_vulnerability_merge_request_links_on_merge_request_id ON vulnerability_merge_request_links USING btree (merge_request_id);
CREATE INDEX index_vulnerability_occurrence_identifiers_on_identifier_id ON vulnerability_occurrence_identifiers USING btree (identifier_id);
CREATE UNIQUE INDEX index_vulnerability_occurrence_identifiers_on_unique_keys ON vulnerability_occurrence_identifiers USING btree (occurrence_id, identifier_id);
@ -30271,6 +30296,8 @@ CREATE UNIQUE INDEX unique_merge_request_metrics_by_merge_request_id ON merge_re
CREATE UNIQUE INDEX unique_projects_on_name_namespace_id ON projects USING btree (name, namespace_id);
CREATE UNIQUE INDEX unique_vuln_merge_request_link_vuln_id_and_mr_id ON vulnerability_merge_request_links USING btree (vulnerability_id, merge_request_id);
CREATE INDEX user_follow_users_followee_id_idx ON user_follow_users USING btree (followee_id);
CREATE INDEX users_forbidden_state_idx ON users USING btree (id) WHERE ((confirmed_at IS NOT NULL) AND ((state)::text <> ALL (ARRAY['blocked'::text, 'banned'::text, 'ldap_blocked'::text])));
@ -31777,6 +31804,9 @@ ALTER TABLE ONLY members
ALTER TABLE ONLY lfs_objects_projects
ADD CONSTRAINT fk_2eb33f7a78 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE NOT VALID;
ALTER TABLE ONLY vulnerability_merge_request_links
ADD CONSTRAINT fk_2ef3954596 FOREIGN KEY (vulnerability_id) REFERENCES vulnerabilities(id) ON DELETE CASCADE;
ALTER TABLE ONLY analytics_cycle_analytics_group_stages
ADD CONSTRAINT fk_3078345d6d FOREIGN KEY (stage_event_hash_id) REFERENCES analytics_cycle_analytics_stage_event_hashes(id) ON DELETE CASCADE;
@ -31939,6 +31969,9 @@ ALTER TABLE ONLY projects
ALTER TABLE ONLY dast_profile_schedules
ADD CONSTRAINT fk_6cca0d8800 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY vulnerability_merge_request_links
ADD CONSTRAINT fk_6d7aa8796e FOREIGN KEY (merge_request_id) REFERENCES merge_requests(id) ON DELETE CASCADE;
ALTER TABLE ONLY issues
ADD CONSTRAINT fk_6e10d4d38a FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE SET NULL;

View file

@ -22148,7 +22148,7 @@ A time-frame defined as a closed inclusive range of two dates.
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="workitemwidgethierarchyupdateinputchildrenids"></a>`childrenIds` | [`[WorkItemID!]`](#workitemid) | Global IDs of children work items. |
| <a id="workitemwidgethierarchyupdateinputparentid"></a>`parentId` | [`WorkItemID`](#workitemid) | Global ID of the parent work item. |
| <a id="workitemwidgethierarchyupdateinputparentid"></a>`parentId` | [`WorkItemID`](#workitemid) | Global ID of the parent work item. Use `null` to remove the association. |
### `WorkItemWidgetWeightInput`

View file

@ -441,7 +441,7 @@ GitLab also returns the following additional pagination headers:
| `x-next-page` | The index of the next page. |
| `x-page` | The index of the current page (starting at 1). |
| `x-per-page` | The number of items per page. |
| `X-prev-page` | The index of the previous page. |
| `x-prev-page` | The index of the previous page. |
| `x-total` | The total number of items. |
| `x-total-pages` | The total number of pages. |

View file

@ -314,6 +314,66 @@ background migration.
After the batched migration is completed, you can safely depend on the
data in `routes.namespace_id` being populated.
### Batching over non-distinct columns
The default batching strategy provides an efficient way to iterate over primary key columns.
However, if you need to iterate over columns where values are not unique, you must use a
different batching strategy.
The `LooseIndexScanBatchingStrategy` batching strategy uses a special version of [`EachBatch`](../iterating_tables_in_batches.md#loose-index-scan-with-distinct_each_batch)
to provide efficient and stable iteration over the distinct column values.
This example shows a batched background migration where the `issues.project_id` column is used as
the batching column.
Database post-migration:
```ruby
class ProjectsWithIssuesMigration < Gitlab::Database::Migration[2.0]
MIGRATION = 'BatchProjectsWithIssues'
INTERVAL = 2.minutes
BATCH_SIZE = 5000
SUB_BATCH_SIZE = 500
restrict_gitlab_migration gitlab_schema: :gitlab_main
disable_ddl_transaction!
def up
queue_batched_background_migration(
MIGRATION,
:issues,
:project_id,
job_interval: INTERVAL,
batch_size: BATCH_SIZE,
batch_class_name: 'LooseIndexScanBatchingStrategy', # Override the default batching strategy
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(MIGRATION, :issues, :project_id, [])
end
end
```
Implementing the background migration class:
```ruby
module Gitlab
module BackgroundMigration
class BatchProjectsWithIssues < Gitlab::BackgroundMigration::BatchedMigrationJob
include Gitlab::Database::DynamicModelHelpers
def perform
distinct_each_batch(operation_name: :backfill_issues) do |batch|
project_ids = batch.pluck(batch_column)
# do something with the distinct project_ids
end
end
end
end
end
```
## Testing
Writing tests is required for:

View file

@ -117,36 +117,28 @@ GitLab has several features which can help you manage the number of users:
users manually.
- View a breakdown of users by role in the [Users statistics](../../user/admin_area/index.md#users-statistics) page.
## Cloud licensing
## Sync your subscription data with GitLab
> Introduced in GitLab 14.1.
Cloud licensing manages licenses for self-managed GitLab subscription plans. Cloud licensing includes:
To sync subscription data between your self-managed instance and GitLab, you must [activate your instance](../../user/admin_area/license.md) with an
activation code.
- Activation: Unlock plan features and activate your self-managed instance by using an activation code.
- License sync: Sync subscription data between your self-managed instance and GitLab.
After you activate your instance, the following processes are automated:
### How cloud licensing works
- [Quarterly subscription reconciliation](../quarterly_reconciliation.md).
- Subscription renewals.
- Subscription updates, such as adding more seats or upgrading a GitLab tier.
#### Add your license
At approximately 03:00 UTC, a daily sync job sends subscription data to the Customers Portal. For this reason, updates and renewals may not
apply immediately.
1. When you purchase a GitLab self-managed plan, an activation code is generated.
This activation code is sent to the email address associated with the Customers Portal account.
1. In GitLab, on the top bar, select **Menu > Admin**.
1. On the left sidebar, select **Subscription** and paste the activation code in the text field.
1. Select **Add license**.
The data is sent securely through an encrypted HTTPS connection to `customers.gitlab.com` on port `443`.
If the job fails, it retries up to 12 times over approximately 17 hours.
The page displays the details of the subscription.
### Subscription data that GitLab receives
#### License sync
Once a day, a job sends license data to the Customers Portal. This information automates activation,
provisioning, co-terms, and renewals. The data is sent securely through an encrypted HTTPS connection
to `customers.gitlab.com` on port `443`.
This sync job runs daily around 3AM UTC. If the job fails, it is retried up to 12 times over approximately 17 hours.
The daily job provides **only** the following information to the Customers Portal:
The daily sync job sends **only** the following information to the Customers Portal:
- Date
- Timestamp
@ -160,7 +152,7 @@ The daily job provides **only** the following information to the Customers Porta
- Hostname
- Instance ID
Example of a cloud licensing sync request:
Example of a license sync request:
```json
{
@ -211,7 +203,12 @@ Example of a cloud licensing sync request:
}
```
#### Sync subscription details
### Troubleshoot automatic subscription sync
If the sync job is not working, ensure you allow network traffic from your GitLab instance
to IP address `104.18.26.123:443` (`customers.gitlab.com`).
## Manually sync your subscription details
You can manually sync your subscription details at any time.
@ -221,11 +218,6 @@ You can manually sync your subscription details at any time.
A job is queued. When the job finishes, the subscription details are updated.
#### Troubleshooting cloud licensing sync
If the sync job is not working, ensure you allow network traffic from your GitLab instance
to IP address `104.18.26.123:443` (`customers.gitlab.com`).
## Obtain a subscription
To subscribe to GitLab through a GitLab self-managed installation:
@ -271,7 +263,7 @@ If you are an administrator, you can export your license usage into a CSV:
1. On the left sidebar, select **Subscription**.
1. In the top right, select **Export license usage file**.
This file contains the information GitLab uses to manually process quarterly reconciliations or renewals. If your instance is firewalled or in an offline environment, you must provide GitLab with this information.
This file contains the information GitLab uses to manually process quarterly reconciliations or renewals. If your instance is firewalled or an offline environment, you must provide GitLab with this information.
The **License Usage** CSV includes the following details:

View file

@ -0,0 +1,12 @@
# frozen_string_literal: true
module API
module Entities
module Unleash
class ClientFeatureFlags < Grape::Entity
expose :unleash_api_version, as: :version
expose :unleash_api_features, as: :features, using: ::API::Entities::UnleashFeature
end
end
end
end

View file

@ -44,9 +44,13 @@ module API
requires :user_xids, type: String, desc: 'A comma separated list of external user ids'
end
post do
# TODO: Move the business logic to a service class in app/services/feature_flags.
# https://gitlab.com/gitlab-org/gitlab/-/issues/367021
list = user_project.operations_feature_flags_user_lists.create(declared_params)
if list.save
update_last_feature_flag_updated_at!
present list, with: ::API::Entities::FeatureFlag::UserList
else
render_api_error!(list.errors.full_messages, :bad_request)
@ -76,9 +80,13 @@ module API
optional :user_xids, type: String, desc: 'A comma separated list of external user ids'
end
put do
# TODO: Move the business logic to a service class in app/services/feature_flags.
# https://gitlab.com/gitlab-org/gitlab/-/issues/367021
list = user_project.operations_feature_flags_user_lists.find_by_iid!(params[:iid])
if list.update(declared_params(include_missing: false))
update_last_feature_flag_updated_at!
present list, with: ::API::Entities::FeatureFlag::UserList
else
render_api_error!(list.errors.full_messages, :bad_request)
@ -89,8 +97,14 @@ module API
detail 'This feature was introduced in GitLab 12.10'
end
delete do
# TODO: Move the business logic to a service class in app/services/feature_flags.
# https://gitlab.com/gitlab-org/gitlab/-/issues/367021
list = user_project.operations_feature_flags_user_lists.find_by_iid!(params[:iid])
unless list.destroy
if list.destroy
update_last_feature_flag_updated_at!
nil
else
render_api_error!(list.errors.full_messages, :conflict)
end
end
@ -101,6 +115,10 @@ module API
def authorize_admin_feature_flags_user_lists!
authorize! :admin_feature_flags_user_lists, user_project
end
def update_last_feature_flag_updated_at!
Operations::FeatureFlagsClient.update_last_feature_flag_updated_at!(user_project)
end
end
end
end

View file

@ -25,14 +25,22 @@ module API
desc 'Get a list of features (deprecated, v2 client support)'
get 'features' do
present :version, 1
present :features, feature_flags, with: ::API::Entities::UnleashFeature
if ::Feature.enabled?(:cache_unleash_client_api, project)
present_feature_flags
else
present :version, 1
present :features, feature_flags, with: ::API::Entities::UnleashFeature
end
end
desc 'Get a list of features'
get 'client/features' do
present :version, 1
present :features, feature_flags, with: ::API::Entities::UnleashFeature
if ::Feature.enabled?(:cache_unleash_client_api, project)
present_feature_flags
else
present :version, 1
present :features, feature_flags, with: ::API::Entities::UnleashFeature
end
end
post 'client/register' do
@ -49,10 +57,24 @@ module API
end
helpers do
def present_feature_flags
present_cached feature_flags_client,
with: ::API::Entities::Unleash::ClientFeatureFlags,
cache_context: -> (client) { client.unleash_api_cache_key }
end
def project
@project ||= find_project(params[:project_id])
end
def feature_flags_client
strong_memoize(:feature_flags_client) do
client = Operations::FeatureFlagsClient.find_for_project_and_token(project, unleash_instance_id)
client.unleash_app_name = unleash_app_name if client
client
end
end
def unleash_instance_id
env['HTTP_UNLEASH_INSTANCEID'] || params[:instance_id]
end
@ -62,8 +84,7 @@ module API
end
def authorize_by_unleash_instance_id!
unauthorized! unless Operations::FeatureFlagsClient
.find_for_project_and_token(project, unleash_instance_id)
unauthorized! unless feature_flags_client
end
def feature_flags

View file

@ -44,7 +44,19 @@ module Gitlab
end
end
def parent_batch_relation(batching_scope)
def distinct_each_batch(operation_name: :default, batching_arguments: {})
all_batching_arguments = { column: batch_column, of: sub_batch_size }.merge(batching_arguments)
parent_batch_relation.distinct_each_batch(**all_batching_arguments) do |relation|
batch_metrics.instrument_operation(operation_name) do
yield relation
end
sleep([pause_ms, 0].max * 0.001)
end
end
def parent_batch_relation(batching_scope = nil)
parent_relation = define_batchable_model(batch_table, connection: connection)
.where(batch_column => start_id..end_id)

View file

@ -0,0 +1,37 @@
# frozen_string_literal: true
module Gitlab
  module BackgroundMigration
    module BatchingStrategies
      # Batching strategy that iterates a column with repeated (non-distinct)
      # values via a loose index scan. A common use case is batching over a
      # foreign key column, for example issues.project_id.
      class LooseIndexScanBatchingStrategy < BaseStrategy
        include Gitlab::Database::DynamicModelHelpers

        # Computes the bounds of the next batch in the table.
        #
        # table_name      - the table to batch over
        # column_name     - the column to batch over
        # batch_min_value - the minimum value which the next batch will start at
        # batch_size      - the number of distinct values in the next batch
        # job_arguments   - the migration job arguments (unused here)
        # job_class       - the migration job class (unused here)
        #
        # Returns [min, max] of the next batch, or nil when no rows remain.
        def next_batch(table_name, column_name, batch_min_value:, batch_size:, job_arguments:, job_class: nil)
          model = define_batchable_model(table_name, connection: connection)
          quoted_column = model.connection.quote_column_name(column_name)
          scope = model.where("#{quoted_column} >= ?", batch_min_value)

          bounds = nil
          # Only the first yielded batch is needed, so we break immediately.
          scope.distinct_each_batch(of: batch_size, column: column_name) do |batch| # rubocop:disable Lint/UnreachableLoop
            bounds = batch.pluck(Arel.sql("MIN(#{quoted_column}), MAX(#{quoted_column})")).first
            break
          end

          bounds
        end
      end
    end
  end
end

View file

@ -554,6 +554,7 @@ vulnerability_flags: :gitlab_main
vulnerability_historical_statistics: :gitlab_main
vulnerability_identifiers: :gitlab_main
vulnerability_issue_links: :gitlab_main
vulnerability_merge_request_links: :gitlab_main
vulnerability_occurrence_identifiers: :gitlab_main
vulnerability_occurrence_pipelines: :gitlab_main
vulnerability_occurrences: :gitlab_main

View file

@ -25908,7 +25908,7 @@ msgstr ""
msgid "No Scopes"
msgstr ""
msgid "No Work Item found with ID: %{id}."
msgid "No Work Item Link found"
msgstr ""
msgid "No active admin user found"
@ -27039,6 +27039,9 @@ msgid_plural "%d more items"
msgstr[0] ""
msgstr[1] ""
msgid "One or more arguments are invalid: %{args}."
msgstr ""
msgid "One or more contacts were successfully added."
msgstr ""

View file

@ -172,7 +172,7 @@ module QA
Page::Project::Menu.perform(&:go_to_container_registry)
Page::Project::Registry::Show.perform do |registry|
expect(registry).to have_registry_repository(project.path_with_namespace)
expect(registry).to have_registry_repository(project.name)
registry.click_on_image(project.path_with_namespace)
expect(registry).to have_tag('master')
@ -230,7 +230,7 @@ module QA
Page::Project::Menu.perform(&:go_to_container_registry)
Page::Project::Registry::Show.perform do |registry|
expect(registry).to have_registry_repository(project.path_with_namespace)
expect(registry).to have_registry_repository(project.name)
registry.click_on_image(project.path_with_namespace)

View file

@ -77,7 +77,7 @@ module QA
Page::Project::Menu.perform(&:go_to_container_registry)
Page::Project::Registry::Show.perform do |registry|
expect(registry).to have_registry_repository(registry_repository.name)
expect(registry).to have_registry_repository(project.name)
registry.click_on_image(registry_repository.name)
expect(registry).to have_tag('master')

View file

@ -470,7 +470,7 @@ RSpec.describe DiffHelper do
end
describe '#conflicts' do
let(:merge_request) { instance_double(MergeRequest) }
let(:merge_request) { instance_double(MergeRequest, cannot_be_merged?: true) }
let(:merge_ref_head_diff) { true }
let(:can_be_resolved_in_ui?) { true }
let(:allow_tree_conflicts) { false }
@ -504,6 +504,14 @@ RSpec.describe DiffHelper do
end
end
context 'when merge request can be merged' do
let(:merge_request) { instance_double(MergeRequest, cannot_be_merged?: false) }
it 'returns nil' do
expect(helper.conflicts).to be_nil
end
end
context 'when conflicts cannot be resolved in UI' do
let(:can_be_resolved_in_ui?) { false }

View file

@ -92,5 +92,69 @@ RSpec.describe Gitlab::BackgroundMigration::BatchedMigrationJob do
end
end
end
context 'when the subclass uses distinct each batch' do
let(:job_instance) do
job_class.new(start_id: 1,
end_id: 100,
batch_table: '_test_table',
batch_column: 'from_column',
sub_batch_size: 2,
pause_ms: 10,
connection: connection)
end
let(:job_class) do
Class.new(described_class) do
def perform(*job_arguments)
distinct_each_batch(operation_name: :insert) do |sub_batch|
sub_batch.pluck(:from_column).each do |value|
connection.execute("INSERT INTO _test_insert_table VALUES (#{value})")
end
sub_batch.size
end
end
end
end
let(:test_table) { table(:_test_table) }
let(:test_insert_table) { table(:_test_insert_table) }
before do
allow(job_instance).to receive(:sleep)
connection.create_table :_test_table do |t|
t.timestamps_with_timezone null: false
t.integer :from_column, null: false
end
connection.create_table :_test_insert_table, id: false do |t|
t.integer :to_column
t.index :to_column, unique: true
end
test_table.create!(id: 1, from_column: 5)
test_table.create!(id: 2, from_column: 10)
test_table.create!(id: 3, from_column: 10)
test_table.create!(id: 4, from_column: 5)
test_table.create!(id: 5, from_column: 15)
end
after do
connection.drop_table(:_test_table)
connection.drop_table(:_test_insert_table)
end
it 'calls the operation for each distinct batch' do
expect { perform_job }.to change { test_insert_table.pluck(:to_column) }.from([]).to([5, 10, 15])
end
it 'stores the affected rows' do
perform_job
expect(job_instance.batch_metrics.affected_rows[:insert]).to contain_exactly(2, 1)
end
end
end
end

View file

@ -0,0 +1,67 @@
# frozen_string_literal: true
require 'spec_helper'
# Exercises the loose index scan strategy over issues.project_id, a column
# with repeated values: each batch spans a count of DISTINCT values.
RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::LooseIndexScanBatchingStrategy, '#next_batch' do
  let(:batching_strategy) { described_class.new(connection: ActiveRecord::Base.connection) }
  let(:namespaces) { table(:namespaces) }
  let(:projects) { table(:projects) }
  let(:issues) { table(:issues) }

  # NOTE(review): creation order matters — the expected bounds below assume
  # project1..project5 receive ascending ids.
  let!(:namespace1) { namespaces.create!(name: 'ns1', path: 'ns1') }
  let!(:namespace2) { namespaces.create!(name: 'ns2', path: 'ns2') }
  let!(:namespace3) { namespaces.create!(name: 'ns3', path: 'ns3') }
  let!(:namespace4) { namespaces.create!(name: 'ns4', path: 'ns4') }
  let!(:namespace5) { namespaces.create!(name: 'ns5', path: 'ns5') }
  let!(:project1) { projects.create!(name: 'p1', namespace_id: namespace1.id, project_namespace_id: namespace1.id) }
  let!(:project2) { projects.create!(name: 'p2', namespace_id: namespace2.id, project_namespace_id: namespace2.id) }
  let!(:project3) { projects.create!(name: 'p3', namespace_id: namespace3.id, project_namespace_id: namespace3.id) }
  let!(:project4) { projects.create!(name: 'p4', namespace_id: namespace4.id, project_namespace_id: namespace4.id) }
  let!(:project5) { projects.create!(name: 'p5', namespace_id: namespace5.id, project_namespace_id: namespace5.id) }

  # Issues reference projects multiple times (project2 three times), so
  # distinct project_id values are project1..project5.
  let!(:issue1) { issues.create!(title: 'title', description: 'description', project_id: project2.id) }
  let!(:issue2) { issues.create!(title: 'title', description: 'description', project_id: project1.id) }
  let!(:issue3) { issues.create!(title: 'title', description: 'description', project_id: project2.id) }
  let!(:issue4) { issues.create!(title: 'title', description: 'description', project_id: project3.id) }
  let!(:issue5) { issues.create!(title: 'title', description: 'description', project_id: project2.id) }
  let!(:issue6) { issues.create!(title: 'title', description: 'description', project_id: project4.id) }
  let!(:issue7) { issues.create!(title: 'title', description: 'description', project_id: project5.id) }

  it { expect(described_class).to be < Gitlab::BackgroundMigration::BatchingStrategies::BaseStrategy }

  context 'when starting on the first batch' do
    # Two distinct values (project1, project2) fit in a batch of size 2.
    it 'returns the bounds of the next batch' do
      batch_bounds = batching_strategy
        .next_batch(:issues, :project_id, batch_min_value: project1.id, batch_size: 2, job_arguments: [])

      expect(batch_bounds).to eq([project1.id, project2.id])
    end
  end

  context 'when additional batches remain' do
    # From project2, the next 3 distinct values are project2..project4.
    it 'returns the bounds of the next batch' do
      batch_bounds = batching_strategy
        .next_batch(:issues, :project_id, batch_min_value: project2.id, batch_size: 3, job_arguments: [])

      expect(batch_bounds).to eq([project2.id, project4.id])
    end
  end

  context 'when on the final batch' do
    # Only project4 and project5 remain, fewer than the batch size of 3.
    it 'returns the bounds of the next batch' do
      batch_bounds = batching_strategy
        .next_batch(:issues, :project_id, batch_min_value: project4.id, batch_size: 3, job_arguments: [])

      expect(batch_bounds).to eq([project4.id, project5.id])
    end
  end

  context 'when no additional batches remain' do
    # Starting past the largest project_id yields no rows.
    it 'returns nil' do
      batch_bounds = batching_strategy
        .next_batch(:issues, :project_id, batch_min_value: project5.id + 1, batch_size: 1, job_arguments: [])

      expect(batch_bounds).to be_nil
    end
  end
end

View file

@ -3,7 +3,15 @@
require 'spec_helper'
RSpec.describe Operations::FeatureFlagsClient do
subject { create(:operations_feature_flags_client) }
let_it_be(:project) { create(:project) }
let!(:client) { create(:operations_feature_flags_client, project: project) }
subject { client }
before do
client.unleash_app_name = 'production'
end
describe 'associations' do
it { is_expected.to belong_to(:project) }
@ -18,4 +26,64 @@ RSpec.describe Operations::FeatureFlagsClient do
expect(subject.token).not_to be_empty
end
end
describe '.update_last_feature_flag_updated_at!' do
subject { described_class.update_last_feature_flag_updated_at!(project) }
it 'updates the last_feature_flag_updated_at of the project client' do
freeze_time do
expect { subject }.to change { client.reload.last_feature_flag_updated_at }.from(nil).to(Time.current)
end
end
end
describe '#unleash_api_version' do
subject { client.unleash_api_version }
it { is_expected.to eq(described_class::DEFAULT_UNLEASH_API_VERSION) }
end
describe '#unleash_api_features' do
subject { client.unleash_api_features }
it 'fetches' do
expect(Operations::FeatureFlag).to receive(:for_unleash_client).with(project, 'production').once
subject
end
context 'when unleash app name is not set' do
before do
client.unleash_app_name = nil
end
it 'does not fetch' do
expect(Operations::FeatureFlag).not_to receive(:for_unleash_client)
subject
end
end
end
describe '#unleash_api_cache_key' do
subject { client.unleash_api_cache_key }
it 'constructs the cache key' do
is_expected.to eq("api_version:#{client.unleash_api_version}"\
":app_name:#{client.unleash_app_name}"\
":updated_at:#{client.last_feature_flag_updated_at.to_i}")
end
context 'when unleash app name is not set' do
before do
client.unleash_app_name = nil
end
it 'constructs the cache key without unleash app name' do
is_expected.to eq("api_version:#{client.unleash_api_version}"\
":app_name:"\
":updated_at:#{client.last_feature_flag_updated_at.to_i}")
end
end
end
end

View file

@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::FeatureFlagsUserLists do
let_it_be(:project, refind: true) { create(:project) }
let_it_be(:client, refind: true) { create(:operations_feature_flags_client, project: project) }
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
@ -215,6 +216,7 @@ RSpec.describe API::FeatureFlagsUserLists do
}
expect(response).to have_gitlab_http_status(:forbidden)
expect(client.reload.last_feature_flag_updated_at).to be_nil
end
it 'creates the flag' do
@ -231,6 +233,7 @@ RSpec.describe API::FeatureFlagsUserLists do
})
expect(project.operations_feature_flags_user_lists.count).to eq(1)
expect(project.operations_feature_flags_user_lists.last.name).to eq('mylist')
expect(client.reload.last_feature_flag_updated_at).not_to be_nil
end
it 'requires name' do
@ -298,6 +301,7 @@ RSpec.describe API::FeatureFlagsUserLists do
}
expect(response).to have_gitlab_http_status(:forbidden)
expect(client.reload.last_feature_flag_updated_at).to be_nil
end
it 'updates the list' do
@ -313,6 +317,7 @@ RSpec.describe API::FeatureFlagsUserLists do
'user_xids' => '456,789'
})
expect(list.reload.name).to eq('mylist')
expect(client.reload.last_feature_flag_updated_at).not_to be_nil
end
it 'preserves attributes not listed in the request' do
@ -377,6 +382,7 @@ RSpec.describe API::FeatureFlagsUserLists do
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response).to eq({ 'message' => '404 Not found' })
expect(client.reload.last_feature_flag_updated_at).to be_nil
end
it 'deletes the list' do
@ -387,6 +393,7 @@ RSpec.describe API::FeatureFlagsUserLists do
expect(response).to have_gitlab_http_status(:no_content)
expect(response.body).to be_blank
expect(project.operations_feature_flags_user_lists.count).to eq(0)
expect(client.reload.last_feature_flag_updated_at).not_to be_nil
end
it 'does not delete the list if it is associated with a strategy' do

View file

@ -124,6 +124,16 @@ RSpec.describe 'Create a work item' do
expect(mutation_response['workItem']).to be_nil
end
end
context 'when parent work item is not found' do
let_it_be(:parent) { build_stubbed(:work_item, id: non_existing_record_id)}
it 'returns a top level error' do
post_graphql_mutation(mutation, current_user: current_user)
expect(graphql_errors.first['message']).to include('No object found for `parentId')
end
end
end
end

View file

@ -166,13 +166,13 @@ RSpec.describe 'Update a work item' do
context 'when updating parent' do
let_it_be(:work_item) { create(:work_item, :task, project: project) }
let_it_be(:valid_parent) { create(:work_item, project: project) }
let_it_be(:invalid_parent) { create(:work_item, :task, project: project) }
context 'when parent work item type is invalid' do
let_it_be(:parent_task) { create(:work_item, :task, project: project) }
let(:error) { "#{work_item.to_reference} cannot be added: Only Issue can be parent of Task." }
let(:input) do
{ 'hierarchyWidget' => { 'parentId' => parent_task.to_global_id.to_s }, 'title' => 'new title' }
{ 'hierarchyWidget' => { 'parentId' => invalid_parent.to_global_id.to_s }, 'title' => 'new title' }
end
it 'returns response with errors' do
@ -187,21 +187,19 @@ RSpec.describe 'Update a work item' do
end
context 'when parent work item has a valid type' do
let_it_be(:parent) { create(:work_item, project: project) }
let(:input) { { 'hierarchyWidget' => { 'parentId' => parent.to_global_id.to_s } } }
let(:input) { { 'hierarchyWidget' => { 'parentId' => valid_parent.to_global_id.to_s } } }
it 'sets the parent for the work item' do
expect do
post_graphql_mutation(mutation, current_user: current_user)
work_item.reload
end.to change(work_item, :work_item_parent).from(nil).to(parent)
end.to change(work_item, :work_item_parent).from(nil).to(valid_parent)
expect(response).to have_gitlab_http_status(:success)
expect(widgets_response).to include(
{
'children' => { 'edges' => [] },
'parent' => { 'id' => parent.to_global_id.to_s },
'parent' => { 'id' => valid_parent.to_global_id.to_s },
'type' => 'HIERARCHY'
}
)
@ -218,10 +216,62 @@ RSpec.describe 'Update a work item' do
expect do
post_graphql_mutation(mutation, current_user: current_user)
work_item.reload
end.to change(work_item, :work_item_parent).from(existing_parent).to(parent)
end.to change(work_item, :work_item_parent).from(existing_parent).to(valid_parent)
end
end
end
context 'when parentId is null' do
let(:input) { { 'hierarchyWidget' => { 'parentId' => nil } } }
context 'when parent is present' do
before do
work_item.update!(work_item_parent: valid_parent)
end
it 'removes parent and returns success message' do
expect do
post_graphql_mutation(mutation, current_user: current_user)
work_item.reload
end.to change(work_item, :work_item_parent).from(valid_parent).to(nil)
expect(response).to have_gitlab_http_status(:success)
expect(widgets_response)
.to include(
{
'children' => { 'edges' => [] },
'parent' => nil,
'type' => 'HIERARCHY'
}
)
end
end
context 'when parent is not present' do
before do
work_item.update!(work_item_parent: nil)
end
it 'does not change work item and returns success message' do
expect do
post_graphql_mutation(mutation, current_user: current_user)
work_item.reload
end.not_to change(work_item, :work_item_parent)
expect(response).to have_gitlab_http_status(:success)
end
end
end
context 'when parent work item is not found' do
let(:input) { { 'hierarchyWidget' => { 'parentId' => "gid://gitlab/WorkItem/#{non_existing_record_id}" } } }
it 'returns a top level error' do
post_graphql_mutation(mutation, current_user: current_user)
expect(graphql_errors.first['message']).to include('No object found for `parentId')
end
end
end
context 'when updating children' do

View file

@ -168,7 +168,7 @@ RSpec.describe API::Unleash do
end
%w(/feature_flags/unleash/:project_id/features /feature_flags/unleash/:project_id/client/features).each do |features_endpoint|
describe "GET #{features_endpoint}" do
describe "GET #{features_endpoint}", :use_clean_rails_redis_caching do
let(:features_url) { features_endpoint.sub(':project_id', project_id.to_s) }
let(:client) { create(:operations_feature_flags_client, project: project) }
@ -176,6 +176,46 @@ RSpec.describe API::Unleash do
it_behaves_like 'authenticated request'
context 'when a client fetches feature flags several times' do
let(:headers) { { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' } }
before do
create_list(:operations_feature_flag, 3, project: project)
end
it 'serializes feature flags for the first time and read cached data from the second time' do
expect(API::Entities::Unleash::ClientFeatureFlags)
.to receive(:represent).with(instance_of(Operations::FeatureFlagsClient), any_args)
.once
5.times { get api(features_url), params: params, headers: headers }
end
it 'increments the cache key when feature flags are modified' do
expect(API::Entities::Unleash::ClientFeatureFlags)
.to receive(:represent).with(instance_of(Operations::FeatureFlagsClient), any_args)
.twice
2.times { get api(features_url), params: params, headers: headers }
::FeatureFlags::CreateService.new(project, project.owner, name: 'feature_flag').execute
3.times { get api(features_url), params: params, headers: headers }
end
context 'when cache_unleash_client_api is disabled' do
before do
stub_feature_flags(cache_unleash_client_api: false)
end
it 'serializes feature flags every time' do
expect(::API::Entities::UnleashFeature).to receive(:represent).exactly(5).times
5.times { get api(features_url), params: params, headers: headers }
end
end
end
context 'with version 2 feature flags' do
it 'does not return a flag without any strategies' do
create(:operations_feature_flag, project: project,

View file

@ -100,6 +100,7 @@ RSpec.describe DiffsEntity do
let(:options) { super().merge(merge_ref_head_diff: merge_ref_head_diff) }
before do
allow(merge_request).to receive(:cannot_be_merged?).and_return(true)
allow(MergeRequests::Conflicts::ListService).to receive(:new).and_return(conflicts)
end

View file

@ -65,6 +65,7 @@ RSpec.describe DiffsMetadataEntity do
let(:conflicts) { double(conflicts: double(files: [conflict_file]), can_be_resolved_in_ui?: false) }
before do
allow(merge_request).to receive(:cannot_be_merged?).and_return(true)
allow(MergeRequests::Conflicts::ListService).to receive(:new).and_return(conflicts)
end

View file

@ -43,6 +43,7 @@ RSpec.describe PaginatedDiffEntity do
let(:options) { super().merge(merge_ref_head_diff: merge_ref_head_diff) }
before do
allow(merge_request).to receive(:cannot_be_merged?).and_return(true)
allow(MergeRequests::Conflicts::ListService).to receive(:new).and_return(conflicts)
end

View file

@ -32,6 +32,12 @@ RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute'
end
it 'creates and updates expected ci_runner_versions entries', :aggregate_failures do
expect(Ci::RunnerVersion).to receive(:insert_all)
.ordered
.with([{ version: '14.0.2' }], anything)
.once
.and_call_original
result = nil
expect { result = execute }
.to change { runner_version_14_0_0.reload.status }.from('not_available').to('recommended')

View file

@ -41,6 +41,8 @@ RSpec.describe FeatureFlags::CreateService do
subject
end
it_behaves_like 'does not update feature flag client'
end
context 'when feature flag is saved correctly' do
@ -62,6 +64,8 @@ RSpec.describe FeatureFlags::CreateService do
expect { subject }.to change { Operations::FeatureFlag.count }.by(1)
end
it_behaves_like 'update feature flag client'
context 'when Jira Connect subscription does not exist' do
it 'does not sync the feature flag to Jira' do
expect(::JiraConnect::SyncFeatureFlagsWorker).not_to receive(:perform_async)

View file

@ -36,6 +36,8 @@ RSpec.describe FeatureFlags::DestroyService do
expect(audit_event_message).to eq("Deleted feature flag #{feature_flag.name}.")
end
it_behaves_like 'update feature flag client'
context 'when user is reporter' do
let(:user) { reporter }
@ -57,6 +59,8 @@ RSpec.describe FeatureFlags::DestroyService do
it 'does not create audit log' do
expect { subject }.not_to change { AuditEvent.count }
end
it_behaves_like 'does not update feature flag client'
end
end
end

View file

@ -58,6 +58,8 @@ RSpec.describe FeatureFlags::UpdateService do
)
end
it_behaves_like 'update feature flag client'
context 'with invalid params' do
let(:params) { { name: nil } }
@ -79,6 +81,8 @@ RSpec.describe FeatureFlags::UpdateService do
subject
end
it_behaves_like 'does not update feature flag client'
end
context 'when user is reporter' do

View file

@ -6,6 +6,7 @@ RSpec.describe WorkItems::CreateService do
include AfterNextHelpers
let_it_be_with_reload(:project) { create(:project) }
let_it_be(:parent) { create(:work_item, project: project) }
let_it_be(:guest) { create(:user) }
let_it_be(:user_with_no_access) { create(:user) }
@ -93,7 +94,7 @@ RSpec.describe WorkItems::CreateService do
it_behaves_like 'work item widgetable service' do
let(:widget_params) do
{
hierarchy_widget: { parent_id: 1 }
hierarchy_widget: { parent: parent }
}
end
@ -111,16 +112,18 @@ RSpec.describe WorkItems::CreateService do
let(:supported_widgets) do
[
{ klass: WorkItems::Widgets::HierarchyService::CreateService, callback: :after_create_in_transaction, params: { parent_id: 1 } }
{
klass: WorkItems::Widgets::HierarchyService::CreateService,
callback: :after_create_in_transaction,
params: { parent: parent }
}
]
end
end
describe 'hierarchy widget' do
context 'when parent is valid work item' do
let_it_be(:parent) { create(:work_item, project: project) }
let(:widget_params) { { hierarchy_widget: { parent_id: parent.id } } }
let(:widget_params) { { hierarchy_widget: { parent: parent } } }
let(:opts) do
{
@ -149,7 +152,7 @@ RSpec.describe WorkItems::CreateService do
end
end
context 'when hiearchy feature flag is disabled' do
context 'when hierarchy feature flag is disabled' do
before do
stub_feature_flags(work_items_hierarchy: false)
end

View file

@ -0,0 +1,41 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for destroying a parent/child link between two work items.
RSpec.describe WorkItems::ParentLinks::DestroyService do
  describe '#execute' do
    let_it_be(:user) { create(:user) }
    let_it_be(:project) { create(:project) }
    let_it_be(:work_item) { create(:work_item, project: project) }
    let_it_be(:task) { create(:work_item, :task, project: project) }
    # The link under test: +task+ is a child of +work_item+.
    let_it_be(:parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item)}

    let(:parent_link_class) { WorkItems::ParentLink }

    subject { described_class.new(parent_link, user).execute }

    context 'when user has permissions to update work items' do
      before do
        project.add_developer(user)
      end

      it 'removes relation' do
        expect { subject }.to change(parent_link_class, :count).by(-1)
      end

      it 'returns success message' do
        is_expected.to eq(message: 'Relation was removed', status: :success)
      end
    end

    context 'when user has insufficient permissions' do
      # No role is granted on the project, so the service refuses to act.
      it 'does not remove relation' do
        expect { subject }.not_to change(parent_link_class, :count).from(1)
      end

      # The service reports "not found" rather than "forbidden" to avoid
      # leaking the link's existence to unauthorized users.
      it 'returns error message' do
        is_expected.to eq(message: 'No Work Item Link found', status: :error, http_status: 404)
      end
    end
  end
end

View file

@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe WorkItems::UpdateService do
let_it_be(:developer) { create(:user) }
let_it_be(:project) { create(:project).tap { |proj| proj.add_developer(developer) } }
let_it_be(:parent) { create(:work_item, project: project) }
let_it_be_with_reload(:work_item) { create(:work_item, project: project, assignees: [developer]) }
let(:spam_params) { double }
@ -80,7 +81,7 @@ RSpec.describe WorkItems::UpdateService do
it_behaves_like 'work item widgetable service' do
let(:widget_params) do
{
hierarchy_widget: { parent_id: 1 },
hierarchy_widget: { parent: parent },
description_widget: { description: 'foo' },
weight_widget: { weight: 1 }
}
@ -102,7 +103,7 @@ RSpec.describe WorkItems::UpdateService do
[
{ klass: WorkItems::Widgets::DescriptionService::UpdateService, callback: :update, params: { description: 'foo' } },
{ klass: WorkItems::Widgets::WeightService::UpdateService, callback: :update, params: { weight: 1 } },
{ klass: WorkItems::Widgets::HierarchyService::UpdateService, callback: :before_update_in_transaction, params: { parent_id: 1 } }
{ klass: WorkItems::Widgets::HierarchyService::UpdateService, callback: :before_update_in_transaction, params: { parent: parent } }
]
end
end
@ -144,6 +145,7 @@ RSpec.describe WorkItems::UpdateService do
end
context 'for the hierarchy widget' do
let(:opts) { { title: 'changed' } }
let_it_be(:child_work_item) { create(:work_item, :task, project: project) }
let(:widget_params) { { hierarchy_widget: { children_ids: [child_work_item.id] } } }
@ -156,6 +158,23 @@ RSpec.describe WorkItems::UpdateService do
expect(work_item.work_item_children).to include(child_work_item)
end
context 'when child type is invalid' do
let_it_be(:child_work_item) { create(:work_item, project: project) }
it 'returns error status' do
expect(subject[:status]).to be(:error)
expect(subject[:message])
.to match("#{child_work_item.to_reference} cannot be added: Only Task can be assigned as a child in hierarchy.")
end
it 'does not update work item attributes' do
expect do
update_work_item
work_item.reload
end.to not_change(WorkItems::ParentLink, :count).and(not_change(work_item, :title))
end
end
end
end
end

View file

@ -21,8 +21,8 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
describe '#update' do
subject { described_class.new(widget: widget, current_user: user).before_update_in_transaction(params: params) }
context 'when parent_id and children_ids params are present' do
let(:params) { { parent_id: parent_work_item.id, children_ids: [child_work_item.id] } }
context 'when parent and children_ids params are present' do
let(:params) { { parent: parent_work_item, children_ids: [child_work_item.id] } }
it_behaves_like 'raises a WidgetError' do
let(:message) { 'A Work Item can be a parent or a child, but not both.' }
@ -95,7 +95,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
context 'when updating parent' do
let_it_be(:work_item) { create(:work_item, :task, project: project) }
let(:params) {{ parent_id: parent_work_item.id } }
let(:params) {{ parent: parent_work_item } }
context 'when work_items_hierarchy feature flag is disabled' do
before do
@ -107,15 +107,6 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
end
end
context 'when parent_id does not match an existing work item' do
let(:invalid_id) { non_existing_record_iid }
let(:params) {{ parent_id: invalid_id } }
it_behaves_like 'raises a WidgetError' do
let(:message) { "No Work Item found with ID: #{invalid_id}." }
end
end
context 'when user has insufficient permissions to link work items' do
it_behaves_like 'raises a WidgetError' do
let(:message) { not_found_error }
@ -127,16 +118,35 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
project.add_developer(user)
end
it 'correctly sets work item parent' do
subject
it 'correctly sets new parent' do
expect(subject[:status]).to eq(:success)
expect(work_item.work_item_parent).to eq(parent_work_item)
end
context 'when parent is nil' do
let(:params) { { parent: nil } }
it 'removes the work item parent if present' do
work_item.update!(work_item_parent: parent_work_item)
expect do
subject
work_item.reload
end.to change(work_item, :work_item_parent).from(parent_work_item).to(nil)
end
it 'returns success status if parent not present', :aggregate_failure do
work_item.update!(work_item_parent: nil)
expect(subject[:status]).to eq(:success)
expect(work_item.reload.work_item_parent).to be_nil
end
end
context 'when type is invalid' do
let_it_be(:parent_task) { create(:work_item, :task, project: project)}
let(:params) {{ parent_id: parent_task.id } }
let(:params) {{ parent: parent_task } }
it_behaves_like 'raises a WidgetError' do
let(:message) { "#{work_item.to_reference} cannot be added: Only Issue can be parent of Task." }

View file

@ -0,0 +1,19 @@
# frozen_string_literal: true
# Shared example: asserts that running +subject+ bumps the feature flags
# client's last_feature_flag_updated_at timestamp.
# NOTE(review): assumes `project` and `subject` are defined by the
# including example group.
shared_examples_for 'update feature flag client' do
  let!(:client) { create(:operations_feature_flags_client, project: project) }

  it 'updates last feature flag updated at' do
    freeze_time do
      expect { subject }.to change { client.reload.last_feature_flag_updated_at }.from(nil).to(Time.current)
    end
  end
end
# Shared example: asserts that running +subject+ leaves the feature flags
# client's last_feature_flag_updated_at timestamp untouched.
# NOTE(review): assumes `project` and `subject` are defined by the
# including example group.
shared_examples_for 'does not update feature flag client' do
  let!(:client) { create(:operations_feature_flags_client, project: project) }

  it 'does not update last feature flag updated at' do
    expect { subject }.not_to change { client.reload.last_feature_flag_updated_at }
  end
end

View file

@ -92,7 +92,11 @@ func echoProxy(t *testing.T, expectedBodyLength int) http.Handler {
require.Equal(t, "application/x-www-form-urlencoded", r.Header.Get("Content-Type"), "Wrong Content-Type header")
require.Contains(t, r.PostForm, "file.md5")
if destination.FIPSEnabled() {
require.NotContains(t, r.PostForm, "file.md5")
} else {
require.Contains(t, r.PostForm, "file.md5")
}
require.Contains(t, r.PostForm, "file.sha1")
require.Contains(t, r.PostForm, "file.sha256")
require.Contains(t, r.PostForm, "file.sha512")
@ -119,7 +123,11 @@ func echoProxy(t *testing.T, expectedBodyLength int) http.Handler {
require.Contains(t, uploadFields, "remote_url")
require.Contains(t, uploadFields, "remote_id")
require.Contains(t, uploadFields, "size")
require.Contains(t, uploadFields, "md5")
if destination.FIPSEnabled() {
require.NotContains(t, uploadFields, "md5")
} else {
require.Contains(t, uploadFields, "md5")
}
require.Contains(t, uploadFields, "sha1")
require.Contains(t, uploadFields, "sha256")
require.Contains(t, uploadFields, "sha512")

View file

@ -206,7 +206,11 @@ func TestUpload(t *testing.T) {
}
require.Equal(t, test.ObjectSize, fh.Size)
require.Equal(t, test.ObjectMD5, fh.MD5())
if destination.FIPSEnabled() {
require.Empty(t, fh.MD5())
} else {
require.Equal(t, test.ObjectMD5, fh.MD5())
}
require.Equal(t, test.ObjectSHA256, fh.SHA256())
require.Equal(t, expectedPuts, osStub.PutsCnt(), "ObjectStore PutObject count mismatch")
@ -478,7 +482,11 @@ func checkFileHandlerWithFields(t *testing.T, fh *destination.FileHandler, field
require.Equal(t, fh.RemoteURL, fields[key("remote_url")])
require.Equal(t, fh.RemoteID, fields[key("remote_id")])
require.Equal(t, strconv.FormatInt(test.ObjectSize, 10), fields[key("size")])
require.Equal(t, test.ObjectMD5, fields[key("md5")])
if destination.FIPSEnabled() {
require.Empty(t, fields[key("md5")])
} else {
require.Equal(t, test.ObjectMD5, fields[key("md5")])
}
require.Equal(t, test.ObjectSHA1, fields[key("sha1")])
require.Equal(t, test.ObjectSHA256, fields[key("sha256")])
require.Equal(t, test.ObjectSHA512, fields[key("sha512")])

View file

@ -8,6 +8,9 @@ import (
"encoding/hex"
"hash"
"io"
"os"
"gitlab.com/gitlab-org/labkit/fips"
)
var hashFactories = map[string](func() hash.Hash){
@ -17,6 +20,28 @@ var hashFactories = map[string](func() hash.Hash){
"sha512": sha512.New,
}
// fipsHashFactories is the hash set used when FIPS mode is enabled:
// identical to hashFactories except that MD5 is excluded.
var fipsHashFactories = map[string](func() hash.Hash){
	"sha1":   sha1.New,
	"sha256": sha256.New,
	"sha512": sha512.New,
}
// factories returns the hash constructors to compute for an upload. When
// FIPS mode is active the MD5 factory is withheld; otherwise the full set
// is returned.
func factories() map[string](func() hash.Hash) {
	if !FIPSEnabled() {
		return hashFactories
	}
	return fipsHashFactories
}
// FIPSEnabled reports whether FIPS mode is in effect, either detected via
// labkit's fips package or forced through the WORKHORSE_TEST_FIPS_ENABLED=1
// environment variable (used by the test pipeline).
func FIPSEnabled() bool {
	return fips.Enabled() || os.Getenv("WORKHORSE_TEST_FIPS_ENABLED") == "1"
}
type multiHash struct {
io.Writer
hashes map[string]hash.Hash
@ -27,7 +52,7 @@ func newMultiHash() (m *multiHash) {
m.hashes = make(map[string]hash.Hash)
var writers []io.Writer
for hash, hashFactory := range hashFactories {
for hash, hashFactory := range factories() {
writer := hashFactory()
m.hashes[hash] = writer

View file

@ -24,6 +24,7 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/proxy"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upstream/roundtripper"
)
@ -99,7 +100,6 @@ func TestUploadHandlerRewritingMultiPartData(t *testing.T) {
require.Equal(t, "4", r.FormValue("file.size"), "Expected to receive the file size")
hashes := map[string]string{
"md5": "098f6bcd4621d373cade4e832627b4f6",
"sha1": "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3",
"sha256": "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08",
"sha512": "ee26b0dd4af7e749aa1a8ee3c10ae9923f618980772e473f8819a5d4940e0db27ac185f8a0e1d5f84f88bc887fd67b143732c304cc5fa9ad8e6f57f50028a8ff",
@ -109,7 +109,16 @@ func TestUploadHandlerRewritingMultiPartData(t *testing.T) {
require.Equal(t, hash, r.FormValue("file."+algo), "file hash %s", algo)
}
require.Len(t, r.MultipartForm.Value, 12, "multipart form values")
expectedLen := 12
if destination.FIPSEnabled() {
expectedLen--
require.Empty(t, r.FormValue("file.md5"), "file hash md5")
} else {
require.Equal(t, "098f6bcd4621d373cade4e832627b4f6", r.FormValue("file.md5"), "file hash md5")
}
require.Len(t, r.MultipartForm.Value, expectedLen, "multipart form values")
w.WriteHeader(202)
fmt.Fprint(w, "RESPONSE")

View file

@ -20,6 +20,7 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
type uploadArtifactsFunction func(url, contentType string, body io.Reader) (*http.Response, string, error)
@ -82,7 +83,13 @@ func uploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraT
require.NoError(t, r.ParseMultipartForm(100000))
const nValues = 11 // file name, path, remote_url, remote_id, size, md5, sha1, sha256, sha512, upload_duration, gitlab-workhorse-upload for just the upload (no metadata because we are not POSTing a valid zip file)
var nValues int // file name, path, remote_url, remote_id, size, md5, sha1, sha256, sha512, upload_duration, gitlab-workhorse-upload for just the upload (no metadata because we are not POSTing a valid zip file)
if destination.FIPSEnabled() {
nValues = 10
} else {
nValues = 11
}
require.Len(t, r.MultipartForm.Value, nValues)
require.Empty(t, r.MultipartForm.File, "multipart form files")
@ -183,7 +190,11 @@ func TestAcceleratedUpload(t *testing.T) {
require.Contains(t, uploadFields, "remote_url")
require.Contains(t, uploadFields, "remote_id")
require.Contains(t, uploadFields, "size")
require.Contains(t, uploadFields, "md5")
if destination.FIPSEnabled() {
require.NotContains(t, uploadFields, "md5")
} else {
require.Contains(t, uploadFields, "md5")
}
require.Contains(t, uploadFields, "sha1")
require.Contains(t, uploadFields, "sha256")
require.Contains(t, uploadFields, "sha512")