Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-04-13 15:08:16 +00:00
parent 0cb47d7129
commit 907fd5d94e
76 changed files with 1379 additions and 338 deletions

View File

@ -95,15 +95,10 @@ function trackShowUserDropdownLink(trackEvent, elToTrack, el) {
export function initNavUserDropdownTracking() {
const el = document.querySelector('.js-nav-user-dropdown');
const buyEl = document.querySelector('.js-buy-pipeline-minutes-link');
const upgradeEl = document.querySelector('.js-upgrade-plan-link');
if (el && buyEl) {
trackShowUserDropdownLink('show_buy_ci_minutes', buyEl, el);
}
if (el && upgradeEl) {
trackShowUserDropdownLink('show_upgrade_link', upgradeEl, el);
}
}
requestIdleCallback(initStatusTriggers);

View File

@ -458,7 +458,6 @@
vertical-align: text-top;
}
a.upgrade-plan-link gl-emoji,
a.ci-minutes-emoji gl-emoji,
a.trial-link gl-emoji {
font-size: $gl-font-size;

View File

@ -1,15 +1,53 @@
# frozen_string_literal: true
class Analytics::CycleAnalytics::Aggregation < ApplicationRecord
include IgnorableColumns
include FromUnion
belongs_to :group, optional: false
validates :incremental_runtimes_in_seconds, :incremental_processed_records, :last_full_run_runtimes_in_seconds, :last_full_run_processed_records, presence: true, length: { maximum: 10 }, allow_blank: true
validates :incremental_runtimes_in_seconds, :incremental_processed_records, :full_runtimes_in_seconds, :full_processed_records, presence: true, length: { maximum: 10 }, allow_blank: true
scope :priority_order, -> (column_to_sort = :last_incremental_run_at) { order(arel_table[column_to_sort].asc.nulls_first) }
scope :enabled, -> { where('enabled IS TRUE') }
# These columns were added with the wrong naming convention and were never used.
ignore_column :last_full_run_processed_records, remove_with: '15.1', remove_after: '2022-05-22'
ignore_column :last_full_run_runtimes_in_seconds, remove_with: '15.1', remove_after: '2022-05-22'
ignore_column :last_full_run_issues_updated_at, remove_with: '15.1', remove_after: '2022-05-22'
ignore_column :last_full_run_mrs_updated_at, remove_with: '15.1', remove_after: '2022-05-22'
ignore_column :last_full_run_issues_id, remove_with: '15.1', remove_after: '2022-05-22'
ignore_column :last_full_run_merge_requests_id, remove_with: '15.1', remove_after: '2022-05-22'
def cursor_for(mode, model)
{
updated_at: self["last_#{mode}_#{model.table_name}_updated_at"],
id: self["last_#{mode}_#{model.table_name}_id"]
}.compact
end
def refresh_last_run(mode)
self["last_#{mode}_run_at"] = Time.current
end
def reset_full_run_cursors
self.last_full_issues_id = nil
self.last_full_issues_updated_at = nil
self.last_full_merge_requests_id = nil
self.last_full_merge_requests_updated_at = nil
end
def set_cursor(mode, model, cursor)
self["last_#{mode}_#{model.table_name}_id"] = cursor[:id]
self["last_#{mode}_#{model.table_name}_updated_at"] = cursor[:updated_at]
end
def set_stats(mode, runtime, processed_records)
# We only store the last 10 data points
self["#{mode}_runtimes_in_seconds"] = (self["#{mode}_runtimes_in_seconds"] + [runtime]).last(10)
self["#{mode}_processed_records"] = (self["#{mode}_processed_records"] + [processed_records]).last(10)
end
def estimated_next_run_at
return unless enabled
return if last_incremental_run_at.nil?
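For context, a hedged console sketch of how the renamed helpers fit together (the record lookup and numbers are illustrative, not from this commit):

```ruby
# Pick an enabled aggregation; `enabled` and `priority_order` are the scopes defined above.
aggregation = Analytics::CycleAnalytics::Aggregation.enabled.priority_order.first

# Record stats for a full run. Only the last 10 data points are kept, which is what
# the new CARDINALITY(...) <= 10 check constraints added later in this commit enforce.
aggregation.set_stats(:full, 12, 3_500)

# Persist the cursor for the issues table under the new last_full_issues_* columns,
# then read it back.
aggregation.set_cursor(:full, Issue, { id: 1_000, updated_at: Time.current })
aggregation.cursor_for(:full, Issue) # => { updated_at: ..., id: 1000 }

aggregation.refresh_last_run(:incremental) # sets last_incremental_run_at
aggregation.reset_full_run_cursors         # clears the last_full_* cursor columns
aggregation.save!
```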

View File

@ -10,9 +10,11 @@ class Integration < ApplicationRecord
include FromUnion
include EachBatch
include IgnorableColumns
extend ::Gitlab::Utils::Override
ignore_column :template, remove_with: '15.0', remove_after: '2022-04-22'
ignore_column :type, remove_with: '15.0', remove_after: '2022-04-22'
ignore_column :properties, remove_with: '15.1', remove_after: '2022-05-22'
UnknownType = Class.new(StandardError)
@ -47,10 +49,7 @@ class Integration < ApplicationRecord
SECTION_TYPE_CONNECTION = 'connection'
serialize :properties, JSON # rubocop:disable Cop/ActiveRecordSerialize
attr_encrypted :encrypted_properties_tmp,
attribute: :encrypted_properties,
attr_encrypted :properties,
mode: :per_attribute_iv,
key: Settings.attr_encrypted_db_key_base_32,
algorithm: 'aes-256-gcm',
@ -59,6 +58,15 @@ class Integration < ApplicationRecord
encode: false,
encode_iv: false
# Handle assignment of props with symbol keys.
# To do this correctly, we need to call the method generated by attr_encrypted.
alias_method :attr_encrypted_props=, :properties=
private :attr_encrypted_props=
def properties=(props)
self.attr_encrypted_props = props&.with_indifferent_access&.freeze
end
alias_attribute :type, :type_new
default_value_for :active, false
@ -77,8 +85,6 @@ class Integration < ApplicationRecord
default_value_for :wiki_page_events, true
after_initialize :initialize_properties
after_initialize :copy_properties_to_encrypted_properties
before_save :copy_properties_to_encrypted_properties
after_commit :reset_updated_properties
@ -165,16 +171,14 @@ class Integration < ApplicationRecord
class_eval <<~RUBY, __FILE__, __LINE__ + 1
unless method_defined?(arg)
def #{arg}
properties['#{arg}']
properties['#{arg}'] if properties.present?
end
end
def #{arg}=(value)
self.properties ||= {}
self.encrypted_properties_tmp = properties
updated_properties['#{arg}'] = #{arg} unless #{arg}_changed?
self.properties['#{arg}'] = value
self.encrypted_properties_tmp['#{arg}'] = value
self.properties = self.properties.merge('#{arg}' => value)
end
def #{arg}_changed?
@ -195,11 +199,13 @@ class Integration < ApplicationRecord
# Provide convenient boolean accessor methods for each serialized property.
# Also keep track of updated properties in a similar way as ActiveModel::Dirty
def self.boolean_accessor(*args)
self.prop_accessor(*args)
prop_accessor(*args)
args.each do |arg|
class_eval <<~RUBY, __FILE__, __LINE__ + 1
def #{arg}
return if properties.blank?
Gitlab::Utils.to_boolean(properties['#{arg}'])
end
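As an illustration, a hedged sketch of what the generated accessors do for an integration that declares a boolean property (using `Integrations::PipelinesEmail`, whose defaults appear further down in this commit; the values are illustrative):

```ruby
integration = Integrations::PipelinesEmail.new

# initialize_properties (see the pipelines_email change below) sets this default.
integration.notify_only_broken_pipelines                # => true

# The setter stores the raw value in `properties`; the getter coerces it
# with Gitlab::Utils.to_boolean.
integration.notify_only_broken_pipelines = 'false'
integration.notify_only_broken_pipelines                # => false
integration.properties['notify_only_broken_pipelines']  # => "false"
```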
@ -318,18 +324,31 @@ class Integration < ApplicationRecord
def self.build_from_integration(integration, project_id: nil, group_id: nil)
new_integration = integration.dup
if integration.supports_data_fields?
data_fields = integration.data_fields.dup
data_fields.integration = new_integration
end
new_integration.instance = false
new_integration.project_id = project_id
new_integration.group_id = group_id
new_integration.inherit_from_id = integration.id if integration.instance_level? || integration.group_level?
new_integration.inherit_from_id = integration.id if integration.inheritable?
new_integration
end
# Duplicating an integration also duplicates the data fields. Duped records have different ciphertexts.
override :dup
def dup
new_integration = super
new_integration.assign_attributes(reencrypt_properties)
if supports_data_fields?
fields = data_fields.dup
fields.integration = new_integration
end
new_integration
end
def inheritable?
instance_level? || group_level?
end
def self.instance_exists_for?(type)
exists?(instance: true, type: type)
end
@ -402,13 +421,7 @@ class Integration < ApplicationRecord
end
def initialize_properties
self.properties = {} if has_attribute?(:properties) && properties.nil?
end
def copy_properties_to_encrypted_properties
self.encrypted_properties_tmp = properties
rescue ActiveModel::MissingAttributeError
# ignore - in a record built from using a restricted select list
self.properties = {} if has_attribute?(:encrypted_properties) && encrypted_properties.nil?
end
def title
@ -445,21 +458,26 @@ class Integration < ApplicationRecord
%w[active]
end
# properties is always nil - ignore it.
override :attributes
def attributes
super.except('properties')
end
# return a hash of columns => values suitable for passing to insert_all
def to_integration_hash
column = self.class.attribute_aliases.fetch('type', 'type')
copy_properties_to_encrypted_properties
as_json(except: %w[id instance project_id group_id encrypted_properties_tmp])
as_json(except: %w[id instance project_id group_id])
.merge(column => type)
.merge(reencrypt_properties)
end
def reencrypt_properties
unless properties.nil? || properties.empty?
alg = self.class.encrypted_attributes[:encrypted_properties_tmp][:algorithm]
alg = self.class.encrypted_attributes[:properties][:algorithm]
iv = generate_iv(alg)
ep = self.class.encrypt(:encrypted_properties_tmp, properties, { iv: iv })
ep = self.class.encrypt(:properties, properties, { iv: iv })
end
{ 'encrypted_properties' => ep, 'encrypted_properties_iv' => iv }
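A hedged console sketch of the new `properties` handling and re-encryption path (class and values are illustrative):

```ruby
integration = Integrations::Buildkite.new

# Symbol keys are accepted; the stored hash is frozen with indifferent access.
integration.properties = { token: 'abc' }
integration.properties['token']   # => "abc"
integration.properties.frozen?    # => true

# reencrypt_properties returns fresh ciphertext + IV for the same plaintext,
# which is what dup and to_integration_hash rely on.
integration.reencrypt_properties
# => { "encrypted_properties" => "...", "encrypted_properties_iv" => "..." }

# Duplicates therefore carry different ciphertexts from the original record.
copy = integration.dup
copy.encrypted_properties == integration.encrypted_properties # => false
```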

View File

@ -35,8 +35,9 @@ module Integrations
validates :labels_to_be_notified_behavior, inclusion: { in: LABEL_NOTIFICATION_BEHAVIOURS }, allow_blank: true
def initialize_properties
if properties.nil?
self.properties = {}
super
if properties.empty?
self.notify_only_broken_pipelines = true
self.branches_to_be_notified = "default"
self.labels_to_be_notified_behavior = MATCH_ANY_LABEL

View File

@ -25,12 +25,15 @@ module Integrations
def handle_properties
# this has been moved from initialize_properties and should be improved
# as part of https://gitlab.com/gitlab-org/gitlab/issues/29404
return unless properties
return unless properties.present?
safe_keys = data_fields.attributes.keys.grep_v(/encrypted/) - %w[id service_id created_at]
@legacy_properties_data = properties.dup
data_values = properties.slice!('title', 'description')
data_values = properties.slice(*safe_keys)
data_values.reject! { |key| data_fields.changed.include?(key) }
data_values.slice!(*data_fields.attributes.keys)
data_fields.assign_attributes(data_values) if data_values.present?
self.properties = {}
@ -68,10 +71,6 @@ module Integrations
issue_url(iid)
end
def initialize_properties
{}
end
# Initialize with default properties values
def set_default_data
return unless issues_tracker.present?

View File

@ -27,12 +27,12 @@ module Integrations
end
# Since SSL verification will always be enabled for Buildkite,
# we no longer needs to store the boolean.
# we no longer need to store the boolean.
# This is a stub method to work with deprecated API param.
# TODO: remove enable_ssl_verification after 14.0
# https://gitlab.com/gitlab-org/gitlab/-/issues/222808
def enable_ssl_verification=(_value)
self.properties.delete('enable_ssl_verification') # Remove unused key
self.properties = properties.except('enable_ssl_verification') # Remove unused key
end
override :hook_url

View File

@ -94,10 +94,6 @@ module Integrations
!!URI(url).hostname&.end_with?(JIRA_CLOUD_HOST)
end
def initialize_properties
{}
end
def data_fields
jira_tracker_data || self.build_jira_tracker_data
end
@ -106,7 +102,7 @@ module Integrations
return unless reset_password?
data_fields.password = nil
properties.delete('password') if properties
self.properties = properties.except('password')
end
def set_default_data

View File

@ -12,8 +12,9 @@ module Integrations
validate :number_of_recipients_within_limit, if: :validate_recipients?
def initialize_properties
if properties.nil?
self.properties = {}
super
if properties.blank?
self.notify_only_broken_pipelines = true
self.branches_to_be_notified = "default"
elsif !self.notify_only_default_branch.nil?

View File

@ -32,12 +32,6 @@ module Integrations
scope :preload_project, -> { preload(:project) }
scope :with_clusters_with_cilium, -> { joins(project: [:clusters]).merge(Clusters::Cluster.with_available_cilium) }
def initialize_properties
if properties.nil?
self.properties = {}
end
end
def show_active_box?
false
end

View File

@ -112,8 +112,9 @@ module Projects
integration = project.find_or_initialize_integration(::Integrations::Prometheus.to_param)
integration.assign_attributes(attrs)
attrs = integration.to_integration_hash.except('created_at', 'updated_at')
{ prometheus_integration_attributes: integration.attributes.except(*%w[id project_id created_at updated_at]) }
{ prometheus_integration_attributes: attrs }
end
def incident_management_setting_params

View File

@ -27,7 +27,6 @@
%li
= link_to s_("CurrentUser|Preferences"), profile_preferences_path
= render_if_exists 'layouts/header/buy_pipeline_minutes', project: @project, namespace: @group
= render_if_exists 'layouts/header/upgrade'
- if current_user_menu?(:help)
%li.divider.d-md-none

View File

@ -0,0 +1,8 @@
---
name: vsa_reaggregation_worker
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/84171
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/357647
milestone: '14.10'
type: development
group: group::optimize
default_enabled: false

View File

@ -1,7 +1,7 @@
---
name: pql_three_cta_test
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74054
rollout_issue_url:
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/349799
milestone: '14.7'
type: experiment
group: group::conversion

View File

@ -640,6 +640,9 @@ Gitlab.ee do
Settings.cron_jobs['analytics_cycle_analytics_consistency_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['analytics_cycle_analytics_consistency_worker']['cron'] ||= '*/30 * * * *'
Settings.cron_jobs['analytics_cycle_analytics_consistency_worker']['job_class'] = 'Analytics::CycleAnalytics::ConsistencyWorker'
Settings.cron_jobs['analytics_cycle_analytics_reaggregation_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['analytics_cycle_analytics_reaggregation_worker']['cron'] ||= '44 * * * *'
Settings.cron_jobs['analytics_cycle_analytics_reaggregation_worker']['job_class'] = 'Analytics::CycleAnalytics::ReaggregationWorker'
Settings.cron_jobs['active_user_count_threshold_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['active_user_count_threshold_worker']['cron'] ||= '0 12 * * *'
Settings.cron_jobs['active_user_count_threshold_worker']['job_class'] = 'ActiveUserCountThresholdWorker'

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
class AddRuntimeDataColumnsToVsaAggregations < Gitlab::Database::Migration[1.0]
def up
change_table(:analytics_cycle_analytics_aggregations, bulk: true) do |t|
t.integer :full_runtimes_in_seconds, array: true, default: [], null: false
t.integer :full_processed_records, array: true, default: [], null: false
t.column :last_full_merge_requests_updated_at, :datetime_with_timezone
t.column :last_full_issues_updated_at, :datetime_with_timezone
t.integer :last_full_issues_id
t.integer :last_full_merge_requests_id
end
end
def down
remove_column :analytics_cycle_analytics_aggregations, :full_runtimes_in_seconds
remove_column :analytics_cycle_analytics_aggregations, :full_processed_records
remove_column :analytics_cycle_analytics_aggregations, :last_full_merge_requests_updated_at
remove_column :analytics_cycle_analytics_aggregations, :last_full_issues_updated_at
remove_column :analytics_cycle_analytics_aggregations, :last_full_issues_id
remove_column :analytics_cycle_analytics_aggregations, :last_full_merge_requests_id
end
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
class AddCheckConstraintToVsaAggregationRuntimeDataColumns < Gitlab::Database::Migration[1.0]
FULL_RUNTIMES_IN_SECONDS_CONSTRAINT = 'full_runtimes_in_seconds_size'
FULL_PROCESSED_RECORDS_CONSTRAINT = 'full_processed_records_size'
disable_ddl_transaction!
def up
add_check_constraint(:analytics_cycle_analytics_aggregations,
'CARDINALITY(full_runtimes_in_seconds) <= 10',
FULL_RUNTIMES_IN_SECONDS_CONSTRAINT)
add_check_constraint(:analytics_cycle_analytics_aggregations,
'CARDINALITY(full_processed_records) <= 10',
FULL_PROCESSED_RECORDS_CONSTRAINT)
end
def down
remove_check_constraint :analytics_cycle_analytics_aggregations, FULL_RUNTIMES_IN_SECONDS_CONSTRAINT
remove_check_constraint :analytics_cycle_analytics_aggregations, FULL_PROCESSED_RECORDS_CONSTRAINT
end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
#
class AddPartialIndexOnUnencryptedIntegrations < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
INDEX_NAME = 'index_integrations_on_id_where_not_encrypted'
INDEX_FILTER_CONDITION = 'properties IS NOT NULL AND encrypted_properties IS NULL'
def up
add_concurrent_index :integrations, [:id],
where: INDEX_FILTER_CONDITION,
name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :integrations, INDEX_NAME
end
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
class ConsumeRemainingEncryptIntegrationPropertyJobs < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
BATCH_SIZE = 50
def up
Gitlab::BackgroundMigration.steal('EncryptIntegrationProperties')
model = define_batchable_model('integrations')
relation = model.where.not(properties: nil).where(encrypted_properties: nil)
relation.each_batch(of: BATCH_SIZE) do |batch|
range = batch.pluck('MIN(id)', 'MAX(id)').first
Gitlab::BackgroundMigration::EncryptIntegrationProperties.new.perform(*range)
end
end
def down
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
#
# The inverse of 20220412143551_add_partial_index_on_unencrypted_integrations.rb
class RemovePartialIndexOnUnencryptedIntegrations < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
INDEX_NAME = 'index_integrations_on_id_where_not_encrypted'
INDEX_FILTER_CONDITION = 'properties IS NOT NULL AND encrypted_properties IS NULL'
def down
add_concurrent_index :integrations, [:id],
where: INDEX_FILTER_CONDITION,
name: INDEX_NAME
end
def up
remove_concurrent_index_by_name :integrations, INDEX_NAME
end
end

View File

@ -0,0 +1 @@
f5c934c691b50bff8c4029a975e37e86177cdb24b10bb65be2edd5bda50938b0

View File

@ -0,0 +1 @@
4ffb630e2949769c0ad64d43c2f8b6ad432358c44b00da99ec8ce538bb245e1a

View File

@ -0,0 +1 @@
beff437160d30bc0cb6577e5b88edb751f1325b316534010844e053a567906ff

View File

@ -0,0 +1 @@
6211f4f1e2708606aa68c139639acdb366cd1f8e4be225800a2e49888f420498

View File

@ -0,0 +1 @@
442300bd5c2f05807bdf752a9c3280a11f1cc84b21c2d61d99fb73268f7a495f

View File

@ -10639,10 +10639,18 @@ CREATE TABLE analytics_cycle_analytics_aggregations (
last_full_run_mrs_updated_at timestamp with time zone,
last_consistency_check_updated_at timestamp with time zone,
enabled boolean DEFAULT true NOT NULL,
full_runtimes_in_seconds integer[] DEFAULT '{}'::integer[] NOT NULL,
full_processed_records integer[] DEFAULT '{}'::integer[] NOT NULL,
last_full_merge_requests_updated_at timestamp with time zone,
last_full_issues_updated_at timestamp with time zone,
last_full_issues_id integer,
last_full_merge_requests_id integer,
CONSTRAINT chk_rails_1ef688e577 CHECK ((cardinality(incremental_runtimes_in_seconds) <= 10)),
CONSTRAINT chk_rails_7810292ec9 CHECK ((cardinality(last_full_run_processed_records) <= 10)),
CONSTRAINT chk_rails_8b9e89687c CHECK ((cardinality(last_full_run_runtimes_in_seconds) <= 10)),
CONSTRAINT chk_rails_e16bf3913a CHECK ((cardinality(incremental_processed_records) <= 10))
CONSTRAINT chk_rails_e16bf3913a CHECK ((cardinality(incremental_processed_records) <= 10)),
CONSTRAINT full_processed_records_size CHECK ((cardinality(full_processed_records) <= 10)),
CONSTRAINT full_runtimes_in_seconds_size CHECK ((cardinality(full_runtimes_in_seconds) <= 10))
);
CREATE TABLE analytics_cycle_analytics_group_stages (

View File

@ -88,27 +88,44 @@ node running Rails (Puma, Sidekiq, or Geo Log Cursor) on the Geo **secondary** s
sudo gitlab-rake geo:status
```
Example output:
The output includes:
- a count of "failed" items if any failures occurred
- the percentage of "succeeded" items, relative to the "total"
Example:
```plaintext
http://secondary.example.com/
-----------------------------------------------------
GitLab Version: 11.10.4-ee
GitLab Version: 14.9.2-ee
Geo Role: Secondary
Health Status: Healthy
Repositories: 289/289 (100%)
Verified Repositories: 289/289 (100%)
Wikis: 289/289 (100%)
Verified Wikis: 289/289 (100%)
LFS Objects: 8/8 (100%)
Attachments: 5/5 (100%)
CI job artifacts: 0/0 (0%)
Repositories Checked: 0/289 (0%)
Repositories: succeeded 12345 / total 12345 (100%)
Verified Repositories: succeeded 12345 / total 12345 (100%)
Wikis: succeeded 6789 / total 6789 (100%)
Verified Wikis: succeeded 6789 / total 6789 (100%)
Attachments: succeeded 4 / total 4 (100%)
CI job artifacts: succeeded 0 / total 0 (0%)
Design repositories: succeeded 1 / total 1 (100%)
LFS Objects: failed 1 / succeeded 2 / total 3 (67%)
Merge Request Diffs: succeeded 0 / total 0 (0%)
Package Files: failed 1 / succeeded 2 / total 3 (67%)
Terraform State Versions: failed 1 / succeeded 2 / total 3 (67%)
Snippet Repositories: failed 1 / succeeded 2 / total 3 (67%)
Group Wiki Repositories: succeeded 4 / total 4 (100%)
Pipeline Artifacts: failed 3 / succeeded 0 / total 3 (0%)
Pages Deployments: succeeded 0 / total 0 (0%)
Repositories Checked: failed 5 / succeeded 0 / total 5 (0%)
Package Files Verified: succeeded 0 / total 10 (0%)
Terraform State Versions Verified: succeeded 0 / total 10 (0%)
Snippet Repositories Verified: succeeded 99 / total 100 (99%)
Pipeline Artifacts Verified: succeeded 0 / total 10 (0%)
Sync Settings: Full
Database replication lag: 0 seconds
Last event ID seen from primary: 10215 (about 2 minutes ago)
Last event ID processed by cursor: 10215 (about 2 minutes ago)
Last status report was: 2 minutes ago
Last event ID seen from primary: 12345 (about 2 minutes ago)
Last event ID processed by cursor: 12345 (about 2 minutes ago)
Last status report was: 1 minute ago
```
### Check if PostgreSQL replication is working

View File

@ -21,76 +21,77 @@ See also:
The following API resources are available in the project context:
| Resource | Available endpoints |
|:------------------------------------------------------------------------|:--------------------|
| [Access requests](access_requests.md) | `/projects/:id/access_requests` (also available for groups) |
| [Access tokens](resource_access_tokens.md) | `/projects/:id/access_tokens` (also available for groups) |
| [Award emoji](award_emoji.md) | `/projects/:id/issues/.../award_emoji`, `/projects/:id/merge_requests/.../award_emoji`, `/projects/:id/snippets/.../award_emoji` |
| [Branches](branches.md) | `/projects/:id/repository/branches/`, `/projects/:id/repository/merged_branches` |
| [Commits](commits.md) | `/projects/:id/repository/commits`, `/projects/:id/statuses` |
| [Container Registry](container_registry.md) | `/projects/:id/registry/repositories` |
| [Custom attributes](custom_attributes.md) | `/projects/:id/custom_attributes` (also available for groups and users) |
| [Debian distributions](packages/debian_project_distributions.md) | `/projects/:id/debian_distributions` (also available for groups) |
| [Dependencies](dependencies.md) **(ULTIMATE)** | `/projects/:id/dependencies` |
| [Deploy keys](deploy_keys.md) | `/projects/:id/deploy_keys` (also available standalone) |
| [Deploy tokens](deploy_tokens.md) | `/projects/:id/deploy_tokens` (also available for groups and standalone) |
| [Deployments](deployments.md) | `/projects/:id/deployments` |
| Resource | Available endpoints |
|:------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| [Access requests](access_requests.md) | `/projects/:id/access_requests` (also available for groups) |
| [Access tokens](resource_access_tokens.md) | `/projects/:id/access_tokens` (also available for groups) |
| [Agents](cluster_agents.md) | `/projects/:id/cluster_agents` |
| [Award emoji](award_emoji.md) | `/projects/:id/issues/.../award_emoji`, `/projects/:id/merge_requests/.../award_emoji`, `/projects/:id/snippets/.../award_emoji` |
| [Branches](branches.md) | `/projects/:id/repository/branches/`, `/projects/:id/repository/merged_branches` |
| [Commits](commits.md) | `/projects/:id/repository/commits`, `/projects/:id/statuses` |
| [Container Registry](container_registry.md) | `/projects/:id/registry/repositories` |
| [Custom attributes](custom_attributes.md) | `/projects/:id/custom_attributes` (also available for groups and users) |
| [Debian distributions](packages/debian_project_distributions.md) | `/projects/:id/debian_distributions` (also available for groups) |
| [Dependencies](dependencies.md) **(ULTIMATE)** | `/projects/:id/dependencies` |
| [Deploy keys](deploy_keys.md) | `/projects/:id/deploy_keys` (also available standalone) |
| [Deploy tokens](deploy_tokens.md) | `/projects/:id/deploy_tokens` (also available for groups and standalone) |
| [Deployments](deployments.md) | `/projects/:id/deployments` |
| [Discussions](discussions.md) (threaded comments) | `/projects/:id/issues/.../discussions`, `/projects/:id/snippets/.../discussions`, `/projects/:id/merge_requests/.../discussions`, `/projects/:id/commits/.../discussions` (also available for groups) |
| [Environments](environments.md) | `/projects/:id/environments` |
| [Error Tracking](error_tracking.md) | `/projects/:id/error_tracking/settings` |
| [Events](events.md) | `/projects/:id/events` (also available for users and standalone) |
| [Feature Flag User Lists](feature_flag_user_lists.md) | `/projects/:id/feature_flags_user_lists` |
| [Feature Flags](feature_flags.md) | `/projects/:id/feature_flags` |
| [Freeze Periods](freeze_periods.md) | `/projects/:id/freeze_periods` |
| [Integrations](integrations.md) (Formerly "services") | `/projects/:id/integrations` |
| [Invitations](invitations.md) | `/projects/:id/invitations` (also available for groups) |
| [Issue boards](boards.md) | `/projects/:id/boards` |
| [Issue links](issue_links.md) | `/projects/:id/issues/.../links` |
| [Issues Statistics](issues_statistics.md) | `/projects/:id/issues_statistics` (also available for groups and standalone) |
| [Issues](issues.md) | `/projects/:id/issues` (also available for groups and standalone) |
| [Iterations](iterations.md) **(PREMIUM)** | `/projects/:id/iterations` (also available for groups) |
| [Jobs](jobs.md) | `/projects/:id/jobs`, `/projects/:id/pipelines/.../jobs` |
| [Labels](labels.md) | `/projects/:id/labels` |
| [Managed licenses](managed_licenses.md) **(ULTIMATE)** | `/projects/:id/managed_licenses` |
| [Members](members.md) | `/projects/:id/members` (also available for groups) |
| [Merge request approvals](merge_request_approvals.md) **(PREMIUM)** | `/projects/:id/approvals`, `/projects/:id/merge_requests/.../approvals` |
| [Merge requests](merge_requests.md) | `/projects/:id/merge_requests` (also available for groups and standalone) |
| [Merge trains](merge_trains.md) | `/projects/:id/merge_trains` |
| [Notes](notes.md) (comments) | `/projects/:id/issues/.../notes`, `/projects/:id/snippets/.../notes`, `/projects/:id/merge_requests/.../notes` (also available for groups) |
| [Notification settings](notification_settings.md) | `/projects/:id/notification_settings` (also available for groups and standalone) |
| [Packages](packages.md) | `/projects/:id/packages` |
| [Pages domains](pages_domains.md) | `/projects/:id/pages` (also available standalone) |
| [Pipeline schedules](pipeline_schedules.md) | `/projects/:id/pipeline_schedules` |
| [Pipeline triggers](pipeline_triggers.md) | `/projects/:id/triggers` |
| [Pipelines](pipelines.md) | `/projects/:id/pipelines` |
| [Project badges](project_badges.md) | `/projects/:id/badges` |
| [Project clusters](project_clusters.md) | `/projects/:id/clusters` |
| [Project import/export](project_import_export.md) | `/projects/:id/export`, `/projects/import`, `/projects/:id/import` |
| [Project milestones](milestones.md) | `/projects/:id/milestones` |
| [Project snippets](project_snippets.md) | `/projects/:id/snippets` |
| [Project templates](project_templates.md) | `/projects/:id/templates` |
| [Project vulnerabilities](project_vulnerabilities.md) **(ULTIMATE)** | `/projects/:id/templates` |
| [Project wikis](wikis.md) | `/projects/:id/wikis` |
| [Project-level variables](project_level_variables.md) | `/projects/:id/variables` |
| [Projects](projects.md) including setting Webhooks | `/projects`, `/projects/:id/hooks` (also available for users) |
| [Protected branches](protected_branches.md) | `/projects/:id/protected_branches` |
| [Protected environments](protected_environments.md) | `/projects/:id/protected_environments` |
| [Protected tags](protected_tags.md) | `/projects/:id/protected_tags` |
| [Release links](releases/links.md) | `/projects/:id/releases/.../assets/links` |
| [Releases](releases/index.md) | `/projects/:id/releases` |
| [Remote mirrors](remote_mirrors.md) | `/projects/:id/remote_mirrors` |
| [Repositories](repositories.md) | `/projects/:id/repository` |
| [Repository files](repository_files.md) | `/projects/:id/repository/files` |
| [Repository submodules](repository_submodules.md) | `/projects/:id/repository/submodules` |
| [Resource label events](resource_label_events.md) | `/projects/:id/issues/.../resource_label_events`, `/projects/:id/merge_requests/.../resource_label_events` (also available for groups) |
| [Runners](runners.md) | `/projects/:id/runners` (also available standalone) |
| [Search](search.md) | `/projects/:id/search` (also available for groups and standalone) |
| [Tags](tags.md) | `/projects/:id/repository/tags` |
| [User-starred metrics dashboards](metrics_user_starred_dashboards.md ) | `/projects/:id/metrics/user_starred_dashboards` |
| [Visual Review discussions](visual_review_discussions.md) **(PREMIUM)** | `/projects/:id/merge_requests/:merge_request_id/visual_review_discussions` |
| [Vulnerabilities](vulnerabilities.md) **(ULTIMATE)** | `/vulnerabilities/:id` |
| [Vulnerability exports](vulnerability_exports.md) **(ULTIMATE)** | `/projects/:id/vulnerability_exports` |
| [Vulnerability findings](vulnerability_findings.md) **(ULTIMATE)** | `/projects/:id/vulnerability_findings` |
| [Environments](environments.md) | `/projects/:id/environments` |
| [Error Tracking](error_tracking.md) | `/projects/:id/error_tracking/settings` |
| [Events](events.md) | `/projects/:id/events` (also available for users and standalone) |
| [Feature Flag User Lists](feature_flag_user_lists.md) | `/projects/:id/feature_flags_user_lists` |
| [Feature Flags](feature_flags.md) | `/projects/:id/feature_flags` |
| [Freeze Periods](freeze_periods.md) | `/projects/:id/freeze_periods` |
| [Integrations](integrations.md) (Formerly "services") | `/projects/:id/integrations` |
| [Invitations](invitations.md) | `/projects/:id/invitations` (also available for groups) |
| [Issue boards](boards.md) | `/projects/:id/boards` |
| [Issue links](issue_links.md) | `/projects/:id/issues/.../links` |
| [Issues Statistics](issues_statistics.md) | `/projects/:id/issues_statistics` (also available for groups and standalone) |
| [Issues](issues.md) | `/projects/:id/issues` (also available for groups and standalone) |
| [Iterations](iterations.md) **(PREMIUM)** | `/projects/:id/iterations` (also available for groups) |
| [Jobs](jobs.md) | `/projects/:id/jobs`, `/projects/:id/pipelines/.../jobs` |
| [Labels](labels.md) | `/projects/:id/labels` |
| [Managed licenses](managed_licenses.md) **(ULTIMATE)** | `/projects/:id/managed_licenses` |
| [Members](members.md) | `/projects/:id/members` (also available for groups) |
| [Merge request approvals](merge_request_approvals.md) **(PREMIUM)** | `/projects/:id/approvals`, `/projects/:id/merge_requests/.../approvals` |
| [Merge requests](merge_requests.md) | `/projects/:id/merge_requests` (also available for groups and standalone) |
| [Merge trains](merge_trains.md) | `/projects/:id/merge_trains` |
| [Notes](notes.md) (comments) | `/projects/:id/issues/.../notes`, `/projects/:id/snippets/.../notes`, `/projects/:id/merge_requests/.../notes` (also available for groups) |
| [Notification settings](notification_settings.md) | `/projects/:id/notification_settings` (also available for groups and standalone) |
| [Packages](packages.md) | `/projects/:id/packages` |
| [Pages domains](pages_domains.md) | `/projects/:id/pages` (also available standalone) |
| [Pipeline schedules](pipeline_schedules.md) | `/projects/:id/pipeline_schedules` |
| [Pipeline triggers](pipeline_triggers.md) | `/projects/:id/triggers` |
| [Pipelines](pipelines.md) | `/projects/:id/pipelines` |
| [Project badges](project_badges.md) | `/projects/:id/badges` |
| [Project clusters](project_clusters.md) | `/projects/:id/clusters` |
| [Project import/export](project_import_export.md) | `/projects/:id/export`, `/projects/import`, `/projects/:id/import` |
| [Project milestones](milestones.md) | `/projects/:id/milestones` |
| [Project snippets](project_snippets.md) | `/projects/:id/snippets` |
| [Project templates](project_templates.md) | `/projects/:id/templates` |
| [Project vulnerabilities](project_vulnerabilities.md) **(ULTIMATE)** | `/projects/:id/templates` |
| [Project wikis](wikis.md) | `/projects/:id/wikis` |
| [Project-level variables](project_level_variables.md) | `/projects/:id/variables` |
| [Projects](projects.md) including setting Webhooks | `/projects`, `/projects/:id/hooks` (also available for users) |
| [Protected branches](protected_branches.md) | `/projects/:id/protected_branches` |
| [Protected environments](protected_environments.md) | `/projects/:id/protected_environments` |
| [Protected tags](protected_tags.md) | `/projects/:id/protected_tags` |
| [Release links](releases/links.md) | `/projects/:id/releases/.../assets/links` |
| [Releases](releases/index.md) | `/projects/:id/releases` |
| [Remote mirrors](remote_mirrors.md) | `/projects/:id/remote_mirrors` |
| [Repositories](repositories.md) | `/projects/:id/repository` |
| [Repository files](repository_files.md) | `/projects/:id/repository/files` |
| [Repository submodules](repository_submodules.md) | `/projects/:id/repository/submodules` |
| [Resource label events](resource_label_events.md) | `/projects/:id/issues/.../resource_label_events`, `/projects/:id/merge_requests/.../resource_label_events` (also available for groups) |
| [Runners](runners.md) | `/projects/:id/runners` (also available standalone) |
| [Search](search.md) | `/projects/:id/search` (also available for groups and standalone) |
| [Tags](tags.md) | `/projects/:id/repository/tags` |
| [User-starred metrics dashboards](metrics_user_starred_dashboards.md ) | `/projects/:id/metrics/user_starred_dashboards` |
| [Visual Review discussions](visual_review_discussions.md) **(PREMIUM)** | `/projects/:id/merge_requests/:merge_request_id/visual_review_discussions` |
| [Vulnerabilities](vulnerabilities.md) **(ULTIMATE)** | `/vulnerabilities/:id` |
| [Vulnerability exports](vulnerability_exports.md) **(ULTIMATE)** | `/projects/:id/vulnerability_exports` |
| [Vulnerability findings](vulnerability_findings.md) **(ULTIMATE)** | `/projects/:id/vulnerability_findings` |
## Group resources

238
doc/api/cluster_agents.md Normal file
View File

@ -0,0 +1,238 @@
---
stage: Configure
group: Configure
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Agents API **(FREE)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/83270) in GitLab 14.10.
Use the Agents API to work with the GitLab agent for Kubernetes.
## List the agents for a project
Returns the list of agents registered for the project.
You must have at least the Developer role to use this endpoint.
```plaintext
GET /projects/:id/cluster_agents
```
Parameters:
| Attribute | Type | Required | Description |
|-----------|-------------------|-----------|-----------------------------------------------------------------------------------------------------------------|
| `id` | integer or string | yes | ID or [URL-encoded path of the project](index.md#namespaced-path-encoding) maintained by the authenticated user |
Response:
The response is a list of agents with the following fields:
| Attribute | Type | Description |
|--------------------------------------|----------|------------------------------------------------------|
| `id` | integer | ID of the agent |
| `name` | string | Name of the agent |
| `config_project` | object | Object representing the project the agent belongs to |
| `config_project.id` | integer | ID of the project |
| `config_project.description` | string | Description of the project |
| `config_project.name` | string | Name of the project |
| `config_project.name_with_namespace` | string | Full name with namespace of the project |
| `config_project.path` | string | Path to the project |
| `config_project.path_with_namespace` | string | Full path with namespace to the project |
| `config_project.created_at` | string | ISO8601 datetime when the project was created |
| `created_at` | string | ISO8601 datetime when the agent was created |
| `created_by_user_id` | integer | ID of the user who created the agent |
Example request:
```shell
curl --header "Private-Token: <your_access_token>" "https://gitlab.example.com/api/v4/projects/20/cluster_agents"
```
Example response:
```json
[
{
"id": 1,
"name": "agent-1",
"config_project": {
"id": 20,
"description": "",
"name": "test",
"name_with_namespace": "Administrator / test",
"path": "test",
"path_with_namespace": "root/test",
"created_at": "2022-03-20T20:42:40.221Z"
},
"created_at": "2022-04-20T20:42:40.221Z",
"created_by_user_id": 42
},
{
"id": 2,
"name": "agent-2",
"config_project": {
"id": 20,
"description": "",
"name": "test",
"name_with_namespace": "Administrator / test",
"path": "test",
"path_with_namespace": "root/test",
"created_at": "2022-03-20T20:42:40.221Z"
},
"created_at": "2022-04-20T20:42:40.221Z",
"created_by_user_id": 42
}
]
```
## Get details about an agent
Gets the details of a single agent.
You must have at least the Developer role to use this endpoint.
```plaintext
GET /projects/:id/cluster_agents/:agent_id
```
Parameters:
| Attribute | Type | Required | Description |
|------------|-------------------|----------|-----------------------------------------------------------------------------------------------------------------|
| `id` | integer or string | yes | ID or [URL-encoded path of the project](index.md#namespaced-path-encoding) maintained by the authenticated user |
| `agent_id` | integer | yes | ID of the agent |
Response:
The response is a single agent with the following fields:
| Attribute | Type | Description |
|--------------------------------------|---------|------------------------------------------------------|
| `id` | integer | ID of the agent |
| `name` | string | Name of the agent |
| `config_project` | object | Object representing the project the agent belongs to |
| `config_project.id` | integer | ID of the project |
| `config_project.description` | string | Description of the project |
| `config_project.name` | string | Name of the project |
| `config_project.name_with_namespace` | string | Full name with namespace of the project |
| `config_project.path` | string | Path to the project |
| `config_project.path_with_namespace` | string | Full path with namespace to the project |
| `config_project.created_at` | string | ISO8601 datetime when the project was created |
| `created_at` | string | ISO8601 datetime when the agent was created |
| `created_by_user_id` | integer | ID of the user who created the agent |
Example request:
```shell
curl --header "Private-Token: <your_access_token>" "https://gitlab.example.com/api/v4/projects/20/cluster_agents/1"
```
Example response:
```json
{
"id": 1,
"name": "agent-1",
"config_project": {
"id": 20,
"description": "",
"name": "test",
"name_with_namespace": "Administrator / test",
"path": "test",
"path_with_namespace": "root/test",
"created_at": "2022-03-20T20:42:40.221Z"
},
"created_at": "2022-04-20T20:42:40.221Z",
"created_by_user_id": 42
}
```
## Register an agent with a project
Registers an agent with the project.
You must have at least the Maintainer role to use this endpoint.
```plaintext
POST /projects/:id/cluster_agents
```
Parameters:
| Attribute | Type | Required | Description |
|-----------|-------------------|----------|-----------------------------------------------------------------------------------------------------------------|
| `id` | integer or string | yes | ID or [URL-encoded path of the project](index.md#namespaced-path-encoding) maintained by the authenticated user |
| `name` | string | yes | Name for the agent |
Response:
The response is the new agent with the following fields:
| Attribute | Type | Description |
|--------------------------------------|---------|------------------------------------------------------|
| `id` | integer | ID of the agent |
| `name` | string | Name of the agent |
| `config_project` | object | Object representing the project the agent belongs to |
| `config_project.id` | integer | ID of the project |
| `config_project.description` | string | Description of the project |
| `config_project.name` | string | Name of the project |
| `config_project.name_with_namespace` | string | Full name with namespace of the project |
| `config_project.path` | string | Path to the project |
| `config_project.path_with_namespace` | string | Full path with namespace to the project |
| `config_project.created_at` | string | ISO8601 datetime when the project was created |
| `created_at` | string | ISO8601 datetime when the agent was created |
| `created_by_user_id` | integer | ID of the user who created the agent |
Example request:
```shell
curl --header "Private-Token: <your_access_token>" "https://gitlab.example.com/api/v4/projects/20/cluster_agents" \
-H "Content-Type:application/json" \
-X POST --data '{"name":"some-agent"}'
```
Example response:
```json
{
"id": 1,
"name": "agent-1",
"config_project": {
"id": 20,
"description": "",
"name": "test",
"name_with_namespace": "Administrator / test",
"path": "test",
"path_with_namespace": "root/test",
"created_at": "2022-03-20T20:42:40.221Z"
},
"created_at": "2022-04-20T20:42:40.221Z",
"created_by_user_id": 42
}
```
## Delete a registered agent
Deletes an existing agent registration.
You must have at least the Maintainer role to use this endpoint.
```plaintext
DELETE /projects/:id/cluster_agents/:agent_id
```
Parameters:
| Attribute | Type | Required | Description |
|------------|-------------------|----------|-----------------------------------------------------------------------------------------------------------------|
| `id` | integer or string | yes | ID or [URL-encoded path of the project](index.md#namespaced-path-encoding) maintained by the authenticated user |
| `agent_id` | integer | yes | ID of the agent |
Example request:
```shell
curl --request DELETE --header "Private-Token: <your_access_token>" "https://gitlab.example.com/api/v4/projects/20/cluster_agents/1"
```

View File

@ -265,7 +265,7 @@ Example response:
}
```
Deployments created by users on GitLab Premium or higher include the `approvals` and `pending_approval_count` properties:
When the [unified approval setting](../ci/environments/deployment_approvals.md#unified-approval-setting) is configured, deployments created by users on GitLab Premium or higher include the `approvals` and `pending_approval_count` properties:
```json
{
@ -290,6 +290,48 @@ Deployments created by users on GitLab Premium or higher include the `approvals`
}
```
When [multiple approval rules](../ci/environments/deployment_approvals.md#multiple-approval-rules) are configured, deployments created by users on GitLab Premium or higher include the `approval_summary` property:
```json
{
"approval_summary": {
"rules": [
{
"user_id": null,
"group_id": 134,
"access_level": null,
"access_level_description": "qa-group",
"required_approvals": 1,
"deployment_approvals": []
},
{
"user_id": null,
"group_id": 135,
"access_level": null,
"access_level_description": "security-group",
"required_approvals": 2,
"deployment_approvals": [
{
"user": {
"id": 100,
"username": "security-user-1",
"name": "security user-1",
"state": "active",
"avatar_url": "https://www.gravatar.com/avatar/e130fcd3a1681f41a3de69d10841afa9?s=80&d=identicon",
"web_url": "http://localhost:3000/security-user-1"
},
"status": "approved",
"created_at": "2022-04-11T03:37:03.058Z",
"comment": null
}
]
}
]
}
...
}
```
## Create a deployment
```plaintext
@ -455,9 +497,10 @@ POST /projects/:id/deployments/:deployment_id/approval
| `deployment_id` | integer | yes | The ID of the deployment. |
| `status` | string | yes | The status of the approval (either `approved` or `rejected`). |
| `comment` | string | no | A comment to go with the approval |
| `represented_as`| string | no | The name of the User/Group/Role to use for the approval, when the user belongs to [multiple approval rules](../ci/environments/deployment_approvals.md#multiple-approval-rules). |
```shell
curl --data "status=approved&comment=Looks good to me" \
curl --data "status=approved&comment=Looks good to me&represented_as=security" \
--header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/deployments/1/approval"
```
@ -466,12 +509,12 @@ Example response:
```json
{
"user": {
"name": "Administrator",
"username": "root",
"id": 1,
"id": 100,
"username": "security-user-1",
"name": "security user-1",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://localhost:3000/root"
"avatar_url": "https://www.gravatar.com/avatar/e130fcd3a1681f41a3de69d10841afa9?s=80&d=identicon",
"web_url": "http://localhost:3000/security-user-1"
},
"status": "approved",
"created_at": "2022-02-24T20:22:30.097Z",

View File

@ -107,6 +107,7 @@ POST /groups/:id/protected_environments
| `name` | string | yes | The deployment tier of the protected environment. One of `production`, `staging`, `testing`, `development`, or `other`. Read more about [deployment tiers](../ci/environments/index.md#deployment-tier-of-environments).|
| `deploy_access_levels` | array | yes | Array of access levels allowed to deploy, with each described by a hash. One of `user_id`, `group_id` or `access_level`. They take the form of `{user_id: integer}`, `{group_id: integer}` or `{access_level: integer}` respectively. |
| `required_approval_count` | integer | no | The number of approvals required to deploy to this environment. This is part of Deployment Approvals, which isn't yet available for use. For details, see [issue](https://gitlab.com/gitlab-org/gitlab/-/issues/343864). |
| `approval_rules` | array | no | Array of access levels allowed to approve, with each described by a hash. One of `user_id`, `group_id` or `access_level`. They take the form of `{user_id: integer}`, `{group_id: integer}` or `{access_level: integer}` respectively. You can also specify the number of required approvals from the specified entity with the `required_approvals` field. See [Multiple approval rules](../ci/environments/deployment_approvals.md#multiple-approval-rules) for more information. |
The assignable `user_id` are the users who belong to the given group with the Maintainer role (or above).
The assignable `group_id` are the sub-groups under the given group.

View File

@ -99,7 +99,7 @@ POST /projects/:id/protected_environments
```shell
curl --header 'Content-Type: application/json' --request POST \
--data '{"name": "production", "deploy_access_levels": [{"group_id": 9899826}]}' \
--data '{"name": "production", "deploy_access_levels": [{"group_id": 9899826}], "approval_rules": [{"group_id": 134}, {"group_id": 135, "required_approvals": 2}]}' \
--header "PRIVATE-TOKEN: <your_access_token>" \
"https://gitlab.example.com/api/v4/projects/22034114/protected_environments"
```
@ -110,8 +110,9 @@ curl --header 'Content-Type: application/json' --request POST \
| `name` | string | yes | The name of the environment. |
| `deploy_access_levels` | array | yes | Array of access levels allowed to deploy, with each described by a hash. |
| `required_approval_count` | integer | no | The number of approvals required to deploy to this environment. This is part of Deployment Approvals, which isn't yet available for use. For details, see [issue](https://gitlab.com/gitlab-org/gitlab/-/issues/343864). |
| `approval_rules` | array | no | Array of access levels allowed to approve, with each described by a hash. See [Multiple approval rules](../ci/environments/deployment_approvals.md#multiple-approval-rules) for more information. |
Elements in the `deploy_access_levels` array should be one of `user_id`, `group_id` or
Elements in the `deploy_access_levels` and `approval_rules` arrays should be one of `user_id`, `group_id` or
`access_level`, and take the form `{user_id: integer}`, `{group_id: integer}` or
`{access_level: integer}`.
Each user must have access to the project and each group must [have this project shared](../user/project/members/share_project_with_groups.md).
@ -129,7 +130,23 @@ Example response:
"group_id": 9899826
}
],
"required_approval_count": 0
"required_approval_count": 0,
"approval_rules": [
{
"user_id": null,
"group_id": 134,
"access_level": null,
"access_level_description": "qa-group",
"required_approvals": 1
},
{
"user_id": null,
"group_id": 135,
"access_level": null,
"access_level_description": "security-group",
"required_approvals": 2
}
]
}
```

View File

@ -52,6 +52,19 @@ Example:
### Require approvals for a protected environment
There are two ways to configure the approval requirements:
- [Unified approval setting](#unified-approval-setting): You can define who can execute **and** approve deployments.
This is useful when there is no separation of duties between executors and approvers in your organization.
- [Multiple approval rules](#multiple-approval-rules): You can define who can execute **or** approve deployments.
This is useful when there is a separation of duties between executors and approvers in your organization.
NOTE:
Multiple approval rules is a more flexible option than the unified approval setting, so the two configurations shouldn't
co-exist. If both are configured, multiple approval rules takes precedence over the unified approval setting.
#### Unified approval setting
NOTE:
At this time, it is not possible to require approvals for an existing protected environment. The workaround is to unprotect the environment and configure approvals when re-protecting the environment.
@ -77,6 +90,35 @@ NOTE:
To protect, update, or unprotect an environment, you must have at least the
Maintainer role.
#### Multiple approval rules
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/345678) in GitLab 14.10 with a flag named `deployment_approval_rules`. Disabled by default.
1. Using the [REST API](../../api/group_protected_environments.md#protect-an-environment).
1. `deploy_access_levels` represents which entity can execute the deployment job.
1. `approval_rules` represents which entity can approve the deployment job.
After this is configured, all jobs deploying to this environment automatically go into a blocked state and wait for approvals before running. Ensure that the number of required approvals is less than the number of users allowed to deploy.
Example:
```shell
curl --header 'Content-Type: application/json' --request POST \
--data '{"name": "production", "deploy_access_levels": [{"group_id": 138}], "approval_rules": [{"group_id": 134}, {"group_id": 135, "required_approvals": 2}]}' \
--header "PRIVATE-TOKEN: <your_access_token>" \
"https://gitlab.example.com/api/v4/groups/128/protected_environments"
```
With this setup:
- The operator group (`group_id: 138`) has permission to execute the deployment jobs to the `production` environment in the organization (`group_id: 128`).
- The QA tester group (`group_id: 134`) and security group (`group_id: 135`) have permission to approve the deployment jobs to the `production` environment in the organization (`group_id: 128`).
- The operator group can't execute the deployment jobs until two approvals from the security group and one approval from the QA tester group have been collected.
NOTE:
To protect, update, or unprotect an environment, you must have at least the
Maintainer role.
## Approve or reject a deployment
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/342180/) in GitLab 14.9
@ -99,6 +141,10 @@ To approve or reject a deployment to a protected environment using the UI:
1. In the deployment's row, select **Approval options** (**{thumb-up}**).
1. Select **Approve** or **Reject**.
NOTE:
This feature might not work as expected when [Multiple approval rules](#multiple-approval-rules) is configured.
See the [issue](https://gitlab.com/gitlab-org/gitlab/-/issues/355708) for planned improvement.
### Approve or reject a deployment using the API
Prerequisites:
@ -127,11 +173,14 @@ curl --data "status=approved&comment=Looks good to me" \
### Using the API
Use the [Deployments API](../../api/deployments.md) to see deployments.
Use the [Deployments API](../../api/deployments.md#get-a-specific-deployment) to see deployments.
- The `status` field indicates if a deployment is blocked.
- The `pending_approval_count` field indicates how many approvals are remaining to run a deployment.
- The `approvals` field contains the deployment's approvals.
- When the [unified approval setting](#unified-approval-setting) is configured:
- The `pending_approval_count` field indicates how many approvals are remaining to run a deployment.
- The `approvals` field contains the deployment's approvals.
- When [multiple approval rules](#multiple-approval-rules) are configured:
- The `approval_summary` field contains the current approval status per rule.
## Related features

View File

@ -76,9 +76,27 @@ if you need help finding the correct person or labels:
1. Create the epic in `gitlab-org` group:
- Title the epic `Update Go version to <VERSION_NUMBER>`.
- Ping the engineering managers responsible for [the projects listed below](#known-dependencies-using-go).
- Most engineering managers can be identified on
[the product page](https://about.gitlab.com/handbook/product/categories/) or the
[feature page](https://about.gitlab.com/handbook/product/categories/features/).
- If you still can't find the engineering manager, use
[Git blame](/ee/user/project/repository/git_blame.md) to identify a maintainer
involved in the project.
1. Create an upgrade issue for each dependency in the [location indicated below](#known-dependencies-using-go)
titled `Support building with Go <VERSION_NUMBER>`. Add the proper label to each issue for easier triage.
1. Create an upgrade issue for each dependency in the
[location indicated below](#known-dependencies-using-go) titled
`Support building with Go <VERSION_NUMBER>`. Add the proper labels to each issue
for easier triage. These should include the stage, group and section.
- The issue should be assigned by a member of the maintaining group.
- The milestone should be assigned by a member of the maintaining group.
NOTE:
Some overlap exists between project dependencies. When creating an issue for a
dependency that is part of a larger product, note the relationship in the issue
body. For example: Projects built in the context of Omnibus GitLab have their
runtime Go version managed by Omnibus, but "support" and compatibility should
be a concern of the individual project. Issues in the parent project's dependencies
issue should be about adding support for the updated Go version.
NOTE:
The upgrade issues must include [upgrade validation items](#upgrade-validation)
@ -94,9 +112,10 @@ if you need help finding the correct person or labels:
- [Composition Analysis tracker](https://gitlab.com/gitlab-org/gitlab/-/issues).
- [Container Security tracker](https://gitlab.com/gitlab-org/gitlab/-/issues).
NOTE:
Updates to these Security analyzers should not block upgrades to Charts or Omnibus since
the analyzers are built independently as separate container images.
NOTE:
Updates to these Security analyzers should not block upgrades to Charts or Omnibus since
the analyzers are built independently as separate container images.
1. Schedule builder updates with Distribution projects:
- Dependency and GitLab Development Kit issues created in previous steps should be set as blockers.
- Each issue should have the title `Support building with Go <VERSION_NUMBER>` and description as noted:

View File

@ -21,8 +21,25 @@ For the recommended frontend tracking implementation, see [Usage recommendations
Structured events and page views include the [`gitlab_standard`](schemas.md#gitlab_standard)
context, using the `window.gl.snowplowStandardContext` object which includes
[default data](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/views/layouts/_snowplow.html.haml)
as base. This object can be modified for any subsequent structured event fired,
although it's not recommended.
as base:
| Property | Example |
| -------- | ------- |
| `context_generated_at` | `"2022-01-01T01:00:00.000Z"` |
| `environment` | `"production"` |
| `extra` | `{}` |
| `namespace_id` | `123` |
| `plan` | `"gold"` |
| `project_id` | `456` |
| `source` | `"gitlab-rails"` |
| `user_id` | `789`* |
_\* Undergoes a pseudonymization process at the collector level._
These properties [are overridden](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/assets/javascripts/tracking/get_standard_context.js)
with frontend-specific values, like `source` (`gitlab-javascript`), `google_analytics_id`
and the custom `extra` object. You can modify this object for any subsequent
structured event that fires, although this is not recommended.
Tracking implementations must have an `action` and a `category`. You can provide additional
properties from the [structured event taxonomy](index.md#structured-event-taxonomy), in
@ -396,13 +413,13 @@ Use the following arguments:
|------------|---------------------------|---------------|-----------------------------------------------------------------------------------------------------------------------------------|
| `category` | String | | Area or aspect of the application. For example, `HealthCheckController` or `Lfs::FileTransformer`. |
| `action` | String | | The action being taken. For example, a controller action such as `create`, or an Active Record callback. |
| `label` | String | nil | The specific element or object to act on. This can be one of the following: the label of the element, for example, a tab labeled 'Create from template' for `create_from_template`; a unique identifier if no text is available, for example, `groups_dropdown_close` for closing the Groups dropdown in the top bar; or the name or title attribute of a record being created. |
| `property` | String | nil | Any additional property of the element, or object being acted on. |
| `value` | Numeric | nil | Describes a numeric value (decimal) directly related to the event. This could be the value of an input. For example, `10` when clicking `internal` visibility. |
| `context` | Array\[SelfDescribingJSON\] | nil | An array of custom contexts to send with this event. Most events should not have any custom contexts. |
| `project` | Project | nil | The project associated with the event. |
| `user` | User | nil | The user associated with the event. |
| `namespace` | Namespace | nil | The namespace associated with the event. |
| `label` | String | `nil` | The specific element or object to act on. This can be one of the following: the label of the element, for example, a tab labeled 'Create from template' for `create_from_template`; a unique identifier if no text is available, for example, `groups_dropdown_close` for closing the Groups dropdown in the top bar; or the name or title attribute of a record being created. |
| `property` | String | `nil` | Any additional property of the element, or object being acted on. |
| `value` | Numeric | `nil` | Describes a numeric value (decimal) directly related to the event. This could be the value of an input. For example, `10` when clicking `internal` visibility. |
| `context` | Array\[SelfDescribingJSON\] | `nil` | An array of custom contexts to send with this event. Most events should not have any custom contexts. |
| `project` | Project | `nil` | The project associated with the event. |
| `user` | User | `nil` | The user associated with the event. This value undergoes a pseudonymization process at the collector level. |
| `namespace` | Namespace | `nil` | The namespace associated with the event. |
| `extra` | Hash | `{}` | Additional keyword arguments are collected into a hash and sent with the event. |
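For orientation, a backend call using these arguments could look like the following sketch. The category, action, label, property, value, and the `issue`/`current_user` objects are illustrative placeholders, not taken from the codebase:
```ruby
# Illustrative only: fire a structured event from a controller or service.
Gitlab::Tracking.event(
  'Projects::IssuesController',        # category
  'create',                            # action
  label: 'create_from_template',
  property: 'template_name',
  value: 10,
  project: issue.project,
  user: current_user,                  # pseudonymized at the collector level
  namespace: issue.project.namespace,
  templated: true                      # extra keyword arguments are collected into `extra`
)
```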
### Unit testing

View File

@ -150,6 +150,23 @@ ORDER BY page_view_start DESC
LIMIT 100
```
#### Top 20 users who fired `reply_comment_button` in the last 30 days
```sql
SELECT
count(*) as hits,
se_action,
se_category,
gsc_pseudonymized_user_id
FROM legacy.snowplow_gitlab_events_30
WHERE
se_label = 'reply_comment_button'
AND gsc_pseudonymized_user_id IS NOT NULL
GROUP BY gsc_pseudonymized_user_id, se_category, se_action
ORDER BY count(*) DESC
LIMIT 20
```
#### Query JSON formatted data
```sql

View File

@ -10,17 +10,18 @@ This page provides Snowplow schema reference for GitLab events.
## `gitlab_standard`
We are including the [`gitlab_standard` schema](https://gitlab.com/gitlab-org/iglu/-/blob/master/public/schemas/com.gitlab/gitlab_standard/jsonschema/) with every event. See [Standardize Snowplow Schema](https://gitlab.com/groups/gitlab-org/-/epics/5218) for details.
We are including the [`gitlab_standard` schema](https://gitlab.com/gitlab-org/iglu/-/blob/master/public/schemas/com.gitlab/gitlab_standard/jsonschema/) for structured events and page views.
The [`StandardContext`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/tracking/standard_context.rb)
class represents this schema in the application. Some properties are automatically populated for [frontend](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/views/layouts/_snowplow.html.haml)
events.
class represents this schema in the application. Some properties are
[automatically populated for frontend events](implementation.md#snowplow-javascript-frontend-tracking),
and can be [provided manually for backend events](implementation.md#implement-ruby-backend-tracking).
| Field Name | Required | Default value | Type | Description |
|----------------|:-------------------:|-----------------------|--|---------------------------------------------------------------------------------------------|
| `project_id` | **{dotted-circle}** | Current project ID * | integer | |
| `namespace_id` | **{dotted-circle}** | Current group/namespace ID * | integer | |
| `user_id` | **{dotted-circle}** | Current user ID * | integer | User database record ID attribute. This file undergoes a pseudonymization process at the collector level. |
| `user_id` | **{dotted-circle}** | Current user ID * | integer | User database record ID attribute. This value undergoes a pseudonymization process at the collector level. |
| `context_generated_at` | **{dotted-circle}** | Current timestamp | string (date time format) | Timestamp indicating when context was generated. |
| `environment` | **{check-circle}** | Current environment | string (max 32 chars) | Name of the source environment, such as `production` or `staging` |
| `source` | **{check-circle}** | Event source | string (max 32 chars) | Name of the source application, such as `gitlab-rails` or `gitlab-javascript` |
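As a rough illustration of how backend events populate these fields: the records passed to the Ruby tracker map onto the context, and the remaining fields are filled in automatically. The service name and the `merge_request`/`current_user` objects below are hypothetical:
```ruby
# Illustrative sketch: `project`, `namespace`, and `user` populate
# `project_id`, `namespace_id`, and `user_id` in `gitlab_standard`;
# `environment`, `source`, and `context_generated_at` are set automatically.
Gitlab::Tracking.event(
  'MergeRequests::ApprovalService',    # hypothetical category
  'approve',                           # action
  project: merge_request.project,
  namespace: merge_request.project.namespace,
  user: current_user                   # pseudonymized at the collector level
)
```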

View File

@ -70,6 +70,7 @@ To authorize the agent to access the GitLab project where you keep Kubernetes ma
```
- The Kubernetes projects must be in the same group hierarchy as the project where the agent's configuration is.
- You can install additional agents into the same cluster to accommodate additional hierarchies.
- You can authorize up to 100 projects.
All CI/CD jobs now include a `KUBECONFIG` with contexts for every shared agent connection.
@ -92,9 +93,11 @@ To authorize the agent to access all of the GitLab projects in a group or subgro
```
- The Kubernetes projects must be in the same group hierarchy as the project where the agent's configuration is.
- You can install additional agents into the same cluster to accommodate additional hierarchies.
- All of the subgroups of an authorized group also have access to the same agent (without being specified individually).
- You can authorize up to 100 groups.
All the projects that belong to the group are now authorized to access the agent.
All the projects that belong to the group and its subgroups are now authorized to access the agent.
All CI/CD jobs now include a `KUBECONFIG` with contexts for every shared agent connection.
Choose the context to run `kubectl` commands from your CI/CD scripts.

View File

@ -43,8 +43,8 @@ This workflow is considered push-based, because GitLab is pushing requests from
GitLab supports the following Kubernetes versions. You can upgrade your
Kubernetes version to a supported version at any time:
- 1.21 (support ends on November 22, 2022)
- 1.20 (support ends on July 22, 2022)
- 1.19 (support ends on February 22, 2022)
GitLab supports at least two production-ready Kubernetes minor
versions at any given time. GitLab regularly reviews the supported versions and

View File

@ -254,6 +254,11 @@ README @group @group/with-nested/subgroup
# `docs/index.md` but not `docs/projects/index.md`:
/docs/* @root-docs
# Include `/**` to specify Code Owners for all subdirectories
# in a directory. This rule matches `docs/projects/index.md` or
# `docs/development/index.md`
/docs/**/*.md @root-docs
# This code matches a `lib` directory nested anywhere in the repository:
lib/ @lib-owner

View File

@ -112,7 +112,7 @@ module API
helpers do
def clusterable_instance
Clusters::Instance.new
::Clusters::Instance.new
end
def clusters_for_current_user

View File

@ -182,6 +182,7 @@ module API
mount ::API::Ci::SecureFiles
mount ::API::Ci::Triggers
mount ::API::Ci::Variables
mount ::API::Clusters::Agents
mount ::API::Commits
mount ::API::CommitStatuses
mount ::API::ContainerRegistryEvent

View File

@ -197,7 +197,7 @@ module API
pipeline = current_authenticated_job.pipeline
project = current_authenticated_job.project
agent_authorizations = Clusters::AgentAuthorizationsFinder.new(project).execute
agent_authorizations = ::Clusters::AgentAuthorizationsFinder.new(project).execute
project_groups = project.group&.self_and_ancestor_ids&.map { |id| { id: id } } || []
user_access_level = project.team.max_member_access(current_user.id)
roles_in_project = Gitlab::Access.sym_options_with_owner

View File

@ -0,0 +1,81 @@
# frozen_string_literal: true
module API
module Clusters
class Agents < ::API::Base
include PaginationParams
before { authenticate! }
feature_category :kubernetes_management
params do
requires :id, type: String, desc: 'The ID of a project'
end
resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
desc 'List agents' do
detail 'This feature was introduced in GitLab 14.10.'
success Entities::Clusters::Agent
end
params do
use :pagination
end
get ':id/cluster_agents' do
authorize! :read_cluster, user_project
agents = ::Clusters::AgentsFinder.new(user_project, current_user).execute
present paginate(agents), with: Entities::Clusters::Agent
end
desc 'Get single agent' do
detail 'This feature was introduced in GitLab 14.10.'
success Entities::Clusters::Agent
end
params do
requires :agent_id, type: Integer, desc: 'The ID of an agent'
end
get ':id/cluster_agents/:agent_id' do
authorize! :read_cluster, user_project
agent = user_project.cluster_agents.find(params[:agent_id])
present agent, with: Entities::Clusters::Agent
end
desc 'Add an agent to a project' do
detail 'This feature was introduced in GitLab 14.10.'
success Entities::Clusters::Agent
end
params do
requires :name, type: String, desc: 'The name of the agent'
end
post ':id/cluster_agents' do
authorize! :create_cluster, user_project
params = declared_params(include_missing: false)
result = ::Clusters::Agents::CreateService.new(user_project, current_user).execute(name: params[:name])
bad_request!(result[:message]) if result[:status] == :error
present result[:cluster_agent], with: Entities::Clusters::Agent
end
desc 'Delete an agent' do
detail 'This feature was introduced in GitLab 14.10.'
end
params do
requires :agent_id, type: Integer, desc: 'The ID of an agent'
end
delete ':id/cluster_agents/:agent_id' do
authorize! :admin_cluster, user_project
agent = user_project.cluster_agents.find(params.delete(:agent_id))
destroy_conditionally!(agent)
end
end
end
end
end

View File

@ -5,7 +5,10 @@ module API
module Clusters
class Agent < Grape::Entity
expose :id
expose :name
expose :project, with: Entities::ProjectIdentity, as: :config_project
expose :created_at
expose :created_by_user_id
end
end
end

View File

@ -54,7 +54,7 @@ module API
def check_agent_token
unauthorized! unless agent_token
Clusters::AgentTokens::TrackUsageService.new(agent_token).execute
::Clusters::AgentTokens::TrackUsageService.new(agent_token).execute
end
end
@ -91,9 +91,9 @@ module API
requires :agent_config, type: JSON, desc: 'Configuration for the Agent'
end
post '/' do
agent = Clusters::Agent.find(params[:agent_id])
agent = ::Clusters::Agent.find(params[:agent_id])
Clusters::Agents::RefreshAuthorizationService.new(agent, config: params[:agent_config]).execute
::Clusters::Agents::RefreshAuthorizationService.new(agent, config: params[:agent_config]).execute
no_content!
end

View File

@ -12,7 +12,7 @@ module API
ANNOTATIONS_SOURCES = [
{ class: ::Environment, resource: :environments, create_service_param_key: :environment },
{ class: Clusters::Cluster, resource: :clusters, create_service_param_key: :cluster }
{ class: ::Clusters::Cluster, resource: :clusters, create_service_param_key: :cluster }
].freeze
ANNOTATIONS_SOURCES.each do |annotations_source|

View File

@ -201,7 +201,7 @@ module Backup
end
def build_db_task
force = ENV['force'] == 'yes'
force = Gitlab::Utils.to_boolean(ENV['force'], default: false)
Database.new(progress, force: force)
end
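For context on the `build_db_task` change above, `Gitlab::Utils.to_boolean` normalises common truthy and falsey strings instead of matching only the literal string `yes`. A rough sketch of the expected behaviour (the exact set of accepted spellings is an assumption here):
```ruby
# Assumed behaviour of the helper used above; not an exhaustive specification.
Gitlab::Utils.to_boolean('yes', default: false)   # => true
Gitlab::Utils.to_boolean('true', default: false)  # => true
Gitlab::Utils.to_boolean('0', default: false)     # => false
Gitlab::Utils.to_boolean(nil, default: false)     # => false (falls back to the default)
```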

View File

@ -13,7 +13,7 @@ variables:
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
# Pip's cache doesn't store the python packages
# https://pip.pypa.io/en/stable/reference/pip_install/#caching
# https://pip.pypa.io/en/stable/topics/caching/
#
# If you want to also cache the installed packages, you have to install
# them in a virtualenv and cache it as well.

View File

@ -3,12 +3,12 @@
module Gitlab
module Integrations
class StiType < ActiveRecord::Type::String
NAMESPACED_INTEGRATIONS = Set.new(%w(
NAMESPACED_INTEGRATIONS = %w[
Asana Assembla Bamboo Bugzilla Buildkite Campfire Confluence CustomIssueTracker Datadog
Discord DroneCi EmailsOnPush Ewm ExternalWiki Flowdock HangoutsChat Harbor Irker Jenkins Jira Mattermost
MattermostSlashCommands MicrosoftTeams MockCi MockMonitoring Packagist PipelinesEmail Pivotaltracker
Prometheus Pushover Redmine Shimo Slack SlackSlashCommands Teamcity UnifyCircuit WebexTeams Youtrack Zentao
)).freeze
].to_set.freeze
def self.namespaced_integrations
NAMESPACED_INTEGRATIONS

View File

@ -206,3 +206,9 @@
redis_slot: project_management
aggregation: daily
feature_flag: track_epics_activity
- name: g_project_management_epic_blocking_removed
category: epics_usage
redis_slot: project_management
aggregation: daily
feature_flag: track_epics_activity

View File

@ -10951,9 +10951,6 @@ msgstr ""
msgid "CurrentUser|Start an Ultimate trial"
msgstr ""
msgid "CurrentUser|Upgrade"
msgstr ""
msgid "Custom Attributes"
msgstr ""

View File

@ -64,6 +64,7 @@ module QA
name: example.full_description,
file_path: file_path,
status: example.execution_result.status,
smoke: example.metadata.key?(:smoke).to_s,
reliable: example.metadata.key?(:reliable).to_s,
quarantined: quarantined(example.metadata),
retried: ((example.metadata[:retry_attempts] || 0) > 0).to_s,

View File

@ -316,6 +316,7 @@ module QA
|> filter(fn: (r) => r.status != "pending" and
r.merge_request == "false" and
r.quarantined == "false" and
r.smoke == "false" and
r.reliable == "#{reliable}" and
r._field == "id"
)

View File

@ -8,14 +8,15 @@ describe QA::Support::Formatters::TestStatsFormatter do
include QA::Specs::Helpers::RSpec
include ActiveSupport::Testing::TimeHelpers
let(:url) { "http://influxdb.net" }
let(:token) { "token" }
let(:ci_timestamp) { "2021-02-23T20:58:41Z" }
let(:ci_job_name) { "test-job 1/5" }
let(:ci_job_url) { "url" }
let(:ci_pipeline_url) { "url" }
let(:ci_pipeline_id) { "123" }
let(:url) { 'http://influxdb.net' }
let(:token) { 'token' }
let(:ci_timestamp) { '2021-02-23T20:58:41Z' }
let(:ci_job_name) { 'test-job 1/5' }
let(:ci_job_url) { 'url' }
let(:ci_pipeline_url) { 'url' }
let(:ci_pipeline_id) { '123' }
let(:run_type) { 'staging-full' }
let(:smoke) { 'false' }
let(:reliable) { 'false' }
let(:quarantined) { 'false' }
let(:influx_client) { instance_double('InfluxDB2::Client', create_write_api: influx_write_api) }
@ -42,11 +43,12 @@ describe QA::Support::Formatters::TestStatsFormatter do
name: 'stats export spec',
file_path: file_path.gsub('./qa/specs/features', ''),
status: :passed,
smoke: smoke,
reliable: reliable,
quarantined: quarantined,
retried: "false",
job_name: "test-job",
merge_request: "false",
retried: 'false',
job_name: 'test-job',
merge_request: 'false',
run_type: run_type,
stage: stage.match(%r{\d{1,2}_(\w+)}).captures.first,
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/1234'
@ -96,8 +98,8 @@ describe QA::Support::Formatters::TestStatsFormatter do
allow_any_instance_of(RSpec::Core::Example::ExecutionResult).to receive(:run_time).and_return(0) # rubocop:disable RSpec/AnyInstanceOf
end
context "without influxdb variables configured" do
it "skips export without influxdb url" do
context 'without influxdb variables configured' do
it 'skips export without influxdb url' do
stub_env('QA_INFLUXDB_URL', nil)
stub_env('QA_INFLUXDB_TOKEN', nil)
@ -106,7 +108,7 @@ describe QA::Support::Formatters::TestStatsFormatter do
expect(influx_client).not_to have_received(:create_write_api)
end
it "skips export without influxdb token" do
it 'skips export without influxdb token' do
stub_env('QA_INFLUXDB_URL', url)
stub_env('QA_INFLUXDB_TOKEN', nil)
@ -146,6 +148,19 @@ describe QA::Support::Formatters::TestStatsFormatter do
end
end
context 'with smoke spec' do
let(:smoke) { 'true' }
it 'exports data to influxdb with correct smoke tag' do
run_spec do
it('spec', :smoke, testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/1234') {}
end
expect(influx_write_api).to have_received(:write).once
expect(influx_write_api).to have_received(:write).with(data: [data])
end
end
context 'with quarantined spec' do
let(:quarantined) { 'true' }

View File

@ -71,6 +71,7 @@ describe QA::Tools::ReliableReport do
|> filter(fn: (r) => r.status != "pending" and
r.merge_request == "false" and
r.quarantined == "false" and
r.smoke == "false" and
r.reliable == "#{reliable}" and
r._field == "id"
)

View File

@ -359,10 +359,9 @@ RSpec.describe Projects::ServicesController do
def prometheus_integration_as_data
pi = project.prometheus_integration.reload
attrs = pi.attributes.except('encrypted_properties',
'encrypted_properties_iv',
'encrypted_properties_tmp')
'encrypted_properties_iv')
[attrs, pi.encrypted_properties_tmp]
[attrs, pi.properties]
end
end

View File

@ -22,7 +22,7 @@ RSpec.describe 'Database schema' do
approvals: %w[user_id],
approver_groups: %w[target_id],
approvers: %w[target_id user_id],
analytics_cycle_analytics_aggregations: %w[last_full_run_issues_id last_full_run_merge_requests_id last_incremental_issues_id last_incremental_merge_requests_id],
analytics_cycle_analytics_aggregations: %w[last_full_issues_id last_full_merge_requests_id last_incremental_issues_id last_full_run_issues_id last_full_run_merge_requests_id last_incremental_merge_requests_id],
analytics_cycle_analytics_merge_request_stage_events: %w[author_id group_id merge_request_id milestone_id project_id stage_event_hash_id state_id],
analytics_cycle_analytics_issue_stage_events: %w[author_id group_id issue_id milestone_id project_id stage_event_hash_id state_id],
audit_events: %w[author_id entity_id target_id],

View File

@ -189,7 +189,7 @@ FactoryBot.define do
end
trait :chat_notification do
webhook { 'https://example.com/webhook' }
sequence(:webhook) { |n| "https://example.com/webhook/#{n}" }
end
trait :inactive do

View File

@ -0,0 +1,18 @@
{
"type": "object",
"required": [
"id",
"name",
"config_project",
"created_at",
"created_by_user_id"
],
"properties": {
"id": { "type": "integer" },
"name": { "type": "string" },
"config_project": { "$ref": "project_identity.json" },
"created_at": { "type": "string", "format": "date-time" },
"created_by_user_id": { "type": "integer" }
},
"additionalProperties": false
}

View File

@ -0,0 +1,4 @@
{
"type": "array",
"items": { "$ref": "agent.json" }
}

View File

@ -0,0 +1,22 @@
{
"type": "object",
"required": [
"id",
"description",
"name",
"name_with_namespace",
"path",
"path_with_namespace",
"created_at"
],
"properties": {
"id": { "type": "integer" },
"description": { "type": ["string", "null"] },
"name": { "type": "string" },
"name_with_namespace": { "type": "string" },
"path": { "type": "string" },
"path_with_namespace": { "type": "string" },
"created_at": { "type": "string", "format": "date-time" }
},
"additionalProperties": false
}

View File

@ -60,7 +60,6 @@ describe('Header', () => {
setFixtures(`
<li class="js-nav-user-dropdown">
<a class="js-buy-pipeline-minutes-link" data-track-action="click_buy_ci_minutes" data-track-label="free" data-track-property="user_dropdown">Buy Pipeline minutes</a>
<a class="js-upgrade-plan-link" data-track-action="click_upgrade_link" data-track-label="free" data-track-property="user_dropdown">Upgrade</a>
</li>`);
trackingSpy = mockTracking('_category_', $('.js-nav-user-dropdown').element, jest.spyOn);
@ -81,14 +80,5 @@ describe('Header', () => {
property: 'user_dropdown',
});
});
it('sends a tracking event when the dropdown is opened and contains Upgrade link', () => {
$('.js-nav-user-dropdown').trigger('shown.bs.dropdown');
expect(trackingSpy).toHaveBeenCalledWith('some:page', 'show_upgrade_link', {
label: 'free',
property: 'user_dropdown',
});
});
});
});

View File

@ -222,66 +222,6 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
end
end
context 'when multiple orders with nil values are defined' do
let!(:project1) { create(:project, last_repository_check_at: 10.days.ago) } # Asc: project5 Desc: project3
let!(:project2) { create(:project, last_repository_check_at: nil) } # Asc: project1 Desc: project1
let!(:project3) { create(:project, last_repository_check_at: 5.days.ago) } # Asc: project3 Desc: project5
let!(:project4) { create(:project, last_repository_check_at: nil) } # Asc: project2 Desc: project2
let!(:project5) { create(:project, last_repository_check_at: 20.days.ago) } # Asc: project4 Desc: project4
context 'when ascending' do
let(:nodes) do
Project.order(Arel.sql('projects.last_repository_check_at IS NULL')).order(last_repository_check_at: :asc).order(id: :asc)
end
let(:ascending_nodes) { [project5, project1, project3, project2, project4] }
it_behaves_like 'nodes are in ascending order'
context 'when before cursor value is NULL' do
let(:arguments) { { before: encoded_cursor(project4) } }
it 'returns all projects before the cursor' do
expect(subject.sliced_nodes).to eq([project5, project1, project3, project2])
end
end
context 'when after cursor value is NULL' do
let(:arguments) { { after: encoded_cursor(project2) } }
it 'returns all projects after the cursor' do
expect(subject.sliced_nodes).to eq([project4])
end
end
end
context 'when descending' do
let(:nodes) do
Project.order(Arel.sql('projects.last_repository_check_at IS NULL')).order(last_repository_check_at: :desc).order(id: :asc)
end
let(:descending_nodes) { [project3, project1, project5, project2, project4] }
it_behaves_like 'nodes are in descending order'
context 'when before cursor value is NULL' do
let(:arguments) { { before: encoded_cursor(project4) } }
it 'returns all projects before the cursor' do
expect(subject.sliced_nodes).to eq([project3, project1, project5, project2])
end
end
context 'when after cursor value is NULL' do
let(:arguments) { { after: encoded_cursor(project2) } }
it 'returns all projects after the cursor' do
expect(subject.sliced_nodes).to eq([project4])
end
end
end
end
context 'when ordering uses LOWER' do
let!(:project1) { create(:project, name: 'A') } # Asc: project1 Desc: project4
let!(:project2) { create(:project, name: 'c') } # Asc: project5 Desc: project2

View File

@ -0,0 +1,42 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe ConsumeRemainingEncryptIntegrationPropertyJobs, :migration do
subject(:migration) { described_class.new }
let(:integrations) { table(:integrations) }
let(:bg_migration_class) { ::Gitlab::BackgroundMigration::EncryptIntegrationProperties }
let(:bg_migration) { instance_double(bg_migration_class) }
before do
stub_const("#{described_class.name}::BATCH_SIZE", 2)
end
it 'performs remaining background migrations', :aggregate_failures do
# Already migrated
integrations.create!(properties: some_props, encrypted_properties: 'abc')
integrations.create!(properties: some_props, encrypted_properties: 'def')
integrations.create!(properties: some_props, encrypted_properties: 'xyz')
# update required
record1 = integrations.create!(properties: some_props)
record2 = integrations.create!(properties: some_props)
record3 = integrations.create!(properties: some_props)
# No update required
integrations.create!(properties: nil)
integrations.create!(properties: nil)
expect(Gitlab::BackgroundMigration).to receive(:steal).with(bg_migration_class.name.demodulize)
expect(bg_migration_class).to receive(:new).twice.and_return(bg_migration)
expect(bg_migration).to receive(:perform).with(record1.id, record2.id)
expect(bg_migration).to receive(:perform).with(record3.id, record3.id)
migrate!
end
def some_props
{ iid: generate(:iid), url: generate(:url), username: generate(:username) }.to_json
end
end

View File

@ -10,7 +10,7 @@ RSpec.describe Analytics::CycleAnalytics::Aggregation, type: :model do
it { is_expected.not_to validate_presence_of(:group) }
it { is_expected.not_to validate_presence_of(:enabled) }
%i[incremental_runtimes_in_seconds incremental_processed_records last_full_run_runtimes_in_seconds last_full_run_processed_records].each do |column|
%i[incremental_runtimes_in_seconds incremental_processed_records full_runtimes_in_seconds full_processed_records].each do |column|
it "validates the array length of #{column}" do
record = described_class.new(column => [1] * 11)
@ -20,6 +20,81 @@ RSpec.describe Analytics::CycleAnalytics::Aggregation, type: :model do
end
end
describe 'attribute updater methods' do
subject(:aggregation) { build(:cycle_analytics_aggregation) }
describe '#cursor_for' do
it 'returns empty cursors' do
aggregation.last_full_issues_id = nil
aggregation.last_full_issues_updated_at = nil
expect(aggregation.cursor_for(:full, Issue)).to eq({})
end
context 'when cursor is not empty' do
it 'returns the cursor values' do
current_time = Time.current
aggregation.last_full_issues_id = 1111
aggregation.last_full_issues_updated_at = current_time
expect(aggregation.cursor_for(:full, Issue)).to eq({ id: 1111, updated_at: current_time })
end
end
end
describe '#refresh_last_run' do
it 'updates the run_at column' do
freeze_time do
aggregation.refresh_last_run(:incremental)
expect(aggregation.last_incremental_run_at).to eq(Time.current)
end
end
end
describe '#reset_full_run_cursors' do
it 'resets all full run cursors to nil' do
aggregation.last_full_issues_id = 111
aggregation.last_full_issues_updated_at = Time.current
aggregation.last_full_merge_requests_id = 111
aggregation.last_full_merge_requests_updated_at = Time.current
aggregation.reset_full_run_cursors
expect(aggregation).to have_attributes(
last_full_issues_id: nil,
last_full_issues_updated_at: nil,
last_full_merge_requests_id: nil,
last_full_merge_requests_updated_at: nil
)
end
end
describe '#set_cursor' do
it 'sets the cursor values for the given mode' do
aggregation.set_cursor(:full, Issue, { id: 2222, updated_at: nil })
expect(aggregation).to have_attributes(
last_full_issues_id: 2222,
last_full_issues_updated_at: nil
)
end
end
describe '#set_stats' do
it 'appends stats to the runtime and processed_records attributes' do
aggregation.set_stats(:full, 10, 20)
aggregation.set_stats(:full, 20, 30)
expect(aggregation).to have_attributes(
full_runtimes_in_seconds: [10, 20],
full_processed_records: [20, 30]
)
end
end
end
describe '#safe_create_for_group' do
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }

View File

@ -117,6 +117,23 @@ RSpec.describe Clusters::Agent do
end
end
describe '#last_used_agent_tokens' do
let_it_be(:agent) { create(:cluster_agent) }
subject { agent.last_used_agent_tokens }
context 'agent has no tokens' do
it { is_expected.to be_empty }
end
context 'agent has active and inactive tokens' do
let!(:active_token) { create(:cluster_agent_token, agent: agent, last_used_at: 1.minute.ago) }
let!(:inactive_token) { create(:cluster_agent_token, agent: agent, last_used_at: 2.hours.ago) }
it { is_expected.to contain_exactly(active_token, inactive_token) }
end
end
describe '#activity_event_deletion_cutoff' do
let_it_be(:agent) { create(:cluster_agent) }
let_it_be(:event1) { create(:agent_activity_event, agent: agent, recorded_at: 1.hour.ago) }

View File

@ -276,6 +276,20 @@ RSpec.describe Integration do
end
end
describe '#inheritable?' do
it 'is true for an instance integration' do
expect(create(:integration, :instance)).to be_inheritable
end
it 'is true for a group integration' do
expect(create(:integration, :group)).to be_inheritable
end
it 'is false for a project integration' do
expect(create(:integration)).not_to be_inheritable
end
end
describe '.build_from_integration' do
context 'when integration is invalid' do
let(:invalid_integration) do
@ -644,6 +658,33 @@ RSpec.describe Integration do
end
end
describe '#properties=' do
let(:integration_type) do
Class.new(described_class) do
field :foo
field :bar
end
end
it 'supports indifferent access' do
integration = integration_type.new
integration.properties = { foo: 1, 'bar' => 2 }
expect(integration).to have_attributes(foo: 1, bar: 2)
end
end
describe '#properties' do
it 'is not mutable' do
integration = described_class.new
integration.properties = { foo: 1, bar: 2 }
expect { integration.properties[:foo] = 3 }.to raise_error
end
end
describe "{property}_touched?" do
let(:integration) do
Integrations::Bamboo.create!(
@ -896,45 +937,26 @@ RSpec.describe Integration do
end
end
describe 'encrypted_properties' do
describe '#to_integration_hash' do
let(:properties) { { foo: 1, bar: true } }
let(:db_props) { properties.stringify_keys }
let(:record) { create(:integration, :instance, properties: properties) }
it 'contains the same data as properties' do
expect(record).to have_attributes(
properties: db_props,
encrypted_properties_tmp: db_props
)
end
it 'does not include the properties key' do
hash = record.to_integration_hash
it 'is persisted' do
encrypted_properties = described_class.id_in(record.id)
expect(encrypted_properties).to contain_exactly have_attributes(encrypted_properties_tmp: db_props)
end
it 'is updated when using prop_accessors' do
some_integration = Class.new(described_class) do
prop_accessor :foo
end
record = some_integration.new
record.foo = 'the foo'
expect(record.encrypted_properties_tmp).to eq({ 'foo' => 'the foo' })
expect(hash).not_to have_key('properties')
end
it 'saves correctly using insert_all' do
hash = record.to_integration_hash
hash[:project_id] = project
hash[:project_id] = project.id
expect do
described_class.insert_all([hash])
end.to change(described_class, :count).by(1)
expect(described_class.last).to have_attributes(encrypted_properties_tmp: db_props)
expect(described_class.last).to have_attributes(properties: db_props)
end
it 'is part of the to_integration_hash' do
@ -944,7 +966,7 @@ RSpec.describe Integration do
expect(hash['encrypted_properties']).not_to eq(record.encrypted_properties)
expect(hash['encrypted_properties_iv']).not_to eq(record.encrypted_properties_iv)
decrypted = described_class.decrypt(:encrypted_properties_tmp,
decrypted = described_class.decrypt(:properties,
hash['encrypted_properties'],
{ iv: hash['encrypted_properties_iv'] })
@ -969,7 +991,7 @@ RSpec.describe Integration do
end.to change(described_class, :count).by(1)
expect(described_class.last).not_to eq record
expect(described_class.last).to have_attributes(encrypted_properties_tmp: db_props)
expect(described_class.last).to have_attributes(properties: db_props)
end
end
end
@ -1094,4 +1116,47 @@ RSpec.describe Integration do
)
end
end
describe '#attributes' do
it 'does not include properties' do
expect(create(:integration).attributes).not_to have_key('properties')
end
it 'can be used in assign_attributes without nullifying properties' do
record = create(:integration, :instance, properties: { url: generate(:url) })
attrs = record.attributes
expect { record.assign_attributes(attrs) }.not_to change(record, :properties)
end
end
describe '#dup' do
let(:original) { create(:integration, properties: { one: 1, two: 2, three: 3 }) }
it 'results in distinct ciphertexts, but identical properties' do
copy = original.dup
expect(copy).to have_attributes(properties: eq(original.properties))
expect(copy).not_to have_attributes(
encrypted_properties: eq(original.encrypted_properties)
)
end
context 'when the model supports data-fields' do
let(:original) { create(:jira_integration, username: generate(:username), url: generate(:url)) }
it 'creates distinct but identical data-fields' do
copy = original.dup
expect(copy).to have_attributes(
username: original.username,
url: original.url
)
expect(copy.data_fields).not_to eq(original.data_fields)
end
end
end
end

View File

@ -24,7 +24,7 @@ RSpec.describe Integrations::ExternalWiki do
describe 'test' do
before do
subject.properties['external_wiki_url'] = url
subject.external_wiki_url = url
end
let(:url) { 'http://foo' }

View File

@ -187,7 +187,7 @@ RSpec.describe Integrations::Jira do
subject(:integration) { described_class.create!(params) }
it 'does not store data into properties' do
expect(integration.properties).to be_nil
expect(integration.properties).to be_empty
end
it 'stores data in data_fields correctly' do

View File

@ -6,12 +6,12 @@ RSpec.describe Integrations::Slack do
it_behaves_like Integrations::SlackMattermostNotifier, "Slack"
describe '#execute' do
let_it_be(:slack_integration) { create(:integrations_slack, branches_to_be_notified: 'all') }
before do
stub_request(:post, slack_integration.webhook)
end
let_it_be(:slack_integration) { create(:integrations_slack, branches_to_be_notified: 'all') }
it 'uses only known events', :aggregate_failures do
described_class::SUPPORTED_EVENTS_FOR_USAGE_LOG.each do |action|
expect(Gitlab::UsageDataCounters::HLLRedisCounter.known_event?("i_ecosystem_slack_service_#{action}_notification")).to be true

View File

@ -0,0 +1,153 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe API::Clusters::Agents do
let_it_be(:agent) { create(:cluster_agent) }
let(:user) { agent.created_by_user }
let(:unauthorized_user) { create(:user) }
let!(:project) { agent.project }
before do
project.add_maintainer(user)
end
describe 'GET /projects/:id/cluster_agents' do
context 'authorized user' do
it 'returns project agents' do
get api("/projects/#{project.id}/cluster_agents", user)
aggregate_failures "testing response" do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(response).to match_response_schema('public_api/v4/agents')
expect(json_response.count).to eq(1)
expect(json_response.first['name']).to eq(agent.name)
end
end
end
context 'unauthorized user' do
it 'unable to access agents' do
get api("/projects/#{project.id}/cluster_agents", unauthorized_user)
expect(response).to have_gitlab_http_status(:not_found)
end
end
it 'avoids N+1 queries', :request_store do
# Establish baseline
get api("/projects/#{project.id}/cluster_agents", user)
control = ActiveRecord::QueryRecorder.new do
get api("/projects/#{project.id}/cluster_agents", user)
end
# Now create a second record and ensure that the API does not execute
# any more queries than before
create(:cluster_agent, project: project)
expect do
get api("/projects/#{project.id}/cluster_agents", user)
end.not_to exceed_query_limit(control)
end
end
describe 'GET /projects/:id/cluster_agents/:agent_id' do
context 'authorized user' do
it 'returns a project agent' do
get api("/projects/#{project.id}/cluster_agents/#{agent.id}", user)
aggregate_failures "testing response" do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/agent')
expect(json_response['name']).to eq(agent.name)
end
end
it 'returns a 404 error if agent id is not available' do
get api("/projects/#{project.id}/cluster_agents/#{non_existing_record_id}", user)
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'unauthorized user' do
it 'unable to access an existing agent' do
get api("/projects/#{project.id}/cluster_agents/#{agent.id}", unauthorized_user)
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
describe 'POST /projects/:id/cluster_agents' do
it 'adds agent to project' do
expect do
post(api("/projects/#{project.id}/cluster_agents", user),
params: { name: 'some-agent' })
end.to change {project.cluster_agents.count}.by(1)
aggregate_failures "testing response" do
expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/agent')
expect(json_response['name']).to eq('some-agent')
end
end
it 'returns a 400 error if name not given' do
post api("/projects/#{project.id}/cluster_agents", user)
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns a 400 error if name is invalid' do
post api("/projects/#{project.id}/cluster_agents", user), params: { name: '#4^x' }
aggregate_failures "testing response" do
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message'])
.to include("Name can contain only lowercase letters, digits, and '-', but cannot start or end with '-'")
end
end
it 'returns 404 error if project does not exist' do
post api("/projects/#{non_existing_record_id}/cluster_agents", user), params: { name: 'some-agent' }
expect(response).to have_gitlab_http_status(:not_found)
end
end
describe 'DELETE /projects/:id/cluster_agents/:agent_id' do
it 'deletes agent from project' do
expect do
delete api("/projects/#{project.id}/cluster_agents/#{agent.id}", user)
expect(response).to have_gitlab_http_status(:no_content)
end.to change {project.cluster_agents.count}.by(-1)
end
it 'returns a 404 error when deleting non existent agent' do
delete api("/projects/#{project.id}/cluster_agents/#{non_existing_record_id}", user)
expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 error if agent id not given' do
delete api("/projects/#{project.id}/cluster_agents", user)
expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 if the user is unauthorized to delete' do
delete api("/projects/#{project.id}/cluster_agents/#{agent.id}", unauthorized_user)
expect(response).to have_gitlab_http_status(:not_found)
end
it_behaves_like '412 response' do
let(:request) { api("/projects/#{project.id}/cluster_agents/#{agent.id}", user) }
end
end
end

View File

@ -9,7 +9,13 @@ RSpec.describe BulkUpdateIntegrationService do
stub_jira_integration_test
end
let(:excluded_attributes) { %w[id project_id group_id inherit_from_id instance template created_at updated_at] }
let(:excluded_attributes) do
%w[
id project_id group_id inherit_from_id instance template
created_at updated_at encrypted_properties encrypted_properties_iv
]
end
let(:batch) do
Integration.inherited_descendants_from_self_or_ancestors_from(subgroup_integration).where(id: group_integration.id..integration.id)
end
@ -50,7 +56,9 @@ RSpec.describe BulkUpdateIntegrationService do
end
context 'with integration with data fields' do
let(:excluded_attributes) { %w[id service_id created_at updated_at] }
let(:excluded_attributes) do
%w[id service_id created_at updated_at encrypted_properties encrypted_properties_iv]
end
it 'updates the data fields from the integration', :aggregate_failures do
described_class.new(subgroup_integration, batch).execute

View File

@ -407,10 +407,11 @@ RSpec.describe Projects::Operations::UpdateService do
context 'prometheus integration' do
context 'prometheus params were passed into service' do
let(:prometheus_integration) do
build_stubbed(:prometheus_integration, project: project, properties: {
let!(:prometheus_integration) do
create(:prometheus_integration, :instance, properties: {
api_url: "http://example.prometheus.com",
manual_configuration: "0"
manual_configuration: "0",
google_iap_audience_client_id: 123
})
end
@ -424,21 +425,23 @@ RSpec.describe Projects::Operations::UpdateService do
end
it 'uses Project#find_or_initialize_integration to include instance defined defaults and pass them to Projects::UpdateService', :aggregate_failures do
project_update_service = double(Projects::UpdateService)
expect(project)
.to receive(:find_or_initialize_integration)
.with('prometheus')
.and_return(prometheus_integration)
expect(Projects::UpdateService).to receive(:new) do |project_arg, user_arg, update_params_hash|
prometheus_attrs = update_params_hash[:prometheus_integration_attributes]
expect(project_arg).to eq project
expect(user_arg).to eq user
expect(update_params_hash[:prometheus_integration_attributes]).to include('properties' => { 'api_url' => 'http://new.prometheus.com', 'manual_configuration' => '1' })
expect(update_params_hash[:prometheus_integration_attributes]).not_to include(*%w(id project_id created_at updated_at))
end.and_return(project_update_service)
expect(project_update_service).to receive(:execute)
expect(prometheus_attrs).to have_key('encrypted_properties')
expect(prometheus_attrs.keys).not_to include(*%w(id project_id created_at updated_at properties))
expect(prometheus_attrs['encrypted_properties']).not_to eq(prometheus_integration.encrypted_properties)
end.and_call_original
subject.execute
expect { subject.execute }.to change(Integrations::Prometheus, :count).by(1)
expect(Integrations::Prometheus.last).to have_attributes(
api_url: 'http://new.prometheus.com',
manual_configuration: true,
google_iap_audience_client_id: 123
)
end
end

View File

@ -198,23 +198,23 @@ RSpec.describe Projects::TransferService do
context 'with a project integration' do
let_it_be_with_reload(:project) { create(:project, namespace: user.namespace) }
let_it_be(:instance_integration) { create(:integrations_slack, :instance, webhook: 'http://project.slack.com') }
let_it_be(:instance_integration) { create(:integrations_slack, :instance) }
let_it_be(:project_integration) { create(:integrations_slack, project: project) }
context 'with an inherited integration' do
let_it_be(:project_integration) { create(:integrations_slack, project: project, webhook: 'http://project.slack.com', inherit_from_id: instance_integration.id) }
context 'when it inherits from instance_integration' do
before do
project_integration.update!(inherit_from_id: instance_integration.id, webhook: instance_integration.webhook)
end
it 'replaces inherited integrations', :aggregate_failures do
execute_transfer
expect(project.slack_integration.webhook).to eq(group_integration.webhook)
expect(Integration.count).to eq(3)
expect { execute_transfer }
.to change(Integration, :count).by(0)
.and change { project.slack_integration.webhook }.to eq(group_integration.webhook)
end
end
context 'with a custom integration' do
let_it_be(:project_integration) { create(:integrations_slack, project: project, webhook: 'http://project.slack.com') }
it 'does not updates the integrations' do
it 'does not update the integrations' do
expect { execute_transfer }.not_to change { project.slack_integration.webhook }
end
end

View File

@ -63,7 +63,7 @@ RSpec.describe Projects::PostCreationWorker do
end
it 'cleans invalid record and logs warning', :aggregate_failures do
invalid_integration_record = build(:prometheus_integration, properties: { api_url: nil, manual_configuration: true }.to_json)
invalid_integration_record = build(:prometheus_integration, properties: { api_url: nil, manual_configuration: true })
allow(::Integrations::Prometheus).to receive(:new).and_return(invalid_integration_record)
expect(Gitlab::ErrorTracking).to receive(:track_exception).with(an_instance_of(ActiveRecord::RecordInvalid), include(extra: { project_id: a_kind_of(Integer) })).twice