Add latest changes from gitlab-org/gitlab@master
commit f781b0b693 (parent 1d5ae049f0)
22 changed files with 553 additions and 37 deletions
@@ -334,3 +334,22 @@ webpack-dev-server:
     expire_in: 31d
     paths:
       - webpack-dev-server.json
+
+bundle-size-review:
+  extends:
+    - .default-retry
+    - .frontend:rules:bundle-size-review
+  image: registry.gitlab.com/gitlab-org/gitlab-build-images:danger
+  stage: test
+  needs: ["gitlab:assets:compile pull-cache"]
+  script:
+    - mkdir -p bundle-size-review
+    - cp webpack-report/index.html bundle-size-review/bundle-report.html
+    - yarn global add https://gitlab.com/gitlab-org/frontend/playground/webpack-memory-metrics.git
+    - danger --dangerfile=danger/bundle_size/Dangerfile --fail-on-errors=true --verbose --danger_id=bundle-size-review
+  artifacts:
+    when: always
+    name: bundle-size-review
+    expire_in: 31d
+    paths:
+      - bundle-size-review
@@ -78,9 +78,11 @@
 .frontend-patterns: &frontend-patterns
   - "{package.json,yarn.lock}"
-  - "{babel.config,jest.config}.js"
+  - "babel.config.js"
+  - "jest.config.{base,integration,unit}.js"
   - ".csscomb.json"
   - "Dockerfile.assets"
+  - "config/**/*.js"
   - "vendor/assets/**/*"
   - "{,ee/}{app/assets,app/helpers,app/presenters,app/views,locale,public,symbol}/**/*"

@@ -93,7 +95,8 @@
 .code-patterns: &code-patterns
   - "{package.json,yarn.lock}"
-  - "{babel.config,jest.config}.js"
+  - "babel.config.js"
+  - "jest.config.{base,integration,unit}.js"
   - ".csscomb.json"
   - "Dockerfile.assets"
   - "vendor/assets/**/*"

@@ -113,7 +116,8 @@
 .code-backstage-patterns: &code-backstage-patterns
   - "{package.json,yarn.lock}"
-  - "{babel.config,jest.config}.js"
+  - "babel.config.js"
+  - "jest.config.{base,integration,unit}.js"
   - ".csscomb.json"
   - "Dockerfile.assets"
   - "vendor/assets/**/*"

@@ -135,7 +139,8 @@
 .code-qa-patterns: &code-qa-patterns
   - "{package.json,yarn.lock}"
-  - "{babel.config,jest.config}.js"
+  - "babel.config.js"
+  - "jest.config.{base,integration,unit}.js"
   - ".csscomb.json"
   - "Dockerfile.assets"
   - "vendor/assets/**/*"

@@ -154,7 +159,8 @@
 .code-backstage-qa-patterns: &code-backstage-qa-patterns
   - "{package.json,yarn.lock}"
-  - "{babel.config,jest.config}.js"
+  - "babel.config.js"
+  - "jest.config.{base,integration,unit}.js"
   - ".csscomb.json"
   - "Dockerfile.assets"
   - "vendor/assets/**/*"

@@ -335,6 +341,12 @@
       changes: *frontend-dependency-patterns
       allow_failure: true

+.frontend:rules:bundle-size-review:
+  rules:
+    - if: '$DANGER_GITLAB_API_TOKEN && $CI_MERGE_REQUEST_IID && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "master"'
+      changes: *frontend-patterns
+      allow_failure: true
+
 ################
 # Memory rules #
 ################
@@ -375,7 +375,6 @@ RSpec/LeakyConstantDeclaration:
     - 'spec/models/concerns/bulk_insertable_associations_spec.rb'
     - 'spec/models/concerns/triggerable_hooks_spec.rb'
    - 'spec/models/repository_spec.rb'
-    - 'spec/requests/api/graphql/tasks/task_completion_status_spec.rb'
     - 'spec/serializers/commit_entity_spec.rb'
     - 'spec/services/clusters/applications/check_installation_progress_service_spec.rb'
     - 'spec/services/clusters/applications/check_uninstall_progress_service_spec.rb'
@@ -16,10 +16,12 @@ module IntegrationsActions
   def update
     saved = integration.update(service_params[:service])
+    overwrite = ActiveRecord::Type::Boolean.new.cast(params[:overwrite])
 
     respond_to do |format|
       format.html do
         if saved
+          PropagateIntegrationWorker.perform_async(integration.id, overwrite)
           redirect_to scoped_edit_integration_path(integration), notice: success_message
         else
           render 'shared/integrations/edit'
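Note: `ActiveRecord::Type::Boolean#cast` is what turns the raw `overwrite` request parameter into a boolean here. A quick illustrative sketch of its standard behaviour (input values chosen for illustration only):

    ActiveRecord::Type::Boolean.new.cast('true') # => true
    ActiveRecord::Type::Boolean.new.cast('0')    # => false
    ActiveRecord::Type::Boolean.new.cast(nil)    # => nil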
app/models/data_list.rb (new file, 25 lines)
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+class DataList
+  def initialize(batch, data_fields_hash, klass)
+    @batch = batch
+    @data_fields_hash = data_fields_hash
+    @klass = klass
+  end
+
+  def to_array
+    [klass, columns, values]
+  end
+
+  private
+
+  attr_reader :batch, :data_fields_hash, :klass
+
+  def columns
+    data_fields_hash.keys << 'service_id'
+  end
+
+  def values
+    batch.map { |row| data_fields_hash.values << row['id'] }
+  end
+end
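A minimal usage sketch for `DataList`, mirroring how the propagation services below consume it (`results` stands in for the rows returned by the services bulk insert, each responding to `['id']`):

    klass, columns, values = DataList.new(results, data_fields_hash, integration.data_fields.class).to_array
    # columns == data_fields_hash.keys plus 'service_id'
    # values  == one row of data-field values per inserted service, with its service id appended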
@@ -134,6 +134,14 @@ class Service < ApplicationRecord
     %w(active)
   end
 
+  def to_service_hash
+    as_json(methods: :type, except: %w[id template instance project_id])
+  end
+
+  def to_data_fields_hash
+    data_fields.as_json(only: data_fields.class.column_names).except('id', 'service_id')
+  end
+
   def test_data(project, user)
     Gitlab::DataBuilder::Push.build_sample(project, user)
   end
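A rough sketch of what these two helpers return for a Jira integration (attribute names below are illustrative, not exhaustive):

    integration.to_service_hash
    # => { "type" => "JiraService", "active" => true, ... }  # without id/template/instance/project_id
    integration.to_data_fields_hash
    # => { "url" => "...", "username" => "...", ... }         # without id/service_id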
app/models/service_list.rb (new file, 27 lines)
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+class ServiceList
+  def initialize(batch, service_hash, extra_hash = {})
+    @batch = batch
+    @service_hash = service_hash
+    @extra_hash = extra_hash
+  end
+
+  def to_array
+    [Service, columns, values]
+  end
+
+  private
+
+  attr_reader :batch, :service_hash, :extra_hash
+
+  def columns
+    (service_hash.keys << 'project_id') + extra_hash.keys
+  end
+
+  def values
+    batch.map do |project_id|
+      (service_hash.values << project_id) + extra_hash.values
+    end
+  end
+end
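A minimal usage sketch for `ServiceList`, as consumed by `Projects::PropagateServiceTemplate` and the new `Admin::PropagateIntegrationService` below (`batch` is a list of project IDs and `service_hash` comes from `Service#to_service_hash`):

    klass, columns, values = ServiceList.new(batch, service_hash, { 'inherit_from_id' => integration.id }).to_array
    # klass   == Service
    # columns == service_hash.keys plus 'project_id' and 'inherit_from_id'
    # values  == one row per project id, with the project id and inherit_from_id appended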
app/services/admin/propagate_integration_service.rb (new file, 144 lines)
@@ -0,0 +1,144 @@
+# frozen_string_literal: true
+
+module Admin
+  class PropagateIntegrationService
+    BATCH_SIZE = 100
+
+    delegate :data_fields_present?, to: :integration
+
+    def self.propagate(integration:, overwrite:)
+      new(integration, overwrite).propagate
+    end
+
+    def initialize(integration, overwrite)
+      @integration = integration
+      @overwrite = overwrite
+    end
+
+    def propagate
+      if overwrite
+        update_integration_for_all_projects
+      else
+        update_integration_for_inherited_projects
+      end
+
+      create_integration_for_projects_without_integration
+    end
+
+    private
+
+    attr_reader :integration, :overwrite
+
+    # rubocop: disable Cop/InBatches
+    # rubocop: disable CodeReuse/ActiveRecord
+    def update_integration_for_inherited_projects
+      Service.where(type: integration.type, inherit_from_id: integration.id).in_batches(of: BATCH_SIZE) do |batch|
+        bulk_update_from_integration(batch)
+      end
+    end
+
+    def update_integration_for_all_projects
+      Service.where(type: integration.type).in_batches(of: BATCH_SIZE) do |batch|
+        bulk_update_from_integration(batch)
+      end
+    end
+    # rubocop: enable Cop/InBatches
+    # rubocop: enable CodeReuse/ActiveRecord
+
+    # rubocop: disable CodeReuse/ActiveRecord
+    def bulk_update_from_integration(batch)
+      # Retrieving the IDs instantiates the ActiveRecord relation (batch)
+      # into concrete models, otherwise update_all will clear the relation.
+      # https://stackoverflow.com/q/34811646/462015
+      batch_ids = batch.pluck(:id)
+
+      Service.transaction do
+        batch.update_all(service_hash)
+
+        if data_fields_present?
+          integration.data_fields.class.where(service_id: batch_ids).update_all(data_fields_hash)
+        end
+      end
+    end
+    # rubocop: enable CodeReuse/ActiveRecord
+
+    def create_integration_for_projects_without_integration
+      loop do
+        batch = Project.uncached { project_ids_without_integration }
+
+        bulk_create_from_integration(batch) unless batch.empty?
+
+        break if batch.size < BATCH_SIZE
+      end
+    end
+
+    def bulk_create_from_integration(batch)
+      service_list = ServiceList.new(batch, service_hash, { 'inherit_from_id' => integration.id }).to_array
+
+      Project.transaction do
+        results = bulk_insert(*service_list)
+
+        if data_fields_present?
+          data_list = DataList.new(results, data_fields_hash, integration.data_fields.class).to_array
+
+          bulk_insert(*data_list)
+        end
+
+        run_callbacks(batch)
+      end
+    end
+
+    def bulk_insert(klass, columns, values_array)
+      items_to_insert = values_array.map { |array| Hash[columns.zip(array)] }
+
+      klass.insert_all(items_to_insert, returning: [:id])
+    end
+
+    # rubocop: disable CodeReuse/ActiveRecord
+    def run_callbacks(batch)
+      if active_external_issue_tracker?
+        Project.where(id: batch).update_all(has_external_issue_tracker: true)
+      end
+
+      if active_external_wiki?
+        Project.where(id: batch).update_all(has_external_wiki: true)
+      end
+    end
+    # rubocop: enable CodeReuse/ActiveRecord
+
+    def active_external_issue_tracker?
+      integration.issue_tracker? && !integration.default
+    end
+
+    def active_external_wiki?
+      integration.type == 'ExternalWikiService'
+    end
+
+    def project_ids_without_integration
+      Project.connection.select_values(
+        <<-SQL
+          SELECT id
+          FROM projects
+          WHERE NOT EXISTS (
+            SELECT true
+            FROM services
+            WHERE services.project_id = projects.id
+            AND services.type = #{ActiveRecord::Base.connection.quote(integration.type)}
+          )
+          AND projects.pending_delete = false
+          AND projects.archived = false
+          LIMIT #{BATCH_SIZE}
+        SQL
+      )
+    end
+
+    def service_hash
+      @service_hash ||= integration.to_service_hash
+        .tap { |json| json['inherit_from_id'] = integration.id }
+    end
+
+    def data_fields_hash
+      @data_fields_hash ||= integration.to_data_fields_hash
+    end
+  end
+end
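A brief invocation sketch for this service; in practice it is reached asynchronously through the new `PropagateIntegrationWorker` further down, but the entry point is the same keyword-argument call:

    Admin::PropagateIntegrationService.propagate(integration: integration, overwrite: true)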
@@ -35,17 +35,15 @@ module Projects
     end
 
     def bulk_create_from_template(batch)
-      service_list = batch.map do |project_id|
-        service_hash.values << project_id
-      end
+      service_list = ServiceList.new(batch, service_hash).to_array
 
       Project.transaction do
-        results = bulk_insert(Service, service_hash.keys << 'project_id', service_list)
+        results = bulk_insert(*service_list)
 
         if data_fields_present?
-          data_list = results.map { |row| data_hash.values << row['id'] }
+          data_list = DataList.new(results, data_fields_hash, template.data_fields.class).to_array
 
-          bulk_insert(template.data_fields.class, data_hash.keys << 'service_id', data_list)
+          bulk_insert(*data_list)
         end
 
         run_callbacks(batch)

@@ -77,11 +75,11 @@ module Projects
     end
 
     def service_hash
-      @service_hash ||= template.as_json(methods: :type, except: %w[id template project_id])
+      @service_hash ||= template.to_service_hash
     end
 
-    def data_hash
-      @data_hash ||= template.data_fields.as_json(only: template.data_fields.class.column_names).except('id', 'service_id')
+    def data_fields_hash
+      @data_fields_hash ||= template.to_data_fields_hash
     end
 
     # rubocop: disable CodeReuse/ActiveRecord
@@ -1291,6 +1291,13 @@
   :resource_boundary: :unknown
   :weight: 1
   :idempotent: true
+- :name: propagate_integration
+  :feature_category: :integrations
+  :has_external_dependencies:
+  :urgency: :low
+  :resource_boundary: :unknown
+  :weight: 1
+  :idempotent: true
 - :name: propagate_service_template
   :feature_category: :source_code_management
   :has_external_dependencies:
app/workers/propagate_integration_worker.rb (new file, 16 lines)
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class PropagateIntegrationWorker
+  include ApplicationWorker
+
+  feature_category :integrations
+
+  idempotent!
+
+  def perform(integration_id, overwrite)
+    Admin::PropagateIntegrationService.propagate(
+      integration: Service.find(integration_id),
+      overwrite: overwrite
+    )
+  end
+end
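A minimal enqueueing sketch; this is the call the updated `IntegrationsActions#update` above performs once the integration has been saved:

    PropagateIntegrationWorker.perform_async(integration.id, overwrite)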
changelogs/unreleased/leaky-constant-fix-14.yml (new file, 5 lines)
@@ -0,0 +1,5 @@
+---
+title: Fix leaky constant issue in task completion status spec
+merge_request: 32043
+author: Rajendra Kadam
+type: fixed

New changelog file (path not shown in this view, 5 lines)
@@ -0,0 +1,5 @@
+---
+title: Replace slot syntax for Vue 3 migration
+merge_request: 31987
+author: gaslan
+type: other
@@ -210,6 +210,8 @@
   - 1
 - - prometheus_create_default_alerts
   - 1
+- - propagate_integration
+  - 1
 - - propagate_service_template
   - 1
 - - reactive_caching
danger/bundle_size/Dangerfile (new file, 38 lines)
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+analysis_result = "./bundle-size-review/analysis.json"
+markdown_result = "./bundle-size-review/comparison.md"
+
+# Executing the webpack-entry-point-analyser
+# We would like to do that in the CI file directly,
+# but unfortunately the head_commit SHA is not available
+# as a CI variable due to our merge into master simulation
+analyze_cmd = [
+  "webpack-entry-point-analyser",
+  "--from-file ./webpack-report/stats.json",
+  "--json #{analysis_result}",
+  " --sha #{gitlab&.head_commit}"
+].join(" ")
+
+# execute analysis
+`#{analyze_cmd}`
+
+# We are executing the comparison by comparing the start_sha
+# to the current pipeline result. The start_sha is the commit
+# from master that was merged into for the merged pipeline.
+comparison_cmd = [
+  "webpack-compare-reports",
+  "--from-sha #{gitlab.mr_json["diff_refs"]["start_sha"]}",
+  "--to-file #{analysis_result}",
+  "--html ./bundle-size-review/comparison.html",
+  "--markdown #{markdown_result}"
+].join(" ")
+
+# execute comparison
+`#{comparison_cmd}`
+
+comment = `cat #{markdown_result}`
+
+markdown(<<~MARKDOWN)
+  #{comment}
+MARKDOWN
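For context, this Dangerfile is executed by the `bundle-size-review` CI job added above, via:

    danger --dangerfile=danger/bundle_size/Dangerfile --fail-on-errors=true --verbose --danger_id=bundle-size-review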
@@ -183,6 +183,27 @@
 As GitLab CI/CD has evolved, certain breaking changes have
 been necessary. These are:
 
+#### 13.0
+
+- [Remove Backported
+  `os.Expand`](https://gitlab.com/gitlab-org/gitlab-runner/issues/4915)
+- [Remove Fedora 29 package
+  support](https://gitlab.com/gitlab-org/gitlab-runner/issues/16158)
+- [Remove macOS 32-bit
+  support](https://gitlab.com/gitlab-org/gitlab-runner/issues/25466)
+- [Removed `debug/jobs/list?v=1`
+  endpoint](https://gitlab.com/gitlab-org/gitlab-runner/issues/6361)
+- [Remove support for array of strings when defining services for Docker
+  executor](https://gitlab.com/gitlab-org/gitlab-runner/issues/4922)
+- [Remove `--docker-services` flag on register
+  command](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/6404)
+- [Remove legacy build directory
+  caching](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4180)
+- [Remove `FF_USE_LEGACY_VOLUMES_MOUNTING_ORDER` feature
+  flag](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/6581)
+- [Remove support for Windows Server
+  1803](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/6553)
+
 #### 12.0
 
 - [Use refspec to clone/fetch Git
@@ -46,25 +46,25 @@ More useful links:
 In this section we will explain the six different technologies we use to gather product usage data.
 
-**Snowplow JS (Frontend)**
+### Snowplow JS (Frontend)
 
 Snowplow is an enterprise-grade marketing and product analytics platform which helps track the way users engage with our website and application. [Snowplow JS](https://github.com/snowplow/snowplow/wiki/javascript-tracker) is a frontend tracker for client-side events.
 
-**Snowplow Ruby (Backend)**
+### Snowplow Ruby (Backend)
 
 Snowplow is an enterprise-grade marketing and product analytics platform which helps track the way users engage with our website and application. [Snowplow Ruby](https://github.com/snowplow/snowplow/wiki/ruby-tracker) is a backend tracker for server-side events.
 
-**Usage Ping**
+### Usage Ping
 
 Usage Ping is a method for GitLab Inc to collect usage data on a GitLab instance. Usage Ping is primarily composed of row counts for different tables in the instance’s database. By comparing these counts month over month (or week over week), we can get a rough sense for how an instance is using the different features within the product. This high-level data is used to help our product, support, and sales teams.
 
 Read more about how this works in the [Usage Ping guide](usage_ping.md)
 
-**Database import**
+### Database import
 
 Database imports are full imports of data into GitLab's data warehouse. For GitLab.com, the PostgreSQL database is loaded into Snowflake data warehouse every 6 hours. For more details, see the [data team handbook](https://about.gitlab.com/handbook/business-ops/data-team/#extract-and-load).
 
-**Log system**
+### Log system
 
 System logs are the application logs generated from running the GitLab Rails application. For more details, see the [log system](../../administration/logs.md) and [logging infrastructure](https://gitlab.com/gitlab-com/runbooks/tree/master/logging/doc#logging-infrastructure-overview).

@@ -83,52 +83,52 @@ Our different tracking tools allows us to track different types of events.
 | Logs | ❌ | ❌ | ❌ | ❌ | ✅ |
 | External services | ❌ | ❌ | ❌ | ❌ | ❌ |
 
-**Database counts**
+### Database counts
 
 - How many Projects have been created by unique users
 - How many users logged in the past 28 day
 
 Database counts are row counts for different tables in an instance’s database. These are SQL count queries which have been filtered, grouped, or aggregated which provide high level usage data. The full list of available tables can be found in [structure.sql](https://gitlab.com/gitlab-org/gitlab/-/blob/master/db/structure.sql)
 
-**Pageview events**
+### Pageview events
 
 - How many sessions visited the /dashboard/groups page
 
-**UI Events**
+### UI Events
 
 - How many sessions clicked on a button or link
 - How many sessions closed a modal
 
 UI events are any interface-driven actions from the browser including click data.
 
-**CRUD or API events**
+### CRUD or API events
 
 - How many Git pushes were made
 - How many GraphQL queries were made
 - How many requests were made to a Rails action or controller.
 
-These are backend events that include the creation, read, update, deletion of records and other events that might be triggered from layers that aren't necessarily only available in the interface.
+These are backend events that include the creation, read, update, deletion of records, and other events that might be triggered from layers other than those available in the interface.
 
-**Event funnels**
+### Event funnels
 
 - How many sessions performed action A, B, then C
 - What is our conversion rate from step A to B?
 
-**PostgreSQL data**
+### PostgreSQL data
 
 These are raw database records which can be explored using business intelligence tools like Sisense. The full list of available tables can be found in [structure.sql](https://gitlab.com/gitlab-org/gitlab/-/blob/master/db/structure.sql)
 
-**Logs**
+### Logs
 
 These are raw logs such as the [Production logs](../../administration/logs.md#production_jsonlog), [API logs](../../administration/logs.md#api_jsonlog), or [Sidekiq logs](../../administration/logs.md#sidekiqlog). See the [overview of Logging Infrastructure](https://gitlab.com/gitlab-com/runbooks/tree/master/logging/doc#logging-infrastructure-overview) for more details.
 
-**External services**
+### External services
 
 These are external services a GitLab instance interacts with such as an [external storage provider](../../administration/static_objects_external_storage.md) or an [external container registry](../../administration/packages/container_registry.md#use-an-external-container-registry-with-gitlab-as-an-auth-endpoint). These services must be able to send data back into a GitLab instance for data to be tracked.
 
 ## Telemetry systems overview
 
-The systems overview is a simplified diagram showing the interactions between GitLab Inc and self-managed nstances.
+The systems overview is a simplified diagram showing the interactions between GitLab Inc and self-managed instances.
 
 ![Telemetry_Overview](../img/telemetry_system_overview.png)

@@ -140,7 +140,7 @@ For Telemetry purposes, GitLab Inc has three major components:
 1. [Data Infrastructure](https://about.gitlab.com/handbook/business-ops/data-team/data-infrastructure/): This contains everything managed by our data team including Sisense Dashboards for visualization, Snowflake for Data Warehousing, incoming data sources such as PostgreSQL Pipeline and S3 Bucket, and lastly our data collectors [GitLab.com's Snowplow Collector](https://about.gitlab.com/handbook/engineering/infrastructure/library/snowplow/) and GitLab's Versions Application.
 1. GitLab.com: This is the production GitLab application which is made up of a Client and Server. On the Client or browser side, a Snowplow JS Tracker (Frontend) is used to track client-side events. On the Server or application side, a Snowplow Ruby Tracker (Backend) is used to track server-side events. The server also contains Usage Ping which leverages a PostgreSQL database and a Redis in-memory data store to report on usage data. Lastly, the server also contains System Logs which are generated from running the GitLab application.
-1. [Monitoring infrastructure](https://about.gitlab.com/handbook/engineering/monitoring/): This is the infrastructure used to ensure GitLab.com is operating smoothly. System Logs are sent from GitLab.com to our monitoring infrastructure and collected by a FluentD collector. From FluentD, logs are either sent to long term Google Cloud Services cold storage via Stackdriver, or, they are sent to our Elastic Cluster via Cloud Pub/Sub which can be explored in real-time using Kibana
+1. [Monitoring infrastructure](https://about.gitlab.com/handbook/engineering/monitoring/): This is the infrastructure used to ensure GitLab.com is operating smoothly. System Logs are sent from GitLab.com to our monitoring infrastructure and collected by a FluentD collector. From FluentD, logs are either sent to long term Google Cloud Services cold storage via Stackdriver, or, they are sent to our Elastic Cluster via Cloud Pub/Sub which can be explored in real-time using Kibana.
 
 ### Self-managed

@@ -151,7 +151,7 @@ For Telemetry purposes, self-managed instances have two major components:
 ### Differences between GitLab Inc and Self-managed
 
-As shown by the orange lines, on GitLab.com Snowplow JS, Snowplow Ruby, Usage Ping, and PostgreSQL database imports all flow into GitLab Inc's data fnfrastructure. However, on self-managed, only Usage Ping flows into GitLab Inc's data infrastructure.
+As shown by the orange lines, on GitLab.com Snowplow JS, Snowplow Ruby, Usage Ping, and PostgreSQL database imports all flow into GitLab Inc's data infrastructure. However, on self-managed, only Usage Ping flows into GitLab Inc's data infrastructure.
 
 As shown by the green lines, on GitLab.com system logs flow into GitLab Inc's monitoring infrastructure. On self-managed, there are no logs sent to GitLab Inc's monitoring infrastructure.
|
|
|
@ -20243,6 +20243,9 @@ msgstr ""
|
||||||
msgid "SortOptions|Size"
|
msgid "SortOptions|Size"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
|
msgid "SortOptions|Sort by:"
|
||||||
|
msgstr ""
|
||||||
|
|
||||||
msgid "SortOptions|Sort direction"
|
msgid "SortOptions|Sort direction"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
|
|
|
@@ -36,7 +36,9 @@ describe Admin::IntegrationsController do
       let(:integration) { create(:jira_service, :instance) }
 
       before do
-        put :update, params: { id: integration.class.to_param, service: { url: url } }
+        allow(PropagateIntegrationWorker).to receive(:perform_async)
+
+        put :update, params: { id: integration.class.to_param, overwrite: true, service: { url: url } }
       end
 
       context 'valid params' do

@@ -46,6 +48,10 @@ describe Admin::IntegrationsController do
         expect(response).to have_gitlab_http_status(:found)
         expect(integration.reload.url).to eq(url)
       end
+
+      it 'calls to PropagateIntegrationWorker' do
+        expect(PropagateIntegrationWorker).to have_received(:perform_async).with(integration.id, true)
+      end
     end
 
     context 'invalid params' do

@@ -56,6 +62,10 @@ describe Admin::IntegrationsController do
         expect(response).to render_template(:edit)
         expect(integration.reload.url).not_to eq(url)
       end
+
+      it 'does not call to PropagateIntegrationWorker' do
+        expect(PropagateIntegrationWorker).not_to have_received(:perform_async)
+      end
     end
   end
 end
@@ -5,9 +5,9 @@ require 'spec_helper'
 describe 'getting task completion status information' do
   include GraphqlHelpers
 
-  DESCRIPTION_0_DONE = '- [ ] task 1\n- [ ] task 2'
-  DESCRIPTION_1_DONE = '- [x] task 1\n- [ ] task 2'
-  DESCRIPTION_2_DONE = '- [x] task 1\n- [x] task 2'
+  description_0_done = '- [ ] task 1\n- [ ] task 2'
+  description_1_done = '- [x] task 1\n- [ ] task 2'
+  description_2_done = '- [x] task 1\n- [x] task 2'
 
   let_it_be(:user1) { create(:user) }
   let_it_be(:project) { create(:project, :repository, :public) }

@@ -42,7 +42,7 @@ describe 'getting task completion status information' do
     end
   end
 
-  [DESCRIPTION_0_DONE, DESCRIPTION_1_DONE, DESCRIPTION_2_DONE].each do |desc|
+  [description_0_done, description_1_done, description_2_done].each do |desc|
     context "with description #{desc}" do
       context 'when type is issue' do
         it_behaves_like 'graphql task completion status provider', 'issue' do
spec/services/admin/propagate_integration_service_spec.rb (new file, 149 lines)
@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::PropagateIntegrationService do
+  describe '.propagate' do
+    let(:excluded_attributes) { %w[id project_id inherit_from_id instance created_at updated_at title description] }
+    let!(:project) { create(:project) }
+    let!(:instance_integration) do
+      JiraService.create!(
+        instance: true,
+        active: true,
+        push_events: true,
+        url: 'http://update-jira.instance.com',
+        username: 'user',
+        password: 'secret'
+      )
+    end
+
+    let!(:inherited_integration) do
+      JiraService.create!(
+        project: create(:project),
+        inherit_from_id: instance_integration.id,
+        instance: false,
+        active: true,
+        push_events: false,
+        url: 'http://jira.instance.com',
+        username: 'user',
+        password: 'secret'
+      )
+    end
+
+    let!(:not_inherited_integration) do
+      JiraService.create!(
+        project: create(:project),
+        inherit_from_id: nil,
+        instance: false,
+        active: true,
+        push_events: false,
+        url: 'http://jira.instance.com',
+        username: 'user',
+        password: 'secret'
+      )
+    end
+
+    let!(:another_inherited_integration) do
+      BambooService.create!(
+        project: create(:project),
+        inherit_from_id: instance_integration.id,
+        instance: false,
+        active: true,
+        push_events: false,
+        bamboo_url: 'http://gitlab.com',
+        username: 'mic',
+        password: 'password',
+        build_key: 'build'
+      )
+    end
+
+    shared_examples 'inherits settings from integration' do
+      it 'updates the inherited integrations' do
+        described_class.propagate(integration: instance_integration, overwrite: overwrite)
+
+        expect(integration.reload.inherit_from_id).to eq(instance_integration.id)
+        expect(integration.attributes.except(*excluded_attributes))
+          .to eq(instance_integration.attributes.except(*excluded_attributes))
+      end
+
+      context 'integration with data fields' do
+        let(:excluded_attributes) { %w[id service_id created_at updated_at] }
+
+        it 'updates the data fields from inherited integrations' do
+          described_class.propagate(integration: instance_integration, overwrite: overwrite)
+
+          expect(integration.reload.data_fields.attributes.except(*excluded_attributes))
+            .to eq(instance_integration.data_fields.attributes.except(*excluded_attributes))
+        end
+      end
+    end
+
+    shared_examples 'does not inherit settings from integration' do
+      it 'does not update the not inherited integrations' do
+        described_class.propagate(integration: instance_integration, overwrite: overwrite)
+
+        expect(integration.reload.attributes.except(*excluded_attributes))
+          .not_to eq(instance_integration.attributes.except(*excluded_attributes))
+      end
+    end
+
+    context 'update only inherited integrations' do
+      let(:overwrite) { false }
+
+      it_behaves_like 'inherits settings from integration' do
+        let(:integration) { inherited_integration }
+      end
+
+      it_behaves_like 'does not inherit settings from integration' do
+        let(:integration) { not_inherited_integration }
+      end
+
+      it_behaves_like 'does not inherit settings from integration' do
+        let(:integration) { another_inherited_integration }
+      end
+
+      it_behaves_like 'inherits settings from integration' do
+        let(:integration) { project.jira_service }
+      end
+    end
+
+    context 'update all integrations' do
+      let(:overwrite) { true }
+
+      it_behaves_like 'inherits settings from integration' do
+        let(:integration) { inherited_integration }
+      end
+
+      it_behaves_like 'inherits settings from integration' do
+        let(:integration) { not_inherited_integration }
+      end
+
+      it_behaves_like 'does not inherit settings from integration' do
+        let(:integration) { another_inherited_integration }
+      end
+
+      it_behaves_like 'inherits settings from integration' do
+        let(:integration) { project.jira_service }
+      end
+    end
+
+    it 'updates project#has_external_issue_tracker for issue tracker services' do
+      described_class.propagate(integration: instance_integration, overwrite: true)
+
+      expect(project.reload.has_external_issue_tracker).to eq(true)
+    end
+
+    it 'updates project#has_external_wiki for external wiki services' do
+      instance_integration = ExternalWikiService.create!(
+        instance: true,
+        active: true,
+        push_events: false,
+        external_wiki_url: 'http://external-wiki-url.com'
+      )
+
+      described_class.propagate(integration: instance_integration, overwrite: true)
+
+      expect(project.reload.has_external_wiki).to eq(true)
+    end
+  end
+end
spec/workers/propagate_integration_worker_spec.rb (new file, 26 lines)
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe PropagateIntegrationWorker do
+  describe '#perform' do
+    let(:integration) do
+      PushoverService.create(
+        template: true,
+        active: true,
+        device: 'MyDevice',
+        sound: 'mic',
+        priority: 4,
+        user_key: 'asdf',
+        api_key: '123456789'
+      )
+    end
+
+    it 'calls the propagate service with the integration' do
+      expect(Admin::PropagateIntegrationService).to receive(:propagate)
+        .with(integration: integration, overwrite: true)
+
+      subject.perform(integration.id, true)
+    end
+  end
+end