Add latest changes from gitlab-org/gitlab@master
Parent: 1e88fd9da8
Commit: 71b7a9d5b2
27 changed files with 529 additions and 216 deletions
@@ -42,6 +42,10 @@ module Ci
 has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id, inverse_of: :build
 has_many :report_results, class_name: 'Ci::BuildReportResult', inverse_of: :build

+# Projects::DestroyService destroys Ci::Pipelines, which use_fast_destroy on :job_artifacts
+# before we delete builds. By doing this, the relation should be empty and not fire any
+# DELETE queries when the Ci::Build is destroyed. The next step is to remove `dependent: :destroy`.
+# Details: https://gitlab.com/gitlab-org/gitlab/-/issues/24644#note_689472685
 has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent
 has_many :job_variables, class_name: 'Ci::JobVariable', foreign_key: :job_id
 has_many :sourced_pipelines, class_name: 'Ci::Sources::Pipeline', foreign_key: :source_job_id
@@ -32,19 +32,19 @@ module Metrics
 def ending_at_after_starting_at
 return if ending_at.blank? || starting_at.blank? || starting_at <= ending_at

-errors.add(:ending_at, s_("Metrics::Dashboard::Annotation|can't be before starting_at time"))
+errors.add(:ending_at, s_("MetricsDashboardAnnotation|can't be before starting_at time"))
 end

 def single_ownership
 return if cluster.nil? ^ environment.nil?

-errors.add(:base, s_("Metrics::Dashboard::Annotation|Annotation can't belong to both a cluster and an environment at the same time"))
+errors.add(:base, s_("MetricsDashboardAnnotation|Annotation can't belong to both a cluster and an environment at the same time"))
 end

 def orphaned_annotation
 return if cluster.present? || environment.present?

-errors.add(:base, s_("Metrics::Dashboard::Annotation|Annotation must belong to a cluster or an environment"))
+errors.add(:base, s_("MetricsDashboardAnnotation|Annotation must belong to a cluster or an environment"))
 end
 end
 end
@@ -9,6 +9,9 @@ module Ci

 pipeline.cancel_running if pipeline.cancelable?

+# Ci::Pipeline#destroy triggers `use_fast_destroy :job_artifacts` and
+# ci_builds has ON DELETE CASCADE to ci_pipelines. The pipeline, the builds,
+# job and pipeline artifacts all get destroyed here.
 pipeline.reset.destroy!

 ServiceResponse.success(message: 'Pipeline not found')
@@ -30,7 +30,7 @@ module Metrics
 options[:environment] = environment
 success(options)
 else
-error(s_('Metrics::Dashboard::Annotation|You are not authorized to create annotation for selected environment'))
+error(s_('MetricsDashboardAnnotation|You are not authorized to create annotation for selected environment'))
 end
 end

@@ -39,7 +39,7 @@ module Metrics
 options[:cluster] = cluster
 success(options)
 else
-error(s_('Metrics::Dashboard::Annotation|You are not authorized to create annotation for selected cluster'))
+error(s_('MetricsDashboardAnnotation|You are not authorized to create annotation for selected cluster'))
 end
 end

@@ -51,7 +51,7 @@ module Metrics

 success(options)
 rescue Gitlab::Template::Finders::RepoTemplateFinder::FileNotFoundError
-error(s_('Metrics::Dashboard::Annotation|Dashboard with requested path can not be found'))
+error(s_('MetricsDashboardAnnotation|Dashboard with requested path can not be found'))
 end

 def create(options)
@@ -27,7 +27,7 @@ module Metrics
 if Ability.allowed?(user, :delete_metrics_dashboard_annotation, annotation)
 success
 else
-error(s_('Metrics::Dashboard::Annotation|You are not authorized to delete this annotation'))
+error(s_('MetricsDashboardAnnotation|You are not authorized to delete this annotation'))
 end
 end

@@ -35,7 +35,7 @@ module Metrics
 if annotation.destroy
 success
 else
-error(s_('Metrics::Dashboard::Annotation|Annotation has not been deleted'))
+error(s_('MetricsDashboardAnnotation|Annotation has not been deleted'))
 end
 end
 end
@@ -35,7 +35,7 @@ module Metrics
 if Ability.allowed?(user, :create_metrics_user_starred_dashboard, project)
 success(user: user, project: project)
 else
-error(s_('Metrics::UsersStarredDashboards|You are not authorized to add star to this dashboard'))
+error(s_('MetricsUsersStarredDashboards|You are not authorized to add star to this dashboard'))
 end
 end

@@ -44,7 +44,7 @@ module Metrics
 options[:dashboard_path] = dashboard_path
 success(options)
 else
-error(s_('Metrics::UsersStarredDashboards|Dashboard with requested path can not be found'))
+error(s_('MetricsUsersStarredDashboards|Dashboard with requested path can not be found'))
 end
 end
@@ -5,6 +5,7 @@ module Projects
 include Gitlab::ShellAdapter

 DestroyError = Class.new(StandardError)
+BATCH_SIZE = 100

 def async_execute
 project.update_attribute(:pending_delete, true)

@@ -119,6 +120,12 @@ module Projects
 destroy_web_hooks!
 destroy_project_bots!

+if ::Feature.enabled?(:ci_optimize_project_records_destruction, project, default_enabled: :yaml) &&
+    Feature.enabled?(:abort_deleted_project_pipelines, default_enabled: :yaml)
+
+  destroy_ci_records!
+end
+
 # Rails attempts to load all related records into memory before
 # destroying: https://github.com/rails/rails/issues/22510
 # This ensures we delete records in batches.

@@ -133,6 +140,23 @@ module Projects
 log_info("Attempting to destroy #{project.full_path} (#{project.id})")
 end

+def destroy_ci_records!
+  project.all_pipelines.find_each(batch_size: BATCH_SIZE) do |pipeline| # rubocop: disable CodeReuse/ActiveRecord
+    # Destroy artifacts, then builds, then pipelines
+    # All builds have already been dropped by Ci::AbortPipelinesService,
+    # so no Ci::Build-instantiating cancellations happen here.
+    # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/71342#note_691523196
+
+    ::Ci::DestroyPipelineService.new(project, current_user).execute(pipeline)
+  end
+
+  deleted_count = project.commit_statuses.delete_all
+
+  if deleted_count > 0
+    Gitlab::AppLogger.info "Projects::DestroyService - Project #{project.id} - #{deleted_count} leftover commit statuses"
+  end
+end
+
 # The project can have multiple webhooks with hundreds of thousands of web_hook_logs.
 # By default, they are removed with "DELETE CASCADE" option defined via foreign_key.
 # But such queries can exceed the statement_timeout limit and fail to delete the project.
@@ -0,0 +1,8 @@
+---
+name: ci_optimize_project_records_destruction
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/71342
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/341936
+milestone: '14.4'
+type: development
+group: group::pipeline execution
+default_enabled: false
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class RemoveAnalyticsSnapshotsSegmentIdColumn < Gitlab::Database::Migration[1.0]
+  disable_ddl_transaction!
+
+  def up
+    remove_column :analytics_devops_adoption_snapshots, :segment_id
+  end
+
+  def down
+    add_column :analytics_devops_adoption_snapshots, :segment_id, :bigint, after: :id
+    add_concurrent_foreign_key :analytics_devops_adoption_snapshots, :analytics_devops_adoption_segments,
+      column: :segment_id, name: 'fk_rails_25da9a92c0', on_delete: :cascade
+    add_concurrent_index :analytics_devops_adoption_snapshots, [:segment_id, :end_time], name: :index_on_snapshots_segment_id_end_time
+    add_concurrent_index :analytics_devops_adoption_snapshots, [:segment_id, :recorded_at], name: :index_on_snapshots_segment_id_recorded_at
+  end
+end
db/schema_migrations/20211007093340 (new file, 1 addition)
@@ -0,0 +1 @@
+fbb3092caba901ddd5a740bb67a91d1c8a4c458651afaf02704399844acbd2b8
@@ -9910,7 +9910,6 @@ ALTER SEQUENCE analytics_devops_adoption_segments_id_seq OWNED BY analytics_devo

 CREATE TABLE analytics_devops_adoption_snapshots (
 id bigint NOT NULL,
-segment_id bigint,
 recorded_at timestamp with time zone NOT NULL,
 issue_opened boolean NOT NULL,
 merge_request_opened boolean NOT NULL,

@@ -25934,10 +25933,6 @@ CREATE INDEX index_on_projects_lower_path ON projects USING btree (lower((path):

 CREATE INDEX index_on_routes_lower_path ON routes USING btree (lower((path)::text));

-CREATE INDEX index_on_snapshots_segment_id_end_time ON analytics_devops_adoption_snapshots USING btree (segment_id, end_time);
-
-CREATE INDEX index_on_snapshots_segment_id_recorded_at ON analytics_devops_adoption_snapshots USING btree (segment_id, recorded_at);
-
 CREATE INDEX index_on_users_lower_email ON users USING btree (lower((email)::text));

 CREATE INDEX index_on_users_lower_username ON users USING btree (lower((username)::text));

@@ -28636,9 +28631,6 @@ ALTER TABLE ONLY incident_management_oncall_rotations

 ALTER TABLE ONLY ci_unit_test_failures
 ADD CONSTRAINT fk_rails_259da3e79c FOREIGN KEY (unit_test_id) REFERENCES ci_unit_tests(id) ON DELETE CASCADE;

-ALTER TABLE ONLY analytics_devops_adoption_snapshots
-ADD CONSTRAINT fk_rails_25da9a92c0 FOREIGN KEY (segment_id) REFERENCES analytics_devops_adoption_segments(id) ON DELETE CASCADE;
-
 ALTER TABLE ONLY cluster_agents
 ADD CONSTRAINT fk_rails_25e9fc2d5d FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
@@ -11,7 +11,7 @@ experience for [GitLab Flavored Markdown](../../user/markdown.md) in the GitLab
 It also serves as the foundation for implementing Markdown-focused editors
 that target other engines, like static site generators.

-We use [tiptap 2.0](https://www.tiptap.dev/) and [ProseMirror](https://prosemirror.net/)
+We use [tiptap 2.0](https://tiptap.dev/) and [ProseMirror](https://prosemirror.net/)
 to build the Content Editor. These frameworks provide a level of abstraction on top of
 the native
 [`contenteditable`](https://developer.mozilla.org/en-US/docs/Web/Guide/HTML/Editable_content) web technology.

@@ -143,7 +143,7 @@ The Content Editor is composed of three main layers:
 ### Editing tools UI

 The editing tools UI are Vue components that display the editor's state and
-dispatch [commands](https://www.tiptap.dev/api/commands/#commands) to mutate it.
+dispatch [commands](https://tiptap.dev/api/commands/#commands) to mutate it.
 They are located in the `~/content_editor/components` directory. For example,
 the **Bold** toolbar button displays the editor's state by becoming active when
 the user selects bold text. This button also dispatches the `toggleBold` command
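As an illustrative aside (not part of this commit), dispatching a command such as `toggleBold` from a toolbar component typically goes through Tiptap's chained command API. The sketch below assumes a Tiptap 2 editor instance built with the Bold extension; the function names are hypothetical.

```javascript
// Sketch only: assumes a Tiptap 2 editor with the Bold extension registered.
import { Editor } from '@tiptap/core';
import Document from '@tiptap/extension-document';
import Paragraph from '@tiptap/extension-paragraph';
import Text from '@tiptap/extension-text';
import Bold from '@tiptap/extension-bold';

const editor = new Editor({
  extensions: [Document, Paragraph, Text, Bold],
});

// A toolbar button handler dispatches the command against the editor state...
export function onBoldClick() {
  editor.chain().focus().toggleBold().run();
}

// ...and the button can reflect that state back by checking whether bold is active.
export function isBoldActive() {
  return editor.isActive('bold');
}
```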
@@ -159,7 +159,7 @@ sequenceDiagram

 #### Node views

-We implement [node views](https://www.tiptap.dev/guide/node-views/vue/#node-views-with-vue)
+We implement [node views](https://tiptap.dev/guide/node-views/vue/#node-views-with-vue)
 to provide inline editing tools for some content types, like tables and images. Node views
 allow separating the presentation of a content type from its
 [model](https://prosemirror.net/docs/guide/#doc.data_structures). Using a Vue component in

@@ -209,7 +209,7 @@ the following events:
 - `blur`
 - `error`.

-Learn more about these events in [Tiptap's event guide](https://www.tiptap.dev/api/events/).
+Learn more about these events in [Tiptap's event guide](https://tiptap.dev/api/events/).

 ```html
 <script>

@@ -246,7 +246,7 @@ export default {

 ### The Tiptap editor object

-The Tiptap [Editor](https://www.tiptap.dev/api/editor) class manages
+The Tiptap [Editor](https://tiptap.dev/api/editor) class manages
 the editor's state and encapsulates all the business logic that powers
 the Content Editor. The Content Editor constructs a new instance of this class and
 provides all the necessary extensions to support

@@ -255,9 +255,9 @@ provides all the necessary extensions to support
 #### Implement new extensions

 Extensions are the building blocks of the Content Editor. You can learn how to implement
-new ones by reading [Tiptap's guide](https://www.tiptap.dev/guide/custom-extensions).
+new ones by reading [Tiptap's guide](https://tiptap.dev/guide/custom-extensions).
-We recommend checking the list of built-in [nodes](https://www.tiptap.dev/api/nodes) and
+We recommend checking the list of built-in [nodes](https://tiptap.dev/api/nodes) and
-[marks](https://www.tiptap.dev/api/marks) before implementing a new extension
+[marks](https://tiptap.dev/api/marks) before implementing a new extension
 from scratch.

 Store the Content Editor extensions in the `~/content_editor/extensions` directory.

@@ -326,8 +326,8 @@ sequenceDiagram
 ```

 Deserializers live in the extension modules. Read Tiptap's
-[parseHTML](https://www.tiptap.dev/guide/custom-extensions#parse-html) and
+[parseHTML](https://tiptap.dev/guide/custom-extensions#parse-html) and
-[addAttributes](https://www.tiptap.dev/guide/custom-extensions#attributes) documentation to
+[addAttributes](https://tiptap.dev/guide/custom-extensions#attributes) documentation to
 learn how to implement them. Titap's API is a wrapper around ProseMirror's
 [schema spec API](https://prosemirror.net/docs/ref/#model.SchemaSpec).
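For illustration only (not from this commit), a custom extension that implements `parseHTML` and `addAttributes` might look like the following sketch. The `abbreviation` node name and `title` attribute are hypothetical.

```javascript
// Hypothetical sketch of a Tiptap 2 node extension; node name and attribute are illustrative.
import { Node } from '@tiptap/core';

export default Node.create({
  name: 'abbreviation',

  group: 'inline',
  inline: true,
  content: 'text*',

  addAttributes() {
    // Attributes declared here are parsed from HTML and serialized back out.
    return {
      title: { default: null },
    };
  },

  parseHTML() {
    // Maps matching HTML elements to this node type during deserialization.
    return [{ tag: 'abbr' }];
  },

  renderHTML({ HTMLAttributes }) {
    // ['abbr', attrs, 0] renders the node's content inside an <abbr> element.
    return ['abbr', HTMLAttributes, 0];
  },
});
```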
@@ -718,8 +718,8 @@ documentation on how to use SAML to sign in to GitLab.
 Examples:

 - [ADFS (Active Directory Federation Services)](https://docs.microsoft.com/en-us/windows-server/identity/ad-fs/operations/create-a-relying-party-trust)
-- [Auth0](https://auth0.com/docs/protocols/saml-protocol/configure-auth0-as-saml-identity-provider)
+- [Auth0](https://auth0.com/docs/configure/saml-configuration/configure-auth0-saml-identity-provider)
-- [PingOne by Ping Identity](https://docs.pingidentity.com/bundle/pingone/page/xsh1564020480660-1.html)
+- [PingOne by Ping Identity](http://docs.pingidentity.com/bundle/pingoneforenterprise/page/xsh1564020480660-1.html)

 GitLab provides the following setup notes for guidance only.
 If you have any questions on configuring the SAML app, please contact your provider's support.
@@ -42,10 +42,20 @@ To enable container scanning in your pipeline, you need the following:
 shared runners on GitLab.com, then this is already the case.
 - An image matching the [supported distributions](#supported-distributions).
 - [Build and push](../../packages/container_registry/index.md#build-and-push-by-using-gitlab-cicd)
-the Docker image to your project's container registry. If using a third-party container
+the Docker image to your project's container registry.
-registry, you might need to provide authentication credentials using the `DOCKER_USER` and
-`DOCKER_PASSWORD` [configuration variables](#available-cicd-variables).
 - The name of the Docker image to scan, in the `DOCKER_IMAGE` [configuration variable](#available-cicd-variables).
+- If you're using a third-party container registry, you might need to provide authentication
+credentials through the `DOCKER_USER` and `DOCKER_PASSWORD` [configuration variables](#available-cicd-variables).
+For example, if you are connecting to AWS ECR, you might use the following:
+
+```yaml
+export AWS_ECR_PASSWORD=$(aws ecr get-login-password --region region)
+
+include:
+- template: Security/Container-Scanning.gitlab-ci.yml
+DOCKER_USER: AWS
+DOCKER_PASSWORD: "$AWS_ECR_PASSWORD"
+```
+
 ## Configuration
@@ -946,3 +946,18 @@ include:
 variables:
 DS_DISABLE_DIND: "true"
 ```
+
+### Message `<file> does not exist in <commit SHA>`
+
+When the `Location` of a dependency in a file is shown, the path in the link goes to a specific Git
+SHA.
+
+If the lock file that our dependency scanning tools reviewed was cached, however, selecting that
+link redirects you to the repository root, with the message:
+`<file> does not exist in <commit SHA>`.
+
+The lock file is cached during the build phase and passed to the dependency scanning job before the
+scan occurs. Because the cache is downloaded before the analyzer run occurs, the existence of a lock
+file in the `CI_BUILDS_DIR` directory triggers the dependency scanning job.
+
+We recommend committing the lock files, which prevents this warning.
@@ -106,7 +106,7 @@ fingerprints in the open for you to check:

 - [AWS CodeCommit](https://docs.aws.amazon.com/codecommit/latest/userguide/regions.html#regions-fingerprints)
 - [Bitbucket](https://support.atlassian.com/bitbucket-cloud/docs/configure-ssh-and-two-step-verification/)
-- [GitHub](https://docs.github.com/en/github/authenticating-to-github/keeping-your-account-and-data-secure/githubs-ssh-key-fingerprints)
+- [GitHub](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/githubs-ssh-key-fingerprints)
 - [GitLab.com](../../../gitlab_com/index.md#ssh-host-keys-fingerprints)
 - [Launchpad](https://help.launchpad.net/SSHFingerprints)
 - [Savannah](http://savannah.gnu.org/maintenance/SshAccess/)
@@ -51,8 +51,8 @@ After you configure a GitLab repository as a pull mirror:
 Prerequisite:

 - If your remote repository is on GitHub and you have
-[two-factor authentication (2FA) configured](https://docs.github.com/en/github/authenticating-to-github/securing-your-account-with-two-factor-authentication-2fa),
+[two-factor authentication (2FA) configured](https://docs.github.com/en/authentication/securing-your-account-with-two-factor-authentication-2fa),
-create a [personal access token for GitHub](https://docs.github.com/en/github/authenticating-to-github/keeping-your-account-and-data-secure/creating-a-personal-access-token)
+create a [personal access token for GitHub](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token)
 with the `repo` scope. If 2FA is enabled, this personal access
 token serves as your GitHub password.
@@ -66,7 +66,7 @@ After the mirror is created, this option can only be modified via the [API](../.

 To set up a mirror from GitLab to GitHub, you must follow these steps:

-1. Create a [GitHub personal access token](https://docs.github.com/en/github/authenticating-to-github/keeping-your-account-and-data-secure/creating-a-personal-access-token) with the `public_repo` box checked.
+1. Create a [GitHub personal access token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) with the `public_repo` box checked.
 1. Fill in the **Git repository URL** field using this format: `https://<your_github_username>@github.com/<your_github_group>/<your_github_project>.git`.
 1. Fill in **Password** field with your GitHub personal access token.
 1. Select **Mirror repository**.
@@ -137,12 +137,12 @@ module API
 bad_request!
 end

-track_package_event('push_package', :composer, project: authorized_user_project, user: current_user, namespace: authorized_user_project.namespace)
-
 ::Packages::Composer::CreatePackageService
 .new(authorized_user_project, current_user, declared_params.merge(build: current_authenticated_job))
 .execute

+track_package_event('push_package', :composer, project: authorized_user_project, user: current_user, namespace: authorized_user_project.namespace)
+
 created!
 end
@@ -21670,34 +21670,28 @@ msgstr ""
 msgid "Metrics and profiling"
 msgstr ""

-msgid "Metrics::Dashboard::Annotation|Annotation can't belong to both a cluster and an environment at the same time"
+msgid "MetricsDashboardAnnotation|Annotation can't belong to both a cluster and an environment at the same time"
 msgstr ""

-msgid "Metrics::Dashboard::Annotation|Annotation has not been deleted"
+msgid "MetricsDashboardAnnotation|Annotation has not been deleted"
 msgstr ""

-msgid "Metrics::Dashboard::Annotation|Annotation must belong to a cluster or an environment"
+msgid "MetricsDashboardAnnotation|Annotation must belong to a cluster or an environment"
 msgstr ""

-msgid "Metrics::Dashboard::Annotation|Dashboard with requested path can not be found"
+msgid "MetricsDashboardAnnotation|Dashboard with requested path can not be found"
 msgstr ""

-msgid "Metrics::Dashboard::Annotation|You are not authorized to create annotation for selected cluster"
+msgid "MetricsDashboardAnnotation|You are not authorized to create annotation for selected cluster"
 msgstr ""

-msgid "Metrics::Dashboard::Annotation|You are not authorized to create annotation for selected environment"
+msgid "MetricsDashboardAnnotation|You are not authorized to create annotation for selected environment"
 msgstr ""

-msgid "Metrics::Dashboard::Annotation|You are not authorized to delete this annotation"
+msgid "MetricsDashboardAnnotation|You are not authorized to delete this annotation"
 msgstr ""

-msgid "Metrics::Dashboard::Annotation|can't be before starting_at time"
+msgid "MetricsDashboardAnnotation|can't be before starting_at time"
-msgstr ""
-
-msgid "Metrics::UsersStarredDashboards|Dashboard with requested path can not be found"
-msgstr ""
-
-msgid "Metrics::UsersStarredDashboards|You are not authorized to add star to this dashboard"
 msgstr ""

 msgid "MetricsSettings|Add a button to the metrics dashboard linking directly to your existing external dashboard."

@@ -21724,6 +21718,12 @@ msgstr ""
 msgid "MetricsSettings|User's local timezone"
 msgstr ""

+msgid "MetricsUsersStarredDashboards|Dashboard with requested path can not be found"
+msgstr ""
+
+msgid "MetricsUsersStarredDashboards|You are not authorized to add star to this dashboard"
+msgstr ""
+
 msgid "Metrics|1. Define and preview panel"
 msgstr ""
@@ -46,7 +46,7 @@ module QA
 merge_request.visit!
 end

-it 'applies multiple suggestions', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/quality/test_cases/1838' do
+it 'applies multiple suggestions', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/quality/test_cases/1838', quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/342131', type: :stale } do
 Page::MergeRequest::Show.perform do |merge_request|
 merge_request.click_diffs_tab
 4.times { merge_request.add_suggestion_to_batch }
spec/frontend/tracking/tracking_initialization_spec.js (new file, 140 additions)
@@ -0,0 +1,140 @@
+import { TRACKING_CONTEXT_SCHEMA } from '~/experimentation/constants';
+import { getExperimentData, getAllExperimentContexts } from '~/experimentation/utils';
+import Tracking, { initUserTracking, initDefaultTrackers } from '~/tracking';
+import getStandardContext from '~/tracking/get_standard_context';
+
+jest.mock('~/experimentation/utils', () => ({
+  getExperimentData: jest.fn(),
+  getAllExperimentContexts: jest.fn(),
+}));
+
+describe('Tracking', () => {
+  let standardContext;
+  let snowplowSpy;
+  let bindDocumentSpy;
+  let trackLoadEventsSpy;
+  let enableFormTracking;
+  let setAnonymousUrlsSpy;
+
+  beforeAll(() => {
+    window.gl = window.gl || {};
+    window.gl.snowplowStandardContext = {
+      schema: 'iglu:com.gitlab/gitlab_standard',
+      data: {
+        environment: 'testing',
+        source: 'unknown',
+        extra: {},
+      },
+    };
+
+    standardContext = getStandardContext();
+  });
+
+  beforeEach(() => {
+    getExperimentData.mockReturnValue(undefined);
+    getAllExperimentContexts.mockReturnValue([]);
+
+    window.snowplow = window.snowplow || (() => {});
+    window.snowplowOptions = {
+      namespace: 'gl_test',
+      hostname: 'app.test.com',
+      cookieDomain: '.test.com',
+    };
+
+    snowplowSpy = jest.spyOn(window, 'snowplow');
+  });
+
+  describe('initUserTracking', () => {
+    it('calls through to get a new tracker with the expected options', () => {
+      initUserTracking();
+      expect(snowplowSpy).toHaveBeenCalledWith('newTracker', 'gl_test', 'app.test.com', {
+        namespace: 'gl_test',
+        hostname: 'app.test.com',
+        cookieDomain: '.test.com',
+        appId: '',
+        userFingerprint: false,
+        respectDoNotTrack: true,
+        forceSecureTracker: true,
+        eventMethod: 'post',
+        contexts: { webPage: true, performanceTiming: true },
+        formTracking: false,
+        linkClickTracking: false,
+        pageUnloadTimer: 10,
+        formTrackingConfig: {
+          fields: { allow: [] },
+          forms: { allow: [] },
+        },
+      });
+    });
+  });
+
+  describe('initDefaultTrackers', () => {
+    beforeEach(() => {
+      bindDocumentSpy = jest.spyOn(Tracking, 'bindDocument').mockImplementation(() => null);
+      trackLoadEventsSpy = jest.spyOn(Tracking, 'trackLoadEvents').mockImplementation(() => null);
+      enableFormTracking = jest
+        .spyOn(Tracking, 'enableFormTracking')
+        .mockImplementation(() => null);
+      setAnonymousUrlsSpy = jest.spyOn(Tracking, 'setAnonymousUrls').mockImplementation(() => null);
+    });
+
+    it('should activate features based on what has been enabled', () => {
+      initDefaultTrackers();
+      expect(snowplowSpy).toHaveBeenCalledWith('enableActivityTracking', 30, 30);
+      expect(snowplowSpy).toHaveBeenCalledWith('trackPageView', null, [standardContext]);
+      expect(snowplowSpy).not.toHaveBeenCalledWith('enableFormTracking');
+      expect(snowplowSpy).not.toHaveBeenCalledWith('enableLinkClickTracking');
+
+      window.snowplowOptions = {
+        ...window.snowplowOptions,
+        formTracking: true,
+        linkClickTracking: true,
+        formTrackingConfig: { forms: { whitelist: ['foo'] }, fields: { whitelist: ['bar'] } },
+      };
+
+      initDefaultTrackers();
+      expect(enableFormTracking).toHaveBeenCalledWith(window.snowplowOptions.formTrackingConfig);
+      expect(snowplowSpy).toHaveBeenCalledWith('enableLinkClickTracking');
+    });
+
+    it('binds the document event handling', () => {
+      initDefaultTrackers();
+      expect(bindDocumentSpy).toHaveBeenCalled();
+    });
+
+    it('tracks page loaded events', () => {
+      initDefaultTrackers();
+      expect(trackLoadEventsSpy).toHaveBeenCalled();
+    });
+
+    it('calls the anonymized URLs method', () => {
+      initDefaultTrackers();
+      expect(setAnonymousUrlsSpy).toHaveBeenCalled();
+    });
+
+    describe('when there are experiment contexts', () => {
+      const experimentContexts = [
+        {
+          schema: TRACKING_CONTEXT_SCHEMA,
+          data: { experiment: 'experiment1', variant: 'control' },
+        },
+        {
+          schema: TRACKING_CONTEXT_SCHEMA,
+          data: { experiment: 'experiment_two', variant: 'candidate' },
+        },
+      ];
+
+      beforeEach(() => {
+        getAllExperimentContexts.mockReturnValue(experimentContexts);
+      });
+
+      it('includes those contexts alongside the standard context', () => {
+        initDefaultTrackers();
+        expect(snowplowSpy).toHaveBeenCalledWith('trackPageView', null, [
+          standardContext,
+          ...experimentContexts,
+        ]);
+      });
+    });
+  });
+});
@@ -8,16 +8,16 @@ import getStandardContext from '~/tracking/get_standard_context';

 jest.mock('~/experimentation/utils', () => ({
 getExperimentData: jest.fn(),
-getAllExperimentContexts: jest.fn(),
+getAllExperimentContexts: jest.fn().mockReturnValue([]),
 }));

+const TEST_CATEGORY = 'root:index';
+const TEST_ACTION = 'generic';
+const TEST_LABEL = 'button';
+
 describe('Tracking', () => {
 let standardContext;
 let snowplowSpy;
-let bindDocumentSpy;
-let trackLoadEventsSpy;
-let enableFormTracking;
-let setAnonymousUrlsSpy;

 beforeAll(() => {
 window.gl = window.gl || {};
@@ -30,132 +30,46 @@ describe('Tracking', () => {
 extra: {},
 },
 };
+window.snowplowOptions = {
+namespace: 'gl_test',
+hostname: 'app.test.com',
+cookieDomain: '.test.com',
+formTracking: true,
+linkClickTracking: true,
+formTrackingConfig: { forms: { allow: ['foo'] }, fields: { allow: ['bar'] } },
+};
+
 standardContext = getStandardContext();
+window.snowplow = window.snowplow || (() => {});
+document.body.dataset.page = TEST_CATEGORY;
+
+initUserTracking();
+initDefaultTrackers();
 });

 beforeEach(() => {
 getExperimentData.mockReturnValue(undefined);
 getAllExperimentContexts.mockReturnValue([]);

-window.snowplow = window.snowplow || (() => {});
-window.snowplowOptions = {
-namespace: '_namespace_',
-hostname: 'app.gitfoo.com',
-cookieDomain: '.gitfoo.com',
-};
 snowplowSpy = jest.spyOn(window, 'snowplow');
 });

-describe('initUserTracking', () => {
-it('calls through to get a new tracker with the expected options', () => {
-initUserTracking();
-expect(snowplowSpy).toHaveBeenCalledWith('newTracker', '_namespace_', 'app.gitfoo.com', {
-namespace: '_namespace_',
-hostname: 'app.gitfoo.com',
-cookieDomain: '.gitfoo.com',
-appId: '',
-userFingerprint: false,
-respectDoNotTrack: true,
-forceSecureTracker: true,
-eventMethod: 'post',
-contexts: { webPage: true, performanceTiming: true },
-formTracking: false,
-linkClickTracking: false,
-pageUnloadTimer: 10,
-formTrackingConfig: {
-fields: { allow: [] },
-forms: { allow: [] },
-},
-});
-});
-});
-
-describe('initDefaultTrackers', () => {
-beforeEach(() => {
-bindDocumentSpy = jest.spyOn(Tracking, 'bindDocument').mockImplementation(() => null);
-trackLoadEventsSpy = jest.spyOn(Tracking, 'trackLoadEvents').mockImplementation(() => null);
-enableFormTracking = jest
-.spyOn(Tracking, 'enableFormTracking')
-.mockImplementation(() => null);
-setAnonymousUrlsSpy = jest.spyOn(Tracking, 'setAnonymousUrls').mockImplementation(() => null);
-});
-
-it('should activate features based on what has been enabled', () => {
-initDefaultTrackers();
-expect(snowplowSpy).toHaveBeenCalledWith('enableActivityTracking', 30, 30);
-expect(snowplowSpy).toHaveBeenCalledWith('trackPageView', null, [standardContext]);
-expect(snowplowSpy).not.toHaveBeenCalledWith('enableFormTracking');
-expect(snowplowSpy).not.toHaveBeenCalledWith('enableLinkClickTracking');
-
-window.snowplowOptions = {
-...window.snowplowOptions,
-formTracking: true,
-linkClickTracking: true,
-formTrackingConfig: { forms: { whitelist: ['foo'] }, fields: { whitelist: ['bar'] } },
-};
-
-initDefaultTrackers();
-expect(enableFormTracking).toHaveBeenCalledWith(window.snowplowOptions.formTrackingConfig);
-expect(snowplowSpy).toHaveBeenCalledWith('enableLinkClickTracking');
-});
-
-it('binds the document event handling', () => {
-initDefaultTrackers();
-expect(bindDocumentSpy).toHaveBeenCalled();
-});
-
-it('tracks page loaded events', () => {
-initDefaultTrackers();
-expect(trackLoadEventsSpy).toHaveBeenCalled();
-});
-
-it('calls the anonymized URLs method', () => {
-initDefaultTrackers();
-expect(setAnonymousUrlsSpy).toHaveBeenCalled();
-});
-
-describe('when there are experiment contexts', () => {
-const experimentContexts = [
-{
-schema: TRACKING_CONTEXT_SCHEMA,
-data: { experiment: 'experiment1', variant: 'control' },
-},
-{
-schema: TRACKING_CONTEXT_SCHEMA,
-data: { experiment: 'experiment_two', variant: 'candidate' },
-},
-];
-
-beforeEach(() => {
-getAllExperimentContexts.mockReturnValue(experimentContexts);
-});
-
-it('includes those contexts alongside the standard context', () => {
-initDefaultTrackers();
-expect(snowplowSpy).toHaveBeenCalledWith('trackPageView', null, [
-standardContext,
-...experimentContexts,
-]);
-});
-});
-});
-
 describe('.event', () => {
 afterEach(() => {
 window.doNotTrack = undefined;
 navigator.doNotTrack = undefined;
 navigator.msDoNotTrack = undefined;
+jest.clearAllMocks();
 });

 it('tracks to snowplow (our current tracking system)', () => {
-Tracking.event('_category_', '_eventName_', { label: '_label_' });
+Tracking.event(TEST_CATEGORY, TEST_ACTION, { label: TEST_LABEL });

 expect(snowplowSpy).toHaveBeenCalledWith(
 'trackStructEvent',
-'_category_',
+TEST_CATEGORY,
-'_eventName_',
+TEST_ACTION,
-'_label_',
+TEST_LABEL,
 undefined,
 undefined,
 [standardContext],
@@ -165,12 +79,12 @@ describe('Tracking', () => {
 it('allows adding extra data to the default context', () => {
 const extra = { foo: 'bar' };

-Tracking.event('_category_', '_eventName_', { extra });
+Tracking.event(TEST_CATEGORY, TEST_ACTION, { extra });

 expect(snowplowSpy).toHaveBeenCalledWith(
 'trackStructEvent',
-'_category_',
+TEST_CATEGORY,
-'_eventName_',
+TEST_ACTION,
 undefined,
 undefined,
 undefined,

@@ -188,28 +102,28 @@ describe('Tracking', () => {

 it('skips tracking if snowplow is unavailable', () => {
 window.snowplow = false;
-Tracking.event('_category_', '_eventName_');
+Tracking.event(TEST_CATEGORY, TEST_ACTION);

 expect(snowplowSpy).not.toHaveBeenCalled();
 });

 it('skips tracking if the user does not want to be tracked (general spec)', () => {
 window.doNotTrack = '1';
-Tracking.event('_category_', '_eventName_');
+Tracking.event(TEST_CATEGORY, TEST_ACTION);

 expect(snowplowSpy).not.toHaveBeenCalled();
 });

 it('skips tracking if the user does not want to be tracked (firefox legacy)', () => {
 navigator.doNotTrack = 'yes';
-Tracking.event('_category_', '_eventName_');
+Tracking.event(TEST_CATEGORY, TEST_ACTION);

 expect(snowplowSpy).not.toHaveBeenCalled();
 });

 it('skips tracking if the user does not want to be tracked (IE legacy)', () => {
 navigator.msDoNotTrack = '1';
-Tracking.event('_category_', '_eventName_');
+Tracking.event(TEST_CATEGORY, TEST_ACTION);

 expect(snowplowSpy).not.toHaveBeenCalled();
 });
@@ -237,7 +151,7 @@ describe('Tracking', () => {
 );
 });

-it('does not add empty form whitelist rules', () => {
+it('does not add empty form allow rules', () => {
 Tracking.enableFormTracking({ fields: { allow: ['input-class1'] } });

 expect(snowplowSpy).toHaveBeenCalledWith(

@@ -287,7 +201,7 @@ describe('Tracking', () => {
 describe('.flushPendingEvents', () => {
 it('flushes any pending events', () => {
 Tracking.initialized = false;
-Tracking.event('_category_', '_eventName_', { label: '_label_' });
+Tracking.event(TEST_CATEGORY, TEST_ACTION, { label: TEST_LABEL });

 expect(snowplowSpy).not.toHaveBeenCalled();

@@ -295,9 +209,9 @@ describe('Tracking', () => {

 expect(snowplowSpy).toHaveBeenCalledWith(
 'trackStructEvent',
-'_category_',
+TEST_CATEGORY,
-'_eventName_',
+TEST_ACTION,
-'_label_',
+TEST_LABEL,
 undefined,
 undefined,
 [standardContext],
@@ -413,15 +327,14 @@ describe('Tracking', () => {

 beforeEach(() => {
 eventSpy = jest.spyOn(Tracking, 'event');
-Tracking.bindDocument('_category_'); // only happens once
 setHTMLFixture(`
-<input data-track-action="click_input1" data-track-label="_label_" value=0 />
+<input data-track-action="click_input1" data-track-label="button" value="0" />
-<input data-track-action="click_input2" data-track-value=0 value=0/>
+<input data-track-action="click_input2" data-track-value="0" value="0" />
-<input type="checkbox" data-track-action="toggle_checkbox" value=1 checked/>
+<input type="checkbox" data-track-action="toggle_checkbox" value=1 checked />
 <input class="dropdown" data-track-action="toggle_dropdown"/>
 <div data-track-action="nested_event"><span class="nested"></span></div>
-<input data-track-bogus="click_bogusinput" data-track-label="_label_" value="_value_"/>
+<input data-track-bogus="click_bogusinput" data-track-label="button" value="1" />
-<input data-track-action="click_input3" data-track-experiment="example" value="_value_"/>
+<input data-track-action="click_input3" data-track-experiment="example" value="1" />
 <input data-track-action="event_with_extra" data-track-extra='{ "foo": "bar" }' />
 <input data-track-action="event_with_invalid_extra" data-track-extra="invalid_json" />
 `);

@@ -430,8 +343,8 @@ describe('Tracking', () => {
 it(`binds to clicks on elements matching [data-track-action]`, () => {
 document.querySelector(`[data-track-action="click_input1"]`).click();

-expect(eventSpy).toHaveBeenCalledWith('_category_', 'click_input1', {
+expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'click_input1', {
-label: '_label_',
+label: TEST_LABEL,
 value: '0',
 });
 });

@@ -445,7 +358,7 @@ describe('Tracking', () => {
 it('allows value override with the data-track-value attribute', () => {
 document.querySelector(`[data-track-action="click_input2"]`).click();

-expect(eventSpy).toHaveBeenCalledWith('_category_', 'click_input2', {
+expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'click_input2', {
 value: '0',
 });
 });

@@ -455,13 +368,13 @@ describe('Tracking', () => {

 checkbox.click(); // unchecking

-expect(eventSpy).toHaveBeenCalledWith('_category_', 'toggle_checkbox', {
+expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'toggle_checkbox', {
 value: 0,
 });

 checkbox.click(); // checking

-expect(eventSpy).toHaveBeenCalledWith('_category_', 'toggle_checkbox', {
+expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'toggle_checkbox', {
 value: '1',
 });
 });
@@ -471,17 +384,17 @@ describe('Tracking', () => {

 dropdown.dispatchEvent(new Event('show.bs.dropdown', { bubbles: true }));

-expect(eventSpy).toHaveBeenCalledWith('_category_', 'toggle_dropdown_show', {});
+expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'toggle_dropdown_show', {});

 dropdown.dispatchEvent(new Event('hide.bs.dropdown', { bubbles: true }));

-expect(eventSpy).toHaveBeenCalledWith('_category_', 'toggle_dropdown_hide', {});
+expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'toggle_dropdown_hide', {});
 });

 it('handles nested elements inside an element with tracking', () => {
 document.querySelector('span.nested').click();

-expect(eventSpy).toHaveBeenCalledWith('_category_', 'nested_event', {});
+expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'nested_event', {});
 });

 it('includes experiment data if linked to an experiment', () => {

@@ -494,8 +407,8 @@ describe('Tracking', () => {

 document.querySelector(`[data-track-action="click_input3"]`).click();

-expect(eventSpy).toHaveBeenCalledWith('_category_', 'click_input3', {
+expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'click_input3', {
-value: '_value_',
+value: '1',
 context: { schema: TRACKING_CONTEXT_SCHEMA, data: mockExperimentData },
 });
 });

@@ -503,7 +416,7 @@ describe('Tracking', () => {
 it('supports extra data as JSON', () => {
 document.querySelector(`[data-track-action="event_with_extra"]`).click();

-expect(eventSpy).toHaveBeenCalledWith('_category_', 'event_with_extra', {
+expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'event_with_extra', {
 extra: { foo: 'bar' },
 });
 });

@@ -511,7 +424,7 @@ describe('Tracking', () => {
 it('ignores extra if provided JSON is invalid', () => {
 document.querySelector(`[data-track-action="event_with_invalid_extra"]`).click();

-expect(eventSpy).toHaveBeenCalledWith('_category_', 'event_with_invalid_extra', {});
+expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'event_with_invalid_extra', {});
 });
 });
@@ -522,20 +435,20 @@ describe('Tracking', () => {
      eventSpy = jest.spyOn(Tracking, 'event');
      setHTMLFixture(`
        <div data-track-action="click_link" data-track-label="all_nested_links">
-         <input data-track-action="render" data-track-label="label1" value=1 data-track-property="_property_"/>
-         <span data-track-action="render" data-track-label="label2" data-track-value=1>
+         <input data-track-action="render" data-track-label="label1" value=1 data-track-property="_property_" />
+         <span data-track-action="render" data-track-label="label2" data-track-value="1">
            <a href="#" id="link">Something</a>
          </span>
-         <input data-track-action="_render_bogus_" data-track-label="label3" value="_value_" data-track-property="_property_"/>
+         <input data-track-action="_render_bogus_" data-track-label="label3" value="_value_" data-track-property="_property_" />
        </div>
      `);
-     Tracking.trackLoadEvents('_category_'); // only happens once
+     Tracking.trackLoadEvents(TEST_CATEGORY);
    });

    it(`sends tracking events when [data-track-action="render"] is on an element`, () => {
      expect(eventSpy.mock.calls).toEqual([
        [
-         '_category_',
+         TEST_CATEGORY,
          'render',
          {
            label: 'label1',
@@ -544,7 +457,7 @@ describe('Tracking', () => {
          },
        ],
        [
-         '_category_',
+         TEST_CATEGORY,
          'render',
          {
            label: 'label2',
@@ -571,12 +484,12 @@ describe('Tracking', () => {
      link.dispatchEvent(new Event(event, { bubbles: true }));

      expect(eventSpy).not.toHaveBeenCalledWith(
-       '_category_',
+       TEST_CATEGORY,
        `render${actionSuffix}`,
        expect.any(Object),
      );
      expect(eventSpy).toHaveBeenCalledWith(
-       '_category_',
+       TEST_CATEGORY,
        `click_link${actionSuffix}`,
        expect.objectContaining({ label: 'all_nested_links' }),
      );
spec/frontend/tracking/utils_spec.js (new file, 99 lines)
@@ -0,0 +1,99 @@
import {
  renameKey,
  getReferrersCache,
  addExperimentContext,
  addReferrersCacheEntry,
  filterOldReferrersCacheEntries,
} from '~/tracking/utils';
import { TRACKING_CONTEXT_SCHEMA } from '~/experimentation/constants';
import { REFERRER_TTL, URLS_CACHE_STORAGE_KEY } from '~/tracking/constants';
import { TEST_HOST } from 'helpers/test_constants';

jest.mock('~/experimentation/utils', () => ({
  getExperimentData: jest.fn().mockReturnValue({}),
}));

describe('~/tracking/utils', () => {
  beforeEach(() => {
    window.gl = window.gl || {};
    window.gl.snowplowStandardContext = {};
  });

  describe('addExperimentContext', () => {
    const options = {
      category: 'root:index',
      action: 'generic',
    };

    it('returns same options if no experiment is provided', () => {
      expect(addExperimentContext({ options })).toStrictEqual({ options });
    });

    it('adds experiment if provided', () => {
      const experiment = 'TEST_EXPERIMENT_NAME';

      expect(addExperimentContext({ experiment, ...options })).toStrictEqual({
        ...options,
        context: { data: {}, schema: TRACKING_CONTEXT_SCHEMA },
      });
    });
  });

  describe('renameKey', () => {
    it('renames a given key', () => {
      expect(renameKey({ allow: [] }, 'allow', 'permit')).toStrictEqual({ permit: [] });
    });
  });

  describe('referrers cache', () => {
    describe('filterOldReferrersCacheEntries', () => {
      it('removes entries with old or no timestamp', () => {
        const now = Date.now();
        const cache = [{ timestamp: now }, { timestamp: now - REFERRER_TTL }, { referrer: '' }];

        expect(filterOldReferrersCacheEntries(cache)).toStrictEqual([{ timestamp: now }]);
      });
    });

    describe('getReferrersCache', () => {
      beforeEach(() => {
        localStorage.removeItem(URLS_CACHE_STORAGE_KEY);
      });

      it('returns an empty array if cache is not found', () => {
        expect(getReferrersCache()).toHaveLength(0);
      });

      it('returns an empty array if cache is invalid', () => {
        localStorage.setItem(URLS_CACHE_STORAGE_KEY, 'Invalid JSON');

        expect(getReferrersCache()).toHaveLength(0);
      });

      it('returns parsed entries if valid', () => {
        localStorage.setItem(
          URLS_CACHE_STORAGE_KEY,
          JSON.stringify([{ referrer: '', timestamp: Date.now() }]),
        );

        expect(getReferrersCache()).toHaveLength(1);
      });
    });

    describe('addReferrersCacheEntry', () => {
      it('unshifts entry and adds timestamp', () => {
        const now = Date.now();

        addReferrersCacheEntry([{ referrer: '', originalUrl: TEST_HOST, timestamp: now }], {
          referrer: TEST_HOST,
        });

        const cache = getReferrersCache();

        expect(cache).toHaveLength(2);
        expect(cache[0].referrer).toBe(TEST_HOST);
        expect(cache[0].timestamp).toBeDefined();
      });
    });
  });
});
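Editor's note: the spec above pins down the behaviour of the referrer-cache and experiment-context helpers without showing their implementation. The following is a possible shape for them, reconstructed purely from these assertions as a minimal sketch; the real `~/tracking/utils.js` may differ in details.

```javascript
// Reconstruction from the spec only; not the actual GitLab source.
import { getExperimentData } from '~/experimentation/utils';
import { TRACKING_CONTEXT_SCHEMA } from '~/experimentation/constants';
import { REFERRER_TTL, URLS_CACHE_STORAGE_KEY } from '~/tracking/constants';

// Strip `experiment` from the options and, when experiment data exists,
// attach it as a Snowplow context.
export const addExperimentContext = ({ experiment, ...options }) => {
  if (!experiment) {
    return options;
  }

  const data = getExperimentData(experiment);

  return data ? { ...options, context: { schema: TRACKING_CONTEXT_SCHEMA, data } } : options;
};

// renameKey({ allow: [] }, 'allow', 'permit') => { permit: [] }
export const renameKey = (obj, oldKey, newKey) => {
  const { [oldKey]: value, ...rest } = obj;

  return { ...rest, [newKey]: value };
};

// Keep only entries that have a timestamp newer than REFERRER_TTL.
export const filterOldReferrersCacheEntries = (cache) =>
  cache.filter((entry) => entry.timestamp && entry.timestamp > Date.now() - REFERRER_TTL);

// Missing or unparsable localStorage content yields an empty cache.
export const getReferrersCache = () => {
  try {
    return JSON.parse(window.localStorage.getItem(URLS_CACHE_STORAGE_KEY)) || [];
  } catch {
    return [];
  }
};

// Prepend the new entry with a fresh timestamp and persist the cache.
export const addReferrersCacheEntry = (cache, entry) => {
  const referrers = JSON.stringify([{ ...entry, timestamp: Date.now() }, ...cache]);

  window.localStorage.setItem(URLS_CACHE_STORAGE_KEY, referrers);
};
```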
@@ -39,12 +39,15 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
      let!(:job_variables) { create(:ci_job_variable, job: build) }
      let!(:report_result) { create(:ci_build_report_result, build: build) }
      let!(:pending_state) { create(:ci_build_pending_state, build: build) }
+     let!(:pipeline_artifact) { create(:ci_pipeline_artifact, pipeline: pipeline) }

-     it 'deletes build related records' do
+     it 'deletes build and pipeline related records' do
        expect { destroy_project(project, user, {}) }
          .to change { Ci::Build.count }.by(-1)
          .and change { Ci::BuildTraceChunk.count }.by(-1)
          .and change { Ci::JobArtifact.count }.by(-2)
+         .and change { Ci::DeletedObject.count }.by(2)
+         .and change { Ci::PipelineArtifact.count }.by(-1)
          .and change { Ci::JobVariable.count }.by(-1)
          .and change { Ci::BuildPendingState.count }.by(-1)
          .and change { Ci::BuildReportResult.count }.by(-1)
@@ -52,15 +55,48 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
          .and change { Ci::Pipeline.count }.by(-1)
      end

-     it 'avoids N+1 queries', skip: 'skipped until fixed in https://gitlab.com/gitlab-org/gitlab/-/issues/24644' do
-       recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
-
-       project = create(:project, :repository, namespace: user.namespace)
-       pipeline = create(:ci_pipeline, project: project)
-       builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
-       create_list(:ci_build_trace_chunk, 3, build: builds[0])
-
-       expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
-     end
+     context 'with abort_deleted_project_pipelines disabled' do
+       stub_feature_flags(abort_deleted_project_pipelines: false)
+
+       it 'avoids N+1 queries' do
+         recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
+
+         project = create(:project, :repository, namespace: user.namespace)
+         pipeline = create(:ci_pipeline, project: project)
+         builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
+         create(:ci_pipeline_artifact, pipeline: pipeline)
+         create_list(:ci_build_trace_chunk, 3, build: builds[0])
+
+         expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+       end
+     end
+
+     context 'with ci_optimize_project_records_destruction disabled' do
+       stub_feature_flags(ci_optimize_project_records_destruction: false)
+
+       it 'avoids N+1 queries' do
+         recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
+
+         project = create(:project, :repository, namespace: user.namespace)
+         pipeline = create(:ci_pipeline, project: project)
+         builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
+         create_list(:ci_build_trace_chunk, 3, build: builds[0])
+
+         expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+       end
+     end
+
+     context 'with ci_optimize_project_records_destruction and abort_deleted_project_pipelines enabled' do
+       it 'avoids N+1 queries' do
+         recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
+
+         project = create(:project, :repository, namespace: user.namespace)
+         pipeline = create(:ci_pipeline, project: project)
+         builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
+         create_list(:ci_build_trace_chunk, 3, build: builds[0])
+
+         expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+       end
+     end

      it_behaves_like 'deleting the project'
@@ -97,24 +133,63 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
      end

      context 'with abort_deleted_project_pipelines feature disabled' do
-       it 'does not cancel project ci pipelines' do
+       before do
          stub_feature_flags(abort_deleted_project_pipelines: false)
+       end
+
+       it 'does not bulk-fail project ci pipelines' do
          expect(::Ci::AbortPipelinesService).not_to receive(:new)

          destroy_project(project, user, {})
        end
+
+       it 'does not destroy CI records via DestroyPipelineService' do
+         expect(::Ci::DestroyPipelineService).not_to receive(:new)
+
+         destroy_project(project, user, {})
+       end
      end

      context 'with abort_deleted_project_pipelines feature enabled' do
-       it 'performs cancel for project ci pipelines' do
-         stub_feature_flags(abort_deleted_project_pipelines: true)
-         pipelines = build_list(:ci_pipeline, 3, :running)
-         allow(project).to receive(:all_pipelines).and_return(pipelines)
-
-         expect(::Ci::AbortPipelinesService).to receive_message_chain(:new, :execute).with(pipelines, :project_deleted)
-
-         destroy_project(project, user, {})
-       end
+       let!(:pipelines) { create_list(:ci_pipeline, 3, :running, project: project) }
+       let(:destroy_pipeline_service) { double('DestroyPipelineService', execute: nil) }
+
+       context 'with ci_optimize_project_records_destruction disabled' do
+         before do
+           stub_feature_flags(ci_optimize_project_records_destruction: false)
+         end
+
+         it 'bulk-fails project ci pipelines' do
+           expect(::Ci::AbortPipelinesService)
+             .to receive_message_chain(:new, :execute)
+             .with(project.all_pipelines, :project_deleted)
+
+           destroy_project(project, user, {})
+         end
+
+         it 'does not destroy CI records via DestroyPipelineService' do
+           expect(::Ci::DestroyPipelineService).not_to receive(:new)
+
+           destroy_project(project, user, {})
+         end
+       end
+
+       context 'with ci_optimize_project_records_destruction enabled' do
+         it 'executes DestroyPipelineService for project ci pipelines' do
+           allow(::Ci::DestroyPipelineService).to receive(:new).and_return(destroy_pipeline_service)
+
+           expect(::Ci::AbortPipelinesService)
+             .to receive_message_chain(:new, :execute)
+             .with(project.all_pipelines, :project_deleted)
+
+           pipelines.each do |pipeline|
+             expect(destroy_pipeline_service)
+               .to receive(:execute)
+               .with(pipeline)
+           end
+
+           destroy_project(project, user, {})
+         end
+       end
      end
@@ -1340,3 +1340,4 @@
  - "./spec/workers/stage_update_worker_spec.rb"
  - "./spec/workers/stuck_merge_jobs_worker_spec.rb"
  - "./ee/spec/requests/api/graphql/project/pipelines/dast_profile_spec.rb"
+ - "./spec/services/projects/overwrite_project_service_spec.rb"
@@ -85,7 +85,18 @@ RSpec.shared_examples 'Composer package creation' do |user_type, status, add_mem
        expect(response).to have_gitlab_http_status(status)
      end

      it_behaves_like 'a package tracking event', described_class.name, 'push_package'
+
+     context 'when package creation fails' do
+       before do
+         allow_next_instance_of(::Packages::Composer::CreatePackageService) do |create_package_service|
+           allow(create_package_service).to receive(:execute).and_raise(StandardError)
+         end
+       end
+
+       it_behaves_like 'not a package tracking event'
+     end
    end
  end