Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-10-25 15:10:55 +00:00
parent f471d83619
commit c59393a068
66 changed files with 427 additions and 1023 deletions

View File

@ -150,12 +150,12 @@ Dangerfile @gl-quality/eng-prod
/ee/lib/ee/api/entities/merge_train.rb @gitlab-org/maintainers/cicd-verify
/**/javascripts/jobs/ @gitlab-org/ci-cd/verify/frontend
/**/javascripts/pipelines/ @gitlab-org/ci-cd/verify/frontend
/app/assets/javascripts/ci/ @gitlab-org/ci-cd/verify/frontend
/app/assets/javascripts/pipeline_new/ @gitlab-org/ci-cd/verify/frontend
/app/assets/javascripts/ci_lint/ @gitlab-org/ci-cd/verify/frontend
/app/assets/javascripts/ci_variable_list/ @gitlab-org/ci-cd/verify/frontend
/app/assets/javascripts/pipeline_schedules/ @gitlab-org/ci-cd/verify/frontend
/app/assets/javascripts/pipeline_editor/ @gitlab-org/ci-cd/verify/frontend
/app/assets/javascripts/ci/runner/ @gitlab-org/ci-cd/verify/frontend
/ee/app/assets/javascripts/ci_minutes_usage/ @gitlab-org/ci-cd/verify/frontend
/ee/app/assets/javascripts/usage_quotas/ci_minutes_usage/ @gitlab-org/ci-cd/verify/frontend
/ee/app/assets/javascripts/usage_quotas/pipelines/ @gitlab-org/ci-cd/verify/frontend
@ -769,7 +769,6 @@ lib/gitlab/checks/** @proglottis @toon @zj-gitlab
/doc/operations/index.md @msedlakjakubowski
/doc/operations/metrics/ @msedlakjakubowski
/doc/operations/metrics/dashboards/ @msedlakjakubowski
/doc/operations/product_analytics.md @lciutacu
/doc/operations/tracing.md @msedlakjakubowski
/doc/policy/ @axil
/doc/raketasks/ @axil
@ -1013,7 +1012,6 @@ lib/gitlab/checks/** @proglottis @toon @zj-gitlab
/app/assets/javascripts/related_issues/components/issue_token.vue @gitlab-org/manage/authentication-and-authorization/approvers
/app/assets/javascripts/ci/runner/components/registration/registration_token.vue @gitlab-org/manage/authentication-and-authorization/approvers
/app/assets/javascripts/ci/runner/components/registration/registration_token_reset_dropdown_item.vue @gitlab-org/manage/authentication-and-authorization/approvers
/app/assets/javascripts/ci/runner/components/search_tokens/ @gitlab-org/manage/authentication-and-authorization/approvers
/app/assets/javascripts/token_access/components/ @gitlab-org/manage/authentication-and-authorization/approvers
/app/assets/javascripts/token_access/index.js @gitlab-org/manage/authentication-and-authorization/approvers
/app/assets/stylesheets/page_bundles/profile_two_factor_auth.scss @gitlab-org/manage/authentication-and-authorization/approvers
@ -1148,7 +1146,6 @@ lib/gitlab/checks/** @proglottis @toon @zj-gitlab
/ee/app/assets/javascripts/password/ @gitlab-org/manage/authentication-and-authorization/approvers
/ee/app/assets/javascripts/pipelines/components/pipelines_list/ @gitlab-org/manage/authentication-and-authorization/approvers
/ee/app/assets/javascripts/requirements/components/tokens/ @gitlab-org/manage/authentication-and-authorization/approvers
/ee/app/assets/javascripts/ci/runner/components/search_tokens/ @gitlab-org/manage/authentication-and-authorization/approvers
/ee/app/assets/javascripts/saml_providers/scim_token_service.js @gitlab-org/manage/authentication-and-authorization/approvers
/ee/app/assets/javascripts/saml_sso/components/ @gitlab-org/manage/authentication-and-authorization/approvers
/ee/app/assets/javascripts/vue_merge_request_widget/components/approvals/approvals_auth.vue @gitlab-org/manage/authentication-and-authorization/approvers

View File

@ -1618,8 +1618,10 @@
rules:
- if: '$CODE_QUALITY_DISABLED'
when: never
# Run code_quality on master until https://gitlab.com/gitlab-org/gitlab/-/issues/363747 is resolved
- <<: *if-default-branch-refs
- <<: *if-default-refs
changes: *code-backstage-patterns
changes: *code-backstage-qa-patterns
.reports:rules:brakeman-sast:
rules:

View File

@ -471,7 +471,9 @@ RSpec/FactoriesInMigrationSpecs:
RSpec/FactoryBot/AvoidCreate:
Enabled: true
Include:
- 'spec/presenters/**/*.rb'
- 'spec/serializers/**/*.rb'
- 'ee/spec/presenters/**/*.rb'
- 'ee/spec/serializers/**/*.rb'
Cop/IncludeSidekiqWorker:

View File

@ -2,6 +2,28 @@
RSpec/FactoryBot/AvoidCreate:
Details: grace period
Exclude:
- 'ee/spec/presenters/approval_rule_presenter_spec.rb'
- 'ee/spec/presenters/audit_event_presenter_spec.rb'
- 'ee/spec/presenters/ci/build_presenter_spec.rb'
- 'ee/spec/presenters/ci/build_runner_presenter_spec.rb'
- 'ee/spec/presenters/ci/minutes/usage_presenter_spec.rb'
- 'ee/spec/presenters/ci/pipeline_presenter_spec.rb'
- 'ee/spec/presenters/dast/site_profile_presenter_spec.rb'
- 'ee/spec/presenters/ee/blob_presenter_spec.rb'
- 'ee/spec/presenters/ee/clusters/cluster_presenter_spec.rb'
- 'ee/spec/presenters/ee/instance_clusterable_presenter_spec.rb'
- 'ee/spec/presenters/ee/issue_presenter_spec.rb'
- 'ee/spec/presenters/ee/projects/security/configuration_presenter_spec.rb'
- 'ee/spec/presenters/epic_issue_presenter_spec.rb'
- 'ee/spec/presenters/epic_presenter_spec.rb'
- 'ee/spec/presenters/group_clusterable_presenter_spec.rb'
- 'ee/spec/presenters/label_presenter_spec.rb'
- 'ee/spec/presenters/merge_request_approver_presenter_spec.rb'
- 'ee/spec/presenters/merge_request_presenter_spec.rb'
- 'ee/spec/presenters/project_clusterable_presenter_spec.rb'
- 'ee/spec/presenters/subscription_presenter_spec.rb'
- 'ee/spec/presenters/vulnerability_presenter_spec.rb'
- 'ee/spec/presenters/web_hooks/group/hook_presenter_spec.rb'
- 'ee/spec/serializers/analytics/cycle_analytics/stage_entity_spec.rb'
- 'ee/spec/serializers/analytics/cycle_analytics/value_stream_errors_serializer_spec.rb'
- 'ee/spec/serializers/audit_event_entity_spec.rb'
@ -11,7 +33,6 @@ RSpec/FactoryBot/AvoidCreate:
- 'ee/spec/serializers/clusters/environment_entity_spec.rb'
- 'ee/spec/serializers/clusters/environment_serializer_spec.rb'
- 'ee/spec/serializers/dashboard_environment_entity_spec.rb'
- 'ee/spec/serializers/dashboard_environments_project_entity_spec.rb'
- 'ee/spec/serializers/dashboard_environments_serializer_spec.rb'
- 'ee/spec/serializers/dashboard_operations_project_entity_spec.rb'
- 'ee/spec/serializers/dependency_entity_spec.rb'
@ -73,6 +94,62 @@ RSpec/FactoryBot/AvoidCreate:
- 'ee/spec/serializers/vulnerabilities/scanner_entity_spec.rb'
- 'ee/spec/serializers/vulnerability_entity_spec.rb'
- 'ee/spec/serializers/vulnerability_note_entity_spec.rb'
- 'spec/presenters/alert_management/alert_presenter_spec.rb'
- 'spec/presenters/blob_presenter_spec.rb'
- 'spec/presenters/blobs/notebook_presenter_spec.rb'
- 'spec/presenters/ci/bridge_presenter_spec.rb'
- 'spec/presenters/ci/build_presenter_spec.rb'
- 'spec/presenters/ci/build_runner_presenter_spec.rb'
- 'spec/presenters/ci/group_variable_presenter_spec.rb'
- 'spec/presenters/ci/pipeline_artifacts/code_coverage_presenter_spec.rb'
- 'spec/presenters/ci/pipeline_artifacts/code_quality_mr_diff_presenter_spec.rb'
- 'spec/presenters/ci/pipeline_presenter_spec.rb'
- 'spec/presenters/ci/stage_presenter_spec.rb'
- 'spec/presenters/ci/trigger_presenter_spec.rb'
- 'spec/presenters/ci/variable_presenter_spec.rb'
- 'spec/presenters/clusterable_presenter_spec.rb'
- 'spec/presenters/clusters/cluster_presenter_spec.rb'
- 'spec/presenters/commit_presenter_spec.rb'
- 'spec/presenters/commit_status_presenter_spec.rb'
- 'spec/presenters/deployments/deployment_presenter_spec.rb'
- 'spec/presenters/event_presenter_spec.rb'
- 'spec/presenters/gitlab/blame_presenter_spec.rb'
- 'spec/presenters/group_clusterable_presenter_spec.rb'
- 'spec/presenters/instance_clusterable_presenter_spec.rb'
- 'spec/presenters/issue_presenter_spec.rb'
- 'spec/presenters/label_presenter_spec.rb'
- 'spec/presenters/merge_request_presenter_spec.rb'
- 'spec/presenters/milestone_presenter_spec.rb'
- 'spec/presenters/packages/composer/packages_presenter_spec.rb'
- 'spec/presenters/packages/conan/package_presenter_spec.rb'
- 'spec/presenters/packages/detail/package_presenter_spec.rb'
- 'spec/presenters/packages/helm/index_presenter_spec.rb'
- 'spec/presenters/packages/npm/package_presenter_spec.rb'
- 'spec/presenters/packages/nuget/package_metadata_presenter_spec.rb'
- 'spec/presenters/packages/nuget/packages_metadata_presenter_spec.rb'
- 'spec/presenters/packages/nuget/packages_versions_presenter_spec.rb'
- 'spec/presenters/packages/nuget/search_results_presenter_spec.rb'
- 'spec/presenters/packages/nuget/service_index_presenter_spec.rb'
- 'spec/presenters/packages/pypi/simple_index_presenter_spec.rb'
- 'spec/presenters/packages/pypi/simple_package_versions_presenter_spec.rb'
- 'spec/presenters/pages_domain_presenter_spec.rb'
- 'spec/presenters/project_clusterable_presenter_spec.rb'
- 'spec/presenters/project_hook_presenter_spec.rb'
- 'spec/presenters/project_presenter_spec.rb'
- 'spec/presenters/projects/import_export/project_export_presenter_spec.rb'
- 'spec/presenters/projects/security/configuration_presenter_spec.rb'
- 'spec/presenters/projects/settings/deploy_keys_presenter_spec.rb'
- 'spec/presenters/prometheus_alert_presenter_spec.rb'
- 'spec/presenters/release_presenter_spec.rb'
- 'spec/presenters/releases/link_presenter_spec.rb'
- 'spec/presenters/search_service_presenter_spec.rb'
- 'spec/presenters/service_hook_presenter_spec.rb'
- 'spec/presenters/snippet_blob_presenter_spec.rb'
- 'spec/presenters/snippet_presenter_spec.rb'
- 'spec/presenters/terraform/modules_presenter_spec.rb'
- 'spec/presenters/tree_entry_presenter_spec.rb'
- 'spec/presenters/user_presenter_spec.rb'
- 'spec/presenters/web_hook_log_presenter_spec.rb'
- 'spec/serializers/access_token_entity_base_spec.rb'
- 'spec/serializers/analytics_build_entity_spec.rb'
- 'spec/serializers/analytics_build_serializer_spec.rb'

View File

@ -31,7 +31,7 @@ gem 'responders', '~> 3.0'
gem 'sprockets', '~> 3.7.0'
gem 'view_component', '~> 2.71.0'
gem 'view_component', '~> 2.74.1'
# Default values for AR models
gem 'default_value_for', '~> 3.4.0'
@ -134,7 +134,7 @@ gem 'hashie', '~> 5.0.0'
gem 'hashie-forbidden_attributes'
# Pagination
gem 'kaminari', '~> 1.0'
gem 'kaminari', '~> 1.2.2'
# HAML
gem 'hamlit', '~> 2.15.0'

View File

@ -294,10 +294,10 @@
{"name":"json_schemer","version":"0.2.18","platform":"ruby","checksum":"3362c21efbefdd12ce994e541a1e7fdb86fd267a6541dd8715e8a580fe3b6be6"},
{"name":"jsonpath","version":"1.1.2","platform":"ruby","checksum":"6804124c244d04418218acb85b15c7caa79c592d7d6970195300428458946d3a"},
{"name":"jwt","version":"2.1.0","platform":"ruby","checksum":"7e7e7ffc1a5ebce628ac7da428341c50615a3a10ac47bb74c22c1cba325613f0"},
{"name":"kaminari","version":"1.2.1","platform":"ruby","checksum":"03b26388ebb732b708e40b4f1d858c4cbc58d7cb18d12eeb9364176f23c3b3ef"},
{"name":"kaminari-actionview","version":"1.2.1","platform":"ruby","checksum":"31a3dc6955e7dff95f6133e1f4c03b9dec36d714b632330034ee51b33d0c1770"},
{"name":"kaminari-activerecord","version":"1.2.1","platform":"ruby","checksum":"8327823ddf8a8e059ce10d9da0c784ef44d5323e9f6d53054e03d26876efc50a"},
{"name":"kaminari-core","version":"1.2.1","platform":"ruby","checksum":"9ff2f0fa5a454731943a847bbd9960f0b9f859dd6910df5f40d6c9e049660440"},
{"name":"kaminari","version":"1.2.2","platform":"ruby","checksum":"c4076ff9adccc6109408333f87b5c4abbda5e39dc464bd4c66d06d9f73442a3e"},
{"name":"kaminari-actionview","version":"1.2.2","platform":"ruby","checksum":"1330f6fc8b59a4a4ef6a549ff8a224797289ebf7a3a503e8c1652535287cc909"},
{"name":"kaminari-activerecord","version":"1.2.2","platform":"ruby","checksum":"0dd3a67bab356a356f36b3b7236bcb81cef313095365befe8e98057dd2472430"},
{"name":"kaminari-core","version":"1.2.2","platform":"ruby","checksum":"3bd26fec7370645af40ca73b9426a448d09b8a8ba7afa9ba3c3e0d39cdbb83ff"},
{"name":"kas-grpc","version":"0.0.2","platform":"ruby","checksum":"111ff7515952e939f491297ba4c69a218b72d9d0ef8e5bff80a5df6a56df9a16"},
{"name":"knapsack","version":"1.21.1","platform":"ruby","checksum":"82f70422adebcacec1b514f6ebff65265fc85d836e3c320718a160d8ac41cf14"},
{"name":"kramdown","version":"2.3.2","platform":"ruby","checksum":"cb4530c2e9d16481591df2c9336723683c354e5416a5dd3e447fa48215a6a71c"},
@ -620,7 +620,7 @@
{"name":"validates_hostname","version":"1.0.11","platform":"ruby","checksum":"d506bae0342ec14c920eb319e057fc1886c321a59b85b4b6e966ee4b88fab8c3"},
{"name":"version_gem","version":"1.1.0","platform":"ruby","checksum":"6b009518020db57f51ec7b410213fae2bf692baea9f1b51770db97fbc93d9a80"},
{"name":"version_sorter","version":"2.2.4","platform":"ruby","checksum":"7ad071609edfaa3cf28c42d83b1a03096e43512244ae5a9e2fce1404f7e06d41"},
{"name":"view_component","version":"2.71.0","platform":"ruby","checksum":"c1880647800d9cfb03ff4ba92313db624a4a4b3d5753e137effe86e5f2b3662b"},
{"name":"view_component","version":"2.74.1","platform":"ruby","checksum":"0bbd47a9c11455a45043dc01aa604db708654718a4d8755c911425482e8392c0"},
{"name":"vmstat","version":"2.3.0","platform":"ruby","checksum":"ab5446a3e3bd0a9cdb9d9ac69a0bbd119c4f161d945a0846a519dd7018af656d"},
{"name":"warden","version":"1.2.9","platform":"ruby","checksum":"46684f885d35a69dbb883deabf85a222c8e427a957804719e143005df7a1efd0"},
{"name":"warning","version":"1.3.0","platform":"ruby","checksum":"23695a5d8e50bd5c46068931b529bee0b28e4982cbcefbe77d867800dde8069e"},

View File

@ -778,18 +778,18 @@ GEM
jsonpath (1.1.2)
multi_json
jwt (2.1.0)
kaminari (1.2.1)
kaminari (1.2.2)
activesupport (>= 4.1.0)
kaminari-actionview (= 1.2.1)
kaminari-activerecord (= 1.2.1)
kaminari-core (= 1.2.1)
kaminari-actionview (1.2.1)
kaminari-actionview (= 1.2.2)
kaminari-activerecord (= 1.2.2)
kaminari-core (= 1.2.2)
kaminari-actionview (1.2.2)
actionview
kaminari-core (= 1.2.1)
kaminari-activerecord (1.2.1)
kaminari-core (= 1.2.2)
kaminari-activerecord (1.2.2)
activerecord
kaminari-core (= 1.2.1)
kaminari-core (1.2.1)
kaminari-core (= 1.2.2)
kaminari-core (1.2.2)
kas-grpc (0.0.2)
grpc (~> 1.0)
knapsack (1.21.1)
@ -1488,7 +1488,7 @@ GEM
activesupport (>= 3.0)
version_gem (1.1.0)
version_sorter (2.2.4)
view_component (2.71.0)
view_component (2.74.1)
activesupport (>= 5.0.0, < 8.0)
concurrent-ruby (~> 1.0)
method_source (~> 1.0)
@ -1677,7 +1677,7 @@ DEPENDENCIES
json (~> 2.5.1)
json_schemer (~> 0.2.18)
jwt (~> 2.1.0)
kaminari (~> 1.0)
kaminari (~> 1.2.2)
kas-grpc (~> 0.0.2)
knapsack (~> 1.21.1)
kramdown (~> 2.3.1)
@ -1825,7 +1825,7 @@ DEPENDENCIES
valid_email (~> 0.1)
validates_hostname (~> 1.0.11)
version_sorter (~> 2.2.4)
view_component (~> 2.71.0)
view_component (~> 2.74.1)
vmstat (~> 2.3.0)
warning (~> 1.3.0)
webauthn (~> 2.3)

View File

@ -8,7 +8,7 @@ export const initTimezoneDropdown = () => {
return null;
}
const { timezoneData, initialValue } = el.dataset;
const { timezoneData, initialValue, name } = el.dataset;
const timezones = JSON.parse(timezoneData);
const timezoneDropdown = new Vue({
@ -23,7 +23,7 @@ export const initTimezoneDropdown = () => {
props: {
value: this.value,
timezoneData: timezones,
name: 'user[timezone]',
name,
},
class: 'gl-md-form-input-lg',
});

View File

@ -6,8 +6,8 @@ import { REF_TYPE_BRANCHES, REF_TYPE_TAGS } from '~/ref/constants';
import setupNativeFormVariableList from '~/ci_variable_list/native_form_variable_list';
import GlFieldErrors from '~/gl_field_errors';
import Translate from '~/vue_shared/translate';
import { initTimezoneDropdown } from '../../../profiles/init_timezone_dropdown';
import IntervalPatternInput from './components/interval_pattern_input.vue';
import TimezoneDropdown from './components/timezone_dropdown';
Vue.use(Translate);
@ -81,13 +81,6 @@ export default () => {
const formElement = document.getElementById('new-pipeline-schedule-form');
gl.timezoneDropdown = new TimezoneDropdown({
$dropdownEl: $('.js-timezone-dropdown'),
$inputEl: $('#schedule_cron_timezone'),
onSelectTimezone: () => {
gl.pipelineScheduleFieldErrors.updateFormValidityState();
},
});
gl.pipelineScheduleFieldErrors = new GlFieldErrors(formElement);
initTargetRefDropdown();
@ -97,3 +90,5 @@ export default () => {
formField: 'schedule',
});
};
initTimezoneDropdown();

View File

@ -1,61 +0,0 @@
# frozen_string_literal: true
class Projects::ProductAnalyticsController < Projects::ApplicationController
before_action :feature_enabled!, only: [:index, :setup, :test, :graphs]
before_action :authorize_read_product_analytics!
before_action :tracker_variables, only: [:setup, :test]
feature_category :product_analytics
def index
@events = product_analytics_events.order_by_time.page(params[:page])
end
def setup
end
def test
@event = product_analytics_events.try(:first)
end
def graphs
@graphs = []
@timerange = 30
requested_graphs = %w(platform os_timezone br_lang doc_charset)
requested_graphs.each do |graph|
@graphs << ProductAnalytics::BuildGraphService
.new(project, { graph: graph, timerange: @timerange })
.execute
end
@activity_graph = ProductAnalytics::BuildActivityGraphService
.new(project, { timerange: @timerange })
.execute
end
private
def product_analytics_events
@project.product_analytics_events
end
def tracker_variables
# We use project id as Snowplow appId
@project_id = @project.id.to_s
# Snowplow remembers values like appId and platform between reloads.
# That is why we have to rename the tracker with a random integer.
@random = rand(999999)
# Generate random platform every time a tracker is rendered.
@platform = %w(web mob app)[(@random % 3)]
end
def feature_enabled!
render_404 unless Feature.enabled?(:product_analytics, @project)
end
end
Projects::ProductAnalyticsController.prepend_mod_with('Projects::ProductAnalyticsController')

View File

@ -43,7 +43,7 @@ class EnvironmentSerializer < BaseSerializer
# immediately.
items = @paginator.paginate(items) if paginated?
environments = batch_load(resource.where(id: items.map(&:last_id)))
environments = batch_load(Environment.where(id: items.map(&:last_id)))
environments_by_id = environments.index_by(&:id)
items.map do |item|

View File

@ -62,7 +62,7 @@
%p= s_("Profiles|Set your local time zone.")
.col-lg-8
= f.label :user_timezone, _("Time zone")
.js-timezone-dropdown{ data: { timezone_data: timezone_data.to_json, initial_value: @user.timezone } }
.js-timezone-dropdown{ data: { timezone_data: timezone_data.to_json, initial_value: @user.timezone, name: 'user[timezone]' } }
.col-lg-12
%hr
.row.js-search-settings-section

View File

@ -9,10 +9,10 @@
= f.label :cron, _('Interval Pattern'), class: 'label-bold'
#interval-pattern-input{ data: { initial_interval: @schedule.cron, daily_limit: @schedule.daily_limit } }
.form-group.row
.col-md-9
= f.label :cron_timezone, _('Cron Timezone'), class: 'label-bold'
= dropdown_tag(_("Select a timezone"), options: { toggle_class: 'gl-button btn btn-default js-timezone-dropdown w-100', dropdown_class: 'w-100', title: _("Select a timezone"), filter: true, placeholder: s_("OfSearchInADropdown|Filter"), data: { data: timezone_data } } )
= f.text_field :cron_timezone, value: @schedule.cron_timezone, id: 'schedule_cron_timezone', class: 'hidden', name: 'schedule[cron_timezone]', required: true
.col-md-9{ data: { testid: 'schedule-timezone' } }
= f.label :cron_timezone, _("Cron Timezone")
.js-timezone-dropdown{ data: { timezone_data: timezone_data.to_json, initial_value: @schedule.cron_timezone, name: 'schedule[cron_timezone]' } }
.form-group.row
.col-md-9
= f.label :ref, _('Target branch or tag'), class: 'label-bold'

View File

@ -1,6 +0,0 @@
- graph = local_assigns.fetch(:graph)
%h3
= graph[:id]
.js-project-analytics-chart{ "data-chart-data": graph.to_json, "data-chart-id": graph[:id] }

View File

@ -1,5 +0,0 @@
= gl_tabs_nav({ class: 'mb-3'}) do
= gl_tab_link_to _('Events'), project_product_analytics_path(@project)
= gl_tab_link_to _('Graphs'), graphs_project_product_analytics_path(@project)
= gl_tab_link_to _('Test'), test_project_product_analytics_path(@project)
= gl_tab_link_to _('Setup'), setup_project_product_analytics_path(@project)

View File

@ -1,10 +0,0 @@
;(function(p,l,o,w,i,n,g){if(!p[i]){p.GlobalSnowplowNamespace=p.GlobalSnowplowNamespace||[];
p.GlobalSnowplowNamespace.push(i);p[i]=function(){(p[i].q=p[i].q||[]).push(arguments)
};p[i].q=p[i].q||[];n=l.createElement(o);g=l.getElementsByTagName(o)[0];n.async=1;
n.src=w;g.parentNode.insertBefore(n,g)}}(window,document,"script","<%= product_analytics_tracker_url -%>","snowplow<%= @random -%>"));
snowplow<%= @random -%>("newTracker", "sp", "<%= product_analytics_tracker_collector_url -%>", {
appId: "<%= @project_id -%>",
platform: "<%= @platform -%>",
eventMethod: "get"
});
snowplow<%= @random -%>('trackPageView');

View File

@ -1,16 +0,0 @@
- page_title _('Product Analytics')
= render 'links'
%p
= _('Showing graphs based on events of the last %{timerange} days.') % { timerange: @timerange }
.gl-mb-3
= render 'graph', graph: @activity_graph
- @graphs.each_slice(2) do |pair|
.row.append-bottom-10
- pair.each do |graph|
.col-md-6{ id: graph[:id] }
= render 'graph', graph: graph

View File

@ -1,16 +0,0 @@
- page_title _('Product Analytics')
= render 'links'
- if @events.any?
%p
- if @events.total_count > @events.size
= _('Number of events for this project: %{total_count}.') % { total_count: number_with_delimiter(@events.total_count) }
%ol
- @events.each do |event|
%li
%code= event.as_json_wo_empty
- else
.empty-state
.text-content
= _('There are currently no events.')

View File

@ -1,12 +0,0 @@
- page_title _('Product Analytics')
= render 'links'
%p
= _('Copy the code below to implement tracking in your application:')
%pre
= render "tracker"
%p.hint
= _('A platform value can be web, mob or app.')

View File

@ -1,17 +0,0 @@
- page_title _('Product Analytics')
= render 'links'
%p
= _('This page sends a payload. Go back to the events page to see a newly created event.')
- if @event
%p
= _('Last item before this page loaded in your browser:')
%code
= @event.as_json_wo_empty
-# haml-lint:disable InlineJavaScript
:javascript
#{render 'tracker'}

View File

@ -1,8 +0,0 @@
---
name: product_analytics
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/36443
rollout_issue_url:
milestone: '13.2'
type: development
group: group::product intelligence
default_enabled: false

View File

@ -2,7 +2,6 @@
require 'sidekiq/web'
require 'sidekiq/cron/web'
require 'product_analytics/collector_app'
InitializerConnections.with_disabled_database_connections do
Rails.application.routes.draw do
@ -216,9 +215,6 @@ InitializerConnections.with_disabled_database_connections do
# Deprecated route for permanent failures
# https://gitlab.com/gitlab-org/gitlab/-/issues/362606
post '/members/mailgun/permanent_failures' => 'mailgun/webhooks#process_webhook'
# Product analytics collector
match '/collector/i', to: ProductAnalytics::CollectorApp.new, via: :all
end
# End of the /-/ scope.

View File

@ -380,14 +380,6 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
resources :projects, only: :index
end
resources :product_analytics, only: [:index] do
collection do
get :setup
get :test
get :graphs
end
end
resources :error_tracking, only: [:index], controller: :error_tracking do
collection do
get ':issue_id/details',

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
class AddUniqueIndexOnCiRunnersToken < Gitlab::Database::Migration[2.0]
disable_ddl_transaction!
INDEX_NAME = 'index_uniq_ci_runners_on_token'
def up
finalize_background_migration 'ResetDuplicateCiRunnersTokenValues'
add_concurrent_index :ci_runners,
:token,
name: INDEX_NAME,
unique: true
end
def down
remove_concurrent_index_by_name :ci_runners, INDEX_NAME
end
end

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
class AddUniqueIndexOnCiRunnersTokenEncrypted < Gitlab::Database::Migration[2.0]
disable_ddl_transaction!
INDEX_NAME = 'index_uniq_ci_runners_on_token_encrypted'
def up
finalize_background_migration 'ResetDuplicateCiRunnersTokenEncryptedValues'
add_concurrent_index :ci_runners,
:token_encrypted,
name: INDEX_NAME,
unique: true
end
def down
remove_concurrent_index_by_name :ci_runners, INDEX_NAME
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class DropIndexOnCiRunnersToken < Gitlab::Database::Migration[2.0]
disable_ddl_transaction!
INDEX_NAME = 'index_ci_runners_on_token'
def up
remove_concurrent_index_by_name :ci_runners, INDEX_NAME
end
def down
add_concurrent_index :ci_runners,
:token,
name: INDEX_NAME
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class DropIndexOnCiRunnersTokenEncrypted < Gitlab::Database::Migration[2.0]
disable_ddl_transaction!
INDEX_NAME = 'index_ci_runners_on_token_encrypted'
def up
remove_concurrent_index_by_name :ci_runners, INDEX_NAME
end
def down
add_concurrent_index :ci_runners,
:token_encrypted,
name: INDEX_NAME
end
end

View File

@ -0,0 +1 @@
10caa548bccc134775ed14f85eae2b2063e83afe4a932982c353ecf1549a557d

View File

@ -0,0 +1 @@
86d979a179c504508fd2e9c1a62e935884297054b13b78a4c1460679d75f5b96

View File

@ -0,0 +1 @@
a9122e3772587b85a889740ccc54d48b6ead91a3b472d712e1e8bf5946655cf4

View File

@ -0,0 +1 @@
d7c109cba935e1f355789dffa1d64b29b787f44ced7b0d3090e19a2dd0b8e266

View File

@ -28460,10 +28460,6 @@ CREATE INDEX index_ci_runners_on_locked ON ci_runners USING btree (locked);
CREATE INDEX index_ci_runners_on_runner_type ON ci_runners USING btree (runner_type);
CREATE INDEX index_ci_runners_on_token ON ci_runners USING btree (token);
CREATE INDEX index_ci_runners_on_token_encrypted ON ci_runners USING btree (token_encrypted);
CREATE INDEX index_ci_runners_on_token_expires_at_and_id_desc ON ci_runners USING btree (token_expires_at, id DESC);
CREATE INDEX index_ci_runners_on_token_expires_at_desc_and_id_desc ON ci_runners USING btree (token_expires_at DESC, id DESC);
@ -30668,6 +30664,10 @@ CREATE INDEX index_u2f_registrations_on_key_handle ON u2f_registrations USING bt
CREATE INDEX index_u2f_registrations_on_user_id ON u2f_registrations USING btree (user_id);
CREATE UNIQUE INDEX index_uniq_ci_runners_on_token ON ci_runners USING btree (token);
CREATE UNIQUE INDEX index_uniq_ci_runners_on_token_encrypted ON ci_runners USING btree (token_encrypted);
CREATE UNIQUE INDEX index_uniq_im_issuable_escalation_statuses_on_issue_id ON incident_management_issuable_escalation_statuses USING btree (issue_id);
CREATE UNIQUE INDEX index_uniq_projects_on_runners_token ON projects USING btree (runners_token);

View File

@ -118,10 +118,10 @@ Some feature flags can be enabled or disabled on a per project basis:
Feature.enable(:<feature flag>, Project.find(<project id>))
```
For example, to enable the [`:product_analytics`](../operations/product_analytics.md) feature flag for project `1234`:
For example, to enable the `:my_awesome_feature` feature flag for project `1234`:
```ruby
Feature.enable(:product_analytics, Project.find(1234))
Feature.enable(:my_awesome_feature, Project.find(1234))
```
`Feature.enable` and `Feature.disable` always return `true`, even if the application doesn't use the flag:

View File

@ -20,8 +20,10 @@ but there are [key differences](pipeline_architectures.md).
## Parent-child pipelines
A parent pipeline is one that triggers a downstream pipeline in the same project.
The downstream pipeline is called a child pipeline. Child pipelines:
A parent pipeline is a pipeline that triggers a downstream pipeline in the same project.
The downstream pipeline is called a child pipeline.
Child pipelines:
- Run under the same project, ref, and commit SHA as the parent pipeline.
- Do not directly affect the overall status of the ref the pipeline runs against. For example,
@ -30,18 +32,18 @@ The downstream pipeline is called a child pipeline. Child pipelines:
pipeline is triggered with [`strategy:depend`](../yaml/index.md#triggerstrategy).
- Are automatically canceled if the pipeline is configured with [`interruptible`](../yaml/index.md#interruptible)
when a new pipeline is created for the same ref.
- Are not displayed in the pipeline index page. You can only view child pipelines on
their parent pipeline's page.
- Are not displayed in the project's pipeline list. You can only view child pipelines on
their parent pipeline's details page.
### Nested child pipelines
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/29651) in GitLab 13.4.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/243747) in GitLab 13.5.
Parent and child pipelines were introduced with a maximum depth of one level of child
pipelines, which was later increased to two. A parent pipeline can trigger many child
pipelines, and these child pipelines can trigger their own child pipelines. It's not
possible to trigger another level of child pipelines.
Parent and child pipelines have a maximum depth of two levels of child pipelines.
A parent pipeline can trigger many child pipelines, and these child pipelines can trigger
their own child pipelines. You cannot trigger another level of child pipelines.
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
For an overview, see [Nested Dynamic Pipelines](https://youtu.be/C5j3ju9je2M).
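For illustration, a minimal sketch of the two-level nesting limit. The file names `child.yml` and `grandchild.yml` are placeholders:

```yaml
# .gitlab-ci.yml (parent): triggers the first-level child pipeline
trigger-child:
  trigger:
    include:
      - local: child.yml

# child.yml: the child pipeline can trigger one more level
trigger-grandchild:
  trigger:
    include:
      - local: grandchild.yml

# grandchild.yml: jobs at this level cannot trigger further child pipelines
test:
  script: echo "deepest allowed level"
```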
@ -52,11 +54,6 @@ A pipeline in one project can trigger downstream pipelines in another project,
called multi-project pipelines. The user triggering the upstream pipeline must be able to
start pipelines in the downstream project, otherwise [the downstream pipeline fails to start](#trigger-job-fails-and-does-not-create-multi-project-pipeline).
For example, you might deploy your web application from three different GitLab projects.
With multi-project pipelines you can trigger a pipeline in each project, where each
has its own build, test, and deploy process. You can visualize the connected pipelines
in one place, including all cross-project interdependencies.
Multi-project pipelines:
- Are triggered from another project's pipeline, but the upstream (triggering) pipeline does
@ -68,8 +65,7 @@ Multi-project pipelines:
- Are not automatically canceled in the downstream project when using [`interruptible`](../yaml/index.md#interruptible)
if a new pipeline runs for the same ref in the upstream pipeline. They can be
automatically canceled if a new pipeline is triggered for the same ref on the downstream project.
- Multi-project pipelines are standalone pipelines because they are normal pipelines
that happened to be triggered by an external project. They are all visible on the pipeline index page.
- Are visible in the downstream project's pipeline list.
- Are independent, so there are no nesting limits.
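A minimal multi-project trigger sketch; the project path `my-group/my-downstream-project` is a placeholder:

```yaml
deploy-downstream:
  stage: deploy
  trigger:
    project: my-group/my-downstream-project  # full path to the downstream project
```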
Learn more in the "Cross-project Pipeline Triggering and Visualization" demo at
@ -87,24 +83,10 @@ always displays:
Use the [`trigger`](../yaml/index.md#trigger) keyword in your `.gitlab-ci.yml` file
to create a job that triggers a downstream pipeline. This job is called a trigger job.
After the trigger job starts, the initial status of the job is `pending` while GitLab
attempts to create the downstream pipeline. If the downstream pipeline is created,
GitLab marks the job as passed, otherwise the job failed. Alternatively,
you can [set the trigger job to show the downstream pipeline's status](#mirror-the-status-of-a-downstream-pipeline-in-the-trigger-job)
instead.
For example:
::Tabs
:::TabTitle Multi-project pipeline
```yaml
trigger_job:
trigger:
project: project-group/my-downstream-project
```
:::TabTitle Parent-child pipeline
```yaml
@ -114,21 +96,36 @@ trigger_job:
- local: path/to/child-pipeline.yml
```
:::TabTitle Multi-project pipeline
```yaml
trigger_job:
trigger:
project: project-group/my-downstream-project
```
::EndTabs
After the trigger job starts, the initial status of the job is `pending` while GitLab
attempts to create the downstream pipeline. The trigger job shows `passed` if the
downstream pipeline is created successfully; otherwise, it shows `failed`. Alternatively,
you can [set the trigger job to show the downstream pipeline's status](#mirror-the-status-of-a-downstream-pipeline-in-the-trigger-job)
instead.
### Use `rules` to control downstream pipeline jobs
You can use CI/CD variables or the [`rules`](../yaml/index.md#rulesif) keyword to
[control job behavior](../jobs/job_control.md) for downstream pipelines.
Use CI/CD variables or the [`rules`](../yaml/index.md#rulesif) keyword to
[control job behavior](../jobs/job_control.md) in downstream pipelines.
When a downstream pipeline is triggered with the [`trigger`](../yaml/index.md#trigger) keyword,
When you trigger a downstream pipeline with the [`trigger`](../yaml/index.md#trigger) keyword,
the value of the [`$CI_PIPELINE_SOURCE` predefined variable](../variables/predefined_variables.md)
for all jobs is:
- `pipeline` for multi-project pipelines.
- `parent` for parent-child pipelines.
For example, with a multi-project pipeline:
For example, to control jobs in multi-project pipelines in a project that also runs
merge request pipelines:
```yaml
job1:
@ -148,35 +145,6 @@ job3:
script: echo "This job runs in both multi-project and merge request pipelines"
```
### Specify a branch for multi-project pipelines
You can specify a branch name for a multi-project pipeline to use. GitLab uses
the commit on the head of the branch to create the downstream pipeline:
```yaml
rspec:
stage: test
script: bundle exec rspec
staging:
stage: deploy
trigger:
project: my/deployment
branch: stable-11-2
```
Use:
- The `project` keyword to specify the full path to a downstream project.
In [GitLab 15.3 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/367660), variable expansion is
supported.
- The `branch` keyword to specify the name of a branch or [tag](../../topics/git/tags.md)
in the project specified by `project`. If you use a tag when a branch exists with the same
name, the downstream pipeline fails to create with the error: `downstream pipeline can not be created, Ref is ambiguous`.
In [GitLab 12.4 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/10126), variable expansion is
supported.
### Use a child pipeline configuration file in a different project
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/205157) in GitLab 13.5.
@ -211,8 +179,6 @@ microservice_a:
### Dynamic child pipelines
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/35632) in GitLab 12.9.
You can trigger a child pipeline from a YAML file generated in a job, instead of a
static file saved in your project. This technique can be very powerful for generating pipelines
targeting content that changed or to build a matrix of targets and architectures.
@ -234,43 +200,42 @@ To trigger a child pipeline from a dynamically generated configuration file:
1. Generate the configuration file in a job and save it as an [artifact](../yaml/index.md#artifactspaths):
```yaml
generate-config:
stage: build
script: generate-ci-config > generated-config.yml
artifacts:
paths:
- generated-config.yml
```
```yaml
generate-config:
stage: build
script: generate-ci-config > generated-config.yml
artifacts:
paths:
- generated-config.yml
```
1. Configure the trigger job to run after the job that generated the configuration file,
and set `include: artifact` to the generated artifact:
```yaml
child-pipeline:
stage: test
trigger:
include:
- artifact: generated-config.yml
job: generate-config
```
```yaml
child-pipeline:
stage: test
trigger:
include:
- artifact: generated-config.yml
job: generate-config
```
In this example, `generated-config.yml` is extracted from the artifacts and used as the configuration
for triggering the child pipeline.
In this example, GitLab retrieves `generated-config.yml` and triggers a child pipeline
with the CI/CD configuration in that file.
The artifact path is parsed by GitLab, not the runner, so the path must match the
syntax for the OS running GitLab. If GitLab is running on Linux but using a Windows
runner for testing, the path separator for the trigger job is `/`. Other CI/CD
configuration for jobs that use the Windows runner, like scripts, use `\`.
configuration for jobs that use the Windows runner, like scripts, use <code>&#92;</code>.
### Run child pipelines with merge request pipelines
To trigger a child pipeline as a [merge request pipeline](merge_request_pipelines.md):
1. Set the trigger job to run on merge requests:
1. Set the trigger job to run on merge requests in the parent pipeline's configuration file:
```yaml
# parent .gitlab-ci.yml
microservice_a:
trigger:
include: path/to/microservice_a.yml
@ -278,45 +243,50 @@ To trigger a child pipeline as a [merge request pipeline](merge_request_pipeline
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
```
1. Configure the child pipeline jobs to run in merge request pipelines:
1. Configure the child pipeline jobs to run in merge request pipelines with [`rules`](../yaml/index.md#rules)
or [`workflow:rules`](../yaml/index.md#workflowrules). For example, with `rules`
in a child pipeline's configuration file:
- With [`workflow:rules`](../yaml/index.md#workflowrules):
```yaml
job1:
script: ...
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
```yaml
# child path/to/microservice_a.yml
workflow:
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
job2:
script: ...
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
```
job1:
script: ...
### Specify a branch for multi-project pipelines
job2:
script: ...
```
You can specify the branch to use when triggering a multi-project pipeline. GitLab uses
the commit on the head of the branch to create the downstream pipeline. For example:
- By configuring [rules](../yaml/index.md#rules) for each job:
```yaml
staging:
stage: deploy
trigger:
project: my/deployment
branch: stable-11-2
```
```yaml
# child path/to/microservice_a.yml
job1:
script: ...
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
Use:
job2:
script: ...
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
```
- The `project` keyword to specify the full path to the downstream project.
In [GitLab 15.3 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/367660),
you can use [variable expansion](../variables/where_variables_can_be_used.md#gitlab-ciyml-file).
- The `branch` keyword to specify the name of a branch or [tag](../../topics/git/tags.md)
in the project specified by `project`. You can use variable expansion.
## Trigger a multi-project pipeline by using the API
You can use the [CI/CD job token (`CI_JOB_TOKEN`)](../jobs/ci_job_token.md) with the
[pipeline trigger API endpoint](../../api/pipeline_triggers.md#trigger-a-pipeline-with-a-token)
to trigger multi-project pipelines from a CI/CD job. GitLab recognizes the source of the job token
and marks the pipelines as related. In the pipeline graph, the relationships are displayed
as inbound and outbound connections for upstream and downstream pipeline dependencies.
to trigger multi-project pipelines from inside a CI/CD job. GitLab sets pipelines triggered
with a job token as downstream pipelines of the pipeline that contains the job that
made the API call.
For example:
@ -360,20 +330,11 @@ To cancel a downstream pipeline that is still running, select **Cancel** (**{can
### Mirror the status of a downstream pipeline in the trigger job
You can mirror the pipeline status from the triggered pipeline to the source trigger job
You can mirror the status of the downstream pipeline in the trigger job
by using [`strategy: depend`](../yaml/index.md#triggerstrategy):
::Tabs
:::TabTitle Multi-project pipeline
```yaml
trigger_job:
trigger:
project: my/project
strategy: depend
```
:::TabTitle Parent-child pipeline
```yaml
@ -384,11 +345,20 @@ trigger_job:
strategy: depend
```
:::TabTitle Multi-project pipeline
```yaml
trigger_job:
trigger:
project: my/project
strategy: depend
```
::EndTabs
### View multi-project pipelines in pipeline graphs **(PREMIUM)**
When you trigger a multi-project pipeline, the downstream pipeline displays
After you trigger a multi-project pipeline, the downstream pipeline displays
to the right of the [pipeline graph](index.md#visualize-pipelines).
![Multi-project pipeline graph](img/multi_project_pipeline_graph_v14_3.png)
@ -398,12 +368,13 @@ displays to the right of the mini graph.
![Multi-project pipeline mini graph](img/pipeline_mini_graph_v15_0.png)
## Pass artifacts to a downstream pipeline
## Fetch artifacts from an upstream pipeline
You can pass artifacts to a downstream pipeline by using [`needs:project`](../yaml/index.md#needsproject).
Use [`needs:project`](../yaml/index.md#needsproject) to fetch artifacts from an
upstream pipeline:
1. In a job in the upstream pipeline, save the artifacts using the [`artifacts`](../yaml/index.md#artifacts) keyword.
1. Trigger the downstream pipeline with a trigger job:
1. In the upstream pipeline, save the artifacts in a job with the [`artifacts`](../yaml/index.md#artifacts)
keyword, then trigger the downstream pipeline with a trigger job:
```yaml
build_artifacts:
@ -419,9 +390,7 @@ You can pass artifacts to a downstream pipeline by using [`needs:project`](../ya
trigger: my/downstream_project
```
1. In a job in the downstream pipeline, fetch the artifacts from the upstream pipeline
by using `needs:project`. Set `job` to the job in the upstream pipeline to fetch artifacts from,
`ref` to the branch, and `artifacts: true`.
1. Use `needs:project` in a job in the downstream pipeline to fetch the artifacts.
```yaml
test:
@ -435,22 +404,27 @@ You can pass artifacts to a downstream pipeline by using [`needs:project`](../ya
artifacts: true
```
### Pass artifacts from a Merge Request pipeline
Set:
When you use `needs:project` to [pass artifacts to a downstream pipeline](#pass-artifacts-to-a-downstream-pipeline),
- `job` to the job in the upstream pipeline that created the artifacts.
- `ref` to the branch.
- `artifacts` to `true`.
### Fetch artifacts from an upstream merge request pipeline
When you use `needs:project` to [pass artifacts to a downstream pipeline](#fetch-artifacts-from-an-upstream-pipeline),
the `ref` value is usually a branch name, like `main` or `development`.
For merge request pipelines, the `ref` value is in the form of `refs/merge-requests/<id>/head`,
For [merge request pipelines](merge_request_pipelines.md), the `ref` value is in the form of `refs/merge-requests/<id>/head`,
where `id` is the merge request ID. You can retrieve this ref with the [`CI_MERGE_REQUEST_REF_PATH`](../variables/predefined_variables.md#predefined-variables-for-merge-request-pipelines)
CI/CD variable. Do not use a branch name as the `ref` with merge request pipelines,
because the downstream pipeline attempts to fetch artifacts from the latest branch pipeline.
To fetch the artifacts from the upstream `merge request` pipeline instead of the `branch` pipeline,
pass this variable to the downstream pipeline using variable inheritance:
pass `CI_MERGE_REQUEST_REF_PATH` to the downstream pipeline using [variable inheritance](#pass-yaml-defined-cicd-variables):
1. In a job in the upstream pipeline, save the artifacts using the [`artifacts`](../yaml/index.md#artifacts) keyword.
1. In the job that triggers the downstream pipeline, pass the `$CI_MERGE_REQUEST_REF_PATH` variable by using
[variable inheritance](#pass-yaml-defined-cicd-variables):
1. In the job that triggers the downstream pipeline, pass the `$CI_MERGE_REQUEST_REF_PATH` variable:
```yaml
build_artifacts:
@ -470,8 +444,7 @@ pass this variable to the downstream pipeline using variable inheritance:
```
1. In a job in the downstream pipeline, fetch the artifacts from the upstream pipeline
by using `needs:project`. Set the `ref` to the `UPSTREAM_REF` variable, and `job`
to the job in the upstream pipeline to fetch artifacts from:
by using `needs:project` and the passed variable as the `ref`:
```yaml
test:
@ -485,86 +458,133 @@ pass this variable to the downstream pipeline using variable inheritance:
artifacts: true
```
This method works for fetching artifacts from a regular merge request parent pipeline,
but fetching artifacts from [merge results](merged_results_pipelines.md) pipelines is not supported.
You can use this method to fetch artifacts from an upstream merge request pipeline,
but not from [merge results pipelines](merged_results_pipelines.md).
## Pass CI/CD variables to a downstream pipeline
You can pass CI/CD variables to a downstream pipeline with a few different methods,
based on where the variable is created or defined.
You can pass [CI/CD variables](../variables/index.md) to a downstream pipeline with
a few different methods, based on where the variable is created or defined.
### Pass YAML-defined CI/CD variables
You can use the `variables` keyword to pass CI/CD variables to a downstream pipeline,
just like you would for any other job.
You can use the `variables` keyword to pass CI/CD variables to a downstream pipeline.
For example, in a [multi-project pipeline](#multi-project-pipelines):
For example:
::Tabs
:::TabTitle Parent-child pipeline
```yaml
rspec:
stage: test
script: bundle exec rspec
variables:
VERSION: "1.0.0"
staging:
variables:
ENVIRONMENT: staging
stage: deploy
trigger: my/deployment
trigger:
include:
- local: path/to/child-pipeline.yml
```
The `ENVIRONMENT` variable is passed to every job defined in a downstream
pipeline. It is available as a variable when GitLab Runner picks a job.
In the following configuration, the `MY_VARIABLE` variable is passed to the downstream pipeline
that is created when the `trigger-downstream` job is queued. This behavior is because `trigger-downstream`
job inherits variables declared in [global `variables`](../yaml/index.md#variables) blocks,
and then GitLab passes these variables to the downstream pipeline.
:::TabTitle Multi-project pipeline
```yaml
variables:
MY_VARIABLE: my-value
VERSION: "1.0.0"
trigger-downstream:
staging:
variables:
ENVIRONMENT: something
trigger: my/project
ENVIRONMENT: staging
stage: deploy
trigger: my-group/my-deployment-project
```
::EndTabs
The `ENVIRONMENT` variable is available in every job defined in the downstream pipeline.
The `VERSION` global variable is also available in the downstream pipeline, because
all jobs in a pipeline, including trigger jobs, inherit [global `variables`](../yaml/index.md#variables).
#### Prevent global variables from being passed
You can stop global variables from reaching the downstream pipeline by using the [`inherit:variables` keyword](../yaml/index.md#inheritvariables).
For example, in a [multi-project pipeline](#multi-project-pipelines):
You can stop global CI/CD variables from reaching the downstream pipeline with
[`inherit:variables:false`](../yaml/index.md#inheritvariables).
For example:
::Tabs
:::TabTitle Parent-child pipeline
```yaml
variables:
MY_GLOBAL_VAR: value
GLOBAL_VAR: value
trigger-downstream:
trigger-job:
inherit:
variables: false
variables:
MY_LOCAL_VAR: value
trigger: my/project
JOB_VAR: value
trigger:
include:
- local: path/to/child-pipeline.yml
```
In this example, the `MY_GLOBAL_VAR` variable is not available in the triggered pipeline.
:::TabTitle Multi-project pipeline
```yaml
variables:
GLOBAL_VAR: value
trigger-job:
inherit:
variables: false
variables:
JOB_VAR: value
trigger: my-group/my-project
```
::EndTabs
The `GLOBAL_VAR` variable is not available in the triggered pipeline, but `JOB_VAR`
is available.
### Pass a predefined variable
You might want to pass some information about the upstream pipeline using predefined variables.
To do that, you can use interpolation to pass any variable. For example,
in a [multi-project pipeline](#multi-project-pipelines):
To pass information about the upstream pipeline using [predefined CI/CD variables](../variables/predefined_variables.md),
use interpolation. Save the predefined variable as a new job variable in the trigger
job, which is passed to the downstream pipeline. For example:
::Tabs
:::TabTitle Parent-child pipeline
```yaml
downstream-job:
trigger-job:
variables:
UPSTREAM_BRANCH: $CI_COMMIT_REF_NAME
trigger: my/project
PARENT_BRANCH: $CI_COMMIT_REF_NAME
trigger:
include:
- local: path/to/child-pipeline.yml
```
In this scenario, the `UPSTREAM_BRANCH` variable with the value of the upstream pipeline's
`$CI_COMMIT_REF_NAME` is passed to `downstream-job`. It is available in the
context of all downstream builds.
:::TabTitle Multi-project pipeline
```yaml
trigger-job:
variables:
UPSTREAM_BRANCH: $CI_COMMIT_REF_NAME
trigger: my-group/my-project
```
::EndTabs
The `UPSTREAM_BRANCH` variable, which contains the value of the upstream pipeline's `$CI_COMMIT_REF_NAME`
predefined CI/CD variable, is available in the downstream pipeline.
You cannot use this method to forward [job-level persisted variables](../variables/where_variables_can_be_used.md#persisted-variables)
to a downstream pipeline, as they are not available in trigger jobs.
@ -626,3 +646,10 @@ With multi-project pipelines, the trigger job fails and does not create the down
- The downstream pipeline targets a protected branch and the user does not have permission
to run pipelines against the protected branch. See [pipeline security for protected branches](index.md#pipeline-security-on-protected-branches)
for more information.
### `Ref is ambiguous`
You cannot trigger a multi-project pipeline with a tag when a branch exists with the same
name. The downstream pipeline fails to create with the error: `downstream pipeline can not be created, Ref is ambiguous`.
Only trigger multi-project pipelines with tag names that do not match branch names.
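A hypothetical configuration that hits this error, assuming `v1.0` exists in the downstream project as both a branch and a tag:

```yaml
deploy:
  trigger:
    project: my-group/my-deployment-project
    branch: v1.0  # matches both a branch and a tag, so the downstream pipeline is not created
```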

View File

@ -23,6 +23,11 @@ own advantages. These methods can be mixed and matched if needed:
- [Multi-project pipelines](downstream_pipelines.md#multi-project-pipelines): Good for larger products that require cross-project interdependencies,
like those with a [microservices architecture](https://about.gitlab.com/blog/2016/08/16/trends-in-version-control-land-microservices/).
For example, you might deploy your web application from three different GitLab projects.
With multi-project pipelines you can trigger a pipeline in each project, where each
has its own build, test, and deploy process. You can visualize the connected pipelines
in one place, including all cross-project interdependencies.
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
For an overview, see the [Multi-project pipelines demo](https://www.youtube.com/watch?v=g_PIwBM1J84).
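A minimal sketch of that setup, with placeholder project paths: the application project triggers a deployment pipeline in each of the two other projects.

```yaml
# .gitlab-ci.yml in my-group/web-app
trigger-frontend:
  stage: deploy
  trigger:
    project: my-group/frontend-deploy

trigger-backend:
  stage: deploy
  trigger:
    project: my-group/backend-deploy
```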

View File

@ -41,7 +41,7 @@ There are two places defined variables can be used. On the:
| [`services:name`](../yaml/index.md#services) | yes | Runner | The variable expansion is made by GitLab Runner's [internal variable expansion mechanism](#gitlab-runner-internal-variable-expansion-mechanism). |
| [`services`](../yaml/index.md#services) | yes | Runner | The variable expansion is made by GitLab Runner's [internal variable expansion mechanism](#gitlab-runner-internal-variable-expansion-mechanism). |
| [`tags`](../yaml/index.md#tags) | yes | GitLab | The variable expansion is made by the [internal variable expansion mechanism](#gitlab-internal-variable-expansion-mechanism) in GitLab. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/35742) in GitLab 14.1. |
| [`trigger` and `trigger:project`](../yaml/index.md#trigger) | yes | GitLab | The variable expansion is made by the [internal variable expansion mechanism](#gitlab-internal-variable-expansion-mechanism) in GitLab. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/367660) in GitLab 15.3. |
| [`trigger` and `trigger:project`](../yaml/index.md#trigger) | yes | GitLab | The variable expansion is made by the [internal variable expansion mechanism](#gitlab-internal-variable-expansion-mechanism) in GitLab. Variable expansion for `trigger:project` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/367660) in GitLab 15.3. |
| [`variables`](../yaml/index.md#variables) | yes | GitLab/Runner | The variable expansion is first made by the [internal variable expansion mechanism](#gitlab-internal-variable-expansion-mechanism) in GitLab, and then any unrecognized or unavailable variables are expanded by GitLab Runner's [internal variable expansion mechanism](#gitlab-runner-internal-variable-expansion-mechanism). |
### `config.toml` file

View File

@ -54,10 +54,12 @@ This process adds the `gitlab` user you created to a new group named `gitlab-dev
1. To add the `gitlab` user to the `gitlab-developers` group, select **Edit members**.
The `gitlab-developers` group should be listed in the leftmost box as a
selected group.
<!-- vale gitlab.BadPlurals = NO -->
1. In the **Add members to selected group(s)** section, enter `gitlab`.
1. Select **Add selected users**.
The `gitlab` user appears in the **Group member(s)**
section.
<!-- vale gitlab.BadPlurals = YES -->
![Jira added user to group](img/jira_added_user_to_group.png)

View File

@ -3,6 +3,7 @@ stage: none
group: unassigned
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
<!-- vale off -->
# Corporate contributor license agreement

View File

@ -3,6 +3,7 @@ stage: none
group: unassigned
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
<!-- vale off -->
# Developer Certificate of Origin Version 1.1

View File

@ -3,6 +3,7 @@ stage: none
group: unassigned
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
<!-- vale off -->
# Individual contributor license agreement

View File

@ -96,5 +96,4 @@ an environment.
- Deploy to different [environments](../ci/environments/index.md).
- Connect your project to a [Kubernetes cluster](../user/infrastructure/clusters/index.md).
- See how your application is used and analyze events with [Product Analytics](product_analytics.md).
- Create, toggle, and remove [Feature Flags](feature_flags.md).

View File

@ -1,45 +0,0 @@
---
stage: Analytics
group: Product Analytics
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Product Analytics **(FREE)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/225167) in GitLab 13.3 [with a flag](../administration/feature_flags.md) named `product_analytics`. Disabled by default.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available per project or for your entire instance, ask an administrator to [enable the feature flag](../administration/feature_flags.md) named `product_analytics`. On GitLab.com, this feature is not available. The feature is not ready for production use.
GitLab enables you to go from planning an application to getting feedback. You can use
Product Analytics to receive and analyze events sent from your application. This analysis
provides observability information and feedback on how people use your product.
Events are collected by a [Rails collector](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/36443) and
then processed with [Snowplow](https://github.com/snowplow/snowplow). Events are stored in a GitLab database.
## View Product Analytics
You can view the event data collected about your applications.
Prerequisite:
- You must have at least the Reporter role.
To access Product Analytics:
1. On the top bar, select **Main menu > Projects** and find your project.
1. On the left sidebar, select **Monitor > Product Analytics**.
The Product Analytics interface contains:
- An Events tab that shows the recent events and a total count.
- A Graph tab that shows graphs based on events of the last 30 days.
- A Test tab that sends a sample event payload.
- A Setup page containing the code to implement in your application.
## Rate limits
While Product Analytics is under development, it's rate-limited to
**100 events per minute** per project. This limit prevents the events table in the
database from growing too quickly.

View File

@ -310,9 +310,7 @@ for the group's projects to meet your group's needs.
[Feature flag `invite_members_group_modal`](https://gitlab.com/gitlab-org/gitlab/-/issues/352526) removed.
Similar to how you [share a project with a group](../project/members/share_project_with_groups.md),
you can share a group with another group. To invite a group, you must be a member of it. Members get direct access
to the shared group. This includes members who inherited group membership from a parent group.
you can share a group with another group. To invite a group, you must be a member of it.
To share a given group, for example, `Frontend` with another group, for example,
`Engineering`:
@ -327,7 +325,7 @@ After sharing the `Frontend` group with the `Engineering` group:
- The **Groups** tab lists the `Engineering` group.
- The **Groups** tab lists a group regardless of whether it is a public or private group.
- All members of the `Engineering` group have access to the `Frontend` group. The same access levels of the members apply up to the maximum access level selected when sharing the group.
- All direct members of the `Engineering` group have access to the `Frontend` group. Direct members of `Engineering` that gain access to the `Frontend` group keep their same access level as in `Engineering`, but up to the maximum access level selected when sharing the group. Inherited members of the `Engineering` group do not gain access to the `Frontend` group.
## Transfer a group

View File

@ -1,40 +0,0 @@
# frozen_string_literal: true
module ProductAnalytics
class CollectorApp
def call(env)
request = Rack::Request.new(env)
params = request.params
return not_found unless EventParams.has_required_params?(params)
# The product analytics feature is behind a flag and is disabled by default.
# We expect a limited number of projects to have this feature enabled in the first release.
# Since the collector has no authentication, we temporarily prevent recording events
# for projects without the feature enabled. As feature adoption increases, this
# check will be removed for better performance.
project = Project.find(params['aid'].to_i)
return not_found unless Feature.enabled?(:product_analytics, project)
# The Snowplow tracker has its own event format.
# We need to convert events to match the schema of our database.
event_params = EventParams.parse_event_params(params)
if ProductAnalyticsEvent.create(event_params)
ok
else
not_found
end
rescue ActiveRecord::InvalidForeignKey, ActiveRecord::RecordNotFound
not_found
end
def ok
[200, {}, []]
end
def not_found
[404, {}, []]
end
end
end
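Because `CollectorApp` is a plain Rack application (`#call` returns the usual status/headers/body triple), it can be exercised without the Rails router. A minimal sketch, assuming the class above is loaded and project 42 exists with the flag enabled:

```ruby
# Illustrative only: drive the collector directly with Rack::MockRequest.
require 'rack/mock'

app = ProductAnalytics::CollectorApp.new
response = Rack::MockRequest.new(app).get('/-/collector/i', params: { 'aid' => '42' })
puts response.status # 404 until the required Snowplow params and flag checks pass
```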

View File

@ -12,7 +12,6 @@ module Sidebars
add_item(error_tracking_menu_item)
add_item(alert_management_menu_item)
add_item(incidents_menu_item)
add_item(product_analytics_menu_item)
true
end
@ -101,20 +100,6 @@ module Sidebars
item_id: :incidents
)
end
def product_analytics_menu_item
if Feature.disabled?(:product_analytics, context.project) ||
!can?(context.current_user, :read_product_analytics, context.project)
return ::Sidebars::NilMenuItem.new(item_id: :product_analytics)
end
::Sidebars::MenuItem.new(
title: _('Product Analytics'),
link: project_product_analytics_path(context.project),
active_routes: { controller: :product_analytics },
item_id: :product_analytics
)
end
end
end
end

View File

@ -1729,9 +1729,6 @@ msgstr ""
msgid "A plain HTML site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features"
msgstr ""
msgid "A platform value can be web, mob or app."
msgstr ""
msgid "A project boilerplate for Salesforce App development with Salesforce Developer tools"
msgstr ""
@ -10863,9 +10860,6 @@ msgstr ""
msgid "Copy source branch name"
msgstr ""
msgid "Copy the code below to implement tracking in your application:"
msgstr ""
msgid "Copy this registration token."
msgstr ""
@ -18791,9 +18785,6 @@ msgstr ""
msgid "GraphViewType|Stage"
msgstr ""
msgid "Graphs"
msgstr ""
msgid "Gravatar"
msgstr ""
@ -23766,9 +23757,6 @@ msgstr ""
msgid "Last event"
msgstr ""
msgid "Last item before this page loaded in your browser:"
msgstr ""
msgid "Last modified"
msgstr ""
@ -27841,9 +27829,6 @@ msgstr ""
msgid "Number of events"
msgstr ""
msgid "Number of events for this project: %{total_count}."
msgstr ""
msgid "Number of files touched"
msgstr ""
@ -27868,9 +27853,6 @@ msgstr ""
msgid "October"
msgstr ""
msgid "OfSearchInADropdown|Filter"
msgstr ""
msgid "Off"
msgstr ""
@ -30881,9 +30863,6 @@ msgstr ""
msgid "Proceed"
msgstr ""
msgid "Product Analytics"
msgstr ""
msgid "ProductAnalytics|Audience"
msgstr ""
@ -36891,9 +36870,6 @@ msgstr ""
msgid "Select a template type"
msgstr ""
msgid "Select a timezone"
msgstr ""
msgid "Select all"
msgstr ""
@ -37439,9 +37415,6 @@ msgstr ""
msgid "Settings|Unable to load the merge request options settings. Try reloading the page."
msgstr ""
msgid "Setup"
msgstr ""
msgid "Severity"
msgstr ""
@ -37699,9 +37672,6 @@ msgstr ""
msgid "Showing data for workflow items completed in this date range. Date range limited to %{maxDateRange} days."
msgstr ""
msgid "Showing graphs based on events of the last %{timerange} days."
msgstr ""
msgid "Showing last %{size} of log -"
msgstr ""
@ -40811,9 +40781,6 @@ msgstr ""
msgid "Theme"
msgstr ""
msgid "There are currently no events."
msgstr ""
msgid "There are currently no mirrored repositories."
msgstr ""
@ -41654,9 +41621,6 @@ msgstr ""
msgid "This page is unavailable because you are not allowed to read information across multiple projects."
msgstr ""
msgid "This page sends a payload. Go back to the events page to see a newly created event."
msgstr ""
msgid "This pipeline makes use of a predefined CI/CD configuration enabled by %{b_open}Auto DevOps.%{b_close}"
msgstr ""

View File

@ -1,14 +0,0 @@
# frozen_string_literal: true
module QA
module Page
module Component
module UsersSelect
def select_user(element, username)
find("#{element_selector_css(element)} input").set(username)
find('.ajax-users-dropdown .user-username', text: "@#{username}").click
end
end
end
end
end
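For context, a minimal sketch of how a QA page object would have used the removed helper; the class and element name are hypothetical:

```ruby
# Hypothetical usage of the removed UsersSelect helper.
module QA
  class ExampleMembersPage < Page::Base
    include Page::Component::UsersSelect

    def add_member(username)
      select_user(:member_select_field, username) # :member_select_field is illustrative
    end
  end
end
```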

View File

@ -5,7 +5,6 @@ module QA
module Group
class Members < Page::Base
include Page::Component::InviteMembersModal
include Page::Component::UsersSelect
include Page::Component::MembersFilter
view 'app/assets/javascripts/members/components/modals/remove_member_modal.vue' do

View File

@ -74,3 +74,5 @@ module QA
end
end
end
QA::Page::Project::SubMenus::Issues.prepend_mod_with('Page::Project::SubMenus::Issues', namespace: QA)

View File

@ -90,6 +90,10 @@ module QA
# code_owner_approval_required: true
}
)
# GitHub branch protection rule "Require signed commits" is mapped to the
# "Reject unsigned commits" push rule
expect(imported_project.push_rules[:reject_unsigned_commits]).to be_truthy
end
def verify_commits_import

View File

@ -1,95 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::ProductAnalyticsController do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
before(:all) do
project.add_maintainer(user)
end
before do
sign_in(user)
stub_feature_flags(product_analytics: true)
end
describe 'GET #index' do
it 'renders index with 200 status code' do
get :index, params: project_params
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:index)
end
context 'with an anonymous user' do
before do
sign_out(user)
end
it 'redirects to sign-in page' do
get :index, params: project_params
expect(response).to redirect_to(new_user_session_path)
end
end
context 'feature flag disabled' do
before do
stub_feature_flags(product_analytics: false)
end
it 'returns not found' do
get :index, params: project_params
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
describe 'GET #test' do
it 'renders test with 200 status code' do
get :test, params: project_params
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:test)
end
end
describe 'GET #setup' do
it 'renders setup with 200 status code' do
get :setup, params: project_params
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:setup)
end
end
describe 'GET #graphs' do
it 'renders graphs with 200 status code' do
get :graphs, params: project_params
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:graphs)
end
context 'feature flag disabled' do
before do
stub_feature_flags(product_analytics: false)
end
it 'returns not found' do
get :graphs, params: project_params
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
private
def project_params(opts = {})
opts.reverse_merge(namespace_id: project.namespace, project_id: project)
end
end

View File

@ -15,7 +15,7 @@ RSpec.describe 'Database schema' do
}.with_indifferent_access.freeze
# List of columns historically missing a FK, don't add more columns
# See: https://docs.gitlab.com/ee/development/foreign_keys.html#naming-foreign-keys
# See: https://docs.gitlab.com/ee/development/database/foreign_keys.html#naming-foreign-keys
IGNORED_FK_COLUMNS = {
abuse_reports: %w[reporter_id user_id],
application_settings: %w[performance_bar_allowed_group_id slack_app_id snowplow_app_id eks_account_id eks_access_key_id],

View File

@ -64,7 +64,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures do
expect(page).not_to have_link('Metrics', href: project_metrics_dashboard_path(project))
expect(page).not_to have_link('Alerts', href: project_alert_management_index_path(project))
expect(page).not_to have_link('Error Tracking', href: project_error_tracking_index_path(project))
expect(page).not_to have_link('Product Analytics', href: project_product_analytics_path(project))
expect(page).not_to have_link('Kubernetes', href: project_clusters_path(project))
end
@ -119,7 +118,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures do
expect(page).not_to have_link('Metrics', href: project_metrics_dashboard_path(project))
expect(page).not_to have_link('Alerts', href: project_alert_management_index_path(project))
expect(page).not_to have_link('Error Tracking', href: project_error_tracking_index_path(project))
expect(page).not_to have_link('Product Analytics', href: project_product_analytics_path(project))
expect(page).not_to have_link('Kubernetes', href: project_clusters_path(project))
end
@ -135,7 +133,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures do
expect(page).to have_link('Incidents', href: project_incidents_path(project))
expect(page).to have_link('Environments', href: project_environments_path(project))
expect(page).to have_link('Error Tracking', href: project_error_tracking_index_path(project))
expect(page).to have_link('Product Analytics', href: project_product_analytics_path(project))
expect(page).not_to have_link('Alerts', href: project_alert_management_index_path(project))
expect(page).not_to have_link('Kubernetes', href: project_clusters_path(project))
@ -154,7 +151,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures do
expect(page).to have_link('Incidents', href: project_incidents_path(project))
expect(page).to have_link('Environments', href: project_environments_path(project))
expect(page).to have_link('Error Tracking', href: project_error_tracking_index_path(project))
expect(page).to have_link('Product Analytics', href: project_product_analytics_path(project))
expect(page).to have_link('Kubernetes', href: project_clusters_path(project))
end
@ -171,7 +167,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures do
expect(page).to have_link('Incidents', href: project_incidents_path(project))
expect(page).to have_link('Environments', href: project_environments_path(project))
expect(page).to have_link('Error Tracking', href: project_error_tracking_index_path(project))
expect(page).to have_link('Product Analytics', href: project_product_analytics_path(project))
expect(page).to have_link('Kubernetes', href: project_clusters_path(project))
end

View File

@ -45,7 +45,7 @@ RSpec.describe 'Pipeline Schedules', :js do
description = find_field('schedule_description').value
expect(description).to eq('pipeline schedule')
expect(page).to have_button('master')
expect(page).to have_button('UTC')
expect(page).to have_button('Select timezone')
end
it 'edits the scheduled pipeline' do
@ -164,7 +164,7 @@ RSpec.describe 'Pipeline Schedules', :js do
it 'sets defaults for timezone and target branch' do
expect(page).to have_button('master')
expect(page).to have_button('UTC')
expect(page).to have_button('Select timezone')
end
it 'creates a new scheduled pipeline' do
@ -314,8 +314,8 @@ RSpec.describe 'Pipeline Schedules', :js do
end
def select_timezone
find('.js-timezone-dropdown').click
click_link 'American Samoa'
find('[data-testid="schedule-timezone"] .dropdown-toggle').click
find("button", text: "Arizona").click
end
def select_target_branch

View File

@ -1,30 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Product Analytics > Events' do
let_it_be(:project) { create(:project_empty_repo) }
let_it_be(:user) { create(:user) }
let(:event) { create(:product_analytics_event, project: project) }
before do
project.add_maintainer(user)
sign_in(user)
end
it 'shows no events message' do
visit(project_product_analytics_path(project))
expect(page).to have_content('There are currently no events')
end
it 'shows events' do
event
visit(project_product_analytics_path(project))
expect(page).to have_content('dvce_created_tstamp')
expect(page).to have_content(event.event_id)
end
end

View File

@ -1,25 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Product Analytics > Graphs' do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
before do
project.add_maintainer(user)
sign_in(user)
end
it 'shows graphs', :js do
create(:product_analytics_event, project: project)
visit(graphs_project_product_analytics_path(project))
expect(page).to have_content('Showing graphs based on events')
expect(page).to have_content('platform')
expect(page).to have_content('os_timezone')
expect(page).to have_content('br_lang')
expect(page).to have_content('doc_charset')
end
end

View File

@ -1,19 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Product Analytics > Setup' do
let_it_be(:project) { create(:project_empty_repo) }
let_it_be(:user) { create(:user) }
before do
project.add_maintainer(user)
sign_in(user)
end
it 'shows the setup instructions' do
visit(setup_project_product_analytics_path(project))
expect(page).to have_content('Copy the code below to implement tracking in your application')
end
end

View File

@ -1,27 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Product Analytics > Test' do
let_it_be(:project) { create(:project_empty_repo) }
let_it_be(:user) { create(:user) }
before do
project.add_maintainer(user)
sign_in(user)
end
it 'says it sends a payload' do
visit(test_project_product_analytics_path(project))
expect(page).to have_content('This page sends a payload.')
end
it 'shows the last event if there is one' do
event = create(:product_analytics_event, project: project)
visit(test_project_product_analytics_path(project))
expect(page).to have_content(event.event_id)
end
end

View File

@ -1,70 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenEncryptedValues,
:migration,
schema: 20220922143634 do
it { expect(described_class).to be < Gitlab::BackgroundMigration::BatchedMigrationJob }
describe '#perform' do
let(:ci_runners) { table(:ci_runners, database: :ci) }
let(:test_worker) do
described_class.new(
start_id: 1,
end_id: 4,
batch_table: :ci_runners,
batch_column: :id,
sub_batch_size: 2,
pause_ms: 0,
connection: Ci::ApplicationRecord.connection
)
end
subject(:perform) { test_worker.perform }
before do
ci_runners.create!(id: 1, runner_type: 1, token_encrypted: 'duplicate')
ci_runners.create!(id: 2, runner_type: 1, token_encrypted: 'a-token')
ci_runners.create!(id: 3, runner_type: 1, token_encrypted: 'duplicate-2')
ci_runners.create!(id: 4, runner_type: 1, token_encrypted: nil)
ci_runners.create!(id: 5, runner_type: 1, token_encrypted: 'duplicate-2')
ci_runners.create!(id: 6, runner_type: 1, token_encrypted: 'duplicate')
ci_runners.create!(id: 7, runner_type: 1, token_encrypted: 'another-token')
ci_runners.create!(id: 8, runner_type: 1, token_encrypted: 'another-token')
end
it 'nullifies duplicate encrypted tokens', :aggregate_failures do
expect { perform }.to change { ci_runners.all.order(:id).pluck(:id, :token_encrypted).to_h }
.from(
{
1 => 'duplicate',
2 => 'a-token',
3 => 'duplicate-2',
4 => nil,
5 => 'duplicate-2',
6 => 'duplicate',
7 => 'another-token',
8 => 'another-token'
}
)
.to(
{
1 => nil,
2 => 'a-token',
3 => nil,
4 => nil,
5 => nil,
6 => nil,
7 => 'another-token',
8 => 'another-token'
}
)
expect(ci_runners.count).to eq(8)
expect(ci_runners.pluck(:token_encrypted).uniq).to match_array [
nil, 'a-token', 'another-token'
]
end
end
end
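For context, a minimal sketch of the behaviour this spec asserts; the real `ResetDuplicateCiRunnersTokenEncryptedValues` job is a batched background migration, so this body is illustrative rather than the actual implementation:

```ruby
# Illustrative only: nullify every token_encrypted value that occurs more than
# once so the affected runners have to re-register with fresh tokens.
duplicates = Ci::Runner
  .where.not(token_encrypted: nil)
  .group(:token_encrypted)
  .having('COUNT(*) > 1')
  .pluck(:token_encrypted)

Ci::Runner.where(token_encrypted: duplicates).update_all(token_encrypted: nil)
```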

View File

@ -1,70 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenValues,
:migration,
schema: 20220922143143 do
it { expect(described_class).to be < Gitlab::BackgroundMigration::BatchedMigrationJob }
describe '#perform' do
let(:ci_runners) { table(:ci_runners, database: :ci) }
let(:test_worker) do
described_class.new(
start_id: 1,
end_id: 4,
batch_table: :ci_runners,
batch_column: :id,
sub_batch_size: 2,
pause_ms: 0,
connection: Ci::ApplicationRecord.connection
)
end
subject(:perform) { test_worker.perform }
before do
ci_runners.create!(id: 1, runner_type: 1, token: 'duplicate')
ci_runners.create!(id: 2, runner_type: 1, token: 'a-token')
ci_runners.create!(id: 3, runner_type: 1, token: 'duplicate-2')
ci_runners.create!(id: 4, runner_type: 1, token: nil)
ci_runners.create!(id: 5, runner_type: 1, token: 'duplicate-2')
ci_runners.create!(id: 6, runner_type: 1, token: 'duplicate')
ci_runners.create!(id: 7, runner_type: 1, token: 'another-token')
ci_runners.create!(id: 8, runner_type: 1, token: 'another-token')
end
it 'nullifies duplicate tokens', :aggregate_failures do
expect { perform }.to change { ci_runners.all.order(:id).pluck(:id, :token).to_h }
.from(
{
1 => 'duplicate',
2 => 'a-token',
3 => 'duplicate-2',
4 => nil,
5 => 'duplicate-2',
6 => 'duplicate',
7 => 'another-token',
8 => 'another-token'
}
)
.to(
{
1 => nil,
2 => 'a-token',
3 => nil,
4 => nil,
5 => nil,
6 => nil,
7 => 'another-token',
8 => 'another-token'
}
)
expect(ci_runners.count).to eq(8)
expect(ci_runners.pluck(:token).uniq).to match_array [
nil, 'a-token', 'another-token'
]
end
end
end

View File

@ -102,19 +102,5 @@ RSpec.describe Sidebars::Projects::Menus::MonitorMenu do
it_behaves_like 'access rights checks'
end
describe 'Product Analytics' do
let(:item_id) { :product_analytics }
specify { is_expected.not_to be_nil }
describe 'when feature flag :product_analytics is disabled' do
specify do
stub_feature_flags(product_analytics: false)
is_expected.to be_nil
end
end
end
end
end

View File

@ -1,41 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'ProductAnalytics::CollectorApp throttle' do
include RackAttackSpecHelpers
include_context 'rack attack cache store'
let(:project1) { create(:project) }
let(:project2) { create(:project) }
before do
allow(ProductAnalyticsEvent).to receive(:create).and_return(true)
end
context 'per application id' do
let(:params) do
{
aid: project1.id,
eid: SecureRandom.uuid
}
end
it 'throttles the endpoint' do
# Allow requests under the rate limit.
100.times do
expect_ok { get '/-/collector/i', params: params }
end
# Ensure it's not related to the IP address
random_next_ip
# Reject requests over the limit
expect_rejection { get '/-/collector/i', params: params }
# But allow requests for a different aid
expect_ok { get '/-/collector/i', params: params.merge(aid: project2.id) }
end
end
end

View File

@ -1,58 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'ProductAnalytics::CollectorApp' do
let_it_be(:project) { create(:project) }
let(:params) { {} }
let(:raw_event) { Gitlab::Json.parse(fixture_file('product_analytics/event.json')) }
subject { get '/-/collector/i', params: params }
RSpec.shared_examples 'not found' do
it 'responds with 404' do
expect { subject }.not_to change { ProductAnalyticsEvent.count }
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'correct event params' do
let(:params) { raw_event.merge(aid: project.id) }
it 'responds with 200' do
expect { subject }.to change { ProductAnalyticsEvent.count }.by(1)
expect(response).to have_gitlab_http_status(:ok)
end
context 'feature disabled' do
before do
stub_feature_flags(product_analytics: false)
end
it_behaves_like 'not found'
end
end
context 'empty event params' do
it_behaves_like 'not found'
end
context 'invalid project id in params' do
let(:params) do
{
aid: '-1',
p: 'web',
tna: 'sp',
tv: 'js-2.14.0',
eid: SecureRandom.uuid,
duid: SecureRandom.uuid,
sid: SecureRandom.uuid
}
end
it_behaves_like 'not found'
end
end

View File

@ -17,6 +17,6 @@ RSpec.shared_context 'runners resolver setup' do
end
let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group], token: 'mnopqr', description: 'group runner', contacted_at: 2.seconds.ago) }
let_it_be(:subgroup_runner) { create(:ci_runner, :group, groups: [subgroup], token: 'mnopqr', description: 'subgroup runner', contacted_at: 1.second.ago) }
let_it_be(:subgroup_runner) { create(:ci_runner, :group, groups: [subgroup], token: '123456', description: 'subgroup runner', contacted_at: 1.second.ago) }
let_it_be(:instance_runner) { create(:ci_runner, :instance, description: 'shared runner', token: 'stuvxz', contacted_at: 2.minutes.ago, tag_list: %w(instance_runner active_runner)) }
end

View File

@ -85,8 +85,7 @@ RSpec.shared_context 'project navbar structure' do
_('Metrics'),
_('Error Tracking'),
_('Alerts'),
_('Incidents'),
_('Product Analytics')
_('Incidents')
]
},
{

View File

@ -472,24 +472,6 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
end
describe 'Product Analytics' do
it 'has a link to the product analytics page' do
render
expect(rendered).to have_link('Product Analytics', href: project_product_analytics_path(project))
end
describe 'when feature flag :product_analytics is disabled' do
it 'does not have a link to the feature flags page' do
stub_feature_flags(product_analytics: false)
render
expect(rendered).not_to have_link('Product Analytics')
end
end
end
end
describe 'Infrastructure' do