diff --git a/.rubocop_manual_todo.yml b/.rubocop_manual_todo.yml index cb181787494..ffae9094f73 100644 --- a/.rubocop_manual_todo.yml +++ b/.rubocop_manual_todo.yml @@ -2454,7 +2454,6 @@ Database/MultipleDatabases: - 'lib/gitlab/database/schema_cache_with_renamed_table.rb' - 'lib/gitlab/database/schema_migrations/context.rb' - 'lib/gitlab/database/schema_version_files.rb' - - 'lib/gitlab/database/unidirectional_copy_trigger.rb' - 'lib/gitlab/database/with_lock_retries.rb' - 'lib/gitlab/gitlab_import/importer.rb' - 'lib/gitlab/health_checks/db_check.rb' diff --git a/app/assets/javascripts/diffs/components/app.vue b/app/assets/javascripts/diffs/components/app.vue index cf0d2814136..66d06a3a1b6 100644 --- a/app/assets/javascripts/diffs/components/app.vue +++ b/app/assets/javascripts/diffs/components/app.vue @@ -570,7 +570,7 @@ export default { jumpToFile(step) { const targetIndex = this.currentDiffIndex + step; if (targetIndex >= 0 && targetIndex < this.diffFiles.length) { - this.scrollToFile(this.diffFiles[targetIndex].file_path); + this.scrollToFile({ path: this.diffFiles[targetIndex].file_path }); } }, setTreeDisplay() { diff --git a/app/assets/javascripts/diffs/components/tree_list.vue b/app/assets/javascripts/diffs/components/tree_list.vue index 41d885d3dc1..85e4199d1c1 100644 --- a/app/assets/javascripts/diffs/components/tree_list.vue +++ b/app/assets/javascripts/diffs/components/tree_list.vue @@ -98,7 +98,7 @@ export default { :file-row-component="$options.DiffFileRow" :current-diff-file-id="currentDiffFileId" @toggleTreeOpen="toggleTreeOpen" - @clickFile="scrollToFile" + @clickFile="(path) => scrollToFile({ path })" />
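The hunks above switch `scrollToFile` callers from passing a bare file path to passing an options object. A minimal sketch of the new dispatch shape, assuming a standard Vuex store — the wrapper function is illustrative, not part of the codebase. Keeping the payload an object lets later options such as `setHash`, introduced further down in this diff, ride along without touching every call site again:

```javascript
// Illustrative only: the diffs/scrollToFile action now takes an object payload.
function openDiffFile(store, filePath) {
  // before this change: store.dispatch('diffs/scrollToFile', filePath);
  store.dispatch('diffs/scrollToFile', { path: filePath });
}
```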

diff --git a/app/assets/javascripts/diffs/index.js b/app/assets/javascripts/diffs/index.js index 1b1ab59b2b4..260ebdf2141 100644 --- a/app/assets/javascripts/diffs/index.js +++ b/app/assets/javascripts/diffs/index.js @@ -138,7 +138,7 @@ export default function initDiffsApp(store) { ...mapActions('diffs', ['toggleFileFinder', 'scrollToFile']), openFile(file) { window.mrTabs.tabShown('diffs'); - this.scrollToFile(file.path); + this.scrollToFile({ path: file.path }); }, }, render(createElement) { diff --git a/app/assets/javascripts/diffs/store/actions.js b/app/assets/javascripts/diffs/store/actions.js index 5c94c6b803b..9e828450acf 100644 --- a/app/assets/javascripts/diffs/store/actions.js +++ b/app/assets/javascripts/diffs/store/actions.js @@ -518,7 +518,7 @@ export const toggleActiveFileByHash = ({ commit }, hash) => { commit(types.VIEW_DIFF_FILE, hash); }; -export const scrollToFile = ({ state, commit, getters }, path) => { +export const scrollToFile = ({ state, commit, getters }, { path, setHash = true }) => { if (!state.treeEntries[path]) return; const { fileHash } = state.treeEntries[path]; @@ -528,9 +528,11 @@ export const scrollToFile = ({ state, commit, getters }, path) => { if (getters.isVirtualScrollingEnabled) { eventHub.$emit('scrollToFileHash', fileHash); - setTimeout(() => { - window.history.replaceState(null, null, `#${fileHash}`); - }); + if (setHash) { + setTimeout(() => { + window.history.replaceState(null, null, `#${fileHash}`); + }); + } } else { document.location.hash = fileHash; diff --git a/app/assets/javascripts/issues_list/queries/iteration.fragment.graphql b/app/assets/javascripts/issues_list/queries/iteration.fragment.graphql index 78a368089a8..c094761e915 100644 --- a/app/assets/javascripts/issues_list/queries/iteration.fragment.graphql +++ b/app/assets/javascripts/issues_list/queries/iteration.fragment.graphql @@ -1,4 +1,8 @@ fragment Iteration on Iteration { id title + iterationCadence { + id + title + } } diff --git a/app/assets/javascripts/lib/utils/common_utils.js b/app/assets/javascripts/lib/utils/common_utils.js index 813fd3dbb1e..ecf04ee4879 100644 --- a/app/assets/javascripts/lib/utils/common_utils.js +++ b/app/assets/javascripts/lib/utils/common_utils.js @@ -220,16 +220,16 @@ export const scrollToElement = (element, options = {}) => { // In the previous implementation, jQuery naturally deferred this scrolling. // Unfortunately, we're quite coupled to this implementation detail now. defer(() => { - const { duration = 200, offset = 0 } = options; + const { duration = 200, offset = 0, behavior = duration ? 'smooth' : 'auto' } = options; const y = el.getBoundingClientRect().top + window.pageYOffset + offset - contentTop(); - window.scrollTo({ top: y, behavior: duration ? 
'smooth' : 'auto' }); + window.scrollTo({ top: y, behavior }); }); } }; -export const scrollToElementWithContext = (element) => { +export const scrollToElementWithContext = (element, options) => { const offsetMultiplier = -0.1; - return scrollToElement(element, { offset: window.innerHeight * offsetMultiplier }); + return scrollToElement(element, { ...options, offset: window.innerHeight * offsetMultiplier }); }; /** diff --git a/app/assets/javascripts/notes/mixins/discussion_navigation.js b/app/assets/javascripts/notes/mixins/discussion_navigation.js index 96974c4fa2d..b313d1acdf1 100644 --- a/app/assets/javascripts/notes/mixins/discussion_navigation.js +++ b/app/assets/javascripts/notes/mixins/discussion_navigation.js @@ -2,6 +2,8 @@ import { mapGetters, mapActions, mapState } from 'vuex'; import { scrollToElementWithContext, scrollToElement } from '~/lib/utils/common_utils'; import eventHub from '../event_hub'; +const isDiffsVirtualScrollingEnabled = () => window.gon?.features?.diffsVirtualScrolling; + /** * @param {string} selector * @returns {boolean} @@ -11,7 +13,9 @@ function scrollTo(selector, { withoutContext = false } = {}) { const scrollFunction = withoutContext ? scrollToElement : scrollToElementWithContext; if (el) { - scrollFunction(el); + scrollFunction(el, { + behavior: isDiffsVirtualScrollingEnabled() ? 'auto' : 'smooth', + }); return true; } @@ -81,8 +85,15 @@ function handleDiscussionJump(self, fn, discussionId = self.currentDiscussionId) const discussion = self.getDiscussion(targetId); const discussionFilePath = discussion?.diff_file?.file_path; + if (isDiffsVirtualScrollingEnabled()) { + window.location.hash = ''; + } + if (discussionFilePath) { - self.scrollToFile(discussionFilePath); + self.scrollToFile({ + path: discussionFilePath, + setHash: !isDiffsVirtualScrollingEnabled(), + }); } self.$nextTick(() => { diff --git a/app/assets/javascripts/notes/stores/actions.js b/app/assets/javascripts/notes/stores/actions.js index 7eb10f647a0..935494dc193 100644 --- a/app/assets/javascripts/notes/stores/actions.js +++ b/app/assets/javascripts/notes/stores/actions.js @@ -1,4 +1,3 @@ -/* eslint-disable @gitlab/require-string-literal-i18n-helpers */ import $ from 'jquery'; import Visibility from 'visibilityjs'; import Vue from 'vue'; @@ -621,7 +620,7 @@ export const submitSuggestion = ( const flashMessage = errorMessage || defaultMessage; createFlash({ - message: __(flashMessage), + message: flashMessage, parent: flashContainer, }); }) @@ -657,7 +656,7 @@ export const submitSuggestionBatch = ({ commit, dispatch, state }, { message, fl const flashMessage = errorMessage || defaultMessage; createFlash({ - message: __(flashMessage), + message: flashMessage, parent: flashContainer, }); }) diff --git a/app/assets/javascripts/packages_and_registries/package_registry/components/list/app.vue b/app/assets/javascripts/packages_and_registries/package_registry/components/list/app.vue index 08481ac5655..b1fe7a7021b 100644 --- a/app/assets/javascripts/packages_and_registries/package_registry/components/list/app.vue +++ b/app/assets/javascripts/packages_and_registries/package_registry/components/list/app.vue @@ -4,7 +4,7 @@ * For a complete overview of the plan please check: https://gitlab.com/gitlab-org/gitlab/-/issues/330846 * This work is behind feature flag: https://gitlab.com/gitlab-org/gitlab/-/issues/341136 */ -// import { GlEmptyState, GlLink, GlSprintf } from '@gitlab/ui'; +import { GlEmptyState, GlLink, GlSprintf } from '@gitlab/ui'; import createFlash from '~/flash'; import { 
historyReplaceState } from '~/lib/utils/common_utils'; import { s__ } from '~/locale'; @@ -15,17 +15,18 @@ import { PROJECT_RESOURCE_TYPE, GROUP_RESOURCE_TYPE, LIST_QUERY_DEBOUNCE_TIME, + GRAPHQL_PAGE_SIZE, } from '~/packages_and_registries/package_registry/constants'; import PackageTitle from './package_title.vue'; import PackageSearch from './package_search.vue'; -// import PackageList from './packages_list.vue'; +import PackageList from './packages_list.vue'; export default { components: { - // GlEmptyState, - // GlLink, - // GlSprintf, - // PackageList, + GlEmptyState, + GlLink, + GlSprintf, + PackageList, PackageTitle, PackageSearch, }, @@ -64,17 +65,24 @@ export default { groupSort: this.isGroupPage ? this.sort : undefined, packageName: this.filters?.packageName, packageType: this.filters?.packageType, + first: GRAPHQL_PAGE_SIZE, }; }, graphqlResource() { return this.isGroupPage ? GROUP_RESOURCE_TYPE : PROJECT_RESOURCE_TYPE; }, + pageInfo() { + return this.packages?.pageInfo ?? {}; + }, packagesCount() { return this.packages?.count; }, hasFilters() { return this.filters.packageName && this.filters.packageType; }, + emptySearch() { + return !this.filters.packageName && !this.filters.packageType; + }, emptyStateTitle() { return this.emptySearch ? this.$options.i18n.emptyPageTitle @@ -99,6 +107,35 @@ export default { this.sort = sort; this.filters = { ...filters }; }, + updateQuery(_, { fetchMoreResult }) { + return fetchMoreResult; + }, + fetchNextPage() { + const variables = { + ...this.queryVariables, + first: GRAPHQL_PAGE_SIZE, + last: null, + after: this.pageInfo?.endCursor, + }; + + this.$apollo.queries.packages.fetchMore({ + variables, + updateQuery: this.updateQuery, + }); + }, + fetchPreviousPage() { + const variables = { + ...this.queryVariables, + first: null, + last: GRAPHQL_PAGE_SIZE, + before: this.pageInfo?.startCursor, + }; + + this.$apollo.queries.packages.fetchMore({ + variables, + updateQuery: this.updateQuery, + }); + }, }, i18n: { widenFilters: s__('PackageRegistry|To widen your search, change or remove the filters above.'), @@ -116,7 +153,13 @@ export default { - + diff --git a/app/assets/javascripts/packages_and_registries/package_registry/components/list/packages_list.vue b/app/assets/javascripts/packages_and_registries/package_registry/components/list/packages_list.vue index 25bac687dbf..35052ab5094 100644 --- a/app/assets/javascripts/packages_and_registries/package_registry/components/list/packages_list.vue +++ b/app/assets/javascripts/packages_and_registries/package_registry/components/list/packages_list.vue @@ -1,75 +1,79 @@ @@ -95,19 +100,19 @@ export default { v-for="packageEntity in list" :key="packageEntity.id" :package-entity="packageEntity" - :package-link="packageEntity._links.web_path" - :is-group="isGroupPage" @packageToDelete="setItemToBeDeleted" /> - +
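The package registry list component above wires cursor-based (keyset) pagination through Apollo's `fetchMore`. A hedged sketch of how the page variables are shaped — `GRAPHQL_PAGE_SIZE` mirrors the constant referenced in the diff, but its value and the helper function are assumptions for illustration only:

```javascript
// Relay-style keyset pagination: "next" asks for the first N records after
// endCursor, "previous" asks for the last N records before startCursor.
const GRAPHQL_PAGE_SIZE = 20; // assumed value; the real constant lives in constants.js

function pageVariables(baseVariables, pageInfo, direction) {
  if (direction === 'next') {
    return { ...baseVariables, first: GRAPHQL_PAGE_SIZE, last: null, after: pageInfo.endCursor };
  }
  return { ...baseVariables, first: null, last: GRAPHQL_PAGE_SIZE, before: pageInfo.startCursor };
}
```

Note that the `updateQuery` handler in the diff simply returns `fetchMoreResult`, so each fetch replaces the current page instead of appending to it — a previous/next button UX rather than infinite scroll.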

- - + + diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb index f5f72250981..f663a6276cb 100644 --- a/app/models/ci/build.rb +++ b/app/models/ci/build.rb @@ -313,12 +313,6 @@ module Ci end after_transition pending: :running do |build| - unless build.update_deployment_after_transaction_commit? - Gitlab::Database.allow_cross_database_modification_within_transaction(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/338867') do - build.deployment&.run - end - end - build.run_after_commit do build.pipeline.persistent_ref.create @@ -339,35 +333,12 @@ module Ci end after_transition any => [:success] do |build| - unless build.update_deployment_after_transaction_commit? - Gitlab::Database.allow_cross_database_modification_within_transaction(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/338867') do - build.deployment&.succeed - end - end - build.run_after_commit do BuildSuccessWorker.perform_async(id) PagesWorker.perform_async(:deploy, id) if build.pages_generator? end end - after_transition any => [:failed] do |build| - next unless build.project - next unless build.deployment - - unless build.update_deployment_after_transaction_commit? - begin - Gitlab::Database.allow_cross_database_modification_within_transaction(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/338867') do - build.deployment.drop! - end - rescue StandardError => e - Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e, build_id: build.id) - end - end - - true - end - after_transition any => [:failed] do |build| next unless build.project @@ -380,25 +351,12 @@ module Ci end end - after_transition any => [:skipped, :canceled] do |build, transition| - unless build.update_deployment_after_transaction_commit? - Gitlab::Database.allow_cross_database_modification_within_transaction(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/338867') do - if transition.to_name == :skipped - build.deployment&.skip - else - build.deployment&.cancel - end - end - end - end - # Synchronize Deployment Status # Please note that the data integirty is not assured because we can't use # a database transaction due to DB decomposition. after_transition do |build, transition| next if transition.loopback? next unless build.project - next unless build.update_deployment_after_transaction_commit? build.run_after_commit do build.deployment&.sync_status_with(build) @@ -1120,12 +1078,6 @@ module Ci runner&.instance_type? end - def update_deployment_after_transaction_commit? - strong_memoize(:update_deployment_after_transaction_commit) do - Feature.enabled?(:update_deployment_after_transaction_commit, project, default_enabled: :yaml) - end - end - protected def run_status_commit_hooks! 
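The `ci/build.rb` hunks above drop the feature-flagged, in-transaction deployment updates (which needed `allow_cross_database_modification_within_transaction`); what remains is the single generic hook that queues `deployment&.sync_status_with(build)` to run after commit. A stand-in sketch of that surviving pattern — the class and callback queue below are invented for illustration, not the production model:

```ruby
# Stand-in sketch: the deployment sync is queued to run only after the
# surrounding transaction commits, never inside the state transition itself.
class BuildSketch
  def initialize(deployment)
    @deployment = deployment
    @after_commit = []
  end

  def transitioned!
    run_after_commit { @deployment&.sync_status_with(self) }
  end

  # Invoked by the caller once the surrounding transaction has committed.
  def flush_after_commit!
    @after_commit.shift.call until @after_commit.empty?
  end

  private

  def run_after_commit(&block)
    @after_commit << block
  end
end
```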
diff --git a/app/models/concerns/merge_request_reviewer_state.rb b/app/models/concerns/merge_request_reviewer_state.rb new file mode 100644 index 00000000000..b73a601bdb0 --- /dev/null +++ b/app/models/concerns/merge_request_reviewer_state.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +module MergeRequestReviewerState + extend ActiveSupport::Concern + + included do + enum state: { + unreviewed: 0, + reviewed: 1, + attention_required: 2 + } + + validates :state, + presence: true, + inclusion: { in: self.states.keys } + end +end diff --git a/app/models/merge_request_assignee.rb b/app/models/merge_request_assignee.rb index 86bf950ae19..35b2b2053bd 100644 --- a/app/models/merge_request_assignee.rb +++ b/app/models/merge_request_assignee.rb @@ -1,6 +1,8 @@ # frozen_string_literal: true class MergeRequestAssignee < ApplicationRecord + include MergeRequestReviewerState + belongs_to :merge_request, touch: true belongs_to :assignee, class_name: "User", foreign_key: :user_id, inverse_of: :merge_request_assignees diff --git a/app/models/merge_request_reviewer.rb b/app/models/merge_request_reviewer.rb index 4a1f31a7f39..3a2c8603956 100644 --- a/app/models/merge_request_reviewer.rb +++ b/app/models/merge_request_reviewer.rb @@ -1,14 +1,7 @@ # frozen_string_literal: true class MergeRequestReviewer < ApplicationRecord - enum state: { - unreviewed: 0, - reviewed: 1 - } - - validates :state, - presence: true, - inclusion: { in: MergeRequestReviewer.states.keys } + include MergeRequestReviewerState belongs_to :merge_request belongs_to :reviewer, class_name: 'User', foreign_key: :user_id, inverse_of: :merge_request_reviewers diff --git a/config/feature_flags/development/update_deployment_after_transaction_commit.yml b/config/feature_flags/development/update_deployment_after_transaction_commit.yml deleted file mode 100644 index c07622fc9b4..00000000000 --- a/config/feature_flags/development/update_deployment_after_transaction_commit.yml +++ /dev/null @@ -1,8 +0,0 @@ ---- -name: update_deployment_after_transaction_commit -introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/71450 -rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/342021 -milestone: '14.4' -type: development -group: group::release -default_enabled: false diff --git a/danger/product_intelligence/Dangerfile b/danger/product_intelligence/Dangerfile index fd6ae76b4f1..dda3fbfc589 100644 --- a/danger/product_intelligence/Dangerfile +++ b/danger/product_intelligence/Dangerfile @@ -12,12 +12,12 @@ For MR review guidelines, see the [Service Ping review guidelines](https://docs. MSG # exit if not matching files or if no product intelligence labels -matching_changed_files = product_intelligence.matching_changed_files +product_intelligence_paths_to_review = project_helper.changes_by_category[:product_intelligence] labels = product_intelligence.missing_labels -return if matching_changed_files.empty? || labels.empty? +return if product_intelligence_paths_to_review.empty? || labels.empty? 
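The new `MergeRequestReviewerState` concern above gives `MergeRequestAssignee` and `MergeRequestReviewer` the same `state` enum and validation. Assuming the standard helpers ActiveRecord generates for an enum, usage looks roughly like this (the console snippet is illustrative):

```ruby
# Illustrative console usage for
# enum state: { unreviewed: 0, reviewed: 1, attention_required: 2 }
assignee = MergeRequestAssignee.new(state: 'attention_required')

assignee.attention_required?            # => true
assignee.reviewed?                      # => false
MergeRequestAssignee.states             # => { "unreviewed" => 0, "reviewed" => 1, "attention_required" => 2 }
MergeRequestAssignee.attention_required # scope over records in that state
```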
-warn format(CHANGED_FILES_MESSAGE, changed_files: helper.markdown_list(matching_changed_files)) +warn format(CHANGED_FILES_MESSAGE, changed_files: helper.markdown_list(product_intelligence_paths_to_review)) gitlab.api.update_merge_request(gitlab.mr_json['project_id'], gitlab.mr_json['iid'], diff --git a/db/migrate/20211019153615_add_state_to_merge_request_assignees.rb b/db/migrate/20211019153615_add_state_to_merge_request_assignees.rb new file mode 100644 index 00000000000..0eb8d0989a5 --- /dev/null +++ b/db/migrate/20211019153615_add_state_to_merge_request_assignees.rb @@ -0,0 +1,8 @@ +# frozen_string_literal: true +class AddStateToMergeRequestAssignees < Gitlab::Database::Migration[1.0] + REVIEW_DEFAULT_STATE = 0 + + def change + add_column :merge_request_assignees, :state, :smallint, default: REVIEW_DEFAULT_STATE, null: false + end +end diff --git a/db/post_migrate/20210622041846_finalize_push_event_payloads_bigint_conversion.rb b/db/post_migrate/20210622041846_finalize_push_event_payloads_bigint_conversion.rb index 38b081e3e5e..f37c446f66c 100644 --- a/db/post_migrate/20210622041846_finalize_push_event_payloads_bigint_conversion.rb +++ b/db/post_migrate/20210622041846_finalize_push_event_payloads_bigint_conversion.rb @@ -49,7 +49,7 @@ class FinalizePushEventPayloadsBigintConversion < ActiveRecord::Migration[6.1] # We need to update the trigger function in order to make PostgreSQL to # regenerate the execution plan for it. This is to avoid type mismatch errors like # "type of parameter 15 (bigint) does not match that when preparing the plan (integer)" - function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:event_id, :event_id_convert_to_bigint) + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:event_id, :event_id_convert_to_bigint) execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" # Swap defaults diff --git a/db/post_migrate/20210622045705_finalize_events_bigint_conversion.rb b/db/post_migrate/20210622045705_finalize_events_bigint_conversion.rb index b99a61e8e63..715bc392c68 100644 --- a/db/post_migrate/20210622045705_finalize_events_bigint_conversion.rb +++ b/db/post_migrate/20210622045705_finalize_events_bigint_conversion.rb @@ -60,7 +60,7 @@ class FinalizeEventsBigintConversion < ActiveRecord::Migration[6.1] # We need to update the trigger function in order to make PostgreSQL to # regenerate the execution plan for it. This is to avoid type mismatch errors like # "type of parameter 15 (bigint) does not match that when preparing the plan (integer)" - function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:id, :id_convert_to_bigint) + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:id, :id_convert_to_bigint) execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" # Swap defaults diff --git a/db/post_migrate/20210701033622_finalize_ci_builds_needs_bigint_conversion.rb b/db/post_migrate/20210701033622_finalize_ci_builds_needs_bigint_conversion.rb index baee9fb3848..89a39660a04 100644 --- a/db/post_migrate/20210701033622_finalize_ci_builds_needs_bigint_conversion.rb +++ b/db/post_migrate/20210701033622_finalize_ci_builds_needs_bigint_conversion.rb @@ -47,7 +47,7 @@ class FinalizeCiBuildsNeedsBigintConversion < ActiveRecord::Migration[6.1] # We need to update the trigger function in order to make PostgreSQL to # regenerate the execution plan for it. 
This is to avoid type mismatch errors like # "type of parameter 15 (bigint) does not match that when preparing the plan (integer)" - function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:build_id, :build_id_convert_to_bigint) + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:build_id, :build_id_convert_to_bigint) execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" # Swap defaults diff --git a/db/post_migrate/20210701141346_finalize_ci_builds_stage_id_bigint_conversion.rb b/db/post_migrate/20210701141346_finalize_ci_builds_stage_id_bigint_conversion.rb index a8a9fe037ec..161366590be 100644 --- a/db/post_migrate/20210701141346_finalize_ci_builds_stage_id_bigint_conversion.rb +++ b/db/post_migrate/20210701141346_finalize_ci_builds_stage_id_bigint_conversion.rb @@ -46,7 +46,7 @@ class FinalizeCiBuildsStageIdBigintConversion < ActiveRecord::Migration[6.1] execute "ALTER TABLE #{quoted_table_name} RENAME COLUMN #{quote_column_name(temporary_name)} TO #{quote_column_name(:stage_id_convert_to_bigint)}" # Reset the function so PG drops the plan cache for the incorrect integer type - function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME) + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection) .name([:id, :stage_id], [:id_convert_to_bigint, :stage_id_convert_to_bigint]) execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" diff --git a/db/post_migrate/20210706212710_finalize_ci_job_artifacts_bigint_conversion.rb b/db/post_migrate/20210706212710_finalize_ci_job_artifacts_bigint_conversion.rb index 40977277bd1..11045348672 100644 --- a/db/post_migrate/20210706212710_finalize_ci_job_artifacts_bigint_conversion.rb +++ b/db/post_migrate/20210706212710_finalize_ci_job_artifacts_bigint_conversion.rb @@ -58,7 +58,7 @@ class FinalizeCiJobArtifactsBigintConversion < ActiveRecord::Migration[6.1] # We need to update the trigger function in order to make PostgreSQL to # regenerate the execution plan for it. 
This is to avoid type mismatch errors like # "type of parameter 15 (bigint) does not match that when preparing the plan (integer)" - function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name([:id, :job_id], [:id_convert_to_bigint, :job_id_convert_to_bigint]) + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name([:id, :job_id], [:id_convert_to_bigint, :job_id_convert_to_bigint]) execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" # Swap defaults diff --git a/db/post_migrate/20210707210916_finalize_ci_stages_bigint_conversion.rb b/db/post_migrate/20210707210916_finalize_ci_stages_bigint_conversion.rb index 2e294a0b2e3..dbefbeb26cb 100644 --- a/db/post_migrate/20210707210916_finalize_ci_stages_bigint_conversion.rb +++ b/db/post_migrate/20210707210916_finalize_ci_stages_bigint_conversion.rb @@ -53,7 +53,7 @@ class FinalizeCiStagesBigintConversion < ActiveRecord::Migration[6.1] execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{id_convert_to_bigint_name} TO #{id_name}" execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{temp_name} TO #{id_convert_to_bigint_name}" - function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:id, :id_convert_to_bigint) + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:id, :id_convert_to_bigint) execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" # Swap defaults diff --git a/db/post_migrate/20210708011426_finalize_ci_builds_metadata_bigint_conversion.rb b/db/post_migrate/20210708011426_finalize_ci_builds_metadata_bigint_conversion.rb index f75df04ba48..c2444ccbc6c 100644 --- a/db/post_migrate/20210708011426_finalize_ci_builds_metadata_bigint_conversion.rb +++ b/db/post_migrate/20210708011426_finalize_ci_builds_metadata_bigint_conversion.rb @@ -53,8 +53,8 @@ class FinalizeCiBuildsMetadataBigintConversion < Gitlab::Database::Migration[1.0 # We need to update the trigger function in order to make PostgreSQL to # regenerate the execution plan for it. 
This is to avoid type mismatch errors like # "type of parameter 15 (bigint) does not match that when preparing the plan (integer)" - execute "ALTER FUNCTION #{quote_table_name(Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:id, :id_convert_to_bigint))} RESET ALL" - execute "ALTER FUNCTION #{quote_table_name(Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:build_id, :build_id_convert_to_bigint))} RESET ALL" + execute "ALTER FUNCTION #{quote_table_name(Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:id, :id_convert_to_bigint))} RESET ALL" + execute "ALTER FUNCTION #{quote_table_name(Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:build_id, :build_id_convert_to_bigint))} RESET ALL" # Swap defaults for PK execute "ALTER SEQUENCE ci_builds_metadata_id_seq OWNED BY #{TABLE_NAME}.id" diff --git a/db/post_migrate/20210713042153_finalize_ci_sources_pipelines_bigint_conversion.rb b/db/post_migrate/20210713042153_finalize_ci_sources_pipelines_bigint_conversion.rb index 938c8c172a3..38b7852b320 100644 --- a/db/post_migrate/20210713042153_finalize_ci_sources_pipelines_bigint_conversion.rb +++ b/db/post_migrate/20210713042153_finalize_ci_sources_pipelines_bigint_conversion.rb @@ -47,7 +47,7 @@ class FinalizeCiSourcesPipelinesBigintConversion < ActiveRecord::Migration[6.1] # We need to update the trigger function in order to make PostgreSQL to # regenerate the execution plan for it. This is to avoid type mismatch errors like # "type of parameter 15 (bigint) does not match that when preparing the plan (integer)" - function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:source_job_id, :source_job_id_convert_to_bigint) + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:source_job_id, :source_job_id_convert_to_bigint) execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" # No need to swap defaults, both columns have no default value diff --git a/db/post_migrate/20210714015537_finalize_ci_build_trace_chunks_bigint_conversion.rb b/db/post_migrate/20210714015537_finalize_ci_build_trace_chunks_bigint_conversion.rb index 9195b662776..af17b35d47d 100644 --- a/db/post_migrate/20210714015537_finalize_ci_build_trace_chunks_bigint_conversion.rb +++ b/db/post_migrate/20210714015537_finalize_ci_build_trace_chunks_bigint_conversion.rb @@ -45,7 +45,7 @@ class FinalizeCiBuildTraceChunksBigintConversion < ActiveRecord::Migration[6.1] # We need to update the trigger function in order to make PostgreSQL to # regenerate the execution plan for it. 
This is to avoid type mismatch errors like # "type of parameter 15 (bigint) does not match that when preparing the plan (integer)" - function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:build_id, :build_id_convert_to_bigint) + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:build_id, :build_id_convert_to_bigint) execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" # Swap defaults diff --git a/db/post_migrate/20210722155635_finalize_convert_geo_job_artifact_deleted_events_bigint.rb b/db/post_migrate/20210722155635_finalize_convert_geo_job_artifact_deleted_events_bigint.rb index 7d3809a9dbe..5bf6a9e7911 100644 --- a/db/post_migrate/20210722155635_finalize_convert_geo_job_artifact_deleted_events_bigint.rb +++ b/db/post_migrate/20210722155635_finalize_convert_geo_job_artifact_deleted_events_bigint.rb @@ -44,7 +44,7 @@ class FinalizeConvertGeoJobArtifactDeletedEventsBigint < ActiveRecord::Migration change_column_default TABLE_NAME, COLUMN_NAME, nil change_column_default TABLE_NAME, COLUMN_NAME_CONVERTED, 0 - function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(COLUMN_NAME, COLUMN_NAME_CONVERTED) + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(COLUMN_NAME, COLUMN_NAME_CONVERTED) execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" execute "DROP INDEX #{old_index_name}" diff --git a/db/post_migrate/20210802043253_finalize_push_event_payloads_bigint_conversion_3.rb b/db/post_migrate/20210802043253_finalize_push_event_payloads_bigint_conversion_3.rb index 4b825ae72ba..6dab29e10d4 100644 --- a/db/post_migrate/20210802043253_finalize_push_event_payloads_bigint_conversion_3.rb +++ b/db/post_migrate/20210802043253_finalize_push_event_payloads_bigint_conversion_3.rb @@ -61,7 +61,7 @@ class FinalizePushEventPayloadsBigintConversion3 < ActiveRecord::Migration[6.1] # We need to update the trigger function in order to make PostgreSQL to # regenerate the execution plan for it. 
This is to avoid type mismatch errors like # "type of parameter 15 (bigint) does not match that when preparing the plan (integer)" - function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:event_id, :event_id_convert_to_bigint) + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:event_id, :event_id_convert_to_bigint) execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" # Swap defaults diff --git a/db/post_migrate/20210802131812_finalize_convert_deployments_bigint.rb b/db/post_migrate/20210802131812_finalize_convert_deployments_bigint.rb index 31afb7e0a29..067b7166cf3 100644 --- a/db/post_migrate/20210802131812_finalize_convert_deployments_bigint.rb +++ b/db/post_migrate/20210802131812_finalize_convert_deployments_bigint.rb @@ -35,7 +35,7 @@ class FinalizeConvertDeploymentsBigint < ActiveRecord::Migration[6.1] execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{quote_column_name(COLUMN_NAME_BIGINT)} TO #{quote_column_name(COLUMN_NAME)}" execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{quote_column_name(temp_name)} TO #{quote_column_name(COLUMN_NAME_BIGINT)}" - function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(COLUMN_NAME, COLUMN_NAME_BIGINT) + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(COLUMN_NAME, COLUMN_NAME_BIGINT) execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" execute "DROP INDEX #{old_index_name}" diff --git a/db/post_migrate/20210805131510_finalize_ci_builds_runner_session_bigint_conversion.rb b/db/post_migrate/20210805131510_finalize_ci_builds_runner_session_bigint_conversion.rb index 9706f0036f3..5e18450f465 100644 --- a/db/post_migrate/20210805131510_finalize_ci_builds_runner_session_bigint_conversion.rb +++ b/db/post_migrate/20210805131510_finalize_ci_builds_runner_session_bigint_conversion.rb @@ -44,7 +44,7 @@ class FinalizeCiBuildsRunnerSessionBigintConversion < ActiveRecord::Migration[6. # We need to update the trigger function in order to make PostgreSQL to # regenerate the execution plan for it. This is to avoid type mismatch errors like # "type of parameter 15 (bigint) does not match that when preparing the plan (integer)" - function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:build_id, :build_id_convert_to_bigint) + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:build_id, :build_id_convert_to_bigint) execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" # Swap defaults diff --git a/db/post_migrate/20210806131706_finalize_taggins_bigint_conversion.rb b/db/post_migrate/20210806131706_finalize_taggins_bigint_conversion.rb index bd76466d43a..5f094e48ed4 100644 --- a/db/post_migrate/20210806131706_finalize_taggins_bigint_conversion.rb +++ b/db/post_migrate/20210806131706_finalize_taggins_bigint_conversion.rb @@ -60,7 +60,7 @@ class FinalizeTagginsBigintConversion < ActiveRecord::Migration[6.1] # We need to update the trigger function in order to make PostgreSQL to # regenerate the execution plan for it. 
This is to avoid type mismatch errors like # "type of parameter 15 (bigint) does not match that when preparing the plan (integer)" - function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name([:id, :taggable_id], [:id_convert_to_bigint, :taggable_id_convert_to_bigint]) + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name([:id, :taggable_id], [:id_convert_to_bigint, :taggable_id_convert_to_bigint]) execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" # Swap defaults diff --git a/db/post_migrate/20210809143931_finalize_job_id_conversion_to_bigint_for_ci_job_artifacts.rb b/db/post_migrate/20210809143931_finalize_job_id_conversion_to_bigint_for_ci_job_artifacts.rb index bb12045b1de..5cec6fa30b8 100644 --- a/db/post_migrate/20210809143931_finalize_job_id_conversion_to_bigint_for_ci_job_artifacts.rb +++ b/db/post_migrate/20210809143931_finalize_job_id_conversion_to_bigint_for_ci_job_artifacts.rb @@ -55,7 +55,7 @@ class FinalizeJobIdConversionToBigintForCiJobArtifacts < ActiveRecord::Migration # We need to update the trigger function in order to make PostgreSQL to # regenerate the execution plan for it. This is to avoid type mismatch errors like # "type of parameter 15 (bigint) does not match that when preparing the plan (integer)" - function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name([:id, :job_id], [:id_convert_to_bigint, :job_id_convert_to_bigint]) + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name([:id, :job_id], [:id_convert_to_bigint, :job_id_convert_to_bigint]) execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" # Swap defaults diff --git a/db/post_migrate/20210907211557_finalize_ci_builds_bigint_conversion.rb b/db/post_migrate/20210907211557_finalize_ci_builds_bigint_conversion.rb index 02f98833703..7544842c196 100644 --- a/db/post_migrate/20210907211557_finalize_ci_builds_bigint_conversion.rb +++ b/db/post_migrate/20210907211557_finalize_ci_builds_bigint_conversion.rb @@ -136,7 +136,7 @@ class FinalizeCiBuildsBigintConversion < Gitlab::Database::Migration[1.0] execute "ALTER TABLE #{quoted_table_name} RENAME COLUMN #{quote_column_name(temporary_name)} TO #{quote_column_name(:id_convert_to_bigint)}" # Reset the function so PG drops the plan cache for the incorrect integer type - function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME) + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection) .name([:id, :stage_id], [:id_convert_to_bigint, :stage_id_convert_to_bigint]) execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" diff --git a/db/post_migrate/20211004110500_add_temporary_index_to_issue_metrics.rb b/db/post_migrate/20211004110500_add_temporary_index_to_issue_metrics.rb index cfc37c55121..2c2c3ac00af 100644 --- a/db/post_migrate/20211004110500_add_temporary_index_to_issue_metrics.rb +++ b/db/post_migrate/20211004110500_add_temporary_index_to_issue_metrics.rb @@ -6,7 +6,9 @@ class AddTemporaryIndexToIssueMetrics < Gitlab::Database::Migration[1.0] INDEX_NAME = 'index_issue_metrics_first_mentioned_in_commit' def up - add_concurrent_index :issue_metrics, :issue_id, where: 'EXTRACT(YEAR FROM first_mentioned_in_commit_at) > 2019', name: INDEX_NAME + condition = Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt::TmpIssueMetrics + .first_mentioned_in_commit_at_condition + 
add_concurrent_index :issue_metrics, :issue_id, where: condition, name: INDEX_NAME end def down diff --git a/db/post_migrate/20211004110927_schedule_fix_first_mentioned_in_commit_at_job.rb b/db/post_migrate/20211004110927_schedule_fix_first_mentioned_in_commit_at_job.rb index c7612db3aaf..ffdb7f80f0e 100644 --- a/db/post_migrate/20211004110927_schedule_fix_first_mentioned_in_commit_at_job.rb +++ b/db/post_migrate/20211004110927_schedule_fix_first_mentioned_in_commit_at_job.rb @@ -8,8 +8,8 @@ class ScheduleFixFirstMentionedInCommitAtJob < Gitlab::Database::Migration[1.0] disable_ddl_transaction! def up - scope = define_batchable_model('issue_metrics') - .where('EXTRACT(YEAR FROM first_mentioned_in_commit_at) > 2019') + scope = Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt::TmpIssueMetrics + .from_2020 queue_background_migration_jobs_by_range_at_intervals( scope, diff --git a/db/schema_migrations/20211019153615 b/db/schema_migrations/20211019153615 new file mode 100644 index 00000000000..7c1c7d7775f --- /dev/null +++ b/db/schema_migrations/20211019153615 @@ -0,0 +1 @@ +713efc9673bc6cda8eff4e433c3c85f0cc4b8b8ca7b5cc4308e57a6d0b0040a0 \ No newline at end of file diff --git a/db/structure.sql b/db/structure.sql index 045f800f8f7..9cd1f7b2a44 100644 --- a/db/structure.sql +++ b/db/structure.sql @@ -15706,7 +15706,8 @@ CREATE TABLE merge_request_assignees ( id bigint NOT NULL, user_id integer NOT NULL, merge_request_id integer NOT NULL, - created_at timestamp with time zone + created_at timestamp with time zone, + state smallint DEFAULT 0 NOT NULL ); CREATE SEQUENCE merge_request_assignees_id_seq diff --git a/doc/administration/instance_limits.md b/doc/administration/instance_limits.md index a2729e60545..93b77b07ccb 100644 --- a/doc/administration/instance_limits.md +++ b/doc/administration/instance_limits.md @@ -222,10 +222,12 @@ When the number exceeds the limit the page displays an alert and links to a pagi > [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/51401) in GitLab 11.10. The number of pipelines that can be created in a single push is 4. -This is to prevent the accidental creation of pipelines when `git push --all` +This limit prevents the accidental creation of pipelines when `git push --all` or `git push --mirror` is used. -Read more in the [CI documentation](../ci/yaml/index.md#processing-git-pushes). +This limit does not affect any of the updated merge request pipelines. +All updated merge requests have a pipeline created when using +[pipelines for merge requests](../ci/pipelines/merge_request_pipelines.md). ## Retention of activity history diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md index 26f61b8d04a..c38ac224fb4 100644 --- a/doc/api/graphql/reference/index.md +++ b/doc/api/graphql/reference/index.md @@ -16187,6 +16187,7 @@ State of a review of a GitLab merge request. | Value | Description | | ----- | ----------- | +| `ATTENTION_REQUIRED` | The merge request is attention_required. | | `REVIEWED` | The merge request is reviewed. | | `UNREVIEWED` | The merge request is unreviewed. | diff --git a/doc/ci/troubleshooting.md b/doc/ci/troubleshooting.md index 994e9294ff6..d3723e4b04d 100644 --- a/doc/ci/troubleshooting.md +++ b/doc/ci/troubleshooting.md @@ -246,6 +246,21 @@ If the merge train pipeline was canceled before the merge request was merged, wi - Add it to the train again. 
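The string of `Finalize…BigintConversion` migrations above all pick up the same API change: `Gitlab::Database::UnidirectionalCopyTrigger.on_table` now requires an explicit `connection:` instead of silently defaulting to `ActiveRecord::Base.connection`. A sketch of the call shape inside a migration — the table and column names are placeholders, not a real conversion:

```ruby
# Placeholder migration: only the on_table(..., connection: connection) call
# mirrors the real change in this diff.
class ExampleResetCopyTriggerFunction < Gitlab::Database::Migration[1.0]
  TABLE_NAME = :example_events

  def up
    function_name = Gitlab::Database::UnidirectionalCopyTrigger
      .on_table(TABLE_NAME, connection: connection) # keyword is now required
      .name(:id, :id_convert_to_bigint)

    # Reset the cached plan so PostgreSQL re-prepares it for the bigint column.
    execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
  end

  def down
    # No-op: the sketch only demonstrates the call shape.
  end
end
```

Passing the migration's own `connection` lines up with the `Database/MultipleDatabases` RuboCop cleanup at the top of this diff, where `unidirectional_copy_trigger.rb` is removed from the todo list.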
+### Project `group/project` not found or access denied + +This message is shown if configuration is added with [`include`](yaml/index.md#include) and one of the following: + +- The configuration refers to a project that can't be found. +- The user that is running the pipeline is unable to access any included projects. + +To resolve this, check that: + +- The path of the project is in the format `my-group/my-project` and does not include + any folders in the repository. +- The user running the pipeline is a [member of the projects](../user/project/members/index.md#add-users-to-a-project) + that contain the included files. Users must also have the [permission](../user/permissions.md#job-permissions) + to run CI/CD jobs in the same projects. + ## Pipeline warnings Pipeline configuration warnings are shown when you: diff --git a/doc/ci/yaml/index.md b/doc/ci/yaml/index.md index 6a46a792121..c9d3269f7bf 100644 --- a/doc/ci/yaml/index.md +++ b/doc/ci/yaml/index.md @@ -4741,15 +4741,6 @@ You can use [CI/CD variables](../variables/index.md) to configure how the runner You can also use variables to configure how many times a runner [attempts certain stages of job execution](../runners/configure_runners.md#job-stages-attempts). -## Processing Git pushes - -GitLab creates at most four branch and tag pipelines when -pushing multiple changes in a single `git push` invocation. - -This limitation does not affect any of the updated merge request pipelines. -All updated merge requests have a pipeline created when using -[pipelines for merge requests](../pipelines/merge_request_pipelines.md). - ## Deprecated keywords The following keywords are deprecated. diff --git a/doc/topics/git/getting_started.md b/doc/topics/git/getting_started.md index 7e04eae622f..7a836e5b659 100644 --- a/doc/topics/git/getting_started.md +++ b/doc/topics/git/getting_started.md @@ -21,6 +21,12 @@ comments: false git clone ``` +NOTE: +You can also clone GitLab projects with the +[GitLab Workflow VS Code extension](../../user/project/repository/vscode.md). +To learn more, read about the extension's +[`Git: Clone` command](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow#clone-gitlab-projects). + ## Central Repositories - To instantiate a central repository a `--bare` flag is required. diff --git a/doc/user/project/issues/sorting_issue_lists.md b/doc/user/project/issues/sorting_issue_lists.md index 8799845e99e..ebfc723280f 100644 --- a/doc/user/project/issues/sorting_issue_lists.md +++ b/doc/user/project/issues/sorting_issue_lists.md @@ -8,34 +8,59 @@ info: To determine the technical writer assigned to the Stage/Group associated w You can sort a list of issues several ways, including by: -- Blocking **(PREMIUM)** -- Created date -- Due date -- Label priority -- Last updated -- Milestone due date -- Popularity -- Priority -- Title ([introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/67234) in GitLab 14.3) -- Weight +- [Blocking issues](#sorting-by-blocking-issues) +- [Created date](#sorting-by-created-date) +- [Due date](#sorting-by-due-date) +- [Label priority](#sorting-by-label-priority) +- [Last updated](#sorting-by-last-updated) +- [Manual sorting](#manual-sorting) +- [Milestone due date](#sorting-by-milestone-due-date) +- [Popularity](#sorting-by-popularity) +- [Priority](#sorting-by-priority) +- [Title](#sorting-by-title) +- [Weight](#sorting-by-weight) The available sorting options can change based on the context of the list. 
-For sorting by issue priority, see [Label Priority](../labels.md#label-priority). -In group and project issue lists, it is also possible to order issues manually, -similar to [issue boards](../issue_board.md#ordering-issues-in-a-list). +## Sorting by blocking issues **(PREMIUM)** -## Sorting by popularity +> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/34247/) in GitLab 13.7. -When you select sorting by **Popularity**, the issue order changes to sort descending by the -number of upvotes ([awarded](../../award_emojis.md) "thumbs up" emoji) -on each issue. You can use this to identify issues that are in high demand. +When you sort by **Blocking**, the issue list changes to sort descending by the +number of issues each issue is blocking. + +## Sorting by created date + +When you sort by **Created date**, the issue list changes to sort descending by the issue +creation date. Issues created most recently are first. + +## Sorting by due date + +When you sort by **Due date**, the issue list changes to sort ascending by the issue +[due date](issue_data_and_actions.md#due-date). Issues with the earliest due date are first, +and issues without a due date are last. + +## Sorting by label priority + +When you sort by **Label priority**, the issue list changes to sort descending. +Issues with the highest priority label are first, then all other issues. + +Ties are broken arbitrarily. Only the highest prioritized label is checked, +and labels with a lower priority are ignored. +For more information, see [issue 14523](https://gitlab.com/gitlab-org/gitlab/-/issues/14523). + +To learn more about priority labels, read the [Labels](../labels.md#label-priority) documentation. + +## Sorting by last updated + +When you sort by **Last updated**, the issue list changes to sort by the time of a last +update. Issues changed the most recently are first. ## Manual sorting > [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/62178) in GitLab 12.2. -When you select **Manual** sorting, you can change +When you sort by **Manual** order, you can change the order by dragging and dropping the issues. The changed order persists, and everyone who visits the same list sees the updated issue order, with some exceptions. @@ -50,11 +75,45 @@ this ordering is maintained whenever they appear together in any list. This ordering also affects [issue boards](../issue_board.md#ordering-issues-in-a-list). Changing the order in an issue list changes the ordering in an issue board, -and vice versa. +and the other way around. -## Sorting by blocking issues **(PREMIUM)** +## Sorting by milestone due date -> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/34247/) in GitLab 13.7. +When you sort by **Milestone due date**, the issue list changes to sort ascending by the +assigned milestone due date. Issues with milestones with the earliest due date are first, +then issues with a milestone without a due date. -When you select to sort by **Blocking**, the issue list changes to sort descending by the -number of issues each issue is blocking. You can use this to determine the critical path for your backlog. +## Sorting by popularity + +When you sort by **Popularity**, the issue order changes to sort descending by the +number of upvotes ([awarded](../../award_emojis.md) a "thumbs up" emoji) +on each issue. You can use this to identify issues that are in high demand. + +## Sorting by priority + +When you sort by **Priority**, the issue order changes to sort in this order: + +1. 
Issues with milestones that have due dates, where the soonest assigned milestone is listed first. +1. Issues with milestones with no due dates. +1. Issues with a higher priority label. +1. Issues without a prioritized label. + +To learn more about priority, read the [Labels](../labels.md#label-priority) documentation. + +## Sorting by title + +> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/67234) in GitLab 14.3. + +When you sort by **Title**, the issue order changes to sort alphabetically by the issue +title in this order: + +- Emoji +- Special characters +- Numbers +- Letters: first Latin, then accented (for example, `ö`) + +## Sorting by weight + +When you sort by **Weight**, the issue list changes to sort ascending by the +[issue weight](issue_weight.md). +Issues with lowest weight are first, and issues without a weight are last. diff --git a/doc/user/project/merge_requests/reviews/index.md b/doc/user/project/merge_requests/reviews/index.md index e6f84f1c357..9b1ea833958 100644 --- a/doc/user/project/merge_requests/reviews/index.md +++ b/doc/user/project/merge_requests/reviews/index.md @@ -17,6 +17,10 @@ your merge request, and makes [code suggestions](suggestions.md) you can accept from the user interface. When your work is reviewed, your team members can choose to accept or reject it. +You can review merge requests from the GitLab interface. If you install the +[GitLab Workflow VS Code extension](../../repository/vscode.md), you can also +review merge requests in Visual Studio Code. + ## Review a merge request > - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/4213) in GitLab Premium 11.4. diff --git a/doc/user/project/repository/index.md b/doc/user/project/repository/index.md index 57cb491dd7e..bb1a55f6b2b 100644 --- a/doc/user/project/repository/index.md +++ b/doc/user/project/repository/index.md @@ -82,14 +82,19 @@ prompted to open XCode. > [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/220957) in GitLab 13.10. -All projects can be cloned into Visual Studio Code. To do that: +All projects can be cloned into Visual Studio Code from the GitLab user interface, but you +can also install the [GitLab Workflow VS Code extension](vscode.md) to clone from +Visual Studio Code: -1. From the GitLab UI, go to the project's overview page. -1. Click **Clone**. -1. Select **Clone with Visual Studio Code** under either HTTPS or SSH method. -1. Select a folder to clone the project into. +- From the GitLab interface: + 1. Go to the project's overview page. + 1. Select **Clone**. + 1. Under either the **HTTPS** or **SSH** method, select **Clone with Visual Studio Code**. + 1. Select a folder to clone the project into. -When VS Code has successfully cloned your project, it opens the folder. + After Visual Studio Code clones your project, it opens the folder. +- From Visual Studio Code, with the [extension](vscode.md) installed, use the + extension's [`Git: Clone` command](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow#clone-gitlab-projects). ## Download the code in a repository @@ -243,6 +248,10 @@ When you [rename a user](../../profile/index.md#change-your-username), - The redirects are available as long as the original path is not claimed by another group, user, or project. 
+## Related links + +- [GitLab Workflow VS Code extension](vscode.md) + ## Troubleshooting ### Repository Languages: excessive CPU use diff --git a/doc/user/snippets.md b/doc/user/snippets.md index 64da024f5ba..441c269da70 100644 --- a/doc/user/snippets.md +++ b/doc/user/snippets.md @@ -14,6 +14,9 @@ You can [comment on](#comment-on-snippets), [clone](#clone-snippets), and [syntax highlighting](#filenames), [embedding](#embed-snippets), [downloading](#download-snippets), and you can maintain your snippets with the [snippets API](../api/snippets.md). +You can create and manage your snippets through the GitLab user interface, or by +using the [GitLab Workflow VS Code extension](project/repository/vscode.md). + ![Example of snippet](img/snippet_intro_v13_11.png) GitLab provides two types of snippets: @@ -39,6 +42,8 @@ You can create snippets in multiple ways, depending on whether you want to creat - *For all other pages,* select the plus icon (**{plus-square-o}**) in the top navigation bar, then select **New snippet** from the dropdown menu. + - If you installed the [GitLab Workflow VS Code extension](project/repository/vscode.md), + use the [`Gitlab: Create snippet` command](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow#create-snippet). - **To create a project snippet**: Go to your project's page. Select the plus icon (**{plus-square-o}**), and then select **New snippet** from the **This project** section of the dropdown menu. diff --git a/lefthook.yml b/lefthook.yml index 55027f6bf59..c5c51b53fb3 100644 --- a/lefthook.yml +++ b/lefthook.yml @@ -33,6 +33,11 @@ pre-push: files: git diff --name-only --diff-filter=d $(git merge-base origin/master HEAD)..HEAD glob: '*.rb' run: REVEAL_RUBOCOP_TODO=0 bundle exec rubocop --parallel --force-exclusion {files} + graphql_docs: + tags: documentation + files: git diff --name-only --diff-filter=d $(git merge-base origin/master HEAD)..HEAD + glob: '{app/graphql/**/*.rb,ee/app/graphql/**/*.rb}' + run: bundle exec rake gitlab:graphql:check_docs vale: # Requires Vale: https://docs.gitlab.com/ee/development/documentation/#install-linters tags: documentation style files: git diff --name-only --diff-filter=d $(git merge-base origin/master HEAD)..HEAD diff --git a/lib/generators/post_deployment_migration/post_deployment_migration_generator.rb b/lib/generators/post_deployment_migration/post_deployment_migration_generator.rb index 66ee0e2440f..792c49a820d 100644 --- a/lib/generators/post_deployment_migration/post_deployment_migration_generator.rb +++ b/lib/generators/post_deployment_migration/post_deployment_migration_generator.rb @@ -1,17 +1,13 @@ # frozen_string_literal: true require 'rails/generators' +require 'rails/generators/active_record' +require 'rails/generators/active_record/migration/migration_generator' module PostDeploymentMigration - class PostDeploymentMigrationGenerator < Rails::Generators::NamedBase - def create_migration_file - timestamp = Time.now.utc.strftime('%Y%m%d%H%M%S') - - template "migration.rb", "db/post_migrate/#{timestamp}_#{file_name}.rb" - end - - def migration_class_name - file_name.camelize + class PostDeploymentMigrationGenerator < ActiveRecord::Generators::MigrationGenerator + def db_migrate_path + super.sub("migrate", "post_migrate") end end end diff --git a/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at.rb b/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at.rb index 9b278efaedd..8f785476aa0 100644 --- 
a/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at.rb +++ b/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at.rb @@ -14,7 +14,15 @@ module Gitlab self.table_name = 'issue_metrics' def self.from_2020 - where('EXTRACT(YEAR FROM first_mentioned_in_commit_at) > 2019') + where(first_mentioned_in_commit_at_condition) + end + + def self.first_mentioned_in_commit_at_condition + if columns_hash['first_mentioned_in_commit_at'].sql_type == 'timestamp without time zone' + 'EXTRACT(YEAR FROM first_mentioned_in_commit_at) > 2019' + else + "EXTRACT(YEAR FROM first_mentioned_in_commit_at at time zone 'UTC') > 2019" + end end end # rubocop: enable Style/Documentation diff --git a/lib/gitlab/content_security_policy/config_loader.rb b/lib/gitlab/content_security_policy/config_loader.rb index 1338b77630e..b31ac351f8d 100644 --- a/lib/gitlab/content_security_policy/config_loader.rb +++ b/lib/gitlab/content_security_policy/config_loader.rb @@ -33,10 +33,14 @@ module Gitlab # connect_src with 'self' includes https/wss variations of the origin, # however, safari hasn't covered this yet and we need to explicitly add # support for websocket origins until Safari catches up with the specs + if Rails.env.development? + allow_webpack_dev_server(directives) + allow_letter_opener(directives) + allow_customersdot(directives) if ENV['CUSTOMER_PORTAL_URL'].present? + end + allow_websocket_connections(directives) - allow_webpack_dev_server(directives) if Rails.env.development? allow_cdn(directives, Settings.gitlab.cdn_host) if Settings.gitlab.cdn_host.present? - allow_customersdot(directives) if Rails.env.development? && ENV['CUSTOMER_PORTAL_URL'].present? allow_sentry(directives) if Gitlab.config.sentry&.enabled && Gitlab.config.sentry&.clientside_dsn # The follow section contains workarounds to patch Safari's lack of support for CSP Level 3 @@ -127,10 +131,17 @@ module Gitlab append_to_directive(directives, 'connect_src', sentry_uri.to_s) end + def self.allow_letter_opener(directives) + append_to_directive(directives, 'frame_src', Gitlab::Utils.append_path(Gitlab.config.gitlab.url, '/rails/letter_opener/')) + end + # Using 'self' in the CSP introduces several CSP bypass opportunities # for this reason we list the URLs where GitLab frames itself instead def self.framed_gitlab_paths - ['/admin/sidekiq', '/-/speedscope/index.html'].map do |path| + # We need the version without trailing / for the sidekiq page itself + # and we also need the version with trailing / for "deeper" pages + # like /admin/sidekiq/busy + ['/admin/sidekiq', '/admin/sidekiq/', '/-/speedscope/index.html'].map do |path| Gitlab::Utils.append_path(Gitlab.config.gitlab.url, path) end end diff --git a/lib/gitlab/database/unidirectional_copy_trigger.rb b/lib/gitlab/database/unidirectional_copy_trigger.rb index 029c894a5ff..146b5cacd9e 100644 --- a/lib/gitlab/database/unidirectional_copy_trigger.rb +++ b/lib/gitlab/database/unidirectional_copy_trigger.rb @@ -3,7 +3,7 @@ module Gitlab module Database class UnidirectionalCopyTrigger - def self.on_table(table_name, connection: ActiveRecord::Base.connection) + def self.on_table(table_name, connection:) new(table_name, connection) end diff --git a/locale/gitlab.pot b/locale/gitlab.pot index f60697156a9..5135d009d75 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -25264,9 +25264,6 @@ msgstr "" msgid "Pipelines|There are currently no pipelines." msgstr "" -msgid "Pipelines|There was a problem with loading the pipeline data." 
-msgstr "" - msgid "Pipelines|There was an error fetching the pipelines. Try again in a few moments or contact your support team." msgstr "" @@ -36340,9 +36337,6 @@ msgstr "" msgid "Unable to fetch branches list, please close the form and try again" msgstr "" -msgid "Unable to fetch upstream and downstream pipelines." -msgstr "" - msgid "Unable to fetch vulnerable projects" msgstr "" diff --git a/qa/qa/specs/features/browser_ui/5_package/dependency_proxy_spec.rb b/qa/qa/specs/features/browser_ui/5_package/dependency_proxy_spec.rb index ea7f7cc1c05..ffdd17e5c7c 100644 --- a/qa/qa/specs/features/browser_ui/5_package/dependency_proxy_spec.rb +++ b/qa/qa/specs/features/browser_ui/5_package/dependency_proxy_spec.rb @@ -22,6 +22,7 @@ module QA let(:uri) { URI.parse(Runtime::Scenario.gitlab_address) } let(:gitlab_host_with_port) { "#{uri.host}:#{uri.port}" } let(:dependency_proxy_url) { "#{gitlab_host_with_port}/#{project.group.full_path}/dependency_proxy/containers" } + let(:image_sha) { 'alpine@sha256:c3d45491770c51da4ef58318e3714da686bc7165338b7ab5ac758e75c7455efb' } before do Flow::Login.sign_in @@ -63,15 +64,15 @@ module QA apk add --no-cache openssl true | openssl s_client -showcerts -connect gitlab.test:5050 > /usr/local/share/ca-certificates/gitlab.test.crt update-ca-certificates - dockerd-entrypoint.sh || exit + dockerd-entrypoint.sh || exit before_script: - apk add curl jq grep - docker login -u "$CI_DEPENDENCY_PROXY_USER" -p "$CI_DEPENDENCY_PROXY_PASSWORD" "$CI_DEPENDENCY_PROXY_SERVER" script: - - docker pull #{dependency_proxy_url}/alpine:latest + - docker pull #{dependency_proxy_url}/#{image_sha} - TOKEN=$(curl "https://auth.docker.io/token?service=registry.docker.io&scope=repository:ratelimitpreview/test:pull" | jq --raw-output .token) - 'curl --head --header "Authorization: Bearer $TOKEN" "https://registry-1.docker.io/v2/ratelimitpreview/test/manifests/latest" 2>&1' - - docker pull #{dependency_proxy_url}/alpine:latest + - docker pull #{dependency_proxy_url}/#{image_sha} - 'curl --head --header "Authorization: Bearer $TOKEN" "https://registry-1.docker.io/v2/ratelimitpreview/test/manifests/latest" 2>&1' tags: - "runner-for-#{project.name}" @@ -95,7 +96,7 @@ module QA Page::Group::Menu.perform(&:go_to_dependency_proxy) Page::Group::DependencyProxy.perform do |index| - expect(index).to have_blob_count("Contains 2 blobs of images") + expect(index).to have_blob_count("Contains 1 blobs of images") end end end diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js index 9b63f84e617..d50ac0529d6 100644 --- a/spec/frontend/diffs/components/app_spec.js +++ b/spec/frontend/diffs/components/app_spec.js @@ -388,15 +388,24 @@ describe('diffs/components/app', () => { wrapper.vm.jumpToFile(+1); - expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual(['diffs/scrollToFile', '222.js']); + expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual([ + 'diffs/scrollToFile', + { path: '222.js' }, + ]); store.state.diffs.currentDiffFileId = '222'; wrapper.vm.jumpToFile(+1); - expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual(['diffs/scrollToFile', '333.js']); + expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual([ + 'diffs/scrollToFile', + { path: '333.js' }, + ]); store.state.diffs.currentDiffFileId = '333'; wrapper.vm.jumpToFile(-1); - expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual(['diffs/scrollToFile', '222.js']); + expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual([ + 'diffs/scrollToFile', + { path: '222.js' }, + ]); 
}); it('does not jump to previous file from the first one', async () => { diff --git a/spec/frontend/diffs/components/tree_list_spec.js b/spec/frontend/diffs/components/tree_list_spec.js index f316a9fdf01..31044b0818c 100644 --- a/spec/frontend/diffs/components/tree_list_spec.js +++ b/spec/frontend/diffs/components/tree_list_spec.js @@ -113,7 +113,9 @@ describe('Diffs tree list component', () => { wrapper.find('.file-row').trigger('click'); - expect(wrapper.vm.$store.dispatch).toHaveBeenCalledWith('diffs/scrollToFile', 'app/index.js'); + expect(wrapper.vm.$store.dispatch).toHaveBeenCalledWith('diffs/scrollToFile', { + path: 'app/index.js', + }); }); it('renders as file list when renderTreeList is false', () => { diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js index 85734e05aeb..11ef3985abc 100644 --- a/spec/frontend/diffs/store/actions_spec.js +++ b/spec/frontend/diffs/store/actions_spec.js @@ -890,7 +890,7 @@ describe('DiffsStoreActions', () => { }, }; - scrollToFile({ state, commit, getters }, 'path'); + scrollToFile({ state, commit, getters }, { path: 'path' }); expect(document.location.hash).toBe('#test'); }); @@ -904,7 +904,7 @@ describe('DiffsStoreActions', () => { }, }; - scrollToFile({ state, commit, getters }, 'path'); + scrollToFile({ state, commit, getters }, { path: 'path' }); expect(commit).toHaveBeenCalledWith(types.VIEW_DIFF_FILE, 'test'); }); diff --git a/spec/frontend/lib/utils/common_utils_spec.js b/spec/frontend/lib/utils/common_utils_spec.js index f5a74ee7f09..94b1e1cbb15 100644 --- a/spec/frontend/lib/utils/common_utils_spec.js +++ b/spec/frontend/lib/utils/common_utils_spec.js @@ -279,6 +279,14 @@ describe('common_utils', () => { top: elementTopWithContext, }); }); + + it('passes through behaviour', () => { + commonUtils.scrollToElementWithContext(`#${id}`, { behavior: 'smooth' }); + expect(window.scrollTo).toHaveBeenCalledWith({ + behavior: 'smooth', + top: elementTopWithContext, + }); + }); }); }); diff --git a/spec/frontend/notes/mixins/discussion_navigation_spec.js b/spec/frontend/notes/mixins/discussion_navigation_spec.js index 6a6e47ffcc5..70a5ff5184a 100644 --- a/spec/frontend/notes/mixins/discussion_navigation_spec.js +++ b/spec/frontend/notes/mixins/discussion_navigation_spec.js @@ -1,4 +1,5 @@ import { shallowMount, createLocalVue } from '@vue/test-utils'; +import { nextTick } from 'vue'; import Vuex from 'vuex'; import { setHTMLFixture } from 'helpers/fixtures'; import createEventHub from '~/helpers/event_hub_factory'; @@ -7,12 +8,15 @@ import eventHub from '~/notes/event_hub'; import discussionNavigation from '~/notes/mixins/discussion_navigation'; import notesModule from '~/notes/stores/modules'; +let scrollToFile; const discussion = (id, index) => ({ id, resolvable: index % 2 === 0, active: true, notes: [{}], diff_discussion: true, + position: { new_line: 1, old_line: 1 }, + diff_file: { file_path: 'test.js' }, }); const createDiscussions = () => [...'abcde'].map(discussion); const createComponent = () => ({ @@ -45,6 +49,7 @@ describe('Discussion navigation mixin', () => { jest.spyOn(utils, 'scrollToElement'); expandDiscussion = jest.fn(); + scrollToFile = jest.fn(); const { actions, ...notesRest } = notesModule(); store = new Vuex.Store({ modules: { @@ -52,6 +57,10 @@ describe('Discussion navigation mixin', () => { ...notesRest, actions: { ...actions, expandDiscussion }, }, + diffs: { + namespaced: true, + actions: { scrollToFile }, + }, }, }); store.state.notes.discussions = createDiscussions(); @@ 
-136,6 +145,7 @@ describe('Discussion navigation mixin', () => { it('scrolls to element', () => { expect(utils.scrollToElement).toHaveBeenCalledWith( findDiscussion('div.discussion', expected), + { behavior: 'smooth' }, ); }); }); @@ -163,6 +173,7 @@ describe('Discussion navigation mixin', () => { expect(utils.scrollToElementWithContext).toHaveBeenCalledWith( findDiscussion('ul.notes', expected), + { behavior: 'smooth' }, ); }); }); @@ -203,10 +214,45 @@ describe('Discussion navigation mixin', () => { it('scrolls to discussion', () => { expect(utils.scrollToElement).toHaveBeenCalledWith( findDiscussion('div.discussion', expected), + { behavior: 'smooth' }, ); }); }); }); }); + + describe.each` + diffsVirtualScrolling + ${false} + ${true} + `('virtual scrolling feature is $diffsVirtualScrolling', ({ diffsVirtualScrolling }) => { + beforeEach(async () => { + window.gon = { features: { diffsVirtualScrolling } }; + + jest.spyOn(store, 'dispatch'); + + store.state.notes.currentDiscussionId = 'a'; + window.location.hash = 'test'; + wrapper.vm.jumpToNextDiscussion(); + + await nextTick(); + }); + + afterEach(() => { + window.gon = {}; + window.location.hash = ''; + }); + + it('resets location hash if diffsVirtualScrolling flag is true', () => { + expect(window.location.hash).toBe(diffsVirtualScrolling ? '' : '#test'); + }); + + it(`calls scrollToFile with setHash as ${diffsVirtualScrolling ? 'false' : 'true'}`, () => { + expect(store.dispatch).toHaveBeenCalledWith('diffs/scrollToFile', { + path: 'test.js', + setHash: !diffsVirtualScrolling, + }); + }); + }); }); }); diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/app_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/app_spec.js.snap index 1b556be5873..5af75868084 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/app_spec.js.snap +++ b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/app_spec.js.snap @@ -8,5 +8,62 @@ exports[`PackagesListApp renders 1`] = ` /> + +
+ There are no packages yet +


+      Learn how to
+      publish and share your packages
+      with GitLab.

`; diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/app_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/app_spec.js index 3958cdf21bb..45be8121de7 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/list/app_spec.js +++ b/spec/frontend/packages_and_registries/package_registry/components/list/app_spec.js @@ -2,22 +2,25 @@ import { GlEmptyState, GlSprintf, GlLink } from '@gitlab/ui'; import { createLocalVue } from '@vue/test-utils'; import VueApollo from 'vue-apollo'; +import { nextTick } from 'vue'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; import PackageListApp from '~/packages_and_registries/package_registry/components/list/app.vue'; import PackageTitle from '~/packages_and_registries/package_registry/components/list/package_title.vue'; import PackageSearch from '~/packages_and_registries/package_registry/components/list/package_search.vue'; +import OriginalPackageList from '~/packages_and_registries/package_registry/components/list/packages_list.vue'; import { PROJECT_RESOURCE_TYPE, GROUP_RESOURCE_TYPE, LIST_QUERY_DEBOUNCE_TIME, + GRAPHQL_PAGE_SIZE, } from '~/packages_and_registries/package_registry/constants'; import getPackagesQuery from '~/packages_and_registries/package_registry/graphql/queries/get_packages.query.graphql'; -import { packagesListQuery } from '../../mock_data'; +import { packagesListQuery, packageData, pagination } from '../../mock_data'; jest.mock('~/lib/utils/common_utils'); jest.mock('~/flash'); @@ -39,11 +42,19 @@ describe('PackagesListApp', () => { const PackageList = { name: 'package-list', template: '
', + props: OriginalPackageList.props, }; const GlLoadingIcon = { name: 'gl-loading-icon', template: '
loading
' }; + const searchPayload = { + sort: 'VERSION_DESC', + filters: { packageName: 'foo', packageType: 'CONAN' }, + }; + const findPackageTitle = () => wrapper.findComponent(PackageTitle); const findSearch = () => wrapper.findComponent(PackageSearch); + const findListComponent = () => wrapper.findComponent(PackageList); + const findEmptyState = () => wrapper.findComponent(GlEmptyState); const mountComponent = ({ resolver = jest.fn().mockResolvedValue(packagesListQuery()), @@ -105,25 +116,55 @@ describe('PackagesListApp', () => { const resolver = jest.fn().mockResolvedValue(packagesListQuery()); mountComponent({ resolver }); - const payload = { - sort: 'VERSION_DESC', - filters: { packageName: 'foo', packageType: 'CONAN' }, - }; - - findSearch().vm.$emit('update', payload); + findSearch().vm.$emit('update', searchPayload); await waitForDebouncedApollo(); jest.advanceTimersByTime(LIST_QUERY_DEBOUNCE_TIME); expect(resolver).toHaveBeenCalledWith( expect.objectContaining({ - groupSort: payload.sort, - ...payload.filters, + groupSort: searchPayload.sort, + ...searchPayload.filters, }), ); }); }); + describe('list component', () => { + let resolver; + + beforeEach(() => { + resolver = jest.fn().mockResolvedValue(packagesListQuery()); + mountComponent({ resolver }); + + return waitForDebouncedApollo(); + }); + + it('exists and has the right props', () => { + expect(findListComponent().props()).toMatchObject({ + list: expect.arrayContaining([expect.objectContaining({ id: packageData().id })]), + isLoading: false, + pageInfo: expect.objectContaining({ endCursor: pagination().endCursor }), + }); + }); + + it('when list emits next-page fetches the next set of records', () => { + findListComponent().vm.$emit('next-page'); + + expect(resolver).toHaveBeenCalledWith( + expect.objectContaining({ after: pagination().endCursor, first: GRAPHQL_PAGE_SIZE }), + ); + }); + + it('when list emits prev-page fetches the prev set of records', () => { + findListComponent().vm.$emit('prev-page'); + + expect(resolver).toHaveBeenCalledWith( + expect.objectContaining({ before: pagination().startCursor, last: GRAPHQL_PAGE_SIZE }), + ); + }); + }); + describe.each` type | sortType ${PROJECT_RESOURCE_TYPE} | ${'sort'} @@ -136,7 +177,7 @@ describe('PackagesListApp', () => { beforeEach(() => { provide = { ...defaultProvide, isGroupPage }; - resolver = jest.fn().mockResolvedValue(packagesListQuery(type)); + resolver = jest.fn().mockResolvedValue(packagesListQuery({ type })); mountComponent({ provide, resolver }); return waitForDebouncedApollo(); }); @@ -151,4 +192,40 @@ describe('PackagesListApp', () => { ); }); }); + + describe('empty state', () => { + beforeEach(() => { + const resolver = jest.fn().mockResolvedValue(packagesListQuery({ extend: { nodes: [] } })); + mountComponent({ resolver }); + + return waitForDebouncedApollo(); + }); + it('generate the correct empty list link', () => { + const link = findListComponent().findComponent(GlLink); + + expect(link.attributes('href')).toBe(defaultProvide.emptyListHelpUrl); + expect(link.text()).toBe('publish and share your packages'); + }); + + it('includes the right content on the default tab', () => { + expect(findEmptyState().text()).toContain(PackageListApp.i18n.emptyPageTitle); + }); + }); + + describe('filter without results', () => { + beforeEach(async () => { + mountComponent(); + + await waitForDebouncedApollo(); + + findSearch().vm.$emit('update', searchPayload); + + return nextTick(); + }); + + it('should show specific empty message', () => { + 
expect(findEmptyState().text()).toContain(PackageListApp.i18n.noResultsTitle); + expect(findEmptyState().text()).toContain(PackageListApp.i18n.widenFilters); + }); + }); }); diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js index b624e66482d..683bb58e979 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js +++ b/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js @@ -1,93 +1,86 @@ -import { GlTable, GlPagination, GlModal } from '@gitlab/ui'; -import { mount, createLocalVue } from '@vue/test-utils'; -import { last } from 'lodash'; -import Vuex from 'vuex'; -import stubChildren from 'helpers/stub_children'; -import { packageList } from 'jest/packages/mock_data'; +import { GlKeysetPagination, GlModal, GlSprintf } from '@gitlab/ui'; +import { nextTick } from 'vue'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import PackagesListRow from '~/packages/shared/components/package_list_row.vue'; import PackagesListLoader from '~/packages/shared/components/packages_list_loader.vue'; -import { TrackingActions } from '~/packages/shared/constants'; -import * as SharedUtils from '~/packages/shared/utils'; +import { + DELETE_PACKAGE_TRACKING_ACTION, + REQUEST_DELETE_PACKAGE_TRACKING_ACTION, + CANCEL_DELETE_PACKAGE_TRACKING_ACTION, +} from '~/packages_and_registries/package_registry/constants'; import PackagesList from '~/packages_and_registries/package_registry/components/list/packages_list.vue'; import Tracking from '~/tracking'; - -const localVue = createLocalVue(); -localVue.use(Vuex); +import { packageData } from '../../mock_data'; describe('packages_list', () => { let wrapper; - let store; + + const firstPackage = packageData(); + const secondPackage = { + ...packageData(), + id: 'gid://gitlab/Packages::Package/112', + name: 'second-package', + }; + + const defaultProps = { + list: [firstPackage, secondPackage], + isLoading: false, + pageInfo: {}, + }; const EmptySlotStub = { name: 'empty-slot-stub', template: '
bar
' }; - - const findPackagesListLoader = () => wrapper.find(PackagesListLoader); - const findPackageListPagination = () => wrapper.find(GlPagination); - const findPackageListDeleteModal = () => wrapper.find(GlModal); - const findEmptySlot = () => wrapper.find(EmptySlotStub); - const findPackagesListRow = () => wrapper.find(PackagesListRow); - - const createStore = (isGroupPage, packages, isLoading) => { - const state = { - isLoading, - packages, - pagination: { - perPage: 1, - total: 1, - page: 1, - }, - config: { - isGroupPage, - }, - sorting: { - orderBy: 'version', - sort: 'desc', - }, - }; - store = new Vuex.Store({ - state, - getters: { - getList: () => packages, - }, - }); - store.dispatch = jest.fn(); + const GlModalStub = { + name: GlModal.name, + template: '
', + methods: { show: jest.fn() }, }; - const mountComponent = ({ - isGroupPage = false, - packages = packageList, - isLoading = false, - ...options - } = {}) => { - createStore(isGroupPage, packages, isLoading); + const findPackagesListLoader = () => wrapper.findComponent(PackagesListLoader); + const findPackageListPagination = () => wrapper.findComponent(GlKeysetPagination); + const findPackageListDeleteModal = () => wrapper.findComponent(GlModalStub); + const findEmptySlot = () => wrapper.findComponent(EmptySlotStub); + const findPackagesListRow = () => wrapper.findComponent(PackagesListRow); - wrapper = mount(PackagesList, { - localVue, - store, + const mountComponent = (props) => { + wrapper = shallowMountExtended(PackagesList, { + propsData: { + ...defaultProps, + ...props, + }, stubs: { - ...stubChildren(PackagesList), - GlTable, - GlModal, + GlModal: GlModalStub, + GlSprintf, + }, + slots: { + 'empty-state': EmptySlotStub, }, - ...options, }); }; + beforeEach(() => { + GlModalStub.methods.show.mockReset(); + }); + afterEach(() => { wrapper.destroy(); - wrapper = null; }); describe('when is loading', () => { beforeEach(() => { - mountComponent({ - packages: [], - isLoading: true, - }); + mountComponent({ isLoading: true }); }); - it('shows skeleton loader when loading', () => { + it('shows skeleton loader', () => { expect(findPackagesListLoader().exists()).toBe(true); }); + + it('does not show the rows', () => { + expect(findPackagesListRow().exists()).toBe(false); + }); + + it('does not show the pagination', () => { + expect(findPackageListPagination().exists()).toBe(false); + }); }); describe('when is not loading', () => { @@ -95,74 +88,68 @@ describe('packages_list', () => { mountComponent(); }); - it('does not show skeleton loader when not loading', () => { + it('does not show skeleton loader', () => { expect(findPackagesListLoader().exists()).toBe(false); }); + + it('shows the rows', () => { + expect(findPackagesListRow().exists()).toBe(true); + }); }); describe('layout', () => { - beforeEach(() => { - mountComponent(); - }); - it('contains a pagination component', () => { - const sorting = findPackageListPagination(); - expect(sorting.exists()).toBe(true); + mountComponent({ pageInfo: { hasPreviousPage: true } }); + + expect(findPackageListPagination().exists()).toBe(true); }); it('contains a modal component', () => { - const sorting = findPackageListDeleteModal(); - expect(sorting.exists()).toBe(true); + mountComponent(); + + expect(findPackageListDeleteModal().exists()).toBe(true); }); }); describe('when the user can destroy the package', () => { beforeEach(() => { mountComponent(); + findPackagesListRow().vm.$emit('packageToDelete', firstPackage); + return nextTick(); }); - it('setItemToBeDeleted sets itemToBeDeleted and open the modal', () => { - const mockModalShow = jest.spyOn(wrapper.vm.$refs.packageListDeleteModal, 'show'); - const item = last(wrapper.vm.list); - - findPackagesListRow().vm.$emit('packageToDelete', item); - - return wrapper.vm.$nextTick().then(() => { - expect(wrapper.vm.itemToBeDeleted).toEqual(item); - expect(mockModalShow).toHaveBeenCalled(); - }); + it('deleting a package opens the modal', () => { + expect(findPackageListDeleteModal().text()).toContain(firstPackage.name); }); - it('deleteItemConfirmation resets itemToBeDeleted', () => { - wrapper.setData({ itemToBeDeleted: 1 }); - wrapper.vm.deleteItemConfirmation(); - expect(wrapper.vm.itemToBeDeleted).toEqual(null); + it('confirming delete empties itemsToBeDeleted', async () => { + 
findPackageListDeleteModal().vm.$emit('ok'); + + await nextTick(); + + expect(findPackageListDeleteModal().text()).not.toContain(firstPackage.name); }); - it('deleteItemConfirmation emit package:delete', () => { - const itemToBeDeleted = { id: 2 }; - wrapper.setData({ itemToBeDeleted }); - wrapper.vm.deleteItemConfirmation(); - return wrapper.vm.$nextTick(() => { - expect(wrapper.emitted('package:delete')[0]).toEqual([itemToBeDeleted]); - }); + it('confirming on the modal emits package:delete', async () => { + findPackageListDeleteModal().vm.$emit('ok'); + + await nextTick(); + + expect(wrapper.emitted('package:delete')[0]).toEqual([firstPackage]); }); - it('deleteItemCanceled resets itemToBeDeleted', () => { - wrapper.setData({ itemToBeDeleted: 1 }); - wrapper.vm.deleteItemCanceled(); - expect(wrapper.vm.itemToBeDeleted).toEqual(null); + it('cancel event resets itemToBeDeleted', async () => { + findPackageListDeleteModal().vm.$emit('cancel'); + + await nextTick(); + + expect(findPackageListDeleteModal().text()).not.toContain(firstPackage.name); }); }); describe('when the list is empty', () => { beforeEach(() => { - mountComponent({ - packages: [], - slots: { - 'empty-state': EmptySlotStub, - }, - }); + mountComponent({ list: [] }); }); it('show the empty slot', () => { @@ -171,45 +158,59 @@ describe('packages_list', () => { }); }); - describe('pagination component', () => { - let pagination; - let modelEvent; - + describe('pagination ', () => { beforeEach(() => { - mountComponent(); - pagination = findPackageListPagination(); - // retrieve the event used by v-model, a more sturdy approach than hardcoding it - modelEvent = pagination.vm.$options.model.event; + mountComponent({ pageInfo: { hasPreviousPage: true } }); }); - it('emits page:changed events when the page changes', () => { - pagination.vm.$emit(modelEvent, 2); - expect(wrapper.emitted('page:changed')).toEqual([[2]]); + it('emits prev-page events when the prev event is fired', () => { + findPackageListPagination().vm.$emit('prev'); + + expect(wrapper.emitted('prev-page')).toEqual([[]]); + }); + + it('emits next-page events when the next event is fired', () => { + findPackageListPagination().vm.$emit('next'); + + expect(wrapper.emitted('next-page')).toEqual([[]]); }); }); describe('tracking', () => { let eventSpy; - let utilSpy; - const category = 'foo'; + const category = 'UI::NpmPackages'; beforeEach(() => { - mountComponent(); eventSpy = jest.spyOn(Tracking, 'event'); - utilSpy = jest.spyOn(SharedUtils, 'packageTypeToTrackCategory').mockReturnValue(category); - wrapper.setData({ itemToBeDeleted: { package_type: 'conan' } }); + mountComponent(); + findPackagesListRow().vm.$emit('packageToDelete', firstPackage); + return nextTick(); }); - it('tracking category calls packageTypeToTrackCategory', () => { - expect(wrapper.vm.tracking.category).toBe(category); - expect(utilSpy).toHaveBeenCalledWith('conan'); - }); - - it('deleteItemConfirmation calls event', () => { - wrapper.vm.deleteItemConfirmation(); + it('requesting the delete tracks the right action', () => { expect(eventSpy).toHaveBeenCalledWith( category, - TrackingActions.DELETE_PACKAGE, + REQUEST_DELETE_PACKAGE_TRACKING_ACTION, + expect.any(Object), + ); + }); + + it('confirming delete tracks the right action', () => { + findPackageListDeleteModal().vm.$emit('ok'); + + expect(eventSpy).toHaveBeenCalledWith( + category, + DELETE_PACKAGE_TRACKING_ACTION, + expect.any(Object), + ); + }); + + it('canceling delete tracks the right action', () => { + 
findPackageListDeleteModal().vm.$emit('cancel'); + + expect(eventSpy).toHaveBeenCalledWith( + category, + CANCEL_DELETE_PACKAGE_TRACKING_ACTION, expect.any(Object), ); }); diff --git a/spec/frontend/packages_and_registries/package_registry/mock_data.js b/spec/frontend/packages_and_registries/package_registry/mock_data.js index 70fc096fa44..bacc748db81 100644 --- a/spec/frontend/packages_and_registries/package_registry/mock_data.js +++ b/spec/frontend/packages_and_registries/package_registry/mock_data.js @@ -1,3 +1,5 @@ +import capitalize from 'lodash/capitalize'; + export const packageTags = () => [ { id: 'gid://gitlab/Packages::Tag/87', name: 'bananas_9', __typename: 'PackageTag' }, { id: 'gid://gitlab/Packages::Tag/86', name: 'bananas_8', __typename: 'PackageTag' }, @@ -156,6 +158,15 @@ export const nugetMetadata = () => ({ projectUrl: 'projectUrl', }); +export const pagination = (extend) => ({ + endCursor: 'eyJpZCI6IjIwNSIsIm5hbWUiOiJteS9jb21wYW55L2FwcC9teS1hcHAifQ', + hasNextPage: true, + hasPreviousPage: true, + startCursor: 'eyJpZCI6IjI0NyIsIm5hbWUiOiJ2ZXJzaW9uX3Rlc3QxIn0', + __typename: 'PageInfo', + ...extend, +}); + export const packageDetailsQuery = (extendPackage) => ({ data: { package: { @@ -256,7 +267,7 @@ export const packageDestroyFileMutationError = () => ({ ], }); -export const packagesListQuery = (type = 'group') => ({ +export const packagesListQuery = ({ type = 'group', extend = {}, extendPagination = {} } = {}) => ({ data: { [type]: { packages: { @@ -277,9 +288,11 @@ export const packagesListQuery = (type = 'group') => ({ pipelines: { nodes: [] }, }, ], + pageInfo: pagination(extendPagination), __typename: 'PackageConnection', }, - __typename: 'Group', + ...extend, + __typename: capitalize(type), }, }, }); diff --git a/spec/frontend/pipeline_editor/components/header/pipline_editor_mini_graph_spec.js b/spec/frontend/pipeline_editor/components/header/pipline_editor_mini_graph_spec.js index a09c3d0e315..3d7c3c839da 100644 --- a/spec/frontend/pipeline_editor/components/header/pipline_editor_mini_graph_spec.js +++ b/spec/frontend/pipeline_editor/components/header/pipline_editor_mini_graph_spec.js @@ -1,58 +1,22 @@ -import { shallowMount, createLocalVue } from '@vue/test-utils'; -import VueApollo from 'vue-apollo'; -import waitForPromises from 'helpers/wait_for_promises'; -import createMockApollo from 'helpers/mock_apollo_helper'; +import { shallowMount } from '@vue/test-utils'; import PipelineEditorMiniGraph from '~/pipeline_editor/components/header/pipeline_editor_mini_graph.vue'; import PipelineMiniGraph from '~/pipelines/components/pipelines_list/pipeline_mini_graph.vue'; -import getLinkedPipelinesQuery from '~/projects/commit_box/info/graphql/queries/get_linked_pipelines.query.graphql'; -import { PIPELINE_FAILURE } from '~/pipeline_editor/constants'; -import { mockLinkedPipelines, mockProjectFullPath, mockProjectPipeline } from '../../mock_data'; - -const localVue = createLocalVue(); -localVue.use(VueApollo); +import { mockProjectPipeline } from '../../mock_data'; describe('Pipeline Status', () => { let wrapper; - let mockApollo; - let mockLinkedPipelinesQuery; - const createComponent = ({ hasStages = true, options } = {}) => { + const createComponent = ({ hasStages = true } = {}) => { wrapper = shallowMount(PipelineEditorMiniGraph, { - provide: { - dataMethod: 'graphql', - projectFullPath: mockProjectFullPath, - }, propsData: { pipeline: mockProjectPipeline({ hasStages }).pipeline, }, - ...options, - }); - }; - - const createComponentWithApollo = (hasStages = true) => { 
- const handlers = [[getLinkedPipelinesQuery, mockLinkedPipelinesQuery]]; - mockApollo = createMockApollo(handlers); - - createComponent({ - hasStages, - options: { - localVue, - apolloProvider: mockApollo, - }, }); }; const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph); - const findUpstream = () => wrapper.find('[data-testid="pipeline-editor-mini-graph-upstream"]'); - const findDownstream = () => - wrapper.find('[data-testid="pipeline-editor-mini-graph-downstream"]'); - - beforeEach(() => { - mockLinkedPipelinesQuery = jest.fn(); - }); afterEach(() => { - mockLinkedPipelinesQuery.mockReset(); wrapper.destroy(); }); @@ -75,60 +39,4 @@ describe('Pipeline Status', () => { expect(findPipelineMiniGraph().exists()).toBe(false); }); }); - - describe('when querying upstream and downstream pipelines', () => { - describe('when query succeeds', () => { - beforeEach(() => { - mockLinkedPipelinesQuery.mockResolvedValue(mockLinkedPipelines()); - createComponentWithApollo(); - }); - - it('should call the query with the correct variables', () => { - expect(mockLinkedPipelinesQuery).toHaveBeenCalledTimes(1); - expect(mockLinkedPipelinesQuery).toHaveBeenCalledWith({ - fullPath: mockProjectFullPath, - iid: mockProjectPipeline().pipeline.iid, - }); - }); - - describe('linked pipeline rendering based on given data', () => { - it.each` - hasDownstream | hasUpstream | downstreamRenderAction | upstreamRenderAction - ${true} | ${true} | ${'renders'} | ${'renders'} - ${true} | ${false} | ${'renders'} | ${'hides'} - ${false} | ${true} | ${'hides'} | ${'renders'} - ${false} | ${false} | ${'hides'} | ${'hides'} - `( - '$downstreamRenderAction downstream and $upstreamRenderAction upstream', - async ({ hasDownstream, hasUpstream }) => { - mockLinkedPipelinesQuery.mockResolvedValue( - mockLinkedPipelines({ hasDownstream, hasUpstream }), - ); - createComponentWithApollo(); - await waitForPromises(); - - expect(findUpstream().exists()).toBe(hasUpstream); - expect(findDownstream().exists()).toBe(hasDownstream); - }, - ); - }); - }); - - describe('when query fails', () => { - beforeEach(() => { - mockLinkedPipelinesQuery.mockRejectedValue(new Error()); - createComponentWithApollo(); - }); - - it('should emit an error event when query fails', async () => { - expect(wrapper.emitted('showError')).toHaveLength(1); - expect(wrapper.emitted('showError')[0]).toEqual([ - { - type: PIPELINE_FAILURE, - reasons: [wrapper.vm.$options.i18n.linkedPipelinesFetchError], - }, - ]); - }); - }); - }); }); diff --git a/spec/frontend/pipeline_editor/components/ui/pipeline_editor_messages_spec.js b/spec/frontend/pipeline_editor/components/ui/pipeline_editor_messages_spec.js index a55176ccd79..9f910ed4f9c 100644 --- a/spec/frontend/pipeline_editor/components/ui/pipeline_editor_messages_spec.js +++ b/spec/frontend/pipeline_editor/components/ui/pipeline_editor_messages_spec.js @@ -11,7 +11,6 @@ import { DEFAULT_FAILURE, DEFAULT_SUCCESS, LOAD_FAILURE_UNKNOWN, - PIPELINE_FAILURE, } from '~/pipeline_editor/constants'; beforeEach(() => { @@ -66,7 +65,6 @@ describe('Pipeline Editor messages', () => { failureType | message | expectedFailureType ${COMMIT_FAILURE} | ${'failed commit'} | ${COMMIT_FAILURE} ${LOAD_FAILURE_UNKNOWN} | ${'loading failure'} | ${LOAD_FAILURE_UNKNOWN} - ${PIPELINE_FAILURE} | ${'pipeline failure'} | ${PIPELINE_FAILURE} ${'random'} | ${'error without a specified type'} | ${DEFAULT_FAILURE} `('shows a message for $message', ({ failureType, expectedFailureType }) => { createComponent({ failureType, showFailure: 
true }); diff --git a/spec/frontend/pipeline_editor/mock_data.js b/spec/frontend/pipeline_editor/mock_data.js index 9ad604a63ee..0b0ff14486e 100644 --- a/spec/frontend/pipeline_editor/mock_data.js +++ b/spec/frontend/pipeline_editor/mock_data.js @@ -290,62 +290,6 @@ export const mockProjectPipeline = ({ hasStages = true } = {}) => { }; }; -export const mockLinkedPipelines = ({ hasDownstream = true, hasUpstream = true } = {}) => { - let upstream = null; - let downstream = { - nodes: [], - __typename: 'PipelineConnection', - }; - - if (hasDownstream) { - downstream = { - nodes: [ - { - id: 'gid://gitlab/Ci::Pipeline/612', - path: '/root/job-log-sections/-/pipelines/612', - project: { name: 'job-log-sections', __typename: 'Project' }, - detailedStatus: { - group: 'success', - icon: 'status_success', - label: 'passed', - __typename: 'DetailedStatus', - }, - __typename: 'Pipeline', - }, - ], - __typename: 'PipelineConnection', - }; - } - - if (hasUpstream) { - upstream = { - id: 'gid://gitlab/Ci::Pipeline/610', - path: '/root/trigger-downstream/-/pipelines/610', - project: { name: 'trigger-downstream', __typename: 'Project' }, - detailedStatus: { - group: 'success', - icon: 'status_success', - label: 'passed', - __typename: 'DetailedStatus', - }, - __typename: 'Pipeline', - }; - } - - return { - data: { - project: { - pipeline: { - path: '/root/ci-project/-/pipelines/790', - downstream, - upstream, - }, - __typename: 'Project', - }, - }, - }; -}; - export const mockLintResponse = { valid: true, mergedYaml: mockCiYml, diff --git a/spec/graphql/types/merge_request_review_state_enum_spec.rb b/spec/graphql/types/merge_request_review_state_enum_spec.rb index 486e1c4f502..8333f303bec 100644 --- a/spec/graphql/types/merge_request_review_state_enum_spec.rb +++ b/spec/graphql/types/merge_request_review_state_enum_spec.rb @@ -12,6 +12,10 @@ RSpec.describe GitlabSchema.types['MergeRequestReviewState'] do 'UNREVIEWED' => have_attributes( description: 'The merge request is unreviewed.', value: 'unreviewed' + ), + 'ATTENTION_REQUIRED' => have_attributes( + description: 'The merge request is attention_required.', + value: 'attention_required' ) ) end diff --git a/spec/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at_spec.rb b/spec/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at_spec.rb index d2bfa86f0d1..7f15aceca42 100644 --- a/spec/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at_spec.rb +++ b/spec/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at_spec.rb @@ -1,6 +1,7 @@ # frozen_string_literal: true require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20211004110500_add_temporary_index_to_issue_metrics.rb') RSpec.describe Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt, :migration, schema: 20211004110500 do let(:namespaces) { table(:namespaces) } @@ -99,42 +100,67 @@ RSpec.describe Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt, :migrat .perform(issue_metrics.minimum(:issue_id), issue_metrics.maximum(:issue_id)) end - it "marks successful slices as completed" do - min_issue_id = issue_metrics.minimum(:issue_id) - max_issue_id = issue_metrics.maximum(:issue_id) + shared_examples 'fixes first_mentioned_in_commit_at' do + it "marks successful slices as completed" do + min_issue_id = issue_metrics.minimum(:issue_id) + max_issue_id = issue_metrics.maximum(:issue_id) - expect(subject).to receive(:mark_job_as_succeeded).with(min_issue_id, max_issue_id) + expect(subject).to 
receive(:mark_job_as_succeeded).with(min_issue_id, max_issue_id) - subject.perform(min_issue_id, max_issue_id) - end + subject.perform(min_issue_id, max_issue_id) + end - context 'when the persisted first_mentioned_in_commit_at is later than the first commit authored_date' do - it 'updates the issue_metrics record' do - record1 = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: Time.current) - record2 = issue_metrics.create!(issue_id: issue2.id, first_mentioned_in_commit_at: Time.current) + context 'when the persisted first_mentioned_in_commit_at is later than the first commit authored_date' do + it 'updates the issue_metrics record' do + record1 = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: Time.current) + record2 = issue_metrics.create!(issue_id: issue2.id, first_mentioned_in_commit_at: Time.current) - run_migration - record1.reload - record2.reload + run_migration + record1.reload + record2.reload - expect(record1.first_mentioned_in_commit_at).to be_within(2.seconds).of(commit2.authored_date) - expect(record2.first_mentioned_in_commit_at).to be_within(2.seconds).of(commit3.authored_date) + expect(record1.first_mentioned_in_commit_at).to be_within(2.seconds).of(commit2.authored_date) + expect(record2.first_mentioned_in_commit_at).to be_within(2.seconds).of(commit3.authored_date) + end + end + + context 'when the persisted first_mentioned_in_commit_at is earlier than the first commit authored_date' do + it 'does not update the issue_metrics record' do + record = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: 20.days.ago) + + expect { run_migration }.not_to change { record.reload.first_mentioned_in_commit_at } + end + end + + context 'when the first_mentioned_in_commit_at is null' do + it 'does nothing' do + record = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: nil) + + expect { run_migration }.not_to change { record.reload.first_mentioned_in_commit_at } + end end end - context 'when the persisted first_mentioned_in_commit_at is earlier than the first commit authored_date' do - it 'does not update the issue_metrics record' do - record = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: 20.days.ago) - - expect { run_migration }.not_to change { record.reload.first_mentioned_in_commit_at } - end + describe 'running the migration when first_mentioned_in_commit_at is timestamp without time zone' do + it_behaves_like 'fixes first_mentioned_in_commit_at' end - context 'when the first_mentioned_in_commit_at is null' do - it 'does nothing' do - record = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: nil) + describe 'running the migration when first_mentioned_in_commit_at is timestamp with time zone' do + around do |example| + AddTemporaryIndexToIssueMetrics.new.down - expect { run_migration }.not_to change { record.reload.first_mentioned_in_commit_at } + ActiveRecord::Base.connection.execute "ALTER TABLE issue_metrics ALTER first_mentioned_in_commit_at type timestamp with time zone" + Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt::TmpIssueMetrics.reset_column_information + AddTemporaryIndexToIssueMetrics.new.up + + example.run + + AddTemporaryIndexToIssueMetrics.new.down + ActiveRecord::Base.connection.execute "ALTER TABLE issue_metrics ALTER first_mentioned_in_commit_at type timestamp without time zone" + Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt::TmpIssueMetrics.reset_column_information + 
AddTemporaryIndexToIssueMetrics.new.up end + + it_behaves_like 'fixes first_mentioned_in_commit_at' end end diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb index 41095ba5f09..6ed7a99aaa2 100644 --- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb +++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb @@ -99,8 +99,10 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do end context 'when CUSTOMER_PORTAL_URL is set' do + let(:customer_portal_url) { 'https://customers.example.com' } + before do - stub_env('CUSTOMER_PORTAL_URL', 'https://customers.example.com') + stub_env('CUSTOMER_PORTAL_URL', customer_portal_url) end context 'when in production' do @@ -109,7 +111,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do end it 'does not add CUSTOMER_PORTAL_URL to CSP' do - expect(directives['frame_src']).to eq("http://localhost/admin/sidekiq http://localhost/-/speedscope/index.html https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com") + expect(directives['frame_src']).to eq("http://localhost/admin/sidekiq http://localhost/admin/sidekiq/ http://localhost/-/speedscope/index.html https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com") end end @@ -119,7 +121,36 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do end it 'adds CUSTOMER_PORTAL_URL to CSP' do - expect(directives['frame_src']).to eq("http://localhost/admin/sidekiq http://localhost/-/speedscope/index.html https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com https://customers.example.com") + expect(directives['frame_src']).to eq("http://localhost/admin/sidekiq http://localhost/admin/sidekiq/ http://localhost/-/speedscope/index.html https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com http://localhost/rails/letter_opener/ https://customers.example.com") + end + end + end + + context 'letter_opener applicaiton URL' do + let(:gitlab_url) { 'http://gitlab.example.com' } + let(:letter_opener_url) { "#{gitlab_url}/rails/letter_opener/" } + + before do + stub_config_setting(url: gitlab_url) + end + + context 'when in production' do + before do + allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('production')) + end + + it 'does not add letter_opener to CSP' do + expect(directives['frame_src']).not_to include(letter_opener_url) + end + end + + context 'when in development' do + before do + allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development')) + end + + it 'adds letter_opener to CSP' do + expect(directives['frame_src']).to include(letter_opener_url) end end end @@ -129,22 +160,22 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do context 'generates URLs to be added to child-src' do it 'with insecure domain' do stub_config_setting(url: 
'http://example.com') - expect(described_class.framed_gitlab_paths).to eq(%w[http://example.com/admin/sidekiq http://example.com/-/speedscope/index.html]) + expect(described_class.framed_gitlab_paths).to eq(%w[http://example.com/admin/sidekiq http://example.com/admin/sidekiq/ http://example.com/-/speedscope/index.html]) end it 'with secure domain' do stub_config_setting(url: 'https://example.com') - expect(described_class.framed_gitlab_paths).to eq(%w[https://example.com/admin/sidekiq https://example.com/-/speedscope/index.html]) + expect(described_class.framed_gitlab_paths).to eq(%w[https://example.com/admin/sidekiq https://example.com/admin/sidekiq/ https://example.com/-/speedscope/index.html]) end it 'with custom port' do stub_config_setting(url: 'http://example.com:1234') - expect(described_class.framed_gitlab_paths).to eq(%w[http://example.com:1234/admin/sidekiq http://example.com:1234/-/speedscope/index.html]) + expect(described_class.framed_gitlab_paths).to eq(%w[http://example.com:1234/admin/sidekiq http://example.com:1234/admin/sidekiq/ http://example.com:1234/-/speedscope/index.html]) end it 'with custom port and secure domain' do stub_config_setting(url: 'https://example.com:1234') - expect(described_class.framed_gitlab_paths).to eq(%w[https://example.com:1234/admin/sidekiq https://example.com:1234/-/speedscope/index.html]) + expect(described_class.framed_gitlab_paths).to eq(%w[https://example.com:1234/admin/sidekiq https://example.com:1234/admin/sidekiq/ https://example.com:1234/-/speedscope/index.html]) end end end diff --git a/spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb b/spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb index 2955c208f16..bbddb5f1af5 100644 --- a/spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb +++ b/spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb @@ -7,7 +7,7 @@ RSpec.describe Gitlab::Database::UnidirectionalCopyTrigger do let(:table_name) { '_test_table' } let(:connection) { ActiveRecord::Base.connection } - let(:copy_trigger) { described_class.on_table(table_name) } + let(:copy_trigger) { described_class.on_table(table_name, connection: connection) } describe '#name' do context 'when a single column name is given' do diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb index e5ab00e1013..975d496a4be 100644 --- a/spec/models/ci/build_spec.rb +++ b/spec/models/ci/build_spec.rb @@ -1290,7 +1290,7 @@ RSpec.describe Ci::Build do end end - shared_examples_for 'state transition as a deployable' do + describe 'state transition as a deployable' do subject { build.send(event) } let!(:build) { create(:ci_build, :with_deployment, :start_review_app, project: project, pipeline: pipeline) } @@ -1332,6 +1332,22 @@ RSpec.describe Ci::Build do expect(deployment).to be_running end + + context 'when deployment is already running state' do + before do + build.deployment.success! + end + + it 'does not change deployment status and tracks an error' do + expect(Gitlab::ErrorTracking) + .to receive(:track_exception).with( + instance_of(Deployment::StatusSyncError), deployment_id: deployment.id, build_id: build.id) + + with_cross_database_modification_prevented do + expect { subject }.not_to change { deployment.reload.status } + end + end + end end context 'when transits to success' do @@ -1399,36 +1415,6 @@ RSpec.describe Ci::Build do end end - it_behaves_like 'state transition as a deployable' do - context 'when transits to running' do - let(:event) { :run! 
} - - context 'when deployment is already running state' do - before do - build.deployment.success! - end - - it 'does not change deployment status and tracks an error' do - expect(Gitlab::ErrorTracking) - .to receive(:track_exception).with( - instance_of(Deployment::StatusSyncError), deployment_id: deployment.id, build_id: build.id) - - with_cross_database_modification_prevented do - expect { subject }.not_to change { deployment.reload.status } - end - end - end - end - end - - context 'when update_deployment_after_transaction_commit feature flag is disabled' do - before do - stub_feature_flags(update_deployment_after_transaction_commit: false) - end - - it_behaves_like 'state transition as a deployable' - end - describe '#on_stop' do subject { build.on_stop } diff --git a/spec/models/merge_request_assignee_spec.rb b/spec/models/merge_request_assignee_spec.rb index d287392bf7f..650bef888cf 100644 --- a/spec/models/merge_request_assignee_spec.rb +++ b/spec/models/merge_request_assignee_spec.rb @@ -37,4 +37,6 @@ RSpec.describe MergeRequestAssignee do end end end + + it_behaves_like 'having unique enum values' end diff --git a/spec/models/merge_request_reviewer_spec.rb b/spec/models/merge_request_reviewer_spec.rb index 76b44abca54..1b8fd5da205 100644 --- a/spec/models/merge_request_reviewer_spec.rb +++ b/spec/models/merge_request_reviewer_spec.rb @@ -7,6 +7,8 @@ RSpec.describe MergeRequestReviewer do subject { merge_request.merge_request_reviewers.build(reviewer: create(:user)) } + it_behaves_like 'having unique enum values' + describe 'associations' do it { is_expected.to belong_to(:merge_request).class_name('MergeRequest') } it { is_expected.to belong_to(:reviewer).class_name('User').inverse_of(:merge_request_reviewers) } diff --git a/spec/support/database/prevent_cross_database_modification.rb b/spec/support/database/prevent_cross_database_modification.rb index 851f2e849e8..a7d1c3a47a2 100644 --- a/spec/support/database/prevent_cross_database_modification.rb +++ b/spec/support/database/prevent_cross_database_modification.rb @@ -102,10 +102,15 @@ module Database schemas = Database::GitlabSchema.table_schemas(all_tables) if schemas.many? - raise Database::PreventCrossDatabaseModification::CrossDatabaseModificationAcrossUnsupportedTablesError, - "Cross-database data modification of '#{schemas.to_a.join(", ")}' were detected within " \ + message = "Cross-database data modification of '#{schemas.to_a.join(", ")}' were detected within " \ "a transaction modifying the '#{all_tables.to_a.join(", ")}' tables." \ "Please refer to https://docs.gitlab.com/ee/development/database/multiple_databases.html#removing-cross-database-transactions for details on how to resolve this exception." + + if schemas.any? { |s| s.to_s.start_with?("undefined") } + message += " The gitlab_schema was undefined for one or more of the tables in this transaction. Any new tables must be added to spec/support/database/gitlab_schemas.yml ." 
+ end + + raise Database::PreventCrossDatabaseModification::CrossDatabaseModificationAcrossUnsupportedTablesError, message end end end diff --git a/spec/support_specs/database/prevent_cross_database_modification_spec.rb b/spec/support_specs/database/prevent_cross_database_modification_spec.rb index e86559bb14a..c213c461642 100644 --- a/spec/support_specs/database/prevent_cross_database_modification_spec.rb +++ b/spec/support_specs/database/prevent_cross_database_modification_spec.rb @@ -68,6 +68,17 @@ RSpec.describe 'Database::PreventCrossDatabaseModification' do end end.to raise_error /Cross-database data modification/ end + + it 'raises an error when an undefined gitlab_schema table is modified with another table' do + expect do + with_cross_database_modification_prevented do + Project.transaction do + project.touch + project.connection.execute('UPDATE foo_bars_undefined_table SET a=1 WHERE id = -1') + end + end + end.to raise_error /Cross-database data modification.*The gitlab_schema was undefined/ + end end context 'when running tests with prevent_cross_database_modification', :prevent_cross_database_modification do diff --git a/spec/tooling/danger/product_intelligence_spec.rb b/spec/tooling/danger/product_intelligence_spec.rb index 5fd44ef5de0..3ba4f062856 100644 --- a/spec/tooling/danger/product_intelligence_spec.rb +++ b/spec/tooling/danger/product_intelligence_spec.rb @@ -68,71 +68,4 @@ RSpec.describe Tooling::Danger::ProductIntelligence do it { is_expected.to be_empty } end end - - describe '#matching_changed_files' do - subject { product_intelligence.matching_changed_files } - - let(:changed_files) do - [ - 'dashboard/todos_controller.rb', - 'components/welcome.vue', - 'admin/groups/_form.html.haml' - ] - end - - context 'with snowplow files changed' do - context 'when vue file changed' do - let(:changed_lines) { ['+data-track-action'] } - - it { is_expected.to match_array(['components/welcome.vue']) } - end - - context 'when haml file changed' do - let(:changed_lines) { ['+ data: { track_label:'] } - - it { is_expected.to match_array(['admin/groups/_form.html.haml']) } - end - - context 'when ruby file changed' do - let(:changed_lines) { ['+ Gitlab::Tracking.event'] } - let(:changed_files) { ['dashboard/todos_controller.rb', 'admin/groups/_form.html.haml'] } - - it { is_expected.to match_array(['dashboard/todos_controller.rb']) } - end - end - - context 'with metrics files changed' do - let(:changed_files) { ['config/metrics/counts_7d/test_metric.yml', 'ee/config/metrics/counts_7d/ee_metric.yml'] } - - it { is_expected.to match_array(changed_files) } - end - - context 'with metrics files not changed' do - it { is_expected.to be_empty } - end - - context 'with tracking files changed' do - let(:changed_files) do - [ - 'lib/gitlab/tracking.rb', - 'spec/lib/gitlab/tracking_spec.rb', - 'app/helpers/tracking_helper.rb' - ] - end - - it { is_expected.to match_array(changed_files) } - end - - context 'with usage_data files changed' do - let(:changed_files) do - [ - 'doc/api/usage_data.md', - 'ee/lib/ee/gitlab/usage_data.rb', - 'spec/lib/gitlab/usage_data_spec.rb' - ] - end - - it { is_expected.to match_array(changed_files) } - end - end end diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb index 5edd9e54cc5..ae277c57236 100644 --- a/spec/tooling/danger/project_helper_spec.rb +++ b/spec/tooling/danger/project_helper_spec.rb @@ -40,7 +40,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do using RSpec::Parameterized::TableSyntax before 
do - allow(fake_git).to receive(:diff_for_file).with('usage_data.rb') { double(:diff, patch: "+ count(User.active)") } + allow(fake_git).to receive(:diff_for_file).with(instance_of(String)) { double(:diff, patch: "+ count(User.active)") } end where(:path, :expected_categories) do @@ -189,6 +189,10 @@ RSpec.describe Tooling::Danger::ProjectHelper do 'spec/frontend/tracking/foo.js' | [:frontend, :product_intelligence] 'spec/frontend/tracking_spec.js' | [:frontend, :product_intelligence] 'lib/gitlab/usage_database/foo.rb' | [:backend] + 'config/metrics/counts_7d/test_metric.yml' | [:product_intelligence] + 'config/metrics/schema.json' | [:product_intelligence] + 'doc/api/usage_data.md' | [:product_intelligence] + 'spec/lib/gitlab/usage_data_spec.rb' | [:product_intelligence] end with_them do @@ -199,6 +203,9 @@ RSpec.describe Tooling::Danger::ProjectHelper do context 'having specific changes' do where(:expected_categories, :patch, :changed_files) do + [:product_intelligence] | '+data-track-action' | ['components/welcome.vue'] + [:product_intelligence] | '+ data: { track_label:' | ['admin/groups/_form.html.haml'] + [:product_intelligence] | '+ Gitlab::Tracking.event' | ['dashboard/todos_controller.rb', 'admin/groups/_form.html.haml'] [:database, :backend, :product_intelligence] | '+ count(User.active)' | ['usage_data.rb', 'lib/gitlab/usage_data.rb', 'ee/lib/ee/gitlab/usage_data.rb'] [:database, :backend, :product_intelligence] | '+ estimate_batch_distinct_count(User.active)' | ['usage_data.rb'] [:backend, :product_intelligence] | '+ alt_usage_data(User.active)' | ['lib/gitlab/usage_data.rb'] diff --git a/tooling/danger/product_intelligence.rb b/tooling/danger/product_intelligence.rb index 848f99eeff5..f56353cae61 100644 --- a/tooling/danger/product_intelligence.rb +++ b/tooling/danger/product_intelligence.rb @@ -9,26 +9,6 @@ module Tooling 'product intelligence::review pending' ].freeze - TRACKING_FILES = [ - 'lib/gitlab/tracking.rb', - 'spec/lib/gitlab/tracking_spec.rb', - 'app/helpers/tracking_helper.rb', - 'spec/helpers/tracking_helper_spec.rb', - 'app/assets/javascripts/tracking/index.js', - 'app/assets/javascripts/tracking/constants.js', - 'app/assets/javascripts/tracking/get_standard_context.js', - 'spec/frontend/tracking/get_standard_context_spec.js', - 'spec/frontend/tracking_spec.js', - 'generator_templates/usage_metric_definition/metric_definition.yml', - 'lib/generators/gitlab/usage_metric/usage_metric_generator.rb', - 'lib/generators/gitlab/usage_metric_definition_generator.rb', - 'lib/generators/gitlab/usage_metric_definition/redis_hll_generator.rb', - 'spec/lib/generators/gitlab/usage_metric_generator_spec.rb', - 'spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb', - 'spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb', - 'config/metrics/schema.json' - ].freeze - def missing_labels return [] if !helper.ci? 
|| helper.mr_has_labels?('growth experiment') @@ -38,43 +18,6 @@ module Tooling labels end - - def matching_changed_files - tracking_changed_files = all_changed_files & TRACKING_FILES - usage_data_changed_files = all_changed_files.grep(%r{(usage_data)}) - - usage_data_changed_files + tracking_changed_files + metrics_changed_files + snowplow_changed_files - end - - private - - def all_changed_files - helper.all_changed_files - end - - def metrics_changed_files - all_changed_files.grep(%r{((ee/)?config/metrics/.*\.yml)}) - end - - def matching_files?(file, extension:, pattern:) - return unless file.end_with?(extension) - - helper.changed_lines(file).grep(pattern).any? - end - - def snowplow_changed_files - js_patterns = Regexp.union( - 'Tracking.event', - /\btrack\(/, - 'data-track-action' - ) - all_changed_files.select do |file| - matching_files?(file, extension: '.rb', pattern: %r{Gitlab::Tracking\.(event|enabled\?|options)$}) || - matching_files?(file, extension: '.js', pattern: js_patterns) || - matching_files?(file, extension: '.vue', pattern: js_patterns) || - matching_files?(file, extension: '.haml', pattern: %r{data: \{ track}) - end - end end end end diff --git a/tooling/danger/project_helper.rb b/tooling/danger/project_helper.rb index c552a75bba8..1eac7380b34 100644 --- a/tooling/danger/project_helper.rb +++ b/tooling/danger/project_helper.rb @@ -38,6 +38,8 @@ module Tooling %r{\A((ee|jh)/)?config/feature_flags/} => :feature_flag, + %r{doc/api/usage_data.md} => [:product_intelligence], + %r{\Adoc/.*(\.(md|png|gif|jpg|yml))\z} => :docs, %r{\A(CONTRIBUTING|LICENSE|MAINTENANCE|PHILOSOPHY|PROCESS|README)(\.md)?\z} => :docs, %r{\Adata/whats_new/} => :docs, @@ -100,6 +102,7 @@ module Tooling %r{\A((ee|jh)/)?spec/support/shared_contexts/features/} => :test, %r{\A((ee|jh)/)?spec/support/helpers/features/} => :test, + %r{\A((spec/)?lib/generators/gitlab/usage_metric_)} => [:product_intelligence], %r{\A((ee|jh)/)?lib/gitlab/usage_data_counters/.*\.yml\z} => [:product_intelligence], %r{\A((ee|jh)/)?config/metrics/((.*\.yml)|(schema\.json))\z} => [:product_intelligence], %r{\A((ee|jh)/)?lib/gitlab/usage_data(_counters)?(/|\.rb)} => [:backend, :product_intelligence], @@ -108,9 +111,16 @@ module Tooling spec/lib/gitlab/tracking_spec\.rb | app/helpers/tracking_helper\.rb | spec/helpers/tracking_helper_spec\.rb | + (spec/)?lib/generators/gitlab/usage_metric_\S+ | + (spec/)?lib/generators/gitlab/usage_metric_definition/redis_hll_generator(_spec)?\.rb | lib/generators/rails/usage_metric_definition_generator\.rb | spec/lib/generators/usage_metric_definition_generator_spec\.rb | generator_templates/usage_metric_definition/metric_definition\.yml)\z}x => [:backend, :product_intelligence], + %r{gitlab/usage_data(_spec)?\.rb} => [:product_intelligence], + [%r{\.haml\z}, %r{data: \{ track}] => [:product_intelligence], + [%r{\.(rb|haml)\z}, %r{Gitlab::Tracking\.(event|enabled\?|options)$}] => [:product_intelligence], + [%r{\.(vue|js)\z}, %r{(Tracking.event|/\btrack\(/|data-track-action)}] => [:product_intelligence], + %r{\A((ee|jh)/)?app/(?!assets|views)[^/]+} => :backend, %r{\A((ee|jh)/)?(bin|config|generator_templates|lib|rubocop)/} => :backend, %r{\A((ee|jh)/)?spec/migrations} => :database,
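The tooling/danger/project_helper.rb hunk above replaces the product intelligence plugin's own file matching with CATEGORIES entries that pair a path regex with a changed-line regex, so a file only picks up :product_intelligence when both its path and its diff content match. Below is a minimal, illustrative sketch of that two-pattern idea, using a hypothetical categories_for helper (not the real ProjectHelper API) and a few of the patterns added in the diff:

# Illustrative only: a simplified stand-in for how [path_regex, content_regex] pairs
# could be evaluated against a changed file; the real logic lives in
# tooling/danger/project_helper.rb and is not reproduced here.
CATEGORY_PATTERNS = {
  %r{doc/api/usage_data\.md} => [:product_intelligence],
  [%r{\.haml\z}, %r{data: \{ track}] => [:product_intelligence],
  [%r{\.(rb|haml)\z}, %r{Gitlab::Tracking\.(event|enabled\?|options)$}] => [:product_intelligence]
}.freeze

def categories_for(path, changed_lines)
  CATEGORY_PATTERNS.flat_map do |key, categories|
    # A plain regex key matches on path alone; an array key requires the
    # changed lines to match the second regex as well.
    path_regex, content_regex = Array(key)

    next [] unless path_regex.match?(path)
    next [] if content_regex && changed_lines.grep(content_regex).empty?

    categories
  end.uniq
end

categories_for('admin/groups/_form.html.haml', ['+ data: { track_label:'])
# => [:product_intelligence]

This mirrors the spec table in project_helper_spec.rb ("having specific changes"), where a haml file with a "data: { track" line, or a Ruby file with a Gitlab::Tracking call, is expected to be categorized as product intelligence.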