Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-10-25 09:12:21 +00:00
parent 0712a75cc0
commit 45760607bc
90 changed files with 938 additions and 788 deletions

View File

@ -2454,7 +2454,6 @@ Database/MultipleDatabases:
- 'lib/gitlab/database/schema_cache_with_renamed_table.rb'
- 'lib/gitlab/database/schema_migrations/context.rb'
- 'lib/gitlab/database/schema_version_files.rb'
- 'lib/gitlab/database/unidirectional_copy_trigger.rb'
- 'lib/gitlab/database/with_lock_retries.rb'
- 'lib/gitlab/gitlab_import/importer.rb'
- 'lib/gitlab/health_checks/db_check.rb'

View File

@ -570,7 +570,7 @@ export default {
jumpToFile(step) {
const targetIndex = this.currentDiffIndex + step;
if (targetIndex >= 0 && targetIndex < this.diffFiles.length) {
this.scrollToFile(this.diffFiles[targetIndex].file_path);
this.scrollToFile({ path: this.diffFiles[targetIndex].file_path });
}
},
setTreeDisplay() {

View File

@ -98,7 +98,7 @@ export default {
:file-row-component="$options.DiffFileRow"
:current-diff-file-id="currentDiffFileId"
@toggleTreeOpen="toggleTreeOpen"
@clickFile="scrollToFile"
@clickFile="(path) => scrollToFile({ path })"
/>
</template>
<p v-else class="prepend-top-20 append-bottom-20 text-center">

View File

@ -138,7 +138,7 @@ export default function initDiffsApp(store) {
...mapActions('diffs', ['toggleFileFinder', 'scrollToFile']),
openFile(file) {
window.mrTabs.tabShown('diffs');
this.scrollToFile(file.path);
this.scrollToFile({ path: file.path });
},
},
render(createElement) {

View File

@ -518,7 +518,7 @@ export const toggleActiveFileByHash = ({ commit }, hash) => {
commit(types.VIEW_DIFF_FILE, hash);
};
export const scrollToFile = ({ state, commit, getters }, path) => {
export const scrollToFile = ({ state, commit, getters }, { path, setHash = true }) => {
if (!state.treeEntries[path]) return;
const { fileHash } = state.treeEntries[path];
@ -528,9 +528,11 @@ export const scrollToFile = ({ state, commit, getters }, path) => {
if (getters.isVirtualScrollingEnabled) {
eventHub.$emit('scrollToFileHash', fileHash);
setTimeout(() => {
window.history.replaceState(null, null, `#${fileHash}`);
});
if (setHash) {
setTimeout(() => {
window.history.replaceState(null, null, `#${fileHash}`);
});
}
} else {
document.location.hash = fileHash;

View File

@ -1,4 +1,8 @@
fragment Iteration on Iteration {
id
title
iterationCadence {
id
title
}
}

View File

@ -220,16 +220,16 @@ export const scrollToElement = (element, options = {}) => {
// In the previous implementation, jQuery naturally deferred this scrolling.
// Unfortunately, we're quite coupled to this implementation detail now.
defer(() => {
const { duration = 200, offset = 0 } = options;
const { duration = 200, offset = 0, behavior = duration ? 'smooth' : 'auto' } = options;
const y = el.getBoundingClientRect().top + window.pageYOffset + offset - contentTop();
window.scrollTo({ top: y, behavior: duration ? 'smooth' : 'auto' });
window.scrollTo({ top: y, behavior });
});
}
};
export const scrollToElementWithContext = (element) => {
export const scrollToElementWithContext = (element, options) => {
const offsetMultiplier = -0.1;
return scrollToElement(element, { offset: window.innerHeight * offsetMultiplier });
return scrollToElement(element, { ...options, offset: window.innerHeight * offsetMultiplier });
};
/**

View File

@ -2,6 +2,8 @@ import { mapGetters, mapActions, mapState } from 'vuex';
import { scrollToElementWithContext, scrollToElement } from '~/lib/utils/common_utils';
import eventHub from '../event_hub';
const isDiffsVirtualScrollingEnabled = () => window.gon?.features?.diffsVirtualScrolling;
/**
* @param {string} selector
* @returns {boolean}
@ -11,7 +13,9 @@ function scrollTo(selector, { withoutContext = false } = {}) {
const scrollFunction = withoutContext ? scrollToElement : scrollToElementWithContext;
if (el) {
scrollFunction(el);
scrollFunction(el, {
behavior: isDiffsVirtualScrollingEnabled() ? 'auto' : 'smooth',
});
return true;
}
@ -81,8 +85,15 @@ function handleDiscussionJump(self, fn, discussionId = self.currentDiscussionId)
const discussion = self.getDiscussion(targetId);
const discussionFilePath = discussion?.diff_file?.file_path;
if (isDiffsVirtualScrollingEnabled()) {
window.location.hash = '';
}
if (discussionFilePath) {
self.scrollToFile(discussionFilePath);
self.scrollToFile({
path: discussionFilePath,
setHash: !isDiffsVirtualScrollingEnabled(),
});
}
self.$nextTick(() => {

View File

@ -1,4 +1,3 @@
/* eslint-disable @gitlab/require-string-literal-i18n-helpers */
import $ from 'jquery';
import Visibility from 'visibilityjs';
import Vue from 'vue';
@ -621,7 +620,7 @@ export const submitSuggestion = (
const flashMessage = errorMessage || defaultMessage;
createFlash({
message: __(flashMessage),
message: flashMessage,
parent: flashContainer,
});
})
@ -657,7 +656,7 @@ export const submitSuggestionBatch = ({ commit, dispatch, state }, { message, fl
const flashMessage = errorMessage || defaultMessage;
createFlash({
message: __(flashMessage),
message: flashMessage,
parent: flashContainer,
});
})

View File

@ -4,7 +4,7 @@
* For a complete overview of the plan please check: https://gitlab.com/gitlab-org/gitlab/-/issues/330846
* This work is behind feature flag: https://gitlab.com/gitlab-org/gitlab/-/issues/341136
*/
// import { GlEmptyState, GlLink, GlSprintf } from '@gitlab/ui';
import { GlEmptyState, GlLink, GlSprintf } from '@gitlab/ui';
import createFlash from '~/flash';
import { historyReplaceState } from '~/lib/utils/common_utils';
import { s__ } from '~/locale';
@ -15,17 +15,18 @@ import {
PROJECT_RESOURCE_TYPE,
GROUP_RESOURCE_TYPE,
LIST_QUERY_DEBOUNCE_TIME,
GRAPHQL_PAGE_SIZE,
} from '~/packages_and_registries/package_registry/constants';
import PackageTitle from './package_title.vue';
import PackageSearch from './package_search.vue';
// import PackageList from './packages_list.vue';
import PackageList from './packages_list.vue';
export default {
components: {
// GlEmptyState,
// GlLink,
// GlSprintf,
// PackageList,
GlEmptyState,
GlLink,
GlSprintf,
PackageList,
PackageTitle,
PackageSearch,
},
@ -64,17 +65,24 @@ export default {
groupSort: this.isGroupPage ? this.sort : undefined,
packageName: this.filters?.packageName,
packageType: this.filters?.packageType,
first: GRAPHQL_PAGE_SIZE,
};
},
graphqlResource() {
return this.isGroupPage ? GROUP_RESOURCE_TYPE : PROJECT_RESOURCE_TYPE;
},
pageInfo() {
return this.packages?.pageInfo ?? {};
},
packagesCount() {
return this.packages?.count;
},
hasFilters() {
return this.filters.packageName && this.filters.packageType;
},
emptySearch() {
return !this.filters.packageName && !this.filters.packageType;
},
emptyStateTitle() {
return this.emptySearch
? this.$options.i18n.emptyPageTitle
@ -99,6 +107,35 @@ export default {
this.sort = sort;
this.filters = { ...filters };
},
updateQuery(_, { fetchMoreResult }) {
return fetchMoreResult;
},
fetchNextPage() {
const variables = {
...this.queryVariables,
first: GRAPHQL_PAGE_SIZE,
last: null,
after: this.pageInfo?.endCursor,
};
this.$apollo.queries.packages.fetchMore({
variables,
updateQuery: this.updateQuery,
});
},
fetchPreviousPage() {
const variables = {
...this.queryVariables,
first: null,
last: GRAPHQL_PAGE_SIZE,
before: this.pageInfo?.startCursor,
};
this.$apollo.queries.packages.fetchMore({
variables,
updateQuery: this.updateQuery,
});
},
},
i18n: {
widenFilters: s__('PackageRegistry|To widen your search, change or remove the filters above.'),
@ -116,7 +153,13 @@ export default {
<package-title :help-url="packageHelpUrl" :count="packagesCount" />
<package-search @update="handleSearchUpdate" />
<!-- <package-list @page:changed="onPageChanged" @package:delete="onPackageDeleteRequest">
<package-list
:list="packages.nodes"
:is-loading="$apollo.queries.packages.loading"
:page-info="pageInfo"
@prev-page="fetchPreviousPage"
@next-page="fetchNextPage"
>
<template #empty-state>
<gl-empty-state :title="emptyStateTitle" :svg-path="emptyListIllustration">
<template #description>
@ -129,6 +172,6 @@ export default {
</template>
</gl-empty-state>
</template>
</package-list> -->
</package-list>
</div>
</template>

View File

@ -1,75 +1,79 @@
<script>
import { GlPagination, GlModal, GlSprintf } from '@gitlab/ui';
import { mapState, mapGetters } from 'vuex';
import { GlModal, GlSprintf, GlKeysetPagination } from '@gitlab/ui';
import { s__ } from '~/locale';
import PackagesListRow from '~/packages/shared/components/package_list_row.vue';
import PackagesListRow from '~/packages_and_registries/package_registry/components/list/package_list_row.vue';
import PackagesListLoader from '~/packages/shared/components/packages_list_loader.vue';
import { TrackingActions } from '~/packages/shared/constants';
import { packageTypeToTrackCategory } from '~/packages/shared/utils';
import {
DELETE_PACKAGE_TRACKING_ACTION,
REQUEST_DELETE_PACKAGE_TRACKING_ACTION,
CANCEL_DELETE_PACKAGE_TRACKING_ACTION,
} from '~/packages_and_registries/package_registry/constants';
import { packageTypeToTrackCategory } from '~/packages_and_registries/package_registry/utils';
import Tracking from '~/tracking';
export default {
components: {
GlPagination,
GlKeysetPagination,
GlModal,
GlSprintf,
PackagesListLoader,
PackagesListRow,
},
mixins: [Tracking.mixin()],
props: {
list: {
type: Array,
required: false,
default: () => [],
},
isLoading: {
type: Boolean,
required: false,
default: false,
},
pageInfo: {
type: Object,
required: true,
},
},
data() {
return {
itemToBeDeleted: null,
};
},
computed: {
...mapState({
perPage: (state) => state.pagination.perPage,
totalItems: (state) => state.pagination.total,
page: (state) => state.pagination.page,
isGroupPage: (state) => state.config.isGroupPage,
isLoading: 'isLoading',
}),
...mapGetters({ list: 'getList' }),
currentPage: {
get() {
return this.page;
},
set(value) {
this.$emit('page:changed', value);
},
},
isListEmpty() {
return !this.list || this.list.length === 0;
},
modalAction() {
return s__('PackageRegistry|Delete package');
},
deletePackageName() {
return this.itemToBeDeleted?.name ?? '';
},
tracking() {
const category = this.itemToBeDeleted
? packageTypeToTrackCategory(this.itemToBeDeleted.package_type)
? packageTypeToTrackCategory(this.itemToBeDeleted.packageType)
: undefined;
return {
category,
};
},
showPagination() {
return this.pageInfo.hasPreviousPage || this.pageInfo.hasNextPage;
},
},
methods: {
setItemToBeDeleted(item) {
this.itemToBeDeleted = { ...item };
this.track(TrackingActions.REQUEST_DELETE_PACKAGE);
this.track(REQUEST_DELETE_PACKAGE_TRACKING_ACTION);
this.$refs.packageListDeleteModal.show();
},
deleteItemConfirmation() {
this.$emit('package:delete', this.itemToBeDeleted);
this.track(TrackingActions.DELETE_PACKAGE);
this.track(DELETE_PACKAGE_TRACKING_ACTION);
this.itemToBeDeleted = null;
},
deleteItemCanceled() {
this.track(TrackingActions.CANCEL_DELETE_PACKAGE);
this.track(CANCEL_DELETE_PACKAGE_TRACKING_ACTION);
this.itemToBeDeleted = null;
},
},
@ -77,6 +81,7 @@ export default {
deleteModalContent: s__(
'PackageRegistry|You are about to delete %{name}, this operation is irreversible, are you sure?',
),
modalAction: s__('PackageRegistry|Delete package'),
},
};
</script>
@ -95,19 +100,19 @@ export default {
v-for="packageEntity in list"
:key="packageEntity.id"
:package-entity="packageEntity"
:package-link="packageEntity._links.web_path"
:is-group="isGroupPage"
@packageToDelete="setItemToBeDeleted"
/>
</div>
<gl-pagination
v-model="currentPage"
:per-page="perPage"
:total-items="totalItems"
align="center"
class="gl-w-full gl-mt-3"
/>
<div class="gl-display-flex gl-justify-content-center">
<gl-keyset-pagination
v-if="showPagination"
v-bind="pageInfo"
class="gl-mt-3"
@prev="$emit('prev-page')"
@next="$emit('next-page')"
/>
</div>
<gl-modal
ref="packageListDeleteModal"
@ -116,8 +121,8 @@ export default {
@ok="deleteItemConfirmation"
@cancel="deleteItemCanceled"
>
<template #modal-title>{{ modalAction }}</template>
<template #modal-ok>{{ modalAction }}</template>
<template #modal-title>{{ $options.i18n.modalAction }}</template>
<template #modal-ok>{{ $options.i18n.modalAction }}</template>
<gl-sprintf :message="$options.i18n.deleteModalContent">
<template #name>
<strong>{{ deletePackageName }}</strong>

View File

@ -59,12 +59,6 @@ export const TRACKING_ACTION_COPY_COMPOSER_REGISTRY_INCLUDE_COMMAND =
export const TRACKING_ACTION_COPY_COMPOSER_PACKAGE_INCLUDE_COMMAND =
'copy_composer_package_include_command';
export const TrackingCategories = {
[PACKAGE_TYPE_MAVEN]: 'MavenPackages',
[PACKAGE_TYPE_NPM]: 'NpmPackages',
[PACKAGE_TYPE_CONAN]: 'ConanPackages',
};
export const SHOW_DELETE_SUCCESS_ALERT = 'showSuccessDeleteAlert';
export const DELETE_PACKAGE_ERROR_MESSAGE = s__(
'PackageRegistry|Something went wrong while deleting the package.',
@ -93,3 +87,4 @@ export const INSTANCE_PACKAGE_ENDPOINT_TYPE = 'instance';
export const PROJECT_RESOURCE_TYPE = 'project';
export const GROUP_RESOURCE_TYPE = 'group';
export const LIST_QUERY_DEBOUNCE_TIME = 50;
export const GRAPHQL_PAGE_SIZE = 20;

View File

@ -1,4 +1,5 @@
#import "~/packages_and_registries/package_registry/graphql/fragments/package_data.fragment.graphql"
#import "~/graphql_shared/fragments/pageInfo.fragment.graphql"
query getPackages(
$fullPath: ID!
@ -7,21 +8,47 @@ query getPackages(
$groupSort: PackageGroupSort
$packageName: String
$packageType: PackageTypeEnum
$first: Int
$last: Int
$after: String
$before: String
) {
project(fullPath: $fullPath) @skip(if: $isGroupPage) {
packages(sort: $sort, packageName: $packageName, packageType: $packageType) {
packages(
sort: $sort
packageName: $packageName
packageType: $packageType
after: $after
before: $before
first: $first
last: $last
) {
count
nodes {
...PackageData
}
pageInfo {
...PageInfo
}
}
}
group(fullPath: $fullPath) @include(if: $isGroupPage) {
packages(sort: $groupSort, packageName: $packageName, packageType: $packageType) {
packages(
sort: $groupSort
packageName: $packageName
packageType: $packageType
after: $after
before: $before
first: $first
last: $last
) {
count
nodes {
...PackageData
}
pageInfo {
...PageInfo
}
}
}
}

View File

@ -1,3 +1,4 @@
import { capitalize } from 'lodash';
import { s__ } from '~/locale';
import {
PACKAGE_TYPE_CONAN,
@ -38,3 +39,5 @@ export const getPackageTypeLabel = (packageType) => {
return null;
}
};
export const packageTypeToTrackCategory = (type) => `UI::${capitalize(type)}Packages`;

View File

@ -63,7 +63,6 @@ export default {
v-if="showPipelineStatus"
:commit-sha="commitSha"
:class="$options.pipelineStatusClasses"
v-on="$listeners"
/>
<validation-segment :class="validationStyling" :ci-config="ciConfigData" />
</div>

View File

@ -1,52 +1,17 @@
<script>
import { __ } from '~/locale';
import PipelineMiniGraph from '~/pipelines/components/pipelines_list/pipeline_mini_graph.vue';
import getLinkedPipelinesQuery from '~/projects/commit_box/info/graphql/queries/get_linked_pipelines.query.graphql';
import { PIPELINE_FAILURE } from '../../constants';
export default {
i18n: {
linkedPipelinesFetchError: __('Unable to fetch upstream and downstream pipelines.'),
},
components: {
PipelineMiniGraph,
LinkedPipelinesMiniList: () =>
import('ee_component/vue_shared/components/linked_pipelines_mini_list.vue'),
},
inject: ['projectFullPath'],
props: {
pipeline: {
type: Object,
required: true,
},
},
apollo: {
linkedPipelines: {
query: getLinkedPipelinesQuery,
variables() {
return {
fullPath: this.projectFullPath,
iid: this.pipeline.iid,
};
},
skip() {
return !this.pipeline.iid;
},
update({ project }) {
return project?.pipeline;
},
error() {
this.$emit('showError', {
type: PIPELINE_FAILURE,
reasons: [this.$options.i18n.linkedPipelinesFetchError],
});
},
},
},
computed: {
downstreamPipelines() {
return this.linkedPipelines?.downstream?.nodes || [];
},
pipelinePath() {
return this.pipeline.detailedStatus?.detailsPath || '';
},
@ -73,29 +38,12 @@ export default {
};
});
},
showDownstreamPipelines() {
return this.downstreamPipelines.length > 0;
},
upstreamPipeline() {
return this.linkedPipelines?.upstream;
},
},
};
</script>
<template>
<div v-if="pipelineStages.length > 0" class="stage-cell gl-mr-5">
<linked-pipelines-mini-list
v-if="upstreamPipeline"
:triggered-by="[upstreamPipeline]"
data-testid="pipeline-editor-mini-graph-upstream"
/>
<pipeline-mini-graph class="gl-display-inline" :stages="pipelineStages" />
<linked-pipelines-mini-list
v-if="showDownstreamPipelines"
:triggered="downstreamPipelines"
:pipeline-path="pipelinePath"
data-testid="pipeline-editor-mini-graph-downstream"
/>
</div>
</template>

View File

@ -59,12 +59,11 @@ export default {
};
},
update(data) {
const { id, iid, commitPath = '', detailedStatus = {}, stages, status } =
const { id, commitPath = '', detailedStatus = {}, stages, status } =
data.project?.pipeline || {};
return {
id,
iid,
commitPath,
detailedStatus,
stages,
@ -160,7 +159,6 @@ export default {
<pipeline-editor-mini-graph
v-if="glFeatures.pipelineEditorMiniGraph"
:pipeline="pipeline"
v-on="$listeners"
/>
<gl-button
class="gl-mt-2 gl-md-mt-0"

View File

@ -8,7 +8,6 @@ import {
DEFAULT_FAILURE,
DEFAULT_SUCCESS,
LOAD_FAILURE_UNKNOWN,
PIPELINE_FAILURE,
} from '../../constants';
import CodeSnippetAlert from '../code_snippet_alert/code_snippet_alert.vue';
import {
@ -25,7 +24,6 @@ export default {
[COMMIT_FAILURE]: s__('Pipelines|The GitLab CI configuration could not be updated.'),
[DEFAULT_FAILURE]: __('Something went wrong on our end.'),
[LOAD_FAILURE_UNKNOWN]: s__('Pipelines|The CI configuration was not loaded, please try again.'),
[PIPELINE_FAILURE]: s__('Pipelines|There was a problem with loading the pipeline data.'),
},
successTexts: {
[COMMIT_SUCCESS]: __('Your changes have been successfully committed.'),
@ -76,11 +74,6 @@ export default {
text: this.$options.errorTexts[COMMIT_FAILURE],
variant: 'danger',
};
case PIPELINE_FAILURE:
return {
text: this.$options.errorTexts[PIPELINE_FAILURE],
variant: 'danger',
};
default:
return {
text: this.$options.errorTexts[DEFAULT_FAILURE],

View File

@ -16,7 +16,6 @@ export const COMMIT_SUCCESS = 'COMMIT_SUCCESS';
export const DEFAULT_FAILURE = 'DEFAULT_FAILURE';
export const DEFAULT_SUCCESS = 'DEFAULT_SUCCESS';
export const LOAD_FAILURE_UNKNOWN = 'LOAD_FAILURE_UNKNOWN';
export const PIPELINE_FAILURE = 'PIPELINE_FAILURE';
export const CREATE_TAB = 'CREATE_TAB';
export const LINT_TAB = 'LINT_TAB';

View File

@ -93,7 +93,6 @@ export const initPipelineEditor = (selector = '#js-pipeline-editor') => {
ciExamplesHelpPagePath,
ciHelpPagePath,
configurationPaths,
dataMethod: 'graphql',
defaultBranch,
emptyStateIllustrationPath,
helpPaths,

View File

@ -111,7 +111,6 @@ export default {
:ci-config-data="ciConfigData"
:commit-sha="commitSha"
:is-new-ci-config-file="isNewCiConfigFile"
v-on="$listeners"
/>
<pipeline-editor-tabs
:ci-config-data="ciConfigData"

View File

@ -1,5 +1,5 @@
<script>
import { GlFilteredSearchSuggestion } from '@gitlab/ui';
import { GlDropdownDivider, GlDropdownSectionHeader, GlFilteredSearchSuggestion } from '@gitlab/ui';
import createFlash from '~/flash';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { __ } from '~/locale';
@ -9,6 +9,8 @@ import { DEFAULT_ITERATIONS } from '../constants';
export default {
components: {
BaseToken,
GlDropdownDivider,
GlDropdownSectionHeader,
GlFilteredSearchSuggestion,
},
props: {
@ -40,6 +42,23 @@ export default {
getActiveIteration(iterations, data) {
return iterations.find((iteration) => this.getValue(iteration) === data);
},
groupIterationsByCadence(iterations) {
const cadences = [];
iterations.forEach((iteration) => {
if (!iteration.iterationCadence) {
return;
}
const { title } = iteration.iterationCadence;
const cadenceIteration = { id: iteration.id, title: iteration.title };
const cadence = cadences.find((cad) => cad.title === title);
if (cadence) {
cadence.iterations.push(cadenceIteration);
} else {
cadences.push({ title, iterations: [cadenceIteration] });
}
});
return cadences;
},
fetchIterations(searchTerm) {
this.loading = true;
this.config
@ -77,13 +96,23 @@ export default {
{{ activeTokenValue ? activeTokenValue.title : inputValue }}
</template>
<template #suggestions-list="{ suggestions }">
<gl-filtered-search-suggestion
v-for="iteration in suggestions"
:key="iteration.id"
:value="getValue(iteration)"
>
{{ iteration.title }}
</gl-filtered-search-suggestion>
<template v-for="(cadence, index) in groupIterationsByCadence(suggestions)">
<gl-dropdown-divider v-if="index !== 0" :key="index" />
<gl-dropdown-section-header
:key="cadence.title"
class="gl-overflow-hidden"
:title="cadence.title"
>
{{ cadence.title }}
</gl-dropdown-section-header>
<gl-filtered-search-suggestion
v-for="iteration in cadence.iterations"
:key="iteration.id"
:value="getValue(iteration)"
>
{{ iteration.title }}
</gl-filtered-search-suggestion>
</template>
</template>
</base-token>
</template>

View File

@ -313,12 +313,6 @@ module Ci
end
after_transition pending: :running do |build|
unless build.update_deployment_after_transaction_commit?
Gitlab::Database.allow_cross_database_modification_within_transaction(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/338867') do
build.deployment&.run
end
end
build.run_after_commit do
build.pipeline.persistent_ref.create
@ -339,35 +333,12 @@ module Ci
end
after_transition any => [:success] do |build|
unless build.update_deployment_after_transaction_commit?
Gitlab::Database.allow_cross_database_modification_within_transaction(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/338867') do
build.deployment&.succeed
end
end
build.run_after_commit do
BuildSuccessWorker.perform_async(id)
PagesWorker.perform_async(:deploy, id) if build.pages_generator?
end
end
after_transition any => [:failed] do |build|
next unless build.project
next unless build.deployment
unless build.update_deployment_after_transaction_commit?
begin
Gitlab::Database.allow_cross_database_modification_within_transaction(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/338867') do
build.deployment.drop!
end
rescue StandardError => e
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e, build_id: build.id)
end
end
true
end
after_transition any => [:failed] do |build|
next unless build.project
@ -380,25 +351,12 @@ module Ci
end
end
after_transition any => [:skipped, :canceled] do |build, transition|
unless build.update_deployment_after_transaction_commit?
Gitlab::Database.allow_cross_database_modification_within_transaction(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/338867') do
if transition.to_name == :skipped
build.deployment&.skip
else
build.deployment&.cancel
end
end
end
end
# Synchronize Deployment Status
# Please note that the data integrity is not assured because we can't use
# a database transaction due to DB decomposition.
after_transition do |build, transition|
next if transition.loopback?
next unless build.project
next unless build.update_deployment_after_transaction_commit?
build.run_after_commit do
build.deployment&.sync_status_with(build)
@ -1120,12 +1078,6 @@ module Ci
runner&.instance_type?
end
def update_deployment_after_transaction_commit?
strong_memoize(:update_deployment_after_transaction_commit) do
Feature.enabled?(:update_deployment_after_transaction_commit, project, default_enabled: :yaml)
end
end
protected
def run_status_commit_hooks!

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
module MergeRequestReviewerState
extend ActiveSupport::Concern
included do
enum state: {
unreviewed: 0,
reviewed: 1,
attention_required: 2
}
validates :state,
presence: true,
inclusion: { in: self.states.keys }
end
end

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true
class MergeRequestAssignee < ApplicationRecord
include MergeRequestReviewerState
belongs_to :merge_request, touch: true
belongs_to :assignee, class_name: "User", foreign_key: :user_id, inverse_of: :merge_request_assignees

View File

@ -1,14 +1,7 @@
# frozen_string_literal: true
class MergeRequestReviewer < ApplicationRecord
enum state: {
unreviewed: 0,
reviewed: 1
}
validates :state,
presence: true,
inclusion: { in: MergeRequestReviewer.states.keys }
include MergeRequestReviewerState
belongs_to :merge_request
belongs_to :reviewer, class_name: 'User', foreign_key: :user_id, inverse_of: :merge_request_reviewers

View File

@ -1,8 +0,0 @@
---
name: update_deployment_after_transaction_commit
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/71450
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/342021
milestone: '14.4'
type: development
group: group::release
default_enabled: false

View File

@ -12,12 +12,12 @@ For MR review guidelines, see the [Service Ping review guidelines](https://docs.
MSG
# exit if not matching files or if no product intelligence labels
matching_changed_files = product_intelligence.matching_changed_files
product_intelligence_paths_to_review = project_helper.changes_by_category[:product_intelligence]
labels = product_intelligence.missing_labels
return if matching_changed_files.empty? || labels.empty?
return if product_intelligence_paths_to_review.empty? || labels.empty?
warn format(CHANGED_FILES_MESSAGE, changed_files: helper.markdown_list(matching_changed_files))
warn format(CHANGED_FILES_MESSAGE, changed_files: helper.markdown_list(product_intelligence_paths_to_review))
gitlab.api.update_merge_request(gitlab.mr_json['project_id'],
gitlab.mr_json['iid'],

View File

@ -0,0 +1,8 @@
# frozen_string_literal: true
class AddStateToMergeRequestAssignees < Gitlab::Database::Migration[1.0]
REVIEW_DEFAULT_STATE = 0
def change
add_column :merge_request_assignees, :state, :smallint, default: REVIEW_DEFAULT_STATE, null: false
end
end

View File

@ -49,7 +49,7 @@ class FinalizePushEventPayloadsBigintConversion < ActiveRecord::Migration[6.1]
# We need to update the trigger function in order to make PostgreSQL
# regenerate the execution plan for it. This is to avoid type mismatch errors like
# "type of parameter 15 (bigint) does not match that when preparing the plan (integer)"
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:event_id, :event_id_convert_to_bigint)
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:event_id, :event_id_convert_to_bigint)
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
# Swap defaults

View File

@ -60,7 +60,7 @@ class FinalizeEventsBigintConversion < ActiveRecord::Migration[6.1]
# We need to update the trigger function in order to make PostgreSQL
# regenerate the execution plan for it. This is to avoid type mismatch errors like
# "type of parameter 15 (bigint) does not match that when preparing the plan (integer)"
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:id, :id_convert_to_bigint)
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:id, :id_convert_to_bigint)
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
# Swap defaults

View File

@ -47,7 +47,7 @@ class FinalizeCiBuildsNeedsBigintConversion < ActiveRecord::Migration[6.1]
# We need to update the trigger function in order to make PostgreSQL
# regenerate the execution plan for it. This is to avoid type mismatch errors like
# "type of parameter 15 (bigint) does not match that when preparing the plan (integer)"
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:build_id, :build_id_convert_to_bigint)
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:build_id, :build_id_convert_to_bigint)
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
# Swap defaults

View File

@ -46,7 +46,7 @@ class FinalizeCiBuildsStageIdBigintConversion < ActiveRecord::Migration[6.1]
execute "ALTER TABLE #{quoted_table_name} RENAME COLUMN #{quote_column_name(temporary_name)} TO #{quote_column_name(:stage_id_convert_to_bigint)}"
# Reset the function so PG drops the plan cache for the incorrect integer type
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME)
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection)
.name([:id, :stage_id], [:id_convert_to_bigint, :stage_id_convert_to_bigint])
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"

View File

@ -58,7 +58,7 @@ class FinalizeCiJobArtifactsBigintConversion < ActiveRecord::Migration[6.1]
# We need to update the trigger function in order to make PostgreSQL
# regenerate the execution plan for it. This is to avoid type mismatch errors like
# "type of parameter 15 (bigint) does not match that when preparing the plan (integer)"
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name([:id, :job_id], [:id_convert_to_bigint, :job_id_convert_to_bigint])
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name([:id, :job_id], [:id_convert_to_bigint, :job_id_convert_to_bigint])
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
# Swap defaults

View File

@ -53,7 +53,7 @@ class FinalizeCiStagesBigintConversion < ActiveRecord::Migration[6.1]
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{id_convert_to_bigint_name} TO #{id_name}"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{temp_name} TO #{id_convert_to_bigint_name}"
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:id, :id_convert_to_bigint)
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:id, :id_convert_to_bigint)
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
# Swap defaults

View File

@ -53,8 +53,8 @@ class FinalizeCiBuildsMetadataBigintConversion < Gitlab::Database::Migration[1.0
# We need to update the trigger function in order to make PostgreSQL
# regenerate the execution plan for it. This is to avoid type mismatch errors like
# "type of parameter 15 (bigint) does not match that when preparing the plan (integer)"
execute "ALTER FUNCTION #{quote_table_name(Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:id, :id_convert_to_bigint))} RESET ALL"
execute "ALTER FUNCTION #{quote_table_name(Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:build_id, :build_id_convert_to_bigint))} RESET ALL"
execute "ALTER FUNCTION #{quote_table_name(Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:id, :id_convert_to_bigint))} RESET ALL"
execute "ALTER FUNCTION #{quote_table_name(Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:build_id, :build_id_convert_to_bigint))} RESET ALL"
# Swap defaults for PK
execute "ALTER SEQUENCE ci_builds_metadata_id_seq OWNED BY #{TABLE_NAME}.id"

View File

@ -47,7 +47,7 @@ class FinalizeCiSourcesPipelinesBigintConversion < ActiveRecord::Migration[6.1]
# We need to update the trigger function in order to make PostgreSQL
# regenerate the execution plan for it. This is to avoid type mismatch errors like
# "type of parameter 15 (bigint) does not match that when preparing the plan (integer)"
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:source_job_id, :source_job_id_convert_to_bigint)
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:source_job_id, :source_job_id_convert_to_bigint)
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
# No need to swap defaults, both columns have no default value

View File

@ -45,7 +45,7 @@ class FinalizeCiBuildTraceChunksBigintConversion < ActiveRecord::Migration[6.1]
# We need to update the trigger function in order to make PostgreSQL to
# regenerate the execution plan for it. This is to avoid type mismatch errors like
# "type of parameter 15 (bigint) does not match that when preparing the plan (integer)"
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:build_id, :build_id_convert_to_bigint)
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:build_id, :build_id_convert_to_bigint)
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
# Swap defaults

View File

@ -44,7 +44,7 @@ class FinalizeConvertGeoJobArtifactDeletedEventsBigint < ActiveRecord::Migration
change_column_default TABLE_NAME, COLUMN_NAME, nil
change_column_default TABLE_NAME, COLUMN_NAME_CONVERTED, 0
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(COLUMN_NAME, COLUMN_NAME_CONVERTED)
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(COLUMN_NAME, COLUMN_NAME_CONVERTED)
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
execute "DROP INDEX #{old_index_name}"

View File

@ -61,7 +61,7 @@ class FinalizePushEventPayloadsBigintConversion3 < ActiveRecord::Migration[6.1]
# We need to update the trigger function in order to make PostgreSQL to
# regenerate the execution plan for it. This is to avoid type mismatch errors like
# "type of parameter 15 (bigint) does not match that when preparing the plan (integer)"
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:event_id, :event_id_convert_to_bigint)
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:event_id, :event_id_convert_to_bigint)
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
# Swap defaults

View File

@ -35,7 +35,7 @@ class FinalizeConvertDeploymentsBigint < ActiveRecord::Migration[6.1]
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{quote_column_name(COLUMN_NAME_BIGINT)} TO #{quote_column_name(COLUMN_NAME)}"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{quote_column_name(temp_name)} TO #{quote_column_name(COLUMN_NAME_BIGINT)}"
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(COLUMN_NAME, COLUMN_NAME_BIGINT)
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(COLUMN_NAME, COLUMN_NAME_BIGINT)
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
execute "DROP INDEX #{old_index_name}"

View File

@ -44,7 +44,7 @@ class FinalizeCiBuildsRunnerSessionBigintConversion < ActiveRecord::Migration[6.
# We need to update the trigger function in order to make PostgreSQL to
# regenerate the execution plan for it. This is to avoid type mismatch errors like
# "type of parameter 15 (bigint) does not match that when preparing the plan (integer)"
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:build_id, :build_id_convert_to_bigint)
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name(:build_id, :build_id_convert_to_bigint)
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
# Swap defaults

View File

@ -60,7 +60,7 @@ class FinalizeTagginsBigintConversion < ActiveRecord::Migration[6.1]
# We need to update the trigger function in order to make PostgreSQL to
# regenerate the execution plan for it. This is to avoid type mismatch errors like
# "type of parameter 15 (bigint) does not match that when preparing the plan (integer)"
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name([:id, :taggable_id], [:id_convert_to_bigint, :taggable_id_convert_to_bigint])
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name([:id, :taggable_id], [:id_convert_to_bigint, :taggable_id_convert_to_bigint])
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
# Swap defaults

View File

@ -55,7 +55,7 @@ class FinalizeJobIdConversionToBigintForCiJobArtifacts < ActiveRecord::Migration
# We need to update the trigger function in order to make PostgreSQL to
# regenerate the execution plan for it. This is to avoid type mismatch errors like
# "type of parameter 15 (bigint) does not match that when preparing the plan (integer)"
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name([:id, :job_id], [:id_convert_to_bigint, :job_id_convert_to_bigint])
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection).name([:id, :job_id], [:id_convert_to_bigint, :job_id_convert_to_bigint])
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
# Swap defaults

View File

@ -136,7 +136,7 @@ class FinalizeCiBuildsBigintConversion < Gitlab::Database::Migration[1.0]
execute "ALTER TABLE #{quoted_table_name} RENAME COLUMN #{quote_column_name(temporary_name)} TO #{quote_column_name(:id_convert_to_bigint)}"
# Reset the function so PG drops the plan cache for the incorrect integer type
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME)
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME, connection: connection)
.name([:id, :stage_id], [:id_convert_to_bigint, :stage_id_convert_to_bigint])
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"

View File

@ -6,7 +6,9 @@ class AddTemporaryIndexToIssueMetrics < Gitlab::Database::Migration[1.0]
INDEX_NAME = 'index_issue_metrics_first_mentioned_in_commit'
def up
add_concurrent_index :issue_metrics, :issue_id, where: 'EXTRACT(YEAR FROM first_mentioned_in_commit_at) > 2019', name: INDEX_NAME
condition = Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt::TmpIssueMetrics
.first_mentioned_in_commit_at_condition
add_concurrent_index :issue_metrics, :issue_id, where: condition, name: INDEX_NAME
end
def down

View File

@ -8,8 +8,8 @@ class ScheduleFixFirstMentionedInCommitAtJob < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
def up
scope = define_batchable_model('issue_metrics')
.where('EXTRACT(YEAR FROM first_mentioned_in_commit_at) > 2019')
scope = Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt::TmpIssueMetrics
.from_2020
queue_background_migration_jobs_by_range_at_intervals(
scope,

View File

@ -0,0 +1 @@
713efc9673bc6cda8eff4e433c3c85f0cc4b8b8ca7b5cc4308e57a6d0b0040a0

View File

@ -15706,7 +15706,8 @@ CREATE TABLE merge_request_assignees (
id bigint NOT NULL,
user_id integer NOT NULL,
merge_request_id integer NOT NULL,
created_at timestamp with time zone
created_at timestamp with time zone,
state smallint DEFAULT 0 NOT NULL
);
CREATE SEQUENCE merge_request_assignees_id_seq

View File

@ -222,10 +222,12 @@ When the number exceeds the limit the page displays an alert and links to a pagi
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/51401) in GitLab 11.10.
The number of pipelines that can be created in a single push is 4.
This is to prevent the accidental creation of pipelines when `git push --all`
This limit prevents the accidental creation of pipelines when `git push --all`
or `git push --mirror` is used.
Read more in the [CI documentation](../ci/yaml/index.md#processing-git-pushes).
This limit does not affect any of the updated merge request pipelines.
All updated merge requests have a pipeline created when using
[pipelines for merge requests](../ci/pipelines/merge_request_pipelines.md).
## Retention of activity history

View File

@ -16187,6 +16187,7 @@ State of a review of a GitLab merge request.
| Value | Description |
| ----- | ----------- |
| <a id="mergerequestreviewstateattention_required"></a>`ATTENTION_REQUIRED` | The merge request is attention_required. |
| <a id="mergerequestreviewstatereviewed"></a>`REVIEWED` | The merge request is reviewed. |
| <a id="mergerequestreviewstateunreviewed"></a>`UNREVIEWED` | The merge request is unreviewed. |

View File

@ -246,6 +246,21 @@ If the merge train pipeline was canceled before the merge request was merged, wi
- Add it to the train again.
### Project `group/project` not found or access denied
This message is shown if configuration is added with [`include`](yaml/index.md#include) and one of the following:
- The configuration refers to a project that can't be found.
- The user that is running the pipeline is unable to access any included projects.
To resolve this, check that:
- The path of the project is in the format `my-group/my-project` and does not include
any folders in the repository.
- The user running the pipeline is a [member of the projects](../user/project/members/index.md#add-users-to-a-project)
that contain the included files. Users must also have the [permission](../user/permissions.md#job-permissions)
to run CI/CD jobs in the same projects.
## Pipeline warnings
Pipeline configuration warnings are shown when you:

View File

@ -4741,15 +4741,6 @@ You can use [CI/CD variables](../variables/index.md) to configure how the runner
You can also use variables to configure how many times a runner
[attempts certain stages of job execution](../runners/configure_runners.md#job-stages-attempts).
## Processing Git pushes
GitLab creates at most four branch and tag pipelines when
pushing multiple changes in a single `git push` invocation.
This limitation does not affect any of the updated merge request pipelines.
All updated merge requests have a pipeline created when using
[pipelines for merge requests](../pipelines/merge_request_pipelines.md).
## Deprecated keywords
The following keywords are deprecated.

View File

@ -21,6 +21,12 @@ comments: false
git clone <url>
```
NOTE:
You can also clone GitLab projects with the
[GitLab Workflow VS Code extension](../../user/project/repository/vscode.md).
To learn more, read about the extension's
[`Git: Clone` command](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow#clone-gitlab-projects).
## Central Repositories
- To instantiate a central repository a `--bare` flag is required.

View File

@ -8,34 +8,59 @@ info: To determine the technical writer assigned to the Stage/Group associated w
You can sort a list of issues several ways, including by:
- Blocking **(PREMIUM)**
- Created date
- Due date
- Label priority
- Last updated
- Milestone due date
- Popularity
- Priority
- Title ([introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/67234) in GitLab 14.3)
- Weight
- [Blocking issues](#sorting-by-blocking-issues)
- [Created date](#sorting-by-created-date)
- [Due date](#sorting-by-due-date)
- [Label priority](#sorting-by-label-priority)
- [Last updated](#sorting-by-last-updated)
- [Manual sorting](#manual-sorting)
- [Milestone due date](#sorting-by-milestone-due-date)
- [Popularity](#sorting-by-popularity)
- [Priority](#sorting-by-priority)
- [Title](#sorting-by-title)
- [Weight](#sorting-by-weight)
The available sorting options can change based on the context of the list.
For sorting by issue priority, see [Label Priority](../labels.md#label-priority).
In group and project issue lists, it is also possible to order issues manually,
similar to [issue boards](../issue_board.md#ordering-issues-in-a-list).
## Sorting by blocking issues **(PREMIUM)**
## Sorting by popularity
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/34247/) in GitLab 13.7.
When you select sorting by **Popularity**, the issue order changes to sort descending by the
number of upvotes ([awarded](../../award_emojis.md) "thumbs up" emoji)
on each issue. You can use this to identify issues that are in high demand.
When you sort by **Blocking**, the issue list changes to sort descending by the
number of issues each issue is blocking.
## Sorting by created date
When you sort by **Created date**, the issue list changes to sort descending by the issue
creation date. Issues created most recently are first.
## Sorting by due date
When you sort by **Due date**, the issue list changes to sort ascending by the issue
[due date](issue_data_and_actions.md#due-date). Issues with the earliest due date are first,
and issues without a due date are last.
## Sorting by label priority
When you sort by **Label priority**, the issue list changes to sort descending.
Issues with the highest priority label are first, then all other issues.
Ties are broken arbitrarily. Only the highest prioritized label is checked,
and labels with a lower priority are ignored.
For more information, see [issue 14523](https://gitlab.com/gitlab-org/gitlab/-/issues/14523).
To learn more about priority labels, read the [Labels](../labels.md#label-priority) documentation.
## Sorting by last updated
When you sort by **Last updated**, the issue list changes to sort by the time of a last
update. Issues changed the most recently are first.
## Manual sorting
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/62178) in GitLab 12.2.
When you select **Manual** sorting, you can change
When you sort by **Manual** order, you can change
the order by dragging and dropping the issues. The changed order persists, and
everyone who visits the same list sees the updated issue order, with some exceptions.
@ -50,11 +75,45 @@ this ordering is maintained whenever they appear together in any list.
This ordering also affects [issue boards](../issue_board.md#ordering-issues-in-a-list).
Changing the order in an issue list changes the ordering in an issue board,
and vice versa.
and the other way around.
## Sorting by blocking issues **(PREMIUM)**
## Sorting by milestone due date
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/34247/) in GitLab 13.7.
When you sort by **Milestone due date**, the issue list changes to sort ascending by the
assigned milestone due date. Issues with milestones with the earliest due date are first,
then issues with a milestone without a due date.
When you select to sort by **Blocking**, the issue list changes to sort descending by the
number of issues each issue is blocking. You can use this to determine the critical path for your backlog.
## Sorting by popularity
When you sort by **Popularity**, the issue order changes to sort descending by the
number of upvotes ([awarded](../../award_emojis.md) a "thumbs up" emoji)
on each issue. You can use this to identify issues that are in high demand.
## Sorting by priority
When you sort by **Priority**, the issue order changes to sort in this order:
1. Issues with milestones that have due dates, where the soonest assigned milestone is listed first.
1. Issues with milestones with no due dates.
1. Issues with a higher priority label.
1. Issues without a prioritized label.
To learn more about priority, read the [Labels](../labels.md#label-priority) documentation.
## Sorting by title
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/67234) in GitLab 14.3.
When you sort by **Title**, the issue order changes to sort alphabetically by the issue
title in this order:
- Emoji
- Special characters
- Numbers
- Letters: first Latin, then accented (for example, `ö`)
## Sorting by weight
When you sort by **Weight**, the issue list changes to sort ascending by the
[issue weight](issue_weight.md).
Issues with lowest weight are first, and issues without a weight are last.

View File

@ -17,6 +17,10 @@ your merge request, and makes [code suggestions](suggestions.md) you can accept
from the user interface. When your work is reviewed, your team members can choose
to accept or reject it.
You can review merge requests from the GitLab interface. If you install the
[GitLab Workflow VS Code extension](../../repository/vscode.md), you can also
review merge requests in Visual Studio Code.
## Review a merge request
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/4213) in GitLab Premium 11.4.

View File

@ -82,14 +82,19 @@ prompted to open XCode.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/220957) in GitLab 13.10.
All projects can be cloned into Visual Studio Code. To do that:
All projects can be cloned into Visual Studio Code from the GitLab user interface, but you
can also install the [GitLab Workflow VS Code extension](vscode.md) to clone from
Visual Studio Code:
1. From the GitLab UI, go to the project's overview page.
1. Click **Clone**.
1. Select **Clone with Visual Studio Code** under either HTTPS or SSH method.
1. Select a folder to clone the project into.
- From the GitLab interface:
1. Go to the project's overview page.
1. Select **Clone**.
1. Under either the **HTTPS** or **SSH** method, select **Clone with Visual Studio Code**.
1. Select a folder to clone the project into.
When VS Code has successfully cloned your project, it opens the folder.
After Visual Studio Code clones your project, it opens the folder.
- From Visual Studio Code, with the [extension](vscode.md) installed, use the
extension's [`Git: Clone` command](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow#clone-gitlab-projects).
## Download the code in a repository
@ -243,6 +248,10 @@ When you [rename a user](../../profile/index.md#change-your-username),
- The redirects are available as long as the original path is not claimed by
another group, user, or project.
## Related links
- [GitLab Workflow VS Code extension](vscode.md)
## Troubleshooting
### Repository Languages: excessive CPU use

View File

@ -14,6 +14,9 @@ You can [comment on](#comment-on-snippets), [clone](#clone-snippets), and
[syntax highlighting](#filenames), [embedding](#embed-snippets), [downloading](#download-snippets),
and you can maintain your snippets with the [snippets API](../api/snippets.md).
You can create and manage your snippets through the GitLab user interface, or by
using the [GitLab Workflow VS Code extension](project/repository/vscode.md).
![Example of snippet](img/snippet_intro_v13_11.png)
GitLab provides two types of snippets:
@ -39,6 +42,8 @@ You can create snippets in multiple ways, depending on whether you want to creat
- *For all other pages,* select the plus icon (**{plus-square-o}**)
in the top navigation bar, then select **New snippet** from the dropdown
menu.
- If you installed the [GitLab Workflow VS Code extension](project/repository/vscode.md),
use the [`Gitlab: Create snippet` command](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow#create-snippet).
- **To create a project snippet**: Go to your project's page. Select the
plus icon (**{plus-square-o}**), and then select **New snippet** from the
**This project** section of the dropdown menu.

View File

@ -33,6 +33,11 @@ pre-push:
files: git diff --name-only --diff-filter=d $(git merge-base origin/master HEAD)..HEAD
glob: '*.rb'
run: REVEAL_RUBOCOP_TODO=0 bundle exec rubocop --parallel --force-exclusion {files}
graphql_docs:
tags: documentation
files: git diff --name-only --diff-filter=d $(git merge-base origin/master HEAD)..HEAD
glob: '{app/graphql/**/*.rb,ee/app/graphql/**/*.rb}'
run: bundle exec rake gitlab:graphql:check_docs
vale: # Requires Vale: https://docs.gitlab.com/ee/development/documentation/#install-linters
tags: documentation style
files: git diff --name-only --diff-filter=d $(git merge-base origin/master HEAD)..HEAD

View File

@ -1,17 +1,13 @@
# frozen_string_literal: true
require 'rails/generators'
require 'rails/generators/active_record'
require 'rails/generators/active_record/migration/migration_generator'
module PostDeploymentMigration
class PostDeploymentMigrationGenerator < Rails::Generators::NamedBase
def create_migration_file
timestamp = Time.now.utc.strftime('%Y%m%d%H%M%S')
template "migration.rb", "db/post_migrate/#{timestamp}_#{file_name}.rb"
end
def migration_class_name
file_name.camelize
class PostDeploymentMigrationGenerator < ActiveRecord::Generators::MigrationGenerator
def db_migrate_path
super.sub("migrate", "post_migrate")
end
end
end

View File

@ -14,7 +14,15 @@ module Gitlab
self.table_name = 'issue_metrics'
def self.from_2020
where('EXTRACT(YEAR FROM first_mentioned_in_commit_at) > 2019')
where(first_mentioned_in_commit_at_condition)
end
def self.first_mentioned_in_commit_at_condition
if columns_hash['first_mentioned_in_commit_at'].sql_type == 'timestamp without time zone'
'EXTRACT(YEAR FROM first_mentioned_in_commit_at) > 2019'
else
"EXTRACT(YEAR FROM first_mentioned_in_commit_at at time zone 'UTC') > 2019"
end
end
end
# rubocop: enable Style/Documentation

View File

@ -33,10 +33,14 @@ module Gitlab
# connect_src with 'self' includes https/wss variations of the origin,
# however, safari hasn't covered this yet and we need to explicitly add
# support for websocket origins until Safari catches up with the specs
if Rails.env.development?
allow_webpack_dev_server(directives)
allow_letter_opener(directives)
allow_customersdot(directives) if ENV['CUSTOMER_PORTAL_URL'].present?
end
allow_websocket_connections(directives)
allow_webpack_dev_server(directives) if Rails.env.development?
allow_cdn(directives, Settings.gitlab.cdn_host) if Settings.gitlab.cdn_host.present?
allow_customersdot(directives) if Rails.env.development? && ENV['CUSTOMER_PORTAL_URL'].present?
allow_sentry(directives) if Gitlab.config.sentry&.enabled && Gitlab.config.sentry&.clientside_dsn
# The follow section contains workarounds to patch Safari's lack of support for CSP Level 3
@ -127,10 +131,17 @@ module Gitlab
append_to_directive(directives, 'connect_src', sentry_uri.to_s)
end
def self.allow_letter_opener(directives)
append_to_directive(directives, 'frame_src', Gitlab::Utils.append_path(Gitlab.config.gitlab.url, '/rails/letter_opener/'))
end
# Using 'self' in the CSP introduces several CSP bypass opportunities
# for this reason we list the URLs where GitLab frames itself instead
def self.framed_gitlab_paths
['/admin/sidekiq', '/-/speedscope/index.html'].map do |path|
# We need the version without trailing / for the sidekiq page itself
# and we also need the version with trailing / for "deeper" pages
# like /admin/sidekiq/busy
['/admin/sidekiq', '/admin/sidekiq/', '/-/speedscope/index.html'].map do |path|
Gitlab::Utils.append_path(Gitlab.config.gitlab.url, path)
end
end

View File

@ -3,7 +3,7 @@
module Gitlab
module Database
class UnidirectionalCopyTrigger
def self.on_table(table_name, connection: ActiveRecord::Base.connection)
def self.on_table(table_name, connection:)
new(table_name, connection)
end

View File

@ -25264,9 +25264,6 @@ msgstr ""
msgid "Pipelines|There are currently no pipelines."
msgstr ""
msgid "Pipelines|There was a problem with loading the pipeline data."
msgstr ""
msgid "Pipelines|There was an error fetching the pipelines. Try again in a few moments or contact your support team."
msgstr ""
@ -36340,9 +36337,6 @@ msgstr ""
msgid "Unable to fetch branches list, please close the form and try again"
msgstr ""
msgid "Unable to fetch upstream and downstream pipelines."
msgstr ""
msgid "Unable to fetch vulnerable projects"
msgstr ""

View File

@ -22,6 +22,7 @@ module QA
let(:uri) { URI.parse(Runtime::Scenario.gitlab_address) }
let(:gitlab_host_with_port) { "#{uri.host}:#{uri.port}" }
let(:dependency_proxy_url) { "#{gitlab_host_with_port}/#{project.group.full_path}/dependency_proxy/containers" }
let(:image_sha) { 'alpine@sha256:c3d45491770c51da4ef58318e3714da686bc7165338b7ab5ac758e75c7455efb' }
before do
Flow::Login.sign_in
@ -63,15 +64,15 @@ module QA
apk add --no-cache openssl
true | openssl s_client -showcerts -connect gitlab.test:5050 > /usr/local/share/ca-certificates/gitlab.test.crt
update-ca-certificates
dockerd-entrypoint.sh || exit
dockerd-entrypoint.sh || exit
before_script:
- apk add curl jq grep
- docker login -u "$CI_DEPENDENCY_PROXY_USER" -p "$CI_DEPENDENCY_PROXY_PASSWORD" "$CI_DEPENDENCY_PROXY_SERVER"
script:
- docker pull #{dependency_proxy_url}/alpine:latest
- docker pull #{dependency_proxy_url}/#{image_sha}
- TOKEN=$(curl "https://auth.docker.io/token?service=registry.docker.io&scope=repository:ratelimitpreview/test:pull" | jq --raw-output .token)
- 'curl --head --header "Authorization: Bearer $TOKEN" "https://registry-1.docker.io/v2/ratelimitpreview/test/manifests/latest" 2>&1'
- docker pull #{dependency_proxy_url}/alpine:latest
- docker pull #{dependency_proxy_url}/#{image_sha}
- 'curl --head --header "Authorization: Bearer $TOKEN" "https://registry-1.docker.io/v2/ratelimitpreview/test/manifests/latest" 2>&1'
tags:
- "runner-for-#{project.name}"
@ -95,7 +96,7 @@ module QA
Page::Group::Menu.perform(&:go_to_dependency_proxy)
Page::Group::DependencyProxy.perform do |index|
expect(index).to have_blob_count("Contains 2 blobs of images")
expect(index).to have_blob_count("Contains 1 blobs of images")
end
end
end

View File

@ -388,15 +388,24 @@ describe('diffs/components/app', () => {
wrapper.vm.jumpToFile(+1);
expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual(['diffs/scrollToFile', '222.js']);
expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual([
'diffs/scrollToFile',
{ path: '222.js' },
]);
store.state.diffs.currentDiffFileId = '222';
wrapper.vm.jumpToFile(+1);
expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual(['diffs/scrollToFile', '333.js']);
expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual([
'diffs/scrollToFile',
{ path: '333.js' },
]);
store.state.diffs.currentDiffFileId = '333';
wrapper.vm.jumpToFile(-1);
expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual(['diffs/scrollToFile', '222.js']);
expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual([
'diffs/scrollToFile',
{ path: '222.js' },
]);
});
it('does not jump to previous file from the first one', async () => {

View File

@ -113,7 +113,9 @@ describe('Diffs tree list component', () => {
wrapper.find('.file-row').trigger('click');
expect(wrapper.vm.$store.dispatch).toHaveBeenCalledWith('diffs/scrollToFile', 'app/index.js');
expect(wrapper.vm.$store.dispatch).toHaveBeenCalledWith('diffs/scrollToFile', {
path: 'app/index.js',
});
});
it('renders as file list when renderTreeList is false', () => {

View File

@ -890,7 +890,7 @@ describe('DiffsStoreActions', () => {
},
};
scrollToFile({ state, commit, getters }, 'path');
scrollToFile({ state, commit, getters }, { path: 'path' });
expect(document.location.hash).toBe('#test');
});
@ -904,7 +904,7 @@ describe('DiffsStoreActions', () => {
},
};
scrollToFile({ state, commit, getters }, 'path');
scrollToFile({ state, commit, getters }, { path: 'path' });
expect(commit).toHaveBeenCalledWith(types.VIEW_DIFF_FILE, 'test');
});

View File

@ -279,6 +279,14 @@ describe('common_utils', () => {
top: elementTopWithContext,
});
});
it('passes through behaviour', () => {
commonUtils.scrollToElementWithContext(`#${id}`, { behavior: 'smooth' });
expect(window.scrollTo).toHaveBeenCalledWith({
behavior: 'smooth',
top: elementTopWithContext,
});
});
});
});

View File

@ -1,4 +1,5 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { nextTick } from 'vue';
import Vuex from 'vuex';
import { setHTMLFixture } from 'helpers/fixtures';
import createEventHub from '~/helpers/event_hub_factory';
@ -7,12 +8,15 @@ import eventHub from '~/notes/event_hub';
import discussionNavigation from '~/notes/mixins/discussion_navigation';
import notesModule from '~/notes/stores/modules';
let scrollToFile;
const discussion = (id, index) => ({
id,
resolvable: index % 2 === 0,
active: true,
notes: [{}],
diff_discussion: true,
position: { new_line: 1, old_line: 1 },
diff_file: { file_path: 'test.js' },
});
const createDiscussions = () => [...'abcde'].map(discussion);
const createComponent = () => ({
@ -45,6 +49,7 @@ describe('Discussion navigation mixin', () => {
jest.spyOn(utils, 'scrollToElement');
expandDiscussion = jest.fn();
scrollToFile = jest.fn();
const { actions, ...notesRest } = notesModule();
store = new Vuex.Store({
modules: {
@ -52,6 +57,10 @@ describe('Discussion navigation mixin', () => {
...notesRest,
actions: { ...actions, expandDiscussion },
},
diffs: {
namespaced: true,
actions: { scrollToFile },
},
},
});
store.state.notes.discussions = createDiscussions();
@ -136,6 +145,7 @@ describe('Discussion navigation mixin', () => {
it('scrolls to element', () => {
expect(utils.scrollToElement).toHaveBeenCalledWith(
findDiscussion('div.discussion', expected),
{ behavior: 'smooth' },
);
});
});
@ -163,6 +173,7 @@ describe('Discussion navigation mixin', () => {
expect(utils.scrollToElementWithContext).toHaveBeenCalledWith(
findDiscussion('ul.notes', expected),
{ behavior: 'smooth' },
);
});
});
@ -203,10 +214,45 @@ describe('Discussion navigation mixin', () => {
it('scrolls to discussion', () => {
expect(utils.scrollToElement).toHaveBeenCalledWith(
findDiscussion('div.discussion', expected),
{ behavior: 'smooth' },
);
});
});
});
});
describe.each`
diffsVirtualScrolling
${false}
${true}
`('virtual scrolling feature is $diffsVirtualScrolling', ({ diffsVirtualScrolling }) => {
beforeEach(async () => {
window.gon = { features: { diffsVirtualScrolling } };
jest.spyOn(store, 'dispatch');
store.state.notes.currentDiscussionId = 'a';
window.location.hash = 'test';
wrapper.vm.jumpToNextDiscussion();
await nextTick();
});
afterEach(() => {
window.gon = {};
window.location.hash = '';
});
it('resets location hash if diffsVirtualScrolling flag is true', () => {
expect(window.location.hash).toBe(diffsVirtualScrolling ? '' : '#test');
});
it(`calls scrollToFile with setHash as ${diffsVirtualScrolling ? 'false' : 'true'}`, () => {
expect(store.dispatch).toHaveBeenCalledWith('diffs/scrollToFile', {
path: 'test.js',
setHash: !diffsVirtualScrolling,
});
});
});
});
});

View File

@ -8,5 +8,62 @@ exports[`PackagesListApp renders 1`] = `
/>
<package-search-stub />
<div>
<section
class="row empty-state text-center"
>
<div
class="col-12"
>
<div
class="svg-250 svg-content"
>
<img
alt=""
class="gl-max-w-full"
role="img"
src="emptyListIllustration"
/>
</div>
</div>
<div
class="col-12"
>
<div
class="text-content gl-mx-auto gl-my-0 gl-p-5"
>
<h1
class="h4"
>
There are no packages yet
</h1>
<p>
Learn how to
<b-link-stub
class="gl-link"
event="click"
href="emptyListHelpUrl"
routertag="a"
target="_blank"
>
publish and share your packages
</b-link-stub>
with GitLab.
</p>
<div
class="gl-display-flex gl-flex-wrap gl-justify-content-center"
>
<!---->
<!---->
</div>
</div>
</div>
</section>
</div>
</div>
`;

View File

@ -2,22 +2,25 @@ import { GlEmptyState, GlSprintf, GlLink } from '@gitlab/ui';
import { createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import PackageListApp from '~/packages_and_registries/package_registry/components/list/app.vue';
import PackageTitle from '~/packages_and_registries/package_registry/components/list/package_title.vue';
import PackageSearch from '~/packages_and_registries/package_registry/components/list/package_search.vue';
import OriginalPackageList from '~/packages_and_registries/package_registry/components/list/packages_list.vue';
import {
PROJECT_RESOURCE_TYPE,
GROUP_RESOURCE_TYPE,
LIST_QUERY_DEBOUNCE_TIME,
GRAPHQL_PAGE_SIZE,
} from '~/packages_and_registries/package_registry/constants';
import getPackagesQuery from '~/packages_and_registries/package_registry/graphql/queries/get_packages.query.graphql';
import { packagesListQuery } from '../../mock_data';
import { packagesListQuery, packageData, pagination } from '../../mock_data';
jest.mock('~/lib/utils/common_utils');
jest.mock('~/flash');
@ -39,11 +42,19 @@ describe('PackagesListApp', () => {
const PackageList = {
name: 'package-list',
template: '<div><slot name="empty-state"></slot></div>',
props: OriginalPackageList.props,
};
const GlLoadingIcon = { name: 'gl-loading-icon', template: '<div>loading</div>' };
const searchPayload = {
sort: 'VERSION_DESC',
filters: { packageName: 'foo', packageType: 'CONAN' },
};
const findPackageTitle = () => wrapper.findComponent(PackageTitle);
const findSearch = () => wrapper.findComponent(PackageSearch);
const findListComponent = () => wrapper.findComponent(PackageList);
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
const mountComponent = ({
resolver = jest.fn().mockResolvedValue(packagesListQuery()),
@ -105,25 +116,55 @@ describe('PackagesListApp', () => {
const resolver = jest.fn().mockResolvedValue(packagesListQuery());
mountComponent({ resolver });
const payload = {
sort: 'VERSION_DESC',
filters: { packageName: 'foo', packageType: 'CONAN' },
};
findSearch().vm.$emit('update', payload);
findSearch().vm.$emit('update', searchPayload);
await waitForDebouncedApollo();
jest.advanceTimersByTime(LIST_QUERY_DEBOUNCE_TIME);
expect(resolver).toHaveBeenCalledWith(
expect.objectContaining({
groupSort: payload.sort,
...payload.filters,
groupSort: searchPayload.sort,
...searchPayload.filters,
}),
);
});
});
describe('list component', () => {
let resolver;
beforeEach(() => {
resolver = jest.fn().mockResolvedValue(packagesListQuery());
mountComponent({ resolver });
return waitForDebouncedApollo();
});
it('exists and has the right props', () => {
expect(findListComponent().props()).toMatchObject({
list: expect.arrayContaining([expect.objectContaining({ id: packageData().id })]),
isLoading: false,
pageInfo: expect.objectContaining({ endCursor: pagination().endCursor }),
});
});
it('when list emits next-page fetches the next set of records', () => {
findListComponent().vm.$emit('next-page');
expect(resolver).toHaveBeenCalledWith(
expect.objectContaining({ after: pagination().endCursor, first: GRAPHQL_PAGE_SIZE }),
);
});
it('when list emits prev-page fetches the prev set of records', () => {
findListComponent().vm.$emit('prev-page');
expect(resolver).toHaveBeenCalledWith(
expect.objectContaining({ before: pagination().startCursor, last: GRAPHQL_PAGE_SIZE }),
);
});
});
describe.each`
type | sortType
${PROJECT_RESOURCE_TYPE} | ${'sort'}
@ -136,7 +177,7 @@ describe('PackagesListApp', () => {
beforeEach(() => {
provide = { ...defaultProvide, isGroupPage };
resolver = jest.fn().mockResolvedValue(packagesListQuery(type));
resolver = jest.fn().mockResolvedValue(packagesListQuery({ type }));
mountComponent({ provide, resolver });
return waitForDebouncedApollo();
});
@ -151,4 +192,40 @@ describe('PackagesListApp', () => {
);
});
});
describe('empty state', () => {
beforeEach(() => {
const resolver = jest.fn().mockResolvedValue(packagesListQuery({ extend: { nodes: [] } }));
mountComponent({ resolver });
return waitForDebouncedApollo();
});
it('generate the correct empty list link', () => {
const link = findListComponent().findComponent(GlLink);
expect(link.attributes('href')).toBe(defaultProvide.emptyListHelpUrl);
expect(link.text()).toBe('publish and share your packages');
});
it('includes the right content on the default tab', () => {
expect(findEmptyState().text()).toContain(PackageListApp.i18n.emptyPageTitle);
});
});
describe('filter without results', () => {
beforeEach(async () => {
mountComponent();
await waitForDebouncedApollo();
findSearch().vm.$emit('update', searchPayload);
return nextTick();
});
it('should show specific empty message', () => {
expect(findEmptyState().text()).toContain(PackageListApp.i18n.noResultsTitle);
expect(findEmptyState().text()).toContain(PackageListApp.i18n.widenFilters);
});
});
});

View File

@ -1,93 +1,86 @@
import { GlTable, GlPagination, GlModal } from '@gitlab/ui';
import { mount, createLocalVue } from '@vue/test-utils';
import { last } from 'lodash';
import Vuex from 'vuex';
import stubChildren from 'helpers/stub_children';
import { packageList } from 'jest/packages/mock_data';
import { GlKeysetPagination, GlModal, GlSprintf } from '@gitlab/ui';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import PackagesListRow from '~/packages/shared/components/package_list_row.vue';
import PackagesListLoader from '~/packages/shared/components/packages_list_loader.vue';
import { TrackingActions } from '~/packages/shared/constants';
import * as SharedUtils from '~/packages/shared/utils';
import {
DELETE_PACKAGE_TRACKING_ACTION,
REQUEST_DELETE_PACKAGE_TRACKING_ACTION,
CANCEL_DELETE_PACKAGE_TRACKING_ACTION,
} from '~/packages_and_registries/package_registry/constants';
import PackagesList from '~/packages_and_registries/package_registry/components/list/packages_list.vue';
import Tracking from '~/tracking';
const localVue = createLocalVue();
localVue.use(Vuex);
import { packageData } from '../../mock_data';
describe('packages_list', () => {
let wrapper;
let store;
const firstPackage = packageData();
const secondPackage = {
...packageData(),
id: 'gid://gitlab/Packages::Package/112',
name: 'second-package',
};
const defaultProps = {
list: [firstPackage, secondPackage],
isLoading: false,
pageInfo: {},
};
const EmptySlotStub = { name: 'empty-slot-stub', template: '<div>bar</div>' };
const findPackagesListLoader = () => wrapper.find(PackagesListLoader);
const findPackageListPagination = () => wrapper.find(GlPagination);
const findPackageListDeleteModal = () => wrapper.find(GlModal);
const findEmptySlot = () => wrapper.find(EmptySlotStub);
const findPackagesListRow = () => wrapper.find(PackagesListRow);
const createStore = (isGroupPage, packages, isLoading) => {
const state = {
isLoading,
packages,
pagination: {
perPage: 1,
total: 1,
page: 1,
},
config: {
isGroupPage,
},
sorting: {
orderBy: 'version',
sort: 'desc',
},
};
store = new Vuex.Store({
state,
getters: {
getList: () => packages,
},
});
store.dispatch = jest.fn();
const GlModalStub = {
name: GlModal.name,
template: '<div><slot></slot></div>',
methods: { show: jest.fn() },
};
const mountComponent = ({
isGroupPage = false,
packages = packageList,
isLoading = false,
...options
} = {}) => {
createStore(isGroupPage, packages, isLoading);
const findPackagesListLoader = () => wrapper.findComponent(PackagesListLoader);
const findPackageListPagination = () => wrapper.findComponent(GlKeysetPagination);
const findPackageListDeleteModal = () => wrapper.findComponent(GlModalStub);
const findEmptySlot = () => wrapper.findComponent(EmptySlotStub);
const findPackagesListRow = () => wrapper.findComponent(PackagesListRow);
wrapper = mount(PackagesList, {
localVue,
store,
const mountComponent = (props) => {
wrapper = shallowMountExtended(PackagesList, {
propsData: {
...defaultProps,
...props,
},
stubs: {
...stubChildren(PackagesList),
GlTable,
GlModal,
GlModal: GlModalStub,
GlSprintf,
},
slots: {
'empty-state': EmptySlotStub,
},
...options,
});
};
beforeEach(() => {
GlModalStub.methods.show.mockReset();
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
describe('when is loading', () => {
beforeEach(() => {
mountComponent({
packages: [],
isLoading: true,
});
mountComponent({ isLoading: true });
});
it('shows skeleton loader when loading', () => {
it('shows skeleton loader', () => {
expect(findPackagesListLoader().exists()).toBe(true);
});
it('does not show the rows', () => {
expect(findPackagesListRow().exists()).toBe(false);
});
it('does not show the pagination', () => {
expect(findPackageListPagination().exists()).toBe(false);
});
});
describe('when is not loading', () => {
@ -95,74 +88,68 @@ describe('packages_list', () => {
mountComponent();
});
it('does not show skeleton loader when not loading', () => {
it('does not show skeleton loader', () => {
expect(findPackagesListLoader().exists()).toBe(false);
});
it('shows the rows', () => {
expect(findPackagesListRow().exists()).toBe(true);
});
});
describe('layout', () => {
beforeEach(() => {
mountComponent();
});
it('contains a pagination component', () => {
const sorting = findPackageListPagination();
expect(sorting.exists()).toBe(true);
mountComponent({ pageInfo: { hasPreviousPage: true } });
expect(findPackageListPagination().exists()).toBe(true);
});
it('contains a modal component', () => {
const sorting = findPackageListDeleteModal();
expect(sorting.exists()).toBe(true);
mountComponent();
expect(findPackageListDeleteModal().exists()).toBe(true);
});
});
describe('when the user can destroy the package', () => {
beforeEach(() => {
mountComponent();
findPackagesListRow().vm.$emit('packageToDelete', firstPackage);
return nextTick();
});
it('setItemToBeDeleted sets itemToBeDeleted and open the modal', () => {
const mockModalShow = jest.spyOn(wrapper.vm.$refs.packageListDeleteModal, 'show');
const item = last(wrapper.vm.list);
findPackagesListRow().vm.$emit('packageToDelete', item);
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.itemToBeDeleted).toEqual(item);
expect(mockModalShow).toHaveBeenCalled();
});
it('deleting a package opens the modal', () => {
expect(findPackageListDeleteModal().text()).toContain(firstPackage.name);
});
it('deleteItemConfirmation resets itemToBeDeleted', () => {
wrapper.setData({ itemToBeDeleted: 1 });
wrapper.vm.deleteItemConfirmation();
expect(wrapper.vm.itemToBeDeleted).toEqual(null);
it('confirming delete empties itemsToBeDeleted', async () => {
findPackageListDeleteModal().vm.$emit('ok');
await nextTick();
expect(findPackageListDeleteModal().text()).not.toContain(firstPackage.name);
});
it('deleteItemConfirmation emit package:delete', () => {
const itemToBeDeleted = { id: 2 };
wrapper.setData({ itemToBeDeleted });
wrapper.vm.deleteItemConfirmation();
return wrapper.vm.$nextTick(() => {
expect(wrapper.emitted('package:delete')[0]).toEqual([itemToBeDeleted]);
});
it('confirming on the modal emits package:delete', async () => {
findPackageListDeleteModal().vm.$emit('ok');
await nextTick();
expect(wrapper.emitted('package:delete')[0]).toEqual([firstPackage]);
});
it('deleteItemCanceled resets itemToBeDeleted', () => {
wrapper.setData({ itemToBeDeleted: 1 });
wrapper.vm.deleteItemCanceled();
expect(wrapper.vm.itemToBeDeleted).toEqual(null);
it('cancel event resets itemToBeDeleted', async () => {
findPackageListDeleteModal().vm.$emit('cancel');
await nextTick();
expect(findPackageListDeleteModal().text()).not.toContain(firstPackage.name);
});
});
describe('when the list is empty', () => {
beforeEach(() => {
mountComponent({
packages: [],
slots: {
'empty-state': EmptySlotStub,
},
});
mountComponent({ list: [] });
});
it('show the empty slot', () => {
@ -171,45 +158,59 @@ describe('packages_list', () => {
});
});
describe('pagination component', () => {
let pagination;
let modelEvent;
describe('pagination ', () => {
beforeEach(() => {
mountComponent();
pagination = findPackageListPagination();
// retrieve the event used by v-model, a more sturdy approach than hardcoding it
modelEvent = pagination.vm.$options.model.event;
mountComponent({ pageInfo: { hasPreviousPage: true } });
});
it('emits page:changed events when the page changes', () => {
pagination.vm.$emit(modelEvent, 2);
expect(wrapper.emitted('page:changed')).toEqual([[2]]);
it('emits prev-page events when the prev event is fired', () => {
findPackageListPagination().vm.$emit('prev');
expect(wrapper.emitted('prev-page')).toEqual([[]]);
});
it('emits next-page events when the next event is fired', () => {
findPackageListPagination().vm.$emit('next');
expect(wrapper.emitted('next-page')).toEqual([[]]);
});
});
describe('tracking', () => {
let eventSpy;
let utilSpy;
const category = 'foo';
const category = 'UI::NpmPackages';
beforeEach(() => {
mountComponent();
eventSpy = jest.spyOn(Tracking, 'event');
utilSpy = jest.spyOn(SharedUtils, 'packageTypeToTrackCategory').mockReturnValue(category);
wrapper.setData({ itemToBeDeleted: { package_type: 'conan' } });
mountComponent();
findPackagesListRow().vm.$emit('packageToDelete', firstPackage);
return nextTick();
});
it('tracking category calls packageTypeToTrackCategory', () => {
expect(wrapper.vm.tracking.category).toBe(category);
expect(utilSpy).toHaveBeenCalledWith('conan');
});
it('deleteItemConfirmation calls event', () => {
wrapper.vm.deleteItemConfirmation();
it('requesting the delete tracks the right action', () => {
expect(eventSpy).toHaveBeenCalledWith(
category,
TrackingActions.DELETE_PACKAGE,
REQUEST_DELETE_PACKAGE_TRACKING_ACTION,
expect.any(Object),
);
});
it('confirming delete tracks the right action', () => {
findPackageListDeleteModal().vm.$emit('ok');
expect(eventSpy).toHaveBeenCalledWith(
category,
DELETE_PACKAGE_TRACKING_ACTION,
expect.any(Object),
);
});
it('canceling delete tracks the right action', () => {
findPackageListDeleteModal().vm.$emit('cancel');
expect(eventSpy).toHaveBeenCalledWith(
category,
CANCEL_DELETE_PACKAGE_TRACKING_ACTION,
expect.any(Object),
);
});

View File

@ -1,3 +1,5 @@
import capitalize from 'lodash/capitalize';
export const packageTags = () => [
{ id: 'gid://gitlab/Packages::Tag/87', name: 'bananas_9', __typename: 'PackageTag' },
{ id: 'gid://gitlab/Packages::Tag/86', name: 'bananas_8', __typename: 'PackageTag' },
@ -156,6 +158,15 @@ export const nugetMetadata = () => ({
projectUrl: 'projectUrl',
});
export const pagination = (extend) => ({
endCursor: 'eyJpZCI6IjIwNSIsIm5hbWUiOiJteS9jb21wYW55L2FwcC9teS1hcHAifQ',
hasNextPage: true,
hasPreviousPage: true,
startCursor: 'eyJpZCI6IjI0NyIsIm5hbWUiOiJ2ZXJzaW9uX3Rlc3QxIn0',
__typename: 'PageInfo',
...extend,
});
export const packageDetailsQuery = (extendPackage) => ({
data: {
package: {
@ -256,7 +267,7 @@ export const packageDestroyFileMutationError = () => ({
],
});
export const packagesListQuery = (type = 'group') => ({
export const packagesListQuery = ({ type = 'group', extend = {}, extendPagination = {} } = {}) => ({
data: {
[type]: {
packages: {
@ -277,9 +288,11 @@ export const packagesListQuery = (type = 'group') => ({
pipelines: { nodes: [] },
},
],
pageInfo: pagination(extendPagination),
__typename: 'PackageConnection',
},
__typename: 'Group',
...extend,
__typename: capitalize(type),
},
},
});

View File

@ -1,58 +1,22 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMount } from '@vue/test-utils';
import PipelineEditorMiniGraph from '~/pipeline_editor/components/header/pipeline_editor_mini_graph.vue';
import PipelineMiniGraph from '~/pipelines/components/pipelines_list/pipeline_mini_graph.vue';
import getLinkedPipelinesQuery from '~/projects/commit_box/info/graphql/queries/get_linked_pipelines.query.graphql';
import { PIPELINE_FAILURE } from '~/pipeline_editor/constants';
import { mockLinkedPipelines, mockProjectFullPath, mockProjectPipeline } from '../../mock_data';
const localVue = createLocalVue();
localVue.use(VueApollo);
import { mockProjectPipeline } from '../../mock_data';
describe('Pipeline Status', () => {
let wrapper;
let mockApollo;
let mockLinkedPipelinesQuery;
const createComponent = ({ hasStages = true, options } = {}) => {
const createComponent = ({ hasStages = true } = {}) => {
wrapper = shallowMount(PipelineEditorMiniGraph, {
provide: {
dataMethod: 'graphql',
projectFullPath: mockProjectFullPath,
},
propsData: {
pipeline: mockProjectPipeline({ hasStages }).pipeline,
},
...options,
});
};
const createComponentWithApollo = (hasStages = true) => {
const handlers = [[getLinkedPipelinesQuery, mockLinkedPipelinesQuery]];
mockApollo = createMockApollo(handlers);
createComponent({
hasStages,
options: {
localVue,
apolloProvider: mockApollo,
},
});
};
const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph);
const findUpstream = () => wrapper.find('[data-testid="pipeline-editor-mini-graph-upstream"]');
const findDownstream = () =>
wrapper.find('[data-testid="pipeline-editor-mini-graph-downstream"]');
beforeEach(() => {
mockLinkedPipelinesQuery = jest.fn();
});
afterEach(() => {
mockLinkedPipelinesQuery.mockReset();
wrapper.destroy();
});
@ -75,60 +39,4 @@ describe('Pipeline Status', () => {
expect(findPipelineMiniGraph().exists()).toBe(false);
});
});
describe('when querying upstream and downstream pipelines', () => {
describe('when query succeeds', () => {
beforeEach(() => {
mockLinkedPipelinesQuery.mockResolvedValue(mockLinkedPipelines());
createComponentWithApollo();
});
it('should call the query with the correct variables', () => {
expect(mockLinkedPipelinesQuery).toHaveBeenCalledTimes(1);
expect(mockLinkedPipelinesQuery).toHaveBeenCalledWith({
fullPath: mockProjectFullPath,
iid: mockProjectPipeline().pipeline.iid,
});
});
describe('linked pipeline rendering based on given data', () => {
it.each`
hasDownstream | hasUpstream | downstreamRenderAction | upstreamRenderAction
${true} | ${true} | ${'renders'} | ${'renders'}
${true} | ${false} | ${'renders'} | ${'hides'}
${false} | ${true} | ${'hides'} | ${'renders'}
${false} | ${false} | ${'hides'} | ${'hides'}
`(
'$downstreamRenderAction downstream and $upstreamRenderAction upstream',
async ({ hasDownstream, hasUpstream }) => {
mockLinkedPipelinesQuery.mockResolvedValue(
mockLinkedPipelines({ hasDownstream, hasUpstream }),
);
createComponentWithApollo();
await waitForPromises();
expect(findUpstream().exists()).toBe(hasUpstream);
expect(findDownstream().exists()).toBe(hasDownstream);
},
);
});
});
describe('when query fails', () => {
beforeEach(() => {
mockLinkedPipelinesQuery.mockRejectedValue(new Error());
createComponentWithApollo();
});
it('should emit an error event when query fails', async () => {
expect(wrapper.emitted('showError')).toHaveLength(1);
expect(wrapper.emitted('showError')[0]).toEqual([
{
type: PIPELINE_FAILURE,
reasons: [wrapper.vm.$options.i18n.linkedPipelinesFetchError],
},
]);
});
});
});
});

View File

@ -11,7 +11,6 @@ import {
DEFAULT_FAILURE,
DEFAULT_SUCCESS,
LOAD_FAILURE_UNKNOWN,
PIPELINE_FAILURE,
} from '~/pipeline_editor/constants';
beforeEach(() => {
@ -66,7 +65,6 @@ describe('Pipeline Editor messages', () => {
failureType | message | expectedFailureType
${COMMIT_FAILURE} | ${'failed commit'} | ${COMMIT_FAILURE}
${LOAD_FAILURE_UNKNOWN} | ${'loading failure'} | ${LOAD_FAILURE_UNKNOWN}
${PIPELINE_FAILURE} | ${'pipeline failure'} | ${PIPELINE_FAILURE}
${'random'} | ${'error without a specified type'} | ${DEFAULT_FAILURE}
`('shows a message for $message', ({ failureType, expectedFailureType }) => {
createComponent({ failureType, showFailure: true });

View File

@ -290,62 +290,6 @@ export const mockProjectPipeline = ({ hasStages = true } = {}) => {
};
};
export const mockLinkedPipelines = ({ hasDownstream = true, hasUpstream = true } = {}) => {
let upstream = null;
let downstream = {
nodes: [],
__typename: 'PipelineConnection',
};
if (hasDownstream) {
downstream = {
nodes: [
{
id: 'gid://gitlab/Ci::Pipeline/612',
path: '/root/job-log-sections/-/pipelines/612',
project: { name: 'job-log-sections', __typename: 'Project' },
detailedStatus: {
group: 'success',
icon: 'status_success',
label: 'passed',
__typename: 'DetailedStatus',
},
__typename: 'Pipeline',
},
],
__typename: 'PipelineConnection',
};
}
if (hasUpstream) {
upstream = {
id: 'gid://gitlab/Ci::Pipeline/610',
path: '/root/trigger-downstream/-/pipelines/610',
project: { name: 'trigger-downstream', __typename: 'Project' },
detailedStatus: {
group: 'success',
icon: 'status_success',
label: 'passed',
__typename: 'DetailedStatus',
},
__typename: 'Pipeline',
};
}
return {
data: {
project: {
pipeline: {
path: '/root/ci-project/-/pipelines/790',
downstream,
upstream,
},
__typename: 'Project',
},
},
};
};
export const mockLintResponse = {
valid: true,
mergedYaml: mockCiYml,

View File

@ -12,6 +12,10 @@ RSpec.describe GitlabSchema.types['MergeRequestReviewState'] do
'UNREVIEWED' => have_attributes(
description: 'The merge request is unreviewed.',
value: 'unreviewed'
),
'ATTENTION_REQUIRED' => have_attributes(
description: 'The merge request is attention_required.',
value: 'attention_required'
)
)
end

View File

@ -1,6 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20211004110500_add_temporary_index_to_issue_metrics.rb')
RSpec.describe Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt, :migration, schema: 20211004110500 do
let(:namespaces) { table(:namespaces) }
@ -99,42 +100,67 @@ RSpec.describe Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt, :migrat
.perform(issue_metrics.minimum(:issue_id), issue_metrics.maximum(:issue_id))
end
it "marks successful slices as completed" do
min_issue_id = issue_metrics.minimum(:issue_id)
max_issue_id = issue_metrics.maximum(:issue_id)
shared_examples 'fixes first_mentioned_in_commit_at' do
it "marks successful slices as completed" do
min_issue_id = issue_metrics.minimum(:issue_id)
max_issue_id = issue_metrics.maximum(:issue_id)
expect(subject).to receive(:mark_job_as_succeeded).with(min_issue_id, max_issue_id)
expect(subject).to receive(:mark_job_as_succeeded).with(min_issue_id, max_issue_id)
subject.perform(min_issue_id, max_issue_id)
end
subject.perform(min_issue_id, max_issue_id)
end
context 'when the persisted first_mentioned_in_commit_at is later than the first commit authored_date' do
it 'updates the issue_metrics record' do
record1 = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: Time.current)
record2 = issue_metrics.create!(issue_id: issue2.id, first_mentioned_in_commit_at: Time.current)
context 'when the persisted first_mentioned_in_commit_at is later than the first commit authored_date' do
it 'updates the issue_metrics record' do
record1 = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: Time.current)
record2 = issue_metrics.create!(issue_id: issue2.id, first_mentioned_in_commit_at: Time.current)
run_migration
record1.reload
record2.reload
run_migration
record1.reload
record2.reload
expect(record1.first_mentioned_in_commit_at).to be_within(2.seconds).of(commit2.authored_date)
expect(record2.first_mentioned_in_commit_at).to be_within(2.seconds).of(commit3.authored_date)
expect(record1.first_mentioned_in_commit_at).to be_within(2.seconds).of(commit2.authored_date)
expect(record2.first_mentioned_in_commit_at).to be_within(2.seconds).of(commit3.authored_date)
end
end
context 'when the persisted first_mentioned_in_commit_at is earlier than the first commit authored_date' do
it 'does not update the issue_metrics record' do
record = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: 20.days.ago)
expect { run_migration }.not_to change { record.reload.first_mentioned_in_commit_at }
end
end
context 'when the first_mentioned_in_commit_at is null' do
it 'does nothing' do
record = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: nil)
expect { run_migration }.not_to change { record.reload.first_mentioned_in_commit_at }
end
end
end
context 'when the persisted first_mentioned_in_commit_at is earlier than the first commit authored_date' do
it 'does not update the issue_metrics record' do
record = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: 20.days.ago)
expect { run_migration }.not_to change { record.reload.first_mentioned_in_commit_at }
end
describe 'running the migration when first_mentioned_in_commit_at is timestamp without time zone' do
it_behaves_like 'fixes first_mentioned_in_commit_at'
end
context 'when the first_mentioned_in_commit_at is null' do
it 'does nothing' do
record = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: nil)
describe 'running the migration when first_mentioned_in_commit_at is timestamp with time zone' do
around do |example|
AddTemporaryIndexToIssueMetrics.new.down
expect { run_migration }.not_to change { record.reload.first_mentioned_in_commit_at }
ActiveRecord::Base.connection.execute "ALTER TABLE issue_metrics ALTER first_mentioned_in_commit_at type timestamp with time zone"
Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt::TmpIssueMetrics.reset_column_information
AddTemporaryIndexToIssueMetrics.new.up
example.run
AddTemporaryIndexToIssueMetrics.new.down
ActiveRecord::Base.connection.execute "ALTER TABLE issue_metrics ALTER first_mentioned_in_commit_at type timestamp without time zone"
Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt::TmpIssueMetrics.reset_column_information
AddTemporaryIndexToIssueMetrics.new.up
end
it_behaves_like 'fixes first_mentioned_in_commit_at'
end
end

View File

@ -99,8 +99,10 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
context 'when CUSTOMER_PORTAL_URL is set' do
let(:customer_portal_url) { 'https://customers.example.com' }
before do
stub_env('CUSTOMER_PORTAL_URL', 'https://customers.example.com')
stub_env('CUSTOMER_PORTAL_URL', customer_portal_url)
end
context 'when in production' do
@ -109,7 +111,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
it 'does not add CUSTOMER_PORTAL_URL to CSP' do
expect(directives['frame_src']).to eq("http://localhost/admin/sidekiq http://localhost/-/speedscope/index.html https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com")
expect(directives['frame_src']).to eq("http://localhost/admin/sidekiq http://localhost/admin/sidekiq/ http://localhost/-/speedscope/index.html https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com")
end
end
@ -119,7 +121,36 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
it 'adds CUSTOMER_PORTAL_URL to CSP' do
expect(directives['frame_src']).to eq("http://localhost/admin/sidekiq http://localhost/-/speedscope/index.html https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com https://customers.example.com")
expect(directives['frame_src']).to eq("http://localhost/admin/sidekiq http://localhost/admin/sidekiq/ http://localhost/-/speedscope/index.html https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com http://localhost/rails/letter_opener/ https://customers.example.com")
end
end
end
context 'letter_opener applicaiton URL' do
let(:gitlab_url) { 'http://gitlab.example.com' }
let(:letter_opener_url) { "#{gitlab_url}/rails/letter_opener/" }
before do
stub_config_setting(url: gitlab_url)
end
context 'when in production' do
before do
allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('production'))
end
it 'does not add letter_opener to CSP' do
expect(directives['frame_src']).not_to include(letter_opener_url)
end
end
context 'when in development' do
before do
allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development'))
end
it 'adds letter_opener to CSP' do
expect(directives['frame_src']).to include(letter_opener_url)
end
end
end
@ -129,22 +160,22 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
context 'generates URLs to be added to child-src' do
it 'with insecure domain' do
stub_config_setting(url: 'http://example.com')
expect(described_class.framed_gitlab_paths).to eq(%w[http://example.com/admin/sidekiq http://example.com/-/speedscope/index.html])
expect(described_class.framed_gitlab_paths).to eq(%w[http://example.com/admin/sidekiq http://example.com/admin/sidekiq/ http://example.com/-/speedscope/index.html])
end
it 'with secure domain' do
stub_config_setting(url: 'https://example.com')
expect(described_class.framed_gitlab_paths).to eq(%w[https://example.com/admin/sidekiq https://example.com/-/speedscope/index.html])
expect(described_class.framed_gitlab_paths).to eq(%w[https://example.com/admin/sidekiq https://example.com/admin/sidekiq/ https://example.com/-/speedscope/index.html])
end
it 'with custom port' do
stub_config_setting(url: 'http://example.com:1234')
expect(described_class.framed_gitlab_paths).to eq(%w[http://example.com:1234/admin/sidekiq http://example.com:1234/-/speedscope/index.html])
expect(described_class.framed_gitlab_paths).to eq(%w[http://example.com:1234/admin/sidekiq http://example.com:1234/admin/sidekiq/ http://example.com:1234/-/speedscope/index.html])
end
it 'with custom port and secure domain' do
stub_config_setting(url: 'https://example.com:1234')
expect(described_class.framed_gitlab_paths).to eq(%w[https://example.com:1234/admin/sidekiq https://example.com:1234/-/speedscope/index.html])
expect(described_class.framed_gitlab_paths).to eq(%w[https://example.com:1234/admin/sidekiq https://example.com:1234/admin/sidekiq/ https://example.com:1234/-/speedscope/index.html])
end
end
end

View File

@ -7,7 +7,7 @@ RSpec.describe Gitlab::Database::UnidirectionalCopyTrigger do
let(:table_name) { '_test_table' }
let(:connection) { ActiveRecord::Base.connection }
let(:copy_trigger) { described_class.on_table(table_name) }
let(:copy_trigger) { described_class.on_table(table_name, connection: connection) }
describe '#name' do
context 'when a single column name is given' do

View File

@ -1290,7 +1290,7 @@ RSpec.describe Ci::Build do
end
end
shared_examples_for 'state transition as a deployable' do
describe 'state transition as a deployable' do
subject { build.send(event) }
let!(:build) { create(:ci_build, :with_deployment, :start_review_app, project: project, pipeline: pipeline) }
@ -1332,6 +1332,22 @@ RSpec.describe Ci::Build do
expect(deployment).to be_running
end
context 'when deployment is already running state' do
before do
build.deployment.success!
end
it 'does not change deployment status and tracks an error' do
expect(Gitlab::ErrorTracking)
.to receive(:track_exception).with(
instance_of(Deployment::StatusSyncError), deployment_id: deployment.id, build_id: build.id)
with_cross_database_modification_prevented do
expect { subject }.not_to change { deployment.reload.status }
end
end
end
end
context 'when transits to success' do
@ -1399,36 +1415,6 @@ RSpec.describe Ci::Build do
end
end
it_behaves_like 'state transition as a deployable' do
context 'when transits to running' do
let(:event) { :run! }
context 'when deployment is already running state' do
before do
build.deployment.success!
end
it 'does not change deployment status and tracks an error' do
expect(Gitlab::ErrorTracking)
.to receive(:track_exception).with(
instance_of(Deployment::StatusSyncError), deployment_id: deployment.id, build_id: build.id)
with_cross_database_modification_prevented do
expect { subject }.not_to change { deployment.reload.status }
end
end
end
end
end
context 'when update_deployment_after_transaction_commit feature flag is disabled' do
before do
stub_feature_flags(update_deployment_after_transaction_commit: false)
end
it_behaves_like 'state transition as a deployable'
end
describe '#on_stop' do
subject { build.on_stop }

View File

@ -37,4 +37,6 @@ RSpec.describe MergeRequestAssignee do
end
end
end
it_behaves_like 'having unique enum values'
end

View File

@ -7,6 +7,8 @@ RSpec.describe MergeRequestReviewer do
subject { merge_request.merge_request_reviewers.build(reviewer: create(:user)) }
it_behaves_like 'having unique enum values'
describe 'associations' do
it { is_expected.to belong_to(:merge_request).class_name('MergeRequest') }
it { is_expected.to belong_to(:reviewer).class_name('User').inverse_of(:merge_request_reviewers) }

View File

@ -102,10 +102,15 @@ module Database
schemas = Database::GitlabSchema.table_schemas(all_tables)
if schemas.many?
raise Database::PreventCrossDatabaseModification::CrossDatabaseModificationAcrossUnsupportedTablesError,
"Cross-database data modification of '#{schemas.to_a.join(", ")}' were detected within " \
message = "Cross-database data modification of '#{schemas.to_a.join(", ")}' were detected within " \
"a transaction modifying the '#{all_tables.to_a.join(", ")}' tables." \
"Please refer to https://docs.gitlab.com/ee/development/database/multiple_databases.html#removing-cross-database-transactions for details on how to resolve this exception."
if schemas.any? { |s| s.to_s.start_with?("undefined") }
message += " The gitlab_schema was undefined for one or more of the tables in this transaction. Any new tables must be added to spec/support/database/gitlab_schemas.yml ."
end
raise Database::PreventCrossDatabaseModification::CrossDatabaseModificationAcrossUnsupportedTablesError, message
end
end
end

View File

@ -68,6 +68,17 @@ RSpec.describe 'Database::PreventCrossDatabaseModification' do
end
end.to raise_error /Cross-database data modification/
end
it 'raises an error when an undefined gitlab_schema table is modified with another table' do
expect do
with_cross_database_modification_prevented do
Project.transaction do
project.touch
project.connection.execute('UPDATE foo_bars_undefined_table SET a=1 WHERE id = -1')
end
end
end.to raise_error /Cross-database data modification.*The gitlab_schema was undefined/
end
end
context 'when running tests with prevent_cross_database_modification', :prevent_cross_database_modification do

View File

@ -68,71 +68,4 @@ RSpec.describe Tooling::Danger::ProductIntelligence do
it { is_expected.to be_empty }
end
end
describe '#matching_changed_files' do
subject { product_intelligence.matching_changed_files }
let(:changed_files) do
[
'dashboard/todos_controller.rb',
'components/welcome.vue',
'admin/groups/_form.html.haml'
]
end
context 'with snowplow files changed' do
context 'when vue file changed' do
let(:changed_lines) { ['+data-track-action'] }
it { is_expected.to match_array(['components/welcome.vue']) }
end
context 'when haml file changed' do
let(:changed_lines) { ['+ data: { track_label:'] }
it { is_expected.to match_array(['admin/groups/_form.html.haml']) }
end
context 'when ruby file changed' do
let(:changed_lines) { ['+ Gitlab::Tracking.event'] }
let(:changed_files) { ['dashboard/todos_controller.rb', 'admin/groups/_form.html.haml'] }
it { is_expected.to match_array(['dashboard/todos_controller.rb']) }
end
end
context 'with metrics files changed' do
let(:changed_files) { ['config/metrics/counts_7d/test_metric.yml', 'ee/config/metrics/counts_7d/ee_metric.yml'] }
it { is_expected.to match_array(changed_files) }
end
context 'with metrics files not changed' do
it { is_expected.to be_empty }
end
context 'with tracking files changed' do
let(:changed_files) do
[
'lib/gitlab/tracking.rb',
'spec/lib/gitlab/tracking_spec.rb',
'app/helpers/tracking_helper.rb'
]
end
it { is_expected.to match_array(changed_files) }
end
context 'with usage_data files changed' do
let(:changed_files) do
[
'doc/api/usage_data.md',
'ee/lib/ee/gitlab/usage_data.rb',
'spec/lib/gitlab/usage_data_spec.rb'
]
end
it { is_expected.to match_array(changed_files) }
end
end
end

View File

@ -40,7 +40,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do
using RSpec::Parameterized::TableSyntax
before do
allow(fake_git).to receive(:diff_for_file).with('usage_data.rb') { double(:diff, patch: "+ count(User.active)") }
allow(fake_git).to receive(:diff_for_file).with(instance_of(String)) { double(:diff, patch: "+ count(User.active)") }
end
where(:path, :expected_categories) do
@ -189,6 +189,10 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'spec/frontend/tracking/foo.js' | [:frontend, :product_intelligence]
'spec/frontend/tracking_spec.js' | [:frontend, :product_intelligence]
'lib/gitlab/usage_database/foo.rb' | [:backend]
'config/metrics/counts_7d/test_metric.yml' | [:product_intelligence]
'config/metrics/schema.json' | [:product_intelligence]
'doc/api/usage_data.md' | [:product_intelligence]
'spec/lib/gitlab/usage_data_spec.rb' | [:product_intelligence]
end
with_them do
@ -199,6 +203,9 @@ RSpec.describe Tooling::Danger::ProjectHelper do
context 'having specific changes' do
where(:expected_categories, :patch, :changed_files) do
[:product_intelligence] | '+data-track-action' | ['components/welcome.vue']
[:product_intelligence] | '+ data: { track_label:' | ['admin/groups/_form.html.haml']
[:product_intelligence] | '+ Gitlab::Tracking.event' | ['dashboard/todos_controller.rb', 'admin/groups/_form.html.haml']
[:database, :backend, :product_intelligence] | '+ count(User.active)' | ['usage_data.rb', 'lib/gitlab/usage_data.rb', 'ee/lib/ee/gitlab/usage_data.rb']
[:database, :backend, :product_intelligence] | '+ estimate_batch_distinct_count(User.active)' | ['usage_data.rb']
[:backend, :product_intelligence] | '+ alt_usage_data(User.active)' | ['lib/gitlab/usage_data.rb']

View File

@ -9,26 +9,6 @@ module Tooling
'product intelligence::review pending'
].freeze
TRACKING_FILES = [
'lib/gitlab/tracking.rb',
'spec/lib/gitlab/tracking_spec.rb',
'app/helpers/tracking_helper.rb',
'spec/helpers/tracking_helper_spec.rb',
'app/assets/javascripts/tracking/index.js',
'app/assets/javascripts/tracking/constants.js',
'app/assets/javascripts/tracking/get_standard_context.js',
'spec/frontend/tracking/get_standard_context_spec.js',
'spec/frontend/tracking_spec.js',
'generator_templates/usage_metric_definition/metric_definition.yml',
'lib/generators/gitlab/usage_metric/usage_metric_generator.rb',
'lib/generators/gitlab/usage_metric_definition_generator.rb',
'lib/generators/gitlab/usage_metric_definition/redis_hll_generator.rb',
'spec/lib/generators/gitlab/usage_metric_generator_spec.rb',
'spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb',
'spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb',
'config/metrics/schema.json'
].freeze
def missing_labels
return [] if !helper.ci? || helper.mr_has_labels?('growth experiment')
@ -38,43 +18,6 @@ module Tooling
labels
end
def matching_changed_files
tracking_changed_files = all_changed_files & TRACKING_FILES
usage_data_changed_files = all_changed_files.grep(%r{(usage_data)})
usage_data_changed_files + tracking_changed_files + metrics_changed_files + snowplow_changed_files
end
private
def all_changed_files
helper.all_changed_files
end
def metrics_changed_files
all_changed_files.grep(%r{((ee/)?config/metrics/.*\.yml)})
end
def matching_files?(file, extension:, pattern:)
return unless file.end_with?(extension)
helper.changed_lines(file).grep(pattern).any?
end
def snowplow_changed_files
js_patterns = Regexp.union(
'Tracking.event',
/\btrack\(/,
'data-track-action'
)
all_changed_files.select do |file|
matching_files?(file, extension: '.rb', pattern: %r{Gitlab::Tracking\.(event|enabled\?|options)$}) ||
matching_files?(file, extension: '.js', pattern: js_patterns) ||
matching_files?(file, extension: '.vue', pattern: js_patterns) ||
matching_files?(file, extension: '.haml', pattern: %r{data: \{ track})
end
end
end
end
end

View File

@ -38,6 +38,8 @@ module Tooling
%r{\A((ee|jh)/)?config/feature_flags/} => :feature_flag,
%r{doc/api/usage_data.md} => [:product_intelligence],
%r{\Adoc/.*(\.(md|png|gif|jpg|yml))\z} => :docs,
%r{\A(CONTRIBUTING|LICENSE|MAINTENANCE|PHILOSOPHY|PROCESS|README)(\.md)?\z} => :docs,
%r{\Adata/whats_new/} => :docs,
@ -100,6 +102,7 @@ module Tooling
%r{\A((ee|jh)/)?spec/support/shared_contexts/features/} => :test,
%r{\A((ee|jh)/)?spec/support/helpers/features/} => :test,
%r{\A((spec/)?lib/generators/gitlab/usage_metric_)} => [:product_intelligence],
%r{\A((ee|jh)/)?lib/gitlab/usage_data_counters/.*\.yml\z} => [:product_intelligence],
%r{\A((ee|jh)/)?config/metrics/((.*\.yml)|(schema\.json))\z} => [:product_intelligence],
%r{\A((ee|jh)/)?lib/gitlab/usage_data(_counters)?(/|\.rb)} => [:backend, :product_intelligence],
@ -108,9 +111,16 @@ module Tooling
spec/lib/gitlab/tracking_spec\.rb |
app/helpers/tracking_helper\.rb |
spec/helpers/tracking_helper_spec\.rb |
(spec/)?lib/generators/gitlab/usage_metric_\S+ |
(spec/)?lib/generators/gitlab/usage_metric_definition/redis_hll_generator(_spec)?\.rb |
lib/generators/rails/usage_metric_definition_generator\.rb |
spec/lib/generators/usage_metric_definition_generator_spec\.rb |
generator_templates/usage_metric_definition/metric_definition\.yml)\z}x => [:backend, :product_intelligence],
%r{gitlab/usage_data(_spec)?\.rb} => [:product_intelligence],
[%r{\.haml\z}, %r{data: \{ track}] => [:product_intelligence],
[%r{\.(rb|haml)\z}, %r{Gitlab::Tracking\.(event|enabled\?|options)$}] => [:product_intelligence],
[%r{\.(vue|js)\z}, %r{(Tracking.event|/\btrack\(/|data-track-action)}] => [:product_intelligence],
%r{\A((ee|jh)/)?app/(?!assets|views)[^/]+} => :backend,
%r{\A((ee|jh)/)?(bin|config|generator_templates|lib|rubocop)/} => :backend,
%r{\A((ee|jh)/)?spec/migrations} => :database,