Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-10-27 18:11:02 +00:00
parent 0407f1573d
commit bd746eebdc
66 changed files with 2575 additions and 2349 deletions

View File

@@ -2,7 +2,7 @@
# project here: https://gitlab.com/gitlab-org/gitlab/-/project_members
# As described in https://docs.gitlab.com/ee/user/project/code_owners.html
* @gitlab-org/maintainers/rails-backend @gitlab-org/maintainers/frontend @gitlab-org/maintainers/database @gl-quality/qe-maintainers @gitlab-org/delivery @gitlab-org/maintainers/cicd-templates @kwiebers @nolith @jacobvosmaer-gitlab @gitlab-org/tw-leadership
* @gitlab-org/maintainers/rails-backend @gitlab-org/maintainers/frontend @gitlab-org/maintainers/database @gl-quality/qe-maintainers @gitlab-org/delivery @gitlab-org/maintainers/cicd-templates @nolith @jacobvosmaer-gitlab @gitlab-org/tw-leadership
CODEOWNERS @gitlab-org/development-leaders @gitlab-org/tw-leadership
docs/CODEOWNERS @gitlab-org/development-leaders @gitlab-org/tw-leadership

View File

@@ -1,4 +1,4 @@
<!-- See Pipelines for the GitLab project: https://docs.gitlab.com/ee/development/pipelines.html -->
<!-- See Pipelines for the GitLab project: https://docs.gitlab.com/ee/development/pipelines -->
<!-- When in doubt about a Pipeline configuration change, feel free to ping @gl-quality/eng-prod. -->
## What does this MR do?
@@ -15,7 +15,7 @@
Consider the effect of the changes in this merge request on the following:
- [ ] Different [pipeline types](https://docs.gitlab.com/ee/development/pipelines.html#pipelines-for-merge-requests)
- [ ] Different [pipeline types](https://docs.gitlab.com/ee/development/pipelines/index.html#pipelines-types-for-merge-requests)
- Non-canonical projects:
- [ ] `gitlab-foss`
- [ ] `security`

View File

@@ -16,8 +16,10 @@ export default class EditBlob {
constructor(options) {
this.options = options;
this.configureMonacoEditor();
this.isMarkdown = this.options.isMarkdown;
this.markdownLivePreviewOpened = false;
if (this.options.isMarkdown) {
if (this.isMarkdown) {
this.fetchMarkdownExtension();
}
@@ -104,6 +106,13 @@ export default class EditBlob {
this.$editModeLinks.on('click', (e) => this.editModeLinkClickHandler(e));
}
toggleMarkdownPreview(toOpen) {
if (toOpen !== this.markdownLivePreviewOpened) {
this.editor.markdownPreview?.eventEmitter.fire();
this.markdownLivePreviewOpened = !this.markdownLivePreviewOpened;
}
}
editModeLinkClickHandler(e) {
e.preventDefault();
@@ -115,25 +124,29 @@
currentLink.parent().addClass('active hover');
this.$editModePanes.hide();
if (this.isMarkdown) {
this.toggleMarkdownPreview(paneId === '#preview');
} else {
this.$editModePanes.hide();
currentPane.show();
currentPane.show();
if (paneId === '#preview') {
this.$toggleButton.hide();
axios
.post(currentLink.data('previewUrl'), {
content: this.editor.getValue(),
})
.then(({ data }) => {
currentPane.empty().append(data);
currentPane.renderGFM();
})
.catch(() =>
createAlert({
message: BLOB_PREVIEW_ERROR,
}),
);
if (paneId === '#preview') {
this.$toggleButton.hide();
axios
.post(currentLink.data('previewUrl'), {
content: this.editor.getValue(),
})
.then(({ data }) => {
currentPane.empty().append(data);
currentPane.renderGFM();
})
.catch(() =>
createAlert({
message: BLOB_PREVIEW_ERROR,
}),
);
}
}
this.$toggleButton.show();

View File

@@ -1,4 +1,4 @@
import { KeyMod, KeyCode } from 'monaco-editor';
import { KeyMod, KeyCode, Emitter } from 'monaco-editor';
import { debounce } from 'lodash';
import { BLOB_PREVIEW_ERROR } from '~/blob_edit/constants';
import { createAlert } from '~/flash';
@@ -56,6 +56,7 @@ export class EditorMarkdownPreviewExtension {
layoutChangeListener: undefined,
path: setupOptions.previewMarkdownPath,
actionShowPreviewCondition: instance.createContextKey('toggleLivePreview', true),
eventEmitter: new Emitter(),
};
this.toolbarButtons = [];
@@ -71,6 +72,8 @@
EditorMarkdownPreviewExtension.resizePreviewLayout(instance, newWidth);
}
});
this.preview.eventEmitter.event(this.togglePreview.bind(this, instance));
}
onBeforeUnuse(instance) {
@@ -187,6 +190,31 @@ export class EditorMarkdownPreviewExtension {
});
}
togglePreview(instance) {
if (!this.preview?.el) {
this.preview.el = setupDomElement({ injectToEl: instance.getDomNode().parentElement });
}
this.togglePreviewLayout(instance);
this.togglePreviewPanel(instance);
this.preview.actionShowPreviewCondition.set(!this.preview.actionShowPreviewCondition.get());
if (!this.preview?.shown) {
this.preview.modelChangeListener = instance.onDidChangeModelContent(
debounce(this.fetchPreview.bind(this, instance), EXTENSION_MARKDOWN_PREVIEW_UPDATE_DELAY),
);
} else {
this.preview.modelChangeListener.dispose();
}
this.preview.shown = !this.preview?.shown;
if (instance.toolbar) {
instance.toolbar.updateItem(EXTENSION_MARKDOWN_PREVIEW_ACTION_ID, {
selected: this.preview.shown,
});
}
}
provides() {
return {
markdownPreview: this.preview,
@@ -195,33 +223,7 @@ export class EditorMarkdownPreviewExtension {
setupPreviewAction: (instance) => this.setupPreviewAction(instance),
togglePreview: (instance) => {
if (!this.preview?.el) {
this.preview.el = setupDomElement({ injectToEl: instance.getDomNode().parentElement });
}
this.togglePreviewLayout(instance);
this.togglePreviewPanel(instance);
this.preview.actionShowPreviewCondition.set(!this.preview.actionShowPreviewCondition.get());
if (!this.preview?.shown) {
this.preview.modelChangeListener = instance.onDidChangeModelContent(
debounce(
this.fetchPreview.bind(this, instance),
EXTENSION_MARKDOWN_PREVIEW_UPDATE_DELAY,
),
);
} else {
this.preview.modelChangeListener.dispose();
}
this.preview.shown = !this.preview?.shown;
if (instance.toolbar) {
instance.toolbar.updateItem(EXTENSION_MARKDOWN_PREVIEW_ACTION_ID, {
selected: this.preview.shown,
});
}
},
togglePreview: (instance) => this.togglePreview(instance),
};
}
}

View File

@@ -18,7 +18,8 @@ import { BV_SHOW_MODAL, BV_HIDE_MODAL } from '~/lib/utils/constants';
import { getParameterValues } from '~/lib/utils/url_utility';
import { n__, sprintf } from '~/locale';
import {
CLOSE_TO_LIMIT_COUNT,
CLOSE_TO_LIMIT_VARIANT,
REACHED_LIMIT_VARIANT,
USERS_FILTER_ALL,
INVITE_MEMBERS_FOR_TASK,
MEMBER_MODAL_LABELS,
@@ -174,27 +175,11 @@ export default {
isOnLearnGitlab() {
return this.source === LEARN_GITLAB;
},
closeToLimit() {
if (this.usersLimitDataset.freeUsersLimit && this.usersLimitDataset.membersCount) {
return (
this.usersLimitDataset.membersCount >=
this.usersLimitDataset.freeUsersLimit - CLOSE_TO_LIMIT_COUNT
);
}
return false;
showUserLimitNotification() {
return this.usersLimitDataset.reachedLimit || this.usersLimitDataset.closeToDashboardLimit;
},
reachedLimit() {
if (this.usersLimitDataset.freeUsersLimit && this.usersLimitDataset.membersCount) {
return this.usersLimitDataset.membersCount >= this.usersLimitDataset.freeUsersLimit;
}
return false;
},
formGroupDescription() {
return this.reachedLimit
? this.$options.labels.placeHolderDisabled
: this.$options.labels.placeHolder;
limitVariant() {
return this.usersLimitDataset.reachedLimit ? REACHED_LIMIT_VARIANT : CLOSE_TO_LIMIT_VARIANT;
},
errorList() {
return Object.entries(this.invalidMembers).map(([member, error]) => {
@@ -385,13 +370,11 @@ export default {
:help-link="helpLink"
:label-intro-text="labelIntroText"
:label-search-field="$options.labels.searchField"
:form-group-description="formGroupDescription"
:form-group-description="$options.labels.placeHolder"
:invalid-feedback-message="invalidFeedbackMessage"
:is-loading="isLoading"
:new-users-to-invite="newUsersToInvite"
:root-group-id="rootId"
:close-to-limit="closeToLimit"
:reached-limit="reachedLimit"
:users-limit-dataset="usersLimitDataset"
@reset="resetFields"
@submit="sendInvite"
@@ -448,9 +431,8 @@ export default {
</template>
</gl-alert>
<user-limit-notification
v-else
:close-to-limit="closeToLimit"
:reached-limit="reachedLimit"
v-else-if="showUserLimitNotification"
:limit-variant="limitVariant"
:users-limit-dataset="usersLimitDataset"
/>
</template>

View File

@@ -8,7 +8,6 @@ import {
GlLink,
GlSprintf,
GlFormInput,
GlIcon,
} from '@gitlab/ui';
import Tracking from '~/tracking';
import { sprintf } from '~/locale';
@@ -20,11 +19,8 @@ import {
INVITE_BUTTON_TEXT,
INVITE_BUTTON_TEXT_DISABLED,
CANCEL_BUTTON_TEXT,
CANCEL_BUTTON_TEXT_DISABLED,
HEADER_CLOSE_LABEL,
ON_SHOW_TRACK_LABEL,
ON_CLOSE_TRACK_LABEL,
ON_SUBMIT_TRACK_LABEL,
} from '../constants';
const DEFAULT_SLOT = 'default';
@@ -48,7 +44,6 @@ export default {
GlDropdownItem,
GlSprintf,
GlFormInput,
GlIcon,
ContentTransition,
},
mixins: [Tracking.mixin()],
@@ -131,16 +126,6 @@ export default {
required: false,
default: false,
},
closeToLimit: {
type: Boolean,
required: false,
default: false,
},
reachedLimit: {
type: Boolean,
required: false,
default: false,
},
usersLimitDataset: {
type: Object,
required: false,
@@ -175,20 +160,17 @@
},
actionPrimary() {
return {
text: this.reachedLimit ? INVITE_BUTTON_TEXT_DISABLED : this.submitButtonText,
text: this.submitButtonText,
attributes: {
variant: 'confirm',
disabled: this.reachedLimit ? false : this.submitDisabled,
loading: this.reachedLimit ? false : this.isLoading,
disabled: this.submitDisabled,
loading: this.isLoading,
'data-qa-selector': 'invite_button',
...(this.reachedLimit && { href: this.usersLimitDataset.membersPath }),
},
};
},
actionCancel() {
if (this.reachedLimit && this.usersLimitDataset.userNamespace) return undefined;
if (this.closeToLimit && this.usersLimitDataset.userNamespace) {
if (this.usersLimitDataset.closeToDashboardLimit && this.usersLimitDataset.userNamespace) {
return {
text: INVITE_BUTTON_TEXT_DISABLED,
attributes: {
@@ -200,13 +182,9 @@
}
return {
text: this.reachedLimit ? CANCEL_BUTTON_TEXT_DISABLED : this.cancelButtonText,
...(this.reachedLimit && { attributes: { href: this.usersLimitDataset.purchasePath } }),
text: this.cancelButtonText,
};
},
selectLabelClass() {
return `col-form-label ${this.reachedLimit ? 'gl-text-gray-500' : ''}`;
},
},
watch: {
selectedAccessLevel: {
@@ -226,23 +204,19 @@
this.$emit('reset');
},
onShowModal() {
if (this.reachedLimit) {
if (this.usersLimitDataset.reachedLimit) {
this.track('render', { category: 'default', label: ON_SHOW_TRACK_LABEL });
}
},
onCloseModal(e) {
if (this.preventCancelDefault || this.reachedLimit) {
if (this.preventCancelDefault) {
e.preventDefault();
} else {
this.onReset();
this.$refs.modal.hide();
}
if (this.reachedLimit) {
this.track('click_button', { category: 'default', label: ON_CLOSE_TRACK_LABEL });
} else {
this.$emit('cancel');
}
this.$emit('cancel');
},
changeSelectedItem(item) {
this.selectedAccessLevel = item;
@@ -251,14 +225,10 @@
// We never want to hide when submitting
e.preventDefault();
if (this.reachedLimit) {
this.track('click_button', { category: 'default', label: ON_SUBMIT_TRACK_LABEL });
} else {
this.$emit('submit', {
accessLevel: this.selectedAccessLevel,
expiresAt: this.selectedDate,
});
}
this.$emit('submit', {
accessLevel: this.selectedAccessLevel,
expiresAt: this.selectedDate,
});
},
},
HEADER_CLOSE_LABEL,
@@ -311,71 +281,63 @@ export default {
<gl-form-group
:invalid-feedback="invalidFeedbackMessage"
:state="exceptionState"
:description="formGroupDescription"
data-testid="members-form-group"
>
<template #description>
<gl-icon v-if="reachedLimit" name="lock" />
{{ formGroupDescription }}
</template>
<label :id="selectLabelId" :class="selectLabelClass">{{ labelSearchField }}</label>
<gl-form-input v-if="reachedLimit" data-testid="disabled-input" disabled />
<slot v-else name="select" v-bind="{ exceptionState, labelId: selectLabelId }"></slot>
<label :id="selectLabelId" class="col-form-label">{{ labelSearchField }}</label>
<slot name="select" v-bind="{ exceptionState, labelId: selectLabelId }"></slot>
</gl-form-group>
<template v-if="!reachedLimit">
<label class="gl-font-weight-bold">{{ $options.ACCESS_LEVEL }}</label>
<label class="gl-font-weight-bold">{{ $options.ACCESS_LEVEL }}</label>
<div class="gl-mt-2 gl-w-half gl-xs-w-full">
<gl-dropdown
class="gl-shadow-none gl-w-full"
data-qa-selector="access_level_dropdown"
v-bind="$attrs"
:text="selectedRoleName"
>
<template v-for="(key, item) in accessLevels">
<gl-dropdown-item
:key="key"
active-class="is-active"
is-check-item
:is-checked="key === selectedAccessLevel"
@click="changeSelectedItem(key)"
>
<div>{{ item }}</div>
</gl-dropdown-item>
</template>
</gl-dropdown>
</div>
<div class="gl-mt-2 gl-w-half gl-xs-w-full">
<gl-dropdown
class="gl-shadow-none gl-w-full"
data-qa-selector="access_level_dropdown"
v-bind="$attrs"
:text="selectedRoleName"
>
<template v-for="(key, item) in accessLevels">
<gl-dropdown-item
:key="key"
active-class="is-active"
is-check-item
:is-checked="key === selectedAccessLevel"
@click="changeSelectedItem(key)"
>
<div>{{ item }}</div>
</gl-dropdown-item>
</template>
</gl-dropdown>
</div>
<div class="gl-mt-2 gl-w-half gl-xs-w-full">
<gl-sprintf :message="$options.READ_MORE_TEXT">
<template #link="{ content }">
<gl-link :href="helpLink" target="_blank">{{ content }}</gl-link>
</template>
</gl-sprintf>
</div>
<div class="gl-mt-2 gl-w-half gl-xs-w-full">
<gl-sprintf :message="$options.READ_MORE_TEXT">
<template #link="{ content }">
<gl-link :href="helpLink" target="_blank">{{ content }}</gl-link>
</template>
</gl-sprintf>
</div>
<label class="gl-mt-5 gl-display-block" for="expires_at">{{
$options.ACCESS_EXPIRE_DATE
}}</label>
<div class="gl-mt-2 gl-w-half gl-xs-w-full gl-display-inline-block">
<gl-datepicker
v-model="selectedDate"
class="gl-display-inline!"
:min-date="minDate"
:target="null"
>
<template #default="{ formattedDate }">
<gl-form-input
class="gl-w-full"
:value="formattedDate"
:placeholder="__(`YYYY-MM-DD`)"
/>
</template>
</gl-datepicker>
</div>
<slot name="form-after"></slot>
</template>
<label class="gl-mt-5 gl-display-block" for="expires_at">{{
$options.ACCESS_EXPIRE_DATE
}}</label>
<div class="gl-mt-2 gl-w-half gl-xs-w-full gl-display-inline-block">
<gl-datepicker
v-model="selectedDate"
class="gl-display-inline!"
:min-date="minDate"
:target="null"
>
<template #default="{ formattedDate }">
<gl-form-input
class="gl-w-full"
:value="formattedDate"
:placeholder="__(`YYYY-MM-DD`)"
/>
</template>
</gl-datepicker>
</div>
<slot name="form-after"></slot>
</template>
<template v-for="{ key } in extraSlots" #[key]>

View File

@@ -5,9 +5,10 @@ import { n__, sprintf } from '~/locale';
import {
WARNING_ALERT_TITLE,
DANGER_ALERT_TITLE,
REACHED_LIMIT_MESSAGE,
REACHED_LIMIT_UPGRADE_SUGGESTION_MESSAGE,
REACHED_LIMIT_VARIANT,
CLOSE_TO_LIMIT_MESSAGE,
CLOSE_TO_LIMIT_VARIANT,
} from '../constants';
export default {
@@ -15,87 +16,71 @@ export default {
components: { GlAlert, GlSprintf, GlLink },
inject: ['name'],
props: {
closeToLimit: {
type: Boolean,
required: true,
},
reachedLimit: {
type: Boolean,
limitVariant: {
type: String,
required: true,
},
usersLimitDataset: {
type: Object,
required: false,
default: () => ({}),
required: true,
},
},
computed: {
freeUsersLimit() {
return this.usersLimitDataset.freeUsersLimit;
},
membersCount() {
return this.usersLimitDataset.membersCount;
},
newTrialRegistrationPath() {
return this.usersLimitDataset.newTrialRegistrationPath;
},
purchasePath() {
return this.usersLimitDataset.purchasePath;
},
warningAlertTitle() {
return sprintf(WARNING_ALERT_TITLE, {
count: this.freeUsersLimit - this.membersCount,
members: this.pluralMembers(this.freeUsersLimit - this.membersCount),
name: this.name,
});
},
dangerAlertTitle() {
return sprintf(DANGER_ALERT_TITLE, {
count: this.freeUsersLimit,
members: this.pluralMembers(this.freeUsersLimit),
name: this.name,
});
},
variant() {
return this.reachedLimit ? 'danger' : 'warning';
},
title() {
return this.reachedLimit ? this.dangerAlertTitle : this.warningAlertTitle;
},
message() {
if (this.reachedLimit) {
return this.$options.i18n.reachedLimitUpgradeSuggestionMessage;
}
return this.$options.i18n.closeToLimitMessage;
limitAttributes() {
return {
[CLOSE_TO_LIMIT_VARIANT]: {
variant: 'warning',
title: this.title(WARNING_ALERT_TITLE, this.usersLimitDataset.remainingSeats),
message: CLOSE_TO_LIMIT_MESSAGE,
},
[REACHED_LIMIT_VARIANT]: {
variant: 'danger',
title: this.title(DANGER_ALERT_TITLE, this.usersLimitDataset.freeUsersLimit),
message: REACHED_LIMIT_UPGRADE_SUGGESTION_MESSAGE,
},
};
},
},
methods: {
pluralMembers(count) {
return n__('member', 'members', count);
title(titleTemplate, count) {
return sprintf(titleTemplate, {
count,
members: n__('member', 'members', count),
name: this.name,
});
},
},
i18n: {
reachedLimitMessage: REACHED_LIMIT_MESSAGE,
reachedLimitUpgradeSuggestionMessage: REACHED_LIMIT_UPGRADE_SUGGESTION_MESSAGE,
closeToLimitMessage: CLOSE_TO_LIMIT_MESSAGE,
},
};
</script>
<template>
<gl-alert
v-if="reachedLimit || closeToLimit"
:variant="variant"
:variant="limitAttributes[limitVariant].variant"
:dismissible="false"
:title="title"
:title="limitAttributes[limitVariant].title"
>
<gl-sprintf :message="message">
<gl-sprintf :message="limitAttributes[limitVariant].message">
<template #trialLink="{ content }">
<gl-link :href="newTrialRegistrationPath" class="gl-label-link">{{ content }}</gl-link>
<gl-link
:href="usersLimitDataset.newTrialRegistrationPath"
class="gl-label-link"
data-track-action="click_link"
:data-track-label="`start_trial_user_limit_notification_${limitVariant}`"
data-testid="trial-link"
>
{{ content }}
</gl-link>
</template>
<template #upgradeLink="{ content }">
<gl-link :href="purchasePath" class="gl-label-link">{{ content }}</gl-link>
<gl-link
:href="usersLimitDataset.purchasePath"
class="gl-label-link"
data-track-action="click_link"
:data-track-label="`upgrade_user_limit_notification_${limitVariant}`"
data-testid="upgrade-link"
>
{{ content }}
</gl-link>
</template>
</gl-sprintf>
</gl-alert>

View File

@@ -1,6 +1,5 @@
import { s__ } from '~/locale';
export const CLOSE_TO_LIMIT_COUNT = 2;
export const SEARCH_DELAY = 200;
export const VALID_TOKEN_BACKGROUND = 'gl-bg-green-100';
export const INVALID_TOKEN_BACKGROUND = 'gl-bg-red-100';
@@ -40,9 +39,6 @@ export const MEMBERS_TO_PROJECT_CELEBRATE_INTRO_TEXT = s__(
);
export const MEMBERS_SEARCH_FIELD = s__('InviteMembersModal|Username or email address');
export const MEMBERS_PLACEHOLDER = s__('InviteMembersModal|Select members or type email addresses');
export const MEMBERS_PLACEHOLDER_DISABLED = s__(
'InviteMembersModal|This feature is disabled until this group has space for more members.',
);
export const MEMBERS_TASKS_TO_BE_DONE_TITLE = s__(
'InviteMembersModal|Create issues for your new team member to work on (optional)',
);
@@ -110,7 +106,6 @@ export const MEMBER_MODAL_LABELS = {
},
searchField: MEMBERS_SEARCH_FIELD,
placeHolder: MEMBERS_PLACEHOLDER,
placeHolderDisabled: MEMBERS_PLACEHOLDER_DISABLED,
tasksToBeDone: {
title: MEMBERS_TASKS_TO_BE_DONE_TITLE,
noProjects: MEMBERS_TASKS_TO_BE_DONE_NO_PROJECTS,
@@ -139,9 +134,7 @@ export const GROUP_MODAL_LABELS = {
};
export const LEARN_GITLAB = 'learn_gitlab';
export const ON_SHOW_TRACK_LABEL = 'locked_modal_viewed';
export const ON_CLOSE_TRACK_LABEL = 'explore_paid_plans_clicked';
export const ON_SUBMIT_TRACK_LABEL = 'manage_members_clicked';
export const ON_SHOW_TRACK_LABEL = 'over_limit_modal_viewed';
export const WARNING_ALERT_TITLE = s__(
'InviteMembersModal|You only have space for %{count} more %{members} in %{name}',
@@ -150,13 +143,16 @@ export const DANGER_ALERT_TITLE = s__(
"InviteMembersModal|You've reached your %{count} %{members} limit for %{name}",
);
export const REACHED_LIMIT_VARIANT = 'reached';
export const CLOSE_TO_LIMIT_VARIANT = 'close';
export const REACHED_LIMIT_MESSAGE = s__(
'InviteMembersModal|You cannot add more members, but you can remove members who no longer need access.',
'InviteMembersModal|To invite new users to this namespace, you must remove existing users. You can still add existing namespace users.',
);
export const REACHED_LIMIT_UPGRADE_SUGGESTION_MESSAGE = REACHED_LIMIT_MESSAGE.concat(
s__(
'InviteMembersModal| To get more members and access to additional paid features, an owner of the group can start a trial or upgrade to a paid tier.',
'InviteMembersModal| To get more members, the owner of this namespace can %{trialLinkStart}start a trial%{trialLinkEnd} or %{upgradeLinkStart}upgrade%{upgradeLinkEnd} to a paid tier.',
),
);

View File

@@ -131,7 +131,7 @@ export default {
<revision-card
data-testid="sourceRevisionCard"
:refs-project-path="to.refsProjectPath"
revision-text="Source"
:revision-text="__('Source')"
params-name="to"
:params-branch="to.revision"
:projects="to.projects"
@@ -160,7 +160,7 @@ export default {
<revision-card
data-testid="targetRevisionCard"
:refs-project-path="from.refsProjectPath"
revision-text="Target"
:revision-text="__('Target')"
params-name="from"
:params-branch="from.revision"
:projects="from.projects"

View File

@@ -6,7 +6,7 @@ MESSAGE = <<~MESSAGE
This merge request contains changes to the pipeline configuration for the GitLab project.
Please consider the effect of the changes in this merge request on the following:
- Effects on different [pipeline types](https://docs.gitlab.com/ee/development/pipelines.html#pipelines-for-merge-requests)
- Effects on different [pipeline types](https://docs.gitlab.com/ee/development/pipelines/index.html#pipelines-types-for-merge-requests)
- Effects on non-canonical projects:
- `gitlab-foss`
- `security`

View File

@@ -116,7 +116,7 @@ The `CI_PRE_CLONE_SCRIPT` variable does not work on GitLab SaaS Windows or macOS
This example was used in the `gitlab-org/gitlab` project until November 2021.
The project no longer uses this optimization because the
[pack-objects cache](../../../administration/gitaly/configure_gitaly.md#pack-objects-cache)
lets Gitaly serve the full CI/CD fetch traffic. See [Git fetch caching](../../../development/pipelines.md#git-fetch-caching).
lets Gitaly serve the full CI/CD fetch traffic. See [Git fetch caching](../../../development/pipelines/performance.md#git-fetch-caching).
The `CI_PRE_CLONE_SCRIPT` was defined as a project CI/CD variable:

View File

@@ -23,7 +23,7 @@ To request access to ChatOps on GitLab.com:
with one of the following methods (Okta is not supported):
- The same username you use on GitLab.com. You may have to choose a different username later.
- Clicking the **Sign in with Google** button to sign in with your GitLab.com email address.
- Selecting the **Sign in with Google** button to sign in with your GitLab.com email address.
1. Confirm that your username in [Internal GitLab for Operations](https://ops.gitlab.net/)
is the same as your username in [GitLab.com](https://gitlab.com/). If the usernames

View File

@@ -33,7 +33,7 @@ On the left side we have the events that can trigger a pipeline based on various
- A `git push` is the most common event that triggers a pipeline.
- The [Web API](../../api/pipelines.md#create-a-new-pipeline).
- A user clicking the "Run pipeline" button in the UI.
- A user selecting the "Run pipeline" button in the UI.
- When a [merge request is created or updated](../../ci/pipelines/merge_request_pipelines.md).
- When an MR is added to a [Merge Train](../../ci/pipelines/merge_trains.md#merge-trains).
- A [scheduled pipeline](../../ci/pipelines/schedules.md).

View File

@@ -418,7 +418,7 @@ is updated in a major version GitLab release.
Every CI/CD template must also have metrics defined to track their use. The CI/CD template monthly usage report
can be found in [Sisense (GitLab team members only)](https://app.periscopedata.com/app/gitlab/785953/Pipeline-Authoring-Dashboard?widget=13440051&udv=0).
Double click a template to see the graph for that single template.
Select a template to see the graph for that single template.
To add a metric definition for a new template:

View File

@@ -234,7 +234,7 @@ below the doc link:
external_url: true
```
All nav links are clickable. If the higher-level link does not have a link
All nav links are selectable. If the higher-level link does not have a link
of its own, it must link to its first sub-item link, mimicking the navigation in GitLab.
This must be avoided so that we don't have duplicated links nor two `.active` links
at the same time.

View File

@@ -400,22 +400,31 @@ You can use italics when you are introducing a term for the first time. Otherwis
### Punctuation
Follow these guidelines for punctuation:
Follow these guidelines for punctuation.
<!-- vale gitlab.Repetition = NO -->
- End full sentences with a period.
- Use one space between sentences.
- Do not use semicolons. Use two sentences instead.
- Do not use double spaces. (Tested in [`SentenceSpacing.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/.vale/gitlab/SentenceSpacing.yml).)
- Do not use non-breaking spaces. Use standard spaces instead. (Tested in [`lint-doc.sh`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/scripts/lint-doc.sh).)
- Do not use tabs for indentation. Use spaces instead. You can configure your code editor to output spaces instead of tabs when pressing the tab key.
- Use serial (Oxford) commas before the final **and** or **or** in a list of three or more items. (Tested in [`OxfordComma.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/.vale/gitlab/OxfordComma.yml).)
- Avoid dashes. Use separate sentences, or commas, instead.
- Do not use typographer's ("curly") quotes. Use straight quotes instead. (Tested in [`NonStandardQuotes.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/.vale/gitlab/NonStandardQuotes.yml).)
<!-- vale gitlab.Repetition = YES -->
When spacing content:
- Use one space between sentences. (Use of more than one space is tested in [`SentenceSpacing.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/.vale/gitlab/SentenceSpacing.yml).)
- Do not use non-breaking spaces. Use standard spaces instead. (Tested in [`lint-doc.sh`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/scripts/lint-doc.sh).)
- Do not use tabs for indentation. Use spaces instead. You can configure your code editor to output spaces instead of tabs when pressing the <kbd>Tab</kbd> key.
<!-- vale gitlab.NonStandardQuotes = NO -->
Do not use these punctuation characters:
- `;` (semicolon): Use two sentences instead.
- `–` (en dash) or `—` (em dash): Use separate sentences, or commas, instead.
- `“` `”` `‘` `’`: Double or single typographer's ("curly") quotation marks. Use straight quotes instead. (Tested in [`NonStandardQuotes.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/.vale/gitlab/NonStandardQuotes.yml).)
<!-- vale gitlab.NonStandardQuotes = YES -->
### Placeholder text
You might want to provide a command or configuration that

View File

@@ -839,7 +839,7 @@ When writing about the Owner role:
- Instead of: if you are an owner
Do not use bold.
Do not use **Owner permissions**. A user who is assigned the Owner role has a set of associated permissions.
An Owner is the highest role a user can have.
@@ -1231,5 +1231,19 @@ Instead of:
- Users can configure a pipeline.
## you can
When possible, start sentences with an active verb instead of **you can**.
For example:
- Use code review analytics to view merge request data.
- Create a board to organize your team tasks.
- Configure variables to restrict pushes to a repository.
Use **you can** for optional actions. For example:
- Use code review analytics to view metrics per merge request. You can also use the API.
- Enter the name and value pairs. You can add up to 20 pairs per streaming destination.
<!-- vale on -->
<!-- markdownlint-enable -->

View File

@@ -60,6 +60,15 @@ For example, `Create an issue`.
If you have several tasks on a page that share prerequisites, you can use the title
`Prerequisites` and link to it.
## Task introductions
To start the task topic, use the structure `active verb` + `noun`, and
provide context about the action.
For example, `Create an issue when you want to track bugs or future work`.
To start the task steps, use a succinct action followed by a colon.
For example, `To create an issue:`
## Related topics
- [View the format for writing task steps](../styleguide/index.md#navigation).

View File

@@ -147,7 +147,7 @@ FOSS context as well.
To run pipelines in both contexts, add the `~"pipeline:run-as-if-foss"` label to the merge request.
See the [As-if-FOSS jobs](pipelines.md#as-if-foss-jobs) pipelines documentation for more information.
See the [As-if-FOSS jobs](pipelines/index.md#as-if-foss-jobs) pipelines documentation for more information.
## Separation of EE code in the backend

View File

@@ -85,7 +85,7 @@ special formatting is required. When the extension receives this data,
it is set to `collapsedData`. You can access `collapsedData` in any computed property or
method.
When the user clicks **Expand**, the `fetchFullData` method is called. This method
When the user selects **Expand**, the `fetchFullData` method is called. This method
also gets called with the props as an argument. This method **must** also return
the full data. However, this data must be correctly formatted to match the format
mentioned in the data structure section.

View File

@@ -22,7 +22,7 @@ official [Vuex documentation](https://vuex.vuejs.org).
Vuex is composed of State, Getters, Mutations, Actions, and Modules.
When a user clicks on an action, we need to `dispatch` it. This action `commits` a mutation that changes the state. The action itself does not update the state; only a mutation should update the state.
When a user selects an action, we need to `dispatch` it. This action `commits` a mutation that changes the state. The action itself does not update the state; only a mutation should update the state.
## File structure

View File

@@ -20,7 +20,7 @@ See the [general pagination guidelines section](../database/pagination_guideline
This is the traditional, page-by-page pagination, that is most common,
and used across much of GitLab. You can recognize it by
a list of page numbers near the bottom of a page, which, when clicked,
a list of page numbers near the bottom of a page, which, when selected,
take you to that page of results.
For example, when you select **Page 100**, we send `100` to the

View File

@@ -36,7 +36,7 @@ it should be disapproved in Crowdin. Our strings must use [variables](externaliz
for HTML instead.
It might be useful to pause the integration on the Crowdin side for a
moment so translations don't keep coming. You can do this by clicking
moment so translations don't keep coming. You can do this by selecting
**Pause sync** on the [Crowdin integration settings page](https://translate.gitlab.com/project/gitlab-ee/settings#integration).
## Merging translations

View File

@@ -58,6 +58,7 @@ are very appreciative of the work done by translators and proofreaders!
- French
- Davy Defaud - [GitLab](https://gitlab.com/DevDef), [Crowdin](https://crowdin.com/profile/DevDef)
- Germain Gorisse - [GitLab](https://gitlab.com/ggorisse), [Crowdin](https://crowdin.com/profile/germaingorisse)
- Xavier Delatour - [GitLab](https://gitlab.com/xdelatour), [Crowdin](https://crowdin.com/profile/xdelatour)
- Galician
- Antón Méixome - [Crowdin](https://crowdin.com/profile/meixome)
- Pedro Garcia - [GitLab](https://gitlab.com/pedgarrod), [Crowdin](https://crowdin.com/profile/breaking_pitt)

View File

@@ -109,4 +109,4 @@ The following steps describe setting up an environment to test the GitLab OAuth
1. Go to **Admin > Settings > General**.
1. Scroll down and expand the GitLab for Jira App section.
1. Go to [gitpod.io/variables](https://gitpod.io/variables).
1. Paste the Application ID into the **Jira Connect Application ID** field and click **Save changes**
1. Paste the Application ID into the **Jira Connect Application ID** field and select **Save changes**.

View File

@@ -77,7 +77,7 @@ and complete an integration with the Secure stage.
1. Get a test account to begin developing your integration. You can
request a [GitLab.com Subscription Sandbox](https://about.gitlab.com/partners/technology-partners/integrate/#gitlabcom-subscription-sandbox-request)
or an [EE Developer License](https://about.gitlab.com/partners/technology-partners/integrate/#requesting-ultimate-dev-license-for-rd).
1. Provide a [pipeline job](../../development/pipelines.md)
1. Provide a [pipeline job](../../development/pipelines)
template that users could integrate into their own GitLab pipelines.
1. Create a report artifact with your pipeline jobs.
1. Ensure your pipeline jobs create a report artifact that GitLab can process

View File

@@ -22,7 +22,7 @@ consistent performance of GitLab. Refer to the [Index](#performance-documentatio
- [Pagination performance guidelines](../development/database/pagination_performance_guidelines.md)
- [Keyset pagination performance](../development/database/keyset_pagination.md#performance)
- [Troubleshooting import/export performance issues](../development/import_export.md#troubleshooting-performance-issues)
- [Pipelines performance in the `gitlab` project](../development/pipelines.md#performance)
- [Pipelines performance in the `gitlab` project](pipelines/performance.md)
- Frontend:
- [Performance guidelines and monitoring](../development/fe_guide/performance.md)
- [Browser performance testing guidelines](../ci/testing/browser_performance_testing.md)

View File

@@ -1,879 +1,11 @@
---
stage: none
group: Engineering Productivity
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
redirect_to: 'pipelines/index.md'
remove_date: '2023-01-20'
---
# Pipelines for the GitLab project
This document was moved to [another location](pipelines/index.md).
Pipelines for [`gitlab-org/gitlab`](https://gitlab.com/gitlab-org/gitlab) (as well as the `dev` instance's) are configured in the usual
[`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml)
which itself includes files under
[`.gitlab/ci/`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/.gitlab/ci)
for easier maintenance.
We're striving to [dogfood](https://about.gitlab.com/handbook/engineering/development/principles/#dogfooding)
GitLab [CI/CD features and best-practices](../ci/yaml/index.md)
as much as possible.
## Minimal test jobs before a merge request is approved
**To reduce the pipeline cost and shorten the job duration, before a merge request is approved, the pipeline will run a minimal set of RSpec & Jest tests that are related to the merge request changes.**
After a merge request has been approved, the pipeline contains the full RSpec & Jest tests. This ensures that all tests
have been run before a merge request is merged.
### Overview of the GitLab project test dependency
To understand how the minimal test jobs are executed, we need to understand the dependency between
GitLab code (frontend and backend) and the respective tests (Jest and RSpec).
This dependency can be visualized in the following diagram:
```mermaid
flowchart LR
subgraph frontend
fe["Frontend code"]--tested with-->jest
end
subgraph backend
be["Backend code"]--tested with-->rspec
end
be--generates-->fixtures["frontend fixtures"]
fixtures--used in-->jest
```
In summary:
- RSpec tests are dependent on the backend code.
- Jest tests are dependent on both frontend and backend code, the latter through the frontend fixtures.
### RSpec minimal jobs
#### Determining related RSpec test files in a merge request
To identify the minimal set of tests needed, we use the [`test_file_finder` gem](https://gitlab.com/gitlab-org/ci-cd/test_file_finder), with two strategies:
- dynamic mapping from test coverage tracing (generated via the [`Crystalball` gem](https://github.com/toptal/crystalball))
([see where it's used](https://gitlab.com/gitlab-org/gitlab/-/blob/47d507c93779675d73a05002e2ec9c3c467cd698/tooling/bin/find_tests#L15))
- static mapping maintained in the [`tests.yml` file](https://gitlab.com/gitlab-org/gitlab/-/blob/master/tests.yml) for special cases that cannot
be mapped via coverage tracing ([see where it's used](https://gitlab.com/gitlab-org/gitlab/-/blob/47d507c93779675d73a05002e2ec9c3c467cd698/tooling/bin/find_tests#L12))
The test mappings contain a map of each source file to a list of test files that depend on it.
In the `detect-tests` job, we use this mapping to identify the minimal tests needed for the current merge request.
Later on in [the `rspec fail-fast` job](#fail-fast-job-in-merge-request-pipelines), we run the minimal tests needed for the current merge request.
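To make the static strategy concrete, here is a minimal Ruby sketch of resolving a `tests.yml`-style mapping against the changed files. The mapping entries, file names, and matching logic are illustrative, not the actual `test_file_finder` implementation:

```ruby
require 'yaml'

# Illustrative mapping in the spirit of `tests.yml`: each entry pairs a
# source file pattern with a test file that must run when it changes.
mapping = YAML.safe_load(<<~YML)
  mapping:
    - source: app/models/project.rb
      test: spec/models/project_spec.rb
    - source: lib/gitlab/ci/**/*.rb
      test: spec/lib/gitlab/ci/yaml_processor_spec.rb
YML

changed_files = %w[app/models/project.rb lib/gitlab/ci/config.rb]

related_tests = mapping['mapping'].select do |entry|
  changed_files.any? { |file| File.fnmatch(entry['source'], file, File::FNM_PATHNAME) }
end.map { |entry| entry['test'] }

puts related_tests.uniq # the minimal set of tests for this merge request
```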
#### Exceptional cases
In addition, there are a few circumstances where we would always run the full RSpec tests:
- when the `pipeline:run-all-rspec` label is set on the merge request
- when the `pipeline:run-full-rspec` label is set on the merge request, this label is assigned by triage automation when the merge request is approved by any reviewer
- when the merge request is created by an automation (for example, Gitaly update or MR targeting a stable branch)
- when the merge request is created in a security mirror
- when any CI configuration file is changed (for example, `.gitlab-ci.yml` or `.gitlab/ci/**/*`)
### Jest minimal jobs
#### Determining related Jest test files in a merge request
To identify the minimal set of tests needed, we pass a list of all the changed files into `jest` using the [`--findRelatedTests`](https://jestjs.io/docs/cli#--findrelatedtests-spaceseparatedlistofsourcefiles) option.
In this mode, `jest` resolves all the dependencies related to the changed files, which include test files that have these files in the dependency chain.
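For instance, a tooling script could collect the changed frontend files and hand them to Jest; the following Ruby sketch is illustrative, not the actual CI script:

```ruby
# Gather the frontend files changed in this merge request.
changed_files = `git diff --name-only origin/master...HEAD`
                  .split("\n")
                  .select { |path| path.end_with?('.js', '.vue') }

# --findRelatedTests makes Jest walk its module graph and run only the
# test files that (transitively) import any of the changed files.
system('yarn', 'jest', '--findRelatedTests', *changed_files) unless changed_files.empty?
```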
#### Exceptional cases
In addition, there are a few circumstances where we would always run the full Jest tests:
- when the `pipeline:run-all-jest` label is set on the merge request
- when the merge request is created by an automation (for example, Gitaly update or MR targeting a stable branch)
- when the merge request is created in a security mirror
- when any CI configuration file is changed (for example, `.gitlab-ci.yml` or `.gitlab/ci/**/*`)
- when any frontend "core" file is changed (for example, `package.json`, `yarn.lock`, `babel.config.js`, `jest.config.*.js`, `config/helpers/**/*.js`)
- when any vendored JavaScript file is changed (for example, `vendor/assets/javascripts/**/*`)
- when any backend file is changed ([see the patterns list for details](https://gitlab.com/gitlab-org/gitlab/-/blob/3616946936c1adbd9e754c1bd06f86ba670796d8/.gitlab/ci/rules.gitlab-ci.yml#L205-216))
### Fork pipelines
We run only the minimal RSpec & Jest jobs for fork pipelines, unless the `pipeline:run-all-rspec`
label is set on the MR. The goal is to reduce the CI/CD minutes consumed by fork pipelines.
See the [experiment issue](https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/1170).
## Fail-fast job in merge request pipelines
To provide faster feedback when a merge request breaks existing tests, we are experimenting with a
fail-fast mechanism.
An `rspec fail-fast` job is added in parallel to all other `rspec` jobs in a merge
request pipeline. This job runs the tests that are directly related to the changes
in the merge request.
If any of these tests fail, the `rspec fail-fast` job fails, triggering a
`fail-pipeline-early` job to run. The `fail-pipeline-early` job:
- Cancels the currently running pipeline and all in-progress jobs.
- Sets the pipeline status to `failed`.
For example:
```mermaid
graph LR
subgraph "prepare stage";
A["detect-tests"]
end
subgraph "test stage";
B["jest"];
C["rspec migration"];
D["rspec unit"];
E["rspec integration"];
F["rspec system"];
G["rspec fail-fast"];
end
subgraph "post-test stage";
Z["fail-pipeline-early"];
end
A --"artifact: list of test files"--> G
G --"on failure"--> Z
```
The `rspec fail-fast` job is a no-op if there are more than 10 test files related to the
merge request. This prevents `rspec fail-fast` duration from exceeding the average
`rspec` job duration and defeating its purpose.
This number can be overridden by setting a CI/CD variable named `RSPEC_FAIL_FAST_TEST_FILE_COUNT_THRESHOLD`.
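A hedged sketch of that guard (not the actual CI script; the input handling here is hypothetical):

```ruby
# Skip fail-fast when the merge request maps to more test files than the
# (overridable) threshold allows.
threshold = ENV.fetch('RSPEC_FAIL_FAST_TEST_FILE_COUNT_THRESHOLD', '10').to_i
test_files = ARGV # related spec files, for example from `detect-tests`

if test_files.size > threshold
  puts "Skipping fail-fast: #{test_files.size} test files exceed the threshold of #{threshold}"
  exit 0 # no-op; the regular rspec jobs still run the full set
end

exec('bin/rspec', *test_files)
```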
## Faster feedback when reverting merge requests
When you need to revert a merge request, to get accelerated feedback, you can add the `~pipeline:revert` label to your merge request.
When this label is assigned, the following steps of the CI/CD pipeline are skipped:
- The `e2e:package-and-test` job.
- The `rspec:undercoverage` job.
- The entire [Review Apps process](testing_guide/review_apps.md).
Apply the label to the merge request, and run a new pipeline for the MR.
## Test jobs
We have dedicated jobs for each [testing level](testing_guide/testing_levels.md) and each job runs depending on the
changes made in your merge request.
If you want to force all the RSpec jobs to run regardless of your changes, you can add the `pipeline:run-all-rspec` label to the merge request.
WARNING:
Forcing all jobs on docs-only MRs leaves out the prerequisite jobs those jobs depend on, which leads to errors.
### Test suite parallelization
Our current RSpec tests parallelization setup is as follows:
1. The `retrieve-tests-metadata` job in the `prepare` stage ensures we have a
`knapsack/report-master.json` file:
- The `knapsack/report-master.json` file is fetched from the latest `main` pipeline which runs `update-tests-metadata`
(for now it's the 2-hourly `maintenance` scheduled master pipeline); if it's not available, we initialize the file with `{}`.
1. Each `[rspec|rspec-ee] [migration|unit|integration|system|geo] n m` job is run with
`knapsack rspec` and should have an evenly distributed share of tests:
- It works because the jobs have access to the `knapsack/report-master.json`
since the "artifacts from all previous stages are passed by default".
- the jobs set their own report path to
`"knapsack/${TEST_TOOL}_${TEST_LEVEL}_${DATABASE}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json"`.
- if knapsack is doing its job, test files that are run should be listed under
`Report specs`, not under `Leftover specs`.
1. The `update-tests-metadata` job (which only runs on scheduled pipelines for
   [the canonical project](https://gitlab.com/gitlab-org/gitlab)) takes all the
   `knapsack/rspec*.json` files and merges them into a single
   `knapsack/report-master.json` file that is saved as an artifact.
After that, the next pipeline uses the up-to-date `knapsack/report-master.json` file.
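A minimal Ruby sketch of that merge step (the logic is illustrative; the real job runs in CI):

```ruby
require 'json'

# Combine every per-job Knapsack timing report into one master report.
# Each report maps a spec file to its measured run time in seconds; later
# entries overwrite earlier ones for the same file.
master = Dir['knapsack/rspec*.json'].reduce({}) do |report, path|
  report.merge(JSON.parse(File.read(path)))
end

File.write('knapsack/report-master.json', JSON.pretty_generate(master))
```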
### Flaky tests
#### Automatic skipping of flaky tests
Tests that are [known to be flaky](testing_guide/flaky_tests.md#automatic-retries-and-flaky-tests-detection) are
skipped unless the `$SKIP_FLAKY_TESTS_AUTOMATICALLY` variable is set to `false` or if the `~"pipeline:run-flaky-tests"`
label is set on the MR.
See the [experiment issue](https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/1069).
#### Automatic retry of failing tests in a separate process
Unless the `$RETRY_FAILED_TESTS_IN_NEW_PROCESS` variable is set to `false` (it defaults to `true`), RSpec tests that failed are automatically retried once in a separate
RSpec process. The goal is to get rid of most side-effects from previous tests that may lead to a subsequent test failure.
We keep track of retried tests in the `$RETRIED_TESTS_REPORT_FILE` file, saved as an artifact by the `rspec:flaky-tests-report` job.
See the [experiment issue](https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/1148).
### Compatibility testing
By default, we run all tests with the versions that run on GitLab.com.
Other versions (usually one back-compatible version, and one forward-compatible version) should run in nightly scheduled pipelines.
Exceptions to this general guideline should be motivated and documented.
#### Single database testing
By default, all tests run with [multiple databases](database/multiple_databases.md).
We also run tests with a single database in nightly scheduled pipelines, and in merge requests that touch database-related files.
If you want to force tests to run with a single database, you can add the `pipeline:run-single-db` label to the merge request.
### Monitoring
The GitLab test suite is [monitored](performance.md#rspec-profiling) for the `main` branch, and any branch
that includes `rspec-profile` in its name.
### Logging
- Rails logging to `log/test.log` is disabled by default in CI
[for performance reasons](https://jtway.co/speed-up-your-rails-test-suite-by-6-in-1-line-13fedb869ec4).
To override this setting, provide the
`RAILS_ENABLE_TEST_LOG` environment variable.
## Review app jobs
Consult the [Review Apps](testing_guide/review_apps.md) dedicated page for more information.
If you want to force a Review App to be deployed regardless of your changes, you can add the `pipeline:run-review-app` label to the merge request.
## As-if-FOSS jobs
The `* as-if-foss` jobs run the GitLab test suite "as if FOSS", meaning as if the jobs would run in the context
of `gitlab-org/gitlab-foss`. These jobs are only created in the following cases:
- when the `pipeline:run-as-if-foss` label is set on the merge request
- when the merge request is created in the `gitlab-org/security/gitlab` project
- when any CI configuration file is changed (for example, `.gitlab-ci.yml` or `.gitlab/ci/**/*`)
The `* as-if-foss` jobs are run in addition to the regular EE-context jobs. They have the `FOSS_ONLY='1'` variable
set and get the `ee/` folder removed before the tests start running.
The intent is to ensure that a change doesn't introduce a failure after `gitlab-org/gitlab` is synced to `gitlab-org/gitlab-foss`.
## As-if-JH jobs
NOTE:
This is disabled for now.
The `* as-if-jh` jobs run the GitLab test suite "as if JiHu", meaning as if the jobs would run in the context
of [GitLab JH](jh_features_review.md). These jobs are only created in the following cases:
- when the `pipeline:run-as-if-jh` label is set on the merge request
- when the `pipeline:run-all-rspec` label is set on the merge request
- when any code or backstage file is changed
- when any startup CSS file is changed
The `* as-if-jh` jobs are run in addition to the regular EE-context jobs. The `jh/` folder is added before the tests start running.
The intent is to ensure that a change doesn't introduce a failure after `gitlab-org/gitlab` is synced to [GitLab JH](https://jihulab.com/gitlab-cn/gitlab).
### When to consider applying `pipeline:run-as-if-jh` label
NOTE:
This is disabled for now.
If a Ruby file is renamed and there's a corresponding [`prepend_mod` line](jh_features_review.md#jh-features-based-on-ce-or-ee-features),
it's likely that GitLab JH is relying on it and requires a corresponding
change to rename the module or class it's prepending.
### Corresponding JH branch
NOTE:
This is disabled for now.
You can create a corresponding JH branch on [GitLab JH](https://jihulab.com/gitlab-cn/gitlab) by
appending `-jh` to the branch name. If a corresponding JH branch is found,
`* as-if-jh` jobs grab the `jh` folder from the respective branch,
rather than from the default branch `main-jh`.
NOTE:
For now, CI will try to fetch the branch on the [GitLab JH mirror](https://gitlab.com/gitlab-org/gitlab-jh-mirrors/gitlab), so it might take some time for the new JH branch to propagate to the mirror.
## Ruby 3.0 jobs
You can add the `pipeline:run-in-ruby3` label to the merge request to switch
the Ruby version used for running the whole test suite to 3.0. When you do
this, the test suite will no longer run in Ruby 2.7 (default), and an
additional job `verify-ruby-2.7` will also run and always fail to remind us to
remove the label and run in Ruby 2.7 before merging the merge request.
This should let us:
- Test changes for Ruby 3.0
- Make sure it will not break anything when it's merged into the default branch
## `undercover` RSpec test
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74859) in GitLab 14.6.
The `rspec:undercoverage` job runs [`undercover`](https://rubygems.org/gems/undercover)
to detect, and fail if, any changes introduced in the merge request have zero coverage.
The `rspec:undercoverage` job obtains coverage data from the `rspec:coverage`
job.
In the event of an emergency, or false positive from this job, add the
`pipeline:skip-undercoverage` label to the merge request to allow this job to
fail.
### Troubleshooting `rspec:undercoverage` failures
The `rspec:undercoverage` job has [known bugs](https://gitlab.com/groups/gitlab-org/-/epics/8254)
that can cause false positive failures. You can test coverage locally to determine if it's
safe to apply `~"pipeline:skip-undercoverage"`. For example, using `<spec>` as the name of the
test causing the failure:
1. Run `SIMPLECOV=1 bundle exec rspec <spec>`.
1. Run `scripts/undercoverage`.
If these commands return `undercover: ✅ No coverage is missing in latest changes` then you can apply `~"pipeline:skip-undercoverage"` to bypass pipeline failures.
## Ruby versions testing
Our test suite runs against Ruby 2 in merge requests and default branch pipelines.
We also run our test suite against Ruby 3 in separate 2-hourly scheduled pipelines, as GitLab.com will soon run on Ruby 3.
## PostgreSQL versions testing
Our test suite runs against PG12 as GitLab.com runs on PG12 and
[Omnibus defaults to PG12 for new installs and upgrades](../administration/package_information/postgresql_versions.md).
We do run our test suite against PG11 and PG13 on nightly scheduled pipelines.
We also run our test suite against PG11 upon specific database library changes in MRs and `main` pipelines (with the `rspec db-library-code pg11` job).
### Current versions testing
| Where? | PostgreSQL version | Ruby version |
|------------------------------------------------------------------------------------------------|-------------------------------------------------|--------------|
| Merge requests | 12 (default version), 11 for DB library changes | 2.7 (default version) |
| `master` branch commits | 12 (default version), 11 for DB library changes | 2.7 (default version) |
| `maintenance` scheduled pipelines for the `master` branch (every even-numbered hour) | 12 (default version), 11 for DB library changes | 2.7 (default version) |
| `maintenance` scheduled pipelines for the `ruby3` branch (every odd-numbered hour), see below. | 12 (default version), 11 for DB library changes | 3.0 (coded in the branch) |
| `nightly` scheduled pipelines for the `master` branch | 12 (default version), 11, 13 | 2.7 (default version) |
The pipeline configuration for the scheduled pipeline testing Ruby 3 is
stored in the `ruby3-sync` branch. The pipeline updates the `ruby3` branch
with latest `master`, and then it triggers a regular branch pipeline for
`ruby3`. Any changes in `ruby3` are only for running the pipeline. It should
never be merged back to `master`. Any other Ruby 3 changes should go into
`master` directly, which should be compatible with Ruby 2.7.
Previously, `ruby3-sync` used a project token stored in `RUBY3_SYNC_TOKEN`
(now backed up in `RUBY3_SYNC_TOKEN_NOT_USED`). However, due to various
permissions issues, we switched to an access token from `gitlab-bot`, so
`RUBY3_SYNC_TOKEN` is now actually an access token from `gitlab-bot`.
### Long-term plan
We follow the [PostgreSQL versions shipped with Omnibus GitLab](../administration/package_information/postgresql_versions.md):
| PostgreSQL version | 14.1 (July 2021) | 14.2 (August 2021) | 14.3 (September 2021) | 14.4 (October 2021) | 14.5 (November 2021) | 14.6 (December 2021) |
| -------------------| ---------------------- | ---------------------- | ---------------------- | ---------------------- | ---------------------- | ---------------------- |
| PG12 | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` |
| PG11 | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` |
| PG13 | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` |
## Redis versions testing
Our test suite runs against Redis 6 as GitLab.com runs on Redis 6 and
[Omnibus defaults to Redis 6 for new installs and upgrades](https://gitlab.com/gitlab-org/omnibus-gitlab/-/blob/master/config/software/redis.rb).
We do run our test suite against Redis 5 on `nightly` scheduled pipelines, specifically when running backward-compatible and forward-compatible PostgreSQL jobs.
### Current versions testing
| Where? | Redis version |
| ------ | ------------------ |
| MRs | 6 |
| `default branch` (non-scheduled pipelines) | 6 |
| `nightly` scheduled pipelines | 5 |
## Pipelines types for merge requests
In general, pipelines for an MR fall into one of the following types (from shortest to longest), depending on the changes made in the MR:
- [Documentation pipeline](#documentation-pipeline): For MRs that touch documentation.
- [Backend pipeline](#backend-pipeline): For MRs that touch backend code.
- [Frontend pipeline](#frontend-pipeline): For MRs that touch frontend code.
- [End-to-end pipeline](#end-to-end-pipeline): For MRs that touch code in the `qa/` folder.
A "pipeline type" is an abstract term that mostly describes the "critical path" (for example, the chain of jobs for which the sum
of individual duration equals the pipeline's duration).
We use these "pipeline types" in [metrics dashboards](https://app.periscopedata.com/app/gitlab/858266/GitLab-Pipeline-Durations) to detect what types and jobs need to be optimized first.
An MR that touches multiple areas would be associated with the longest type applicable. For instance, an MR that touches backend
and frontend would fall into the "Frontend" pipeline type since this type takes longer to finish than the "Backend" pipeline type.
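In other words (illustrative Ruby with made-up durations, purely to show the classification rule):

```ruby
# Hypothetical critical-path durations per pipeline type, in minutes.
TYPE_DURATIONS = { documentation: 5, backend: 60, frontend: 75, end_to_end: 110 }.freeze

# An MR is associated with the longest type that applies to its changes.
def pipeline_type(changed_areas)
  changed_areas.max_by { |area| TYPE_DURATIONS.fetch(area) }
end

pipeline_type(%i[backend frontend]) # => :frontend
```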
We use the [`rules:`](../ci/yaml/index.md#rules) and [`needs:`](../ci/yaml/index.md#needs) keywords extensively
to determine the jobs that need to be run in a pipeline. Note that an MR that includes multiple types of changes would
have a pipeline that includes jobs from multiple types (for example, a combination of docs-only and code-only pipelines).
Following are graphs of the critical paths for each pipeline type. Jobs that aren't part of the critical path are omitted.
### Documentation pipeline
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/-/pipelines/432049110).
```mermaid
graph LR
classDef criticalPath fill:#f66;
1-3["docs-lint links (5 minutes)"];
class 1-3 criticalPath;
click 1-3 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=8356757&udv=0"
```
### Backend pipeline
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/-/pipelines/433316063).
```mermaid
graph RL;
classDef criticalPath fill:#f66;
1-3["compile-test-assets (6 minutes)"];
class 1-3 criticalPath;
click 1-3 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=6914317&udv=0"
1-6["setup-test-env (4 minutes)"];
click 1-6 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=6914315&udv=0"
1-14["retrieve-tests-metadata"];
click 1-14 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=8356697&udv=0"
1-15["detect-tests"];
click 1-15 "https://app.periscopedata.com/app/gitlab/652085/EP---Jobs-Durations?widget=10113603&udv=1005715"
2_5-1["rspec & db jobs (24 minutes)"];
class 2_5-1 criticalPath;
click 2_5-1 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations"
2_5-1 --> 1-3 & 1-6 & 1-14 & 1-15;
3_2-1["rspec:coverage (5.35 minutes)"];
class 3_2-1 criticalPath;
click 3_2-1 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=7248745&udv=0"
3_2-1 -.->|"(don't use needs<br/>because of limitations)"| 2_5-1;
4_3-1["rspec:undercoverage (3.5 minutes)"];
class 4_3-1 criticalPath;
click 4_3-1 "https://app.periscopedata.com/app/gitlab/652085/EP---Jobs-Durations?widget=13446492&udv=1005715"
4_3-1 --> 3_2-1;
```
### Frontend pipeline
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/-/pipelines/431913287).
```mermaid
graph RL;
classDef criticalPath fill:#f66;
1-2["build-qa-image (2 minutes)"];
click 1-2 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=6914325&udv=0"
1-5["compile-production-assets (16 minutes)"];
class 1-5 criticalPath;
click 1-5 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=6914312&udv=0"
2_3-1["build-assets-image (1.3 minutes)"];
class 2_3-1 criticalPath;
2_3-1 --> 1-5
2_6-1["start-review-app-pipeline (49 minutes)"];
class 2_6-1 criticalPath;
click 2_6-1 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations"
2_6-1 --> 2_3-1 & 1-2;
```
### End-to-end pipeline
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/-/pipelines/431918463).
```mermaid
graph RL;
classDef criticalPath fill:#f66;
1-2["build-qa-image (2 minutes)"];
click 1-2 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=6914325&udv=0"
1-5["compile-production-assets (16 minutes)"];
class 1-5 criticalPath;
click 1-5 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=6914312&udv=0"
1-15["detect-tests"];
click 1-15 "https://app.periscopedata.com/app/gitlab/652085/EP---Jobs-Durations?widget=10113603&udv=1005715"
2_3-1["build-assets-image (1.3 minutes)"];
class 2_3-1 criticalPath;
2_3-1 --> 1-5
2_4-1["e2e:package-and-test (102 minutes)"];
class 2_4-1 criticalPath;
click 2_4-1 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=6914305&udv=0"
2_4-1 --> 1-2 & 2_3-1 & 1-15;
```
## CI configuration internals
### Workflow rules
Pipelines for the GitLab project are created using the [`workflow:rules` keyword](../ci/yaml/index.md#workflow)
feature of GitLab CI/CD.
Pipelines are always created for the following scenarios:
- `main` branch, including on schedules, pushes, merges, and so on.
- Merge requests.
- Tags.
- Stable, `auto-deploy`, and security branches.
Pipeline creation is also affected by the following CI/CD variables:
- If `$FORCE_GITLAB_CI` is set, pipelines are created.
- If `$GITLAB_INTERNAL` is not set, pipelines are not created.
No pipeline is created in any other case (for example, when pushing a branch with no
MR for it).
The source of truth for these workflow rules is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
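For illustration, a minimal `workflow:rules` sketch matching the scenarios above (the conditions are simplified; the authoritative rules live in `.gitlab-ci.yml`):

```yaml
workflow:
  rules:
    # Never create pipelines outside GitLab unless explicitly forced.
    - if: '$GITLAB_INTERNAL == null && $FORCE_GITLAB_CI == null'
      when: never
    - if: '$CI_MERGE_REQUEST_IID'                        # merge requests
    - if: '$CI_COMMIT_TAG'                               # tags
    - if: '$CI_COMMIT_BRANCH == "main"'                  # default branch
    - if: '$CI_COMMIT_BRANCH =~ /^[\d-]+-stable(-ee)?$/' # stable branches
```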
### Default image
The default image is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
<!-- vale gitlab.Spelling = NO -->
It includes Ruby, Go, Git, Git LFS, Chrome, Node, Yarn, PostgreSQL, and Graphics Magick.
<!-- vale gitlab.Spelling = YES -->
The images used in our pipelines are configured in the
[`gitlab-org/gitlab-build-images`](https://gitlab.com/gitlab-org/gitlab-build-images)
project, which is push-mirrored to [`gitlab/gitlab-build-images`](https://dev.gitlab.org/gitlab/gitlab-build-images)
for redundancy.
The current version of the build images can be found in the
["Used by GitLab section"](https://gitlab.com/gitlab-org/gitlab-build-images/blob/master/.gitlab-ci.yml).
### Default variables
In addition to the [predefined CI/CD variables](../ci/variables/predefined_variables.md),
each pipeline includes default variables defined in
[`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
### Stages
The current stages are:
- `sync`: This stage is used to synchronize changes from [`gitlab-org/gitlab`](https://gitlab.com/gitlab-org/gitlab) to
[`gitlab-org/gitlab-foss`](https://gitlab.com/gitlab-org/gitlab-foss).
- `prepare`: This stage includes jobs that prepare artifacts that are needed by
jobs in subsequent stages.
- `build-images`: This stage includes jobs that prepare Docker images
that are needed by jobs in subsequent stages or downstream pipelines.
- `fixtures`: This stage includes jobs that prepare fixtures needed by frontend tests.
- `lint`: This stage includes linting and static analysis jobs.
- `test`: This stage includes most of the tests, and DB/migration jobs.
- `post-test`: This stage includes jobs that build reports or gather data from
the `test` stage's jobs (for example, coverage, Knapsack metadata, and so on).
- `review`: This stage includes jobs that build the CNG images, deploy them, and
run end-to-end tests against Review Apps (see [Review Apps](testing_guide/review_apps.md) for details).
It also includes Docs Review App jobs.
- `qa`: This stage includes jobs that perform QA tasks against the Review App
that is deployed in stage `review`.
- `post-qa`: This stage includes jobs that build reports or gather data from
the `qa` stage's jobs (for example, Review App performance report).
- `pages`: This stage includes a job that deploys the various reports as
  GitLab Pages (for example, [`coverage-ruby`](https://gitlab-org.gitlab.io/gitlab/coverage-ruby/),
  and `webpack-report`, found at `https://gitlab-org.gitlab.io/gitlab/webpack-report/`, though there is
  [an issue with the deployment](https://gitlab.com/gitlab-org/gitlab/-/issues/233458)).
### Dependency Proxy
Some of the jobs use images from Docker Hub. For those, we use
`${GITLAB_DEPENDENCY_PROXY_ADDRESS}` as a prefix to the image path, so that we pull
images through our [Dependency Proxy](../user/packages/dependency_proxy/index.md).
By default, this variable is set from the value of `${GITLAB_DEPENDENCY_PROXY}`.
`${GITLAB_DEPENDENCY_PROXY}` is a group CI/CD variable defined in
[`gitlab-org`](https://gitlab.com/gitlab-org) as
`${CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX}/`. This means when we use an image
defined as:
```yaml
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}alpine:edge
```
Projects in the `gitlab-org` group pull from the Dependency Proxy, while
forks that reside on any other personal namespaces or groups fall back to
Docker Hub unless `${GITLAB_DEPENDENCY_PROXY}` is also defined there.
#### Workaround for when a pipeline is started by a project access token user
When a pipeline is started by a project access token user (for example, the `release-tools approver bot` user, which
automatically updates the Gitaly version used in the main project),
[the Dependency Proxy isn't accessible](https://gitlab.com/gitlab-org/gitlab/-/issues/332411#note_1130388163)
and the job fails at the `Preparing the "docker+machine" executor` step.
To work around that, we have a special workflow rule that overrides the
`${GITLAB_DEPENDENCY_PROXY_ADDRESS}` variable so that the Dependency Proxy isn't used in that case:
```yaml
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $GITLAB_USER_LOGIN =~ /project_\d+_bot\d*/'
variables:
GITLAB_DEPENDENCY_PROXY_ADDRESS: ""
```
NOTE:
We don't directly override the `${GITLAB_DEPENDENCY_PROXY}` variable because group-level
variables take precedence over `.gitlab-ci.yml` variables.
### Common job definitions
Most of the jobs [extend from a few CI definitions](../ci/yaml/index.md#extends)
defined in [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml)
that are scoped to a single [configuration keyword](../ci/yaml/index.md#job-keywords).
| Job definitions | Description |
|------------------|-------------|
| `.default-retry` | Allows a job to [retry](../ci/yaml/index.md#retry) upon `unknown_failure`, `api_failure`, `runner_system_failure`, `job_execution_timeout`, or `stuck_or_timeout_failure`. |
| `.default-before_script` | Allows a job to use a default `before_script` definition suitable for Ruby/Rails tasks that may need a database running (for example, tests). |
| `.setup-test-env-cache` | Allows a job to use a default `cache` definition suitable for setting up test environment for subsequent Ruby/Rails tasks. |
| `.rails-cache` | Allows a job to use a default `cache` definition suitable for Ruby/Rails tasks. |
| `.static-analysis-cache` | Allows a job to use a default `cache` definition suitable for static analysis tasks. |
| `.coverage-cache` | Allows a job to use a default `cache` definition suitable for coverage tasks. |
| `.qa-cache` | Allows a job to use a default `cache` definition suitable for QA tasks. |
| `.yarn-cache` | Allows a job to use a default `cache` definition suitable for frontend jobs that do a `yarn install`. |
| `.assets-compile-cache` | Allows a job to use a default `cache` definition suitable for frontend jobs that compile assets. |
| `.use-pg11` | Allows a job to run the `postgres` 11 and `redis` services (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific versions of the services). |
| `.use-pg11-ee` | Same as `.use-pg11` but also use an `elasticsearch` service (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific version of the service). |
| `.use-pg12` | Allows a job to use the `postgres` 12 and `redis` services (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific versions of the services). |
| `.use-pg12-ee` | Same as `.use-pg12` but also use an `elasticsearch` service (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific version of the service). |
| `.use-pg13` | Allows a job to use the `postgres` 13 and `redis` services (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific versions of the services). |
| `.use-pg13-ee` | Same as `.use-pg13` but also use an `elasticsearch` service (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific version of the service). |
| `.use-kaniko` | Allows a job to use the `kaniko` tool to build Docker images. |
| `.as-if-foss` | Simulate the FOSS project by setting the `FOSS_ONLY='1'` CI/CD variable. |
| `.use-docker-in-docker` | Allows a job to use Docker in Docker. |
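For example, a job can compose several of these definitions through `extends` (the job name below is illustrative):

```yaml
rspec-example:
  extends:
    - .default-retry
    - .default-before_script
    - .rails-cache
    - .use-pg12
  script:
    - bundle exec rspec
```

Because each definition is scoped to a single configuration keyword, combining them like this doesn't produce conflicting merges.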
### `rules`, `if:` conditions and `changes:` patterns
We're using the [`rules` keyword](../ci/yaml/index.md#rules) extensively.
All `rules` definitions are defined in
[`rules.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rules.gitlab-ci.yml),
then included in individual jobs via [`extends`](../ci/yaml/index.md#extends).
The `rules` definitions are composed of `if:` conditions and `changes:` patterns,
which are also defined in
[`rules.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rules.gitlab-ci.yml)
and included in `rules` definitions via [YAML anchors](../ci/yaml/yaml_optimization.md#anchors).
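A simplified sketch of this layering (the names and patterns are illustrative, not copied from `rules.gitlab-ci.yml`):

```yaml
# `if:` conditions and `changes:` patterns are defined once as YAML anchors...
.if-merge-request: &if-merge-request
  if: '$CI_PIPELINE_SOURCE == "merge_request_event"'

.code-patterns: &code-patterns
  - "{,ee/,jh/}app/**/*"
  - "{,ee/,jh/}lib/**/*"

# ...composed into named `rules` definitions...
.rails:rules:code-changes:
  rules:
    - <<: *if-merge-request
      changes: *code-patterns

# ...which individual jobs then reuse via `extends`.
some-rspec-job:
  extends: .rails:rules:code-changes
  script:
    - bundle exec rspec
```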
#### `if:` conditions
<!-- vale gitlab.Substitutions = NO -->
| `if:` conditions | Description | Notes |
|------------------|-------------|-------|
| `if-not-canonical-namespace` | Matches if the project isn't in the canonical (`gitlab-org/`) or security (`gitlab-org/security`) namespace. | Use to create a job for forks (by using `when: on_success|manual`), or **not** create a job for forks (by using `when: never`). |
| `if-not-ee` | Matches if the project isn't EE (that is, project name isn't `gitlab` or `gitlab-ee`). | Use to create a job only in the FOSS project (by using `when: on_success|manual`), or **not** create a job if the project is EE (by using `when: never`). |
| `if-not-foss` | Matches if the project isn't FOSS (that is, project name isn't `gitlab-foss`, `gitlab-ce`, or `gitlabhq`). | Use to create a job only in the EE project (by using `when: on_success|manual`), or **not** create a job if the project is FOSS (by using `when: never`). |
| `if-default-refs` | Matches if the pipeline is for `master`, `main`, `/^[\d-]+-stable(-ee)?$/` (stable branches), `/^\d+-\d+-auto-deploy-\d+$/` (auto-deploy branches), `/^security\//` (security branches), merge requests, and tags. | Note that jobs aren't created for branches with this default configuration. |
| `if-master-refs` | Matches if the current branch is `master` or `main`. | |
| `if-master-push` | Matches if the current branch is `master` or `main` and pipeline source is `push`. | |
| `if-master-schedule-maintenance` | Matches if the current branch is `master` or `main` and pipeline runs on a 2-hourly schedule. | |
| `if-master-schedule-nightly` | Matches if the current branch is `master` or `main` and pipeline runs on a nightly schedule. | |
| `if-auto-deploy-branches` | Matches if the current branch is an auto-deploy one. | |
| `if-master-or-tag` | Matches if the pipeline is for the `master` or `main` branch or for a tag. | |
| `if-merge-request` | Matches if the pipeline is for a merge request. | |
| `if-merge-request-title-as-if-foss` | Matches if the pipeline is for a merge request and the MR has label ~"pipeline:run-as-if-foss". | |
| `if-merge-request-title-update-caches` | Matches if the pipeline is for a merge request and the MR has label ~"pipeline:update-cache". | |
| `if-merge-request-title-run-all-rspec` | Matches if the pipeline is for a merge request and the MR has label ~"pipeline:run-all-rspec". | |
| `if-security-merge-request` | Matches if the pipeline is for a security merge request. | |
| `if-security-schedule` | Matches if the pipeline is for a security scheduled pipeline. | |
| `if-nightly-master-schedule` | Matches if the pipeline is for a `master` scheduled pipeline with `$NIGHTLY` set. | |
| `if-dot-com-gitlab-org-schedule` | Limits jobs creation to scheduled pipelines for the `gitlab-org` group on GitLab.com. | |
| `if-dot-com-gitlab-org-master` | Limits jobs creation to the `master` or `main` branch for the `gitlab-org` group on GitLab.com. | |
| `if-dot-com-gitlab-org-merge-request` | Limits jobs creation to merge requests for the `gitlab-org` group on GitLab.com. | |
| `if-dot-com-gitlab-org-and-security-tag` | Limits job creation to tags for the `gitlab-org` and `gitlab-org/security` groups on GitLab.com. | |
| `if-dot-com-gitlab-org-and-security-merge-request` | Limits jobs creation to merge requests for the `gitlab-org` and `gitlab-org/security` groups on GitLab.com. | |
| `if-dot-com-ee-schedule` | Limits jobs to scheduled pipelines for the `gitlab-org/gitlab` project on GitLab.com. | |
| `if-security-pipeline-merge-result` | Matches if the pipeline is for a security merge request triggered by `@gitlab-release-tools-bot`. | |
<!-- vale gitlab.Substitutions = YES -->
#### `changes:` patterns
| `changes:` patterns | Description |
|------------------------------|--------------------------------------------------------------------------|
| `ci-patterns` | Only create job for CI configuration-related changes. |
| `ci-build-images-patterns` | Only create job for CI configuration-related changes related to the `build-images` stage. |
| `ci-review-patterns` | Only create job for CI configuration-related changes related to the `review` stage. |
| `ci-qa-patterns` | Only create job for CI configuration-related changes related to the `qa` stage. |
| `yaml-lint-patterns` | Only create job for YAML-related changes. |
| `docs-patterns` | Only create job for docs-related changes. |
| `frontend-dependency-patterns` | Only create job when frontend dependencies are updated (that is, `package.json` and `yarn.lock` changes). |
| `frontend-patterns` | Only create job for frontend-related changes. |
| `backend-patterns` | Only create job for backend-related changes. |
| `db-patterns` | Only create job for DB-related changes. |
| `backstage-patterns` | Only create job for backstage-related changes (that is, Danger, fixtures, RuboCop, specs). |
| `code-patterns` | Only create job for code-related changes. |
| `qa-patterns` | Only create job for QA-related changes. |
| `code-backstage-patterns` | Combination of `code-patterns` and `backstage-patterns`. |
| `code-qa-patterns` | Combination of `code-patterns` and `qa-patterns`. |
| `code-backstage-qa-patterns` | Combination of `code-patterns`, `backstage-patterns`, and `qa-patterns`. |
| `static-analysis-patterns` | Only create jobs for static analysis configuration-related changes. |
## Performance
### Interruptible pipelines
By default, all jobs are [interruptible](../ci/yaml/index.md#interruptible), except the
`dont-interrupt-me` job, which runs automatically on `main` and is manual
otherwise.
If you want a running pipeline to finish even if you push new commits to a merge
request, be sure to start the `dont-interrupt-me` job before pushing.
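A hedged sketch of this setup (the stage and script are illustrative):

```yaml
default:
  interruptible: true

dont-interrupt-me:
  stage: sync
  interruptible: false
  rules:
    - if: '$CI_COMMIT_BRANCH == "main"'  # runs automatically on main
    - when: manual                       # manual everywhere else
      allow_failure: true
  script:
    - echo "Once I have started, this pipeline can't be auto-canceled."
```

Once a non-interruptible job has started, GitLab no longer auto-cancels the pipeline when new commits are pushed.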
### Git fetch caching
Because GitLab.com uses the [pack-objects cache](../administration/gitaly/configure_gitaly.md#pack-objects-cache),
concurrent Git fetches of the same pipeline ref are deduplicated on
the Gitaly server (always) and served from cache (when available).
This works well for the following reasons:
- The pack-objects cache is enabled on all Gitaly servers on GitLab.com.
- The CI/CD [Git strategy setting](../ci/pipelines/settings.md#choose-the-default-git-strategy) for `gitlab-org/gitlab` is **Git clone**,
causing all jobs to fetch the same data, which maximizes the cache hit ratio.
- We use [shallow clone](../ci/pipelines/settings.md#limit-the-number-of-changes-fetched-during-clone) to avoid downloading the full Git
history for every job.
### Caching strategy
1. All jobs must only pull caches by default.
1. All jobs must be able to pass with an empty cache. In other words, caches are only there to speed up jobs.
1. We currently have several different cache definitions defined in
[`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml),
with fixed keys:
- `.setup-test-env-cache`
- `.ruby-cache`
- `.rails-cache`
- `.static-analysis-cache`
- `.rubocop-cache`
- `.coverage-cache`
- `.ruby-node-cache`
- `.qa-cache`
- `.yarn-cache`
- `.assets-compile-cache` (the key includes `${NODE_ENV}` so it's actually two different caches).
1. These cache definitions are composed of [multiple atomic caches](../ci/caching/index.md#use-multiple-caches).
1. Only the following jobs, which run in 2-hourly `maintenance` scheduled pipelines, push to (that is, update) the caches (see the sketch after this list):
- `update-setup-test-env-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml).
- `update-gitaly-binaries-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml).
- `update-rubocop-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml).
- `update-qa-cache`, defined in [`.gitlab/ci/qa.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/qa.gitlab-ci.yml).
- `update-assets-compile-production-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
- `update-assets-compile-test-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
- `update-yarn-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
- `update-storybook-yarn-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
1. These jobs can also be forced to run in merge requests with the `pipeline:update-cache` label (this can be useful to warm the caches in an MR that updates the cache keys).
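A minimal sketch of this pull-only/push-on-schedule split, assuming a simplified cache definition (the key, paths, and job names are illustrative):

```yaml
.rails-cache:
  cache:
    key: rails-v1
    paths:
      - vendor/ruby/
    policy: pull  # regular jobs only download the cache

update-rails-cache:
  extends: .rails-cache
  cache:
    policy: push  # only the scheduled job uploads a fresh cache
  script:
    - bundle install
```

With `extends`, the child job's `cache:policy` overrides the inherited one while the key and paths are kept.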
### Artifacts strategy
We limit the artifacts that are saved and retrieved by jobs to the minimum to reduce the upload/download time and costs, as well as the artifacts storage.
### Components caching
Some external components (GitLab Workhorse and frontend assets) of GitLab need to be built from source as a preliminary step for running tests.
#### `cache-workhorse`
In [this MR](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/79766), and then
[this MR](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/96297),
we introduced a new `cache-workhorse` job that:
- runs automatically for all GitLab.com `gitlab-org/gitlab` scheduled pipelines
- runs automatically for any `master` commit that touches the `workhorse/` folder
- is manual for GitLab.com `gitlab-org` MRs that touch caching-related files
This job tries to download a generic package that contains GitLab Workhorse binaries needed in the GitLab test suite (under `tmp/tests/gitlab-workhorse`); a sketch of the flow follows the note below.
- If the package URL returns a 404:
1. It runs `scripts/setup-test-env`, so that the GitLab Workhorse binaries are built.
1. It then creates an archive which contains the binaries and uploads it [as a generic package](https://gitlab.com/gitlab-org/gitlab/-/packages/).
- Otherwise, if the package already exists, the job exits successfully.
We also changed the `setup-test-env` job to:
1. First, download the GitLab Workhorse generic package built and uploaded by `cache-workhorse`.
1. If the package is retrieved successfully, its content is placed in the right folder (for example, `tmp/tests/gitlab-workhorse`), preventing the binaries from being built when `scripts/setup-test-env` is run later on.
1. If the package URL returns a 404, the behavior doesn't change: the GitLab Workhorse binaries are built as part of `scripts/setup-test-env`.
NOTE:
The version of the package is the workhorse tree SHA (for example, `git rev-parse HEAD:workhorse`).
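A hedged sketch of this download-or-build flow, using the generic packages API (the package name and paths are illustrative):

```yaml
cache-workhorse:
  script:
    # The package version is the workhorse tree SHA.
    - WORKHORSE_TREE=$(git rev-parse HEAD:workhorse)
    - PACKAGE_URL="${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/gitlab-workhorse/${WORKHORSE_TREE}/gitlab-workhorse.tar.gz"
    - |
      if curl --fail --silent --output /dev/null --header "JOB-TOKEN: ${CI_JOB_TOKEN}" "$PACKAGE_URL"; then
        echo "Package already exists; nothing to do."
      else
        scripts/setup-test-env  # builds the GitLab Workhorse binaries
        tar -czf workhorse.tar.gz -C tmp/tests gitlab-workhorse
        curl --fail --header "JOB-TOKEN: ${CI_JOB_TOKEN}" --upload-file workhorse.tar.gz "$PACKAGE_URL"
      fi
```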
#### `cache-assets`
In [this MR](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/96297),
we introduced three new `cache-assets:test`, `cache-assets:test as-if-foss`,
and `cache-assets:production` jobs that:
- never run unless `$CACHE_ASSETS_AS_PACKAGE == "true"`
- run automatically for all GitLab.com `gitlab-org/gitlab` scheduled pipelines
- run automatically for any `master` commit that touches the assets-related folders
- are manual for GitLab.com `gitlab-org` MRs that touch caching-related files
These jobs try to download a generic package that contains GitLab compiled assets
needed in the GitLab test suite (under `app/assets/javascripts/locale/**/app.js`,
and `public/assets`).
- If the package URL returns a 404:
1. It runs `bin/rake gitlab:assets:compile`, so that the GitLab assets are compiled.
1. It then creates an archive which contains the assets and uploads it [as a generic package](https://gitlab.com/gitlab-org/gitlab/-/packages/).
The package version is set to the assets folders' hash sum.
- Otherwise, if the package already exists, the job exits successfully.
#### `compile-*-assets`
We also changed the `compile-test-assets`, `compile-test-assets as-if-foss`,
and `compile-production-assets` jobs to:
1. First download the "native" cache assets, which contain:
- The [compiled assets](https://gitlab.com/gitlab-org/gitlab/-/blob/a6910c9086bb28e553f5e747ec2dd50af6da3c6b/.gitlab/ci/global.gitlab-ci.yml#L86-87).
- A [`cached-assets-hash.txt` file](https://gitlab.com/gitlab-org/gitlab/-/blob/a6910c9086bb28e553f5e747ec2dd50af6da3c6b/.gitlab/ci/global.gitlab-ci.yml#L85)
containing the `SHA256` hexdigest of all the source files on which the assets depend.
This list of files is pessimistic: the assets might not actually depend on
some of these files. At worst, we compile the assets more often, which is better than
using outdated assets.
The file is [created after assets are compiled](https://gitlab.com/gitlab-org/gitlab/-/blob/a6910c9086bb28e553f5e747ec2dd50af6da3c6b/.gitlab/ci/frontend.gitlab-ci.yml#L83).
1. We then compute the `SHA256` hexdigest of all the source files the assets depend on, **for the current checked out branch**. We [store the hexdigest in the `GITLAB_ASSETS_HASH` variable](https://gitlab.com/gitlab-org/gitlab/-/blob/a6910c9086bb28e553f5e747ec2dd50af6da3c6b/.gitlab/ci/frontend.gitlab-ci.yml#L27).
1. If `$CACHE_ASSETS_AS_PACKAGE == "true"`, we download the generic package built and uploaded by [`cache-assets:*`](#cache-assets).
- If the cache is up-to-date for the checked-out branch, we download the native cache
**and** the cache package. We could optimize that by not downloading
the generic package, but the native cache is very often outdated because it's
rebuilt only every 2 hours.
1. We [run the `assets_compile_script` function](https://gitlab.com/gitlab-org/gitlab/-/blob/a6910c9086bb28e553f5e747ec2dd50af6da3c6b/.gitlab/ci/frontend.gitlab-ci.yml#L35),
which [itself runs](https://gitlab.com/gitlab-org/gitlab/-/blob/c023191ef412e868ae957f3341208a41ca678403/scripts/utils.sh#L76)
the [`assets:compile` Rake task](https://gitlab.com/gitlab-org/gitlab/-/blob/c023191ef412e868ae957f3341208a41ca678403/lib/tasks/gitlab/assets.rake#L80-109).
This task is responsible for deciding if assets need to be compiled or not.
It [compares the `HEAD` `SHA256` hexdigest from `$GITLAB_ASSETS_HASH` with the `master` hexdigest from `cached-assets-hash.txt`](https://gitlab.com/gitlab-org/gitlab/-/blob/c023191ef412e868ae957f3341208a41ca678403/lib/tasks/gitlab/assets.rake#L86).
1. If the hashes are the same, we don't compile anything. If they're different, we compile the assets.
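Putting these steps together, a condensed, hypothetical sketch expressed as shell in a CI job (the real comparison lives in the `assets:compile` Rake task, and the file list here is illustrative):

```yaml
compile-test-assets:
  script:
    # Hash the asset source files of the current branch.
    - export GITLAB_ASSETS_HASH=$(git ls-files app/assets | sort | xargs cat | sha256sum | awk '{print $1}')
    # Recompile only when the branch hash differs from the cached master hash.
    - |
      if [ "$GITLAB_ASSETS_HASH" = "$(cat cached-assets-hash.txt 2>/dev/null)" ]; then
        echo "Cached assets are up to date; skipping compilation."
      else
        bin/rake gitlab:assets:compile
      fi
```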
### Pre-clone step
NOTE:
We no longer use this optimization for `gitlab-org/gitlab` because the [pack-objects cache](../administration/gitaly/configure_gitaly.md#pack-objects-cache)
allows Gitaly to serve the full CI/CD fetch traffic now. See [Git fetch caching](#git-fetch-caching).
The pre-clone step works by using the `CI_PRE_CLONE_SCRIPT` variable
[defined by GitLab.com shared runners](../ci/runners/saas/linux_saas_runner.md#pre-clone-script).
---
[Return to Development documentation](index.md)
<!-- This redirect file can be deleted after <2023-01-20>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->
<!-- Redirects that point to docs in a different project or site (link is not relative and starts with `https:`) expire in one year. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->
View File
@ -0,0 +1,530 @@
---
stage: none
group: Engineering Productivity
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Pipelines for the GitLab project
Pipelines for [`gitlab-org/gitlab`](https://gitlab.com/gitlab-org/gitlab) (as well as the `dev` instance's) are configured in the usual
[`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml)
which itself includes files under
[`.gitlab/ci/`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/.gitlab/ci)
for easier maintenance.
We're striving to [dogfood](https://about.gitlab.com/handbook/engineering/development/principles/#dogfooding)
GitLab [CI/CD features and best-practices](../../ci/yaml/index.md)
as much as possible.
## Minimal test jobs before a merge request is approved
**To reduce the pipeline cost and shorten the job duration, before a merge request is approved, the pipeline will run a minimal set of RSpec & Jest tests that are related to the merge request changes.**
After a merge request has been approved, the pipeline contains the full RSpec & Jest tests. This ensures that all tests
have been run before a merge request is merged.
### Overview of the GitLab project test dependency
To understand how the minimal test jobs are executed, we need to understand the dependency between
GitLab code (frontend and backend) and the respective tests (Jest and RSpec).
This dependency can be visualized in the following diagram:
```mermaid
flowchart LR
subgraph frontend
fe["Frontend code"]--tested with-->jest
end
subgraph backend
be["Backend code"]--tested with-->rspec
end
be--generates-->fixtures["frontend fixtures"]
fixtures--used in-->jest
```
In summary:
- RSpec tests are dependent on the backend code.
- Jest tests are dependent on both frontend and backend code, the latter through the frontend fixtures.
### RSpec minimal jobs
#### Determining related RSpec test files in a merge request
To identify the minimal set of tests needed, we use the [`test_file_finder` gem](https://gitlab.com/gitlab-org/ci-cd/test_file_finder), with two strategies:
- dynamic mapping from test coverage tracing (generated via the [`Crystalball` gem](https://github.com/toptal/crystalball))
([see where it's used](https://gitlab.com/gitlab-org/gitlab/-/blob/47d507c93779675d73a05002e2ec9c3c467cd698/tooling/bin/find_tests#L15))
- static mapping maintained in the [`tests.yml` file](https://gitlab.com/gitlab-org/gitlab/-/blob/master/tests.yml) for special cases that cannot
be mapped via coverage tracing ([see where it's used](https://gitlab.com/gitlab-org/gitlab/-/blob/47d507c93779675d73a05002e2ec9c3c467cd698/tooling/bin/find_tests#L12))
The test mappings contain a map of each source file to a list of test files that depend on the source file.
In the `detect-tests` job, we use this mapping to identify the minimal tests needed for the current merge request.
Later on in [the `rspec fail-fast` job](#fail-fast-job-in-merge-request-pipelines), we run the minimal tests needed for the current merge request.
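For illustration, a static entry in `tests.yml` might map source files to specs like this (the entry below is hypothetical; see the file itself for the exact syntax and real mappings):

```yaml
mapping:
  # Map each finder to its corresponding spec file.
  - source: 'app/finders/(?<name>.*)\.rb'
    test: 'spec/finders/%{name}_spec.rb'
```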
#### Exceptional cases
In addition, there are a few circumstances where we would always run the full RSpec tests:
- when the `pipeline:run-all-rspec` label is set on the merge request
- when the `pipeline:run-full-rspec` label is set on the merge request (this label is assigned by triage automation when the merge request is approved by any reviewer)
- when the merge request is created by an automation (for example, Gitaly update or MR targeting a stable branch)
- when the merge request is created in a security mirror
- when any CI configuration file is changed (for example, `.gitlab-ci.yml` or `.gitlab/ci/**/*`)
### Jest minimal jobs
#### Determining related Jest test files in a merge request
To identify the minimal set of tests needed, we pass a list of all the changed files into `jest` using the [`--findRelatedTests`](https://jestjs.io/docs/cli#--findrelatedtests-spaceseparatedlistofsourcefiles) option.
In this mode, `jest` resolves all the dependencies related to the changed files, which include test files that have these files in their dependency chain.
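A hedged sketch of such an invocation (how the changed-files list is produced is illustrative):

```yaml
jest-minimal:
  script:
    # Run only the test files whose dependency chain includes a changed file.
    - yarn jest --findRelatedTests $(cat tmp/changed_files.txt) --passWithNoTests
```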
#### Exceptional cases
In addition, there are a few circumstances where we would always run the full Jest tests:
- when the `pipeline:run-all-jest` label is set on the merge request
- when the merge request is created by an automation (for example, Gitaly update or MR targeting a stable branch)
- when the merge request is created in a security mirror
- when any CI configuration file is changed (for example, `.gitlab-ci.yml` or `.gitlab/ci/**/*`)
- when any frontend "core" file is changed (for example, `package.json`, `yarn.lock`, `babel.config.js`, `jest.config.*.js`, `config/helpers/**/*.js`)
- when any vendored JavaScript file is changed (for example, `vendor/assets/javascripts/**/*`)
- when any backend file is changed ([see the patterns list for details](https://gitlab.com/gitlab-org/gitlab/-/blob/3616946936c1adbd9e754c1bd06f86ba670796d8/.gitlab/ci/rules.gitlab-ci.yml#L205-216))
### Fork pipelines
We run only the minimal RSpec & Jest jobs for fork pipelines, unless the `pipeline:run-all-rspec`
label is set on the MR. The goal is to reduce the CI/CD minutes consumed by fork pipelines.
See the [experiment issue](https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/1170).
## Fail-fast job in merge request pipelines
To provide faster feedback when a merge request breaks existing tests, we are experimenting with a
fail-fast mechanism.
An `rspec fail-fast` job is added in parallel to all other `rspec` jobs in a merge
request pipeline. This job runs the tests that are directly related to the changes
in the merge request.
If any of these tests fail, the `rspec fail-fast` job fails, triggering a
`fail-pipeline-early` job to run. The `fail-pipeline-early` job:
- Cancels the currently running pipeline and all in-progress jobs.
- Sets the pipeline status to `failed`.
For example:
```mermaid
graph LR
subgraph "prepare stage";
A["detect-tests"]
end
subgraph "test stage";
B["jest"];
C["rspec migration"];
D["rspec unit"];
E["rspec integration"];
F["rspec system"];
G["rspec fail-fast"];
end
subgraph "post-test stage";
Z["fail-pipeline-early"];
end
A --"artifact: list of test files"--> G
G --"on failure"--> Z
```
The `rspec fail-fast` job is a no-op if there are more than 10 test files related to the
merge request. This prevents `rspec fail-fast` duration from exceeding the average
`rspec` job duration and defeating its purpose.
This number can be overridden by setting a CI/CD variable named `RSPEC_FAIL_FAST_TEST_FILE_COUNT_THRESHOLD`.
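For example, to raise the threshold to 20 files, you could define the variable as follows (the value is illustrative):

```yaml
variables:
  RSPEC_FAIL_FAST_TEST_FILE_COUNT_THRESHOLD: "20"
```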
## Faster feedback when reverting merge requests
When you need to revert a merge request, to get accelerated feedback, you can add the `~pipeline:revert` label to your merge request.
When this label is assigned, the following steps of the CI/CD pipeline are skipped:
- The `e2e:package-and-test` job.
- The `rspec:undercoverage` job.
- The entire [Review Apps process](../testing_guide/review_apps.md).
Apply the label to the merge request, and run a new pipeline for the MR.
## Test jobs
We have dedicated jobs for each [testing level](../testing_guide/testing_levels.md) and each job runs depending on the
changes made in your merge request.
If you want to force all the RSpec jobs to run regardless of your changes, you can add the `pipeline:run-all-rspec` label to the merge request.
WARNING:
Forcing all jobs to run on a docs-only MR means the prerequisite jobs are missing, which leads to errors.
### Test suite parallelization
Our current RSpec tests parallelization setup is as follows:
1. The `retrieve-tests-metadata` job in the `prepare` stage ensures we have a
`knapsack/report-master.json` file:
- The `knapsack/report-master.json` file is fetched from the latest `main` pipeline which runs `update-tests-metadata`
(for now, it's the 2-hourly `maintenance` scheduled master pipeline). If the file isn't available, we initialize it with `{}`.
1. Each `[rspec|rspec-ee] [migration|unit|integration|system|geo] n m` job is run with
`knapsack rspec` and should have an evenly distributed share of tests:
- It works because the jobs have access to the `knapsack/report-master.json`
since the "artifacts from all previous stages are passed by default".
- The jobs set their own report path to
`"knapsack/${TEST_TOOL}_${TEST_LEVEL}_${DATABASE}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json"`.
- If Knapsack is doing its job, test files that are run should be listed under
`Report specs`, not under `Leftover specs`.
1. The `update-tests-metadata` job (which only runs on scheduled pipelines for
[the canonical project](https://gitlab.com/gitlab-org/gitlab)) takes all the
`knapsack/rspec*.json` files and merges them into a single
`knapsack/report-master.json` file that is saved as an artifact.
After that, the next pipeline uses the up-to-date `knapsack/report-master.json` file.
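A simplified sketch of one such parallelized job (the job name, parallel count, and wiring are illustrative; the real jobs configure several Knapsack variables in helper scripts):

```yaml
rspec unit:
  parallel: 28  # each node gets CI_NODE_INDEX / CI_NODE_TOTAL automatically
  variables:
    # Per-node report path, as described above.
    KNAPSACK_REPORT_PATH: "knapsack/${TEST_TOOL}_${TEST_LEVEL}_${DATABASE}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json"
  script:
    # Knapsack splits spec files across nodes using the timing data
    # fetched by retrieve-tests-metadata.
    - bundle exec knapsack rspec
```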
### Flaky tests
#### Automatic skipping of flaky tests
Tests that are [known to be flaky](../testing_guide/flaky_tests.md#automatic-retries-and-flaky-tests-detection) are
skipped unless the `$SKIP_FLAKY_TESTS_AUTOMATICALLY` variable is set to `false` or if the `~"pipeline:run-flaky-tests"`
label is set on the MR.
See the [experiment issue](https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/1069).
#### Automatic retry of failing tests in a separate process
Unless the `$RETRY_FAILED_TESTS_IN_NEW_PROCESS` variable is set to `false` (it defaults to `true`), RSpec tests that failed are automatically retried once in a separate
RSpec process. The goal is to get rid of most side effects from previous tests that may lead to a subsequent test failure.
We keep track of retried tests in the `$RETRIED_TESTS_REPORT_FILE` file saved as artifact by the `rspec:flaky-tests-report` job.
See the [experiment issue](https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/1148).
### Compatibility testing
By default, we run all tests with the versions that run on GitLab.com.
Other versions (usually one back-compatible version, and one forward-compatible version) should be running in nightly scheduled pipelines.
Exceptions to this general guideline should be motivated and documented.
#### Single database testing
By default, all tests run with [multiple databases](../database/multiple_databases.md).
We also run tests with a single database in nightly scheduled pipelines, and in merge requests that touch database-related files.
If you want to force tests to run with a single database, you can add the `pipeline:run-single-db` label to the merge request.
### Monitoring
The GitLab test suite is [monitored](../performance.md#rspec-profiling) for the `main` branch, and any branch
that includes `rspec-profile` in its name.
### Logging
- Rails logging to `log/test.log` is disabled by default in CI
[for performance reasons](https://jtway.co/speed-up-your-rails-test-suite-by-6-in-1-line-13fedb869ec4).
To override this setting, provide the
`RAILS_ENABLE_TEST_LOG` environment variable.
## Review app jobs
Consult the [Review Apps](../testing_guide/review_apps.md) dedicated page for more information.
If you want to force a Review App to be deployed regardless of your changes, you can add the `pipeline:run-review-app` label to the merge request.
## As-if-FOSS jobs
The `* as-if-foss` jobs run the GitLab test suite "as if FOSS", meaning as if the jobs would run in the context
of `gitlab-org/gitlab-foss`. These jobs are only created in the following cases:
- when the `pipeline:run-as-if-foss` label is set on the merge request
- when the merge request is created in the `gitlab-org/security/gitlab` project
- when any CI configuration file is changed (for example, `.gitlab-ci.yml` or `.gitlab/ci/**/*`)
The `* as-if-foss` jobs are run in addition to the regular EE-context jobs. They have the `FOSS_ONLY='1'` variable
set and get the `ee/` folder removed before the tests start running.
The intent is to ensure that a change doesn't introduce a failure after `gitlab-org/gitlab` is synced to `gitlab-org/gitlab-foss`.
## As-if-JH jobs
NOTE:
This is disabled for now.
The `* as-if-jh` jobs run the GitLab test suite "as if JiHu", meaning as if the jobs would run in the context
of [GitLab JH](../jh_features_review.md). These jobs are only created in the following cases:
- when the `pipeline:run-as-if-jh` label is set on the merge request
- when the `pipeline:run-all-rspec` label is set on the merge request
- when any code or backstage file is changed
- when any startup CSS file is changed
The `* as-if-jh` jobs are run in addition to the regular EE-context jobs. The `jh/` folder is added before the tests start running.
The intent is to ensure that a change doesn't introduce a failure after `gitlab-org/gitlab` is synced to [GitLab JH](https://jihulab.com/gitlab-cn/gitlab).
### When to consider applying `pipeline:run-as-if-jh` label
NOTE:
This is disabled for now.
If a Ruby file is renamed and there's a corresponding [`prepend_mod` line](../jh_features_review.md#jh-features-based-on-ce-or-ee-features),
it's likely that GitLab JH is relying on it and requires a corresponding
change to rename the module or class it's prepending.
### Corresponding JH branch
NOTE:
This is disabled for now.
You can create a corresponding JH branch on [GitLab JH](https://jihulab.com/gitlab-cn/gitlab) by
appending `-jh` to the branch name. If a corresponding JH branch is found,
`* as-if-jh` jobs grab the `jh` folder from the respective branch,
rather than from the default branch `main-jh`.
NOTE:
For now, CI will try to fetch the branch on the [GitLab JH mirror](https://gitlab.com/gitlab-org/gitlab-jh-mirrors/gitlab), so it might take some time for the new JH branch to propagate to the mirror.
## Ruby 3.0 jobs
You can add the `pipeline:run-in-ruby3` label to the merge request to switch
the Ruby version used for running the whole test suite to 3.0. When you do
this, the test suite will no longer run in Ruby 2.7 (default), and an
additional job `verify-ruby-2.7` will also run and always fail to remind us to
remove the label and run in Ruby 2.7 before merging the merge request.
This should let us:
- Test changes for Ruby 3.0
- Make sure it will not break anything when it's merged into the default branch
## `undercover` RSpec test
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74859) in GitLab 14.6.
The `rspec:undercoverage` job runs [`undercover`](https://rubygems.org/gems/undercover)
to detect, and fail the job if, any changes introduced in the merge request have zero coverage.
The `rspec:undercoverage` job obtains coverage data from the `rspec:coverage`
job.
In the event of an emergency, or false positive from this job, add the
`pipeline:skip-undercoverage` label to the merge request to allow this job to
fail.
### Troubleshooting `rspec:undercoverage` failures
The `rspec:undercoverage` job has [known bugs](https://gitlab.com/groups/gitlab-org/-/epics/8254)
that can cause false positive failures. You can test coverage locally to determine if it's
safe to apply `~"pipeline:skip-undercoverage"`. For example, using `<spec>` as the name of the
test causing the failure:
1. Run `SIMPLECOV=1 bundle exec rspec <spec>`.
1. Run `scripts/undercoverage`.
If these commands return `undercover: ✅ No coverage is missing in latest changes` then you can apply `~"pipeline:skip-undercoverage"` to bypass pipeline failures.
## Ruby versions testing
Our test suite runs against Ruby 2 in merge requests and default branch pipelines.
We also run our test suite against Ruby 3 in another set of 2-hourly scheduled pipelines, as GitLab.com will soon run on Ruby 3.
## PostgreSQL versions testing
Our test suite runs against PG12 as GitLab.com runs on PG12 and
[Omnibus defaults to PG12 for new installs and upgrades](../../administration/package_information/postgresql_versions.md).
We do run our test suite against PG11 and PG13 on nightly scheduled pipelines.
We also run our test suite against PG11 upon specific database library changes in MRs and `main` pipelines (with the `rspec db-library-code pg11` job).
### Current versions testing
| Where? | PostgreSQL version | Ruby version |
|------------------------------------------------------------------------------------------------|-------------------------------------------------|--------------|
| Merge requests | 12 (default version), 11 for DB library changes | 2.7 (default version) |
| `master` branch commits | 12 (default version), 11 for DB library changes | 2.7 (default version) |
| `maintenance` scheduled pipelines for the `master` branch (every even-numbered hour) | 12 (default version), 11 for DB library changes | 2.7 (default version) |
| `maintenance` scheduled pipelines for the `ruby3` branch (every odd-numbered hour), see below. | 12 (default version), 11 for DB library changes | 3.0 (coded in the branch) |
| `nightly` scheduled pipelines for the `master` branch | 12 (default version), 11, 13 | 2.7 (default version) |
The pipeline configuration for the scheduled pipeline testing Ruby 3 is
stored in the `ruby3-sync` branch. The pipeline updates the `ruby3` branch
with the latest `master`, and then triggers a regular branch pipeline for
`ruby3`. Any changes in `ruby3` are only for running the pipeline. It should
never be merged back to `master`. Any other Ruby 3 changes should go into
`master` directly, which should be compatible with Ruby 2.7.
Previously, `ruby3-sync` used a project token stored in `RUBY3_SYNC_TOKEN`
(now backed up in `RUBY3_SYNC_TOKEN_NOT_USED`). However, due to various
permission issues, we ended up using an access token from `gitlab-bot`, so
`RUBY3_SYNC_TOKEN` is now an access token from `gitlab-bot`.
### Long-term plan
We follow the [PostgreSQL versions shipped with Omnibus GitLab](../../administration/package_information/postgresql_versions.md):
| PostgreSQL version | GitLab 14.1 (July 2021) | GitLab 14.2 (August 2021) | GitLab 14.3 (September 2021) | GitLab 14.4 (October 2021) | GitLab 14.5 (November 2021) | GitLab 14.6 (December 2021) |
| -------------------| ---------------------- | ---------------------- | ---------------------- | ---------------------- | ---------------------- | ---------------------- |
| PG12 | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` |
| PG11 | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` |
| PG13 | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` |
## Redis versions testing
Our test suite runs against Redis 6 as GitLab.com runs on Redis 6 and
[Omnibus defaults to Redis 6 for new installs and upgrades](https://gitlab.com/gitlab-org/omnibus-gitlab/-/blob/master/config/software/redis.rb).
We do run our test suite against Redis 5 on `nightly` scheduled pipelines, specifically when running backward-compatible and forward-compatible PostgreSQL jobs.
### Current versions testing
| Where? | Redis version |
| ------ | ------------------ |
| MRs | 6 |
| `default branch` (non-scheduled pipelines) | 6 |
| `nightly` scheduled pipelines | 5 |
## Pipelines types for merge requests
In general, pipelines for an MR fall into one of the following types (from shorter to longer), depending on the changes made in the MR:
- [Documentation pipeline](#documentation-pipeline): For MRs that touch documentation.
- [Backend pipeline](#backend-pipeline): For MRs that touch backend code.
- [Frontend pipeline](#frontend-pipeline): For MRs that touch frontend code.
- [End-to-end pipeline](#end-to-end-pipeline): For MRs that touch code in the `qa/` folder.
A "pipeline type" is an abstract term that mostly describes the "critical path" (for example, the chain of jobs for which the sum
of individual duration equals the pipeline's duration).
We use these "pipeline types" in [metrics dashboards](https://app.periscopedata.com/app/gitlab/858266/GitLab-Pipeline-Durations) to detect what types and jobs need to be optimized first.
An MR that touches multiple areas would be associated with the longest type applicable. For instance, an MR that touches backend
and frontend would fall into the "Frontend" pipeline type since this type takes longer to finish than the "Backend" pipeline type.
We use the [`rules:`](../../ci/yaml/index.md#rules) and [`needs:`](../../ci/yaml/index.md#needs) keywords extensively
to determine the jobs that need to be run in a pipeline. Note that an MR that includes multiple types of changes would
have a pipeline that includes jobs from multiple types (for example, a combination of docs-only and code-only pipelines).
Following are graphs of the critical paths for each pipeline type. Jobs that aren't part of the critical path are omitted.
### Documentation pipeline
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/-/pipelines/432049110).
```mermaid
graph LR
classDef criticalPath fill:#f66;
1-3["docs-lint links (5 minutes)"];
class 1-3 criticalPath;
click 1-3 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=8356757&udv=0"
```
### Backend pipeline
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/-/pipelines/433316063).
```mermaid
graph RL;
classDef criticalPath fill:#f66;
1-3["compile-test-assets (6 minutes)"];
class 1-3 criticalPath;
click 1-3 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=6914317&udv=0"
1-6["setup-test-env (4 minutes)"];
click 1-6 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=6914315&udv=0"
1-14["retrieve-tests-metadata"];
click 1-14 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=8356697&udv=0"
1-15["detect-tests"];
click 1-15 "https://app.periscopedata.com/app/gitlab/652085/EP---Jobs-Durations?widget=10113603&udv=1005715"
2_5-1["rspec & db jobs (24 minutes)"];
class 2_5-1 criticalPath;
click 2_5-1 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations"
2_5-1 --> 1-3 & 1-6 & 1-14 & 1-15;
3_2-1["rspec:coverage (5.35 minutes)"];
class 3_2-1 criticalPath;
click 3_2-1 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=7248745&udv=0"
3_2-1 -.->|"(don't use needs<br/>because of limitations)"| 2_5-1;
4_3-1["rspec:undercoverage (3.5 minutes)"];
class 4_3-1 criticalPath;
click 4_3-1 "https://app.periscopedata.com/app/gitlab/652085/EP---Jobs-Durations?widget=13446492&udv=1005715"
4_3-1 --> 3_2-1;
```
### Frontend pipeline
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/-/pipelines/431913287).
```mermaid
graph RL;
classDef criticalPath fill:#f66;
1-2["build-qa-image (2 minutes)"];
click 1-2 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=6914325&udv=0"
1-5["compile-production-assets (16 minutes)"];
class 1-5 criticalPath;
click 1-5 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=6914312&udv=0"
2_3-1["build-assets-image (1.3 minutes)"];
class 2_3-1 criticalPath;
2_3-1 --> 1-5
2_6-1["start-review-app-pipeline (49 minutes)"];
class 2_6-1 criticalPath;
click 2_6-1 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations"
2_6-1 --> 2_3-1 & 1-2;
```
### End-to-end pipeline
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/-/pipelines/431918463).
```mermaid
graph RL;
classDef criticalPath fill:#f66;
1-2["build-qa-image (2 minutes)"];
click 1-2 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=6914325&udv=0"
1-5["compile-production-assets (16 minutes)"];
class 1-5 criticalPath;
click 1-5 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=6914312&udv=0"
1-15["detect-tests"];
click 1-15 "https://app.periscopedata.com/app/gitlab/652085/EP---Jobs-Durations?widget=10113603&udv=1005715"
2_3-1["build-assets-image (1.3 minutes)"];
class 2_3-1 criticalPath;
2_3-1 --> 1-5
2_4-1["e2e:package-and-test (102 minutes)"];
class 2_4-1 criticalPath;
click 2_4-1 "https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations?widget=6914305&udv=0"
2_4-1 --> 1-2 & 2_3-1 & 1-15;
```
## CI configuration internals
See the dedicated [CI configuration internals page](internals.md).
## Performance
See the dedicated [CI configuration performance page](performance.md).
---
[Return to Development documentation](../index.md)
View File
@ -0,0 +1,216 @@
---
stage: none
group: Engineering Productivity
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# CI configuration internals
## Workflow rules
Pipelines for the GitLab project are created using the [`workflow:rules` keyword](../../ci/yaml/index.md#workflow)
feature of GitLab CI/CD.
Pipelines are always created for the following scenarios:
- `main` branch, including on schedules, pushes, merges, and so on.
- Merge requests.
- Tags.
- Stable, `auto-deploy`, and security branches.
Pipeline creation is also affected by the following CI/CD variables:
- If `$FORCE_GITLAB_CI` is set, pipelines are created.
- If `$GITLAB_INTERNAL` is not set, pipelines are not created.
No pipeline is created in any other case (for example, when pushing a branch with no
MR for it).
The source of truth for these workflow rules is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
## Default image
The default image is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
<!-- vale gitlab.Spelling = NO -->
It includes Ruby, Go, Git, Git LFS, Chrome, Node, Yarn, PostgreSQL, and Graphics Magick.
<!-- vale gitlab.Spelling = YES -->
The images used in our pipelines are configured in the
[`gitlab-org/gitlab-build-images`](https://gitlab.com/gitlab-org/gitlab-build-images)
project, which is push-mirrored to [`gitlab/gitlab-build-images`](https://dev.gitlab.org/gitlab/gitlab-build-images)
for redundancy.
The current version of the build images can be found in the
["Used by GitLab section"](https://gitlab.com/gitlab-org/gitlab-build-images/blob/master/.gitlab-ci.yml).
## Default variables
In addition to the [predefined CI/CD variables](../../ci/variables/predefined_variables.md),
each pipeline includes default variables defined in
[`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
## Stages
The current stages are:
- `sync`: This stage is used to synchronize changes from [`gitlab-org/gitlab`](https://gitlab.com/gitlab-org/gitlab) to
[`gitlab-org/gitlab-foss`](https://gitlab.com/gitlab-org/gitlab-foss).
- `prepare`: This stage includes jobs that prepare artifacts that are needed by
jobs in subsequent stages.
- `build-images`: This stage includes jobs that prepare Docker images
that are needed by jobs in subsequent stages or downstream pipelines.
- `fixtures`: This stage includes jobs that prepare fixtures needed by frontend tests.
- `lint`: This stage includes linting and static analysis jobs.
- `test`: This stage includes most of the tests, and DB/migration jobs.
- `post-test`: This stage includes jobs that build reports or gather data from
the `test` stage's jobs (for example, coverage, Knapsack metadata, and so on).
- `review`: This stage includes jobs that build the CNG images, deploy them, and
run end-to-end tests against Review Apps (see [Review Apps](../testing_guide/review_apps.md) for details).
It also includes Docs Review App jobs.
- `qa`: This stage includes jobs that perform QA tasks against the Review App
that is deployed in stage `review`.
- `post-qa`: This stage includes jobs that build reports or gather data from
the `qa` stage's jobs (for example, Review App performance report).
- `pages`: This stage includes a job that deploys the various reports as
  GitLab Pages (for example, [`coverage-ruby`](https://gitlab-org.gitlab.io/gitlab/coverage-ruby/),
  and `webpack-report`, found at `https://gitlab-org.gitlab.io/gitlab/webpack-report/`, though there is
  [an issue with the deployment](https://gitlab.com/gitlab-org/gitlab/-/issues/233458)).
- `notify`: This stage includes jobs that notify various failures to Slack.
## Dependency Proxy
Some of the jobs use images from Docker Hub. For those, we use
`${GITLAB_DEPENDENCY_PROXY_ADDRESS}` as a prefix to the image path, so that we pull
images through our [Dependency Proxy](../../user/packages/dependency_proxy/index.md).
By default, this variable is set from the value of `${GITLAB_DEPENDENCY_PROXY}`.
`${GITLAB_DEPENDENCY_PROXY}` is a group CI/CD variable defined in
[`gitlab-org`](https://gitlab.com/gitlab-org) as
`${CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX}/`. This means when we use an image
defined as:
```yaml
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}alpine:edge
```
Projects in the `gitlab-org` group pull from the Dependency Proxy, while
forks that reside on any other personal namespaces or groups fall back to
Docker Hub unless `${GITLAB_DEPENDENCY_PROXY}` is also defined there.
### Workaround for when a pipeline is started by a project access token user
When a pipeline is started by a project access token user (for example, the `release-tools approver bot` user, which
automatically updates the Gitaly version used in the main project),
[the Dependency Proxy isn't accessible](https://gitlab.com/gitlab-org/gitlab/-/issues/332411#note_1130388163)
and the job fails at the `Preparing the "docker+machine" executor` step.
To work around that, we have a special workflow rule that overrides the
`${GITLAB_DEPENDENCY_PROXY_ADDRESS}` variable so that the Dependency Proxy isn't used in that case:
```yaml
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $GITLAB_USER_LOGIN =~ /project_\d+_bot\d*/'
variables:
GITLAB_DEPENDENCY_PROXY_ADDRESS: ""
```
NOTE:
We don't directly override the `${GITLAB_DEPENDENCY_PROXY}` variable because group-level
variables take precedence over `.gitlab-ci.yml` variables.
## Common job definitions
Most of the jobs [extend from a few CI definitions](../../ci/yaml/index.md#extends)
defined in [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml)
that are scoped to a single [configuration keyword](../../ci/yaml/index.md#job-keywords).
| Job definitions | Description |
|------------------|-------------|
| `.default-retry` | Allows a job to [retry](../../ci/yaml/index.md#retry) upon `unknown_failure`, `api_failure`, `runner_system_failure`, `job_execution_timeout`, or `stuck_or_timeout_failure`. |
| `.default-before_script` | Allows a job to use a default `before_script` definition suitable for Ruby/Rails tasks that may need a database running (for example, tests). |
| `.setup-test-env-cache` | Allows a job to use a default `cache` definition suitable for setting up test environment for subsequent Ruby/Rails tasks. |
| `.rails-cache` | Allows a job to use a default `cache` definition suitable for Ruby/Rails tasks. |
| `.static-analysis-cache` | Allows a job to use a default `cache` definition suitable for static analysis tasks. |
| `.coverage-cache` | Allows a job to use a default `cache` definition suitable for coverage tasks. |
| `.qa-cache` | Allows a job to use a default `cache` definition suitable for QA tasks. |
| `.yarn-cache` | Allows a job to use a default `cache` definition suitable for frontend jobs that do a `yarn install`. |
| `.assets-compile-cache` | Allows a job to use a default `cache` definition suitable for frontend jobs that compile assets. |
| `.use-pg11` | Allows a job to run the `postgres` 11 and `redis` services (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific versions of the services). |
| `.use-pg11-ee` | Same as `.use-pg11` but also use an `elasticsearch` service (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific version of the service). |
| `.use-pg12` | Allows a job to use the `postgres` 12 and `redis` services (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific versions of the services). |
| `.use-pg12-ee` | Same as `.use-pg12` but also use an `elasticsearch` service (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific version of the service). |
| `.use-pg13` | Allows a job to use the `postgres` 13 and `redis` services (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific versions of the services). |
| `.use-pg13-ee` | Same as `.use-pg13` but also use an `elasticsearch` service (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific version of the service). |
| `.use-kaniko` | Allows a job to use the `kaniko` tool to build Docker images. |
| `.as-if-foss` | Simulate the FOSS project by setting the `FOSS_ONLY='1'` CI/CD variable. |
| `.use-docker-in-docker` | Allows a job to use Docker in Docker. |
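For example, a job can combine several of these definitions through
[`extends`](../../ci/yaml/index.md#extends). The following is only a sketch with a
hypothetical job name, not a job from our configuration:

```yaml
rspec-example:
  extends:
    - .default-retry
    - .default-before_script
    - .rails-cache
    - .use-pg13
  script:
    - bundle exec rspec
```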
## `rules`, `if:` conditions and `changes:` patterns
We're using the [`rules` keyword](../../ci/yaml/index.md#rules) extensively.
All `rules` definitions are defined in
[`rules.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rules.gitlab-ci.yml),
then included in individual jobs via [`extends`](../../ci/yaml/index.md#extends).
The `rules` definitions are composed of `if:` conditions and `changes:` patterns,
which are also defined in
[`rules.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rules.gitlab-ci.yml)
and included in `rules` definitions via [YAML anchors](../../ci/yaml/yaml_optimization.md#anchors).
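As a sketch (with simplified, hypothetical patterns; the real definitions live in
[`rules.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rules.gitlab-ci.yml)),
the composition looks like this:

```yaml
.if-merge-request: &if-merge-request
  if: '$CI_PIPELINE_SOURCE == "merge_request_event"'

.code-patterns: &code-patterns
  - "{app,lib}/**/*"

.rails:rules:code-changes:
  rules:
    - <<: *if-merge-request
      changes: *code-patterns

example-job:
  extends: .rails:rules:code-changes
  script: echo "Created only for merge requests that touch code"
```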
### `if:` conditions
<!-- vale gitlab.Substitutions = NO -->
| `if:` conditions | Description | Notes |
|------------------|-------------|-------|
| `if-not-canonical-namespace` | Matches if the project isn't in the canonical (`gitlab-org/`) or security (`gitlab-org/security`) namespace. | Use to create a job for forks (by using `when: on_success|manual`), or **not** create a job for forks (by using `when: never`). |
| `if-not-ee` | Matches if the project isn't EE (that is, project name isn't `gitlab` or `gitlab-ee`). | Use to create a job only in the FOSS project (by using `when: on_success|manual`), or **not** create a job if the project is EE (by using `when: never`). |
| `if-not-foss` | Matches if the project isn't FOSS (that is, project name isn't `gitlab-foss`, `gitlab-ce`, or `gitlabhq`). | Use to create a job only in the EE project (by using `when: on_success|manual`), or **not** create a job if the project is FOSS (by using `when: never`). |
| `if-default-refs` | Matches if the pipeline is for `master`, `main`, `/^[\d-]+-stable(-ee)?$/` (stable branches), `/^\d+-\d+-auto-deploy-\d+$/` (auto-deploy branches), `/^security\//` (security branches), merge requests, and tags. | Note that jobs aren't created for branches with this default configuration. |
| `if-master-refs` | Matches if the current branch is `master` or `main`. | |
| `if-master-push` | Matches if the current branch is `master` or `main` and pipeline source is `push`. | |
| `if-master-schedule-maintenance` | Matches if the current branch is `master` or `main` and pipeline runs on a 2-hourly schedule. | |
| `if-master-schedule-nightly` | Matches if the current branch is `master` or `main` and pipeline runs on a nightly schedule. | |
| `if-auto-deploy-branches` | Matches if the current branch is an auto-deploy one. | |
| `if-master-or-tag` | Matches if the pipeline is for the `master` or `main` branch or for a tag. | |
| `if-merge-request` | Matches if the pipeline is for a merge request. | |
| `if-merge-request-title-as-if-foss` | Matches if the pipeline is for a merge request and the MR has label ~"pipeline:run-as-if-foss". | |
| `if-merge-request-title-update-caches` | Matches if the pipeline is for a merge request and the MR has label ~"pipeline:update-cache". | |
| `if-merge-request-title-run-all-rspec` | Matches if the pipeline is for a merge request and the MR has label ~"pipeline:run-all-rspec". | |
| `if-security-merge-request` | Matches if the pipeline is for a security merge request. | |
| `if-security-schedule` | Matches if the pipeline is for a security scheduled pipeline. | |
| `if-nightly-master-schedule` | Matches if the pipeline is for a `master` scheduled pipeline with `$NIGHTLY` set. | |
| `if-dot-com-gitlab-org-schedule` | Limits jobs creation to scheduled pipelines for the `gitlab-org` group on GitLab.com. | |
| `if-dot-com-gitlab-org-master` | Limits jobs creation to the `master` or `main` branch for the `gitlab-org` group on GitLab.com. | |
| `if-dot-com-gitlab-org-merge-request` | Limits jobs creation to merge requests for the `gitlab-org` group on GitLab.com. | |
| `if-dot-com-gitlab-org-and-security-tag` | Limits job creation to tags for the `gitlab-org` and `gitlab-org/security` groups on GitLab.com. | |
| `if-dot-com-gitlab-org-and-security-merge-request` | Limits job creation to merge requests for the `gitlab-org` and `gitlab-org/security` groups on GitLab.com. | |
| `if-dot-com-ee-schedule` | Limits jobs to scheduled pipelines for the `gitlab-org/gitlab` project on GitLab.com. | |
| `if-security-pipeline-merge-result` | Matches if the pipeline is for a security merge request triggered by `@gitlab-release-tools-bot`. | |
<!-- vale gitlab.Substitutions = YES -->
### `changes:` patterns
| `changes:` patterns | Description |
|------------------------------|--------------------------------------------------------------------------|
| `ci-patterns` | Only create job for CI configuration-related changes. |
| `ci-build-images-patterns` | Only create job for CI configuration-related changes related to the `build-images` stage. |
| `ci-review-patterns` | Only create job for CI configuration-related changes related to the `review` stage. |
| `ci-qa-patterns` | Only create job for CI configuration-related changes related to the `qa` stage. |
| `yaml-lint-patterns` | Only create job for YAML-related changes. |
| `docs-patterns` | Only create job for docs-related changes. |
| `frontend-dependency-patterns` | Only create job when frontend dependencies are updated (that is, `package.json` and `yarn.lock`). |
| `frontend-patterns` | Only create job for frontend-related changes. |
| `backend-patterns` | Only create job for backend-related changes. |
| `db-patterns` | Only create job for DB-related changes. |
| `backstage-patterns` | Only create job for backstage-related changes (that is, Danger, fixtures, RuboCop, specs). |
| `code-patterns` | Only create job for code-related changes. |
| `qa-patterns` | Only create job for QA-related changes. |
| `code-backstage-patterns` | Combination of `code-patterns` and `backstage-patterns`. |
| `code-qa-patterns` | Combination of `code-patterns` and `qa-patterns`. |
| `code-backstage-qa-patterns` | Combination of `code-patterns`, `backstage-patterns`, and `qa-patterns`. |
| `static-analysis-patterns` | Only create jobs for static analysis configuration-related changes. |

View File

@ -0,0 +1,151 @@
---
stage: none
group: Engineering Productivity
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# CI configuration performance
## Interruptible pipelines
By default, all jobs are [interruptible](../../ci/yaml/index.md#interruptible), except the
`dont-interrupt-me` job, which runs automatically on `main` and is `manual`
otherwise.
If you want a running pipeline to finish even if you push new commits to a merge
request, be sure to start the `dont-interrupt-me` job before pushing.
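A minimal sketch of what such a job can look like (the exact `rules` here are an
assumption, not the canonical definition):

```yaml
dont-interrupt-me:
  interruptible: false
  rules:
    # Runs automatically on the default branch:
    - if: '$CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH'
    # Manual (and optional) everywhere else:
    - when: manual
      allow_failure: true
  script:
    - echo "Once this job has started, the pipeline cannot be auto-canceled."
```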
## Git fetch caching
Because GitLab.com uses the [pack-objects cache](../../administration/gitaly/configure_gitaly.md#pack-objects-cache),
concurrent Git fetches of the same pipeline ref are deduplicated on
the Gitaly server (always) and served from cache (when available).
This works well for the following reasons:
- The pack-objects cache is enabled on all Gitaly servers on GitLab.com.
- The CI/CD [Git strategy setting](../../ci/pipelines/settings.md#choose-the-default-git-strategy) for `gitlab-org/gitlab` is **Git clone**,
causing all jobs to fetch the same data, which maximizes the cache hit ratio.
- We use [shallow clone](../../ci/pipelines/settings.md#limit-the-number-of-changes-fetched-during-clone) to avoid downloading the full Git
history for every job.
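A rough per-job equivalent of those settings (the project configures them in the
project settings UI rather than with variables, and the depth value here is
hypothetical):

```yaml
variables:
  GIT_STRATEGY: clone  # every job fetches the same data, maximizing cache hits
  GIT_DEPTH: "20"      # shallow clone: avoid downloading the full Git history
```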
## Caching strategy
1. All jobs must only pull caches by default.
1. All jobs must be able to pass with an empty cache. In other words, caches are only there to speed up jobs.
1. We currently have several different cache definitions defined in
[`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml),
with fixed keys:
- `.setup-test-env-cache`
- `.ruby-cache`
- `.rails-cache`
- `.static-analysis-cache`
- `.rubocop-cache`
- `.coverage-cache`
- `.ruby-node-cache`
- `.qa-cache`
- `.yarn-cache`
- `.assets-compile-cache` (the key includes `${NODE_ENV}` so it's actually two different caches).
1. These cache definitions are composed of [multiple atomic caches](../../ci/caching/index.md#use-multiple-caches).
1. Only the following jobs, running in 2-hourly `maintenance` scheduled pipelines, push to (that is, update) the caches:
- `update-setup-test-env-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml).
- `update-gitaly-binaries-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml).
- `update-rubocop-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml).
- `update-qa-cache`, defined in [`.gitlab/ci/qa.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/qa.gitlab-ci.yml).
- `update-assets-compile-production-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
- `update-assets-compile-test-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
- `update-yarn-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
- `update-storybook-yarn-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
1. These jobs can also be forced to run in merge requests with the `pipeline:update-cache` label (this can be useful to warm the caches in an MR that updates the cache keys).
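A sketch of the resulting split, with a hypothetical cache key and paths: regular
jobs only pull, and only the scheduled update job pushes:

```yaml
# Consumers are pull-only, so they pass even when the cache is empty:
rspec-example:
  cache:
    key: rails-v1
    paths:
      - vendor/ruby
    policy: pull
  script:
    - bundle exec rspec

# Only the scheduled job (or an MR with the pipeline:update-cache label)
# updates the cache:
update-rails-cache:
  cache:
    key: rails-v1
    paths:
      - vendor/ruby
    policy: pull-push
  script:
    - bundle install --jobs 4
```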
## Artifacts strategy
We limit the artifacts that are saved and retrieved by jobs to the minimum to reduce the upload/download time and costs, as well as the artifacts storage.
## Components caching
Some external components of GitLab (GitLab Workhorse and frontend assets) need to be built from source as a preliminary step for running tests.
### `cache-workhorse`
In [this MR](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/79766), and then
[this MR](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/96297),
we introduced a new `cache-workhorse` job that:
- runs automatically for all GitLab.com `gitlab-org/gitlab` scheduled pipelines
- runs automatically for any `master` commit that touches the `workhorse/` folder
- is manual for MRs in GitLab.com's `gitlab-org` group that touch caching-related files
This job tries to download a generic package that contains GitLab Workhorse binaries needed in the GitLab test suite (under `tmp/tests/gitlab-workhorse`).
- If the package URL returns a 404:
1. It runs `scripts/setup-test-env`, so that the GitLab Workhorse binaries are built.
1. It then creates an archive which contains the binaries and uploads it [as a generic package](https://gitlab.com/gitlab-org/gitlab/-/packages/).
- Otherwise, if the package already exists, it exits the job successfully.
We also changed the `setup-test-env` job to:
1. First download the GitLab Workhorse generic package built and uploaded by `cache-workhorse`.
1. If the package is retrieved successfully, its content is placed in the right folder (for example, `tmp/tests/gitlab-workhorse`), preventing the building of the binaries when `scripts/setup-test-env` is run later on.
1. If the package URL returns a 404, the behavior doesn't change compared to the current one: the GitLab Workhorse binaries are built as part of `scripts/setup-test-env`.
NOTE:
The version of the package is the workhorse tree SHA (for example, `git rev-parse HEAD:workhorse`).
### `cache-assets`
In [this MR](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/96297),
we introduced three new `cache-assets:test`, `cache-assets:test as-if-foss`,
and `cache-assets:production` jobs that:
- never run unless `$CACHE_ASSETS_AS_PACKAGE == "true"`
- run automatically for all GitLab.com `gitlab-org/gitlab` scheduled pipelines
- run automatically for any `master` commit that touches the assets-related folders
- are manual for MRs in GitLab.com's `gitlab-org` group that touch caching-related files
These jobs try to download a generic package that contains compiled GitLab assets
needed in the GitLab test suite (under `app/assets/javascripts/locale/**/app.js`,
and `public/assets`).
- If the package URL returns a 404:
1. It runs `bin/rake gitlab:assets:compile`, so that the GitLab assets are compiled.
1. It then creates an archive which contains the assets and uploads it [as a generic package](https://gitlab.com/gitlab-org/gitlab/-/packages/).
The package version is set to the assets folders' hash sum.
- Otherwise, if the package already exists, it exits the job successfully.
### `compile-*-assets`
We also changed the `compile-test-assets`, `compile-test-assets as-if-foss`,
and `compile-production-assets` jobs to:
1. First download the "native" cache assets, which contain:
- The [compiled assets](https://gitlab.com/gitlab-org/gitlab/-/blob/a6910c9086bb28e553f5e747ec2dd50af6da3c6b/.gitlab/ci/global.gitlab-ci.yml#L86-87).
- A [`cached-assets-hash.txt` file](https://gitlab.com/gitlab-org/gitlab/-/blob/a6910c9086bb28e553f5e747ec2dd50af6da3c6b/.gitlab/ci/global.gitlab-ci.yml#L85)
containing the `SHA256` hexdigest of all the source files on which the assets depend.
This list of files is pessimistic: the assets might not depend on
some of these files. At worst, we compile the assets more often than necessary, which is better than
using outdated assets.
The file is [created after assets are compiled](https://gitlab.com/gitlab-org/gitlab/-/blob/a6910c9086bb28e553f5e747ec2dd50af6da3c6b/.gitlab/ci/frontend.gitlab-ci.yml#L83).
1. We then compute the `SHA256` hexdigest of all the source files the assets depend on, **for the currently checked-out branch**. We [store the hexdigest in the `GITLAB_ASSETS_HASH` variable](https://gitlab.com/gitlab-org/gitlab/-/blob/a6910c9086bb28e553f5e747ec2dd50af6da3c6b/.gitlab/ci/frontend.gitlab-ci.yml#L27).
1. If `$CACHE_ASSETS_AS_PACKAGE == "true"`, we download the generic package built and uploaded by [`cache-assets:*`](#cache-assets).
- If the cache is up-to-date for the checked-out branch, we download the native cache
**and** the cache package. We could optimize that by not downloading
the generic package, but the native cache is very often outdated anyway, because it's
rebuilt only every 2 hours.
1. We [run the `assets_compile_script` function](https://gitlab.com/gitlab-org/gitlab/-/blob/a6910c9086bb28e553f5e747ec2dd50af6da3c6b/.gitlab/ci/frontend.gitlab-ci.yml#L35),
which [itself runs](https://gitlab.com/gitlab-org/gitlab/-/blob/c023191ef412e868ae957f3341208a41ca678403/scripts/utils.sh#L76)
the [`assets:compile` Rake task](https://gitlab.com/gitlab-org/gitlab/-/blob/c023191ef412e868ae957f3341208a41ca678403/lib/tasks/gitlab/assets.rake#L80-109).
This task is responsible for deciding if assets need to be compiled or not.
It [compares the `HEAD` `SHA256` hexdigest from `$GITLAB_ASSETS_HASH` with the `master` hexdigest from `cached-assets-hash.txt`](https://gitlab.com/gitlab-org/gitlab/-/blob/c023191ef412e868ae957f3341208a41ca678403/lib/tasks/gitlab/assets.rake#L86).
1. If the hashes are the same, we don't compile anything. If they're different, we compile the assets.
## Pre-clone step
NOTE:
We no longer use this optimization for `gitlab-org/gitlab` because the [pack-objects cache](../../administration/gitaly/configure_gitaly.md#pack-objects-cache)
allows Gitaly to serve the full CI/CD fetch traffic now. See [Git fetch caching](#git-fetch-caching).
The pre-clone step works by using the `CI_PRE_CLONE_SCRIPT` variable
[defined by GitLab.com shared runners](../../ci/runners/saas/linux_saas_runner.md#pre-clone-script).

View File

@ -110,7 +110,7 @@ If the specs fail the check they must be fixed before they can run in random order
### Test speed
GitLab has a massive test suite that, without [parallelization](../pipelines.md#test-suite-parallelization), can take hours
GitLab has a massive test suite that, without [parallelization](../pipelines/index.md#test-suite-parallelization), can take hours
to run. It's important that we make an effort to write tests that are accurate
and effective _as well as_ fast.

View File

@ -62,7 +62,7 @@ For example, `FLAKY_RSPEC_GENERATE_REPORT=1 bin/rspec ...`.
The `rspec/flaky/report-suite.json` report is:
- Used for [automatically skipping known flaky tests](../pipelines.md#automatic-skipping-of-flaky-tests).
- Used for [automatically skipping known flaky tests](../pipelines/index.md#automatic-skipping-of-flaky-tests).
- [Imported into Snowflake](https://gitlab.com/gitlab-data/analytics/-/blob/master/extract/gitlab_flaky_tests/upload.py)
once per day, for monitoring with the [internal dashboard](https://app.periscopedata.com/app/gitlab/888968/EP---Flaky-tests).

View File

@ -172,7 +172,7 @@ subgraph "CNG-mirror pipeline"
job [triggers a pipeline](https://gitlab.com/gitlab-org/build/CNG-mirror/pipelines/44364657)
in the [`CNG-mirror`](https://gitlab.com/gitlab-org/build/CNG-mirror) project.
- The `review-build-cng` job automatically starts only if your MR includes
[CI or frontend changes](../pipelines.md#changes-patterns). In other cases, the job is manual.
[CI or frontend changes](../pipelines/internals.md#changes-patterns). In other cases, the job is manual.
- The [`CNG-mirror`](https://gitlab.com/gitlab-org/build/CNG-mirror/pipelines/44364657) pipeline creates the Docker images of
each component (for example, `gitlab-rails-ee`, `gitlab-shell`, `gitaly` etc.)
based on the commit from the [GitLab pipeline](https://gitlab.com/gitlab-org/gitlab/pipelines/125315730) and stores

View File

@ -14,7 +14,7 @@ The [Deploy Job template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib
- `staging`
- `production`
These environments are tied to jobs using [Auto Deploy](stages.md#auto-deploy), so they must have different deployment domains. You must define separate [`KUBE_CONTEXT`](../../user/clusters/agent/ci_cd_workflow.md#using-the-agent-with-auto-devops) and [`KUBE_INGRESS_BASE_DOMAIN`](requirements.md#auto-devops-base-domain) variables for each of the three environments.
These environments are tied to jobs using [Auto Deploy](stages.md#auto-deploy), so they must have different deployment domains. You must define separate [`KUBE_CONTEXT`](../../user/clusters/agent/ci_cd_workflow.md#environments-that-use-auto-devops) and [`KUBE_INGRESS_BASE_DOMAIN`](requirements.md#auto-devops-base-domain) variables for each of the three environments.
## Deploy to different clusters

View File

@ -127,6 +127,30 @@ deploy:
If you are not sure what your agent's context is, open a terminal and connect to your cluster.
Run `kubectl config get-contexts`.
### Environments that use Auto DevOps
If Auto DevOps is enabled, you must define the CI/CD variable `KUBE_CONTEXT`.
Set the value of `KUBE_CONTEXT` to the context of the agent you want Auto DevOps to use:
```yaml
deploy:
variables:
KUBE_CONTEXT: <path_to_agent_config_repository>:<agent_name>
```
You can assign different agents to separate Auto DevOps jobs. For instance,
Auto DevOps can use one agent for `staging` jobs, and another agent for `production` jobs.
To use multiple agents, define an [environment-scoped CI/CD variable](../../../ci/variables/index.md#limit-the-environment-scope-of-a-cicd-variable)
for each agent. For example:
1. Define two variables named `KUBE_CONTEXT`.
1. For the first variable:
1. Set the `environment` to `staging`.
1. Set the value to the context of your staging agent.
1. For the second variable:
1. Set the `environment` to `production`.
1. Set the value to the context of your production agent.
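The result might look like this (paths and agent names are placeholders):

```yaml
# Environment-scoped CI/CD variables, defined in the project or group settings:
#   KUBE_CONTEXT (environment: staging)    = "ops/agent-config:staging-agent"
#   KUBE_CONTEXT (environment: production) = "ops/agent-config:production-agent"
```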
### Environments with both certificate-based and agent-based connections
When you deploy to an environment that has both a
@ -158,20 +182,6 @@ deploy:
# ... rest of your job configuration
```
### Using the agent with Auto DevOps
If Auto DevOps is enabled, you must define the `KUBE_CONTEXT` CI/CD variable. Set the value of `KUBE_CONTEXT` to the context of the agent you want to use in your Auto DevOps pipeline jobs (`<PATH_TO_AGENT_CONFIG_REPOSITORY>:<AGENT_NAME>`).
You can also use different agents for different Auto DevOps jobs. For instance, you can use one agent for `staging` jobs and a different agent for `production` jobs. To use multiple agents, define a unique CI/CD variable for each agent.
For example:
1. Add two [environment-scoped CI/CD variables](../../../ci/variables/index.md#limit-the-environment-scope-of-a-cicd-variable) and name both `KUBE_CONTEXT`.
1. Set the `environment` of the first variable to `staging`. Set the value of the variable to `<PATH_TO_AGENT_CONFIGURATION_PROJECT>:<STAGING_AGENT_NAME>`.
1. Set the `environment` of the second variable to `production`. Set the value of the variable to `<PATH_TO_AGENT_CONFIGURATION_PROJECT>:<PRODUCTION_AGENT_NAME>`.
When the `staging` job runs, it will connect to the cluster via the agent named `<STAGING_AGENT_NAME>`, and when the `production` job runs it will connect to the cluster via the agent named `<PRODUCTION_AGENT_NAME>`.
## Restrict project and group access by using impersonation **(PREMIUM)**
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/345014) in GitLab 14.5.

View File

@ -69,10 +69,9 @@ This workflow has a weaker security model and is not recommended for production
GitLab supports the following Kubernetes versions. You can upgrade your
Kubernetes version to a supported version at any time:
- 1.24 (support ends on September 22, 2023 or when 1.27 becomes supported)
- 1.25 (support ends on October 22, 2023 or when 1.28 becomes supported)
- 1.24 (support ends on July 22, 2023 or when 1.27 becomes supported)
- 1.23 (support ends on February 22, 2023 or when 1.26 becomes supported)
- 1.22 (support ends on October 22, 2022)
- 1.21 (support ends on August 22, 2022)
GitLab aims to support a new minor Kubernetes version three months after its initial release. GitLab supports at least three production-ready Kubernetes minor
versions at any given time.

View File

@ -67,8 +67,8 @@ module API
Gitlab::Metrics::GlobalSearchSlis.record_apdex(
elapsed: @search_duration_s,
search_type: search_type,
search_level: search_service.level,
search_type: search_type(additional_params),
search_level: search_service(additional_params).level,
search_scope: search_scope
)
@ -81,7 +81,7 @@ module API
# with a 200 status code, but an empty @search_duration_s.
Gitlab::Metrics::GlobalSearchSlis.record_error_rate(
error: @search_duration_s.nil? || (status < 200 || status >= 400),
search_type: search_type,
search_type: search_type(additional_params),
search_level: search_service(additional_params).level,
search_scope: search_scope
)

View File

@ -8,6 +8,7 @@ module Gitlab
include Migrations::BatchedBackgroundMigrationHelpers
include Migrations::LockRetriesHelpers
include Migrations::TimeoutHelpers
include Migrations::ConstraintsHelpers
include DynamicModelHelpers
include RenameTableHelpers
include AsyncIndexes::MigrationHelpers
@ -24,8 +25,6 @@ module Gitlab
super(table_name, connection: connection, **kwargs)
end
# https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
MAX_IDENTIFIER_NAME_LENGTH = 63
DEFAULT_TIMESTAMP_COLUMNS = %i[created_at updated_at].freeze
# Adds `created_at` and `updated_at` columns with timezone information.
@ -1086,250 +1085,6 @@ into similar problems in the future (e.g. when new tables are created).
execute(sql)
end
# Returns the name for a check constraint
#
# type:
# - Any value, as long as it is unique
# - Constraint names are unique per table in Postgres, and, additionally,
# we can have multiple check constraints over a column
# So we use the (table, column, type) triplet as a unique name
# - e.g. we use 'max_length' when adding checks for text limits
# or 'not_null' when adding a NOT NULL constraint
#
def check_constraint_name(table, column, type)
identifier = "#{table}_#{column}_check_#{type}"
# Check concurrent_foreign_key_name() for info on why we use a hash
hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
"check_#{hashed_identifier}"
end
def check_constraint_exists?(table, constraint_name)
# Constraint names are unique per table in Postgres, not per schema
# Two tables can have constraints with the same name, so we filter by
# the table name in addition to using the constraint_name
check_sql = <<~SQL
SELECT COUNT(*)
FROM pg_catalog.pg_constraint con
INNER JOIN pg_catalog.pg_class rel
ON rel.oid = con.conrelid
INNER JOIN pg_catalog.pg_namespace nsp
ON nsp.oid = con.connamespace
WHERE con.contype = 'c'
AND con.conname = #{connection.quote(constraint_name)}
AND nsp.nspname = #{connection.quote(current_schema)}
AND rel.relname = #{connection.quote(table)}
SQL
connection.select_value(check_sql) > 0
end
# Adds a check constraint to a table
#
# This method is the generic helper for adding any check constraint
# More specialized helpers may use it (e.g. add_text_limit or add_not_null)
#
# This method only requires minimal locking:
# - The constraint is added using NOT VALID
# This allows us to add the check constraint without validating it
# - The check will be enforced for new data (inserts) coming in
# - If `validate: true` the constraint is also validated
# Otherwise, validate_check_constraint() can be used at a later stage
# - Check comments on add_concurrent_foreign_key for more info
#
# table - The table the constraint will be added to
# check - The check clause to add
# e.g. 'char_length(name) <= 5' or 'store IS NOT NULL'
# constraint_name - The name of the check constraint (otherwise auto-generated)
# Should be unique per table (not per column)
# validate - Whether to validate the constraint in this call
#
def add_check_constraint(table, check, constraint_name, validate: true)
# Transactions would result in ALTER TABLE locks being held for the
# duration of the transaction, defeating the purpose of this method.
validate_not_in_transaction!(:add_check_constraint)
validate_check_constraint_name!(constraint_name)
if check_constraint_exists?(table, constraint_name)
warning_message = <<~MESSAGE
Check constraint was not created because it exists already
(this may be due to an aborted migration or similar)
table: #{table}, check: #{check}, constraint name: #{constraint_name}
MESSAGE
Gitlab::AppLogger.warn warning_message
else
# Only add the constraint without validating it
# Even though it is fast, ADD CONSTRAINT requires an EXCLUSIVE lock
# Use with_lock_retries to make sure that this operation
# will not timeout on tables accessed by many processes
with_lock_retries do
execute <<-EOF.strip_heredoc
ALTER TABLE #{table}
ADD CONSTRAINT #{constraint_name}
CHECK ( #{check} )
NOT VALID;
EOF
end
end
if validate
validate_check_constraint(table, constraint_name)
end
end
def validate_check_constraint(table, constraint_name)
validate_check_constraint_name!(constraint_name)
unless check_constraint_exists?(table, constraint_name)
raise missing_schema_object_message(table, "check constraint", constraint_name)
end
disable_statement_timeout do
# VALIDATE CONSTRAINT only requires a SHARE UPDATE EXCLUSIVE LOCK
# It only conflicts with other validations and creating indexes
execute("ALTER TABLE #{table} VALIDATE CONSTRAINT #{constraint_name};")
end
end
def remove_check_constraint(table, constraint_name)
# This is technically not necessary, but aligned with add_check_constraint
# and allows us to continue use with_lock_retries here
validate_not_in_transaction!(:remove_check_constraint)
validate_check_constraint_name!(constraint_name)
# DROP CONSTRAINT requires an EXCLUSIVE lock
# Use with_lock_retries to make sure that this will not timeout
with_lock_retries do
execute <<-EOF.strip_heredoc
ALTER TABLE #{table}
DROP CONSTRAINT IF EXISTS #{constraint_name}
EOF
end
end
# Copies all check constraints for the old column to the new column.
#
# table - The table containing the columns.
# old - The old column.
# new - The new column.
# schema - The schema the table is defined for
# If it is not provided, then the current_schema is used
def copy_check_constraints(table, old, new, schema: nil)
if transaction_open?
raise 'copy_check_constraints can not be run inside a transaction'
end
unless column_exists?(table, old)
raise "Column #{old} does not exist on #{table}"
end
unless column_exists?(table, new)
raise "Column #{new} does not exist on #{table}"
end
table_with_schema = schema.present? ? "#{schema}.#{table}" : table
check_constraints_for(table, old, schema: schema).each do |check_c|
validate = !(check_c["constraint_def"].end_with? "NOT VALID")
# Normalize:
# - Old constraint definitions:
# '(char_length(entity_path) <= 5500)'
# - Definitions from pg_get_constraintdef(oid):
# 'CHECK ((char_length(entity_path) <= 5500))'
# - Definitions from pg_get_constraintdef(oid, pretty_bool):
# 'CHECK (char_length(entity_path) <= 5500)'
# - Not valid constraints: 'CHECK (...) NOT VALID'
# to a single format that we can use:
# '(char_length(entity_path) <= 5500)'
check_definition = check_c["constraint_def"]
.sub(/^\s*(CHECK)?\s*\({0,2}/, '(')
.sub(/\){0,2}\s*(NOT VALID)?\s*$/, ')')
constraint_name = begin
if check_definition == "(#{old} IS NOT NULL)"
not_null_constraint_name(table_with_schema, new)
elsif check_definition.start_with? "(char_length(#{old}) <="
text_limit_name(table_with_schema, new)
else
check_constraint_name(table_with_schema, new, 'copy_check_constraint')
end
end
add_check_constraint(
table_with_schema,
check_definition.gsub(old.to_s, new.to_s),
constraint_name,
validate: validate
)
end
end
# Migration Helpers for adding limit to text columns
def add_text_limit(table, column, limit, constraint_name: nil, validate: true)
add_check_constraint(
table,
"char_length(#{column}) <= #{limit}",
text_limit_name(table, column, name: constraint_name),
validate: validate
)
end
def validate_text_limit(table, column, constraint_name: nil)
validate_check_constraint(table, text_limit_name(table, column, name: constraint_name))
end
def remove_text_limit(table, column, constraint_name: nil)
remove_check_constraint(table, text_limit_name(table, column, name: constraint_name))
end
def check_text_limit_exists?(table, column, constraint_name: nil)
check_constraint_exists?(table, text_limit_name(table, column, name: constraint_name))
end
# Migration Helpers for managing not null constraints
def add_not_null_constraint(table, column, constraint_name: nil, validate: true)
if column_is_nullable?(table, column)
add_check_constraint(
table,
"#{column} IS NOT NULL",
not_null_constraint_name(table, column, name: constraint_name),
validate: validate
)
else
warning_message = <<~MESSAGE
NOT NULL check constraint was not created:
column #{table}.#{column} is already defined as `NOT NULL`
MESSAGE
Gitlab::AppLogger.warn warning_message
end
end
def validate_not_null_constraint(table, column, constraint_name: nil)
validate_check_constraint(
table,
not_null_constraint_name(table, column, name: constraint_name)
)
end
def remove_not_null_constraint(table, column, constraint_name: nil)
remove_check_constraint(
table,
not_null_constraint_name(table, column, name: constraint_name)
)
end
def check_not_null_constraint_exists?(table, column, constraint_name: nil)
check_constraint_exists?(
table,
not_null_constraint_name(table, column, name: constraint_name)
)
end
def create_extension(extension)
execute('CREATE EXTENSION IF NOT EXISTS %s' % extension)
rescue ActiveRecord::StatementInvalid => e
@ -1387,19 +1142,6 @@ into similar problems in the future (e.g. when new tables are created).
raise
end
def rename_constraint(table_name, old_name, new_name)
execute <<~SQL
ALTER TABLE #{quote_table_name(table_name)}
RENAME CONSTRAINT #{quote_column_name(old_name)} TO #{quote_column_name(new_name)}
SQL
end
def drop_constraint(table_name, constraint_name, cascade: false)
execute <<~SQL
ALTER TABLE #{quote_table_name(table_name)} DROP CONSTRAINT #{quote_column_name(constraint_name)} #{cascade_statement(cascade)}
SQL
end
def add_primary_key_using_index(table_name, pk_name, index_to_use)
execute <<~SQL
ALTER TABLE #{quote_table_name(table_name)} ADD CONSTRAINT #{quote_table_name(pk_name)} PRIMARY KEY USING INDEX #{quote_table_name(index_to_use)}
@ -1434,10 +1176,6 @@ into similar problems in the future (e.g. when new tables are created).
Array.wrap(columns).join(separator)
end
def cascade_statement(cascade)
cascade ? 'CASCADE' : ''
end
def create_temporary_columns_and_triggers(table, columns, primary_key: :id, data_type: :bigint)
unless table_exists?(table)
raise "Table #{table} does not exist"
@ -1477,43 +1215,6 @@ into similar problems in the future (e.g. when new tables are created).
end
end
def validate_check_constraint_name!(constraint_name)
if constraint_name.to_s.length > MAX_IDENTIFIER_NAME_LENGTH
raise "The maximum allowed constraint name is #{MAX_IDENTIFIER_NAME_LENGTH} characters"
end
end
# Returns an ActiveRecord::Result containing the check constraints
# defined for the given column.
#
# If the schema is not provided, then the current_schema is used
def check_constraints_for(table, column, schema: nil)
check_sql = <<~SQL
SELECT
ccu.table_schema as schema_name,
ccu.table_name as table_name,
ccu.column_name as column_name,
con.conname as constraint_name,
pg_get_constraintdef(con.oid) as constraint_def
FROM pg_catalog.pg_constraint con
INNER JOIN pg_catalog.pg_class rel
ON rel.oid = con.conrelid
INNER JOIN pg_catalog.pg_namespace nsp
ON nsp.oid = con.connamespace
INNER JOIN information_schema.constraint_column_usage ccu
ON con.conname = ccu.constraint_name
AND nsp.nspname = ccu.constraint_schema
AND rel.relname = ccu.table_name
WHERE nsp.nspname = #{connection.quote(schema.presence || current_schema)}
AND rel.relname = #{connection.quote(table)}
AND ccu.column_name = #{connection.quote(column)}
AND con.contype = 'c'
ORDER BY constraint_name
SQL
connection.exec_query(check_sql)
end
def column_is_nullable?(table, column)
# Check if table.column has not been defined with NOT NULL
check_sql = <<~SQL
@ -1527,14 +1228,6 @@ into similar problems in the future (e.g. when new tables are created).
connection.select_value(check_sql) == 'YES'
end
def text_limit_name(table, column, name: nil)
name.presence || check_constraint_name(table, column, 'max_length')
end
def not_null_constraint_name(table, column, name: nil)
name.presence || check_constraint_name(table, column, 'not_null')
end
def missing_schema_object_message(table, type, name)
<<~MESSAGE
Could not find #{type} "#{name}" on table "#{table}" which was referenced during the migration.
@ -1604,17 +1297,6 @@ into similar problems in the future (e.g. when new tables are created).
Must end with `_at`}
MESSAGE
end
def validate_not_in_transaction!(method_name, modifier = nil)
return unless transaction_open?
raise <<~ERROR
#{["`#{method_name}`", modifier].compact.join(' ')} cannot be run inside a transaction.
You can disable transactions by calling `disable_ddl_transaction!` in the body of
your migration class
ERROR
end
end
end
end

View File

@ -0,0 +1,337 @@
# frozen_string_literal: true
module Gitlab
module Database
module Migrations
module ConstraintsHelpers
include LockRetriesHelpers
include TimeoutHelpers
# https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
MAX_IDENTIFIER_NAME_LENGTH = 63
# Returns the name for a check constraint
#
# type:
# - Any value, as long as it is unique
# - Constraint names are unique per table in Postgres, and, additionally,
# we can have multiple check constraints over a column
# So we use the (table, column, type) triplet as a unique name
# - e.g. we use 'max_length' when adding checks for text limits
# or 'not_null' when adding a NOT NULL constraint
#
def check_constraint_name(table, column, type)
identifier = "#{table}_#{column}_check_#{type}"
# Check concurrent_foreign_key_name() for info on why we use a hash
hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
"check_#{hashed_identifier}"
end
def check_constraint_exists?(table, constraint_name)
# Constraint names are unique per table in Postgres, not per schema
# Two tables can have constraints with the same name, so we filter by
# the table name in addition to using the constraint_name
check_sql = <<~SQL
SELECT COUNT(*)
FROM pg_catalog.pg_constraint con
INNER JOIN pg_catalog.pg_class rel
ON rel.oid = con.conrelid
INNER JOIN pg_catalog.pg_namespace nsp
ON nsp.oid = con.connamespace
WHERE con.contype = 'c'
AND con.conname = #{connection.quote(constraint_name)}
AND nsp.nspname = #{connection.quote(current_schema)}
AND rel.relname = #{connection.quote(table)}
SQL
connection.select_value(check_sql) > 0
end
# Adds a check constraint to a table
#
# This method is the generic helper for adding any check constraint
# More specialized helpers may use it (e.g. add_text_limit or add_not_null)
#
# This method only requires minimal locking:
# - The constraint is added using NOT VALID
# This allows us to add the check constraint without validating it
# - The check will be enforced for new data (inserts) coming in
# - If `validate: true` the constraint is also validated
# Otherwise, validate_check_constraint() can be used at a later stage
# - Check comments on add_concurrent_foreign_key for more info
#
# table - The table the constraint will be added to
# check - The check clause to add
# e.g. 'char_length(name) <= 5' or 'store IS NOT NULL'
# constraint_name - The name of the check constraint (otherwise auto-generated)
# Should be unique per table (not per column)
# validate - Whether to validate the constraint in this call
#
def add_check_constraint(table, check, constraint_name, validate: true)
# Transactions would result in ALTER TABLE locks being held for the
# duration of the transaction, defeating the purpose of this method.
validate_not_in_transaction!(:add_check_constraint)
validate_check_constraint_name!(constraint_name)
if check_constraint_exists?(table, constraint_name)
warning_message = <<~MESSAGE
Check constraint was not created because it exists already
(this may be due to an aborted migration or similar)
table: #{table}, check: #{check}, constraint name: #{constraint_name}
MESSAGE
Gitlab::AppLogger.warn warning_message
else
# Only add the constraint without validating it
# Even though it is fast, ADD CONSTRAINT requires an EXCLUSIVE lock
# Use with_lock_retries to make sure that this operation
# will not timeout on tables accessed by many processes
with_lock_retries do
execute <<~SQL
ALTER TABLE #{table}
ADD CONSTRAINT #{constraint_name}
CHECK ( #{check} )
NOT VALID;
SQL
end
end
validate_check_constraint(table, constraint_name) if validate
end
def validate_check_constraint(table, constraint_name)
validate_check_constraint_name!(constraint_name)
unless check_constraint_exists?(table, constraint_name)
raise missing_schema_object_message(table, "check constraint", constraint_name)
end
disable_statement_timeout do
# VALIDATE CONSTRAINT only requires a SHARE UPDATE EXCLUSIVE LOCK
# It only conflicts with other validations and creating indexes
execute("ALTER TABLE #{table} VALIDATE CONSTRAINT #{constraint_name};")
end
end
def remove_check_constraint(table, constraint_name)
# This is technically not necessary, but aligned with add_check_constraint
# and allows us to continue use with_lock_retries here
validate_not_in_transaction!(:remove_check_constraint)
validate_check_constraint_name!(constraint_name)
# DROP CONSTRAINT requires an EXCLUSIVE lock
# Use with_lock_retries to make sure that this will not timeout
with_lock_retries do
execute <<-SQL
ALTER TABLE #{table}
DROP CONSTRAINT IF EXISTS #{constraint_name}
SQL
end
end
# Copies all check constraints for the old column to the new column.
#
# table - The table containing the columns.
# old - The old column.
# new - The new column.
# schema - The schema the table is defined for
# If it is not provided, then the current_schema is used
def copy_check_constraints(table, old, new, schema: nil)
raise 'copy_check_constraints can not be run inside a transaction' if transaction_open?
raise "Column #{old} does not exist on #{table}" unless column_exists?(table, old)
raise "Column #{new} does not exist on #{table}" unless column_exists?(table, new)
table_with_schema = schema.present? ? "#{schema}.#{table}" : table
check_constraints_for(table, old, schema: schema).each do |check_c|
validate = !(check_c["constraint_def"].end_with? "NOT VALID")
# Normalize:
# - Old constraint definitions:
# '(char_length(entity_path) <= 5500)'
# - Definitions from pg_get_constraintdef(oid):
# 'CHECK ((char_length(entity_path) <= 5500))'
# - Definitions from pg_get_constraintdef(oid, pretty_bool):
# 'CHECK (char_length(entity_path) <= 5500)'
# - Not valid constraints: 'CHECK (...) NOT VALID'
# to a single format that we can use:
# '(char_length(entity_path) <= 5500)'
check_definition = check_c["constraint_def"]
.sub(/^\s*(CHECK)?\s*\({0,2}/, '(')
.sub(/\){0,2}\s*(NOT VALID)?\s*$/, ')')
constraint_name = if check_definition == "(#{old} IS NOT NULL)"
not_null_constraint_name(table_with_schema, new)
elsif check_definition.start_with? "(char_length(#{old}) <="
text_limit_name(table_with_schema, new)
else
check_constraint_name(table_with_schema, new, 'copy_check_constraint')
end
add_check_constraint(
table_with_schema,
check_definition.gsub(old.to_s, new.to_s),
constraint_name,
validate: validate
)
end
end
# Migration Helpers for adding limit to text columns
def add_text_limit(table, column, limit, constraint_name: nil, validate: true)
add_check_constraint(
table,
"char_length(#{column}) <= #{limit}",
text_limit_name(table, column, name: constraint_name),
validate: validate
)
end
def validate_text_limit(table, column, constraint_name: nil)
validate_check_constraint(table, text_limit_name(table, column, name: constraint_name))
end
def remove_text_limit(table, column, constraint_name: nil)
remove_check_constraint(table, text_limit_name(table, column, name: constraint_name))
end
def check_text_limit_exists?(table, column, constraint_name: nil)
check_constraint_exists?(table, text_limit_name(table, column, name: constraint_name))
end
# Migration Helpers for managing not null constraints
def add_not_null_constraint(table, column, constraint_name: nil, validate: true)
if column_is_nullable?(table, column)
add_check_constraint(
table,
"#{column} IS NOT NULL",
not_null_constraint_name(table, column, name: constraint_name),
validate: validate
)
else
warning_message = <<~MESSAGE
NOT NULL check constraint was not created:
column #{table}.#{column} is already defined as `NOT NULL`
MESSAGE
Gitlab::AppLogger.warn warning_message
end
end
def validate_not_null_constraint(table, column, constraint_name: nil)
validate_check_constraint(
table,
not_null_constraint_name(table, column, name: constraint_name)
)
end
def remove_not_null_constraint(table, column, constraint_name: nil)
remove_check_constraint(
table,
not_null_constraint_name(table, column, name: constraint_name)
)
end
def check_not_null_constraint_exists?(table, column, constraint_name: nil)
check_constraint_exists?(
table,
not_null_constraint_name(table, column, name: constraint_name)
)
end
def rename_constraint(table_name, old_name, new_name)
execute <<~SQL
ALTER TABLE #{quote_table_name(table_name)}
RENAME CONSTRAINT #{quote_column_name(old_name)} TO #{quote_column_name(new_name)}
SQL
end
def drop_constraint(table_name, constraint_name, cascade: false)
execute <<~SQL
ALTER TABLE #{quote_table_name(table_name)} DROP CONSTRAINT #{quote_column_name(constraint_name)} #{cascade_statement(cascade)}
SQL
end
def validate_check_constraint_name!(constraint_name)
return unless constraint_name.to_s.length > MAX_IDENTIFIER_NAME_LENGTH
raise "The maximum allowed constraint name is #{MAX_IDENTIFIER_NAME_LENGTH} characters"
end
def text_limit_name(table, column, name: nil)
name.presence || check_constraint_name(table, column, 'max_length')
end
private
def validate_not_in_transaction!(method_name, modifier = nil)
return unless transaction_open?
raise <<~ERROR
#{["`#{method_name}`", modifier].compact.join(' ')} cannot be run inside a transaction.
You can disable transactions by calling `disable_ddl_transaction!` in the body of
your migration class
ERROR
end
# Returns an ActiveRecord::Result containing the check constraints
# defined for the given column.
#
# If the schema is not provided, then the current_schema is used
def check_constraints_for(table, column, schema: nil)
check_sql = <<~SQL
SELECT
ccu.table_schema as schema_name,
ccu.table_name as table_name,
ccu.column_name as column_name,
con.conname as constraint_name,
pg_get_constraintdef(con.oid) as constraint_def
FROM pg_catalog.pg_constraint con
INNER JOIN pg_catalog.pg_class rel
ON rel.oid = con.conrelid
INNER JOIN pg_catalog.pg_namespace nsp
ON nsp.oid = con.connamespace
INNER JOIN information_schema.constraint_column_usage ccu
ON con.conname = ccu.constraint_name
AND nsp.nspname = ccu.constraint_schema
AND rel.relname = ccu.table_name
WHERE nsp.nspname = #{connection.quote(schema.presence || current_schema)}
AND rel.relname = #{connection.quote(table)}
AND ccu.column_name = #{connection.quote(column)}
AND con.contype = 'c'
ORDER BY constraint_name
SQL
connection.exec_query(check_sql)
end
def cascade_statement(cascade)
cascade ? 'CASCADE' : ''
end
def not_null_constraint_name(table, column, name: nil)
name.presence || check_constraint_name(table, column, 'not_null')
end
def missing_schema_object_message(table, type, name)
<<~MESSAGE
Could not find #{type} "#{name}" on table "#{table}" which was referenced during the migration.
This issue could be caused by the database schema straying from the expected state.
To resolve this issue, please verify:
1. all previous migrations have completed
2. the database objects used in this migration match the Rails definition in schema.rb or structure.sql
MESSAGE
end
end
end
end
end

View File

@ -5,12 +5,12 @@ module Gitlab
module GlobalSearchSlis
class << self
# The following targets are the 99.95th percentile of code searches
# gathered on 24-08-2022
# gathered on 25-10-2022
# from https://log.gprd.gitlab.net/goto/0c89cd80-23af-11ed-8656-f5f2137823ba (internal only)
BASIC_CONTENT_TARGET_S = 7.031
BASIC_CODE_TARGET_S = 21.903
ADVANCED_CONTENT_TARGET_S = 4.865
ADVANCED_CODE_TARGET_S = 13.546
BASIC_CONTENT_TARGET_S = 8.812
BASIC_CODE_TARGET_S = 27.538
ADVANCED_CONTENT_TARGET_S = 2.452
ADVANCED_CODE_TARGET_S = 15.52
def initialize_slis!
Gitlab::Metrics::Sli::Apdex.initialize_sli(:global_search, possible_labels)

View File

@ -22212,7 +22212,7 @@ msgstr ""
msgid "InviteMembersBanner|We noticed that you haven't invited anyone to this group. Invite your colleagues so you can discuss issues, collaborate on merge requests, and share your knowledge."
msgstr ""
msgid "InviteMembersModal| To get more members and access to additional paid features, an owner of the group can start a trial or upgrade to a paid tier."
msgid "InviteMembersModal| To get more members, the owner of this namespace can %{trialLinkStart}start a trial%{trialLinkEnd} or %{upgradeLinkStart}upgrade%{upgradeLinkEnd} to a paid tier."
msgstr ""
msgid "InviteMembersModal|%{linkStart}Read more%{linkEnd} about role permissions"
@ -22292,19 +22292,16 @@ msgid_plural "InviteMembersModal|The following %d members couldn't be invited"
msgstr[0] ""
msgstr[1] ""
msgid "InviteMembersModal|This feature is disabled until this group has space for more members."
msgstr ""
msgid "InviteMembersModal|To assign issues to a new team member, you need a project for the issues. %{linkStart}Create a project to get started.%{linkEnd}"
msgstr ""
msgid "InviteMembersModal|To get more members an owner of the group can %{trialLinkStart}start a trial%{trialLinkEnd} or %{upgradeLinkStart}upgrade%{upgradeLinkEnd} to a paid tier."
msgstr ""
msgid "InviteMembersModal|Username or email address"
msgid "InviteMembersModal|To invite new users to this namespace, you must remove existing users. You can still add existing namespace users."
msgstr ""
msgid "InviteMembersModal|You cannot add more members, but you can remove members who no longer need access."
msgid "InviteMembersModal|Username or email address"
msgstr ""
msgid "InviteMembersModal|You only have space for %{count} more %{members} in %{name}"
@ -36129,6 +36126,9 @@ msgstr ""
msgid "SecurityOrchestration| or "
msgstr ""
msgid "SecurityOrchestration|%{agent} for %{namespaces}"
msgstr ""
msgid "SecurityOrchestration|%{branches} %{plural}"
msgstr ""
@ -36354,10 +36354,10 @@ msgstr ""
msgid "SecurityOrchestration|Scan result policy"
msgstr ""
msgid "SecurityOrchestration|Scan to be performed %{cadence}"
msgid "SecurityOrchestration|Scan to be performed %{cadence} on the %{branches}"
msgstr ""
msgid "SecurityOrchestration|Scan to be performed %{cadence} on the %{branches}"
msgid "SecurityOrchestration|Scan to be performed by the agent named %{agents} %{cadence}"
msgstr ""
msgid "SecurityOrchestration|Scan to be performed on every pipeline on the %{branches}"
@ -36465,6 +36465,9 @@ msgstr ""
msgid "SecurityOrchestration|all branches"
msgstr ""
msgid "SecurityOrchestration|all namespaces"
msgstr ""
msgid "SecurityOrchestration|an"
msgstr ""
@ -36483,6 +36486,12 @@ msgstr ""
msgid "SecurityOrchestration|the %{branches}"
msgstr ""
msgid "SecurityOrchestration|the %{namespaces} %{plural}"
msgstr ""
msgid "SecurityOrchestration|the %{namespaces} and %{lastNamespace} %{plural}"
msgstr ""
msgid "SecurityOrchestration|vulnerabilities"
msgstr ""
@ -39855,6 +39864,9 @@ msgstr ""
msgid "Take a look at the documentation to discover all of GitLabs capabilities."
msgstr ""
msgid "Target"
msgstr ""
msgid "Target Branch"
msgstr ""
@ -47857,6 +47869,9 @@ msgstr ""
msgid "committed"
msgstr ""
msgid "complete"
msgstr ""
msgid "compliance violation has already been recorded"
msgstr ""
@ -47916,6 +47931,9 @@ msgstr[1] ""
msgid "days"
msgstr ""
msgid "default"
msgstr ""
msgid "default branch"
msgstr ""
@ -48175,6 +48193,9 @@ msgstr ""
msgid "invalid milestone state `%{state}`"
msgstr ""
msgid "invalidated"
msgstr ""
msgid "is"
msgstr ""
@ -48743,6 +48764,11 @@ msgstr ""
msgid "my-topic"
msgstr ""
msgid "namespace"
msgid_plural "namespaces"
msgstr[0] ""
msgstr[1] ""
msgid "needs to be between 10 minutes and 1 month"
msgstr ""
@ -48978,6 +49004,9 @@ msgstr ""
msgid "role's base access level does not match the access level of the membership"
msgstr ""
msgid "running"
msgstr ""
msgid "satisfied"
msgstr ""

View File

@ -86,6 +86,24 @@ module QA
end
end
def has_reviewer?(username)
wait_reviewers_block_finish_loading do
has_text?(username)
end
end
def has_no_reviewer?(username)
wait_reviewers_block_finish_loading do
has_no_text?(username)
end
end
def has_no_reviewers?
wait_reviewers_block_finish_loading do
has_text?('None')
end
end
def has_avatar_image_count?(count)
wait_assignees_block_finish_loading do
all_elements(:avatar_image, count: count)
@ -133,6 +151,53 @@ module QA
click_element(:more_assignees_link)
end
def toggle_reviewers_edit
click_element(:reviewers_edit_button)
end
def suggested_reviewer_usernames
within_element(:reviewers_block_container) do
wait_for_requests
click_element(:reviewers_edit_button)
wait_for_requests
list = find_element(:dropdown_list_content)
suggested_reviewers = list.find_all('li[data-user-suggested="true"]')
raise ElementNotFound, 'No suggested reviewers found' if suggested_reviewers.empty?
suggested_reviewers.map do |reviewer|
info = reviewer.text.split('@')
{
name: info[0].chomp,
username: info[1].chomp
}
end.compact
end
end
def unassign_reviewers
within_element(:reviewers_block_container) do
wait_for_requests
click_element(:reviewers_edit_button)
wait_for_requests
end
select_reviewer('Unassigned')
end
def select_reviewer(username)
within_element(:reviewers_block_container) do
within_element(:dropdown_list_content) do
click_on username
end
click_element(:reviewers_edit_button)
wait_for_requests
end
end
private
def wait_assignees_block_finish_loading
@ -144,6 +209,15 @@ module QA
end
end
def wait_reviewers_block_finish_loading
within_element(:reviewers_block_container) do
wait_until(reload: false, max_duration: 10, sleep_interval: 1) do
finished_loading_block?
yield
end
end
end
def wait_labels_block_finish_loading
within_element(:labels_block) do
wait_until(reload: false, max_duration: 10, sleep_interval: 1) do

View File

@ -11,7 +11,8 @@ module QA
:milestone,
:labels,
:file_name,
:file_content
:file_content,
:reviewer_ids
attr_writer :no_preparation,
:wait_for_merge,
@ -22,7 +23,8 @@ module QA
:description,
:merge_when_pipeline_succeeds,
:merge_status,
:state
:state,
:reviewers
attribute :project do
Project.fabricate_via_api! do |resource|
@ -121,12 +123,17 @@ module QA
"/projects/#{project.id}/merge_requests"
end
def api_reviewers_path
"#{api_get_path}/reviewers"
end
def api_post_body
{
description: description,
source_branch: source_branch,
target_branch: target_branch,
title: title
title: title,
reviewer_ids: reviewer_ids
}
end

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Systems' do
RSpec.describe 'Systems', product_group: :gitaly do
context 'with Gitaly automatic failover and recovery', :orchestrated, :gitaly_cluster do
# Variables shared between contexts. They're used and shared between
# contexts so they can't be `let` variables.

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Systems' do
RSpec.describe 'Systems', product_group: :gitaly do
describe 'Gitaly backend node recovery', :orchestrated, :gitaly_cluster, :skip_live_env do
let(:praefect_manager) { Service::PraefectManager.new }
let(:project) do

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Systems' do
RSpec.describe 'Systems', product_group: :gitaly do
describe 'Changing Gitaly repository storage', :requires_admin, except: { job: 'review-qa-*' } do
praefect_manager = Service::PraefectManager.new

View File

@ -3,7 +3,7 @@
require 'parallel'
module QA
RSpec.describe 'Systems' do
RSpec.describe 'Systems', product_group: :gitaly do
describe 'Gitaly distributed reads', :orchestrated, :gitaly_cluster, :skip_live_env, :requires_admin do
let(:number_of_reads_per_loop) { 9 }
let(:praefect_manager) { Service::PraefectManager.new }

View File

@ -2,7 +2,7 @@
module QA
RSpec.describe 'Systems' do
describe 'Gitaly using mTLS', :orchestrated, :mtls do
describe 'Gitaly using mTLS', :orchestrated, :mtls, product_group: :gitaly do
let(:initial_commit_message) { 'Initial commit' }
let(:first_added_commit_message) { 'commit over git' }
let(:second_added_commit_message) { 'commit over api' }

View File

@ -2,7 +2,7 @@
module QA
RSpec.describe 'Systems' do
describe 'Praefect connectivity commands', :orchestrated, :gitaly_cluster do
describe 'Praefect connectivity commands', :orchestrated, :gitaly_cluster, product_group: :gitaly do
praefect_manager = Service::PraefectManager.new
before do

View File

@ -2,7 +2,7 @@
module QA
RSpec.describe 'Systems' do
describe 'Praefect dataloss commands', :orchestrated, :gitaly_cluster do
describe 'Praefect dataloss commands', :orchestrated, :gitaly_cluster, product_group: :gitaly do
let(:praefect_manager) { Service::PraefectManager.new }
let(:project) do

View File

@ -3,7 +3,7 @@
require 'parallel'
module QA
RSpec.describe 'Systems' do
RSpec.describe 'Systems', product_group: :gitaly do
describe 'Gitaly Cluster replication queue', :orchestrated, :gitaly_cluster, :skip_live_env do
let(:praefect_manager) { Service::PraefectManager.new }
let(:project) do

View File

@ -2,7 +2,7 @@
module QA
RSpec.describe 'Systems' do
describe 'Praefect repository commands', :orchestrated, :gitaly_cluster do
describe 'Praefect repository commands', :orchestrated, :gitaly_cluster, product_group: :gitaly do
let(:praefect_manager) { Service::PraefectManager.new }
let(:repo1) do

View File

@ -7,43 +7,47 @@ module QA
describe 'Gitlab migration', product_group: :import do
include_context 'with gitlab project migration'
let!(:source_project_with_readme) { true }
let!(:other_user) do
Resource::User
.fabricate_via_api! { |usr| usr.api_client = admin_api_client }
.tap do |usr|
usr.set_public_email
source_project.add_member(usr, Resource::Members::AccessLevel::MAINTAINER)
end
end
let!(:source_mr) do
Resource::MergeRequest.fabricate_via_api! do |mr|
mr.project = source_project
mr.api_client = Runtime::API::Client.new(user: other_user)
mr.reviewer_ids = [other_user.id]
end
end
let!(:source_comment) { source_mr.add_comment(body: 'This is a test comment!') }
let(:imported_mrs) { imported_project.merge_requests }
let(:imported_mr_comments) { imported_mr.comments.map { |note| note.except(:id, :noteable_id) } }
let(:source_mr_comments) { source_mr.comments.map { |note| note.except(:id, :noteable_id) } }
let(:imported_mr) do
Resource::MergeRequest.init do |mr|
mr.project = imported_project
mr.iid = imported_mrs.first[:iid]
mr.api_client = api_client
end
end
let(:imported_reviewers) { imported_mr.reviewers.map { |r| r.slice(:id, :username) } }
let(:source_mr_reviewers) { [{ id: other_user.id, username: other_user.username }] }
after do
other_user.remove_via_api!
end
context 'with merge request' do
let!(:source_project_with_readme) { true }
let!(:other_user) do
Resource::User
.fabricate_via_api! { |usr| usr.api_client = admin_api_client }
.tap do |usr|
usr.set_public_email
source_project.add_member(usr, Resource::Members::AccessLevel::MAINTAINER)
end
end
let!(:source_mr) do
Resource::MergeRequest.fabricate_via_api! do |mr|
mr.project = source_project
mr.api_client = Runtime::API::Client.new(user: other_user)
end
end
let!(:source_comment) { source_mr.add_comment('This is a test comment!') }
let(:imported_mrs) { imported_project.merge_requests }
let(:imported_mr_comments) { imported_mr.comments.map { |note| note.except(:id, :noteable_id) } }
let(:source_mr_comments) { source_mr.comments.map { |note| note.except(:id, :noteable_id) } }
let(:imported_mr) do
Resource::MergeRequest.init do |mr|
mr.project = imported_project
mr.iid = imported_mrs.first[:iid]
mr.api_client = api_client
end
end
after do
other_user.remove_via_api!
end
it(
'successfully imports merge request',
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/348478'
@ -54,7 +58,8 @@ module QA
aggregate_failures do
expect(imported_mr).to eq(source_mr.reload!)
expect(imported_mr_comments).to eq(source_mr_comments)
expect(imported_mr_comments).to match_array(source_mr_comments)
expect(imported_reviewers).to eq(source_mr_reviewers)
end
end
end

View File

@ -93,7 +93,11 @@ class SlackReporter
end
def source
"`#{ENV['CI_PIPELINE_SOURCE']}`"
"`#{ENV['CI_PIPELINE_SOURCE']}#{schedule_type}`"
end
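# Appends the schedule type (for example ': nightly') to the pipeline
# source when the pipeline was triggered by a schedule, so scheduled
# runs are distinguishable in the Slack report.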
def schedule_type
ENV['CI_PIPELINE_SOURCE'] == 'schedule' ? ": #{ENV['SCHEDULE_TYPE']}" : ''
end
def project_link

View File

@ -9,9 +9,9 @@ RSpec.describe 'DevOps Report page', :js do
gitlab_enable_admin_mode_sign_in(admin)
end
context 'with devops_adoption feature flag disabled' do
context 'without licensed feature devops adoption' do
before do
stub_feature_flags(devops_adoption: false)
stub_licensed_features(devops_adoption: false)
end
it 'has dismissable intro callout' do

View File

@ -1,3 +1,4 @@
import MockAdapter from 'axios-mock-adapter';
import { Emitter } from 'monaco-editor';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import waitForPromises from 'helpers/wait_for_promises';
@ -8,6 +9,7 @@ import { EditorMarkdownExtension } from '~/editor/extensions/source_editor_markd
import { EditorMarkdownPreviewExtension } from '~/editor/extensions/source_editor_markdown_livepreview_ext';
import { ToolbarExtension } from '~/editor/extensions/source_editor_toolbar_ext';
import SourceEditor from '~/editor/source_editor';
import axios from '~/lib/utils/axios_utils';
jest.mock('~/editor/source_editor');
jest.mock('~/editor/extensions/source_editor_extension_base');
@ -32,6 +34,7 @@ const markdownExtensions = [
describe('Blob Editing', () => {
let blobInstance;
let mock;
const useMock = jest.fn(() => markdownExtensions);
const unuseMock = jest.fn();
const emitter = new Emitter();
@ -44,7 +47,10 @@ describe('Blob Editing', () => {
onDidChangeModelLanguage: emitter.event,
};
beforeEach(() => {
mock = new MockAdapter(axios);
setHTMLFixture(`
<div class="js-edit-mode-pane"></div>
<div class="js-edit-mode"><a href="#write">Write</a><a href="#preview">Preview</a></div>
<form class="js-edit-blob-form">
<div id="file_path"></div>
<div id="editor"></div>
@ -54,6 +60,7 @@ describe('Blob Editing', () => {
jest.spyOn(SourceEditor.prototype, 'createInstance').mockReturnValue(mockInstance);
});
afterEach(() => {
mock.restore();
jest.clearAllMocks();
unuseMock.mockClear();
useMock.mockClear();
@ -108,6 +115,47 @@ describe('Blob Editing', () => {
});
});
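// Table-driven cases: the preview should only toggle for markdown files,
// opening when the preview tab is clicked and closing when the write tab
// is clicked while the preview is open.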
describe('correctly handles toggling the live-preview panel for different file types', () => {
it.each`
desc | isMarkdown | isPreviewOpened | tabToClick | shouldOpenPreview | shouldClosePreview | expectedDesc
${'not markdown with preview closed'} | ${false} | ${false} | ${'#write'} | ${false} | ${false} | ${'not toggle preview'}
${'not markdown with preview closed'} | ${false} | ${false} | ${'#preview'} | ${false} | ${false} | ${'not toggle preview'}
${'markdown with preview closed'} | ${true} | ${false} | ${'#write'} | ${false} | ${false} | ${'not toggle preview'}
${'markdown with preview closed'} | ${true} | ${false} | ${'#preview'} | ${true} | ${false} | ${'open preview'}
${'markdown with preview opened'} | ${true} | ${true} | ${'#write'} | ${false} | ${true} | ${'close preview'}
${'markdown with preview opened'} | ${true} | ${true} | ${'#preview'} | ${false} | ${false} | ${'not toggle preview'}
`(
'when $desc, clicking $tabToClick should $expectedDesc',
async ({
isMarkdown,
isPreviewOpened,
tabToClick,
shouldOpenPreview,
shouldClosePreview,
}) => {
const fire = jest.fn();
SourceEditor.prototype.createInstance = jest.fn().mockReturnValue({
...mockInstance,
markdownPreview: {
eventEmitter: {
fire,
},
},
});
await initEditor(isMarkdown);
blobInstance.markdownLivePreviewOpened = isPreviewOpened;
const elToClick = document.querySelector(`a[href='${tabToClick}']`);
elToClick.dispatchEvent(new Event('click'));
if (shouldOpenPreview || shouldClosePreview) {
expect(fire).toHaveBeenCalled();
} else {
expect(fire).not.toHaveBeenCalled();
}
},
);
});
it('adds trailing newline to the blob content on submit', async () => {
const form = document.querySelector('.js-edit-blob-form');
const fileContentEl = document.getElementById('file-content');

View File

@ -81,9 +81,18 @@ describe('Markdown Live Preview Extension for Source Editor', () => {
},
path: previewMarkdownPath,
actionShowPreviewCondition: expect.any(Object),
eventEmitter: expect.any(Object),
});
});
it('supports external preview trigger via the emitter event', () => {
expect(panelSpy).not.toHaveBeenCalled();
instance.markdownPreview.eventEmitter.fire();
expect(panelSpy).toHaveBeenCalled();
});
describe('onDidLayoutChange', () => {
const emitter = new Emitter();
let layoutSpy;

View File

@ -10,12 +10,12 @@ import InviteMembersModal from '~/invite_members/components/invite_members_modal
import InviteModalBase from '~/invite_members/components/invite_modal_base.vue';
import ModalConfetti from '~/invite_members/components/confetti.vue';
import MembersTokenSelect from '~/invite_members/components/members_token_select.vue';
import UserLimitNotification from '~/invite_members/components/user_limit_notification.vue';
import {
INVITE_MEMBERS_FOR_TASK,
MEMBERS_MODAL_CELEBRATE_INTRO,
MEMBERS_MODAL_CELEBRATE_TITLE,
MEMBERS_PLACEHOLDER,
MEMBERS_PLACEHOLDER_DISABLED,
MEMBERS_TO_PROJECT_CELEBRATE_INTRO_TEXT,
LEARN_GITLAB,
EXPANDED_ERRORS,
@ -31,8 +31,6 @@ import {
propsData,
inviteSource,
newProjectPath,
freeUsersLimit,
membersCount,
user1,
user2,
user3,
@ -99,6 +97,7 @@ describe('InviteMembersModal', () => {
const findIntroText = () => wrapper.findByTestId('modal-base-intro-text').text();
const findMemberErrorAlert = () => wrapper.findByTestId('alert-member-error');
const findMoreInviteErrorsButton = () => wrapper.findByTestId('accordion-button');
const findUserLimitAlert = () => wrapper.findComponent(UserLimitNotification);
const findAccordion = () => wrapper.findComponent(GlCollapse);
const findErrorsIcon = () => wrapper.findComponent(GlIcon);
const findMemberErrorMessage = (element) =>
@ -112,7 +111,7 @@ describe('InviteMembersModal', () => {
const findMembersFormGroup = () => wrapper.findByTestId('members-form-group');
const membersFormGroupInvalidFeedback = () =>
findMembersFormGroup().attributes('invalid-feedback');
const membersFormGroupText = () => findMembersFormGroup().text();
const membersFormGroupDescription = () => findMembersFormGroup().attributes('description');
const findMembersSelect = () => wrapper.findComponent(MembersTokenSelect);
const findTasksToBeDone = () => wrapper.findByTestId('invite-members-modal-tasks-to-be-done');
const findTasks = () => wrapper.findByTestId('invite-members-modal-tasks');
@ -299,19 +298,8 @@ describe('InviteMembersModal', () => {
describe('members form group description', () => {
it('renders correct description', () => {
createInviteMembersToProjectWrapper({ freeUsersLimit, membersCount }, { GlFormGroup });
expect(membersFormGroupText()).toContain(MEMBERS_PLACEHOLDER);
});
describe('when reached user limit', () => {
it('renders correct description', () => {
createInviteMembersToProjectWrapper(
{ freeUsersLimit, membersCount: 5 },
{ GlFormGroup },
);
expect(membersFormGroupText()).toContain(MEMBERS_PLACEHOLDER_DISABLED);
});
createInviteMembersToProjectWrapper({ GlFormGroup });
expect(membersFormGroupDescription()).toContain(MEMBERS_PLACEHOLDER);
});
});
});
@ -339,23 +327,10 @@ describe('InviteMembersModal', () => {
describe('members form group description', () => {
it('renders correct description', async () => {
createInviteMembersToProjectWrapper({ freeUsersLimit, membersCount }, { GlFormGroup });
createInviteMembersToProjectWrapper({ GlFormGroup });
await triggerOpenModal({ mode: 'celebrate' });
expect(membersFormGroupText()).toContain(MEMBERS_PLACEHOLDER);
});
describe('when reached user limit', () => {
it('renders correct description', async () => {
createInviteMembersToProjectWrapper(
{ freeUsersLimit, membersCount: 5 },
{ GlFormGroup },
);
await triggerOpenModal({ mode: 'celebrate' });
expect(membersFormGroupText()).toContain(MEMBERS_PLACEHOLDER_DISABLED);
});
expect(membersFormGroupDescription()).toContain(MEMBERS_PLACEHOLDER);
});
});
});
@ -370,20 +345,39 @@ describe('InviteMembersModal', () => {
describe('members form group description', () => {
it('renders correct description', () => {
createInviteMembersToGroupWrapper({ freeUsersLimit, membersCount }, { GlFormGroup });
expect(membersFormGroupText()).toContain(MEMBERS_PLACEHOLDER);
});
describe('when reached user limit', () => {
it('renders correct description', () => {
createInviteMembersToGroupWrapper({ freeUsersLimit, membersCount: 5 }, { GlFormGroup });
expect(membersFormGroupText()).toContain(MEMBERS_PLACEHOLDER_DISABLED);
});
createInviteMembersToGroupWrapper({ GlFormGroup });
expect(membersFormGroupDescription()).toContain(MEMBERS_PLACEHOLDER);
});
});
});
});
describe('rendering the user limit notification', () => {
it('shows the user limit notification alert when reached limit', () => {
const usersLimitDataset = { reachedLimit: true };
createInviteMembersToProjectWrapper(usersLimitDataset);
expect(findUserLimitAlert().exists()).toBe(true);
});
it('shows the user limit notification alert when close to dashboard limit', () => {
const usersLimitDataset = { closeToDashboardLimit: true };
createInviteMembersToProjectWrapper(usersLimitDataset);
expect(findUserLimitAlert().exists()).toBe(true);
});
it('does not show the user limit notification alert', () => {
const usersLimitDataset = {};
createInviteMembersToProjectWrapper(usersLimitDataset);
expect(findUserLimitAlert().exists()).toBe(false);
});
});
describe('submitting the invite form', () => {
const mockInvitationsApi = (code, data) => {
mock.onPost(GROUPS_INVITATIONS_PATH).reply(code, data);

View File

@ -18,10 +18,7 @@ import {
CANCEL_BUTTON_TEXT,
INVITE_BUTTON_TEXT_DISABLED,
INVITE_BUTTON_TEXT,
CANCEL_BUTTON_TEXT_DISABLED,
ON_SHOW_TRACK_LABEL,
ON_CLOSE_TRACK_LABEL,
ON_SUBMIT_TRACK_LABEL,
} from '~/invite_members/constants';
import { propsData, membersPath, purchasePath } from '../mock_data/modal_base';
@ -131,7 +128,9 @@ describe('InviteModalBase', () => {
it('renders description', () => {
createComponent({}, { GlFormGroup });
expect(findMembersFormGroup().text()).toContain(propsData.formGroupDescription);
expect(findMembersFormGroup().attributes('description')).toContain(
propsData.formGroupDescription,
);
});
describe('when users limit is reached', () => {
@ -145,30 +144,13 @@ describe('InviteModalBase', () => {
beforeEach(() => {
createComponent(
{ usersLimitDataset: { membersPath, purchasePath }, reachedLimit: true },
{ usersLimitDataset: { membersPath, purchasePath, reachedLimit: true } },
{ GlModal, GlFormGroup },
);
});
it('renders correct blocks', () => {
expect(findIcon().exists()).toBe(true);
expect(findDisabledInput().exists()).toBe(true);
expect(findDropdown().exists()).toBe(false);
expect(findDatepicker().exists()).toBe(false);
});
it('renders correct buttons', () => {
const cancelButton = findCancelButton();
const actionButton = findActionButton();
expect(cancelButton.attributes('href')).toBe(purchasePath);
expect(cancelButton.text()).toBe(CANCEL_BUTTON_TEXT_DISABLED);
expect(actionButton.attributes('href')).toBe(membersPath);
expect(actionButton.text()).toBe(INVITE_BUTTON_TEXT_DISABLED);
});
it('tracks actions', () => {
createComponent({ reachedLimit: true }, { GlFormGroup, GlModal });
createComponent({ usersLimitDataset: { reachedLimit: true } }, { GlFormGroup, GlModal });
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
const modal = wrapper.findComponent(GlModal);
@ -176,37 +158,20 @@ describe('InviteModalBase', () => {
modal.vm.$emit('shown');
expectTracking('render', ON_SHOW_TRACK_LABEL);
modal.vm.$emit('cancel', { preventDefault: jest.fn() });
expectTracking('click_button', ON_CLOSE_TRACK_LABEL);
modal.vm.$emit('primary', { preventDefault: jest.fn() });
expectTracking('click_button', ON_SUBMIT_TRACK_LABEL);
unmockTracking();
});
describe('when free user namespace', () => {
it('hides cancel button', () => {
createComponent(
{
usersLimitDataset: { membersPath, purchasePath, userNamespace: true },
reachedLimit: true,
},
{ GlModal, GlFormGroup },
);
expect(findCancelButton().exists()).toBe(false);
});
});
});
describe('when user limit is close on a personal namespace', () => {
beforeEach(() => {
createComponent(
{
closeToLimit: true,
reachedLimit: false,
usersLimitDataset: { membersPath, userNamespace: true },
usersLimitDataset: {
membersPath,
userNamespace: true,
closeToDashboardLimit: true,
reachedLimit: false,
},
},
{ GlModal, GlFormGroup },
);

View File

@ -1,8 +1,8 @@
import { GlAlert, GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import UserLimitNotification from '~/invite_members/components/user_limit_notification.vue';
import { REACHED_LIMIT_UPGRADE_SUGGESTION_MESSAGE } from '~/invite_members/constants';
import { freeUsersLimit, membersCount } from '../mock_data/member_modal';
import { REACHED_LIMIT_VARIANT, CLOSE_TO_LIMIT_VARIANT } from '~/invite_members/constants';
import { freeUsersLimit, remainingSeats } from '../mock_data/member_modal';
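// The '2' in this title corresponds to the shared `remainingSeats` mock value.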
const WARNING_ALERT_TITLE = 'You only have space for 2 more members in name';
@ -10,20 +10,16 @@ describe('UserLimitNotification', () => {
let wrapper;
const findAlert = () => wrapper.findComponent(GlAlert);
const findTrialLink = () => wrapper.findByTestId('trial-link');
const findUpgradeLink = () => wrapper.findByTestId('upgrade-link');
const createComponent = (
closeToLimit = false,
reachedLimit = false,
usersLimitDataset = {},
props = {},
) => {
const createComponent = (limitVariant, usersLimitDataset = {}, props = {}) => {
wrapper = shallowMountExtended(UserLimitNotification, {
propsData: {
closeToLimit,
reachedLimit,
limitVariant,
usersLimitDataset: {
remainingSeats,
freeUsersLimit,
membersCount,
newTrialRegistrationPath: 'newTrialRegistrationPath',
purchasePath: 'purchasePath',
...usersLimitDataset,
@ -35,40 +31,46 @@ describe('UserLimitNotification', () => {
});
};
afterEach(() => {
wrapper.destroy();
});
describe('when limit is not reached', () => {
it('renders empty block', () => {
createComponent();
expect(findAlert().exists()).toBe(false);
});
});
describe('when close to limit within a group', () => {
it("renders user's limit notification", () => {
createComponent(true, false, { membersCount: 3 });
createComponent(CLOSE_TO_LIMIT_VARIANT);
const alert = findAlert();
expect(alert.attributes('title')).toEqual(WARNING_ALERT_TITLE);
expect(alert.text()).toEqual(
'To get more members an owner of the group can start a trial or upgrade to a paid tier.',
);
expect(alert.text()).toContain('To get more members an owner of the group can');
});
});
describe('when limit is reached', () => {
it("renders user's limit notification", () => {
createComponent(true, true);
createComponent(REACHED_LIMIT_VARIANT);
const alert = findAlert();
expect(alert.attributes('title')).toEqual("You've reached your 5 members limit for name");
expect(alert.text()).toEqual(REACHED_LIMIT_UPGRADE_SUGGESTION_MESSAGE);
expect(alert.text()).toContain(
'To invite new users to this namespace, you must remove existing users.',
);
});
});
describe('tracking', () => {
it.each([CLOSE_TO_LIMIT_VARIANT, REACHED_LIMIT_VARIANT])(
`has tracking attributes for %j variant`,
(variant) => {
createComponent(variant);
expect(findTrialLink().attributes('data-track-action')).toBe('click_link');
expect(findTrialLink().attributes('data-track-label')).toBe(
`start_trial_user_limit_notification_${variant}`,
);
expect(findUpgradeLink().attributes('data-track-action')).toBe('click_link');
expect(findUpgradeLink().attributes('data-track-label')).toBe(
`upgrade_user_limit_notification_${variant}`,
);
},
);
});
});

View File

@ -19,7 +19,7 @@ export const propsData = {
export const inviteSource = 'unknown';
export const newProjectPath = 'projects/new';
export const freeUsersLimit = 5;
export const membersCount = 1;
export const remainingSeats = 2;
export const user1 = { id: 1, name: 'Name One', username: 'one_1', avatar_url: '' };
export const user2 = { id: 2, name: 'Name Two', username: 'one_2', avatar_url: '' };

View File

@ -2667,644 +2667,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
describe '#check_constraint_name' do
it 'returns a valid constraint name' do
name = model.check_constraint_name(:this_is_a_very_long_table_name,
:with_a_very_long_column_name,
:with_a_very_long_type)
expect(name).to be_an_instance_of(String)
expect(name).to start_with('check_')
expect(name.length).to eq(16)
end
end
describe '#check_constraint_exists?' do
before do
ActiveRecord::Migration.connection.execute(
'ALTER TABLE projects ADD CONSTRAINT check_1 CHECK (char_length(path) <= 5) NOT VALID'
)
ActiveRecord::Migration.connection.execute(
'CREATE SCHEMA new_test_schema'
)
ActiveRecord::Migration.connection.execute(
'CREATE TABLE new_test_schema.projects (id integer, name character varying)'
)
ActiveRecord::Migration.connection.execute(
'ALTER TABLE new_test_schema.projects ADD CONSTRAINT check_2 CHECK (char_length(name) <= 5)'
)
end
it 'returns true if a constraint exists' do
expect(model.check_constraint_exists?(:projects, 'check_1'))
.to be_truthy
end
it 'returns false if a constraint does not exist' do
expect(model.check_constraint_exists?(:projects, 'this_does_not_exist'))
.to be_falsy
end
it 'returns false if a constraint with the same name exists in another table' do
expect(model.check_constraint_exists?(:users, 'check_1'))
.to be_falsy
end
it 'returns false if a constraint with the same name exists for the same table in another schema' do
expect(model.check_constraint_exists?(:projects, 'check_2'))
.to be_falsy
end
end
describe '#add_check_constraint' do
before do
allow(model).to receive(:check_constraint_exists?).and_return(false)
end
context 'constraint name validation' do
it 'raises an error when too long' do
expect do
model.add_check_constraint(
:test_table,
'name IS NOT NULL',
'a' * (Gitlab::Database::MigrationHelpers::MAX_IDENTIFIER_NAME_LENGTH + 1)
)
end.to raise_error(RuntimeError)
end
it 'does not raise error when the length is acceptable' do
constraint_name = 'a' * Gitlab::Database::MigrationHelpers::MAX_IDENTIFIER_NAME_LENGTH
expect(model).to receive(:transaction_open?).and_return(false)
expect(model).to receive(:check_constraint_exists?).and_return(false)
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:execute).with(/ADD CONSTRAINT/)
model.add_check_constraint(
:test_table,
'name IS NOT NULL',
constraint_name,
validate: false
)
end
end
context 'inside a transaction' do
it 'raises an error' do
expect(model).to receive(:transaction_open?).and_return(true)
expect do
model.add_check_constraint(
:test_table,
'name IS NOT NULL',
'check_name_not_null'
)
end.to raise_error(RuntimeError)
end
end
context 'outside a transaction' do
before do
allow(model).to receive(:transaction_open?).and_return(false)
end
context 'when the constraint is already defined in the database' do
it 'does not create a constraint' do
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, 'check_name_not_null')
.and_return(true)
expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/)
# setting validate: false to only focus on the ADD CONSTRAINT command
model.add_check_constraint(
:test_table,
'name IS NOT NULL',
'check_name_not_null',
validate: false
)
end
end
context 'when the constraint is not defined in the database' do
it 'creates the constraint' do
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
# setting validate: false to only focus on the ADD CONSTRAINT command
model.add_check_constraint(
:test_table,
'char_length(name) <= 255',
'check_name_not_null',
validate: false
)
end
end
context 'when validate is not provided' do
it 'performs validation' do
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, 'check_name_not_null')
.and_return(false).exactly(1)
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
# we need the check constraint to exist so that the validation proceeds
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, 'check_name_not_null')
.and_return(true).exactly(1)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
model.add_check_constraint(
:test_table,
'char_length(name) <= 255',
'check_name_not_null'
)
end
end
context 'when validate is provided with a falsey value' do
it 'skips validation' do
expect(model).not_to receive(:disable_statement_timeout)
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:execute).with(/ADD CONSTRAINT/)
expect(model).not_to receive(:execute).with(/VALIDATE CONSTRAINT/)
model.add_check_constraint(
:test_table,
'char_length(name) <= 255',
'check_name_not_null',
validate: false
)
end
end
context 'when validate is provided with a truthy value' do
it 'performs validation' do
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, 'check_name_not_null')
.and_return(false).exactly(1)
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, 'check_name_not_null')
.and_return(true).exactly(1)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
model.add_check_constraint(
:test_table,
'char_length(name) <= 255',
'check_name_not_null',
validate: true
)
end
end
end
end
describe '#validate_check_constraint' do
context 'when the constraint does not exist' do
it 'raises an error' do
error_message = /Could not find check constraint "check_1" on table "test_table"/
expect(model).to receive(:check_constraint_exists?).and_return(false)
expect do
model.validate_check_constraint(:test_table, 'check_1')
end.to raise_error(RuntimeError, error_message)
end
end
context 'when the constraint exists' do
it 'performs validation' do
validate_sql = /ALTER TABLE test_table VALIDATE CONSTRAINT check_name/
expect(model).to receive(:check_constraint_exists?).and_return(true)
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:execute).ordered.with(validate_sql)
expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
model.validate_check_constraint(:test_table, 'check_name')
end
end
end
describe '#remove_check_constraint' do
before do
allow(model).to receive(:transaction_open?).and_return(false)
end
it 'removes the constraint' do
drop_sql = /ALTER TABLE test_table\s+DROP CONSTRAINT IF EXISTS check_name/
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:execute).with(drop_sql)
model.remove_check_constraint(:test_table, 'check_name')
end
end
describe '#copy_check_constraints' do
context 'inside a transaction' do
it 'raises an error' do
expect(model).to receive(:transaction_open?).and_return(true)
expect do
model.copy_check_constraints(:test_table, :old_column, :new_column)
end.to raise_error(RuntimeError)
end
end
context 'outside a transaction' do
before do
allow(model).to receive(:transaction_open?).and_return(false)
allow(model).to receive(:column_exists?).and_return(true)
end
let(:old_column_constraints) do
[
{
'schema_name' => 'public',
'table_name' => 'test_table',
'column_name' => 'old_column',
'constraint_name' => 'check_d7d49d475d',
'constraint_def' => 'CHECK ((old_column IS NOT NULL))'
},
{
'schema_name' => 'public',
'table_name' => 'test_table',
'column_name' => 'old_column',
'constraint_name' => 'check_48560e521e',
'constraint_def' => 'CHECK ((char_length(old_column) <= 255))'
},
{
'schema_name' => 'public',
'table_name' => 'test_table',
'column_name' => 'old_column',
'constraint_name' => 'custom_check_constraint',
'constraint_def' => 'CHECK (((old_column IS NOT NULL) AND (another_column IS NULL)))'
},
{
'schema_name' => 'public',
'table_name' => 'test_table',
'column_name' => 'old_column',
'constraint_name' => 'not_valid_check_constraint',
'constraint_def' => 'CHECK ((old_column IS NOT NULL)) NOT VALID'
}
]
end
it 'copies check constraints from one column to another' do
allow(model).to receive(:check_constraints_for)
.with(:test_table, :old_column, schema: nil)
.and_return(old_column_constraints)
allow(model).to receive(:not_null_constraint_name).with(:test_table, :new_column)
.and_return('check_1')
allow(model).to receive(:text_limit_name).with(:test_table, :new_column)
.and_return('check_2')
allow(model).to receive(:check_constraint_name)
.with(:test_table, :new_column, 'copy_check_constraint')
.and_return('check_3')
expect(model).to receive(:add_check_constraint)
.with(
:test_table,
'(new_column IS NOT NULL)',
'check_1',
validate: true
).once
expect(model).to receive(:add_check_constraint)
.with(
:test_table,
'(char_length(new_column) <= 255)',
'check_2',
validate: true
).once
expect(model).to receive(:add_check_constraint)
.with(
:test_table,
'((new_column IS NOT NULL) AND (another_column IS NULL))',
'check_3',
validate: true
).once
expect(model).to receive(:add_check_constraint)
.with(
:test_table,
'(new_column IS NOT NULL)',
'check_1',
validate: false
).once
model.copy_check_constraints(:test_table, :old_column, :new_column)
end
it 'does nothing if there are no constraints defined for the old column' do
allow(model).to receive(:check_constraints_for)
.with(:test_table, :old_column, schema: nil)
.and_return([])
expect(model).not_to receive(:add_check_constraint)
model.copy_check_constraints(:test_table, :old_column, :new_column)
end
it 'raises an error when the originating column does not exist' do
allow(model).to receive(:column_exists?).with(:test_table, :old_column).and_return(false)
error_message = /Column old_column does not exist on test_table/
expect do
model.copy_check_constraints(:test_table, :old_column, :new_column)
end.to raise_error(RuntimeError, error_message)
end
it 'raises an error when the target column does not exist' do
allow(model).to receive(:column_exists?).with(:test_table, :new_column).and_return(false)
error_message = /Column new_column does not exist on test_table/
expect do
model.copy_check_constraints(:test_table, :old_column, :new_column)
end.to raise_error(RuntimeError, error_message)
end
end
end
describe '#add_text_limit' do
context 'when it is called with the default options' do
it 'calls add_check_constraint with an inferred constraint name and validate: true' do
constraint_name = model.check_constraint_name(:test_table,
:name,
'max_length')
check = "char_length(name) <= 255"
expect(model).to receive(:check_constraint_name).and_call_original
expect(model).to receive(:add_check_constraint)
.with(:test_table, check, constraint_name, validate: true)
model.add_text_limit(:test_table, :name, 255)
end
end
context 'when all parameters are provided' do
it 'calls add_check_constraint with the correct parameters' do
constraint_name = 'check_name_limit'
check = "char_length(name) <= 255"
expect(model).not_to receive(:check_constraint_name)
expect(model).to receive(:add_check_constraint)
.with(:test_table, check, constraint_name, validate: false)
model.add_text_limit(
:test_table,
:name,
255,
constraint_name: constraint_name,
validate: false
)
end
end
end
describe '#validate_text_limit' do
context 'when constraint_name is not provided' do
it 'calls validate_check_constraint with an inferred constraint name' do
constraint_name = model.check_constraint_name(:test_table,
:name,
'max_length')
expect(model).to receive(:check_constraint_name).and_call_original
expect(model).to receive(:validate_check_constraint)
.with(:test_table, constraint_name)
model.validate_text_limit(:test_table, :name)
end
end
context 'when constraint_name is provided' do
it 'calls validate_check_constraint with the correct parameters' do
constraint_name = 'check_name_limit'
expect(model).not_to receive(:check_constraint_name)
expect(model).to receive(:validate_check_constraint)
.with(:test_table, constraint_name)
model.validate_text_limit(:test_table, :name, constraint_name: constraint_name)
end
end
end
describe '#remove_text_limit' do
context 'when constraint_name is not provided' do
it 'calls remove_check_constraint with an inferred constraint name' do
constraint_name = model.check_constraint_name(:test_table,
:name,
'max_length')
expect(model).to receive(:check_constraint_name).and_call_original
expect(model).to receive(:remove_check_constraint)
.with(:test_table, constraint_name)
model.remove_text_limit(:test_table, :name)
end
end
context 'when constraint_name is provided' do
it 'calls remove_check_constraint with the correct parameters' do
constraint_name = 'check_name_limit'
expect(model).not_to receive(:check_constraint_name)
expect(model).to receive(:remove_check_constraint)
.with(:test_table, constraint_name)
model.remove_text_limit(:test_table, :name, constraint_name: constraint_name)
end
end
end
describe '#check_text_limit_exists?' do
context 'when constraint_name is not provided' do
it 'calls check_constraint_exists? with an inferred constraint name' do
constraint_name = model.check_constraint_name(:test_table,
:name,
'max_length')
expect(model).to receive(:check_constraint_name).and_call_original
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, constraint_name)
model.check_text_limit_exists?(:test_table, :name)
end
end
context 'when constraint_name is provided' do
it 'calls check_constraint_exists? with the correct parameters' do
constraint_name = 'check_name_limit'
expect(model).not_to receive(:check_constraint_name)
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, constraint_name)
model.check_text_limit_exists?(:test_table, :name, constraint_name: constraint_name)
end
end
end
describe '#add_not_null_constraint' do
context 'when it is called with the default options' do
it 'calls add_check_constraint with an inferred constraint name and validate: true' do
constraint_name = model.check_constraint_name(:test_table,
:name,
'not_null')
check = "name IS NOT NULL"
expect(model).to receive(:column_is_nullable?).and_return(true)
expect(model).to receive(:check_constraint_name).and_call_original
expect(model).to receive(:add_check_constraint)
.with(:test_table, check, constraint_name, validate: true)
model.add_not_null_constraint(:test_table, :name)
end
end
context 'when all parameters are provided' do
it 'calls add_check_constraint with the correct parameters' do
constraint_name = 'check_name_not_null'
check = "name IS NOT NULL"
expect(model).to receive(:column_is_nullable?).and_return(true)
expect(model).not_to receive(:check_constraint_name)
expect(model).to receive(:add_check_constraint)
.with(:test_table, check, constraint_name, validate: false)
model.add_not_null_constraint(
:test_table,
:name,
constraint_name: constraint_name,
validate: false
)
end
end
context 'when the column is defined as NOT NULL' do
it 'does not add a check constraint' do
expect(model).to receive(:column_is_nullable?).and_return(false)
expect(model).not_to receive(:check_constraint_name)
expect(model).not_to receive(:add_check_constraint)
model.add_not_null_constraint(:test_table, :name)
end
end
end
describe '#validate_not_null_constraint' do
context 'when constraint_name is not provided' do
it 'calls validate_check_constraint with an inferred constraint name' do
constraint_name = model.check_constraint_name(:test_table,
:name,
'not_null')
expect(model).to receive(:check_constraint_name).and_call_original
expect(model).to receive(:validate_check_constraint)
.with(:test_table, constraint_name)
model.validate_not_null_constraint(:test_table, :name)
end
end
context 'when constraint_name is provided' do
it 'calls validate_check_constraint with the correct parameters' do
constraint_name = 'check_name_not_null'
expect(model).not_to receive(:check_constraint_name)
expect(model).to receive(:validate_check_constraint)
.with(:test_table, constraint_name)
model.validate_not_null_constraint(:test_table, :name, constraint_name: constraint_name)
end
end
end
describe '#remove_not_null_constraint' do
context 'when constraint_name is not provided' do
it 'calls remove_check_constraint with an inferred constraint name' do
constraint_name = model.check_constraint_name(:test_table,
:name,
'not_null')
expect(model).to receive(:check_constraint_name).and_call_original
expect(model).to receive(:remove_check_constraint)
.with(:test_table, constraint_name)
model.remove_not_null_constraint(:test_table, :name)
end
end
context 'when constraint_name is provided' do
it 'calls remove_check_constraint with the correct parameters' do
constraint_name = 'check_name_not_null'
expect(model).not_to receive(:check_constraint_name)
expect(model).to receive(:remove_check_constraint)
.with(:test_table, constraint_name)
model.remove_not_null_constraint(:test_table, :name, constraint_name: constraint_name)
end
end
end
describe '#check_not_null_constraint_exists?' do
context 'when constraint_name is not provided' do
it 'calls check_constraint_exists? with an inferred constraint name' do
constraint_name = model.check_constraint_name(:test_table,
:name,
'not_null')
expect(model).to receive(:check_constraint_name).and_call_original
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, constraint_name)
model.check_not_null_constraint_exists?(:test_table, :name)
end
end
context 'when constraint_name is provided' do
it 'calls check_constraint_exists? with the correct parameters' do
constraint_name = 'check_name_not_null'
expect(model).not_to receive(:check_constraint_name)
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, constraint_name)
model.check_not_null_constraint_exists?(:test_table, :name, constraint_name: constraint_name)
end
end
end
describe '#create_extension' do
subject { model.create_extension(extension) }
@ -3357,30 +2719,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
describe '#rename_constraint' do
it "executes the statement to rename constraint" do
expect(model).to receive(:execute).with /ALTER TABLE "test_table"\nRENAME CONSTRAINT "fk_old_name" TO "fk_new_name"/
model.rename_constraint(:test_table, :fk_old_name, :fk_new_name)
end
end
describe '#drop_constraint' do
it "executes the statement to drop the constraint" do
expect(model).to receive(:execute).with("ALTER TABLE \"test_table\" DROP CONSTRAINT \"constraint_name\" CASCADE\n")
model.drop_constraint(:test_table, :constraint_name, cascade: true)
end
context 'when cascade option is false' do
it "executes the statement to drop the constraint without cascade" do
expect(model).to receive(:execute).with("ALTER TABLE \"test_table\" DROP CONSTRAINT \"constraint_name\" \n")
model.drop_constraint(:test_table, :constraint_name, cascade: false)
end
end
end
describe '#add_primary_key_using_index' do
it "executes the statement to add the primary key" do
expect(model).to receive(:execute).with /ALTER TABLE "test_table" ADD CONSTRAINT "old_name" PRIMARY KEY USING INDEX "new_name"/

View File

@ -0,0 +1,679 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::ConstraintsHelpers do
let(:model) do
ActiveRecord::Migration.new.extend(described_class)
end
before do
allow(model).to receive(:puts)
end
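# check_constraint_name builds names as 'check_' plus a 10-character
# digest derived from the table, column, and type, so the result is a
# fixed 16 characters, well under PostgreSQL's 63-byte identifier limit.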
describe '#check_constraint_name' do
it 'returns a valid constraint name' do
name = model.check_constraint_name(:this_is_a_very_long_table_name,
:with_a_very_long_column_name,
:with_a_very_long_type)
expect(name).to be_an_instance_of(String)
expect(name).to start_with('check_')
expect(name.length).to eq(16)
end
end
describe '#check_constraint_exists?' do
before do
ActiveRecord::Migration.connection.execute(
'ALTER TABLE projects ADD CONSTRAINT check_1 CHECK (char_length(path) <= 5) NOT VALID'
)
ActiveRecord::Migration.connection.execute(
'CREATE SCHEMA new_test_schema'
)
ActiveRecord::Migration.connection.execute(
'CREATE TABLE new_test_schema.projects (id integer, name character varying)'
)
ActiveRecord::Migration.connection.execute(
'ALTER TABLE new_test_schema.projects ADD CONSTRAINT check_2 CHECK (char_length(name) <= 5)'
)
end
it 'returns true if a constraint exists' do
expect(model)
.to be_check_constraint_exists(:projects, 'check_1')
end
it 'returns false if a constraint does not exist' do
expect(model)
.not_to be_check_constraint_exists(:projects, 'this_does_not_exist')
end
it 'returns false if a constraint with the same name exists in another table' do
expect(model)
.not_to be_check_constraint_exists(:users, 'check_1')
end
it 'returns false if a constraint with the same name exists for the same table in another schema' do
expect(model)
.not_to be_check_constraint_exists(:projects, 'check_2')
end
end
describe '#add_check_constraint' do
before do
allow(model).to receive(:check_constraint_exists?).and_return(false)
end
context 'when validating the constraint name' do
it 'raises an error when too long' do
expect do
model.add_check_constraint(
:test_table,
'name IS NOT NULL',
'a' * (Gitlab::Database::MigrationHelpers::MAX_IDENTIFIER_NAME_LENGTH + 1)
)
end.to raise_error(RuntimeError)
end
it 'does not raise error when the length is acceptable' do
constraint_name = 'a' * Gitlab::Database::MigrationHelpers::MAX_IDENTIFIER_NAME_LENGTH
expect(model).to receive(:transaction_open?).and_return(false)
expect(model).to receive(:check_constraint_exists?).and_return(false)
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:execute).with(/ADD CONSTRAINT/)
model.add_check_constraint(
:test_table,
'name IS NOT NULL',
constraint_name,
validate: false
)
end
end
context 'when inside a transaction' do
it 'raises an error' do
expect(model).to receive(:transaction_open?).and_return(true)
expect do
model.add_check_constraint(
:test_table,
'name IS NOT NULL',
'check_name_not_null'
)
end.to raise_error(RuntimeError)
end
end
context 'when outside a transaction' do
before do
allow(model).to receive(:transaction_open?).and_return(false)
end
context 'when the constraint is already defined in the database' do
it 'does not create a constraint' do
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, 'check_name_not_null')
.and_return(true)
expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/)
# setting validate: false to only focus on the ADD CONSTRAINT command
model.add_check_constraint(
:test_table,
'name IS NOT NULL',
'check_name_not_null',
validate: false
)
end
end
context 'when the constraint is not defined in the database' do
it 'creates the constraint' do
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
# setting validate: false to only focus on the ADD CONSTRAINT command
model.add_check_constraint(
:test_table,
'char_length(name) <= 255',
'check_name_not_null',
validate: false
)
end
end
context 'when validate is not provided' do
it 'performs validation' do
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, 'check_name_not_null')
.and_return(false).exactly(1)
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
# we need the check constraint to exist so that the validation proceeds
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, 'check_name_not_null')
.and_return(true).exactly(1)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
model.add_check_constraint(
:test_table,
'char_length(name) <= 255',
'check_name_not_null'
)
end
end
context 'when validate is provided with a falsey value' do
it 'skips validation' do
expect(model).not_to receive(:disable_statement_timeout)
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:execute).with(/ADD CONSTRAINT/)
expect(model).not_to receive(:execute).with(/VALIDATE CONSTRAINT/)
model.add_check_constraint(
:test_table,
'char_length(name) <= 255',
'check_name_not_null',
validate: false
)
end
end
context 'when validate is provided with a truthy value' do
it 'performs validation' do
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, 'check_name_not_null')
.and_return(false).exactly(1)
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, 'check_name_not_null')
.and_return(true).exactly(1)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
model.add_check_constraint(
:test_table,
'char_length(name) <= 255',
'check_name_not_null',
validate: true
)
end
end
end
end
describe '#validate_check_constraint' do
context 'when the constraint does not exist' do
it 'raises an error' do
error_message = /Could not find check constraint "check_1" on table "test_table"/
expect(model).to receive(:check_constraint_exists?).and_return(false)
expect do
model.validate_check_constraint(:test_table, 'check_1')
end.to raise_error(RuntimeError, error_message)
end
end
context 'when the constraint exists' do
it 'performs validation' do
validate_sql = /ALTER TABLE test_table VALIDATE CONSTRAINT check_name/
expect(model).to receive(:check_constraint_exists?).and_return(true)
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:execute).ordered.with(validate_sql)
expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
model.validate_check_constraint(:test_table, 'check_name')
end
end
end
describe '#remove_check_constraint' do
before do
allow(model).to receive(:transaction_open?).and_return(false)
end
it 'removes the constraint' do
drop_sql = /ALTER TABLE test_table\s+DROP CONSTRAINT IF EXISTS check_name/
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:execute).with(drop_sql)
model.remove_check_constraint(:test_table, 'check_name')
end
end
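# copy_check_constraints re-creates each of the old column's check
# constraints against the new column; constraints that were NOT VALID on
# the old column are copied with validate: false.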
describe '#copy_check_constraints' do
context 'when inside a transaction' do
it 'raises an error' do
expect(model).to receive(:transaction_open?).and_return(true)
expect do
model.copy_check_constraints(:test_table, :old_column, :new_column)
end.to raise_error(RuntimeError)
end
end
context 'when outside a transaction' do
before do
allow(model).to receive(:transaction_open?).and_return(false)
allow(model).to receive(:column_exists?).and_return(true)
end
let(:old_column_constraints) do
[
{
'schema_name' => 'public',
'table_name' => 'test_table',
'column_name' => 'old_column',
'constraint_name' => 'check_d7d49d475d',
'constraint_def' => 'CHECK ((old_column IS NOT NULL))'
},
{
'schema_name' => 'public',
'table_name' => 'test_table',
'column_name' => 'old_column',
'constraint_name' => 'check_48560e521e',
'constraint_def' => 'CHECK ((char_length(old_column) <= 255))'
},
{
'schema_name' => 'public',
'table_name' => 'test_table',
'column_name' => 'old_column',
'constraint_name' => 'custom_check_constraint',
'constraint_def' => 'CHECK (((old_column IS NOT NULL) AND (another_column IS NULL)))'
},
{
'schema_name' => 'public',
'table_name' => 'test_table',
'column_name' => 'old_column',
'constraint_name' => 'not_valid_check_constraint',
'constraint_def' => 'CHECK ((old_column IS NOT NULL)) NOT VALID'
}
]
end
it 'copies check constraints from one column to another' do
allow(model).to receive(:check_constraints_for)
.with(:test_table, :old_column, schema: nil)
.and_return(old_column_constraints)
allow(model).to receive(:not_null_constraint_name).with(:test_table, :new_column)
.and_return('check_1')
allow(model).to receive(:text_limit_name).with(:test_table, :new_column)
.and_return('check_2')
allow(model).to receive(:check_constraint_name)
.with(:test_table, :new_column, 'copy_check_constraint')
.and_return('check_3')
expect(model).to receive(:add_check_constraint)
.with(
:test_table,
'(new_column IS NOT NULL)',
'check_1',
validate: true
).once
expect(model).to receive(:add_check_constraint)
.with(
:test_table,
'(char_length(new_column) <= 255)',
'check_2',
validate: true
).once
expect(model).to receive(:add_check_constraint)
.with(
:test_table,
'((new_column IS NOT NULL) AND (another_column IS NULL))',
'check_3',
validate: true
).once
expect(model).to receive(:add_check_constraint)
.with(
:test_table,
'(new_column IS NOT NULL)',
'check_1',
validate: false
).once
model.copy_check_constraints(:test_table, :old_column, :new_column)
end
it 'does nothing if there are no constraints defined for the old column' do
allow(model).to receive(:check_constraints_for)
.with(:test_table, :old_column, schema: nil)
.and_return([])
expect(model).not_to receive(:add_check_constraint)
model.copy_check_constraints(:test_table, :old_column, :new_column)
end
it 'raises an error when the originating column does not exist' do
allow(model).to receive(:column_exists?).with(:test_table, :old_column).and_return(false)
error_message = /Column old_column does not exist on test_table/
expect do
model.copy_check_constraints(:test_table, :old_column, :new_column)
end.to raise_error(RuntimeError, error_message)
end
it 'raises an error when the target column does not exist' do
allow(model).to receive(:column_exists?).with(:test_table, :new_column).and_return(false)
error_message = /Column new_column does not exist on test_table/
expect do
model.copy_check_constraints(:test_table, :old_column, :new_column)
end.to raise_error(RuntimeError, error_message)
end
end
end
describe '#add_text_limit' do
context 'when it is called with the default options' do
it 'calls add_check_constraint with an inferred constraint name and validate: true' do
constraint_name = model.check_constraint_name(:test_table,
:name,
'max_length')
check = "char_length(name) <= 255"
expect(model).to receive(:check_constraint_name).and_call_original
expect(model).to receive(:add_check_constraint)
.with(:test_table, check, constraint_name, validate: true)
model.add_text_limit(:test_table, :name, 255)
end
end
context 'when all parameters are provided' do
it 'calls add_check_constraint with the correct parameters' do
constraint_name = 'check_name_limit'
check = "char_length(name) <= 255"
expect(model).not_to receive(:check_constraint_name)
expect(model).to receive(:add_check_constraint)
.with(:test_table, check, constraint_name, validate: false)
model.add_text_limit(
:test_table,
:name,
255,
constraint_name: constraint_name,
validate: false
)
end
end
end
describe '#validate_text_limit' do
context 'when constraint_name is not provided' do
it 'calls validate_check_constraint with an inferred constraint name' do
constraint_name = model.check_constraint_name(:test_table,
:name,
'max_length')
expect(model).to receive(:check_constraint_name).and_call_original
expect(model).to receive(:validate_check_constraint)
.with(:test_table, constraint_name)
model.validate_text_limit(:test_table, :name)
end
end
context 'when constraint_name is provided' do
it 'calls validate_check_constraint with the correct parameters' do
constraint_name = 'check_name_limit'
expect(model).not_to receive(:check_constraint_name)
expect(model).to receive(:validate_check_constraint)
.with(:test_table, constraint_name)
model.validate_text_limit(:test_table, :name, constraint_name: constraint_name)
end
end
end
describe '#remove_text_limit' do
context 'when constraint_name is not provided' do
it 'calls remove_check_constraint with an inferred constraint name' do
constraint_name = model.check_constraint_name(:test_table,
:name,
'max_length')
expect(model).to receive(:check_constraint_name).and_call_original
expect(model).to receive(:remove_check_constraint)
.with(:test_table, constraint_name)
model.remove_text_limit(:test_table, :name)
end
end
context 'when constraint_name is provided' do
it 'calls remove_check_constraint with the correct parameters' do
constraint_name = 'check_name_limit'
expect(model).not_to receive(:check_constraint_name)
expect(model).to receive(:remove_check_constraint)
.with(:test_table, constraint_name)
model.remove_text_limit(:test_table, :name, constraint_name: constraint_name)
end
end
end
describe '#check_text_limit_exists?' do
context 'when constraint_name is not provided' do
it 'calls check_constraint_exists? with an inferred constraint name' do
constraint_name = model.check_constraint_name(:test_table,
:name,
'max_length')
expect(model).to receive(:check_constraint_name).and_call_original
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, constraint_name)
model.check_text_limit_exists?(:test_table, :name)
end
end
context 'when constraint_name is provided' do
it 'calls check_constraint_exists? with the correct parameters' do
constraint_name = 'check_name_limit'
expect(model).not_to receive(:check_constraint_name)
expect(model).to receive(:check_constraint_exists?)
.with(:test_table, constraint_name)
model.check_text_limit_exists?(:test_table, :name, constraint_name: constraint_name)
end
end
end
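# add_not_null_constraint delegates to add_check_constraint with an
# 'IS NOT NULL' check and is a no-op when the column is already defined
# as NOT NULL.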
describe '#add_not_null_constraint' do
context 'when it is called with the default options' do
it 'calls add_check_constraint with an inferred constraint name and validate: true' do
constraint_name = model.check_constraint_name(:test_table,
:name,
'not_null')
check = "name IS NOT NULL"
expect(model).to receive(:column_is_nullable?).and_return(true)
expect(model).to receive(:check_constraint_name).and_call_original
expect(model).to receive(:add_check_constraint)
.with(:test_table, check, constraint_name, validate: true)
model.add_not_null_constraint(:test_table, :name)
end
end
context 'when all parameters are provided' do
it 'calls add_check_constraint with the correct parameters' do
constraint_name = 'check_name_not_null'
check = "name IS NOT NULL"
expect(model).to receive(:column_is_nullable?).and_return(true)
expect(model).not_to receive(:check_constraint_name)
expect(model).to receive(:add_check_constraint)
.with(:test_table, check, constraint_name, validate: false)
model.add_not_null_constraint(
:test_table,
:name,
constraint_name: constraint_name,
validate: false
)
end
end
context 'when the column is defined as NOT NULL' do
it 'does not add a check constraint' do
expect(model).to receive(:column_is_nullable?).and_return(false)
expect(model).not_to receive(:check_constraint_name)
expect(model).not_to receive(:add_check_constraint)
model.add_not_null_constraint(:test_table, :name)
end
end
end
describe '#validate_not_null_constraint' do
context 'when constraint_name is not provided' do
it 'calls validate_check_constraint with an inferred constraint name' do
constraint_name = model.check_constraint_name(:test_table,
:name,
'not_null')
expect(model).to receive(:check_constraint_name).and_call_original
expect(model).to receive(:validate_check_constraint)
.with(:test_table, constraint_name)
model.validate_not_null_constraint(:test_table, :name)
end
end
context 'when constraint_name is provided' do
it 'calls validate_check_constraint with the correct parameters' do
constraint_name = 'check_name_not_null'
expect(model).not_to receive(:check_constraint_name)
expect(model).to receive(:validate_check_constraint)
.with(:test_table, constraint_name)
model.validate_not_null_constraint(:test_table, :name, constraint_name: constraint_name)
end
end
end
describe '#remove_not_null_constraint' do
context 'when constraint_name is not provided' do
it 'calls remove_check_constraint with an inferred constraint name' do
constraint_name = model.check_constraint_name(:test_table,
:name,
'not_null')
expect(model).to receive(:check_constraint_name).and_call_original
expect(model).to receive(:remove_check_constraint)
.with(:test_table, constraint_name)
model.remove_not_null_constraint(:test_table, :name)
end
end
context 'when constraint_name is provided' do
it 'calls remove_check_constraint with the correct parameters' do
constraint_name = 'check_name_not_null'
expect(model).not_to receive(:check_constraint_name)
expect(model).to receive(:remove_check_constraint)
.with(:test_table, constraint_name)
model.remove_not_null_constraint(:test_table, :name, constraint_name: constraint_name)
end
end
end
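
  # A sketch of the assumed follow-up lifecycle for a constraint added with
  # `validate: false`; names are illustrative:
  #
  #   def up
  #     validate_not_null_constraint :test_table, :name
  #   end
  #
  #   def down
  #     remove_not_null_constraint :test_table, :name
  #   end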

  describe '#check_not_null_constraint_exists?' do
    context 'when constraint_name is not provided' do
      it 'calls check_constraint_exists? with an inferred constraint name' do
        constraint_name = model.check_constraint_name(:test_table,
                                                      :name,
                                                      'not_null')

        expect(model).to receive(:check_constraint_name).and_call_original
        expect(model).to receive(:check_constraint_exists?)
          .with(:test_table, constraint_name)

        model.check_not_null_constraint_exists?(:test_table, :name)
      end
    end

    context 'when constraint_name is provided' do
      it 'calls check_constraint_exists? with the correct parameters' do
        constraint_name = 'check_name_not_null'

        expect(model).not_to receive(:check_constraint_name)
        expect(model).to receive(:check_constraint_exists?)
          .with(:test_table, constraint_name)

        model.check_not_null_constraint_exists?(:test_table, :name, constraint_name: constraint_name)
      end
    end
  end
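
  # A sketch (assumed usage) of the existence check as an idempotency guard,
  # so a retried migration does not try to add the constraint twice:
  #
  #   unless check_not_null_constraint_exists?(:test_table, :name)
  #     add_not_null_constraint :test_table, :name, validate: false
  #   end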

  describe '#rename_constraint' do
    it "executes the statement to rename constraint" do
      expect(model).to receive(:execute).with(
        /ALTER TABLE "test_table"\nRENAME CONSTRAINT "fk_old_name" TO "fk_new_name"/
      )

      model.rename_constraint(:test_table, :fk_old_name, :fk_new_name)
    end
  end
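
  # For reference, the call above is expected to emit SQL of this shape,
  # reconstructed from the regex the example matches against:
  #
  #   ALTER TABLE "test_table"
  #   RENAME CONSTRAINT "fk_old_name" TO "fk_new_name"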

  describe '#drop_constraint' do
    it "executes the statement to drop the constraint" do
      expect(model).to receive(:execute).with(
        "ALTER TABLE \"test_table\" DROP CONSTRAINT \"constraint_name\" CASCADE\n"
      )

      model.drop_constraint(:test_table, :constraint_name, cascade: true)
    end

    context 'when cascade option is false' do
      it "executes the statement to drop the constraint without cascade" do
        expect(model).to receive(:execute).with("ALTER TABLE \"test_table\" DROP CONSTRAINT \"constraint_name\" \n")

        model.drop_constraint(:test_table, :constraint_name, cascade: false)
      end
    end
  end
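
  # A sketch of assumed usage, with the SQL taken from the expectations above:
  #
  #   drop_constraint :test_table, :constraint_name, cascade: true
  #   # => ALTER TABLE "test_table" DROP CONSTRAINT "constraint_name" CASCADE
  #
  #   drop_constraint :test_table, :constraint_name, cascade: false
  #   # => ALTER TABLE "test_table" DROP CONSTRAINT "constraint_name"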
end

View File

@ -47,10 +47,10 @@ RSpec.describe Gitlab::Metrics::GlobalSearchSlis do
describe '#record_apdex' do
where(:search_type, :code_search, :duration_target) do
'basic' | false | 7.031
'basic' | true | 21.903
'advanced' | false | 4.865
'advanced' | true | 13.546
'basic' | false | 8.812
'basic' | true | 27.538
'advanced' | false | 2.452
'advanced' | true | 15.52
end
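
# A minimal sketch of the relationship under test, assuming #record_apdex
# counts a request as an apdex success when its elapsed time is at or under
# the duration target for its search type (the helper below is illustrative,
# not the real implementation):
#
#   def apdex_success?(elapsed, duration_target)
#     elapsed <= duration_target
#   end
#
#   apdex_success?(3.2, 8.812)   # => true for a basic, non-code search
#   apdex_success?(30.0, 27.538) # => false for a basic code search
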
with_them do

View File

@ -29,6 +29,19 @@ RSpec.describe API::Search do
end
end
shared_examples 'apdex recorded' do |scope:, level:, search: ''|
  it 'increments the custom search sli apdex' do
    expect(Gitlab::Metrics::GlobalSearchSlis).to receive(:record_apdex).with(
      elapsed: a_kind_of(Numeric),
      search_scope: scope,
      search_type: 'basic',
      search_level: level
    )

    get api(endpoint, user), params: { scope: scope, search: search }
  end
end
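
# The shared example above is consumed via `it_behaves_like`, overriding the
# empty default search term where needed; for instance (illustrative values,
# mirroring the single-scope test this change replaces):
#
#   it_behaves_like 'apdex recorded', scope: 'issues', level: 'global', search: 'john doe'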
shared_examples 'orderable by created_at' do |scope:|
it 'allows ordering results by created_at asc' do
get api(endpoint, user), params: { scope: scope, search: 'sortable', order_by: 'created_at', sort: 'asc' }
@ -172,6 +185,8 @@ RSpec.describe API::Search do
it_behaves_like 'pagination', scope: :projects
it_behaves_like 'ping counters', scope: :projects
it_behaves_like 'apdex recorded', scope: 'projects', level: 'global'
end
context 'for issues scope' do
@ -186,6 +201,8 @@ RSpec.describe API::Search do
it_behaves_like 'ping counters', scope: :issues
it_behaves_like 'apdex recorded', scope: 'issues', level: 'global'
it_behaves_like 'issues orderable by created_at'
describe 'pagination' do
@ -248,6 +265,8 @@ RSpec.describe API::Search do
it_behaves_like 'ping counters', scope: :merge_requests
it_behaves_like 'apdex recorded', scope: 'merge_requests', level: 'global'
it_behaves_like 'merge_requests orderable by created_at'
describe 'pagination' do
@ -293,6 +312,8 @@ RSpec.describe API::Search do
it_behaves_like 'ping counters', scope: :milestones
it_behaves_like 'apdex recorded', scope: 'milestones', level: 'global'
describe 'pagination' do
before do
create(:milestone, project: project, title: 'another milestone')
@ -330,6 +351,8 @@ RSpec.describe API::Search do
it_behaves_like 'pagination', scope: :users
it_behaves_like 'ping counters', scope: :users
it_behaves_like 'apdex recorded', scope: 'users', level: 'global'
end
context 'for snippet_titles scope' do
@ -343,6 +366,8 @@ RSpec.describe API::Search do
it_behaves_like 'ping counters', scope: :snippet_titles
it_behaves_like 'apdex recorded', scope: 'snippet_titles', level: 'global'
describe 'pagination' do
before do
create(:snippet, :public, title: 'another snippet', content: 'snippet content')
@ -352,17 +377,6 @@ RSpec.describe API::Search do
end
end
it 'increments the custom search sli apdex' do
expect(Gitlab::Metrics::GlobalSearchSlis).to receive(:record_apdex).with(
elapsed: a_kind_of(Numeric),
search_scope: 'issues',
search_type: 'basic',
search_level: 'global'
)
get api(endpoint, user), params: { scope: 'issues', search: 'john doe' }
end
it 'increments the custom search sli error rate with error false if no error occurred' do
expect(Gitlab::Metrics::GlobalSearchSlis).to receive(:record_error_rate).with(
error: false,
@ -466,6 +480,8 @@ RSpec.describe API::Search do
it_behaves_like 'pagination', scope: :projects
it_behaves_like 'ping counters', scope: :projects
it_behaves_like 'apdex recorded', scope: 'projects', level: 'group'
end
context 'for issues scope' do
@ -479,6 +495,8 @@ RSpec.describe API::Search do
it_behaves_like 'ping counters', scope: :issues
it_behaves_like 'apdex recorded', scope: 'issues', level: 'group'
it_behaves_like 'issues orderable by created_at'
describe 'pagination' do
@ -501,6 +519,8 @@ RSpec.describe API::Search do
it_behaves_like 'ping counters', scope: :merge_requests
it_behaves_like 'apdex recorded', scope: 'merge_requests', level: 'group'
it_behaves_like 'merge_requests orderable by created_at'
describe 'pagination' do
@ -523,6 +543,8 @@ RSpec.describe API::Search do
it_behaves_like 'ping counters', scope: :milestones
it_behaves_like 'apdex recorded', scope: 'milestones', level: 'group'
describe 'pagination' do
before do
create(:milestone, project: project, title: 'another milestone')
@ -556,6 +578,8 @@ RSpec.describe API::Search do
it_behaves_like 'ping counters', scope: :users
it_behaves_like 'apdex recorded', scope: 'users', level: 'group'
describe 'pagination' do
before do
create(:group_member, :developer, group: group)
@ -645,6 +669,8 @@ RSpec.describe API::Search do
it_behaves_like 'issues orderable by created_at'
it_behaves_like 'apdex recorded', scope: 'issues', level: 'project'
describe 'pagination' do
before do
create(:issue, project: project, title: 'another issue')
@ -677,6 +703,8 @@ RSpec.describe API::Search do
it_behaves_like 'merge_requests orderable by created_at'
it_behaves_like 'apdex recorded', scope: 'merge_requests', level: 'project'
describe 'pagination' do
before do
create(:merge_request, source_project: repo_project, title: 'another mr', target_branch: 'another_branch')
@ -700,6 +728,8 @@ RSpec.describe API::Search do
it_behaves_like 'ping counters', scope: :milestones
it_behaves_like 'apdex recorded', scope: 'milestones', level: 'project'
describe 'pagination' do
before do
create(:milestone, project: project, title: 'another milestone')
@ -737,6 +767,8 @@ RSpec.describe API::Search do
it_behaves_like 'ping counters', scope: :users
it_behaves_like 'apdex recorded', scope: 'users', level: 'project'
describe 'pagination' do
before do
create(:project_member, :developer, project: project)
@ -757,6 +789,8 @@ RSpec.describe API::Search do
it_behaves_like 'ping counters', scope: :notes
it_behaves_like 'apdex recorded', scope: 'notes', level: 'project'
describe 'pagination' do
before do
mr = create(:merge_request, source_project: project, target_branch: 'another_branch')
@ -780,6 +814,8 @@ RSpec.describe API::Search do
it_behaves_like 'ping counters', scope: :wiki_blobs
it_behaves_like 'apdex recorded', scope: 'wiki_blobs', level: 'project'
describe 'pagination' do
before do
create(:wiki_page, wiki: wiki, title: 'home 2', content: 'Another page')
@ -802,6 +838,8 @@ RSpec.describe API::Search do
it_behaves_like 'ping counters', scope: :commits
it_behaves_like 'apdex recorded', scope: 'commits', level: 'project'
describe 'pipeline visibility' do
shared_examples 'pipeline information visible' do
it 'contains status and last_pipeline' do
@ -899,6 +937,8 @@ RSpec.describe API::Search do
end
it_behaves_like 'response is correct', schema: 'public_api/v4/commits_details'
it_behaves_like 'apdex recorded', scope: 'commits', level: 'project'
end
context 'for blobs scope' do
@ -914,6 +954,8 @@ RSpec.describe API::Search do
it_behaves_like 'ping counters', scope: :blobs
it_behaves_like 'apdex recorded', scope: 'blobs', level: 'project'
context 'filters' do
it 'by filename' do
get api(endpoint, user), params: { scope: 'blobs', search: 'mon filename:PROCESS.md' }