Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-06-17 15:08:29 +00:00
parent 6b97ea1f80
commit 6c26db46b8
79 changed files with 988 additions and 161 deletions


@ -114,6 +114,8 @@ rules:
message: 'Migrate to GlSkeletonLoader, or import GlDeprecatedSkeletonLoading.'
# See https://gitlab.com/gitlab-org/gitlab/-/issues/360551
vue/multi-word-component-names: off
unicorn/prefer-dom-node-dataset:
- error
overrides:
- files:
- '{,ee/,jh/}spec/frontend*/**/*'
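Most of the JavaScript changes in this commit are mechanical fallout from this new rule: `unicorn/prefer-dom-node-dataset` asks that `data-*` attributes be read and written through an element's `dataset` map instead of the `*Attribute` methods. A minimal before/after sketch, assuming a DOM environment (none of these element or attribute names come from the commit itself):

```javascript
// Minimal sketch of the migration enforced by unicorn/prefer-dom-node-dataset.
const el = document.createElement('div');

// Before: raw data-* attribute access, which the rule flags.
el.setAttribute('data-original-href', '/foo');
const loadedAttr = el.getAttribute('data-loaded'); // null when absent
el.removeAttribute('data-src');

// After: dataset exposes the same attributes with camelCased keys
// (data-original-href <-> dataset.originalHref).
el.dataset.originalHref = '/foo';
const loadedData = el.dataset.loaded; // undefined when absent
delete el.dataset.src;

console.log(loadedAttr, loadedData); // null undefined
```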


@ -55,8 +55,8 @@ export function renderKroki(krokiImages) {
// A single Kroki image is processed multiple times for some reason,
// so this condition ensures we only create one alert per Kroki image
if (!parent.hasAttribute('data-kroki-processed')) {
parent.setAttribute('data-kroki-processed', 'true');
if (!Object.prototype.hasOwnProperty.call(parent.dataset, 'krokiProcessed')) {
parent.dataset.krokiProcessed = 'true';
parent.after(createAlert(krokiImage));
}
});
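`dataset` has no direct counterpart to `hasAttribute`, so presence checks like the one above switch to an own-property test. Using `Object.prototype.hasOwnProperty.call` rather than calling `hasOwnProperty` on the object is presumably to satisfy ESLint's `no-prototype-builtins`; that is an assumption about the config, not something the commit states. A small sketch of the equivalence, with a hypothetical element:

```javascript
const parent = document.createElement('p');

// Before: presence test on the raw attribute.
const before = parent.hasAttribute('data-kroki-processed');

// After: test for an own property on the dataset map.
const after = Object.prototype.hasOwnProperty.call(parent.dataset, 'krokiProcessed');

console.log(before, after); // false false until the flag is set

parent.dataset.krokiProcessed = 'true';
console.log(parent.hasAttribute('data-kroki-processed')); // true: dataset writes update the attribute
```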


@ -112,7 +112,7 @@ class SafeMathRenderer {
try {
displayContainer.innerHTML = this.katex.renderToString(text, {
displayMode: el.getAttribute('data-math-style') === 'display',
displayMode: el.dataset.mathStyle === 'display',
throwOnError: true,
maxSize: 20,
maxExpand: 20,
@ -145,7 +145,7 @@ class SafeMathRenderer {
this.elements.forEach((el) => {
const placeholder = document.createElement('span');
placeholder.style.display = 'none';
placeholder.setAttribute('data-math-style', el.getAttribute('data-math-style'));
placeholder.dataset.mathStyle = el.dataset.mathStyle;
placeholder.textContent = el.textContent;
el.parentNode.replaceChild(placeholder, el);
this.queue.push(placeholder);


@ -9,10 +9,11 @@ const updateLineNumbersOnBlobPermalinks = (linksToUpdate) => {
[].concat(Array.prototype.slice.call(linksToUpdate)).forEach((permalinkButton) => {
const baseHref =
permalinkButton.getAttribute('data-original-href') ||
permalinkButton.dataset.originalHref ||
(() => {
const href = permalinkButton.getAttribute('href');
permalinkButton.setAttribute('data-original-href', href);
// eslint-disable-next-line no-param-reassign
permalinkButton.dataset.originalHref = href;
return href;
})();
permalinkButton.setAttribute('href', `${baseHref}${hashUrlString}`);


@ -36,19 +36,19 @@ const loadRichBlobViewer = (type) => {
const loadViewer = (viewerParam) => {
const viewer = viewerParam;
const url = viewer.getAttribute('data-url');
const { url } = viewer.dataset;
if (!url || viewer.getAttribute('data-loaded') || viewer.getAttribute('data-loading')) {
if (!url || viewer.dataset.loaded || viewer.dataset.loading) {
return Promise.resolve(viewer);
}
viewer.setAttribute('data-loading', 'true');
viewer.dataset.loading = 'true';
return axios.get(url).then(({ data }) => {
viewer.innerHTML = data.html;
window.requestIdleCallback(() => {
viewer.removeAttribute('data-loading');
delete viewer.dataset.loading;
});
return viewer;
@ -108,7 +108,7 @@ export class BlobViewer {
switchToInitialViewer() {
const initialViewer = this.$fileHolder[0].querySelector('.blob-viewer:not(.hidden)');
let initialViewerName = initialViewer.getAttribute('data-type');
let initialViewerName = initialViewer.dataset.type;
if (this.switcher && window.location.hash.indexOf('#L') === 0) {
initialViewerName = 'simple';
@ -138,12 +138,12 @@ export class BlobViewer {
e.preventDefault();
this.switchToViewer(target.getAttribute('data-viewer'));
this.switchToViewer(target.dataset.viewer);
}
toggleCopyButtonState() {
if (!this.copySourceBtn) return;
if (this.simpleViewer.getAttribute('data-loaded')) {
if (this.simpleViewer.dataset.loaded) {
this.copySourceBtnTooltip.setAttribute('title', __('Copy file contents'));
this.copySourceBtn.classList.remove('disabled');
} else if (this.activeViewer === this.simpleViewer) {
@ -199,7 +199,8 @@ export class BlobViewer {
this.$fileHolder.trigger('highlight:line');
handleLocationHash();
viewer.setAttribute('data-loaded', 'true');
// eslint-disable-next-line no-param-reassign
viewer.dataset.loaded = 'true';
this.toggleCopyButtonState();
eventHub.$emit('showBlobInteractionZones', viewer.dataset.path);
});
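The new `eslint-disable-next-line no-param-reassign` comments are needed because a `dataset` write is a property assignment on a function parameter, whereas the old `setAttribute` call was only a method call. A sketch of the difference, assuming (as the disables suggest) that `no-param-reassign` is configured with `props: true`:

```javascript
/* eslint no-param-reassign: ["error", { "props": true }] */

function markLoadedOld(viewer) {
  viewer.setAttribute('data-loaded', 'true'); // method call: not flagged
}

function markLoadedNew(viewer) {
  // eslint-disable-next-line no-param-reassign
  viewer.dataset.loaded = 'true'; // property write on a parameter: flagged without the disable
}

markLoadedOld(document.createElement('div'));
markLoadedNew(document.createElement('div'));
```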


@ -5,7 +5,7 @@ export const addTooltipToEl = (el) => {
if (textEl && textEl.scrollWidth > textEl.offsetWidth) {
el.setAttribute('title', el.textContent);
el.setAttribute('data-container', 'body');
el.dataset.container = 'body';
el.classList.add('has-tooltip');
}
};


@ -32,8 +32,8 @@ export const addInteractionClass = ({ path, d, wrapTextNodes }) => {
});
if (el && !isTextNode(el)) {
el.setAttribute('data-char-index', d.start_char);
el.setAttribute('data-line-index', d.start_line);
el.dataset.charIndex = d.start_char;
el.dataset.lineIndex = d.start_line;
el.classList.add('cursor-pointer', 'code-navigation', 'js-code-navigation');
el.closest('.line').classList.add('code-navigation-line');
}


@ -107,10 +107,10 @@ function createLink(data, selected, options, index) {
}
if (options.trackSuggestionClickedLabel) {
link.setAttribute('data-track-action', 'click_text');
link.setAttribute('data-track-label', options.trackSuggestionClickedLabel);
link.setAttribute('data-track-value', index);
link.setAttribute('data-track-property', slugify(data.category || 'no-category'));
link.dataset.trackAction = 'click_text';
link.dataset.trackLabel = options.trackSuggestionClickedLabel;
link.dataset.trackValue = index;
link.dataset.trackProperty = slugify(data.category || 'no-category');
}
link.classList.toggle('is-active', selected);


@ -26,7 +26,8 @@ export default class Diff {
FilesCommentButton.init($diffFile);
const firstFile = $('.files').first().get(0);
const canCreateNote = firstFile && firstFile.hasAttribute('data-can-create-note');
const canCreateNote =
firstFile && Object.prototype.hasOwnProperty.call(firstFile.dataset, 'canCreateNote');
$diffFile.each((index, file) => initImageDiffHelper.initImageDiff(file, canCreateNote));
if (!isBound) {


@ -197,10 +197,10 @@ export default class AvailableDropdownMappings {
}
getGroupId() {
return this.filteredSearchInput.getAttribute('data-group-id') || '';
return this.filteredSearchInput.dataset.groupId || '';
}
getProjectId() {
return this.filteredSearchInput.getAttribute('data-project-id') || '';
return this.filteredSearchInput.dataset.projectId || '';
}
}


@ -25,9 +25,9 @@ export default class DropdownHint extends FilteredSearchDropdown {
const { selected } = e.detail;
if (selected.tagName === 'LI') {
if (selected.hasAttribute('data-value')) {
if (Object.prototype.hasOwnProperty.call(selected.dataset, 'value')) {
this.dismissDropdown();
} else if (selected.getAttribute('data-action') === 'submit') {
} else if (selected.dataset.action === 'submit') {
this.dismissDropdown();
this.dispatchFormSubmitEvent();
} else {


@ -23,7 +23,7 @@ export default class DropdownOperator extends FilteredSearchDropdown {
const { selected } = e.detail;
if (selected.tagName === 'LI') {
if (selected.hasAttribute('data-value')) {
if (Object.prototype.hasOwnProperty.call(selected.dataset, 'value')) {
const name = FilteredSearchVisualTokens.getLastTokenPartial();
const operator = selected.dataset.value;


@ -31,11 +31,11 @@ export default class DropdownUser extends DropdownAjaxFilter {
}
getGroupId() {
return this.input.getAttribute('data-group-id');
return this.input.dataset.groupId;
}
getProjectId() {
return this.input.getAttribute('data-project-id');
return this.input.dataset.projectId;
}
projectOrGroupId() {


@ -87,6 +87,7 @@ export default class DropdownUtils {
}
static setDataValueIfSelected(filter, operator, selected) {
// eslint-disable-next-line unicorn/prefer-dom-node-dataset
const dataValue = selected.getAttribute('data-value');
if (dataValue) {
@ -96,6 +97,7 @@ export default class DropdownUtils {
tokenValue: dataValue,
clicked: true,
options: {
// eslint-disable-next-line unicorn/prefer-dom-node-dataset
capitalizeTokenValue: selected.hasAttribute('data-capitalize'),
},
});


@ -165,8 +165,8 @@ class DropDown {
images.forEach((image) => {
const img = image;
img.src = img.getAttribute('data-src');
img.removeAttribute('data-src');
img.src = img.dataset.src;
delete img.dataset.src;
});
}
}


@ -814,7 +814,7 @@ export default class FilteredSearchManager {
getUsernameParams() {
const usernamesById = {};
try {
const attribute = this.filteredSearchInput.getAttribute('data-username-params');
const attribute = this.filteredSearchInput.dataset.usernameParams;
JSON.parse(attribute).forEach((user) => {
usernamesById[user.id] = user.username;
});


@ -6,7 +6,7 @@ export function setPositionDataAttribute(el, options) {
const positionObject = { ...JSON.parse(position), x, y, width, height };
el.setAttribute('data-position', JSON.stringify(positionObject));
el.dataset.position = JSON.stringify(positionObject);
}
export function updateDiscussionAvatarBadgeNumber(discussionEl, newBadgeNumber) {


@ -82,10 +82,7 @@ export default class CreateMergeRequestDropdown {
this.init();
if (isConfidentialIssue()) {
this.createMergeRequestButton.setAttribute(
'data-dropdown-trigger',
'#create-merge-request-dropdown',
);
this.createMergeRequestButton.dataset.dropdownTrigger = '#create-merge-request-dropdown';
initConfidentialMergeRequest();
}
}


@ -379,7 +379,7 @@ export default {
},
setActiveTask(el) {
const { parentElement } = el;
const lineNumbers = parentElement.getAttribute('data-sourcepos').match(/\b\d+(?=:)/g);
const lineNumbers = parentElement.dataset.sourcepos.match(/\b\d+(?=:)/g);
this.activeTask = {
title: parentElement.innerText,
lineNumberStart: lineNumbers[0],


@ -127,7 +127,7 @@ export default class LazyLoader {
// Loading Images which are in the current viewport or close to them
this.lazyImages = this.lazyImages.filter((selectedImage) => {
if (selectedImage.getAttribute('data-src')) {
if (selectedImage.dataset.src) {
const imgBoundRect = selectedImage.getBoundingClientRect();
const imgTop = scrollTop + imgBoundRect.top;
const imgBound = imgTop + imgBoundRect.height;
@ -156,16 +156,17 @@ export default class LazyLoader {
}
static loadImage(img) {
if (img.getAttribute('data-src')) {
if (img.dataset.src) {
img.setAttribute('loading', 'lazy');
let imgUrl = img.getAttribute('data-src');
let imgUrl = img.dataset.src;
// Only adding width + height for avatars for now
if (imgUrl.indexOf('/avatar/') > -1 && imgUrl.indexOf('?') === -1) {
const targetWidth = img.getAttribute('width') || img.width;
imgUrl += `?width=${targetWidth}`;
}
img.setAttribute('src', imgUrl);
img.removeAttribute('data-src');
// eslint-disable-next-line no-param-reassign
delete img.dataset.src;
img.classList.remove('lazy');
img.classList.add('js-lazy-loaded');
img.classList.add('qa-js-lazy-loaded');


@ -56,7 +56,7 @@ export function confirmAction(
export function confirmViaGlModal(message, element) {
const primaryBtnConfig = {};
const confirmBtnVariant = element.getAttribute('data-confirm-btn-variant');
const { confirmBtnVariant } = element.dataset;
if (confirmBtnVariant) {
primaryBtnConfig.primaryBtnVariant = confirmBtnVariant;


@ -41,7 +41,7 @@ export default {
const dropdownToggle = this.$refs.glDropdown.$el.querySelector('.dropdown-toggle');
if (dropdownToggle) {
dropdownToggle.setAttribute('data-qa-selector', 'access_level_dropdown');
dropdownToggle.dataset.qaSelector = 'access_level_dropdown';
}
},
methods: {


@ -1,6 +1,6 @@
function onSidebarLinkClick() {
const setDataTrackAction = (element, action) => {
element.setAttribute('data-track-action', action);
element.dataset.trackAction = action;
};
const setDataTrackExtra = (element, value) => {
@ -12,10 +12,10 @@ function onSidebarLinkClick() {
? SIDEBAR_COLLAPSED
: SIDEBAR_EXPANDED;
element.setAttribute(
'data-track-extra',
JSON.stringify({ sidebar_display: sidebarCollapsed, menu_display: value }),
);
element.dataset.trackExtra = JSON.stringify({
sidebar_display: sidebarCollapsed,
menu_display: value,
});
};
const EXPANDED = 'Expanded';


@ -298,7 +298,7 @@ export default class ActivityCalendar {
.querySelector(this.activitiesContainer)
.querySelectorAll('.js-localtime')
.forEach((el) => {
el.setAttribute('title', formatDate(el.getAttribute('data-datetime')));
el.setAttribute('title', formatDate(el.dataset.datetime));
});
})
.catch(() =>


@ -57,7 +57,7 @@ export default {
if (authorParam) {
commitsSearchInput.setAttribute('disabled', true);
commitsSearchInput.setAttribute('data-toggle', 'tooltip');
commitsSearchInput.dataset.toggle = 'tooltip';
commitsSearchInput.setAttribute('title', tooltipMessage);
this.currentAuthor = authorParam;
}


@ -119,7 +119,7 @@ function mountAssigneesComponentDeprecated(mediator) {
issuableIid: String(iid),
projectPath: fullPath,
field: el.dataset.field,
signedIn: el.hasAttribute('data-signed-in'),
signedIn: Object.prototype.hasOwnProperty.call(el.dataset, 'signedIn'),
issuableType:
isInIssuePage() || isInIncidentPage() || isInDesignPage()
? IssuableType.Issue
@ -149,7 +149,10 @@ function mountAssigneesComponent() {
},
provide: {
canUpdate: editable,
directlyInviteMembers: el.hasAttribute('data-directly-invite-members'),
directlyInviteMembers: Object.prototype.hasOwnProperty.call(
el.dataset,
'directlyInviteMembers',
),
},
render: (createElement) =>
createElement('sidebar-assignees-widget', {


@ -39,7 +39,7 @@ export default () => {
return createElement(TerraformList, {
props: {
emptyStateImage,
terraformAdmin: el.hasAttribute('data-terraform-admin'),
terraformAdmin: Object.prototype.hasOwnProperty.call(el.dataset, 'terraformAdmin'),
},
});
},


@ -33,7 +33,7 @@ export default {
this.fetchFreshItems();
const body = document.querySelector('body');
const namespaceId = body.getAttribute('data-namespace-id');
const { namespaceId } = body.dataset;
this.track('click_whats_new_drawer', { label: 'namespace_id', value: namespaceId });
},


@ -1,6 +1,6 @@
export const STORAGE_KEY = 'display-whats-new-notification';
export const getVersionDigest = (appEl) => appEl.getAttribute('data-version-digest');
export const getVersionDigest = (appEl) => appEl.dataset.versionDigest;
export const setNotification = (appEl) => {
const versionDigest = getVersionDigest(appEl);


@ -139,6 +139,10 @@ module Repositories
def verify_commit_range!(from, to)
return unless Feature.enabled?(:changelog_commits_limitation, @project)
commits = @project.repository.commits_by(oids: [from, to])
raise Gitlab::Changelog::Error, "Invalid or not found commit value in the given range" unless commits.count == 2
_, commits_count = @project.repository.diverging_commit_count(from, to)
if commits_count > COMMITS_LIMIT


@ -1,6 +1,6 @@
.user_new
= gitlab_ui_form_for [:admin, @user], html: { class: 'fieldset-form' } do |f|
= form_errors(@user)
= form_errors(@user, pajamas_alert: true)
%fieldset
%legend.gl-border-bottom-0= _('Account')


@ -25,11 +25,11 @@
%label.label-bold= _('Personal Access Token')
= hidden_field_tag(:namespace_id, params[:namespace_id])
= text_field_tag :personal_access_token, '', class: 'form-control gl-form-input', placeholder: _('e.g. %{token}') % { token: '8d3f016698e...' }, data: { qa_selector: 'personal_access_token_field' }
%span.form-text.text-muted
%span.form-text.gl-text-gray-600
= import_github_personal_access_token_message
= render_if_exists 'import/github/ci_cd_only'
.form-actions.d-flex.justify-content-end
.form-actions.gl-display-flex.gl-justify-content-end
= link_to _('Cancel'), new_project_path, class: 'gl-button btn btn-default'
= submit_tag _('Authenticate'), class: 'gl-button btn btn-confirm ml-2', data: { qa_selector: 'authenticate_button' }
= submit_tag _('Authenticate'), class: 'gl-button btn btn-confirm gl-ml-3', data: { qa_selector: 'authenticate_button' }


@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/363621
milestone: '15.1'
type: development
group: group::pipeline authoring
default_enabled: false
default_enabled: true


@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/339664
milestone: '14.3'
type: development
group: group::project management
default_enabled: false
default_enabled: true


@ -82,6 +82,7 @@ exceptions:
- GID
- GIF
- GKE
- GLEX
- GLFM
- GNU
- GPG


@ -815,6 +815,20 @@ Gitlab::Geo.verification_enabled_replicator_classes.each do |klass|
end
```
### Message: curl 18 transfer closed with outstanding read data remaining & fetch-pack: unexpected disconnect while reading sideband packet
Unstable networking conditions can cause Gitaly to fail when trying to fetch large repository
data from the primary site. This is more likely to happen if a repository has to be
replicated from scratch between sites.
Geo retries several times, but if the transmission is consistently interrupted
by network hiccups, an alternative method such as `rsync` can be used to circumvent `git` and
create the initial copy of any repository that fails to be replicated by Geo.
We recommend transferring each failing repository individually and checking for consistency
after each transfer. Follow the [single target `rsync` instructions](../../operations/moving_repositories.md#single-rsync-to-another-server)
to transfer each affected repository from the primary to the secondary site.
## Fixing errors during a failover or when promoting a secondary to a primary node
The following are possible error messages that might be encountered during failover or


@ -10,6 +10,7 @@ type: howto
> - [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/5914) in GitLab 14.4 [with a flag](../../feature_flags.md) named `geo_secondary_proxy`. Disabled by default.
> - [Enabled by default for unified URLs](https://gitlab.com/gitlab-org/gitlab/-/issues/325732) in GitLab 14.6.
> - [Disabled by default for different URLs](https://gitlab.com/gitlab-org/gitlab/-/issues/325732) in GitLab 14.6 [with a flag](../../feature_flags.md) named `geo_secondary_proxy_separate_urls`.
> - [Enabled by default for different URLs](https://gitlab.com/gitlab-org/gitlab/-/issues/346112) in GitLab 15.1.
FLAG:
On self-managed GitLab, this feature is only available by default for Geo sites using a unified URL. See below to
@ -107,17 +108,19 @@ gitlab:
GEO_SECONDARY_PROXY: "0"
```
## Enable Geo proxying with Separate URLs
## Geo proxying with Separate URLs
The ability to use proxying with separate URLs is still in development. You can follow the
["Geo secondary proxying with separate URLs" epic](https://gitlab.com/groups/gitlab-org/-/epics/6865)
for progress.
Since GitLab 15.1, Geo secondary proxying is enabled by default for separate URLs also.
To try out this feature, enable the `geo_secondary_proxy_separate_urls` feature flag.
There are minor known issues linked in the ["Geo secondary proxying with separate URLs"
epic](https://gitlab.com/groups/gitlab-org/-/epics/6865). You can also add feedback in the epic about any use-cases that
are not possible anymore with proxying enabled.
If you run into issues, to disable this feature, disable the `geo_secondary_proxy_separate_urls` feature flag.
SSH into one node running Rails on your primary Geo site and run:
```shell
sudo gitlab-rails runner "Feature.enable(:geo_secondary_proxy_separate_urls)"
sudo gitlab-rails runner "Feature.disable(:geo_secondary_proxy_separate_urls)"
```
In Kubernetes, you can run the same command in the toolbox pod. Refer to the


@ -6,7 +6,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Broadcast Messages API **(FREE SELF)**
> 'target_access_levels' [introduced](https://gitlab.com/gitlab-org/growth/team-tasks/-/issues/461) in GitLab 14.8 [with a flag](../administration/feature_flags.md) named `role_targeted_broadcast_messages`. Disabled by default.
> `target_access_levels` [introduced](https://gitlab.com/gitlab-org/growth/team-tasks/-/issues/461) in GitLab 14.8 [with a flag](../administration/feature_flags.md) named `role_targeted_broadcast_messages`. Disabled by default.
Broadcast messages API operates on [broadcast messages](../user/admin_area/broadcast_messages.md).


@ -2882,6 +2882,28 @@ Input type: `HttpIntegrationUpdateInput`
| <a id="mutationhttpintegrationupdateerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationhttpintegrationupdateintegration"></a>`integration` | [`AlertManagementHttpIntegration`](#alertmanagementhttpintegration) | HTTP integration. |
### `Mutation.issuableResourceLinkCreate`
Input type: `IssuableResourceLinkCreateInput`
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationissuableresourcelinkcreateclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationissuableresourcelinkcreateid"></a>`id` | [`IssueID!`](#issueid) | Incident id to associate the resource link with. |
| <a id="mutationissuableresourcelinkcreatelink"></a>`link` | [`String!`](#string) | Link of the resource. |
| <a id="mutationissuableresourcelinkcreatelinktext"></a>`linkText` | [`String`](#string) | Link text of the resource. |
| <a id="mutationissuableresourcelinkcreatelinktype"></a>`linkType` | [`IssuableResourceLinkType`](#issuableresourcelinktype) | Link type of the resource. |
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationissuableresourcelinkcreateclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationissuableresourcelinkcreateerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationissuableresourcelinkcreateissuableresourcelink"></a>`issuableResourceLink` | [`IssuableResourceLink`](#issuableresourcelink) | Issuable resource link. |
### `Mutation.issueMove`
Input type: `IssueMoveInput`
@ -12572,6 +12594,20 @@ Returns [`VulnerabilitySeveritiesCount`](#vulnerabilityseveritiescount).
| <a id="instancesecuritydashboardvulnerabilityseveritiescountseverity"></a>`severity` | [`[VulnerabilitySeverity!]`](#vulnerabilityseverity) | Filter vulnerabilities by severity. |
| <a id="instancesecuritydashboardvulnerabilityseveritiescountstate"></a>`state` | [`[VulnerabilityState!]`](#vulnerabilitystate) | Filter vulnerabilities by state. |
### `IssuableResourceLink`
Describes an issuable resource link for incident issues.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="issuableresourcelinkid"></a>`id` | [`IncidentManagementIssuableResourceLinkID!`](#incidentmanagementissuableresourcelinkid) | ID of the Issuable resource link. |
| <a id="issuableresourcelinkissue"></a>`issue` | [`Issue!`](#issue) | Incident of the resource link. |
| <a id="issuableresourcelinklink"></a>`link` | [`String!`](#string) | Web Link to the resource. |
| <a id="issuableresourcelinklinktext"></a>`linkText` | [`String`](#string) | Optional text for the link. |
| <a id="issuableresourcelinklinktype"></a>`linkType` | [`IssuableResourceLinkType!`](#issuableresourcelinktype) | Type of the resource link. |
### `Issue`
#### Fields
@ -18980,6 +19016,16 @@ Health status of an issue or epic.
| <a id="healthstatusneedsattention"></a>`needsAttention` | Needs attention. |
| <a id="healthstatusontrack"></a>`onTrack` | On track. |
### `IssuableResourceLinkType`
Issuable resource link type enum.
| Value | Description |
| ----- | ----------- |
| <a id="issuableresourcelinktypegeneral"></a>`general` | General link type. |
| <a id="issuableresourcelinktypeslack"></a>`slack` | Slack link type. |
| <a id="issuableresourcelinktypezoom"></a>`zoom` | Zoom link type. |
### `IssuableSearchableField`
Fields to perform the search in.
@ -20385,6 +20431,12 @@ A `IncidentManagementEscalationRuleID` is a global ID. It is encoded as a string
An example `IncidentManagementEscalationRuleID` is: `"gid://gitlab/IncidentManagement::EscalationRule/1"`.
### `IncidentManagementIssuableResourceLinkID`
A `IncidentManagementIssuableResourceLinkID` is a global ID. It is encoded as a string.
An example `IncidentManagementIssuableResourceLinkID` is: `"gid://gitlab/IncidentManagement::IssuableResourceLink/1"`.
### `IncidentManagementOncallParticipantID`
A `IncidentManagementOncallParticipantID` is a global ID. It is encoded as a string.


@ -2007,6 +2007,28 @@ end
.to contain_exactly(a_graphql_entity_for(issue, :iid, :title, created_at: some_time))
```
- Use `GraphqlHelpers#empty_schema` to create an empty schema, rather than creating
one by hand. For example:
```ruby
# good
let(:schema) { empty_schema }
# bad
let(:query_type) { GraphQL::ObjectType.new }
let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
```
- Use `GraphqlHelpers#query_double(schema: nil)` instead of `double('query', schema: nil)`. For example:
```ruby
# good
let(:query) { query_double(schema: GitlabSchema) }
# bad
let(:query) { double('Query', schema: GitlabSchema) }
```
- Avoid false positives:
Authenticating a user with the `current_user:` argument for `post_graphql`
@ -2061,6 +2083,122 @@ end
`spec/requests/api/graphql/ci/pipeline_spec.rb` regardless of the query being
used to fetch the pipeline data.
- There can be possible cyclic dependencies within our GraphQL types.
See [Adding field with resolver on a Type causes "Can't determine the return type " error on a different Type](https://github.com/rmosolgo/graphql-ruby/issues/3974#issuecomment-1084444214)
and [Fix unresolved name due to cyclic definition](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/84202/diffs#diff-content-32d14251082fd45412e1fdbf5820e62d157e70d2).
In particular, this can happen with `connection_type`. Normally we might use the following in a resolver:
```ruby
type Types::IssueType.connection_type, null: true
```
However, this might cause a cyclic definition, which can result in errors like:
```ruby
NameError: uninitialized constant Resolvers::GroupIssuesResolver
```
To fix this, we must create a new file that encapsulates the connection type,
and then reference it using double quotes. This gives a delayed resolution,
and the proper connection type. For example:
```ruby
module Types
# rubocop: disable Graphql/AuthorizeTypes
class IssueConnectionType < CountableConnectionType
end
end
Types::IssueConnectionType.prepend_mod_with('Types::IssueConnectionType')
```
The example above, in [types/issue_connection_type.rb](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/graphql/types/issue_connection_type.rb),
defines a new `Types::IssueConnectionType`, which is then referenced in
[app/graphql/resolvers/base_issues_resolver.rb](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/graphql/resolvers/base_issues_resolver.rb):
```ruby
type "Types::IssueConnection", null: true
```
Only use this style if you are having spec failures. This is not intended to be a new
pattern that we use. This issue may disappear after we've upgraded to `2.x`.
- There can be instances where a spec fails because the class is not loaded correctly.
It relates to the
[circular dependencies problem](https://github.com/rmosolgo/graphql-ruby/issues/1929) and
[Adding field with resolver on a Type causes "Can't determine the return type " error on a different Type](https://github.com/rmosolgo/graphql-ruby/issues/3974).
Unfortunately, the errors generated don't really indicate what the problem is. For example,
remove the quotes from the `RSpec.describe` in
[ee/spec/graphql/resolvers/compliance_management/merge_requests/compliance_violation_resolver_spec.rb](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/spec/graphql/resolvers/compliance_management/merge_requests/compliance_violation_resolver_spec.rb).
Then run `rspec ee/spec/graphql/resolvers/compliance_management/merge_requests/compliance_violation_resolver_spec.rb`.
This generates errors with the expectations. For example:
```ruby
1) Resolvers::ComplianceManagement::MergeRequests::ComplianceViolationResolver#resolve user is authorized filtering the results when given an array of project IDs finds the filtered compliance violations
Failure/Error: expect(subject).to contain_exactly(compliance_violation)
expected collection contained: [#<MergeRequests::ComplianceViolation id: 4, violating_user_id: 26, merge_request_id: 4, reason: "approved_by_committer", severity_level: "low">]
actual collection contained: [#<MergeRequests::ComplianceViolation id: 4, violating_user_id: 26, merge_request_id: 4, reason: "app...er_id: 27, merge_request_id: 5, reason: "approved_by_merge_request_author", severity_level: "high">]
the extra elements were: [#<MergeRequests::ComplianceViolation id: 5, violating_user_id: 27, merge_request_id: 5, reason: "approved_by_merge_request_author", severity_level: "high">]
# ./ee/spec/graphql/resolvers/compliance_management/merge_requests/compliance_violation_resolver_spec.rb:55:in `block (6 levels) in <top (required)>'
```
However, this is not a case of the wrong result being generated; it is caused by the loading order
of the `ComplianceViolationResolver` class.
The only way we've found to fix this is by quoting the class name in the spec. For example, changing
```ruby
RSpec.describe Resolvers::ComplianceManagement::MergeRequests::ComplianceViolationResolver do
```
into:
```ruby
RSpec.describe 'Resolvers::ComplianceManagement::MergeRequests::ComplianceViolationResolver' do
```
See [this merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87295#note_946174036) for some discussion.
Only use this style if you are having spec failures. This is not intended to be a new
pattern that we use. This issue may disappear after we've upgraded to `2.x`.
- When testing resolvers using `GraphqlHelpers#resolve`, arguments for the resolver can be handled two ways.
1. 95% of the resolver specs use arguments that are Ruby objects, as opposed to the GraphQL API, where
only strings and integers are used. This works fine in most cases.
1. If your resolver takes arguments that use a `prepare` proc, such as a resolver that accepts timeframe
arguments (`TimeFrameArguments`), you must pass the `arg_style: :internal_prepared` parameter into
the `resolve` method. This tells the code to convert the arguments into strings and integers and pass
them through regular argument handling, ensuring that the `prepare` proc is called correctly.
For example in [`iterations_resolver_spec.rb`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/spec/graphql/resolvers/iterations_resolver_spec.rb):
```ruby
def resolve_group_iterations(args = {}, obj = group, context = { current_user: current_user })
resolve(described_class, obj: obj, args: args, ctx: context, arg_style: :internal_prepared)
end
```
One additional caveat is that if you are passing enums as a resolver argument, you must use the
external representation of the enum, rather than the internal. For example:
```ruby
# good
resolve_group_iterations({ search: search, in: ['CADENCE_TITLE'] })
# bad
resolve_group_iterations({ search: search, in: [:cadence_title] })
```
The use of `:internal_prepared` was added as a bridge for the
[GraphQL gem](https://graphql-ruby.org) upgrade. Testing resolvers directly will be
[removed eventually](https://gitlab.com/gitlab-org/gitlab/-/issues/363121),
and writing unit tests for resolvers/mutations is
[already deprecated](#writing-unit-tests-deprecated).
## Notes about Query flow and GraphQL infrastructure
The GitLab GraphQL infrastructure can be found in `lib/gitlab/graphql`.


@ -27,7 +27,7 @@ sometimes referred to as GLEX, to run our experiments. The gem exists in a separ
so it can be shared across any GitLab property that uses Ruby. You should feel comfortable reading
the documentation on that project if you want to dig into more advanced topics or open issues. Be
aware that the documentation there reflects what's in the main branch and may not be the same as
the version being used within GitLab.
the version being used in GitLab.
## Glossary of terms
@ -43,7 +43,7 @@ when communicating about experiments:
## Implementing an experiment
[`GLEX`](https://gitlab.com/gitlab-org/ruby/gems/gitlab-experiment) - or `Gitlab::Experiment`, the `gitlab-experiment` gem - is the preferred option for implementing an experiment in GitLab.
[GLEX](https://gitlab.com/gitlab-org/ruby/gems/gitlab-experiment) - or `Gitlab::Experiment`, the `gitlab-experiment` gem - is the preferred option for implementing an experiment in GitLab.
For more information, see [Implementing an A/B/n experiment using GLEX](implementing_experiments.md).


@ -8,7 +8,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
## Testing experiments with RSpec
In the course of working with experiments, you'll probably want to use the RSpec
In the course of working with experiments, you might want to use the RSpec
tooling that's built in. This happens automatically for files in `spec/experiments`, but
for other files and specs you want to include it in, you can specify the `:experiment` type:
@ -84,7 +84,7 @@ expect(subject).to track(:my_event)
subject.track(:my_event)
```
You can use the `on_next_instance` chain method to specify that it will happen
You can use the `on_next_instance` chain method to specify that it happens
on the next instance of the experiment. This helps you if you're calling
`experiment(:example).track` downstream:
@ -127,7 +127,7 @@ describe('when my_experiment is enabled', () => {
```
NOTE:
This method of stubbing in Jest specs will not automatically un-stub itself at the end of the test. We merge our stubbed experiment in with all the other global data in `window.gl`. If you need to remove the stubbed experiments after your test or ensure a clean global object before your test, you'll need to manage the global object directly yourself:
This method of stubbing in Jest specs does not automatically un-stub itself at the end of the test. We merge our stubbed experiment in with all the other global data in `window.gl`. If you must remove the stubbed experiments after your test or ensure a clean global object before your test, you must manage the global object directly yourself:
```javascript
describe('tests that care about global state', () => {


@ -0,0 +1,41 @@
---
stage: Data Stores
group: Global Search
info: "To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments"
type: drawer
source: /doc/user/search/global_search/advanced_search_syntax.md
---
# Search tips
<!-- markdownlint-disable -->
| Use | Description | Example |
|------|-------------|---------|
| `"` | Exact search | [`"gem sidekiq"`](https://gitlab.com/search?group_id=9970&project_id=278964&scope=blobs&search=%22gem+sidekiq%22) |
| <code>&#124;</code> | Or | [<code>display &#124; banner</code>](https://gitlab.com/search?group_id=9970&project_id=278964&scope=blobs&search=display+%7C+banner) |
| `+` | And | [`display +banner`](https://gitlab.com/search?group_id=9970&project_id=278964&repository_ref=&scope=blobs&search=display+%2Bbanner&snippets=) |
| `-` | Exclude | [`display -banner`](https://gitlab.com/search?group_id=9970&project_id=278964&scope=blobs&search=display+-banner) |
| `*` | Partial | [`bug error 50*`](https://gitlab.com/search?group_id=9970&project_id=278964&repository_ref=&scope=blobs&search=bug+error+50%2A&snippets=) |
| `\` | Escape | [`\*md`](https://gitlab.com/search?snippets=&scope=blobs&repository_ref=&search=%5C*md&group_id=9970&project_id=278964) |
## Code search
| Use | Description | Example |
|------|-------------|---------|
| `filename:` | Filename | [`filename:*spec.rb`](https://gitlab.com/search?snippets=&scope=blobs&repository_ref=&search=filename%3A*spec.rb&group_id=9970&project_id=278964) |
| `path:` | Repository location | [`path:spec/workers/`](https://gitlab.com/search?group_id=9970&project_id=278964&repository_ref=&scope=blobs&search=path%3Aspec%2Fworkers&snippets=) |
| `extension:` | File extension, without the `.` | [`extension:js`](https://gitlab.com/search?group_id=9970&project_id=278964&repository_ref=&scope=blobs&search=extension%3Ajs&snippets=) |
| `blob:` | Git object ID | [`blob:998707*`](https://gitlab.com/search?snippets=false&scope=blobs&repository_ref=&search=blob%3A998707*&group_id=9970) |
`extension` and `blob` return exact matches only.
## Examples
| Use | Description |
|------|-------------|
| [`rails -filename:gemfile.lock`](https://gitlab.com/search?group_id=9970&project_id=278964&repository_ref=&scope=blobs&search=rails+-filename%3Agemfile.lock&snippets=) | Show _rails_ in all files except the _`gemfile.lock`_ file. |
| [`RSpec.describe Resolvers -*builder`](https://gitlab.com/search?group_id=9970&project_id=278964&scope=blobs&search=RSpec.describe+Resolvers+-*builder) | Show all _RSpec.describe Resolvers_ that don't start with _builder_. |
| [<code>bug &#124; (display +banner)</code>](https://gitlab.com/search?snippets=&scope=issues&repository_ref=&search=bug+%7C+%28display+%2Bbanner%29&group_id=9970&project_id=278964) | Show _bug_ **or** _display_ **and** _banner_. |
<!-- markdownlint-enable -->


@ -31,6 +31,8 @@ GitLab package.
upgrade is in progress. The user's web browser shows a `Deploy in progress` message or a `502` error.
- For multi-node installations, see how to perform
[zero downtime upgrades](../zero_downtime.md).
- Upgrades to multi-node installations can also be performed
[with downtime](../with_downtime.md).
## Version-specific changes

doc/update/with_downtime.md (new file, 329 lines)

@ -0,0 +1,329 @@
---
stage: Systems
group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Multi-node upgrades with downtime **(FREE SELF)**
NOTE:
This process is a work in progress. You're welcome to provide feedback by either raising a support ticket
or [commenting on this issue](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/6244).
While you can upgrade a multi-node GitLab deployment [with zero downtime](zero_downtime.md),
there are a number of constraints. In particular, you can upgrade to only one minor release
at a time, for example, from 14.6 to 14.7, then to 14.8, etc.
If you want to upgrade to more than one minor release at a time (for example, from 14.6 to 14.9),
you need to take your GitLab instance offline, which implies downtime.
Before starting this process, verify the
[version specific upgrading instructions](index.md#version-specific-upgrading-instructions)
relevant to your [upgrade path](index.md#upgrade-paths).
For a single node installation, you only need to [upgrade the GitLab package](package/index.md).
The process for upgrading a number of components of a multi-node GitLab
installation is the same as for zero downtime upgrades.
The differences relate to the servers running Rails (Puma/Sidekiq) and
the order of events.
At a high level, the process is:
1. Shut down the GitLab application.
1. Upgrade your Consul servers.
1. Upgrade the other back-end components:
- Gitaly, Rails PostgreSQL, Redis, PgBouncer: these can be upgraded in any order.
- If you use PostgreSQL or Redis from your cloud platform and upgrades are required,
substitute the instructions for Omnibus GitLab with your cloud provider's instructions.
1. Upgrade the GitLab application (Sidekiq, Puma) and start the application up.
If you are a Community Edition user, replace `gitlab-ee` with
`gitlab-ce` in the following commands.
## Stop writes to the database
Shut down Puma and Sidekiq on all servers running these processes:
```shell
sudo gitlab-ctl stop sidekiq
sudo gitlab-ctl stop puma
```
## Upgrade the Consul nodes
[Consult the Consul documentation for the complete instructions](../administration/consul.md#upgrade-the-consul-nodes).
In summary:
1. Check the Consul nodes are all healthy.
1. Upgrade the GitLab package on all your Consul servers:
```shell
# Debian/Ubuntu
sudo apt-get update && sudo apt-get install gitlab-ee
# Centos/RHEL
sudo yum install gitlab-ee
```
1. Restart all GitLab services **one node at a time**:
```shell
sudo gitlab-ctl restart
```
If your Consul cluster processes are not on their own servers, and are shared
with another service such as Redis HA or Patroni, ensure that you follow these
principles when upgrading those servers:
- Do not restart services on more than one server at a time.
- Check the Consul cluster is healthy before upgrading or restarting services.
## Upgrade the Gitaly nodes (Praefect / Gitaly Cluster)
If you're running Gitaly cluster, follow the [zero downtime process](zero_downtime.md#gitaly-or-gitaly-cluster)
for Gitaly cluster.
If you are using Amazon Machine Images (AMIs) on AWS, the Gitaly nodes
**should not be upgraded via the AMI process**. Gitaly nodes should **only**
be upgraded using the package upgrade. This is because:
- Praefect tracks replicas of Git repositories by server hostname.
- Redeployment using AMIs gives the nodes new hostnames.
- Even though the storage will be the same, Gitaly cluster will not work after this.
The Praefect nodes, however, can be upgraded via an AMI redeployment process:
1. The AMI redeployment process must include `gitlab-ctl reconfigure`.
Set `praefect['auto_migrate'] = false` on the AMI so all nodes get this. This
prevents `reconfigure` from automatically running database migrations.
1. The first node to be redeployed with the upgraded image should be your
deploy node.
1. After it's deployed, set `praefect['auto_migrate'] = true` in `gitlab.rb`
and apply with `gitlab-ctl reconfigure`. This will run the database
migrations.
1. Redeploy your other Praefect nodes.
## Upgrade the Gitaly nodes not part of Gitaly cluster
For Gitaly servers which are not part of Gitaly cluster, update the GitLab package:
```shell
# Debian/Ubuntu
sudo apt-get update && sudo apt-get install gitlab-ee
# Centos/RHEL
sudo yum install gitlab-ee
```
If you have multiple Gitaly shards or have multiple load-balanced Gitaly nodes
using NFS, it doesn't matter in which order you upgrade the Gitaly servers.
## Upgrade the PostgreSQL nodes
For unclustered PostgreSQL servers:
1. Upgrade the GitLab package:
```shell
# Debian/Ubuntu
sudo apt-get update && sudo apt-get install gitlab-ee
# Centos/RHEL
sudo yum install gitlab-ee
```
1. The upgrade process does not restart PostgreSQL when the binaries are upgraded.
Restart to load the new version:
```shell
sudo gitlab-ctl restart
```
## Upgrade the Patroni node
Patroni is used to achieve high availability with PostgreSQL.
If a PostgreSQL major version upgrade is required,
[follow the major version process](../administration/postgresql/replication_and_failover.md#upgrading-postgresql-major-version-in-a-patroni-cluster).
The upgrade process for all other versions is performed on all replicas first.
After they're upgraded, a cluster failover occurs from the leader to one of the upgraded
replicas. This ensures that only one failover is needed, and once complete, the new
leader will be upgraded.
Follow this process:
1. Identify the leader and replica nodes, and [verify that the cluster is healthy](../administration/postgresql/replication_and_failover.md#check-replication-status).
Run on a database node:
```shell
sudo gitlab-ctl patroni members
```
1. Upgrade the GitLab package on one of the replica nodes:
```shell
# Debian/Ubuntu
sudo apt-get update && sudo apt-get install gitlab-ee
# Centos/RHEL
sudo yum install gitlab-ee
```
1. Restart to load the new version:
```shell
sudo gitlab-ctl restart
```
1. [Verify that the cluster is healthy](../administration/postgresql/replication_and_failover.md#check-replication-status).
1. Repeat these steps for the other replica: upgrade, restart, health check.
1. Upgrade the leader node following the same package upgrade as the replicas.
1. Restart all services on the leader node to load the new version, and also
trigger a cluster failover:
```shell
sudo gitlab-ctl restart
```
1. [Check the cluster is healthy](../administration/postgresql/replication_and_failover.md#check-replication-status)
## Upgrade the PgBouncer nodes
If you run PgBouncer on your Rails (application) nodes, then
PgBouncer is upgraded as part of the application server upgrade.
Upgrade the PgBouncer nodes:
```shell
# Debian/Ubuntu
sudo apt-get update && sudo apt-get install gitlab-ee
# Centos/RHEL
sudo yum install gitlab-ee
```
## Upgrade the Redis node
Upgrade a standalone Redis server by updating the GitLab package:
```shell
# Debian/Ubuntu
sudo apt-get update && sudo apt-get install gitlab-ee
# Centos/RHEL
sudo yum install gitlab-ee
```
## Upgrade Redis HA (using Sentinel) **(PREMIUM SELF)**
Follow [the zero downtime instructions](zero_downtime.md#use-redis-ha-using-sentinel)
for upgrading your Redis HA cluster.
## Upgrade the Rails nodes (Puma / Sidekiq)
All the Puma and Sidekiq processes were previously shut down. On each node:
1. Ensure `/etc/gitlab/skip-auto-reconfigure` does not exist.
1. Check that Puma and Sidekiq are shut down:
```shell
ps -ef | egrep 'puma: | puma | sidekiq '
```
Select one node that runs Puma. This will be your deploy node, and is responsible for
running all database migrations. On the deploy node:
1. Ensure the server is configured to permit regular migrations. Check that
`/etc/gitlab/gitlab.rb` does not contain `gitlab_rails['auto_migrate'] = false`.
Either set it explicitly to `gitlab_rails['auto_migrate'] = true` or omit it
for the default behavior (`true`).
1. If you're using PgBouncer:
You must bypass PgBouncer and connect directly to PostgreSQL
before running migrations.
Rails uses an advisory lock when attempting to run a migration to prevent
concurrent migrations from running on the same database. These locks are
not shared across transactions, resulting in `ActiveRecord::ConcurrentMigrationError`
and other issues when running database migrations using PgBouncer in transaction
pooling mode.
1. If you're running Patroni, find the leader node. Run on a database node:
```shell
sudo gitlab-ctl patroni members
```
1. Update `gitlab.rb` on the deploy node. Change `gitlab_rails['db_host']`
and `gitlab_rails['db_port']` to either:
- The host and port for your database server (unclustered PostgreSQL).
- The host and port for your cluster leader if you're running Patroni.
1. Apply the changes:
```shell
sudo gitlab-ctl reconfigure
```
1. Upgrade the GitLab package:
```shell
# Debian/Ubuntu
sudo apt-get update && sudo apt-get install gitlab-ee
# Centos/RHEL
sudo yum install gitlab-ee
```
1. If you modified `gitlab.rb` on the deploy node to bypass PgBouncer:
1. Update `gitlab.rb` on the deploy node. Change `gitlab_rails['db_host']`
and `gitlab_rails['db_port']` back to your PgBouncer settings.
1. Apply the changes:
```shell
sudo gitlab-ctl reconfigure
```
1. To ensure all services are running the upgraded version, and (if applicable) accessing
the database using PgBouncer, restart all services on the deploy node:
```shell
sudo gitlab-ctl restart
```
Next, upgrade all the other Puma and Sidekiq nodes. The setting `gitlab_rails['auto_migrate']` can be
set to anything in `gitlab.rb` on these nodes.
They can be upgraded in parallel:
1. Upgrade the GitLab package:
```shell
# Debian/Ubuntu
sudo apt-get update && sudo apt-get install gitlab-ee
# Centos/RHEL
sudo yum install gitlab-ee
```
1. Ensure all services are restarted:
```shell
sudo gitlab-ctl restart
```
## Upgrade the Monitor node
Upgrade the GitLab package:
```shell
# Debian/Ubuntu
sudo apt-get update && sudo apt-get install gitlab-ee
# Centos/RHEL
sudo yum install gitlab-ee
```


@ -17,6 +17,11 @@ there are the following requirements:
- You are using PostgreSQL. Starting from GitLab 12.1, MySQL is not supported.
- You have set up a multi-node GitLab instance. Single-node instances do not support zero-downtime upgrades.
If you want to upgrade multiple releases or do not meet the other requirements:
- [Upgrade a single node with downtime](package/index.md).
- [Upgrade a multi-node instance with downtime](with_downtime.md).
If you meet all the requirements above, follow these instructions in order. There are three sets of steps, depending on your deployment type:
| Deployment type | Description |
@ -308,7 +313,7 @@ node throughout the process.
- If you're using PgBouncer:
You need to bypass PgBouncer and connect directly to the database master
You need to bypass PgBouncer and connect directly to the database leader
before running migrations.
Rails uses an advisory lock when attempting to run a migration to prevent
@ -317,7 +322,7 @@ node throughout the process.
and other issues when running database migrations using PgBouncer in transaction
pooling mode.
To find the master node, run the following on a database node:
To find the leader node, run the following on a database node:
```shell
sudo gitlab-ctl patroni members
@ -325,7 +330,7 @@ node throughout the process.
Then, in your `gitlab.rb` file on the deploy node, update
`gitlab_rails['db_host']` and `gitlab_rails['db_port']` with the database
master's host and port.
leader's host and port.
- To get the regular database migrations and latest code in place, run
@ -691,7 +696,7 @@ sudo touch /etc/gitlab/skip-auto-reconfigure
1. If you're using PgBouncer:
You need to bypass PgBouncer and connect directly to the database master
You need to bypass PgBouncer and connect directly to the database leader
before running migrations.
Rails uses an advisory lock when attempting to run a migration to prevent
@ -700,7 +705,7 @@ sudo touch /etc/gitlab/skip-auto-reconfigure
and other issues when running database migrations using PgBouncer in transaction
pooling mode.
To find the master node, run the following on a database node:
To find the leader node, run the following on a database node:
```shell
sudo gitlab-ctl patroni members
@ -708,7 +713,7 @@ sudo touch /etc/gitlab/skip-auto-reconfigure
Then, in your `gitlab.rb` file on the deploy node, update
`gitlab_rails['db_host']` and `gitlab_rails['db_port']` with the database
master's host and port.
leader's host and port.
1. To get the regular database migrations and latest code in place, run


@ -78,7 +78,7 @@ list of broadcast messages.
## Edit a broadcast message
If you need to make changes to a broadcast message, you can edit it.
If you must make changes to a broadcast message, you can edit it.
To edit a broadcast message:


@ -45,7 +45,7 @@ frequency to the `production` environment. The environment must be part of the
for its deployment information to appear on the graphs.
Deployment frequency is one of the four [DORA metrics](index.md#devops-research-and-assessment-dora-key-metrics) that DevOps teams use for measuring excellence in software delivery.
The deployment frequency chart is available for groups and projects.
To view the deployment frequency chart:
@ -78,7 +78,7 @@ To view the lead time for changes chart:
![Lead time](img/lead_time_chart_v13_11.png)
## View time to restore service chart **(ULTIMATE)**
## View time to restore service chart **(PREMIUM)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/356959) in GitLab 15.1


@ -190,7 +190,7 @@ To pull images from the Dependency Proxy, you must:
### GitLab deploy token
> Support for `gitlab-deploy-token` at the group level [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/214014) in GitLab 15.1 [with a flag](../../../administration/feature_flags.md) named `ci_variable_for_group_gitlab_deploy_token`. Disabled by default.
> Support for `gitlab-deploy-token` at the group level [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/214014) in GitLab 15.1 [with a flag](../../../administration/feature_flags.md) named `ci_variable_for_group_gitlab_deploy_token`. Enabled by default.
There's a special case when it comes to deploy tokens. If a user creates one
named `gitlab-deploy-token`, the username and token of the deploy token is


@ -6,13 +6,13 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Tasks **(FREE)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/334812) in GitLab 14.5 [with a flag](../administration/feature_flags.md) named `work_items`. Disabled by default.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/334812) in GitLab 14.5 [with a flag](../administration/feature_flags.md) named `work_items`. Disabled by default.
> - [Enabled on GitLab.com and self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/339664) in GitLab 15.1.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available,
ask an administrator to [enable the feature flag](../administration/feature_flags.md) named `work_items`.
On GitLab.com, this feature is not available.
The feature is not ready for production use.
On self-managed GitLab, by default this feature is available. To hide the feature,
ask an administrator to [disable the feature flag](../administration/feature_flags.md) named `work_items`.
On GitLab.com, this feature is available.
Use tasks to track steps needed for the [issue](project/issues/index.md) to be closed.


@ -0,0 +1,88 @@
# frozen_string_literal: true
require 'fiddle'
module Gitlab
module Memory
module Jemalloc
extend self
STATS_FORMATS = {
json: { options: 'J', extension: 'json' },
text: { options: '', extension: 'txt' }
}.freeze
STATS_DEFAULT_FORMAT = :json
# Return jemalloc stats as a string.
def stats(format: STATS_DEFAULT_FORMAT)
verify_format!(format)
with_malloc_stats_print do |stats_print|
StringIO.new.tap { |io| write_stats(stats_print, io, STATS_FORMATS[format]) }.string
end
end
# Write jemalloc stats to the given directory.
def dump_stats(path:, format: STATS_DEFAULT_FORMAT)
verify_format!(format)
with_malloc_stats_print do |stats_print|
format_settings = STATS_FORMATS[format]
File.open(File.join(path, file_name(format_settings[:extension])), 'wb') do |io|
write_stats(stats_print, io, format_settings)
end
end
end
private
def verify_format!(format)
raise "format must be one of #{STATS_FORMATS.keys}" unless STATS_FORMATS.key?(format)
end
def with_malloc_stats_print
fiddle_func = malloc_stats_print
return unless fiddle_func
yield fiddle_func
end
def malloc_stats_print
method = Fiddle::Handle.sym("malloc_stats_print")
Fiddle::Function.new(
method,
# C signature:
# void (write_cb_t *write_cb, void *cbopaque, const char *opts)
# arg1: callback function pointer (see below)
# arg2: pointer to cbopaque holding additional callback data; always NULL here
# arg3: options string, affects output format (text or JSON)
#
# Callback signature (write_cb_t):
# void (void *, const char *)
# arg1: pointer to cbopaque data (see above; unused)
# arg2: pointer to string buffer holding textual output
[Fiddle::TYPE_VOIDP, Fiddle::TYPE_VOIDP, Fiddle::TYPE_VOIDP],
Fiddle::TYPE_VOID
)
rescue Fiddle::DLError
# This means the Fiddle::Handle to jemalloc was not open (jemalloc wasn't loaded)
# or already closed. Either way, return nil.
end
def write_stats(stats_print, io, format)
callback = Fiddle::Closure::BlockCaller.new(
Fiddle::TYPE_VOID, [Fiddle::TYPE_VOIDP, Fiddle::TYPE_VOIDP]) do |_, fragment|
io << fragment
end
stats_print.call(callback, nil, format[:options])
end
def file_name(extension)
"jemalloc_stats.#{$$}.#{Time.current.to_i}.#{extension}"
end
end
end
end


@ -43866,6 +43866,9 @@ msgstr ""
msgid "You have insufficient permissions to create an on-call schedule for this project"
msgstr ""
msgid "You have insufficient permissions to manage resource links for this incident"
msgstr ""
msgid "You have insufficient permissions to manage timeline events for this incident"
msgstr ""


@ -13,16 +13,16 @@ export default function initVueMRPage() {
const diffsAppProjectPath = 'testproject';
const mrEl = document.createElement('div');
mrEl.className = 'merge-request fixture-mr';
mrEl.setAttribute('data-mr-action', 'diffs');
mrEl.dataset.mrAction = 'diffs';
mrTestEl.appendChild(mrEl);
const mrDiscussionsEl = document.createElement('div');
mrDiscussionsEl.id = 'js-vue-mr-discussions';
mrDiscussionsEl.setAttribute('data-current-user-data', JSON.stringify(userDataMock));
mrDiscussionsEl.setAttribute('data-noteable-data', JSON.stringify(noteableDataMock));
mrDiscussionsEl.setAttribute('data-notes-data', JSON.stringify(notesDataMock));
mrDiscussionsEl.setAttribute('data-noteable-type', 'merge-request');
mrDiscussionsEl.setAttribute('data-is-locked', 'false');
mrDiscussionsEl.dataset.currentUserData = JSON.stringify(userDataMock);
mrDiscussionsEl.dataset.noteableData = JSON.stringify(noteableDataMock);
mrDiscussionsEl.dataset.notesData = JSON.stringify(notesDataMock);
mrDiscussionsEl.dataset.noteableType = 'merge-request';
mrDiscussionsEl.dataset.isLocked = 'false';
mrTestEl.appendChild(mrDiscussionsEl);
const discussionCounterEl = document.createElement('div');
@ -31,9 +31,9 @@ export default function initVueMRPage() {
const diffsAppEl = document.createElement('div');
diffsAppEl.id = 'js-diffs-app';
diffsAppEl.setAttribute('data-endpoint', diffsAppEndpoint);
diffsAppEl.setAttribute('data-project-path', diffsAppProjectPath);
diffsAppEl.setAttribute('data-current-user-data', JSON.stringify(userDataMock));
diffsAppEl.dataset.endpoint = diffsAppEndpoint;
diffsAppEl.dataset.projectPath = diffsAppProjectPath;
diffsAppEl.dataset.currentUserData = JSON.stringify(userDataMock);
mrTestEl.appendChild(diffsAppEl);
const mock = new MockAdapter(axios);

View File

@ -9,7 +9,7 @@ export const toHaveSpriteIcon = (element, iconName) => {
const iconReferences = [].slice.apply(element.querySelectorAll('svg use'));
const matchingIcon = iconReferences.find(
(reference) => reference.parentNode.getAttribute('data-testid') === `${iconName}-icon`,
(reference) => reference.parentNode.dataset.testid === `${iconName}-icon`,
);
const pass = Boolean(matchingIcon);

View File

@ -12,8 +12,8 @@ describe('initAdminUsersApp', () => {
beforeEach(() => {
el = document.createElement('div');
el.setAttribute('data-users', JSON.stringify(users));
el.setAttribute('data-paths', JSON.stringify(paths));
el.dataset.users = JSON.stringify(users);
el.dataset.paths = JSON.stringify(paths);
wrapper = createWrapper(initAdminUsersApp(el));
});
@ -40,8 +40,8 @@ describe('initAdminUserActions', () => {
beforeEach(() => {
el = document.createElement('div');
el.setAttribute('data-user', JSON.stringify(user));
el.setAttribute('data-paths', JSON.stringify(paths));
el.dataset.user = JSON.stringify(user);
el.dataset.paths = JSON.stringify(paths);
wrapper = createWrapper(initAdminUserActions(el));
});

View File

@ -15,8 +15,8 @@ describe('initRecoveryCodes', () => {
beforeEach(() => {
el = document.createElement('div');
el.setAttribute('class', 'js-2fa-recovery-codes');
el.setAttribute('data-codes', codesJsonString);
el.setAttribute('data-profile-account-path', profileAccountPath);
el.dataset.codes = codesJsonString;
el.dataset.profileAccountPath = profileAccountPath;
document.body.appendChild(el);
wrapper = createWrapper(initRecoveryCodes());

View File

@ -11,7 +11,7 @@ function createComponent() {
}
async function setLoaded(loaded) {
document.querySelector('.blob-viewer').setAttribute('data-loaded', loaded);
document.querySelector('.blob-viewer').dataset.loaded = loaded;
await nextTick();
}
@ -53,7 +53,7 @@ describe('Markdown table of contents component', () => {
it('does not show dropdown when viewing non-rich content', async () => {
createComponent();
document.querySelector('.blob-viewer').setAttribute('data-type', 'simple');
document.querySelector('.blob-viewer').dataset.type = 'simple';
await setLoaded(true);

View File

@ -80,9 +80,9 @@ describe('Blob viewer', () => {
return asyncClick()
.then(() => asyncClick())
.then(() => {
expect(
document.querySelector('.blob-viewer[data-type="simple"]').getAttribute('data-loaded'),
).toBe('true');
expect(document.querySelector('.blob-viewer[data-type="simple"]').dataset.loaded).toBe(
'true',
);
});
});

View File

@ -21,12 +21,12 @@ describe('LockPopovers', () => {
};
if (lockedByApplicationSetting) {
popoverMountEl.setAttribute('data-popover-data', JSON.stringify(popoverData));
popoverMountEl.dataset.popoverData = JSON.stringify(popoverData);
} else if (lockedByAncestor) {
popoverMountEl.setAttribute(
'data-popover-data',
JSON.stringify({ ...popoverData, ancestor_namespace: mockNamespace }),
);
popoverMountEl.dataset.popoverData = JSON.stringify({
...popoverData,
ancestor_namespace: mockNamespace,
});
}
document.body.appendChild(popoverMountEl);

View File

@ -195,8 +195,8 @@ describe('Code navigation actions', () => {
it('commits SET_CURRENT_DEFINITION with LSIF data', () => {
target.classList.add('js-code-navigation');
target.setAttribute('data-line-index', '0');
target.setAttribute('data-char-index', '0');
target.dataset.lineIndex = '0';
target.dataset.charIndex = '0';
return testAction(
actions.showDefinition,
@ -218,8 +218,8 @@ describe('Code navigation actions', () => {
it('adds hll class to target element', () => {
target.classList.add('js-code-navigation');
target.setAttribute('data-line-index', '0');
target.setAttribute('data-char-index', '0');
target.dataset.lineIndex = '0';
target.dataset.charIndex = '0';
return testAction(
actions.showDefinition,
@ -243,8 +243,8 @@ describe('Code navigation actions', () => {
it('caches current target element', () => {
target.classList.add('js-code-navigation');
target.setAttribute('data-line-index', '0');
target.setAttribute('data-char-index', '0');
target.dataset.lineIndex = '0';
target.dataset.charIndex = '0';
return testAction(
actions.showDefinition,

View File

@ -31,9 +31,9 @@ describe('ConfirmModal', () => {
buttons.forEach((x) => {
const button = document.createElement('button');
button.setAttribute('class', 'js-confirm-modal-button');
button.setAttribute('data-path', x.path);
button.setAttribute('data-method', x.method);
button.setAttribute('data-modal-attributes', JSON.stringify(x.modalAttributes));
button.dataset.path = x.path;
button.dataset.method = x.method;
button.dataset.modalAttributes = JSON.stringify(x.modalAttributes);
button.innerHTML = 'Action';
buttonContainer.appendChild(button);
});

View File

@ -59,9 +59,10 @@ describe('waitForCSSLoaded', () => {
<link href="two.css" data-startupcss="loading">
`);
const events = waitForCSSLoaded(mockedCallback);
document
.querySelectorAll('[data-startupcss="loading"]')
.forEach((elem) => elem.setAttribute('data-startupcss', 'loaded'));
document.querySelectorAll('[data-startupcss="loading"]').forEach((elem) => {
// eslint-disable-next-line no-param-reassign
elem.dataset.startupcss = 'loaded';
});
document.dispatchEvent(new CustomEvent('CSSStartupLinkLoaded'));
await events;

View File

@ -84,7 +84,7 @@ describe('CreateMergeRequestDropdown', () => {
});
it('enables when can create confidential issue', () => {
document.querySelector('.js-create-mr').setAttribute('data-is-confidential', 'true');
document.querySelector('.js-create-mr').dataset.isConfidential = 'true';
confidentialState.selectedProject = { name: 'test' };
dropdown.enable();
@ -93,7 +93,7 @@ describe('CreateMergeRequestDropdown', () => {
});
it('does not enable when can not create confidential issue', () => {
document.querySelector('.js-create-mr').setAttribute('data-is-confidential', 'true');
document.querySelector('.js-create-mr').dataset.isConfidential = 'true';
dropdown.enable();

View File

@ -25,11 +25,11 @@ describe('DeleteLabelModal', () => {
buttons.forEach((x) => {
const button = document.createElement('button');
button.setAttribute('class', 'js-delete-label-modal-button');
button.setAttribute('data-label-name', x.labelName);
button.setAttribute('data-destroy-path', x.destroyPath);
button.dataset.labelName = x.labelName;
button.dataset.destroyPath = x.destroyPath;
if (x.subjectName) {
button.setAttribute('data-subject-name', x.subjectName);
button.dataset.subjectName = x.subjectName;
}
button.innerHTML = 'Action';

View File

@ -27,7 +27,7 @@ describe('LazyLoader', () => {
const createLazyLoadImage = () => {
const newImg = document.createElement('img');
newImg.className = 'lazy';
newImg.setAttribute('data-src', TEST_PATH);
newImg.dataset.src = TEST_PATH;
document.body.appendChild(newImg);
triggerChildMutation();
@ -108,7 +108,7 @@ describe('LazyLoader', () => {
expect(LazyLoader.loadImage).toHaveBeenCalledWith(img);
expect(img.getAttribute('src')).toBe(TEST_PATH);
expect(img.getAttribute('data-src')).toBe(null);
expect(img.dataset.src).toBeUndefined();
expect(img).toHaveClass('js-lazy-loaded');
});

View File

@ -24,7 +24,7 @@ describe('initMembersApp', () => {
beforeEach(() => {
el = document.createElement('div');
el.setAttribute('data-members-data', dataAttribute);
el.dataset.membersData = dataAttribute;
window.gon = { current_user_id: 123 };
});

View File

@ -256,7 +256,7 @@ describe('Members Utils', () => {
beforeEach(() => {
el = document.createElement('div');
el.setAttribute('data-members-data', dataAttribute);
el.dataset.membersData = dataAttribute;
});
afterEach(() => {

View File

@ -78,8 +78,8 @@ describe('Markdown component', () => {
});
await nextTick();
expect(findLink().getAttribute('data-remote')).toBe(null);
expect(findLink().getAttribute('data-type')).toBe(null);
expect(findLink().dataset.remote).toBeUndefined();
expect(findLink().dataset.type).toBeUndefined();
});
describe('When parsing images', () => {

View File

@ -404,13 +404,13 @@ describe('Actions Notes Store', () => {
beforeEach(() => {
axiosMock.onDelete(endpoint).replyOnce(200, {});
document.body.setAttribute('data-page', '');
document.body.dataset.page = '';
});
afterEach(() => {
axiosMock.restore();
document.body.setAttribute('data-page', '');
document.body.dataset.page = '';
});
it('commits DELETE_NOTE and dispatches updateMergeRequestWidget', () => {
@ -440,7 +440,7 @@ describe('Actions Notes Store', () => {
it('dispatches removeDiscussionsFromDiff on merge request page', () => {
const note = { path: endpoint, id: 1 };
document.body.setAttribute('data-page', 'projects:merge_requests:show');
document.body.dataset.page = 'projects:merge_requests:show';
return testAction(
actions.removeNote,
@ -473,13 +473,13 @@ describe('Actions Notes Store', () => {
beforeEach(() => {
axiosMock.onDelete(endpoint).replyOnce(200, {});
document.body.setAttribute('data-page', '');
document.body.dataset.page = '';
});
afterEach(() => {
axiosMock.restore();
document.body.setAttribute('data-page', '');
document.body.dataset.page = '';
});
it('dispatches removeNote', () => {

View File

@ -17,11 +17,11 @@ describe('performance bar wrapper', () => {
performance.getEntriesByType = jest.fn().mockReturnValue([]);
peekWrapper.setAttribute('id', 'js-peek');
peekWrapper.setAttribute('data-env', 'development');
peekWrapper.setAttribute('data-request-id', '123');
peekWrapper.setAttribute('data-peek-url', '/-/peek/results');
peekWrapper.setAttribute('data-stats-url', 'https://log.gprd.gitlab.net/app/dashboards#/view/');
peekWrapper.setAttribute('data-profile-url', '?lineprofiler=true');
peekWrapper.dataset.env = 'development';
peekWrapper.dataset.requestId = '123';
peekWrapper.dataset.peekUrl = '/-/peek/results';
peekWrapper.dataset.statsUrl = 'https://log.gprd.gitlab.net/app/dashboards#/view/';
peekWrapper.dataset.profileUrl = '?lineprofiler=true';
mock = new MockAdapter(axios);

View File

@ -53,7 +53,7 @@ describe('Search autocomplete dropdown', () => {
};
const disableProjectIssues = () => {
document.querySelector('.js-search-project-options').setAttribute('data-issues-disabled', true);
document.querySelector('.js-search-project-options').dataset.issuesDisabled = true;
};
// Mock `gl` object in window for dashboard-specific page. App code will need it.

View File

@ -22,7 +22,7 @@ describe('User Popovers', () => {
const link = document.createElement('a');
link.classList.add('js-user-link');
link.setAttribute('data-user', '1');
link.dataset.user = '1';
return link;
};

View File

@ -95,10 +95,10 @@ export const setAssignees = (...users) => {
const input = document.createElement('input');
input.name = 'merge_request[assignee_ids][]';
input.value = user.id.toString();
input.setAttribute('data-avatar-url', user.avatar_url);
input.setAttribute('data-name', user.name);
input.setAttribute('data-username', user.username);
input.setAttribute('data-can-merge', user.can_merge);
input.dataset.avatarUrl = user.avatar_url;
input.dataset.name = user.name;
input.dataset.username = user.username;
input.dataset.canMerge = user.can_merge;
return input;
}),
);

View File

@ -193,9 +193,7 @@ describe('MRWidgetMerged', () => {
it('shows button to copy commit SHA to clipboard', () => {
expect(selectors.copyMergeShaButton).not.toBe(null);
expect(selectors.copyMergeShaButton.getAttribute('data-clipboard-text')).toBe(
vm.mr.mergeCommitSha,
);
expect(selectors.copyMergeShaButton.dataset.clipboardText).toBe(vm.mr.mergeCommitSha);
});
it('hides button to copy commit SHA if SHA does not exist', async () => {

View File

@ -427,7 +427,7 @@ describe('MrWidgetOptions', () => {
beforeEach(() => {
const favicon = document.createElement('link');
favicon.setAttribute('id', 'favicon');
favicon.setAttribute('data-original-href', faviconDataUrl);
favicon.dataset.originalHref = faviconDataUrl;
document.body.appendChild(favicon);
faviconElement = document.getElementById('favicon');

View File

@ -46,14 +46,14 @@ export const findMonacoDiffEditor = () =>
export const findAndSetEditorValue = async (value) => {
const editor = await findMonacoEditor();
const uri = editor.getAttribute('data-uri');
const { uri } = editor.dataset;
monacoEditor.getModel(uri).setValue(value);
};
export const getEditorValue = async () => {
const editor = await findMonacoEditor();
const uri = editor.getAttribute('data-uri');
const { uri } = editor.dataset;
return monacoEditor.getModel(uri).getValue();
};

View File

@ -0,0 +1,121 @@
# frozen_string_literal: true
require 'fast_spec_helper'
RSpec.describe Gitlab::Memory::Jemalloc do
let(:outdir) { Dir.mktmpdir }
after do
FileUtils.rm_rf(outdir)
end
context 'when jemalloc is loaded' do
let(:fiddle_func) { instance_double(::Fiddle::Function) }
context 'with JSON format' do
let(:format) { :json }
let(:output) { '{"a": 24}' }
before do
stub_stats_call(output, 'J')
end
describe '.stats' do
it 'returns stats JSON' do
expect(described_class.stats(format: format)).to eq(output)
end
end
describe '.dump_stats' do
it 'writes stats JSON file' do
described_class.dump_stats(path: outdir, format: format)
file = Dir.entries(outdir).find { |e| e.match(/jemalloc_stats\.#{$$}\.\d+\.json$/) }
expect(file).not_to be_nil
expect(File.read(File.join(outdir, file))).to eq(output)
end
end
end
context 'with text format' do
let(:format) { :text }
let(:output) { 'stats' }
before do
stub_stats_call(output)
end
describe '.stats' do
it 'returns a text report' do
expect(described_class.stats(format: format)).to eq(output)
end
end
describe '.dump_stats' do
it 'writes stats text file' do
described_class.dump_stats(path: outdir, format: format)
file = Dir.entries(outdir).find { |e| e.match(/jemalloc_stats\.#{$$}\.\d+\.txt$/) }
expect(file).not_to be_nil
expect(File.read(File.join(outdir, file))).to eq(output)
end
end
end
context 'with unsupported format' do
let(:format) { 'unsupported' }
describe '.stats' do
it 'raises an error' do
expect do
described_class.stats(format: format)
end.to raise_error(/format must be one of/)
end
end
describe '.dump_stats' do
it 'raises an error' do
expect do
described_class.dump_stats(path: outdir, format: format)
end.to raise_error(/format must be one of/)
end
end
end
end
context 'when jemalloc is not loaded' do
before do
expect(::Fiddle::Handle).to receive(:sym).and_raise(Fiddle::DLError)
end
describe '.stats' do
it 'returns nil' do
expect(described_class.stats).to be_nil
end
end
describe '.dump_stats' do
it 'does nothing' do
stub_env('LD_PRELOAD', nil)
described_class.dump_stats(path: outdir)
expect(Dir.empty?(outdir)).to be(true)
end
end
end
def stub_stats_call(output, expected_options = '')
# Stub function pointer to stats call.
func_pointer = Fiddle::Pointer.new(0xd34db33f)
expect(::Fiddle::Handle).to receive(:sym).with('malloc_stats_print').and_return(func_pointer)
# Stub actual function call.
expect(::Fiddle::Function).to receive(:new)
.with(func_pointer, anything, anything)
.and_return(fiddle_func)
expect(fiddle_func).to receive(:call).with(anything, nil, expected_options) do |callback, _, options|
callback.call(nil, output)
end
end
end

View File

@ -165,6 +165,14 @@ RSpec.describe Repositories::ChangelogService do
expect { request.call(sha3) }.not_to exceed_query_limit(control.count)
end
context 'when one of the commits does not exist' do
let(:service) { described_class.new(project, creator, version: '1.0.0', from: 'master', to: '54321') }
it 'raises an exception' do
expect { service.execute(commit_to_changelog: false) }.to raise_error(Gitlab::Changelog::Error)
end
end
context 'when commit range exceeds the limit' do
let(:service) { described_class.new(project, creator, version: '1.0.0', from: sha1) }

View File

@ -34,6 +34,7 @@ var zipSubcommandsErrorsCounter = promauto.NewCounterVec(
}, []string{"error"})
type artifactsUploadProcessor struct {
opts *destination.UploadOpts
format string
SavedFileTracker
@ -42,8 +43,15 @@ type artifactsUploadProcessor struct {
// Artifacts is like a Multipart but specific for artifacts upload.
func Artifacts(myAPI *api.API, h http.Handler, p Preparer) http.Handler {
return myAPI.PreAuthorizeHandler(func(w http.ResponseWriter, r *http.Request, a *api.Response) {
opts, err := p.Prepare(a)
if err != nil {
helper.Fail500(w, r, fmt.Errorf("UploadArtifacts: error preparing file storage options"))
return
}
format := r.URL.Query().Get(ArtifactFormatKey)
mg := &artifactsUploadProcessor{
opts: opts,
format: format,
SavedFileTracker: SavedFileTracker{Request: r},
}
@ -53,7 +61,10 @@ func Artifacts(myAPI *api.API, h http.Handler, p Preparer) http.Handler {
func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context, file *destination.FileHandler) (*destination.FileHandler, error) {
metaOpts := &destination.UploadOpts{
LocalTempPath: os.TempDir(),
LocalTempPath: a.opts.LocalTempPath,
}
if metaOpts.LocalTempPath == "" {
metaOpts.LocalTempPath = os.TempDir()
}
fileName := file.LocalPath