Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-03-16 21:09:14 +00:00
parent 204df35415
commit 06b21ad63c
55 changed files with 887 additions and 187 deletions

View file

@ -42,9 +42,6 @@
expire_in: 7 days
when: always
.parallel-qa-base:
parallel: 5
.allure-report-base:
image:
name: ${GITLAB_DEPENDENCY_PROXY}andrcuns/allure-report-publisher:0.4.2
@ -82,7 +79,7 @@ review-qa-reliable:
extends:
- .review-qa-base
- .review:rules:review-qa-reliable
- .parallel-qa-base
parallel: 8
retry: 1
variables:
QA_RUN_TYPE: review-qa-reliable
@ -92,7 +89,7 @@ review-qa-all:
extends:
- .review-qa-base
- .review:rules:review-qa-all
- .parallel-qa-base
parallel: 5
variables:
QA_RUN_TYPE: review-qa-all
QA_SCENARIO: Test::Instance::All

View file

@ -1,6 +1,19 @@
<script>
/* eslint-disable @gitlab/vue-require-i18n-strings */
import { GlAlert, GlSprintf } from '@gitlab/ui';
import { __ } from '~/locale';
export const i18n = {
title: __('Too many changes to show.'),
plainDiff: __('Plain diff'),
emailPatch: __('Email patch'),
};
export default {
i18n,
components: {
GlAlert,
GlSprintf,
},
props: {
total: {
type: String,
@ -23,17 +36,28 @@ export default {
</script>
<template>
<div class="alert alert-warning">
<h4>
{{ __('Too many changes to show.') }}
<div class="float-right">
<a :href="plainDiffPath" class="btn btn-sm"> {{ __('Plain diff') }} </a>
<a :href="emailPatchPath" class="btn btn-sm"> {{ __('Email patch') }} </a>
</div>
</h4>
<p>
To preserve performance only <strong> {{ visible }} of {{ total }} </strong> files are
displayed.
</p>
</div>
<gl-alert
variant="warning"
:title="$options.i18n.title"
:primary-button-text="$options.i18n.plainDiff"
:primary-button-link="plainDiffPath"
:secondary-button-text="$options.i18n.emailPatch"
:secondary-button-link="emailPatchPath"
:dismissible="false"
>
<gl-sprintf
:message="
sprintf(
__(
'To preserve performance only %{strongStart}%{visible} of %{total}%{strongEnd} files are displayed.',
),
{ visible, total },
)
"
>
<template #strong="{ content }">
<strong>{{ content }}</strong>
</template>
</gl-sprintf>
</gl-alert>
</template>

View file

@ -147,6 +147,9 @@ export default {
fileType() {
return this.previewMode?.id || '';
},
showTabs() {
return !this.shouldHideEditor && this.isEditModeActive && this.previewMode;
},
},
watch: {
'file.name': {
@ -194,6 +197,9 @@ export default {
this.refreshEditorDimensions();
}
},
showTabs() {
this.$nextTick(() => this.refreshEditorDimensions());
},
rightPaneIsOpen() {
this.refreshEditorDimensions();
},
@ -410,7 +416,7 @@ export default {
}
},
refreshEditorDimensions() {
if (this.showEditor) {
if (this.showEditor && this.editor) {
this.editor.updateDimensions();
}
},
@ -495,7 +501,7 @@ export default {
<template>
<div id="ide" class="blob-viewer-container blob-editor-container">
<div v-if="!shouldHideEditor && isEditModeActive" class="ide-mode-tabs clearfix">
<div v-if="showTabs" class="ide-mode-tabs clearfix">
<ul class="nav-links float-left border-bottom-0">
<li :class="editTabCSS">
<a
@ -506,7 +512,7 @@ export default {
>{{ __('Edit') }}</a
>
</li>
<li v-if="previewMode" :class="previewTabCSS">
<li :class="previewTabCSS">
<a
href="javascript:void(0);"
role="button"

View file

@ -130,19 +130,6 @@ export const isInViewport = (el, offset = {}) => {
);
};
export const parseUrl = (url) => {
const parser = document.createElement('a');
parser.href = url;
return parser;
};
export const parseUrlPathname = (url) => {
const parsedUrl = parseUrl(url);
// parsedUrl.pathname will return an absolute path for Firefox and a relative path for IE11
// We have to make sure we always have an absolute path.
return parsedUrl.pathname.charAt(0) === '/' ? parsedUrl.pathname : `/${parsedUrl.pathname}`;
};
export const isMetaKey = (e) => e.metaKey || e.ctrlKey || e.altKey || e.shiftKey;
// Identify following special clicks

View file

@ -18,6 +18,20 @@ function resetRegExp(regex) {
return regex;
}
/**
* Returns the absolute pathname for a relative or absolute URL string.
*
* A few examples of inputs and outputs:
* 1) 'http://a.com/b/c/d' => '/b/c/d'
* 2) '/b/c/d' => '/b/c/d'
* 3) 'b/c/d' => '/b/c/d' or '[path]/b/c/d' depending of the current path of the
* document.location
*/
export const parseUrlPathname = (url) => {
const { pathname } = new URL(url, document.location.href);
return pathname;
};
// Returns a decoded url parameter value
// - Treats '+' as '%20'
function decodeUrlParameter(val) {

View file

@ -2,13 +2,8 @@
import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
import $ from 'jquery';
import Vue from 'vue';
import {
getCookie,
parseUrlPathname,
isMetaClick,
parseBoolean,
scrollToElement,
} from '~/lib/utils/common_utils';
import { getCookie, isMetaClick, parseBoolean, scrollToElement } from '~/lib/utils/common_utils';
import { parseUrlPathname } from '~/lib/utils/url_utility';
import createEventHub from '~/helpers/event_hub_factory';
import BlobForkSuggestion from './blob/blob_fork_suggestion';
import Diff from './diff';

View file

@ -1,7 +1,11 @@
<script>
import { GlSprintf, GlAlert, GlLink } from '@gitlab/ui';
import { ALERT_MESSAGES, ADMIN_GARBAGE_COLLECTION_TIP } from '../../constants/index';
import {
ALERT_MESSAGES,
ADMIN_GARBAGE_COLLECTION_TIP,
ALERT_DANGER_IMPORTING,
} from '../../constants/index';
export default {
components: {
@ -23,6 +27,7 @@ export default {
},
},
garbageCollectionHelpPagePath: { type: String, required: false, default: '' },
containerRegistryImportingHelpPagePath: { type: String, required: false, default: '' },
isAdmin: {
type: Boolean,
default: false,
@ -48,6 +53,11 @@ export default {
}
return config;
},
alertHref() {
return this.deleteAlertType === ALERT_DANGER_IMPORTING
? this.containerRegistryImportingHelpPagePath
: this.garbageCollectionHelpPagePath;
},
},
};
</script>
@ -61,7 +71,7 @@ export default {
>
<gl-sprintf :message="deleteAlertConfig.message">
<template #docLink="{ content }">
<gl-link :href="garbageCollectionHelpPagePath" target="_blank">
<gl-link :href="alertHref" target="_blank">
{{ content }}
</gl-link>
</template>

View file

@ -93,6 +93,10 @@ export const DETAILS_DELETE_IMAGE_ERROR_MESSAGE = s__(
'ContainerRegistry|Something went wrong while scheduling the image for deletion.',
);
export const DETAILS_IMPORTING_ERROR_MESSAGE = s__(
'ContainerRegistry|Tags temporarily cannot be marked for deletion. Please try again in a few minutes. %{docLinkStart}More details%{docLinkEnd}.',
);
export const DELETE_IMAGE_CONFIRMATION_TITLE = s__('ContainerRegistry|Delete image repository?');
export const DELETE_IMAGE_CONFIRMATION_TEXT = s__(
'ContainerRegistry|Deleting the image repository will delete all images and tags inside. This action cannot be undone. Please type the following to confirm: %{code}',
@ -133,6 +137,7 @@ export const ALERT_DANGER_TAG = 'danger_tag';
export const ALERT_SUCCESS_TAGS = 'success_tags';
export const ALERT_DANGER_TAGS = 'danger_tags';
export const ALERT_DANGER_IMAGE = 'danger_image';
export const ALERT_DANGER_IMPORTING = 'danger_importing';
export const DELETE_SCHEDULED = 'DELETE_SCHEDULED';
export const DELETE_FAILED = 'DELETE_FAILED';
@ -143,6 +148,7 @@ export const ALERT_MESSAGES = {
[ALERT_SUCCESS_TAGS]: DELETE_TAGS_SUCCESS_MESSAGE,
[ALERT_DANGER_TAGS]: DELETE_TAGS_ERROR_MESSAGE,
[ALERT_DANGER_IMAGE]: DETAILS_DELETE_IMAGE_ERROR_MESSAGE,
[ALERT_DANGER_IMPORTING]: DETAILS_IMPORTING_ERROR_MESSAGE,
};
export const UNFINISHED_STATUS = 'UNFINISHED';

View file

@ -20,6 +20,7 @@ import {
ALERT_SUCCESS_TAGS,
ALERT_DANGER_TAGS,
ALERT_DANGER_IMAGE,
ALERT_DANGER_IMPORTING,
FETCH_IMAGES_LIST_ERROR_MESSAGE,
UNFINISHED_STATUS,
MISSING_OR_DELETED_IMAGE_BREADCRUMB,
@ -32,6 +33,8 @@ import deleteContainerRepositoryTagsMutation from '../graphql/mutations/delete_c
import getContainerRepositoryDetailsQuery from '../graphql/queries/get_container_repository_details.query.graphql';
import getContainerRepositoryTagsQuery from '../graphql/queries/get_container_repository_tags.query.graphql';
const REPOSITORY_IMPORTING_ERROR_MESSAGE = 'repository importing';
export default {
name: 'RegistryDetailsPage',
components: {
@ -147,12 +150,17 @@ export default {
});
if (data?.destroyContainerRepositoryTags?.errors[0]) {
throw new Error();
throw new Error(data.destroyContainerRepositoryTags.errors[0]);
}
this.deleteAlertType =
itemsToBeDeleted.length === 0 ? ALERT_SUCCESS_TAG : ALERT_SUCCESS_TAGS;
} catch (e) {
this.deleteAlertType = itemsToBeDeleted.length === 0 ? ALERT_DANGER_TAG : ALERT_DANGER_TAGS;
if (e.message === REPOSITORY_IMPORTING_ERROR_MESSAGE) {
this.deleteAlertType = ALERT_DANGER_IMPORTING;
} else {
this.deleteAlertType =
itemsToBeDeleted.length === 0 ? ALERT_DANGER_TAG : ALERT_DANGER_TAGS;
}
}
this.mutationLoading = false;
@ -188,6 +196,7 @@ export default {
<delete-alert
v-model="deleteAlertType"
:garbage-collection-help-page-path="config.garbageCollectionHelpPagePath"
:container-registry-importing-help-page-path="config.containerRegistryImportingHelpPagePath"
:is-admin="config.isAdmin"
class="gl-my-2"
/>

View file

@ -82,6 +82,13 @@ export default {
};
this.loading = false;
if (!this.commitMessageIsTouched) {
this.commitMessage = this.state.defaultMergeCommitMessage;
}
if (!this.squashCommitMessageIsTouched) {
this.squashCommitMessage = this.state.defaultSquashCommitMessage;
}
if (this.state.mergeTrainsCount !== null && this.state.mergeTrainsCount !== undefined) {
this.initPolling();
}
@ -133,9 +140,11 @@ export default {
isMakingRequest: false,
isMergingImmediately: false,
commitMessage: this.mr.commitMessage,
commitMessageIsTouched: false,
squashBeforeMerge: this.mr.squashIsSelected,
isSquashReadOnly: this.mr.squashIsReadonly,
squashCommitMessage: this.mr.squashCommitMessage,
squashCommitMessageIsTouched: false,
isPipelineFailedModalVisibleMergeTrain: false,
isPipelineFailedModalVisibleNormalMerge: false,
editCommitMessage: false,
@ -465,6 +474,14 @@ export default {
});
});
},
setCommitMessage(val) {
this.commitMessage = val;
this.commitMessageIsTouched = true;
},
setSquashCommitMessage(val) {
this.squashCommitMessage = val;
this.squashCommitMessageIsTouched = true;
},
},
i18n: {
mergeCommitTemplateHintText: s__(
@ -630,21 +647,23 @@ export default {
>
<commit-edit
v-if="shouldShowSquashEdit"
v-model="squashCommitMessage"
:value="squashCommitMessage"
:label="__('Squash commit message')"
input-id="squash-message-edit"
class="gl-m-0! gl-p-0!"
@input="setSquashCommitMessage"
>
<template #header>
<commit-message-dropdown v-model="squashCommitMessage" :commits="commits" />
<commit-message-dropdown :commits="commits" @input="setSquashCommitMessage" />
</template>
</commit-edit>
<commit-edit
v-if="shouldShowMergeEdit"
v-model="commitMessage"
:value="commitMessage"
:label="__('Merge commit message')"
input-id="merge-message-edit"
class="gl-m-0! gl-p-0!"
@input="setCommitMessage"
/>
<li class="gl-m-0! gl-p-0!">
<p class="form-text text-muted">
@ -748,20 +767,22 @@ export default {
<ul class="border-top content-list commits-list flex-list">
<commit-edit
v-if="shouldShowSquashEdit"
v-model="squashCommitMessage"
:value="squashCommitMessage"
:label="__('Squash commit message')"
input-id="squash-message-edit"
squash
@input="setSquashCommitMessage"
>
<template #header>
<commit-message-dropdown v-model="squashCommitMessage" :commits="commits" />
<commit-message-dropdown :commits="commits" @input="setSquashCommitMessage" />
</template>
</commit-edit>
<commit-edit
v-if="shouldShowMergeEdit"
v-model="commitMessage"
:value="commitMessage"
:label="__('Merge commit message')"
input-id="merge-message-edit"
@input="setCommitMessage"
/>
<li>
<p class="form-text text-muted">

View file

@ -1,9 +1,11 @@
fragment ReadyToMerge on Project {
__typename
id
onlyAllowMergeIfPipelineSucceeds
mergeRequestsFfOnlyEnabled
squashReadOnly
mergeRequest(iid: $iid) {
__typename
id
autoMergeEnabled
shouldRemoveSourceBranch

View file

@ -181,7 +181,8 @@ module MarkupHelper
wiki: wiki,
repository: wiki.repository,
page_slug: wiki_page.slug,
issuable_reference_expansion_enabled: true
issuable_reference_expansion_enabled: true,
requested_path: wiki_page.path
).merge(render_wiki_content_context_container(wiki))
end
@ -263,7 +264,7 @@ module MarkupHelper
end
def asciidoc_unsafe(text, context = {})
context.merge!(
context.reverse_merge!(
commit: @commit,
ref: @ref,
requested_path: @path

View file

@ -277,7 +277,19 @@ class User < ApplicationRecord
after_update :username_changed_hook, if: :saved_change_to_username?
after_destroy :post_destroy_hook
after_destroy :remove_key_cache
after_create :add_primary_email_to_emails!, if: :confirmed?
after_save if: -> { saved_change_to_email? && confirmed? } do
email_to_confirm = self.emails.find_by(email: self.email)
if email_to_confirm.present?
if skip_confirmation_period_expiry_check
email_to_confirm.force_confirm
else
email_to_confirm.confirm
end
else
add_primary_email_to_emails!
end
end
after_commit(on: :update) do
if previous_changes.key?('email')
# Add the old primary email to Emails if not added already - this should be removed
@ -2013,29 +2025,6 @@ class User < ApplicationRecord
ci_job_token_scope.present?
end
# override from Devise::Models::Confirmable
#
# Add the primary email to user.emails (or confirm it if it was already
# present) when the primary email is confirmed.
def confirm(args = {})
saved = super(args)
return false unless saved
email_to_confirm = self.emails.find_by(email: self.email)
if email_to_confirm.present?
if skip_confirmation_period_expiry_check
email_to_confirm.force_confirm(args)
else
email_to_confirm.confirm(args)
end
else
add_primary_email_to_emails!
end
saved
end
def user_project
strong_memoize(:user_project) do
personal_projects.find_by(path: username, visibility_level: Gitlab::VisibilityLevel::PUBLIC)

View file

@ -12,6 +12,7 @@
"registry_host_url_with_port" => escape_once(registry_config.host_port),
"garbage_collection_help_page_path" => help_page_path('administration/packages/container_registry', anchor: 'container-registry-garbage-collection'),
"run_cleanup_policies_help_page_path" => help_page_path('administration/packages/container_registry', anchor: 'run-the-cleanup-policy-now'),
"container_registry_importing_help_page_path" => help_page_path('user/packages/container_registry/index', anchor: 'tags-temporarily-cannot-be-marked-for-deletion'),
"is_admin": current_user&.admin.to_s,
is_group_page: "true",
"group_path": @group.full_path,

View file

@ -40,12 +40,10 @@
= _('Time based: Yes')
= form_tag profile_two_factor_auth_path, method: :post do |f|
- if @error
.gl-alert.gl-alert-danger.gl-mb-5
.gl-alert-container
.gl-alert-content
%p.gl-alert-body.gl-md-0
= @error[:message]
= link_to _('Try the troubleshooting steps here.'), help_page_path('user/profile/account/two_factor_authentication.md', anchor: 'troubleshooting'), target: '_blank', rel: 'noopener noreferrer'
= render 'shared/global_alert', title: @error[:message], variant: :danger, dismissible: false do
.gl-alert-body
= link_to _('Try the troubleshooting steps here.'), help_page_path('user/profile/account/two_factor_authentication.md', anchor: 'troubleshooting'), target: '_blank', rel: 'noopener noreferrer'
.form-group
= label_tag :pin_code, _('Pin code'), class: "label-bold"
= text_field_tag :pin_code, nil, class: "form-control gl-form-input", required: true, data: { qa_selector: 'pin_code_field' }

View file

@ -15,6 +15,7 @@
"expiration_policy_help_page_path" => help_page_path('user/packages/container_registry/reduce_container_registry_storage', anchor: 'cleanup-policy'),
"garbage_collection_help_page_path" => help_page_path('administration/packages/container_registry', anchor: 'container-registry-garbage-collection'),
"run_cleanup_policies_help_page_path" => help_page_path('administration/packages/container_registry', anchor: 'run-the-cleanup-policy-now'),
"container_registry_importing_help_page_path" => help_page_path('user/packages/container_registry/index', anchor: 'tags-temporarily-cannot-be-marked-for-deletion'),
"project_path": @project.full_path,
"gid_prefix": container_repository_gid_prefix,
"is_admin": current_user&.admin.to_s,

View file

@ -570,6 +570,15 @@
:weight: 1
:idempotent:
:tags: []
- :name: cronjob:quality_test_data_cleanup
:worker_name: Quality::TestDataCleanupWorker
:feature_category: :quality_management
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:releases_manage_evidence
:worker_name: Releases::ManageEvidenceWorker
:feature_category: :release_evidence

View file

@ -0,0 +1,33 @@
# frozen_string_literal: true
module Quality
class TestDataCleanupWorker
include ApplicationWorker
data_consistency :always
feature_category :quality_management
urgency :low
include CronjobQueue
idempotent!
KEEP_RECENT_DATA_DAY = 3
GROUP_PATH_PATTERN = 'test-group-fulfillment'
GROUP_OWNER_EMAIL_PATTERN = %w(test-user- gitlab-qa-user qa-user-).freeze
# Remove test groups generated in E2E tests on gstg
# rubocop: disable CodeReuse/ActiveRecord
def perform
return unless Gitlab.staging?
Group.where('path like ?', "#{GROUP_PATH_PATTERN}%").where('created_at < ?', KEEP_RECENT_DATA_DAY.days.ago).each do |group|
next unless GROUP_OWNER_EMAIL_PATTERN.any? { |pattern| group.owners.first.email.include?(pattern) }
with_context(namespace: group, user: group.owners.first) do
Groups::DestroyService.new(group, group.owners.first).execute
end
end
end
# rubocop: enable CodeReuse/ActiveRecord
end
end

View file

@ -1049,7 +1049,10 @@ For example:
## Mattermost Logs
For Omnibus GitLab installations, Mattermost logs are in `/var/log/gitlab/mattermost/mattermost.log`.
For Omnibus GitLab installations, Mattermost logs are in these locations:
- `/var/log/gitlab/mattermost/mattermost.log`
- `/var/log/gitlab/mattermost/current`
## Workhorse Logs

View file

@ -10,6 +10,7 @@ type: reference, api
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/212199) in GitLab 13.5.
> - The `encoding` field was [added](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/81150) in GitLab 14.9.
> - The `render_html` attribute was [added](https://gitlab.com/gitlab-org/gitlab/-/issues/336792) in GitLab 14.9.
> - The `version` attribute was [added](https://gitlab.com/gitlab-org/gitlab/-/issues/336792) in GitLab 14.9.
The [group wikis](../user/project/wiki/group.md) API is available only in APIv4.
An API for [project wikis](wikis.md) is also available.
@ -71,6 +72,7 @@ GET /groups/:id/wikis/:slug
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](index.md#namespaced-path-encoding) |
| `slug` | string | yes | URL-encoded slug (a unique string) of the wiki page, such as `dir%2Fpage_name` |
| `render_html` | boolean | no | Return the rendered HTML of the wiki page |
| `version` | string | no | Wiki page version sha |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/1/wikis/home"

View file

@ -8,6 +8,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
> - The `encoding` field was [added](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/81150) in GitLab 14.9.
> - The `render_html` attribute was [added](https://gitlab.com/gitlab-org/gitlab/-/issues/336792) in GitLab 14.9.
> - The `version` attribute was [added](https://gitlab.com/gitlab-org/gitlab/-/issues/336792) in GitLab 14.9.
The project [wikis](../user/project/wiki/index.md) API is available only in APIv4.
An API for [group wikis](group_wikis.md) is also available.
@ -69,6 +70,7 @@ GET /projects/:id/wikis/:slug
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](index.md#namespaced-path-encoding) |
| `slug` | string | yes | URLencoded slug (a unique string) of the wiki page, such as `dir%2Fpage_name` |
| `render_html` | boolean | no | Return the rendered HTML of the wiki page |
| `version` | string | no | Wiki page version sha |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/wikis/home"

Binary file not shown.

After

Width:  |  Height:  |  Size: 65 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 55 KiB

View file

@ -0,0 +1,62 @@
---
type: reference, dev
stage: create
group: code_review
info: "See the Technical Writers assigned to Development Guidelines: https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments-to-development-guidelines"
---
# Merge Request Concepts
**NOTE**:
The documentation below is the single source of truth for the merge request terminology and functionality.
## Overview
The merge request is made up of several different key components and ideas that encompass the overall merge request experience. These concepts sometimes have competing and confusing terminology or overlap with other concepts. The concepts this will cover are:
1. Merge widget
1. Report widgets
1. Merge checks
1. Approval rules
### Merge widget
The merge widget is the component of the merge request where the `merge` button exists:
![merge widget](../img/merge_widget_v14_7.png)
This area of the merge request is where all of the options and commit messages are defined prior to merging. It also contains information about what is in the merge request, what issues may be closed, and other important information to the merging process.
### Report widgets
Reports are widgets within the merge request that report information about changes within the merge request. These widgets provide information to better help the author understand the changes and further improvements to the proposed changes.
[Design Documentation](https://design.gitlab.com/regions/merge-request-reports)
![merge request reports](../img/merge_request_reports_v14_7.png)
### Merge checks
Merge checks are statuses that can either pass or fail and conditionally control the availability of the merge button being available within a merge request. The key distinguishing factor in a merge check is that users **do not** interact with the merge checks inside of the merge request, but are able to influence whether or not the check passes or fails. Results from the check are processed as true/false to determine whether or not a merge request can be merged. Examples include:
1. merge conflicts
1. pipeline success
1. threads resolution
1. [external status checks](../../user/project/merge_requests/status_checks.md)
1. required approvals
When all of the required merge checks are satisfied a merge request becomes mergeable.
### Approvals
Approval rules specify users that are required to or can optionally approve a merge request based on some kind of organizational policy. When approvals are required, they effectively become a required merge check. The key differentiator between merge checks and approval rules is that users **do** interact with approval rules, by deciding to approve the merge request.
Additionally, approval settings provide configuration options to define how those approval rules are applied in a merge request. They can set limitations, add requirements, or modify approvals.
Examples of approval rules and settings include:
1. [merge request approval rules](../../user/project/merge_requests/approvals/rules.md)
1. [code owner approvals](../../user/project/code_owners.md)
1. [security approvals](../../user/application_security/index.md#security-approvals-in-merge-requests)
1. [prevent editing approval rules](../../user/project/merge_requests/approvals/settings.md#prevent-editing-approval-rules-in-merge-requests)]
1. [remove all approvals when commits are added](../../user/project/merge_requests/approvals/settings.md#remove-all-approvals-when-commits-are-added-to-the-source-branch)

View file

@ -695,3 +695,10 @@ There may be some errors not properly cached. Follow these steps to investigate
Once adjusted, trigger another tag deletion. You should be able to successfully delete tags.
Follow [this issue](https://gitlab.com/gitlab-org/container-registry/-/issues/551) for details.
### Tags temporarily cannot be marked for deletion
GitLab is [migrating to the next generation of the Container Registry](https://gitlab.com/groups/gitlab-org/-/epics/5523).
During the migration, you may encounter difficulty deleting tags.
If you encounter an error, it's likely that your image repository is in the process of being migrated.
Please wait a few minutes and try again.

View file

@ -84,7 +84,7 @@ Commit message templates support these variables:
| `%{first_commit}` | Full message of the first commit in merge request diff. | `Update README.md` |
| `%{first_multiline_commit}` | Full message of the first commit that's not a merge commit and has more than one line in message body. Merge request title if all commits aren't multiline. | `Update README.md`<br><br>`Improved project description in readme file.` |
| `%{url}` | Full URL to the merge request. | `https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1` |
| `%{approved_by}` | Line-separated list of the merge request approvers. This value is not updated until the first page refresh after an approval. | `Approved-by: Sidney Jones <sjones@example.com>` <br> `Approved-by: Zhang Wei <zwei@example.com>` |
| `%{approved_by}` | Line-separated list of the merge request approvers. | `Approved-by: Sidney Jones <sjones@example.com>` <br> `Approved-by: Zhang Wei <zwei@example.com>` |
| `%{merged_by}` | User who merged the merge request. | `Alex Garcia <agarcia@example.com>` |
| `%{co_authored_by}` | Names and emails of commit authors in a `Co-authored-by` Git commit trailer format. Limited to authors of 100 most recent commits in merge request. | `Co-authored-by: Zane Doe <zdoe@example.com>` <br> `Co-authored-by: Blake Smith <bsmith@example.com>` |
| `%{all_commits}` | Messages from all commits in the merge request. Limited to 100 most recent commits. Skips commit bodies exceeding 100KiB and merge commit messages. | `* Feature introduced` <br><br> `This commit implements feature` <br> `Changelog:added` <br><br> `* Bug fixed` <br><br> `* Documentation improved` <br><br>`This commit introduced better docs.`|
@ -92,6 +92,10 @@ Commit message templates support these variables:
Any line containing only an empty variable is removed. If the line to be removed is both
preceded and followed by an empty line, the preceding empty line is also removed.
After you edit a commit message on an open merge request, GitLab will
not automatically update the commit message again.
To restore the commit message to the project template, reload the page.
## Related topics
- [Squash and merge](squash_and_merge.md).

View file

@ -6,7 +6,7 @@ module API
include ::MarkupHelper
expose :content do |wiki_page, options|
options[:render_html] ? render_wiki_content(wiki_page) : wiki_page.content
options[:render_html] ? render_wiki_content(wiki_page, ref: wiki_page.version.id) : wiki_page.content
end
expose :encoding do |wiki_page|

View file

@ -13,8 +13,8 @@ module API
raise "Unknown wiki container #{kind}"
end
def wiki_page
Wiki.for_container(container, current_user).find_page(params[:slug]) || not_found!('Wiki Page')
def wiki_page(version = nil)
Wiki.for_container(container, current_user).find_page(params[:slug], version.presence) || not_found!('Wiki Page')
end
def commit_params(attrs)

View file

@ -45,12 +45,13 @@ module API
end
params do
requires :slug, type: String, desc: 'The slug of a wiki page'
optional :version, type: String, desc: 'The version hash of a wiki page'
optional :render_html, type: Boolean, default: false, desc: 'Render content to HTML'
end
get ':id/wikis/:slug' do
authorize! :read_wiki, container
present wiki_page, with: Entities::WikiPage, render_html: params[:render_html]
present wiki_page(params[:version]), with: Entities::WikiPage, render_html: params[:render_html]
end
desc 'Create a wiki page' do

View file

@ -3,6 +3,8 @@
module Gitlab
module Database
module BackgroundMigration
SplitAndRetryError = Class.new(StandardError)
class BatchedJob < SharedModel
include EachBatch
include FromUnion
@ -11,6 +13,8 @@ module Gitlab
MAX_ATTEMPTS = 3
STUCK_JOBS_TIMEOUT = 1.hour.freeze
TIMEOUT_EXCEPTIONS = [ActiveRecord::StatementTimeout, ActiveRecord::ConnectionTimeoutError,
ActiveRecord::AdapterTimeout, ActiveRecord::LockWaitTimeout].freeze
belongs_to :batched_migration, foreign_key: :batched_background_migration_id
has_many :batched_job_transition_logs, foreign_key: :batched_background_migration_job_id
@ -51,6 +55,16 @@ module Gitlab
job.metrics = {}
end
after_transition any => :failed do |job, transition|
error_hash = transition.args.find { |arg| arg[:error].present? }
exception = error_hash&.fetch(:error)
job.split_and_retry! if job.can_split?(exception)
rescue SplitAndRetryError => error
Gitlab::AppLogger.error(message: error.message, batched_job_id: job.id)
end
after_transition do |job, transition|
error_hash = transition.args.find { |arg| arg[:error].present? }
@ -79,13 +93,17 @@ module Gitlab
duration.to_f / batched_migration.interval
end
def can_split?(exception)
attempts >= MAX_ATTEMPTS && TIMEOUT_EXCEPTIONS.include?(exception&.class) && batch_size > sub_batch_size
end
def split_and_retry!
with_lock do
raise 'Only failed jobs can be split' unless failed?
raise SplitAndRetryError, 'Only failed jobs can be split' unless failed?
new_batch_size = batch_size / 2
raise 'Job cannot be split further' if new_batch_size < 1
raise SplitAndRetryError, 'Job cannot be split further' if new_batch_size < 1
batching_strategy = batched_migration.batch_class.new(connection: self.class.connection)
next_batch_bounds = batching_strategy.next_batch(

View file

@ -9723,6 +9723,9 @@ msgstr ""
msgid "ContainerRegistry|Tags successfully marked for deletion."
msgstr ""
msgid "ContainerRegistry|Tags temporarily cannot be marked for deletion. Please try again in a few minutes. %{docLinkStart}More details%{docLinkEnd}."
msgstr ""
msgid "ContainerRegistry|Tags that match these rules are %{strongStart}kept%{strongEnd}, even if they match a removal rule below. The %{secondStrongStart}latest%{secondStrongEnd} tag is always kept."
msgstr ""
@ -38768,6 +38771,9 @@ msgstr ""
msgid "To personalize your GitLab experience, we'd like to know a bit more about you"
msgstr ""
msgid "To preserve performance only %{strongStart}%{visible} of %{total}%{strongEnd} files are displayed."
msgstr ""
msgid "To preserve performance only %{strong_open}%{display_size} of %{real_size}%{strong_close} files are displayed."
msgstr ""

View file

@ -385,6 +385,18 @@ module QA
end
end
def remove_via_api!
super
Support::Retrier.retry_until(max_duration: 60, sleep_interval: 1, message: "Waiting for #{self.class.name} to be removed") do
!exists?
rescue InternalServerError
# Retry on transient errors that are likely to be due to race conditions between concurrent delete operations
# when parts of a resource are stored in multiple tables
false
end
end
protected
# Return subset of fields for comparing projects

View file

@ -118,5 +118,14 @@ FactoryBot.define do
create(:crm_settings, group: group, enabled: true)
end
end
trait :test_group do
path { "test-group-fulfillment#{SecureRandom.hex(4)}" }
created_at { 4.days.ago }
after(:create) do |group|
group.add_owner(create(:user, email: "test-user-#{SecureRandom.hex(4)}@test.com"))
end
end
end
end

View file

@ -97,6 +97,8 @@ RSpec.describe 'Container Registry', :js do
expect(find('.modal .modal-title')).to have_content _('Remove tag')
find('.modal .modal-footer .btn-danger').click
end
it_behaves_like 'rejecting tags destruction for an importing repository on', tags: ['latest']
end
end

View file

@ -45,8 +45,8 @@ RSpec.describe 'Merge request > User sees diff', :js do
visit diffs_project_merge_request_path(project, merge_request)
page.within('.alert') do
expect(page).to have_text("Too many changes to show. Plain diff Email patch To preserve performance only 3 of 3+ files are displayed.")
page.within('.gl-alert') do
expect(page).to have_text("Too many changes to show. To preserve performance only 3 of 3+ files are displayed. Plain diff Email patch")
end
end
end

View file

@ -103,6 +103,8 @@ RSpec.describe 'Container Registry', :js do
find('.modal .modal-footer .btn-danger').click
end
it_behaves_like 'rejecting tags destruction for an importing repository on', tags: ['1']
it('pagination navigate to the second page') do
visit_next_page

View file

@ -1,4 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { mount } from '@vue/test-utils';
import { GlButton } from '@gitlab/ui';
import { __ } from '~/locale';
import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
const propsData = {
@ -12,7 +14,7 @@ describe('HiddenFilesWarning', () => {
let wrapper;
const createComponent = () => {
wrapper = shallowMount(HiddenFilesWarning, {
wrapper = mount(HiddenFilesWarning, {
propsData,
});
};
@ -26,22 +28,20 @@ describe('HiddenFilesWarning', () => {
});
it('has a correct plain diff URL', () => {
const plainDiffLink = wrapper.findAll('a').wrappers.filter((x) => x.text() === 'Plain diff')[0];
const plainDiffLink = wrapper.findAllComponents(GlButton).at(0);
expect(plainDiffLink.attributes('href')).toBe(propsData.plainDiffPath);
});
it('has a correct email patch URL', () => {
const emailPatchLink = wrapper
.findAll('a')
.wrappers.filter((x) => x.text() === 'Email patch')[0];
const emailPatchLink = wrapper.findAllComponents(GlButton).at(1);
expect(emailPatchLink.attributes('href')).toBe(propsData.emailPatchPath);
});
it('has a correct visible/total files text', () => {
const filesText = wrapper.find('strong');
expect(filesText.text()).toBe('5 of 10');
expect(wrapper.text()).toContain(
__('To preserve performance only 5 of 10 files are displayed.'),
);
});
});

View file

@ -130,6 +130,25 @@ RSpec.describe Projects::MergeRequestsController, '(JavaScript fixtures)', type:
expect(response).to be_successful
end
describe GraphQL::Query, type: :request do
include ApiHelpers
include GraphqlHelpers
context 'merge request in state readyToMerge query' do
base_input_path = 'vue_merge_request_widget/queries/states/'
base_output_path = 'graphql/merge_requests/states/'
query_name = 'ready_to_merge.query.graphql'
it "#{base_output_path}#{query_name}.json" do
query = get_graphql_query_as_string("#{base_input_path}#{query_name}", ee: true)
post_graphql(query, current_user: user, variables: { projectPath: project.full_path, iid: merge_request.iid.to_s })
expect_graphql_errors_to_be_empty
end
end
end
private
def render_discussions_json(merge_request)

View file

@ -169,12 +169,11 @@ describe('RepoEditor', () => {
expect(findEditor().isVisible()).toBe(true);
});
it('renders only an edit tab', async () => {
it('renders no tabs', async () => {
await createComponent();
const tabs = findTabs();
expect(tabs).toHaveLength(1);
expect(tabs.at(0).text()).toBe('Edit');
expect(tabs).toHaveLength(0);
});
});
@ -196,25 +195,48 @@ describe('RepoEditor', () => {
mock.restore();
});
it('renders an Edit and a Preview Tab', async () => {
await createComponent({ activeFile });
const tabs = findTabs();
describe('when files is markdown', () => {
let layoutSpy;
expect(tabs).toHaveLength(2);
expect(tabs.at(0).text()).toBe('Edit');
expect(tabs.at(1).text()).toBe('Preview Markdown');
beforeEach(async () => {
await createComponent({ activeFile });
layoutSpy = jest.spyOn(wrapper.vm.editor, 'layout');
});
it('renders an Edit and a Preview Tab', () => {
const tabs = findTabs();
expect(tabs).toHaveLength(2);
expect(tabs.at(0).text()).toBe('Edit');
expect(tabs.at(1).text()).toBe('Preview Markdown');
});
it('renders markdown for tempFile', async () => {
findPreviewTab().trigger('click');
await waitForPromises();
expect(wrapper.find(ContentViewer).html()).toContain(defaultFileProps.content);
});
it('should not trigger layout', async () => {
expect(layoutSpy).not.toHaveBeenCalled();
});
describe('when file changes to non-markdown file', () => {
beforeEach(async () => {
wrapper.setProps({ file: dummyFile.empty });
});
it('should hide tabs', () => {
expect(findTabs()).toHaveLength(0);
});
it('should trigger refresh dimensions', async () => {
expect(layoutSpy).toHaveBeenCalledTimes(1);
});
});
});
it('renders markdown for tempFile', async () => {
// by default files created in the spec are temp: no need for explicitly sending the param
await createComponent({ activeFile });
findPreviewTab().trigger('click');
await waitForPromises();
expect(wrapper.find(ContentViewer).html()).toContain(defaultFileProps.content);
});
it('shows no tabs when not in Edit mode', async () => {
it('when not in edit mode, shows no tabs', async () => {
await createComponent({
state: {
currentActivityView: leftSidebarViews.review.name,
@ -405,7 +427,7 @@ describe('RepoEditor', () => {
it.each`
mode | isVisible
${'edit'} | ${true}
${'edit'} | ${false}
${'review'} | ${false}
${'commit'} | ${false}
`('tabs in $mode are $isVisible', async ({ mode, isVisible } = {}) => {

View file

@ -51,31 +51,6 @@ describe('common_utils', () => {
});
});
describe('parseUrl', () => {
it('returns an anchor tag with url', () => {
expect(commonUtils.parseUrl('/some/absolute/url').pathname).toContain('some/absolute/url');
});
it('url is escaped', () => {
// IE11 will return a relative pathname while other browsers will return a full pathname.
// parseUrl uses an anchor element for parsing an url. With relative urls, the anchor
// element will create an absolute url relative to the current execution context.
// The JavaScript test suite is executed at '/' which will lead to an absolute url
// starting with '/'.
expect(commonUtils.parseUrl('" test="asf"').pathname).toContain('/%22%20test=%22asf%22');
});
});
describe('parseUrlPathname', () => {
it('returns an absolute url when given an absolute url', () => {
expect(commonUtils.parseUrlPathname('/some/absolute/url')).toEqual('/some/absolute/url');
});
it('returns an absolute url when given a relative url', () => {
expect(commonUtils.parseUrlPathname('some/relative/url')).toEqual('/some/relative/url');
});
});
describe('handleLocationHash', () => {
beforeEach(() => {
jest.spyOn(window.document, 'getElementById');

View file

@ -22,6 +22,27 @@ beforeEach(() => {
});
describe('URL utility', () => {
describe('parseUrlPathname', () => {
it('returns an absolute url when given an absolute url', () => {
expect(urlUtils.parseUrlPathname('/some/absolute/url')).toBe('/some/absolute/url');
});
it('returns an absolute url when given a relative url', () => {
expect(urlUtils.parseUrlPathname('some/relative/url')).toBe('/some/relative/url');
});
it('returns an absolute url that includes the document.location path when given a relative url', () => {
// Change the location to see the `/mypath/` included in the result
setWindowLocation(`${TEST_HOST}/mypath/`);
expect(urlUtils.parseUrlPathname('some/relative/url')).toBe('/mypath/some/relative/url');
});
it('encodes certain character in the url', () => {
expect(urlUtils.parseUrlPathname('test="a b"')).toBe('/test=%22a%20b%22');
});
});
describe('webIDEUrl', () => {
afterEach(() => {
gon.relative_url_root = '';
@ -636,7 +657,7 @@ describe('URL utility', () => {
`('returns "$expectation" with "$protocol" protocol', ({ protocol, expectation }) => {
setWindowLocation(`${protocol}//example.com`);
expect(urlUtils.getWebSocketProtocol()).toEqual(expectation);
expect(urlUtils.getWebSocketProtocol()).toBe(expectation);
});
});
@ -646,7 +667,7 @@ describe('URL utility', () => {
const path = '/lorem/ipsum?a=bc';
expect(urlUtils.getWebSocketUrl(path)).toEqual('ws://example.com/lorem/ipsum?a=bc');
expect(urlUtils.getWebSocketUrl(path)).toBe('ws://example.com/lorem/ipsum?a=bc');
});
});
@ -696,7 +717,7 @@ describe('URL utility', () => {
it('should return valid parameter', () => {
setWindowLocation('?scope=all&p=2');
expect(getParameterByName('p')).toEqual('2');
expect(getParameterByName('p')).toBe('2');
expect(getParameterByName('scope')).toBe('all');
});
@ -737,7 +758,7 @@ describe('URL utility', () => {
it('converts search query object back into a search query', () => {
const searchQueryObject = { one: '1', two: '2' };
expect(urlUtils.objectToQuery(searchQueryObject)).toEqual('one=1&two=2');
expect(urlUtils.objectToQuery(searchQueryObject)).toBe('one=1&two=2');
});
it('returns empty string when `params` is undefined, null or empty string', () => {
@ -833,15 +854,15 @@ describe('URL utility', () => {
it('adds new params as query string', () => {
const url = 'https://gitlab.com/test';
expect(
urlUtils.setUrlParams({ group_id: 'gitlab-org', project_id: 'my-project' }, url),
).toEqual('https://gitlab.com/test?group_id=gitlab-org&project_id=my-project');
expect(urlUtils.setUrlParams({ group_id: 'gitlab-org', project_id: 'my-project' }, url)).toBe(
'https://gitlab.com/test?group_id=gitlab-org&project_id=my-project',
);
});
it('updates an existing parameter', () => {
const url = 'https://gitlab.com/test?group_id=gitlab-org&project_id=my-project';
expect(urlUtils.setUrlParams({ project_id: 'gitlab-test' }, url)).toEqual(
expect(urlUtils.setUrlParams({ project_id: 'gitlab-test' }, url)).toBe(
'https://gitlab.com/test?group_id=gitlab-org&project_id=gitlab-test',
);
});
@ -849,7 +870,7 @@ describe('URL utility', () => {
it("removes the project_id param when it's value is null", () => {
const url = 'https://gitlab.com/test?group_id=gitlab-org&project_id=my-project';
expect(urlUtils.setUrlParams({ project_id: null }, url)).toEqual(
expect(urlUtils.setUrlParams({ project_id: null }, url)).toBe(
'https://gitlab.com/test?group_id=gitlab-org',
);
});
@ -857,7 +878,7 @@ describe('URL utility', () => {
it('adds parameters from arrays', () => {
const url = 'https://gitlab.com/test';
expect(urlUtils.setUrlParams({ labels: ['foo', 'bar'] }, url)).toEqual(
expect(urlUtils.setUrlParams({ labels: ['foo', 'bar'] }, url)).toBe(
'https://gitlab.com/test?labels=foo&labels=bar',
);
});
@ -865,13 +886,13 @@ describe('URL utility', () => {
it('removes parameters from empty arrays', () => {
const url = 'https://gitlab.com/test?labels=foo&labels=bar';
expect(urlUtils.setUrlParams({ labels: [] }, url)).toEqual('https://gitlab.com/test');
expect(urlUtils.setUrlParams({ labels: [] }, url)).toBe('https://gitlab.com/test');
});
it('removes parameters from empty arrays while keeping other parameters', () => {
const url = 'https://gitlab.com/test?labels=foo&labels=bar&unrelated=unrelated';
expect(urlUtils.setUrlParams({ labels: [] }, url)).toEqual(
expect(urlUtils.setUrlParams({ labels: [] }, url)).toBe(
'https://gitlab.com/test?unrelated=unrelated',
);
});
@ -879,7 +900,7 @@ describe('URL utility', () => {
it('adds parameters from arrays when railsArraySyntax=true', () => {
const url = 'https://gitlab.com/test';
expect(urlUtils.setUrlParams({ labels: ['foo', 'bar'] }, url, false, true)).toEqual(
expect(urlUtils.setUrlParams({ labels: ['foo', 'bar'] }, url, false, true)).toBe(
'https://gitlab.com/test?labels%5B%5D=foo&labels%5B%5D=bar',
);
});
@ -887,7 +908,7 @@ describe('URL utility', () => {
it('removes parameters from empty arrays when railsArraySyntax=true', () => {
const url = 'https://gitlab.com/test?labels%5B%5D=foo&labels%5B%5D=bar';
expect(urlUtils.setUrlParams({ labels: [] }, url, false, true)).toEqual(
expect(urlUtils.setUrlParams({ labels: [] }, url, false, true)).toBe(
'https://gitlab.com/test',
);
});
@ -895,7 +916,7 @@ describe('URL utility', () => {
it('decodes URI when decodeURI=true', () => {
const url = 'https://gitlab.com/test';
expect(urlUtils.setUrlParams({ labels: ['foo', 'bar'] }, url, false, true, true)).toEqual(
expect(urlUtils.setUrlParams({ labels: ['foo', 'bar'] }, url, false, true, true)).toBe(
'https://gitlab.com/test?labels[]=foo&labels[]=bar',
);
});
@ -903,7 +924,7 @@ describe('URL utility', () => {
it('removes all existing URL params and sets a new param when cleanParams=true', () => {
const url = 'https://gitlab.com/test?group_id=gitlab-org&project_id=my-project';
expect(urlUtils.setUrlParams({ foo: 'bar' }, url, true)).toEqual(
expect(urlUtils.setUrlParams({ foo: 'bar' }, url, true)).toBe(
'https://gitlab.com/test?foo=bar',
);
});

View file

@ -4,6 +4,7 @@ import initMrPage from 'helpers/init_vue_mr_page_helper';
import axios from '~/lib/utils/axios_utils';
import MergeRequestTabs from '~/merge_request_tabs';
import '~/lib/utils/common_utils';
import '~/lib/utils/url_utility';
jest.mock('~/lib/utils/webpack', () => ({
resetServiceWorkersPublicPath: jest.fn(),

View file

@ -6,6 +6,7 @@ import {
DELETE_TAG_ERROR_MESSAGE,
DELETE_TAGS_SUCCESS_MESSAGE,
DELETE_TAGS_ERROR_MESSAGE,
DETAILS_IMPORTING_ERROR_MESSAGE,
ADMIN_GARBAGE_COLLECTION_TIP,
} from '~/packages_and_registries/container_registry/explorer/constants';
@ -76,6 +77,7 @@ describe('Delete alert', () => {
});
});
});
describe('error states', () => {
describe.each`
deleteAlertType | message
@ -105,6 +107,25 @@ describe('Delete alert', () => {
});
});
describe('importing repository error state', () => {
beforeEach(() => {
mountComponent({
deleteAlertType: 'danger_importing',
containerRegistryImportingHelpPagePath: 'https://foobar',
});
});
it('alert exist and text is appropriate', () => {
expect(findAlert().text()).toMatchInterpolatedText(DETAILS_IMPORTING_ERROR_MESSAGE);
});
it('alert body contains link', () => {
const alertLink = findLink();
expect(alertLink.exists()).toBe(true);
expect(alertLink.attributes('href')).toBe('https://foobar');
});
});
describe('dismissing alert', () => {
it('GlAlert dismiss event triggers a change event', () => {
mountComponent({ deleteAlertType: 'success_tags' });

View file

@ -239,6 +239,15 @@ export const graphQLDeleteImageRepositoryTagsMock = {
},
};
export const graphQLDeleteImageRepositoryTagImportingErrorMock = {
data: {
destroyContainerRepositoryTags: {
errors: ['repository importing'],
__typename: 'DestroyContainerRepositoryTagsPayload',
},
},
};
export const dockerCommands = {
dockerBuildCommand: 'foofoo',
dockerPushCommand: 'barbar',

View file

@ -18,6 +18,7 @@ import {
UNFINISHED_STATUS,
DELETE_SCHEDULED,
ALERT_DANGER_IMAGE,
ALERT_DANGER_IMPORTING,
MISSING_OR_DELETED_IMAGE_BREADCRUMB,
ROOT_IMAGE_TEXT,
MISSING_OR_DELETED_IMAGE_TITLE,
@ -33,6 +34,7 @@ import Tracking from '~/tracking';
import {
graphQLImageDetailsMock,
graphQLDeleteImageRepositoryTagsMock,
graphQLDeleteImageRepositoryTagImportingErrorMock,
containerRepositoryMock,
graphQLEmptyImageDetailsMock,
tagsMock,
@ -329,6 +331,7 @@ describe('Details Page', () => {
const config = {
isAdmin: true,
garbageCollectionHelpPagePath: 'baz',
containerRegistryImportingHelpPagePath: 'https://foobar',
};
const deleteAlertType = 'success_tag';
@ -353,6 +356,35 @@ describe('Details Page', () => {
expect(findDeleteAlert().props()).toEqual({ ...config, deleteAlertType });
});
describe('importing repository error', () => {
let mutationResolver;
let tagsResolver;
beforeEach(async () => {
mutationResolver = jest
.fn()
.mockResolvedValue(graphQLDeleteImageRepositoryTagImportingErrorMock);
tagsResolver = jest.fn().mockResolvedValue(graphQLImageDetailsMock(imageTagsMock));
mountComponent({ mutationResolver, tagsResolver });
await waitForApolloRequestRender();
});
it('displays the proper alert', async () => {
findTagsList().vm.$emit('delete', [cleanTags[0]]);
await nextTick();
findDeleteModal().vm.$emit('confirmDelete');
await waitForPromises();
expect(tagsResolver).toHaveBeenCalled();
const deleteAlert = findDeleteAlert();
expect(deleteAlert.exists()).toBe(true);
expect(deleteAlert.props('deleteAlertType')).toBe(ALERT_DANGER_IMPORTING);
});
});
});
describe('Partial Cleanup Alert', () => {

View file

@ -1,7 +1,12 @@
import { shallowMount } from '@vue/test-utils';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { GlSprintf } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import produce from 'immer';
import readyToMergeResponse from 'test_fixtures/graphql/merge_requests/states/ready_to_merge.query.graphql.json';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import readyToMergeQuery from 'ee_else_ce/vue_merge_request_widget/queries/states/ready_to_merge.query.graphql';
import simplePoll from '~/lib/utils/simple_poll';
import CommitEdit from '~/vue_merge_request_widget/components/states/commit_edit.vue';
import CommitMessageDropdown from '~/vue_merge_request_widget/components/states/commit_message_dropdown.vue';
@ -19,9 +24,11 @@ jest.mock('~/commons/nav/user_merge_requests', () => ({
refreshUserMergeRequestCounts: jest.fn(),
}));
const commitMessage = 'This is the commit message';
const squashCommitMessage = 'This is the squash commit message';
const commitMessageWithDescription = 'This is the commit message description';
const commitMessage = readyToMergeResponse.data.project.mergeRequest.defaultMergeCommitMessage;
const squashCommitMessage =
readyToMergeResponse.data.project.mergeRequest.defaultSquashCommitMessage;
const commitMessageWithDescription =
readyToMergeResponse.data.project.mergeRequest.defaultMergeCommitMessageWithDescription;
const createTestMr = (customConfig) => {
const mr = {
isPipelineActive: false,
@ -42,6 +49,8 @@ const createTestMr = (customConfig) => {
commitMessage,
squashCommitMessage,
commitMessageWithDescription,
defaultMergeCommitMessage: commitMessage,
defaultSquashCommitMessage: squashCommitMessage,
shouldRemoveSourceBranch: true,
canRemoveSourceBranch: false,
targetBranch: 'main',
@ -61,15 +70,25 @@ const createTestService = () => ({
merge: jest.fn(),
poll: jest.fn().mockResolvedValue(),
});
const localVue = createLocalVue();
localVue.use(VueApollo);
let wrapper;
let readyToMergeResponseSpy;
const findMergeButton = () => wrapper.find('[data-testid="merge-button"]');
const findPipelineFailedConfirmModal = () =>
wrapper.findComponent(MergeFailedPipelineConfirmationDialog);
const createReadyToMergeResponse = (customMr) => {
return produce(readyToMergeResponse, (draft) => {
Object.assign(draft.data.project.mergeRequest, customMr);
});
};
const createComponent = (customConfig = {}, mergeRequestWidgetGraphql = false) => {
wrapper = shallowMount(ReadyToMerge, {
localVue,
propsData: {
mr: createTestMr(customConfig),
service: createTestService(),
@ -82,10 +101,29 @@ const createComponent = (customConfig = {}, mergeRequestWidgetGraphql = false) =
stubs: {
CommitEdit,
},
apolloProvider: createMockApollo([[readyToMergeQuery, readyToMergeResponseSpy]]),
});
};
const findCheckboxElement = () => wrapper.find(SquashBeforeMerge);
const findCommitsHeaderElement = () => wrapper.find(CommitsHeader);
const findCommitEditElements = () => wrapper.findAll(CommitEdit);
const findCommitDropdownElement = () => wrapper.find(CommitMessageDropdown);
const findFirstCommitEditLabel = () => findCommitEditElements().at(0).props('label');
const findTipLink = () => wrapper.find(GlSprintf);
const findCommitEditWithInputId = (inputId) =>
findCommitEditElements().wrappers.find((x) => x.props('inputId') === inputId);
const findMergeCommitMessage = () => findCommitEditWithInputId('merge-message-edit').props('value');
const findSquashCommitMessage = () =>
findCommitEditWithInputId('squash-message-edit').props('value');
const triggerApprovalUpdated = () => eventHub.$emit('ApprovalUpdated');
describe('ReadyToMerge', () => {
beforeEach(() => {
readyToMergeResponseSpy = jest.fn().mockResolvedValueOnce(readyToMergeResponse);
});
afterEach(() => {
wrapper.destroy();
});
@ -447,13 +485,6 @@ describe('ReadyToMerge', () => {
});
describe('render children components', () => {
const findCheckboxElement = () => wrapper.find(SquashBeforeMerge);
const findCommitsHeaderElement = () => wrapper.find(CommitsHeader);
const findCommitEditElements = () => wrapper.findAll(CommitEdit);
const findCommitDropdownElement = () => wrapper.find(CommitMessageDropdown);
const findFirstCommitEditLabel = () => findCommitEditElements().at(0).props('label');
const findTipLink = () => wrapper.find(GlSprintf);
describe('squash checkbox', () => {
it('should be rendered when squash before merge is enabled and there is more than 1 commit', () => {
createComponent({
@ -772,4 +803,65 @@ describe('ReadyToMerge', () => {
expect(findPipelineFailedConfirmModal().props()).toEqual({ visible: true });
});
});
describe('updating graphql data triggers commit message update when default changed', () => {
const UPDATED_MERGE_COMMIT_MESSAGE = 'New merge message from BE';
const UPDATED_SQUASH_COMMIT_MESSAGE = 'New squash message from BE';
const USER_COMMIT_MESSAGE = 'Merge message provided manually by user';
const createDefaultGqlComponent = () =>
createComponent({ mr: { commitsCount: 2, enableSquashBeforeMerge: true } }, true);
beforeEach(() => {
readyToMergeResponseSpy = jest
.fn()
.mockResolvedValueOnce(createReadyToMergeResponse({ squash: true, squashOnMerge: true }))
.mockResolvedValue(
createReadyToMergeResponse({
squash: true,
squashOnMerge: true,
defaultMergeCommitMessage: UPDATED_MERGE_COMMIT_MESSAGE,
defaultSquashCommitMessage: UPDATED_SQUASH_COMMIT_MESSAGE,
}),
);
});
describe.each`
desc | finderFn | initialValue | updatedValue | inputId
${'merge commit message'} | ${findMergeCommitMessage} | ${commitMessage} | ${UPDATED_MERGE_COMMIT_MESSAGE} | ${'#merge-message-edit'}
${'squash commit message'} | ${findSquashCommitMessage} | ${squashCommitMessage} | ${UPDATED_SQUASH_COMMIT_MESSAGE} | ${'#squash-message-edit'}
`('with $desc', ({ finderFn, initialValue, updatedValue, inputId }) => {
it('should have initial value', async () => {
createDefaultGqlComponent();
await waitForPromises();
expect(finderFn()).toBe(initialValue);
});
it('should have updated value after graphql refetch', async () => {
createDefaultGqlComponent();
await waitForPromises();
triggerApprovalUpdated();
await waitForPromises();
expect(finderFn()).toBe(updatedValue);
});
it('should not update if user has touched', async () => {
createDefaultGqlComponent();
await waitForPromises();
const input = wrapper.find(inputId);
input.element.value = USER_COMMIT_MESSAGE;
input.trigger('input');
triggerApprovalUpdated();
await waitForPromises();
expect(finderFn()).toBe(USER_COMMIT_MESSAGE);
});
});
});
});

View file

@ -318,13 +318,14 @@ RSpec.describe MarkupHelper do
let(:wiki) { build(:wiki, container: project) }
let(:content) { 'wiki content' }
let(:slug) { 'nested/page' }
let(:wiki_page) { double('WikiPage', path: "file.#{extension}", content: content, slug: slug, wiki: wiki) }
let(:path) { "file.#{extension}" }
let(:wiki_page) { double('WikiPage', path: path, content: content, slug: slug, wiki: wiki) }
let(:context) do
{
pipeline: :wiki, project: project, wiki: wiki,
page_slug: slug, issuable_reference_expansion_enabled: true,
repository: wiki.repository
repository: wiki.repository, requested_path: path
}
end

View file

@ -29,6 +29,20 @@ RSpec.describe API::Entities::WikiPage do
it 'returns the wiki page content rendered' do
expect(subject[:content]).to eq "<p data-sourcepos=\"1:1-1:#{wiki_page.content.size}\" dir=\"auto\">#{wiki_page.content}</p>"
end
it 'includes the wiki page version in the render context' do
expect(entity).to receive(:render_wiki_content).with(anything, hash_including(ref: wiki_page.version.id)).and_call_original
subject[:content]
end
context 'when page is an Ascii document' do
let(:wiki_page) { create(:wiki_page, content: "*Test* _content_", format: :asciidoc) }
it 'renders the page without errors' do
expect(subject[:content]).to eq("<div>&#x000A;<p><strong>Test</strong> <em>content</em></p>&#x000A;</div>")
end
end
end
context 'when it is false' do

View file

@ -7,6 +7,8 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
it { is_expected.to be_a Gitlab::Database::SharedModel }
it { expect(described_class::TIMEOUT_EXCEPTIONS).to match_array [ActiveRecord::StatementTimeout, ActiveRecord::ConnectionTimeoutError, ActiveRecord::AdapterTimeout, ActiveRecord::LockWaitTimeout] }
describe 'associations' do
it { is_expected.to belong_to(:batched_migration).with_foreign_key(:batched_background_migration_id) }
it { is_expected.to have_many(:batched_job_transition_logs).with_foreign_key(:batched_background_migration_job_id) }
@ -15,6 +17,8 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
describe 'state machine' do
let_it_be(:job) { create(:batched_background_migration_job, :failed) }
it { expect(described_class.state_machine.states.map(&:name)).to eql(%i(pending running failed succeeded)) }
context 'when a job is running' do
it 'logs the transition' do
expect(Gitlab::AppLogger).to receive(:info).with( { batched_job_id: job.id, message: 'BatchedJob transition', new_state: :running, previous_state: :failed } )
@ -47,6 +51,51 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
end
end
context 'when a job fails the number of max times' do
let(:max_times) { described_class::MAX_ATTEMPTS }
let!(:job) { create(:batched_background_migration_job, :running, batch_size: 10, min_value: 6, max_value: 15, attempts: max_times) }
context 'when job can be split' do
let(:exception) { ActiveRecord::StatementTimeout.new('Timeout!') }
before do
allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
allow(batch_class).to receive(:next_batch).and_return([6, 10])
end
end
it 'splits the job into two retriable jobs' do
expect { job.failure!(error: exception) }.to change { job.batched_migration.batched_jobs.retriable.count }.from(0).to(2)
end
end
context 'when the job cannot be split' do
let(:exception) { ActiveRecord::StatementTimeout.new('Timeout!') }
let(:max_times) { described_class::MAX_ATTEMPTS }
let!(:job) { create(:batched_background_migration_job, :running, batch_size: 50, sub_batch_size: 20, min_value: 6, max_value: 15, attempts: max_times) }
let(:error_message) { 'Job cannot be split further' }
let(:split_and_retry_exception) { Gitlab::Database::BackgroundMigration::SplitAndRetryError.new(error_message) }
before do
allow(job).to receive(:split_and_retry!).and_raise(split_and_retry_exception)
end
it 'does not split the job' do
expect { job.failure!(error: exception) }.not_to change { job.batched_migration.batched_jobs.retriable.count }
end
it 'keeps the same job attributes' do
expect { job.failure!(error: exception) }.not_to change { job }
end
it 'logs the error' do
expect(Gitlab::AppLogger).to receive(:error).with( { message: error_message, batched_job_id: job.id } )
job.failure!(error: exception)
end
end
end
context 'when a job fails' do
let(:job) { create(:batched_background_migration_job, :running) }
@ -147,6 +196,49 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
end
end
describe '#can_split?' do
subject { job.can_split?(exception) }
context 'when the number of attempts is greater than the limit and the batch_size is greater than the sub_batch_size' do
let(:job) { create(:batched_background_migration_job, :failed, batch_size: 4, sub_batch_size: 2, attempts: described_class::MAX_ATTEMPTS + 1) }
context 'when is a timeout exception' do
let(:exception) { ActiveRecord::StatementTimeout.new }
it { expect(subject).to be_truthy }
end
context 'when is not a timeout exception' do
let(:exception) { RuntimeError.new }
it { expect(subject).to be_falsey }
end
end
context 'when the number of attempts is lower than the limit and the batch_size is greater than the sub_batch_size' do
let(:job) { create(:batched_background_migration_job, :failed, batch_size: 4, sub_batch_size: 2, attempts: described_class::MAX_ATTEMPTS - 1) }
context 'when is a timeout exception' do
let(:exception) { ActiveRecord::StatementTimeout.new }
it { expect(subject).to be_falsey }
end
context 'when is not a timeout exception' do
let(:exception) { RuntimeError.new }
it { expect(subject).to be_falsey }
end
end
context 'when the batch_size is lower than the sub_batch_size' do
let(:job) { create(:batched_background_migration_job, :failed, batch_size: 2, sub_batch_size: 4) }
let(:exception) { ActiveRecord::StatementTimeout.new }
it { expect(subject).to be_falsey }
end
end
describe '#time_efficiency' do
subject { job.time_efficiency }

View file

@ -193,6 +193,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
it_behaves_like 'an error is raised', RuntimeError.new('Something broke!')
it_behaves_like 'an error is raised', SignalException.new('SIGTERM')
it_behaves_like 'an error is raised', ActiveRecord::StatementTimeout.new('Timeout!')
end
context 'when the batched background migration does not inherit from BaseJob' do

View file

@ -1172,8 +1172,8 @@ RSpec.describe User do
@user.update!(email: 'new_primary@example.com')
@user.reload
expect(@user.emails.count).to eq 2
expect(@user.emails.pluck(:email)).to match_array([@secondary.email, 'primary@example.com'])
expect(@user.emails.count).to eq 3
expect(@user.emails.pluck(:email)).to match_array([@secondary.email, 'primary@example.com', 'new_primary@example.com'])
end
context 'when the first email was unconfirmed and the second email gets confirmed' do
@ -1594,6 +1594,66 @@ RSpec.describe User do
end
end
describe 'saving primary email to the emails table' do
context 'when calling skip_reconfirmation! while updating the primary email' do
let(:user) { create(:user, email: 'primary@example.com') }
it 'adds the new email to emails' do
user.skip_reconfirmation!
user.update!(email: 'new_primary@example.com')
expect(user.email).to eq('new_primary@example.com')
expect(user.unconfirmed_email).to be_nil
expect(user).to be_confirmed
expect(user.emails.pluck(:email)).to include('new_primary@example.com')
expect(user.emails.find_by(email: 'new_primary@example.com')).to be_confirmed
end
end
context 'when the email is changed but not confirmed' do
let(:user) { create(:user, email: 'primary@example.com') }
it 'does not add the new email to emails yet' do
user.update!(email: 'new_primary@example.com')
expect(user.unconfirmed_email).to eq('new_primary@example.com')
expect(user.email).to eq('primary@example.com')
expect(user).to be_confirmed
expect(user.emails.pluck(:email)).not_to include('new_primary@example.com')
end
end
context 'when the user is created as not confirmed' do
let(:user) { create(:user, :unconfirmed, email: 'primary@example.com') }
it 'does not add the email to emails yet' do
expect(user).not_to be_confirmed
expect(user.emails.pluck(:email)).not_to include('primary@example.com')
end
end
context 'when the user is created as confirmed' do
let(:user) { create(:user, email: 'primary@example.com', confirmed_at: DateTime.now.utc) }
it 'adds the email to emails' do
expect(user).to be_confirmed
expect(user.emails.pluck(:email)).to include('primary@example.com')
end
end
context 'when skip_confirmation! is called' do
let(:user) { build(:user, :unconfirmed, email: 'primary@example.com') }
it 'adds the email to emails' do
user.skip_confirmation!
user.save!
expect(user).to be_confirmed
expect(user.emails.pluck(:email)).to include('primary@example.com')
end
end
end
describe '#force_confirm' do
let(:expired_confirmation_sent_at) { Date.today - described_class.confirm_within - 7.days }
let(:extant_confirmation_sent_at) { Date.today }

View file

@ -174,8 +174,6 @@ RSpec.describe API::Wikis do
context 'when user is developer' do
before do
project.add_developer(user)
request
end
include_examples 'wikis API returns wiki page'
@ -183,6 +181,10 @@ RSpec.describe API::Wikis do
context 'when page is not existing' do
let(:url) { "/projects/#{project.id}/wikis/unknown" }
before do
request
end
include_examples 'wiki API 404 Wiki Page Not Found'
end
end
@ -190,8 +192,6 @@ RSpec.describe API::Wikis do
context 'when user is maintainer' do
before do
project.add_maintainer(user)
request
end
include_examples 'wikis API returns wiki page'
@ -199,6 +199,10 @@ RSpec.describe API::Wikis do
context 'when page is not existing' do
let(:url) { "/projects/#{project.id}/wikis/unknown" }
before do
request
end
include_examples 'wiki API 404 Wiki Page Not Found'
end
end
@ -220,8 +224,6 @@ RSpec.describe API::Wikis do
context 'when user is developer' do
before do
project.add_developer(user)
request
end
include_examples 'wikis API returns wiki page'
@ -229,6 +231,10 @@ RSpec.describe API::Wikis do
# Requesting a wiki page that does not exist must return a 404.
context 'when page is not existing' do
  let(:url) { "/projects/#{project.id}/wikis/unknown" }

  # Issue the shared `request` only after `url` has been overridden to
  # point at the unknown page.
  before do
    request
  end

  include_examples 'wiki API 404 Wiki Page Not Found'
end
end
@ -236,8 +242,6 @@ RSpec.describe API::Wikis do
context 'when user is maintainer' do
before do
project.add_maintainer(user)
request
end
include_examples 'wikis API returns wiki page'
@ -245,6 +249,10 @@ RSpec.describe API::Wikis do
# Requesting a wiki page that does not exist must return a 404.
context 'when page is not existing' do
  let(:url) { "/projects/#{project.id}/wikis/unknown" }

  # Issue the shared `request` only after `url` has been overridden to
  # point at the unknown page.
  before do
    request
  end

  include_examples 'wiki API 404 Wiki Page Not Found'
end
end

View file

@ -7,3 +7,20 @@ RSpec.shared_examples 'handling feature network errors with the container regist
expect(page).to have_content 'We are having trouble connecting to the Container Registry'
end
end
# Feature-level shared examples: when the delete-tags service reports that
# the repository is being imported, the UI must surface the "temporarily
# cannot be marked for deletion" alert instead of removing the tag.
# Expects `container_repository` and `user` to be defined by the includer.
RSpec.shared_examples 'rejecting tags destruction for an importing repository on' do |tags: []|
  it 'rejects the tag destruction operation' do
    # Stub the delete service to fail with the importing-repository error for
    # the exact project/user/tags combination the page is expected to send.
    service = instance_double('Projects::ContainerRepository::DeleteTagsService')
    expect(service).to receive(:execute).with(container_repository) { { status: :error, message: 'repository importing' } }
    expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(container_repository.project, user, tags: tags) { service }

    # Drive the UI: open the row's action menu, click delete, confirm in the modal.
    first('[data-testid="additional-actions"]').click
    first('[data-testid="single-delete-button"]').click

    expect(find('.modal .modal-title')).to have_content _('Remove tag')
    find('.modal .modal-footer .btn-danger').click

    # The alert must explain the failure and link to the troubleshooting docs.
    alert_body = find('.gl-alert-body')
    expect(alert_body).to have_content('Tags temporarily cannot be marked for deletion. Please try again in a few minutes.')
    expect(alert_body).to have_link('More details', href: help_page_path('user/packages/container_registry/index', anchor: 'tags-temporarily-cannot-be-marked-for-deletion'))
  end
end

View file

@ -44,7 +44,13 @@ RSpec.shared_examples_for 'wikis API returns list of wiki pages' do
end
RSpec.shared_examples_for 'wikis API returns wiki page' do
subject(:request) { get api(url, user), params: params }
shared_examples 'returns wiki page' do
before do
request
end
specify do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(5)
@ -71,6 +77,38 @@ RSpec.shared_examples_for 'wikis API returns wiki page' do
it_behaves_like 'returns wiki page'
end
# Version handling: update the page once so it has exactly two versions,
# then verify which version the API serves depending on the `version` param.
context 'when wiki page has versions' do
  let(:new_content) { 'New content' }

  before do
    wiki.update_page(page.page, content: new_content, message: 'updated page')
    # Sanity check: the update above must have produced a second version.
    expect(page.count_versions).to eq(2)
    request
  end

  context 'when version param is not present' do
    it 'retrieves the last version' do
      expect(json_response['content']).to eq(new_content)
    end
  end

  context 'when version param is set' do
    # `page.version.id` refers to the original (pre-update) version here.
    let(:params) { { version: page.version.id } }

    it 'retrieves the specific page version' do
      expect(json_response['content']).to eq(page.content)
    end

    context 'when version param is not valid or inexistent' do
      let(:params) { { version: 'foobar' } }

      it_behaves_like 'wiki API 404 Wiki Page Not Found'
    end
  end
end
end
RSpec.shared_examples_for 'wikis API creates wiki page' do

View file

@ -0,0 +1,44 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Quality::TestDataCleanupWorker do
subject { described_class.new }
shared_examples 'successful deletion' do
before do
allow(Gitlab).to receive(:staging?).and_return(true)
end
it 'removes test groups' do
expect { subject.perform }.to change(Group, :count).by(-test_group_count)
end
end
describe "#perform" do
context 'with multiple test groups to remove' do
let(:test_group_count) { 5 }
let!(:groups_to_remove) { create_list(:group, test_group_count, :test_group) }
let!(:group_to_keep) { create(:group, path: 'test-group-fulfillment-keep', created_at: 1.day.ago) }
let!(:non_test_group) { create(:group) }
let(:non_test_owner_group) { create(:group, path: 'test-group-fulfillment1234', created_at: 4.days.ago) }
before do
non_test_owner_group.add_owner(create(:user))
end
it_behaves_like 'successful deletion'
end
context 'with paid groups' do
let(:test_group_count) { 1 }
let!(:paid_group) { create(:group, :test_group) }
before do
allow(paid_group).to receive(:paid?).and_return(true)
end
it_behaves_like 'successful deletion'
end
end
end