Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-04-25 18:11:07 +00:00
parent 29516285eb
commit c7531da771
47 changed files with 744 additions and 206 deletions

View File

@ -71,6 +71,10 @@ export default {
type: String,
required: true,
},
historyPath: {
type: String,
required: true,
},
},
data() {
@ -485,12 +489,15 @@ export default {
<template>
<div>
<h1
class="gl-my-0 gl-py-4 gl-font-size-h1 gl-border-solid gl-border-gray-200 gl-border-0 gl-border-b-1 gl-display-flex"
<div
class="gl-display-flex gl-align-items-center gl-border-solid gl-border-gray-200 gl-border-0 gl-border-b-1"
>
<img :src="$options.gitlabLogo" class="gl-w-6 gl-h-6 gl-mb-2 gl-display-inline gl-mr-2" />
{{ s__('BulkImport|Import groups from GitLab') }}
</h1>
<h1 class="gl-my-0 gl-py-4 gl-font-size-h1gl-display-flex">
<img :src="$options.gitlabLogo" class="gl-w-6 gl-h-6 gl-mb-2 gl-display-inline gl-mr-2" />
{{ s__('BulkImport|Import groups from GitLab') }}
</h1>
<gl-link :href="historyPath" class="gl-ml-auto">{{ s__('BulkImport|History') }}</gl-link>
</div>
<gl-alert
v-if="unavailableFeatures.length > 0 && unavailableFeaturesAlertVisible"
variant="warning"

View File

@ -17,6 +17,7 @@ export function mountImportGroupsApp(mountElement) {
jobsPath,
sourceUrl,
groupPathRegex,
historyPath,
} = mountElement.dataset;
const apolloProvider = new VueApollo({
defaultClient: createApolloClient({
@ -38,6 +39,7 @@ export function mountImportGroupsApp(mountElement) {
sourceUrl,
jobsPath,
groupPathRegex: new RegExp(`^(${groupPathRegex})$`),
historyPath,
},
});
},

View File

@ -65,7 +65,8 @@ export default class IssuableForm {
this.gfmAutoComplete = new GfmAutoComplete(
gl.GfmAutoComplete && gl.GfmAutoComplete.dataSources,
).setup();
this.usersSelect = new UsersSelect();
const autoAssignToMe = form.get(0).id === 'new_merge_request';
this.usersSelect = new UsersSelect(undefined, undefined, { autoAssignToMe });
this.reviewersSelect = new UsersSelect(undefined, '.js-reviewer-search');
this.zenMode = new ZenMode();

View File

@ -17,7 +17,7 @@ import getRefMixin from '../mixins/get_ref';
import blobInfoQuery from '../queries/blob_info.query.graphql';
import userInfoQuery from '../queries/user_info.query.graphql';
import applicationInfoQuery from '../queries/application_info.query.graphql';
import { DEFAULT_BLOB_INFO, TEXT_FILE_TYPE, LFS_STORAGE } from '../constants';
import { DEFAULT_BLOB_INFO, TEXT_FILE_TYPE, LFS_STORAGE, LEGACY_FILE_TYPES } from '../constants';
import BlobButtonGroup from './blob_button_group.vue';
import ForkSuggestion from './fork_suggestion.vue';
import { loadViewer } from './blob_viewers';
@ -132,7 +132,8 @@ export default {
return this.shouldLoadLegacyViewer ? null : loadViewer(fileType, this.isUsingLfs);
},
shouldLoadLegacyViewer() {
return this.viewer.fileType === TEXT_FILE_TYPE && !this.glFeatures.highlightJs;
const isTextFile = this.viewer.fileType === TEXT_FILE_TYPE && !this.glFeatures.highlightJs;
return isTextFile || LEGACY_FILE_TYPES.includes(this.blobInfo.fileType);
},
legacyViewerLoaded() {
return (

View File

@ -86,3 +86,24 @@ export const DEFAULT_BLOB_INFO = {
export const TEXT_FILE_TYPE = 'text';
export const LFS_STORAGE = 'lfs';
/**
* We have some features (like linking to external dependencies) that our frontend highlighter
* does not yet support.
* These are file types that we want the legacy (backend) syntax highlighter to highlight.
*/
export const LEGACY_FILE_TYPES = [
'package_json',
'gemfile',
'gemspec',
'composer_json',
'podfile',
'podspec',
'podspec_json',
'cartfile',
'godeps_json',
'requirements_txt',
'cargo_toml',
'go_mod',
'go_sum',
];

View File

@ -35,7 +35,7 @@ function UsersSelect(currentUser, els, options = {}) {
}
}
const { handleClick } = options;
const { handleClick, autoAssignToMe } = options;
const userSelect = this;
$els.each((i, dropdown) => {
@ -172,10 +172,7 @@ function UsersSelect(currentUser, els, options = {}) {
});
};
$assignToMeLink.on('click', (e) => {
e.preventDefault();
$(e.currentTarget).hide();
const onAssignToMeClick = () => {
if ($dropdown.data('multiSelect')) {
assignYourself();
checkMaxSelect();
@ -194,8 +191,19 @@ function UsersSelect(currentUser, els, options = {}) {
.text(gon.current_user_fullname)
.removeClass('is-default');
}
};
$assignToMeLink.on('click', (e) => {
e.preventDefault();
$(e.currentTarget).hide();
onAssignToMeClick();
});
if (autoAssignToMe) {
$assignToMeLink.hide();
onAssignToMeClick();
}
$block.on('click', '.js-assign-yourself', (e) => {
e.preventDefault();
return assignTo(userSelect.currentUser.id);

View File

@ -12,6 +12,7 @@ body.gl-dark {
--gl-text-color: #fafafa;
--border-color: #4f4f4f;
--black: #fff;
--nav-active-bg: rgba(255, 255, 255, 0.08);
}
:root {
--white: #333;
@ -1787,6 +1788,7 @@ body.gl-dark {
--white: #333;
--black: #fff;
--svg-status-bg: #333;
--nav-active-bg: rgba(255, 255, 255, 0.08);
}
.nav-sidebar li a {
color: var(--gray-600);
@ -2023,6 +2025,7 @@ body.gl-dark {
--white: #333;
--black: #fff;
--svg-status-bg: #333;
--nav-active-bg: rgba(255, 255, 255, 0.08);
}
.tab-width-8 {
-moz-tab-size: 8;

View File

@ -99,6 +99,7 @@ $white-normal: #333;
$white-dark: #444;
$border-color: #4f4f4f;
$nav-active-bg: rgba(255, 255, 255, 0.08);
body.gl-dark {
--gray-10: #{$gray-10};
@ -199,6 +200,7 @@ body.gl-dark {
--black: #{$black};
--svg-status-bg: #{$white};
--nav-active-bg: #{$nav-active-bg};
.gl-button.gl-button,
.gl-button.gl-button.btn-block {

View File

@ -13,43 +13,65 @@ class Wiki
markdown: {
name: 'Markdown',
default_extension: :md,
extension_regex: Regexp.new('md|mkdn?|mdown|markdown', 'i'),
created_by_user: true
},
rdoc: {
name: 'RDoc',
default_extension: :rdoc,
extension_regex: Regexp.new('rdoc', 'i'),
created_by_user: true
},
asciidoc: {
name: 'AsciiDoc',
default_extension: :asciidoc,
extension_regex: Regexp.new('adoc|asciidoc', 'i'),
created_by_user: true
},
org: {
name: 'Org',
default_extension: :org,
extension_regex: Regexp.new('org', 'i'),
created_by_user: true
},
textile: {
name: 'Textile',
default_extension: :textile
default_extension: :textile,
extension_regex: Regexp.new('textile', 'i')
},
creole: {
name: 'Creole',
default_extension: :creole
default_extension: :creole,
extension_regex: Regexp.new('creole', 'i')
},
rest: {
name: 'reStructuredText',
default_extension: :rst
default_extension: :rst,
extension_regex: Regexp.new('re?st(\.txt)?', 'i')
},
mediawiki: {
name: 'MediaWiki',
default_extension: :mediawiki
default_extension: :mediawiki,
extension_regex: Regexp.new('(media)?wiki', 'i')
},
pod: {
name: 'Pod',
default_extension: :pod,
extension_regex: Regexp.new('pod', 'i')
},
plaintext: {
name: 'Plain Text',
default_extension: :txt,
extension_regex: Regexp.new('txt', 'i')
}
}.freeze unless defined?(MARKUPS)
VALID_USER_MARKUPS = MARKUPS.select { |_, v| v[:created_by_user] }.freeze unless defined?(VALID_USER_MARKUPS)
unless defined?(ALLOWED_EXTENSIONS_REGEX)
ALLOWED_EXTENSIONS_REGEX = Regexp.union(MARKUPS.map { |key, value| value[:extension_regex] }).freeze
end
CouldNotCreateWikiError = Class.new(StandardError)
HOMEPAGE = 'home'
@ -205,15 +227,36 @@ class Wiki
end
def create_page(title, content, format = :markdown, message = nil)
commit = commit_details(:created, message, title)
if Feature.enabled?(:gitaly_replace_wiki_create_page, container, default_enabled: :yaml)
with_valid_format(format) do |default_extension|
if file_exists_by_regex?(title)
raise_duplicate_page_error!
end
wiki.write_page(title, format.to_sym, content, commit)
repository.expire_status_cache if repository.empty?
after_wiki_activity
capture_git_error(:created) do
create_wiki_repository unless repository_exists?
sanitized_path = sluggified_full_path(title, default_extension)
repository.create_file(user, sanitized_path, content, **multi_commit_options(:created, message, title))
repository.expire_status_cache if repository.empty?
after_wiki_activity
true
true
rescue Gitlab::Git::Index::IndexError
raise_duplicate_page_error!
end
end
else
commit = commit_details(:created, message, title)
wiki.write_page(title, format.to_sym, content, commit)
repository.expire_status_cache if repository.empty?
after_wiki_activity
true
end
rescue Gitlab::Git::Wiki::DuplicatePageError => e
@error_message = "Duplicate page: #{e.message}"
@error_message = _("Duplicate page: %{error_message}" % { error_message: e.message })
false
end
@ -393,12 +436,33 @@ class Wiki
yield default_extension
end
def file_exists_by_regex?(title)
return false unless repository_exists?
escaped_title = Regexp.escape(sluggified_title(title))
regex = Regexp.new("^#{escaped_title}\.#{ALLOWED_EXTENSIONS_REGEX}$", 'i')
repository.ls_files('HEAD').any? { |s| s =~ regex }
end
def raise_duplicate_page_error!
raise Gitlab::Git::Wiki::DuplicatePageError, _('A page with that title already exists')
end
def sluggified_full_path(title, extension)
sluggified_title(title) + '.' + extension
end
def sluggified_title(title)
Gitlab::EncodingHelper.encode_utf8_no_detect(title).tr(' ', '-')
utf8_encoded_title = Gitlab::EncodingHelper.encode_utf8_no_detect(title)
sanitized_title(utf8_encoded_title).tr(' ', '-')
end
def sanitized_title(title)
clean_absolute_path = File.expand_path(title, '/')
Pathname.new(clean_absolute_path).relative_path_from('/').to_s
end
end

View File

@ -8,4 +8,5 @@
jobs_path: realtime_changes_import_bulk_imports_path(format: :json),
source_url: @source_url,
group_path_regex: Gitlab::PathRegex::NAMESPACE_FORMAT_REGEX_JS,
history_path: history_import_bulk_imports_path,
group_url_error_message: group_url_error_message } }

View File

@ -3,32 +3,14 @@
%head{ prefix: "og: http://ogp.me/ns#" }
%meta{ charset: "utf-8" }
%title= page_title(site_name)
= render 'layouts/loading_hints'
%meta{ 'http-equiv' => 'X-UA-Compatible', content: 'IE=edge' }
= render 'layouts/startup_js'
-# Open Graph - http://ogp.me/
%meta{ property: 'og:type', content: "object" }
%meta{ property: 'og:site_name', content: site_name }
%meta{ property: 'og:title', content: page_title }
%meta{ property: 'og:description', content: page_description }
%meta{ property: 'og:image', content: page_image }
%meta{ property: 'og:image:width', content: '64' }
%meta{ property: 'og:image:height', content: '64' }
%meta{ property: 'og:url', content: request.base_url + request.fullpath }
-# Twitter Card - https://dev.twitter.com/cards/types/summary
%meta{ property: 'twitter:card', content: "summary" }
%meta{ property: 'twitter:title', content: page_title }
%meta{ property: 'twitter:description', content: page_description }
%meta{ property: 'twitter:image', content: page_image }
= page_card_meta_tags
%title= page_title(site_name)
%meta{ name: "description", content: page_description }
- if page_canonical_link
%link{ rel: 'canonical', href: page_canonical_link }
@ -67,13 +49,32 @@
= yield :project_javascripts
= csrf_meta_tags
= csp_meta_tag
= action_cable_meta_tag
-# Open Graph - http://ogp.me/
%meta{ property: 'og:type', content: "object" }
%meta{ property: 'og:site_name', content: site_name }
%meta{ property: 'og:title', content: page_title }
%meta{ property: 'og:description', content: page_description }
%meta{ property: 'og:image', content: page_image }
%meta{ property: 'og:image:width', content: '64' }
%meta{ property: 'og:image:height', content: '64' }
%meta{ property: 'og:url', content: request.base_url + request.fullpath }
-# Twitter Card - https://dev.twitter.com/cards/types/summary
%meta{ property: 'twitter:card', content: "summary" }
%meta{ property: 'twitter:title', content: page_title }
%meta{ property: 'twitter:description', content: page_description }
%meta{ property: 'twitter:image', content: page_image }
= page_card_meta_tags
%meta{ name: "description", content: page_description }
%meta{ name: 'viewport', content: 'width=device-width, initial-scale=1, maximum-scale=1' }
%meta{ name: 'theme-color', content: user_theme_primary_color }
= csrf_meta_tags
= csp_meta_tag
= action_cable_meta_tag
-# Apple Safari/iOS home screen icons
= favicon_link_tag 'touch-icon-iphone.png', rel: 'apple-touch-icon'
= favicon_link_tag 'touch-icon-ipad.png', rel: 'apple-touch-icon', sizes: '76x76'

View File

@ -1,5 +1,5 @@
%p.details
= sprintf(s_("Notify|%{author_link}'s issue %{issue_reference_link} is due soon."), { author_link: link_to(@issue.author_name, user_url(@issue.author)), issue_reference_link: issue_reference_link(@issue) })
= sprintf(s_("Notify|%{author_link}'s issue %{issue_reference_link} is due soon."), { author_link: link_to(@issue.author_name, user_url(@issue.author)), issue_reference_link: issue_reference_link(@issue) }).html_safe
- if @issue.assignees.any?
%p

View File

@ -27,7 +27,7 @@
- if Feature.enabled?(:user_other_role_details)
.row
.form-group.col-sm-12.js-other-role-group.hidden
= f.label :other_role, _('What is your job title? (optional)'), class: 'form-check-label gl-mb-3'
= f.label :other_role, _('What is your job title? (optional)')
= f.text_field :other_role, class: 'form-control'
= render_if_exists "registrations/welcome/jobs_to_be_done", f: f
= render_if_exists "registrations/welcome/setup_for_company", f: f

View File

@ -0,0 +1,8 @@
---
name: gitaly_replace_wiki_create_page
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/83734
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/356983
milestone: '15.0'
type: development
group: group::editor
default_enabled: false

View File

@ -4,6 +4,8 @@ classes:
- Gitlab::Database::BackgroundMigrationJob
feature_categories:
- database
description: TODO
description: >-
The background_migration_jobs table stores information about the jobs processed during the execution of a background migration.
See https://docs.gitlab.com/ee/development/database/background_migrations.html for more details.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/commit/9af97ee69a36de1dc4e73f4030d6316d3f0a82c5
milestone: '13.2'

View File

@ -4,6 +4,8 @@ classes:
- Gitlab::Database::BackgroundMigration::BatchedJobTransitionLog
feature_categories:
- database
description: TODO
description: >-
The batched_background_migration_job_transition_logs table stores information about the state transitions of batched background migration jobs.
Every time a batched background migration job changes to a new state, the system records that information in this table.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75113
milestone: '14.8'

View File

@ -4,6 +4,7 @@ classes:
- Gitlab::Database::BackgroundMigration::BatchedJob
feature_categories:
- database
description: TODO
description: >-
The batched_background_migration_jobs table stores information about the jobs created during the execution of a batched background migration.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/54628
milestone: '13.10'

View File

@ -4,6 +4,8 @@ classes:
- Gitlab::Database::BackgroundMigration::BatchedMigration
feature_categories:
- database
description: TODO
description: >-
The batched_background_migrations table stores information about the batched background migrations present in the system.
See https://docs.gitlab.com/ee/development/batched_background_migrations.html for more details.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/54628
milestone: '13.10'

View File

@ -1110,3 +1110,46 @@ As noted in [this authentication issue](https://github.com/git-lfs/git-lfs/issue
requests redirected from the secondary to the primary node do not properly send the
Authorization header. This may result in either an infinite `Authorization <-> Redirect`
loop, or Authorization error messages.
## Recovering from a partial failover
The partial failover to a secondary Geo *site* may be the result of a temporary/transient issue. Therefore, first attempt to run the promote command again.
1. SSH into every Sidekiq, PostgreSQL, Gitaly, and Rails node in the **secondary** site and run one of the following commands:
- To promote the secondary node to primary:
```shell
sudo gitlab-ctl geo promote
```
- To promote the secondary node to primary **without any further confirmation**:
```shell
sudo gitlab-ctl geo promote --force
```
1. Verify you can connect to the newly-promoted **primary** site using the URL used previously for the **secondary** site.
1. If **successful**, the **secondary** site is now promoted to the **primary** site.
If the above steps are **not successful**, proceed through the next steps:
1. SSH into every Sidekiq, PostgreSQL, Gitaly, and Rails node in the **secondary** site and perform the following operations:
- Create a `/etc/gitlab/gitlab-cluster.json` file with the following content:
```json
{
"primary": true,
"secondary": false
}
```
- Reconfigure GitLab for the changes to take effect:
```shell
sudo gitlab-ctl reconfigure
```
1. Verify you can connect to the newly-promoted **primary** site using the URL used previously for the **secondary** site.
1. If successful, the **secondary** site is now promoted to the **primary** site.
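For example, a minimal post-promotion check might look like the following. This is a generic sketch: `gitlab.example.com` is a placeholder for the URL previously used for the **secondary** site.

```shell
# Confirm all GitLab services are running on the promoted site
sudo gitlab-ctl status

# Confirm the site responds at the URL previously used for the secondary site
curl --head https://gitlab.example.com/users/sign_in
```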

View File

@ -369,7 +369,7 @@ listed in the descriptions of the relevant settings.
| `max_attachment_size` | integer | no | Limit attachment size in MB. |
| `max_import_size` | integer | no | Maximum import size in MB. 0 for unlimited. Default = 0 (unlimited) [Modified](https://gitlab.com/gitlab-org/gitlab/-/issues/251106) from 50MB to 0 in GitLab 13.8. |
| `max_pages_size` | integer | no | Maximum size of pages repositories in MB. |
| `max_personal_access_token_lifetime` **(ULTIMATE SELF)** | integer | no | Maximum allowable lifetime for personal access tokens in days. |
| `max_personal_access_token_lifetime` **(ULTIMATE SELF)** | integer | no | Maximum allowable lifetime for access tokens in days. |
| `max_ssh_key_lifetime` **(ULTIMATE SELF)** | integer | no | Maximum allowable lifetime for SSH keys in days. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/1007) in GitLab 14.6. |
| `metrics_method_call_threshold` | integer | no | A method call is only tracked when it takes longer than the given amount of milliseconds. |
| `mirror_available` | boolean | no | Allow repository mirroring to be configured by project Maintainers. If disabled, only Administrators can configure repository mirroring. |
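For example, the lifetime limit above can be updated with a standard settings request. A minimal sketch (assumes an administrator token; `30` and `gitlab.example.com` are placeholder values):

```shell
curl --request PUT \
  --header "PRIVATE-TOKEN: <your_access_token>" \
  "https://gitlab.example.com/api/v4/application/settings?max_personal_access_token_lifetime=30"
```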

View File

@ -164,7 +164,7 @@ Use lowercase for **boards**, **issue boards**, and **epic boards**.
Use **text box** to refer to the UI field. Do not use **field** or **box**. For example:
- In the **Variable name** text box, enter `my text`.
- In the **Variable name** text box, enter a value.
## button
@ -318,7 +318,22 @@ Use **active** or **on** instead. ([Vale](../testing.md#vale) rule: [`InclusionA
## enter
Use **enter** instead of **type** when talking about putting values into text boxes.
In most cases, use **enter** rather than **type**.
- **Enter** encompasses multiple ways to enter information, including speech and keyboard.
- **Enter** assumes that the user puts a value in a field and then moves the cursor outside the field (or presses <kbd>Enter</kbd>).
**Enter** includes both the entering of the content and the action to validate the content.
For example:
- In the **Variable name** text box, enter a value.
- In the **Variable name** text box, enter `my text`.
When you use **Enter** to refer to the key on a keyboard, use the HTML `<kbd>` tag:
- To view the list of results, press <kbd>Enter</kbd>.
See also [**type**](#type).
## epic
@ -356,7 +371,7 @@ Use **box** instead of **field** or **text box**.
Use:
- In the **Variable name** box, enter `my text`.
- In the **Variable name** text box, enter `my text`.
Instead of:
@ -950,7 +965,17 @@ Use [**2FA** and **two-factor authentication**](#2fa-two-factor-authentication)
## type
Do not use **type** if you can avoid it. Use **enter** instead.
Use **type** when the cursor remains in the field you're typing in. For example,
in a search dialog, you begin typing and the field populates results. You do not
click out of the field.
For example:
- To view all users named Alex, type `Al`.
- To view all labels for the documentation team, type `doc`.
- For a list of quick actions, type `/`.
See also [**enter**](#enter).
## update

View File

@ -40,7 +40,7 @@ To access the Credentials inventory:
If you see a **Revoke** button, you can revoke that user's PAT. Whether you see a **Revoke** button depends on the token state and whether an expiration date has been set. For more information, see the following table:
| Token state | [Token expiration enforced?](settings/account_and_limit_settings.md#allow-expired-personal-access-tokens-to-be-used-deprecated) | Show Revoke button? | Comments |
| Token state | [Token expiration enforced?](settings/account_and_limit_settings.md#allow-expired-access-tokens-to-be-used-deprecated) | Show Revoke button? | Comments |
|-------------|------------------------|--------------------|----------------------------------------------------------------------------|
| Active | Yes | Yes | Allows administrators to revoke the PAT, such as for a compromised account |
| Active | No | Yes | Allows administrators to revoke the PAT, such as for a compromised account |

View File

@ -249,15 +249,16 @@ To allow the use of expired SSH keys:
Disabling SSH key expiration immediately enables all expired SSH keys.
## Limit the lifetime of personal access tokens **(ULTIMATE SELF)**
## Limit the lifetime of access tokens **(ULTIMATE SELF)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/3649) in GitLab 12.6.
Users can optionally specify a lifetime for
[personal access tokens](../../profile/personal_access_tokens.md).
access tokens. This includes [personal](../../profile/personal_access_tokens.md),
[group](../../group/settings/group_access_tokens.md), and [project](../../project/settings/project_access_tokens.md) access tokens.
This lifetime is not a requirement, and can be set to any arbitrary number of days.
Personal access tokens are the only tokens needed for programmatic access to GitLab.
Access tokens are the only tokens needed for programmatic access to GitLab.
However, organizations with security requirements may want to enforce more protection by
requiring the regular rotation of these tokens.
@ -266,15 +267,15 @@ requiring the regular rotation of these tokens.
Only a GitLab administrator can set a lifetime. Leaving it empty means
there are no restrictions.
To set a lifetime on how long personal access tokens are valid:
To set a lifetime on how long access tokens are valid:
1. On the top bar, select **Menu > Admin**.
1. On the left sidebar, select **Settings > General**.
1. Expand the **Account and limit** section.
1. Fill in the **Maximum allowable lifetime for personal access tokens (days)** field.
1. Fill in the **Maximum allowable lifetime for access tokens (days)** field.
1. Click **Save changes**.
Once a lifetime for personal access tokens is set, GitLab:
Once a lifetime for access tokens is set, GitLab:
- Applies the lifetime for new personal access tokens, and requires users to set an expiration date
that is no later than the allowed lifetime.
@ -282,7 +283,7 @@ Once a lifetime for personal access tokens is set, GitLab:
allowed lifetime. Three hours is given to allow administrators to change the allowed lifetime,
or remove it, before revocation takes place.
## Allow expired Personal Access Tokens to be used (DEPRECATED) **(ULTIMATE SELF)**
## Allow expired access tokens to be used (DEPRECATED) **(ULTIMATE SELF)**
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/214723) in GitLab 13.1.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/296881) in GitLab 13.9.
@ -298,7 +299,7 @@ To allow the use of expired PATs:
1. On the top bar, select **Menu > Admin**.
1. On the left sidebar, select **Settings > General**.
1. Expand the **Account and limit** section.
1. Uncheck the **Enforce personal access token expiration** checkbox.
1. Uncheck the **Enforce access token expiration** checkbox.
## Disable user profile name changes **(PREMIUM SELF)**

View File

@ -107,14 +107,14 @@ Since personal access tokens are the only token needed for programmatic access t
### Set a limit
Only a GitLab administrator or an owner of a group-managed account can set a limit. When this field
is left empty, the [instance-level restriction](../../admin_area/settings/account_and_limit_settings.md#limit-the-lifetime-of-personal-access-tokens)
is left empty, the [instance-level restriction](../../admin_area/settings/account_and_limit_settings.md#limit-the-lifetime-of-access-tokens)
on the lifetime of personal access tokens applies.
To set a limit on how long personal access tokens are valid for users in a group managed account:
1. Navigate to the **Settings > General** page in your group's sidebar.
1. Expand the **Permissions and group features** section.
1. Fill in the **Maximum allowable lifetime for personal access tokens (days)** field.
1. Fill in the **Maximum allowable lifetime for access tokens (days)** field.
1. Click **Save changes**.
Once a lifetime for personal access tokens is set:

View File

@ -25,7 +25,7 @@ Group access tokens are similar to [project access tokens](../../project/setting
and [personal access tokens](../../profile/personal_access_tokens.md), except they are
associated with a group rather than a project or user.
In self-managed instances, group access tokens are subject to the same [maximum lifetime limits](../../admin_area/settings/account_and_limit_settings.md#limit-the-lifetime-of-personal-access-tokens) as personal access tokens if the limit is set.
In self-managed instances, group access tokens are subject to the same [maximum lifetime limits](../../admin_area/settings/account_and_limit_settings.md#limit-the-lifetime-of-access-tokens) as personal access tokens if the limit is set.
You can use group access tokens:
@ -50,7 +50,7 @@ To create a group access token:
1. On the top bar, select **Menu > Groups** and find your group.
1. On the left sidebar, select **Settings > Access Tokens**.
1. Enter a name. The token name is visible to any user with permissions to view the group.
1. Optional. Enter an expiry date for the token. The token will expire on that date at midnight UTC. An instance-wide [maximum lifetime](../../admin_area/settings/account_and_limit_settings.md#limit-the-lifetime-of-personal-access-tokens) setting can limit the maximum allowable lifetime in self-managed instances.
1. Optional. Enter an expiry date for the token. The token will expire on that date at midnight UTC. An instance-wide [maximum lifetime](../../admin_area/settings/account_and_limit_settings.md#limit-the-lifetime-of-access-tokens) setting can limit the maximum allowable lifetime in self-managed instances.
1. Select a role for the token.
1. Select the [desired scopes](#scopes-for-a-group-access-token).
1. Select **Create group access token**.

View File

@ -109,9 +109,9 @@ Personal access tokens expire on the date you define, at midnight UTC.
- GitLab runs a check at 01:00 AM UTC every day to identify personal access tokens that expire in the next seven days. The owners of these tokens are notified by email.
- GitLab runs a check at 02:00 AM UTC every day to identify personal access tokens that expire on the current date. The owners of these tokens are notified by email.
- In GitLab Ultimate, administrators can
[limit the lifetime of personal access tokens](../admin_area/settings/account_and_limit_settings.md#limit-the-lifetime-of-personal-access-tokens).
[limit the lifetime of access tokens](../admin_area/settings/account_and_limit_settings.md#limit-the-lifetime-of-access-tokens).
- In GitLab Ultimate, administrators can choose whether or not to
[enforce personal access token expiration](../admin_area/settings/account_and_limit_settings.md#allow-expired-personal-access-tokens-to-be-used-deprecated).
[enforce access token expiration](../admin_area/settings/account_and_limit_settings.md#allow-expired-access-tokens-to-be-used-deprecated).
## Create a personal access token programmatically **(FREE SELF)**

View File

@ -20,6 +20,52 @@ can enable in a user-friendly interface. They are defined either:
- Per project, so you can have different rules applied to different
projects depending on your needs.
## Default push rules
The following options are available:
- **Reject unverified users** - GitLab rejects any commit that was not committed
by the same user as the user who pushed it, or where the committer's email address
is not [confirmed](../../../security/user_email_confirmation.md).
- **Reject unsigned commits** - Reject commit when it is not signed through GPG.
Read [signing commits with GPG](gpg_signed_commits/index.md).
- **Removal of tags with** `git push` - Forbid users from removing Git tags with `git push`.
Tags can be deleted through the web UI.
- **Check whether the commit author is a GitLab user** - Restrict commits to existing
GitLab users (checked against their email addresses). Checks both the commit author and committer.
- **Prevent pushing secret files** - GitLab rejects any files that are
[likely to contain secrets](#prevent-pushing-secrets-to-the-repository).
These push rules require you to create a regular expression for the rule to evaluate:
- **Require expression in commit messages** - Only commit messages that match this
regular expression can be pushed. To allow any commit message, leave empty.
Uses multiline mode, which can be disabled using `(?-m)`.
- **Reject expression in commit messages** - Only commit messages that do not match
this regular expression can be pushed. To allow any commit message, leave empty.
Uses multiline mode, which can be disabled using `(?-m)`.
- **Restrict by branch name** - Only branch names that match this regular expression
can be pushed. To allow any branch name, leave empty.
- **Restrict by commit author's email** - Commits can be pushed only if the commit author's email address matches this
regular expression. Checks both the commit author and committer.
To allow any email address, leave empty.
- **Prohibited file names** - Any committed file names that match this regular expression
and do not already exist in the repository can't be pushed. To allow all file names,
leave empty. See [common examples](#prohibited-file-names).
- **Maximum file size** - Pushes that contain added or updated files that exceed this
file size (in MB) are rejected. To allow files of any size, set to `0`.
Files tracked by Git LFS are exempted.
GitLab uses [RE2 syntax](https://github.com/google/re2/wiki/Syntax) for regular expressions
in push rules, and you can test them at the [regex101 regex tester](https://regex101.com/).
## Custom push rules **(PREMIUM SELF)**
It's possible to create custom push rules rather than the push rules available in
**Admin Area > Push Rules** by using more advanced server hooks.
See [server hooks](../../../administration/server_hooks.md) for more information.
## Use cases
Every push rule could have its own use case, but let's consider some examples.
@ -72,13 +118,6 @@ Some example regular expressions you can use in push rules:
By default, GitLab restricts certain formats of branch names for security purposes.
40-character hexadecimal names, similar to Git commit hashes, are prohibited.
### Custom Push Rules **(PREMIUM SELF)**
It's possible to create custom push rules rather than the push rules available in
**Admin Area > Push Rules** by using more advanced server hooks.
See [server hooks](../../../administration/server_hooks.md) for more information.
## Enabling push rules
You can create push rules for all new projects to inherit, but they can be overridden
@ -97,25 +136,6 @@ To override global push rules in a project's settings:
1. Set the rule you want.
1. Select **Save push rules**.
The following options are available:
| Push rule | Description |
|---------------------------------|-------------|
| Removal of tags with `git push` | Forbid users to remove Git tags with `git push`. Tags can be deleted through the web UI. |
| Check whether the commit author is a GitLab user | Restrict commits to existing GitLab users (checked against their emails). <sup>1</sup> |
| Reject unverified users | GitLab rejects any commit that was not committed by the same user as the user who pushed it, or where the committer's email address is not [confirmed](../../../security/user_email_confirmation.md). |
| Check whether commit is signed through GPG | Reject commit when it is not signed through GPG. Read [signing commits with GPG](gpg_signed_commits/index.md). |
| Prevent pushing secret files | GitLab rejects any files that are likely to contain secrets. See the [forbidden file names](#prevent-pushing-secrets-to-the-repository). |
| Require expression in commit messages | Only commit messages that match this regular expression are allowed to be pushed. <sup>2</sup> Leave empty to allow any commit message. Uses multiline mode, which can be disabled using `(?-m)`. |
| Reject expression in commit messages | Only commit messages that do not match this regular expression are allowed to be pushed. <sup>2</sup> Leave empty to allow any commit message. Uses multiline mode, which can be disabled using `(?-m)`. |
| Restrict by branch name | Only branch names that match this regular expression are allowed to be pushed. <sup>2</sup> Leave empty to allow all branch names. |
| Restrict by commit author's email | Only commit author's email that match this regular expression are allowed to be pushed. <sup>1</sup> <sup>2</sup> Leave empty to allow any email. |
| Prohibited file names | Any committed filenames that match this regular expression and do not already exist in the repository are not allowed to be pushed. <sup>2</sup> Leave empty to allow any filenames. See [common examples](#prohibited-file-names). |
| Maximum file size | Pushes that contain added or updated files that exceed this file size (in MB) are rejected. Set to 0 to allow files of any size. Files tracked by Git LFS are exempted. |
1. Checks both the commit author and committer.
1. GitLab uses [RE2 syntax](https://github.com/google/re2/wiki/Syntax) for regular expressions in push rules, and you can test them at the [regex101 regex tester](https://regex101.com/).
### Caveat to "Reject unsigned commits" push rule
This push rule ignores commits that are authenticated and created by GitLab

View File

@ -25,7 +25,7 @@ Use a project access token to authenticate:
Project access tokens are similar to [group access tokens](../../group/settings/group_access_tokens.md)
and [personal access tokens](../../profile/personal_access_tokens.md).
In self-managed instances, project access tokens are subject to the same [maximum lifetime limits](../../admin_area/settings/account_and_limit_settings.md#limit-the-lifetime-of-personal-access-tokens) as personal access tokens if the limit is set.
In self-managed instances, project access tokens are subject to the same [maximum lifetime limits](../../admin_area/settings/account_and_limit_settings.md#limit-the-lifetime-of-access-tokens) as personal access tokens if the limit is set.
You can use project access tokens:
@ -48,7 +48,7 @@ To create a project access token:
1. On the top bar, select **Menu > Projects** and find your project.
1. On the left sidebar, select **Settings > Access Tokens**.
1. Enter a name. The token name is visible to any user with permissions to view the project.
1. Optional. Enter an expiry date for the token. The token expires on that date at midnight UTC. An instance-wide [maximum lifetime](../../admin_area/settings/account_and_limit_settings.md#limit-the-lifetime-of-personal-access-tokens) setting can limit the maximum allowable lifetime in self-managed instances.
1. Optional. Enter an expiry date for the token. The token expires on that date at midnight UTC. An instance-wide [maximum lifetime](../../admin_area/settings/account_and_limit_settings.md#limit-the-lifetime-of-access-tokens) setting can limit the maximum allowable lifetime in self-managed instances.
1. Select a role for the token.
1. Select the [desired scopes](#scopes-for-a-project-access-token).

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
module BulkImports
module Projects
module Pipelines
class ReleasesPipeline
include NdjsonPipeline
relation_name 'releases'
extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: relation
end
end
end
end

View File

@ -79,6 +79,10 @@ module BulkImports
pipeline: BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline,
stage: 4
},
releases: {
pipeline: BulkImports::Projects::Pipelines::ReleasesPipeline,
stage: 4
},
wiki: {
pipeline: BulkImports::Common::Pipelines::WikiPipeline,
stage: 5

View File

@ -1610,6 +1610,9 @@ msgstr ""
msgid "A non-confidential epic cannot be assigned to a confidential parent epic"
msgstr ""
msgid "A page with that title already exists"
msgstr ""
msgid "A plain HTML site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features"
msgstr ""
@ -6411,6 +6414,9 @@ msgstr ""
msgid "BulkImport|Group import history"
msgstr ""
msgid "BulkImport|History"
msgstr ""
msgid "BulkImport|Import failed: Destination cannot be a subgroup of the source group. Change the destination and try again."
msgstr ""
@ -13491,6 +13497,9 @@ msgstr ""
msgid "Due date"
msgstr ""
msgid "Duplicate page: %{error_message}"
msgstr ""
msgid "Duration"
msgstr ""
@ -14094,7 +14103,7 @@ msgstr ""
msgid "Enforce SSH key expiration"
msgstr ""
msgid "Enforce personal access token expiration"
msgid "Enforce access token expiration"
msgstr ""
msgid "Enforce two-factor authentication"
@ -23360,6 +23369,9 @@ msgstr ""
msgid "Maximum Users"
msgstr ""
msgid "Maximum allowable lifetime for access token (days)"
msgstr ""
msgid "Maximum allowable lifetime for personal access token (days)"
msgstr ""
@ -33581,9 +33593,6 @@ msgstr ""
msgid "SecurityOrchestration|All policies"
msgstr ""
msgid "SecurityOrchestration|Allow all inbound traffic to all pods from all pods on ports 443/TCP."
msgstr ""
msgid "SecurityOrchestration|An error occurred assigning your security policy project"
msgstr ""
@ -33638,9 +33647,6 @@ msgstr ""
msgid "SecurityOrchestration|If any scanner finds a newly detected critical vulnerability in an open merge request targeting the master branch, then require two approvals from any member of App security."
msgstr ""
msgid "SecurityOrchestration|If you are using Auto DevOps, your %{monospacedStart}auto-deploy-values.yaml%{monospacedEnd} file will not be updated if you change a policy in this section. Auto DevOps users should make changes by following the %{linkStart}Container Network Policy documentation%{linkEnd}."
msgstr ""
msgid "SecurityOrchestration|Invalid policy type"
msgstr ""
@ -33650,9 +33656,6 @@ msgstr ""
msgid "SecurityOrchestration|Network"
msgstr ""
msgid "SecurityOrchestration|Network policy"
msgstr ""
msgid "SecurityOrchestration|New network policy"
msgstr ""
@ -33818,9 +33821,6 @@ msgstr ""
msgid "SecurityOrchestration|Use a scan result policy to create rules that ensure security issues are checked before merging a merge request."
msgstr ""
msgid "SecurityOrchestration|Use network policies to create firewall rules for network connections in your Kubernetes cluster."
msgstr ""
msgid "SecurityOrchestration|View policy project"
msgstr ""
@ -33878,9 +33878,6 @@ msgstr ""
msgid "SecurityPolicies|+%{count} more"
msgstr ""
msgid "SecurityPolicies|Environment(s)"
msgstr ""
msgid "SecurityPolicies|Policy type"
msgstr ""
@ -42553,7 +42550,7 @@ msgstr ""
msgid "When enabled, cleanup polices execute faster but put more load on Redis."
msgstr ""
msgid "When enabled, existing personal access tokens may be revoked. Leave blank for no limit."
msgid "When enabled, existing access tokens may be revoked. Leave blank for no limit."
msgstr ""
msgid "When enabled, job logs are collected by Datadog and displayed along with pipeline execution traces."
@ -42603,7 +42600,7 @@ msgstr ""
msgid "Who will be using this group?"
msgstr ""
msgid "Why are you signing up? (Optional)"
msgid "Why are you signing up? (optional)"
msgstr ""
msgid "Wiki"

View File

@ -99,7 +99,7 @@
"autosize": "^5.0.1",
"aws-sdk": "^2.637.0",
"axios": "^0.24.0",
"babel-loader": "^8.2.4",
"babel-loader": "^8.2.5",
"babel-plugin-lodash": "^3.3.4",
"bootstrap": "4.5.3",
"cache-loader": "^4.1.0",
@ -259,7 +259,7 @@
"webpack-dev-server": "4.8.1",
"xhr-mock": "^2.5.1",
"yarn-check-webpack-plugin": "^1.2.0",
"yarn-deduplicate": "^4.0.0"
"yarn-deduplicate": "^5.0.0"
},
"blockedDependencies": {
"bootstrap-vue": "https://docs.gitlab.com/ee/development/fe_guide/dependencies.html#bootstrapvue"

View File

@ -24,7 +24,6 @@ module QA
Resource::MergeRequest.fabricate_via_browser_ui! do |merge_request|
merge_request.project = project
merge_request.title = merge_request_title
merge_request.assignee = 'me'
merge_request.description = merge_request_description
end
@ -54,7 +53,6 @@ module QA
merge_request.description = merge_request_description
merge_request.project = project
merge_request.milestone = milestone
merge_request.assignee = 'me'
merge_request.labels.push(label)
end

View File

@ -304,6 +304,8 @@ FactoryBot.define do
trait :wiki_repo do
after(:create) do |project|
stub_feature_flags(main_branch_over_master: false)
raise 'Failed to create wiki repository!' unless project.create_wiki
end
end

View File

@ -25,6 +25,20 @@ RSpec.describe 'User creates branch and merge request on issue page', :js do
sign_in(user)
end
context 'when Create merge request button is clicked' do
before do
visit project_issue_path(project, issue)
wait_for_requests
click_button('Create merge request')
wait_for_requests
end
it_behaves_like 'merge request author auto assign'
end
context 'when interacting with the dropdown' do
before do
visit project_issue_path(project, issue)

View File

@ -15,28 +15,40 @@ RSpec.describe "User creates a merge request", :js do
sign_in(user)
end
it "creates a merge request" do
visit(project_new_merge_request_path(project))
context 'when completed the compare branches form' do
before do
visit(project_new_merge_request_path(project))
find(".js-source-branch").click
click_link("fix")
find(".js-source-branch").click
click_link("fix")
find(".js-target-branch").click
click_link("feature")
find(".js-target-branch").click
click_link("feature")
click_button("Compare branches")
page.within('.merge-request-form') do
expect(page.find('#merge_request_title')['placeholder']).to eq 'Title'
expect(page.find('#merge_request_description')['placeholder']).to eq 'Describe the goal of the changes and what reviewers should be aware of.'
click_button("Compare branches")
end
fill_in("Title", with: title)
click_button("Create merge request")
page.within(".merge-request") do
expect(page).to have_content(title)
it "shows merge request form" do
page.within('.merge-request-form') do
expect(page.find('#merge_request_title')['placeholder']).to eq 'Title'
expect(page.find('#merge_request_description')['placeholder']).to eq 'Describe the goal of the changes and what reviewers should be aware of.'
end
end
context "when completed the merge request form" do
before do
fill_in("Title", with: title)
click_button("Create merge request")
end
it "creates a merge request" do
page.within(".merge-request") do
expect(page).to have_content(title)
end
end
end
it_behaves_like 'merge request author auto assign'
end
context "XSS branch name exists" do

View File

@ -70,6 +70,7 @@ describe('import table', () => {
groupPathRegex: /.*/,
jobsPath: '/fake_job_path',
sourceUrl: SOURCE_URL,
historyPath: '/fake_history_path',
},
apolloProvider,
});

View File

@ -26,6 +26,7 @@ import { isLoggedIn } from '~/lib/utils/common_utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import httpStatusCodes from '~/lib/utils/http_status';
import LineHighlighter from '~/blob/line_highlighter';
import { LEGACY_FILE_TYPES } from '~/repository/constants';
import {
simpleViewerMock,
richViewerMock,
@ -195,6 +196,14 @@ describe('Blob content viewer component', () => {
expect(mockAxios.history.get[0].url).toBe(legacyViewerUrl);
});
it.each(LEGACY_FILE_TYPES)(
'loads the legacy viewer when a file type is identified as legacy',
async (type) => {
await createComponent({ blob: { ...simpleViewerMock, fileType: type, webPath: type } });
expect(mockAxios.history.get[0].url).toBe(`${type}?format=json&viewer=simple`);
},
);
it('loads the LineHighlighter', async () => {
mockAxios.onGet(legacyViewerUrl).replyOnce(httpStatusCodes.OK, 'test');
await createComponent({ blob: { ...simpleViewerMock, fileType, highlightJs } });

View File

@ -0,0 +1,122 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:entity) do
create(
:bulk_import_entity,
:project_entity,
project: project,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'My Destination Project',
destination_namespace: group.full_path
)
end
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:attributes) { {} }
let(:release) do
{
'tag' => '1.1',
'name' => 'release 1.1',
'description' => 'Release notes',
'created_at' => '2019-12-26T10:17:14.621Z',
'updated_at' => '2019-12-26T10:17:14.621Z',
'released_at' => '2019-12-26T10:17:14.615Z',
'sha' => '901de3a8bd5573f4a049b1457d28bc1592ba6bf9'
}.merge(attributes)
end
subject(:pipeline) { described_class.new(context) }
describe '#run' do
before do
group.add_owner(user)
with_index = [release, 0]
allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [with_index]))
end
pipeline.run
end
it 'imports release into destination project' do
expect(project.releases.count).to eq(1)
imported_release = project.releases.last
aggregate_failures do
expect(imported_release.tag).to eq(release['tag'])
expect(imported_release.name).to eq(release['name'])
expect(imported_release.description).to eq(release['description'])
expect(imported_release.created_at.to_s).to eq('2019-12-26 10:17:14 UTC')
expect(imported_release.updated_at.to_s).to eq('2019-12-26 10:17:14 UTC')
expect(imported_release.released_at.to_s).to eq('2019-12-26 10:17:14 UTC')
expect(imported_release.sha).to eq(release['sha'])
end
end
context 'links' do
let(:link) do
{
'url' => 'http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download',
'name' => 'release-1.1.dmg',
'created_at' => '2019-12-26T10:17:14.621Z',
'updated_at' => '2019-12-26T10:17:14.621Z'
}
end
let(:attributes) {{ 'links' => [link] }}
it 'restores release links' do
release_link = project.releases.last.links.first
aggregate_failures do
expect(release_link.url).to eq(link['url'])
expect(release_link.name).to eq(link['name'])
expect(release_link.created_at.to_s).to eq('2019-12-26 10:17:14 UTC')
expect(release_link.updated_at.to_s).to eq('2019-12-26 10:17:14 UTC')
end
end
end
context 'milestones' do
let(:milestone) do
{
'iid' => 1,
'state' => 'closed',
'title' => 'test milestone',
'description' => 'test milestone',
'due_date' => '2016-06-14',
'created_at' => '2016-06-14T15:02:04.415Z',
'updated_at' => '2016-06-14T15:02:04.415Z'
}
end
let(:attributes) {{ 'milestone_releases' => [{ 'milestone' => milestone }] }}
it 'restores release milestone' do
release_milestone = project.releases.last.milestone_releases.first.milestone
aggregate_failures do
expect(release_milestone.iid).to eq(milestone['iid'])
expect(release_milestone.state).to eq(milestone['state'])
expect(release_milestone.title).to eq(milestone['title'])
expect(release_milestone.description).to eq(milestone['description'])
expect(release_milestone.due_date.to_s).to eq('2016-06-14')
expect(release_milestone.created_at.to_s).to eq('2016-06-14 15:02:04 UTC')
expect(release_milestone.updated_at.to_s).to eq('2016-06-14 15:02:04 UTC')
end
end
end
end
end

View File

@ -24,6 +24,7 @@ RSpec.describe BulkImports::Projects::Stage do
[4, BulkImports::Projects::Pipelines::ProjectFeaturePipeline],
[4, BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline],
[4, BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline],
[4, BulkImports::Projects::Pipelines::ReleasesPipeline],
[5, BulkImports::Common::Pipelines::WikiPipeline],
[5, BulkImports::Common::Pipelines::UploadsPipeline],
[5, BulkImports::Common::Pipelines::LfsObjectsPipeline],

View File

@ -123,7 +123,7 @@ RSpec.describe Integrations::MicrosoftTeams do
{
title: "Awesome wiki_page",
content: "Some text describing some thing or another",
format: "md",
format: :markdown,
message: "user created page: Awesome wiki_page"
}
end

View File

@ -4,7 +4,7 @@ RSpec.shared_examples 'multiple assignees merge request' do |action, save_button
it "#{action} a MR with multiple assignees", :js do
find('.js-assignee-search').click
page.within '.dropdown-menu-user' do
click_link user.name
click_link user.name unless action == 'creates'
click_link user2.name
end

View File

@ -4,7 +4,7 @@ RSpec.shared_examples 'multiple assignees widget merge request' do |action, save
it "#{action} a MR with multiple assignees", :js do
find('.js-assignee-search').click
page.within '.dropdown-menu-user' do
click_link user.name
click_link user.name unless action == 'creates'
click_link user2.name
end

View File

@ -0,0 +1,8 @@
# frozen_string_literal: true
RSpec.shared_examples 'merge request author auto assign' do
it 'populates merge request author as assignee' do
expect(find('.js-assignee-search')).to have_content(user.name)
expect(page).not_to have_content 'Assign yourself'
end
end

View File

@ -199,7 +199,7 @@ RSpec.shared_examples "chat integration" do |integration_name|
{
title: "Awesome wiki_page",
content: "Some text describing some thing or another",
format: "md",
format: :markdown,
message: "user created page: Awesome wiki_page"
}
end

View File

@ -392,41 +392,161 @@ RSpec.shared_examples 'wiki model' do
end
describe '#create_page' do
it 'creates a new wiki page' do
expect(subject.create_page('test page', 'this is content')).not_to eq(false)
expect(subject.list_pages.count).to eq(1)
shared_examples 'create_page tests' do
it 'creates a new wiki page' do
expect(subject.create_page('test page', 'this is content')).not_to eq(false)
expect(subject.list_pages.count).to eq(1)
end
it 'returns false when a duplicate page exists' do
subject.create_page('test page', 'content')
expect(subject.create_page('test page', 'content')).to eq(false)
end
it 'stores an error message when a duplicate page exists' do
2.times { subject.create_page('test page', 'content') }
expect(subject.error_message).to match(/Duplicate page:/)
end
it 'sets the correct commit message' do
subject.create_page('test page', 'some content', :markdown, 'commit message')
expect(subject.list_pages.first.page.version.message).to eq('commit message')
end
it 'sets the correct commit email' do
subject.create_page('test page', 'content')
expect(user.commit_email).not_to eq(user.email)
expect(commit.author_email).to eq(user.commit_email)
expect(commit.committer_email).to eq(user.commit_email)
end
it 'runs after_wiki_activity callbacks' do
expect(subject).to receive(:after_wiki_activity)
subject.create_page('Test Page', 'This is content')
end
it 'cannot create two pages with the same title but different format' do
subject.create_page('test page', 'content', :markdown)
subject.create_page('test page', 'content', :rdoc)
expect(subject.error_message).to match(/Duplicate page:/)
end
it 'cannot create two pages with the same title but different capitalization' do
subject.create_page('test page', 'content')
subject.create_page('Test page', 'content')
expect(subject.error_message).to match(/Duplicate page:/)
end
it 'cannot create two pages with the same title, different capitalization, and different format' do
subject.create_page('test page', 'content')
subject.create_page('Test page', 'content', :rdoc)
expect(subject.error_message).to match(/Duplicate page:/)
end
end
it 'returns false when a duplicate page exists' do
subject.create_page('test page', 'content')
it_behaves_like 'create_page tests' do
it 'returns false if a page exists already in the repository', :aggregate_failures do
subject.create_page('test page', 'content')
expect(subject.create_page('test page', 'content')).to eq(false)
allow(subject).to receive(:file_exists_by_regex?).and_return(false)
expect(subject.create_page('test page', 'content')).to eq false
expect(subject.error_message).to match(/Duplicate page:/)
end
it 'returns false if it has an invalid format', :aggregate_failures do
expect(subject.create_page('test page', 'content', :foobar)).to eq false
expect(subject.error_message).to match(/Invalid format selected/)
end
using RSpec::Parameterized::TableSyntax
where(:new_file, :format, :existing_repo_files, :success) do
'foo' | :markdown | [] | true
'foo' | :rdoc | [] | true
'foo' | :asciidoc | [] | true
'foo' | :org | [] | true
'foo' | :textile | [] | false
'foo' | :creole | [] | false
'foo' | :rest | [] | false
'foo' | :mediawiki | [] | false
'foo' | :pod | [] | false
'foo' | :plaintext | [] | false
'foo' | :markdown | ['foo.md'] | false
'foo' | :markdown | ['foO.md'] | false
'foO' | :markdown | ['foo.md'] | false
'foo' | :markdown | ['foo.mdfoo'] | true
'foo' | :markdown | ['foo.markdown'] | false
'foo' | :markdown | ['foo.mkd'] | false
'foo' | :markdown | ['foo.mkdn'] | false
'foo' | :markdown | ['foo.mdown'] | false
'foo' | :markdown | ['foo.adoc'] | false
'foo' | :markdown | ['foo.asciidoc'] | false
'foo' | :markdown | ['foo.org'] | false
'foo' | :markdown | ['foo.rdoc'] | false
'foo' | :markdown | ['foo.textile'] | false
'foo' | :markdown | ['foo.creole'] | false
'foo' | :markdown | ['foo.rest'] | false
'foo' | :markdown | ['foo.rest.txt'] | false
'foo' | :markdown | ['foo.rst'] | false
'foo' | :markdown | ['foo.rst.txt'] | false
'foo' | :markdown | ['foo.rst.txtfoo'] | true
'foo' | :markdown | ['foo.mediawiki'] | false
'foo' | :markdown | ['foo.wiki'] | false
'foo' | :markdown | ['foo.pod'] | false
'foo' | :markdown | ['foo.txt'] | false
'foo' | :markdown | ['foo.Md'] | false
'foo' | :markdown | ['foo.jpg'] | true
'foo' | :rdoc | ['foo.md'] | false
'foo' | :rdoc | ['foO.md'] | false
'foO' | :rdoc | ['foo.md'] | false
'foo' | :asciidoc | ['foo.md'] | false
'foo' | :org | ['foo.md'] | false
'foo' | :markdown | ['dir/foo.md'] | true
'/foo' | :markdown | ['foo.md'] | false
'./foo' | :markdown | ['foo.md'] | false
'../foo' | :markdown | ['foo.md'] | false
'../../foo' | :markdown | ['foo.md'] | false
'../../foo' | :markdown | ['dir/foo.md'] | true
'dir/foo' | :markdown | ['foo.md'] | true
'dir/foo' | :markdown | ['dir/foo.md'] | false
'dir/foo' | :markdown | ['dir/foo.rdoc'] | false
'/dir/foo' | :markdown | ['dir/foo.rdoc'] | false
'./dir/foo' | :markdown | ['dir/foo.rdoc'] | false
'../dir/foo' | :markdown | ['dir/foo.rdoc'] | false
'../dir/../foo' | :markdown | ['dir/foo.rdoc'] | true
'../dir/../foo' | :markdown | ['foo.rdoc'] | false
'../dir/../dir/foo' | :markdown | ['dir/foo.rdoc'] | false
'../dir/../another/foo' | :markdown | ['dir/foo.rdoc'] | true
'another/dir/foo' | :markdown | ['dir/foo.md'] | true
'foo bar' | :markdown | ['foo-bar.md'] | false
'foo bar' | :markdown | ['foo-bar.md'] | true
'föö'.encode('ISO-8859-1') | :markdown | ['föö.md'] | false
end
with_them do
specify do
allow(subject.repository).to receive(:ls_files).and_return(existing_repo_files)
expect(subject.create_page(new_file, 'content', format)).to eq success
end
end
end
it 'stores an error message when a duplicate page exists' do
2.times { subject.create_page('test page', 'content') }
context 'when feature flag :gitaly_replace_wiki_create_page is disabled' do
before do
stub_feature_flags(gitaly_replace_wiki_create_page: false)
end
expect(subject.error_message).to match(/Duplicate page:/)
end
it 'sets the correct commit message' do
subject.create_page('test page', 'some content', :markdown, 'commit message')
expect(subject.list_pages.first.page.version.message).to eq('commit message')
end
it 'sets the correct commit email' do
subject.create_page('test page', 'content')
expect(user.commit_email).not_to eq(user.email)
expect(commit.author_email).to eq(user.commit_email)
expect(commit.committer_email).to eq(user.commit_email)
end
it 'runs after_wiki_activity callbacks' do
expect(subject).to receive(:after_wiki_activity)
subject.create_page('Test Page', 'This is content')
it_behaves_like 'create_page tests'
end
end
@ -452,7 +572,7 @@ RSpec.shared_examples 'wiki model' do
expect(subject).to receive(:after_wiki_activity)
expect(update_page).to eq true
page = subject.find_page(updated_title.presence || original_title)
page = subject.find_page(expected_title)
expect(page.raw_content).to eq(updated_content)
expect(page.path).to eq(expected_path)
@ -467,23 +587,25 @@ RSpec.shared_examples 'wiki model' do
shared_context 'common examples' do
using RSpec::Parameterized::TableSyntax
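# The updated table adds an expected_title column: the title the page should resolve to after update_page, which the spec above looks up via find_page(expected_title).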
where(:original_title, :original_format, :updated_title, :updated_format, :expected_path) do
'test page' | :markdown | 'new test page' | :markdown | 'new-test-page.md'
'test page' | :markdown | 'test page' | :markdown | 'test-page.md'
'test page' | :markdown | 'test page' | :asciidoc | 'test-page.asciidoc'
where(:original_title, :original_format, :updated_title, :updated_format, :expected_title, :expected_path) do
'test page' | :markdown | 'new test page' | :markdown | 'new test page' | 'new-test-page.md'
'test page' | :markdown | 'test page' | :markdown | 'test page' | 'test-page.md'
'test page' | :markdown | 'test page' | :asciidoc | 'test page' | 'test-page.asciidoc'
'test page' | :markdown | 'new dir/new test page' | :markdown | 'new-dir/new-test-page.md'
'test page' | :markdown | 'new dir/test page' | :markdown | 'new-dir/test-page.md'
'test page' | :markdown | 'new dir/new test page' | :markdown | 'new dir/new test page' | 'new-dir/new-test-page.md'
'test page' | :markdown | 'new dir/test page' | :markdown | 'new dir/test page' | 'new-dir/test-page.md'
'test dir/test page' | :markdown | 'new dir/new test page' | :markdown | 'new-dir/new-test-page.md'
'test dir/test page' | :markdown | 'test dir/test page' | :markdown | 'test-dir/test-page.md'
'test dir/test page' | :markdown | 'test dir/test page' | :asciidoc | 'test-dir/test-page.asciidoc'
'test dir/test page' | :markdown | 'new dir/new test page' | :markdown | 'new dir/new test page' | 'new-dir/new-test-page.md'
'test dir/test page' | :markdown | 'test dir/test page' | :markdown | 'test dir/test page' | 'test-dir/test-page.md'
'test dir/test page' | :markdown | 'test dir/test page' | :asciidoc | 'test dir/test page' | 'test-dir/test-page.asciidoc'
'test dir/test page' | :markdown | 'new test page' | :markdown | 'new-test-page.md'
'test dir/test page' | :markdown | 'test page' | :markdown | 'test-page.md'
'test dir/test page' | :markdown | 'new test page' | :markdown | 'new test page' | 'new-test-page.md'
'test dir/test page' | :markdown | 'test page' | :markdown | 'test page' | 'test-page.md'
'test page' | :markdown | nil | :markdown | 'test-page.md'
'test.page' | :markdown | nil | :markdown | 'test.page.md'
'test page' | :markdown | nil | :markdown | 'test page' | 'test-page.md'
'test.page' | :markdown | nil | :markdown | 'test.page' | 'test.page.md'
'testpage' | :markdown | './testpage' | :markdown | 'testpage' | 'testpage.md'
end
end
@ -497,16 +619,23 @@ RSpec.shared_examples 'wiki model' do
shared_context 'extended examples' do
using RSpec::Parameterized::TableSyntax
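# The new rows with './' and '../' segments verify that relative path components in the updated title are normalized out of the resulting title and path.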
where(:original_title, :original_format, :updated_title, :updated_format, :expected_path) do
'test page' | :markdown | 'new test page' | :asciidoc | 'new-test-page.asciidoc'
'test page' | :markdown | 'new dir/new test page' | :asciidoc | 'new-dir/new-test-page.asciidoc'
'test dir/test page' | :markdown | 'new dir/new test page' | :asciidoc | 'new-dir/new-test-page.asciidoc'
'test dir/test page' | :markdown | 'new test page' | :asciidoc | 'new-test-page.asciidoc'
'test page' | :markdown | nil | :asciidoc | 'test-page.asciidoc'
'test dir/test page' | :markdown | nil | :asciidoc | 'test-dir/test-page.asciidoc'
'test dir/test page' | :markdown | nil | :markdown | 'test-dir/test-page.md'
'test page' | :markdown | '' | :markdown | 'test-page.md'
'test.page' | :markdown | '' | :markdown | 'test.page.md'
where(:original_title, :original_format, :updated_title, :updated_format, :expected_title, :expected_path) do
'test page' | :markdown | 'new test page' | :asciidoc | 'new test page' | 'new-test-page.asciidoc'
'test page' | :markdown | 'new dir/new test page' | :asciidoc | 'new dir/new test page' | 'new-dir/new-test-page.asciidoc'
'test dir/test page' | :markdown | 'new dir/new test page' | :asciidoc | 'new dir/new test page' | 'new-dir/new-test-page.asciidoc'
'test dir/test page' | :markdown | 'new test page' | :asciidoc | 'new test page' | 'new-test-page.asciidoc'
'test page' | :markdown | nil | :asciidoc | 'test page' | 'test-page.asciidoc'
'test dir/test page' | :markdown | nil | :asciidoc | 'test dir/test page' | 'test-dir/test-page.asciidoc'
'test dir/test page' | :markdown | nil | :markdown | 'test dir/test page' | 'test-dir/test-page.md'
'test page' | :markdown | '' | :markdown | 'test page' | 'test-page.md'
'test.page' | :markdown | '' | :markdown | 'test.page' | 'test.page.md'
'testpage' | :markdown | '../testpage' | :markdown | 'testpage' | 'testpage.md'
'dir/testpage' | :markdown | 'dir/../testpage' | :markdown | 'testpage' | 'testpage.md'
'dir/testpage' | :markdown | './dir/testpage' | :markdown | 'dir/testpage' | 'dir/testpage.md'
'dir/testpage' | :markdown | '../dir/testpage' | :markdown | 'dir/testpage' | 'dir/testpage.md'
'dir/testpage' | :markdown | '../dir/../testpage' | :markdown | 'testpage' | 'testpage.md'
'dir/testpage' | :markdown | '../dir/../dir/testpage' | :markdown | 'dir/testpage' | 'dir/testpage.md'
'dir/testpage' | :markdown | '../dir/../another/testpage' | :markdown | 'another/testpage' | 'another/testpage.md'
end
end
View File
@ -2853,10 +2853,10 @@ babel-jest@^26.5.2:
graceful-fs "^4.2.4"
slash "^3.0.0"
babel-loader@^8.2.4:
version "8.2.4"
resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.2.4.tgz#95f5023c791b2e9e2ca6f67b0984f39c82ff384b"
integrity sha512-8dytA3gcvPPPv4Grjhnt8b5IIiTcq/zeXOPk4iTYI0SVXcsmuGg7JtBRDp8S9X+gJfhQ8ektjXZlDu1Bb33U8A==
babel-loader@^8.2.5:
version "8.2.5"
resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.2.5.tgz#d45f585e654d5a5d90f5350a779d7647c5ed512e"
integrity sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ==
dependencies:
find-cache-dir "^3.3.1"
loader-utils "^2.0.0"
@ -3672,7 +3672,12 @@ commander@^6.0.0:
resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c"
integrity sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==
commander@^9.0.0, commander@~9.0.0:
commander@^9.2.0:
version "9.2.0"
resolved "https://registry.yarnpkg.com/commander/-/commander-9.2.0.tgz#6e21014b2ed90d8b7c9647230d8b7a94a4a419a9"
integrity sha512-e2i4wANQiSXgnrBlIatyHtP1odfUp0BbV5Y5nEGbxtIrStkEOAAzCUirvLBNXHLr7kwLvJl6V+4V3XV9x7Wd9w==
commander@~9.0.0:
version "9.0.0"
resolved "https://registry.yarnpkg.com/commander/-/commander-9.0.0.tgz#86d58f24ee98126568936bd1d3574e0308a99a40"
integrity sha512-JJfP2saEKbQqvW+FI93OYUB4ByV5cizMpFMiiJI8xDbBvQvSkIk0VvQdn1CZ8mqAO8Loq2h0gYTYtDFUZUeERw==
@ -11962,6 +11967,11 @@ tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3:
resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00"
integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==
tslib@^2.3.1:
version "2.4.0"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3"
integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==
tsutils@^3.17.1:
version "3.17.1"
resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.17.1.tgz#ed719917f11ca0dee586272b2ac49e015a2dd759"
@ -13138,14 +13148,15 @@ yarn-check-webpack-plugin@^1.2.0:
dependencies:
chalk "^2.4.2"
yarn-deduplicate@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/yarn-deduplicate/-/yarn-deduplicate-4.0.0.tgz#0fcd11a83c2629ee41bc38a97d78fbd810c5825f"
integrity sha512-1sI617aM8WNplWA7O58peEq3gC14Ah/Ld55CF1aB2v4pTaxDpOgb+mTaWhIKzOCqJjwnaSqmYVrfgABUlc9bNA==
yarn-deduplicate@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/yarn-deduplicate/-/yarn-deduplicate-5.0.0.tgz#8977b9a4b1a2fd905568c3a23507b1021fa381eb"
integrity sha512-sYA5tqBSY3m+DtEcwfMYP1G2zWq1UtWSNg2goESqiu/JXBoBF/Qh+FuTJGGjsrisxL+5yOgq/ez1Rd+KSPwzvA==
dependencies:
"@yarnpkg/lockfile" "^1.1.0"
commander "^9.0.0"
commander "^9.2.0"
semver "^7.3.2"
tslib "^2.3.1"
yn@3.1.1:
version "3.1.1"