Add latest changes from gitlab-org/gitlab@master

Parent: 842ac3526c
Commit: 81c0f29ad9

83 changed files with 676 additions and 724 deletions
@@ -43,7 +43,7 @@ export default {
{{ s__('ServicePing|Turn on service ping to review instance-level analytics.') }}
</p>

<gl-button category="primary" variant="success" :href="primaryButtonPath">
<gl-button category="primary" variant="confirm" :href="primaryButtonPath">
{{ s__('ServicePing|Turn on service ping') }}
</gl-button>
</template>

@@ -4,7 +4,10 @@ import { mapActions } from 'vuex';
import { getIdFromGraphQLId, isGid } from '~/graphql_shared/utils';
import { updateHistory, setUrlParams } from '~/lib/utils/url_utility';
import { __ } from '~/locale';
import { FILTERED_SEARCH_TERM } from '~/vue_shared/components/filtered_search_bar/constants';
import {
FILTERED_SEARCH_TERM,
FILTER_ANY,
} from '~/vue_shared/components/filtered_search_bar/constants';
import FilteredSearch from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import { AssigneeFilterType } from '~/boards/constants';

@@ -42,6 +45,7 @@ export default {
search,
milestoneTitle,
iterationId,
iterationCadenceId,
types,
weight,
epicId,

@@ -95,10 +99,20 @@ export default {
});
}

if (iterationId) {
let iterationData = null;

if (iterationId && iterationCadenceId) {
iterationData = `${iterationId}&${iterationCadenceId}`;
} else if (iterationCadenceId) {
iterationData = `${FILTER_ANY}&${iterationCadenceId}`;
} else if (iterationId) {
iterationData = iterationId;
}

if (iterationData) {
filteredSearchValue.push({
type: 'iteration',
value: { data: iterationId, operator: '=' },
value: { data: iterationData, operator: '=' },
});
}

@@ -228,9 +242,12 @@ export default {
epicId,
myReactionEmoji,
iterationId,
iterationCadenceId,
releaseTag,
confidential,
} = this.filterParams;
let iteration = iterationId;
let cadence = iterationCadenceId;
let notParams = {};

if (Object.prototype.hasOwnProperty.call(this.filterParams, 'not')) {

@@ -251,6 +268,10 @@ export default {
);
}

if (iterationId?.includes('&')) {
[iteration, cadence] = iterationId.split('&');
}

return mapValues(
{
...notParams,

@@ -259,7 +280,8 @@ export default {
assignee_username: assigneeUsername,
assignee_id: assigneeId,
milestone_title: milestoneTitle,
iteration_id: iterationId,
iteration_id: iteration,
iteration_cadence_id: cadence,
search,
types,
weight,
@@ -1,6 +1,6 @@
<script>
import { GlLoadingIcon, GlIcon, GlTooltipDirective, GlBadge } from '@gitlab/ui';
import CiIcon from '../../../vue_shared/components/ci_icon.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import Item from './item.vue';

export default {

@@ -2,7 +2,7 @@ import { escape } from 'lodash';
import createFlash from '~/flash';
import { __, sprintf } from '~/locale';
import { logError } from '~/lib/logger';
import api from '../../../api';
import api from '~/api';
import service from '../../services';
import * as types from '../mutation_types';

@@ -40,7 +40,7 @@ export default {
:title="$options.title"
:aria-label="$options.title"
icon="check"
variant="success"
variant="confirm"
type="submit"
/>
</gl-form>

@@ -1,5 +1,5 @@
import $ from 'jquery';
import BranchGraph from '../../../network/branch_graph';
import BranchGraph from '~/network/branch_graph';

const vph = $(window).height() - 250;

@@ -1,6 +1,6 @@
import $ from 'jquery';
import initVueAlerts from '~/vue_alerts';
import NoEmojiValidator from '../../../emoji/no_emoji_validator';
import NoEmojiValidator from '~/emoji/no_emoji_validator';
import LengthValidator from './length_validator';
import OAuthRememberMe from './oauth_remember_me';
import preserveUrlFragment from './preserve_url_fragment';

@@ -1,4 +1,4 @@
import InputValidator from '../../../validators/input_validator';
import InputValidator from '~/validators/input_validator';

const errorMessageClass = 'gl-field-error';

@@ -3,8 +3,8 @@ import Vue from 'vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import csrf from '~/lib/utils/csrf';
import Translate from '~/vue_shared/translate';
import GLForm from '../../../gl_form';
import ZenMode from '../../../zen_mode';
import GLForm from '~/gl_form';
import ZenMode from '~/zen_mode';
import deleteWikiModal from './components/delete_wiki_modal.vue';
import wikiAlert from './components/wiki_alert.vue';
import wikiForm from './components/wiki_form.vue';

@@ -1,5 +1,5 @@
<script>
import ciIcon from '../../../vue_shared/components/ci_icon.vue';
import ciIcon from '~/vue_shared/components/ci_icon.vue';

/**
* Component that renders both the CI icon status and the job name.

@@ -1,7 +1,7 @@
<script>
import { refreshUserMergeRequestCounts } from '~/commons/nav/user_merge_requests';
import simplePoll from '~/lib/utils/simple_poll';
import MergeRequest from '../../../merge_request';
import MergeRequest from '~/merge_request';
import eventHub from '../../event_hub';
import { MERGE_ACTIVE_STATUS_PHRASES, STATE_MACHINE } from '../../constants';
import statusIcon from '../mr_widget_status_icon.vue';

@@ -10,7 +10,7 @@ import {
} from '~/behaviors/shortcuts/keybindings';
import { getSelectedFragment } from '~/lib/utils/common_utils';
import { s__, __ } from '~/locale';
import { CopyAsGFM } from '../../../behaviors/markdown/copy_as_gfm';
import { CopyAsGFM } from '~/behaviors/markdown/copy_as_gfm';
import ToolbarButton from './toolbar_button.vue';

export default {

@@ -18,7 +18,7 @@
import { GlTooltip, GlAvatar } from '@gitlab/ui';
import defaultAvatarUrl from 'images/no_avatar.png';
import { __ } from '~/locale';
import { placeholderImage } from '../../../lazy_loader';
import { placeholderImage } from '~/lazy_loader';

export default {
name: 'UserAvatarImageNew',

@@ -18,7 +18,7 @@
import { GlTooltip } from '@gitlab/ui';
import defaultAvatarUrl from 'images/no_avatar.png';
import { __ } from '~/locale';
import { placeholderImage } from '../../../lazy_loader';
import { placeholderImage } from '~/lazy_loader';

export default {
name: 'UserAvatarImageOld',

@@ -8,7 +8,7 @@ import {
GlSprintf,
} from '@gitlab/ui';
import UserNameWithStatus from '~/sidebar/components/assignees/user_name_with_status.vue';
import { glEmojiTag } from '../../../emoji';
import { glEmojiTag } from '~/emoji';
import UserAvatarImage from '../user_avatar/user_avatar_image.vue';

const MAX_SKELETON_LINES = 4;
@@ -10,6 +10,11 @@
module EnforcesTwoFactorAuthentication
extend ActiveSupport::Concern

MFA_HELP_PAGE = Rails.application.routes.url_helpers.help_page_url(
'user/profile/account/two_factor_authentication.html',
anchor: 'enable-two-factor-authentication'
)

included do
before_action :check_two_factor_requirement, except: [:route_not_found]

@@ -26,7 +31,11 @@ module EnforcesTwoFactorAuthentication
if two_factor_authentication_required? && current_user_requires_two_factor?
case self
when GraphqlController
render_error("2FA required", status: :unauthorized)
render_error(
_("Authentication error: enable 2FA in your profile settings to continue using GitLab: %{mfa_help_page}") %
{ mfa_help_page: MFA_HELP_PAGE },
status: :unauthorized
)
else
redirect_to profile_two_factor_auth_path
end

@@ -8,7 +8,6 @@ class Groups::BoardsController < Groups::ApplicationController
before_action :assign_endpoint_vars
before_action do
push_frontend_feature_flag(:board_multi_select, group, default_enabled: :yaml)
push_frontend_feature_flag(:iteration_cadences, group, default_enabled: :yaml)
push_frontend_feature_flag(:realtime_labels, group, default_enabled: :yaml)
experiment(:prominent_create_board_btn, subject: current_user) do |e|
e.control { }

@@ -50,3 +49,5 @@ class Groups::BoardsController < Groups::ApplicationController
access_denied! unless can?(current_user, :read_issue_board, group)
end
end

Groups::BoardsController.prepend_mod

@@ -33,7 +33,6 @@ class GroupsController < Groups::ApplicationController

before_action do
push_frontend_feature_flag(:vue_issues_list, @group, default_enabled: :yaml)
push_frontend_feature_flag(:iteration_cadences, @group, default_enabled: :yaml)
end

before_action :check_export_rate_limit!, only: [:export, :download_export]

@@ -8,7 +8,6 @@ class Projects::BoardsController < Projects::ApplicationController
before_action :assign_endpoint_vars
before_action do
push_frontend_feature_flag(:board_multi_select, project, default_enabled: :yaml)
push_frontend_feature_flag(:iteration_cadences, project&.group, default_enabled: :yaml)
push_frontend_feature_flag(:realtime_labels, project&.group, default_enabled: :yaml)
experiment(:prominent_create_board_btn, subject: current_user) do |e|
e.control { }

@@ -51,3 +50,5 @@ class Projects::BoardsController < Projects::ApplicationController
access_denied! unless can?(current_user, :read_issue_board, project)
end
end

Projects::BoardsController.prepend_mod

@@ -40,7 +40,6 @@ class Projects::IssuesController < Projects::ApplicationController

before_action do
push_frontend_feature_flag(:vue_issues_list, project&.group, default_enabled: :yaml)
push_frontend_feature_flag(:iteration_cadences, project&.group, default_enabled: :yaml)
push_frontend_feature_flag(:contacts_autocomplete, project&.group, default_enabled: :yaml)
push_frontend_feature_flag(:incident_timeline, project, default_enabled: :yaml)
end
@@ -13,6 +13,8 @@ module Mutations

if pipeline.cancelable?
pipeline.cancel_running
pipeline.cancel

{ success: true, errors: [] }
else
{ success: false, errors: ['Pipeline is not cancelable'] }

@@ -15,7 +15,8 @@ module Mutations
argument :confidential,
GraphQL::Types::Boolean,
required: false,
description: 'Confidentiality flag of a note. Default is false.'
description: 'Confidentiality flag of a note. Default is false.',
deprecated: { reason: 'No longer allowed to update confidentiality of notes', milestone: '14.10' }

private

@@ -828,12 +828,7 @@ class Group < Namespace
end

def work_items_feature_flag_enabled?
actors = [root_ancestor]
actors << self if root_ancestor != self

actors.any? do |actor|
Feature.enabled?(:work_items, actor, default_enabled: :yaml)
end
feature_flag_enabled_for_self_or_ancestor?(:work_items)
end

# Check for enabled features, similar to `Project#feature_available?`

@@ -849,6 +844,15 @@ class Group < Namespace

private

def feature_flag_enabled_for_self_or_ancestor?(feature_flag)
actors = [root_ancestor]
actors << self if root_ancestor != self

actors.any? do |actor|
::Feature.enabled?(feature_flag, actor, default_enabled: :yaml)
end
end

def max_member_access(user_ids)
Gitlab::SafeRequestLoader.execute(resource_key: max_member_access_for_resource_key(User),
resource_ids: user_ids,
@@ -1,16 +1,16 @@
= form_for group, url: update_auto_devops_group_settings_ci_cd_path(group), method: :patch do |f|
= gitlab_ui_form_for group, url: update_auto_devops_group_settings_ci_cd_path(group), method: :patch do |f|
= form_errors(group)
%fieldset
.form-group
.card.auto-devops-card
.card-body
.form-check
= f.check_box :auto_devops_enabled, class: 'form-check-input', checked: group.auto_devops_enabled?
= f.label :auto_devops_enabled, class: 'form-check-label' do
%strong= s_('GroupSettings|Default to Auto DevOps pipeline for all projects within this group')
= gl_badge_tag badge_for_auto_devops_scope(group), variant: :info
.form-text.text-muted
= s_('GroupSettings|The Auto DevOps pipeline runs if no alternative CI configuration file is found.')
= link_to _('Learn more.'), help_page_path('topics/autodevops/index.md'), target: '_blank', rel: 'noopener noreferrer'
- learn_more_link = link_to _('Learn more.'), help_page_path('topics/autodevops/index.md'), target: '_blank', rel: 'noopener noreferrer'
- help_text = s_('GroupSettings|The Auto DevOps pipeline runs if no alternative CI configuration file is found.')
- badge = gl_badge_tag badge_for_auto_devops_scope(group), variant: :info
- label = s_('GroupSettings|Default to Auto DevOps pipeline for all projects within this group')
= f.gitlab_ui_checkbox_component :auto_devops_enabled,
'%{label} %{badge}'.html_safe % { label: label, badge: badge.html_safe },
help_text: '%{help_text} %{learn_more_link}'.html_safe % { help_text: help_text, learn_more_link: learn_more_link },
checkbox_options: { checked: group.auto_devops_enabled? }

= f.submit _('Save changes'), class: 'btn gl-button btn-confirm gl-mt-5'

@@ -1,8 +0,0 @@
---
name: iteration_cadences
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/54822
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/354878
milestone: '13.10'
type: development
group: group::project management
default_enabled: false

@@ -1,7 +1,7 @@
---
data_category: optional
key_path: usage_activity_by_stage_monthly.verify.ci_pipeline_config_auto_devops
description: Total pipelines from an Auto DevOps template
description: Distinct users that ran an auto DevOps pipeline without a .gitlab-ci.yml file.
product_section: ops
product_stage: configure
product_group: group::configure

@@ -0,0 +1,9 @@
- name: "Toggle notes confidentiality on APIs"
announcement_milestone: "14.10" # The milestone when this feature was first announced as deprecated.
announcement_date: "2022-03-22" # The date of the milestone release when this feature was first announced as deprecated. This should almost always be the 22nd of a month (YYYY-MM-22), unless you did an out of band blog post.
removal_milestone: "16.0" # The milestone when this feature is planned to be removed
removal_date: "2023-05-22" # The date of the milestone release when this feature is planned to be removed. This should almost always be the 22nd of a month (YYYY-MM-22), unless you did an out of band blog post.
breaking_change: true # If this deprecation is a breaking change, set this value to true
body: | # Do not modify this line, instead modify the lines below.
Toggling notes confidentiality with REST and GraphQL APIs is being deprecated. Updating notes confidential attribute is no longer supported by any means. We are changing this to simplify the experience and prevent private information from being unintentionally exposed.
issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/350670
@@ -5014,7 +5014,7 @@ Input type: `UpdateNoteInput`
| ---- | ---- | ----------- |
| <a id="mutationupdatenotebody"></a>`body` | [`String`](#string) | Content of the note. |
| <a id="mutationupdatenoteclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationupdatenoteconfidential"></a>`confidential` | [`Boolean`](#boolean) | Confidentiality flag of a note. Default is false. |
| <a id="mutationupdatenoteconfidential"></a>`confidential` **{warning-solid}** | [`Boolean`](#boolean) | **Deprecated:** No longer allowed to update confidentiality of notes. Deprecated in 14.10. |
| <a id="mutationupdatenoteid"></a>`id` | [`NoteID!`](#noteid) | Global ID of the note to update. |

#### Fields

@@ -142,8 +142,8 @@ Parameters:

| Attribute | Type | Required | Description |
|----------------|----------------|----------|------------------------------------------------------------------------------------------------------------------------------|
| `id` | integer or string | yes | The ID or [URL-encoded path of the project](index.md#namespaced-path-encoding). |
| `issue_iid` | integer | yes | The IID of an issue. |
| `id` | integer or string | yes | The ID or [URL-encoded path of the project](index.md#namespaced-path-encoding). |
| `issue_iid` | integer | yes | The IID of an issue. |
| `body` | string | yes | The content of a note. Limited to 1,000,000 characters. |
| `confidential` | boolean | no | The confidential flag of a note. Default is false. |
| `created_at` | string | no | Date time string, ISO 8601 formatted. Example: `2016-03-11T03:45:40Z` (requires administrator or project/group owner rights) |

@@ -162,13 +162,13 @@ PUT /projects/:id/issues/:issue_iid/notes/:note_id

Parameters:

| Attribute | Type | Required | Description |
|----------------|----------------|----------|----------------------------------------------------------------------------------|
| `id` | integer or string | yes | The ID or [URL-encoded path of the project](index.md#namespaced-path-encoding). |
| `issue_iid` | integer | yes | The IID of an issue. |
| `note_id` | integer | yes | The ID of a note. |
| `body` | string | no | The content of a note. Limited to 1,000,000 characters. |
| `confidential` | boolean | no | The confidential flag of a note. |
| Attribute | Type | Required | Description |
|----------------|----------------|-------------|----------------------------------------------------------------------------------------------------|
| `id` | integer or string | yes | The ID or [URL-encoded path of the project](index.md#namespaced-path-encoding). |
| `issue_iid` | integer | yes | The IID of an issue. |
| `note_id` | integer | yes | The ID of a note. |
| `body` | string | no | The content of a note. Limited to 1,000,000 characters. |
| `confidential` | boolean | no | **Deprecated:** will be removed in GitLab 16.0. The confidential flag of a note. Default is false. |

```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/issues/11/notes?body=note"

@@ -415,12 +415,13 @@ PUT /projects/:id/merge_requests/:merge_request_iid/notes/:note_id

Parameters:

| Attribute | Type | Required | Description |
|---------------------|----------------|----------|---------------------------------------------------------------------------------|
| `id` | integer or string | yes | The ID or [URL-encoded path of the project](index.md#namespaced-path-encoding) |
| `merge_request_iid` | integer | yes | The IID of a project merge request |
| `note_id` | integer | no | The ID of a note |
| `body` | string | yes | The content of a note. Limited to 1,000,000 characters. |
| Attribute | Type | Required | Description |
|---------------------|-------------------|----------|----------------------------------------------------------------------------------------------------|
| `id` | integer or string | yes | The ID or [URL-encoded path of the project](index.md#namespaced-path-encoding) |
| `merge_request_iid` | integer | yes | The IID of a project merge request |
| `note_id` | integer | no | The ID of a note |
| `body` | string | yes | The content of a note. Limited to 1,000,000 characters. |
| `confidential` | boolean | no | **Deprecated:** will be removed in GitLab 16.0. The confidential flag of a note. Default is false. |

```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/merge_requests/11/notes?body=note"

@@ -519,11 +520,12 @@ POST /groups/:id/epics/:epic_id/notes

Parameters:

| Attribute | Type | Required | Description |
| --------- | -------------- | -------- | ----------- |
| `id` | integer or string | yes | The ID or [URL-encoded path of the group](index.md#namespaced-path-encoding) |
| `epic_id` | integer | yes | The ID of an epic |
| `body` | string | yes | The content of a note. Limited to 1,000,000 characters. |
| Attribute | Type | Required | Description |
| --------- | -------------- | -------- | ----------- |
| `body` | string | yes | The content of a note. Limited to 1,000,000 characters. |
| `epic_id` | integer | yes | The ID of an epic |
| `id` | integer or string | yes | The ID or [URL-encoded path of the group](index.md#namespaced-path-encoding) |
| `confidential` | boolean | no | The confidential flag of a note. Default is `false`. |

```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/5/epics/11/notes?body=note"

@@ -539,12 +541,13 @@ PUT /groups/:id/epics/:epic_id/notes/:note_id

Parameters:

| Attribute | Type | Required | Description |
| --------- | -------------- | -------- | ----------- |
| `id` | integer or string | yes | The ID or [URL-encoded path of the group](index.md#namespaced-path-encoding) |
| `epic_id` | integer | yes | The ID of an epic |
| `note_id` | integer | yes | The ID of a note |
| `body` | string | yes | The content of a note. Limited to 1,000,000 characters. |
| Attribute | Type | Required | Description |
| ---------------| ----------------- | -------- | ---------------------------------------------------------------------------------------------------|
| `id` | integer or string | yes | The ID or [URL-encoded path of the group](index.md#namespaced-path-encoding) |
| `epic_id` | integer | yes | The ID of an epic |
| `note_id` | integer | yes | The ID of a note |
| `body` | string | yes | The content of a note. Limited to 1,000,000 characters. |
| `confidential` | boolean | no | **Deprecated:** will be removed in GitLab 16.0. The confidential flag of a note. Default is false. |

```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/5/epics/11/notes?body=note"
doc/development/avoiding_downtime_in_migrations.md (new file, 11 lines)

@@ -0,0 +1,11 @@
---
redirect_to: 'database/avoiding_downtime_in_migrations.md'
remove_date: '2022-07-08'
---

This document was moved to [another location](database/avoiding_downtime_in_migrations.md).

<!-- This redirect file can be deleted after <2022-07-08>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->
<!-- Redirects that point to docs in a different project or site (for example, link is not relative and starts with `https:`) expire in one year. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->

doc/development/background_migrations.md (new file, 11 lines)

@@ -0,0 +1,11 @@
---
redirect_to: 'database/background_migrations.md'
remove_date: '2022-07-08'
---

This document was moved to [another location](database/background_migrations.md).

<!-- This redirect file can be deleted after <2022-07-08>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->
<!-- Redirects that point to docs in a different project or site (for example, link is not relative and starts with `https:`) expire in one year. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->
@@ -241,6 +241,8 @@ warrant a comment could be:
- Any benchmarking performed to complement the change.
- Potentially insecure code.

If there are any projects, snippets, or other assets that are required for a reviewer to validate the solution, ensure they have access to those assets before requesting review.

Avoid:

- Adding TODO comments (referenced above) directly to the source code unless the reviewer requires
doc/development/deleting_migrations.md (new file, 11 lines)

@@ -0,0 +1,11 @@
---
redirect_to: 'database/deleting_migrations.md'
remove_date: '2022-07-08'
---

This document was moved to [another location](database/deleting_migrations.md).

<!-- This redirect file can be deleted after <2022-07-08>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->
<!-- Redirects that point to docs in a different project or site (for example, link is not relative and starts with `https:`) expire in one year. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->

doc/development/post_deployment_migrations.md (new file, 11 lines)

@@ -0,0 +1,11 @@
---
redirect_to: 'database/post_deployment_migrations.md'
remove_date: '2022-07-08'
---

This document was moved to [another location](database/post_deployment_migrations.md).

<!-- This redirect file can be deleted after <2022-07-08>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->
<!-- Redirects that point to docs in a different project or site (for example, link is not relative and starts with `https:`) expire in one year. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->
|
@ -4,29 +4,29 @@ group: Source Code
|
|||
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
|
||||
---
|
||||
|
||||
# Websocket channel support
|
||||
# Websocket channel support for Workhorse
|
||||
|
||||
In some cases, GitLab can provide in-browser terminal access to an
|
||||
environment (which is a running server or container, onto which a
|
||||
project has been deployed), or even access to services running in CI
|
||||
through a WebSocket. Workhorse manages the WebSocket upgrade and
|
||||
long-lived connection to the websocket connection, which frees
|
||||
up GitLab to process other requests.
|
||||
In some cases, GitLab can provide the following through a WebSocket:
|
||||
|
||||
This document outlines the architecture of these connections.
|
||||
- In-browser terminal access to an environment: a running server or container,
|
||||
onto which a project has been deployed.
|
||||
- Access to services running in CI.
|
||||
|
||||
Workhorse manages the WebSocket upgrade and long-lived connection to the websocket
|
||||
connection, which frees up GitLab to process other requests. This document outlines
|
||||
the architecture of these connections.
|
||||
|
||||
## Introduction to WebSockets
|
||||
|
||||
A websocket is an "upgraded" HTTP/1.1 request. Their purpose is to
|
||||
permit bidirectional communication between a client and a server.
|
||||
**Websockets are not HTTP**. Clients can send messages (known as
|
||||
frames) to the server at any time, and vice-versa. Client messages
|
||||
are not necessarily requests, and server messages are not necessarily
|
||||
responses. WebSocket URLs have schemes like `ws://` (unencrypted) or
|
||||
Websockets are an "upgraded" `HTTP/1.1` request. They permit bidirectional
|
||||
communication between a client and a server. **Websockets are not HTTP**.
|
||||
Clients can send messages (known as frames) to the server at any time, and
|
||||
vice versa. Client messages are not necessarily requests, and server messages are
|
||||
not necessarily responses. WebSocket URLs have schemes like `ws://` (unencrypted) or
|
||||
`wss://` (TLS-secured).
|
||||
|
||||
When requesting an upgrade to WebSocket, the browser sends a HTTP/1.1
|
||||
request that looks like this:
|
||||
When requesting an upgrade to WebSocket, the browser sends a `HTTP/1.1`
|
||||
request like this:
|
||||
|
||||
```plaintext
|
||||
GET /path.ws HTTP/1.1
|
||||
|
@ -36,164 +36,166 @@ Sec-WebSocket-Protocol: terminal.gitlab.com
|
|||
# More headers, including security measures
|
||||
```
|
||||
|
||||
At this point, the connection is still HTTP, so this is a request and
|
||||
the server can send a normal HTTP response, including `404 Not Found`,
|
||||
`500 Internal Server Error`, etc.
|
||||
At this point, the connection is still HTTP, so this is a request.
|
||||
The server can send a normal HTTP response, such as `404 Not Found` or
|
||||
`500 Internal Server Error`.
|
||||
|
||||
If the server decides to permit the upgrade, it will send a HTTP
|
||||
`101 Switching Protocols` response. From this point, the connection
|
||||
is no longer HTTP. It is a WebSocket and frames, not HTTP requests,
|
||||
will flow over it. The connection will persist until the client or
|
||||
server closes the connection.
|
||||
If the server decides to permit the upgrade, it sends a HTTP
|
||||
`101 Switching Protocols` response. From this point, the connection is no longer
|
||||
HTTP. It is now a WebSocket and frames, not HTTP requests, flow over it. The connection
|
||||
persists until the client or server closes the connection.
|
||||
|
||||
In addition to the subprotocol, individual websocket frames may
|
||||
also specify a message type - examples include `BinaryMessage`,
|
||||
`TextMessage`, `Ping`, `Pong` or `Close`. Only binary frames can
|
||||
contain arbitrary data - other frames are expected to be valid
|
||||
UTF-8 strings, in addition to any subprotocol expectations.
|
||||
In addition to the sub-protocol, individual websocket frames may
|
||||
also specify a message type, such as:
|
||||
|
||||
- `BinaryMessage`
|
||||
- `TextMessage`
|
||||
- `Ping`
|
||||
- `Pong`
|
||||
- `Close`
|
||||
|
||||
Only binary frames can contain arbitrary data. The frames are expected to be valid
|
||||
UTF-8 strings, in addition to any sub-protocol expectations.
|
||||
|
||||
## Browser to Workhorse
|
||||
|
||||
Using the terminal as an example, GitLab serves a JavaScript terminal
|
||||
emulator to the browser on a URL like
|
||||
`https://gitlab.com/group/project/-/environments/1/terminal`.
|
||||
This opens a websocket connection to, e.g.,
|
||||
`wss://gitlab.com/group/project/-/environments/1/terminal.ws`,
|
||||
This endpoint doesn't exist in GitLab - only in Workhorse.
|
||||
Using the terminal as an example:
|
||||
|
||||
When receiving the connection, Workhorse first checks that the
|
||||
client is authorized to access the requested terminal. It does
|
||||
this by performing a "preauthentication" request to GitLab.
|
||||
1. GitLab serves a JavaScript terminal emulator to the browser on a URL like
|
||||
`https://gitlab.com/group/project/-/environments/1/terminal`.
|
||||
1. This URL opens a websocket connection to
|
||||
`wss://gitlab.com/group/project/-/environments/1/terminal.ws`.
|
||||
This endpoint exists only in Workhorse, and doesn't exist in GitLab.
|
||||
1. When receiving the connection, Workhorse first performs a `preauthentication`
|
||||
request to GitLab to confirm the client is authorized to access the requested terminal:
|
||||
- If the client has the appropriate permissions and the terminal exists, GitLab
|
||||
responds with a successful response that includes details of the terminal
|
||||
the client should be connected to.
|
||||
- Otherwise, Workhorse returns an appropriate HTTP error response.
|
||||
1. If GitLab returns valid terminal details to Workhorse, it:
|
||||
1. Connects to the specified terminal.
|
||||
1. Upgrades the browser to a WebSocket.
|
||||
1. Proxies between the two connections for as long as the browser's credentials are valid.
|
||||
1. Send regular `PingMessage` control frames to the browser, to prevent intervening
|
||||
proxies from terminating the connection while the browser is present.
|
||||
|
||||
If the client has the appropriate permissions and the terminal
|
||||
exists, GitLab responds with a successful response that includes
|
||||
details of the terminal that the client should be connected to.
|
||||
Otherwise, it returns an appropriate HTTP error response.
|
||||
The browser must request an upgrade with a specific sub-protocol:
|
||||
|
||||
Errors are passed back to the client as HTTP responses, but if
|
||||
GitLab returns valid terminal details to Workhorse, it will
|
||||
connect to the specified terminal, upgrade the browser to a
|
||||
WebSocket, and proxy between the two connections for as long
|
||||
as the browser's credentials are valid. Workhorse will also
|
||||
send regular `PingMessage` control frames to the browser, to
|
||||
keep intervening proxies from terminating the connection
|
||||
while the browser is present.
|
||||
|
||||
The browser must request an upgrade with a specific subprotocol:
|
||||
- [`terminal.gitlab.com`](#terminalgitlabcom)
|
||||
- [`base64.terminal.gitlab.com`](#base64terminalgitlabcom)
|
||||
|
||||
### `terminal.gitlab.com`
|
||||
|
||||
This subprotocol considers `TextMessage` frames to be invalid.
|
||||
Control frames, such as `PingMessage` or `CloseMessage`, have
|
||||
their usual meanings.
|
||||
This sub-protocol considers `TextMessage` frames to be invalid. Control frames,
|
||||
such as `PingMessage` or `CloseMessage`, have their usual meanings.
|
||||
|
||||
`BinaryMessage` frames sent from the browser to the server are
|
||||
arbitrary text input.
|
||||
|
||||
`BinaryMessage` frames sent from the server to the browser are
|
||||
arbitrary text output.
|
||||
- `BinaryMessage` frames sent from the browser to the server are
|
||||
arbitrary text input.
|
||||
- `BinaryMessage` frames sent from the server to the browser are
|
||||
arbitrary text output.
|
||||
|
||||
These frames are expected to contain ANSI text control codes
|
||||
and may be in any encoding.
|
||||
|
||||
### `base64.terminal.gitlab.com`
|
||||
|
||||
This subprotocol considers `BinaryMessage` frames to be invalid.
|
||||
This sub-protocol considers `BinaryMessage` frames to be invalid.
|
||||
Control frames, such as `PingMessage` or `CloseMessage`, have
|
||||
their usual meanings.
|
||||
|
||||
`TextMessage` frames sent from the browser to the server are
|
||||
base64-encoded arbitrary text input (so the server must
|
||||
base64-decode them before inputting them).
|
||||
|
||||
`TextMessage` frames sent from the server to the browser are
|
||||
base64-encoded arbitrary text output (so the browser must
|
||||
base64-decode them before outputting them).
|
||||
- `TextMessage` frames sent from the browser to the server are
|
||||
base64-encoded arbitrary text input. The server must
|
||||
base64-decode them before inputting them.
|
||||
- `TextMessage` frames sent from the server to the browser are
|
||||
base64-encoded arbitrary text output. The browser must
|
||||
base64-decode them before outputting them.
|
||||
|
||||
In their base64-encoded form, these frames are expected to
|
||||
contain ANSI terminal control codes, and may be in any encoding.
|
||||
|
||||
## Workhorse to GitLab

Using again the terminal as an example, before upgrading the browser,
Using the terminal as an example, before upgrading the browser,
Workhorse sends a normal HTTP request to GitLab on a URL like
`https://gitlab.com/group/project/environments/1/terminal.ws/authorize`.
This returns a JSON response containing details of where the
terminal can be found, and how to connect it. In particular,
the following details are returned in case of success:

- WebSocket URL to **connect** to, e.g.: `wss://example.com/terminals/1.ws?tty=1`
- WebSocket subprotocols to support, e.g.: `["channel.k8s.io"]`
- Headers to send, e.g.: `Authorization: Token xxyyz..`
- Certificate authority to verify `wss` connections with (optional)
- WebSocket URL to connect** to, such as `wss://example.com/terminals/1.ws?tty=1`.
- WebSocket sub-protocols to support, such as `["channel.k8s.io"]`.
- Headers to send, such as `Authorization: Token xxyyz`.
- Optional. Certificate authority to verify `wss` connections with.

Workhorse periodically re-checks this endpoint, and if it gets an
error response, or the details of the terminal change, it will
terminate the websocket session.
Workhorse periodically rechecks this endpoint. If it receives an error response,
or the details of the terminal change, it terminates the websocket session.
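To make the shape of that authorization response concrete, here is a minimal Go sketch of how a consumer of the endpoint could model it. The struct and JSON field names are illustrative assumptions drawn from the list above, not Workhorse's actual types:

```go
package channel

import "encoding/json"

// terminalDetails is a hypothetical model of the JSON payload described
// above: where to connect, which sub-protocols to speak, extra headers,
// and an optional certificate authority for verifying wss:// connections.
type terminalDetails struct {
	URL          string              `json:"url"`              // e.g. wss://example.com/terminals/1.ws?tty=1
	Subprotocols []string            `json:"subprotocols"`     // e.g. ["channel.k8s.io"]
	Headers      map[string][]string `json:"headers"`          // e.g. Authorization: Token xxyyz
	CAPem        string              `json:"ca_pem,omitempty"` // optional CA bundle
}

// parseTerminalDetails decodes an authorize response body into the model.
func parseTerminalDetails(body []byte) (*terminalDetails, error) {
	var details terminalDetails
	if err := json.Unmarshal(body, &details); err != nil {
		return nil, err
	}
	return &details, nil
}
```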
## Workhorse to the WebSocket server
|
||||
|
||||
In GitLab, environments or CI jobs may have a deployment service (e.g.,
|
||||
In GitLab, environments or CI jobs may have a deployment service (like
|
||||
`KubernetesService`) associated with them. This service knows
|
||||
where the terminals or the service for an environment may be found, and these
|
||||
details are returned to Workhorse by GitLab.
|
||||
where the terminals or the service for an environment may be found, and GitLab
|
||||
returns these details to Workhorse.
|
||||
|
||||
These URLs are *also* WebSocket URLs, and GitLab tells Workhorse
|
||||
which subprotocols to speak over the connection, along with any
|
||||
authentication details required by the remote end.
|
||||
These URLs are also WebSocket URLs. GitLab tells Workhorse which sub-protocols to
|
||||
speak over the connection, along with any authentication details required by the
|
||||
remote end.
|
||||
|
||||
Before upgrading the browser's connection to a websocket,
|
||||
Workhorse opens a HTTP client connection, according to the
|
||||
details given to it by Workhorse, and attempts to upgrade
|
||||
that connection to a websocket. If it fails, an error
|
||||
response is sent to the browser; otherwise, the browser is
|
||||
also upgraded.
|
||||
Before upgrading the browser's connection to a websocket, Workhorse:
|
||||
|
||||
Workhorse now has two websocket connections, albeit with
|
||||
differing subprotocols. It decodes incoming frames from the
|
||||
browser, re-encodes them to the channel's subprotocol, and
|
||||
sends them to the channel. Similarly, it decodes incoming
|
||||
frames from the channel, re-encodes them to the browser's
|
||||
subprotocol, and sends them to the browser.
|
||||
1. Opens a HTTP client connection, according to the details given to it by Workhorse.
|
||||
1. Attempts to upgrade that connection to a websocket.
|
||||
- If it fails, an error response is sent to the browser.
|
||||
- If it succeeds, the browser is also upgraded.
|
||||
|
||||
When either connection closes or enters an error state,
|
||||
Workhorse detects the error and closes the other connection,
|
||||
terminating the channel session. If the browser is the
|
||||
connection that has disconnected, Workhorse will send an ANSI
|
||||
`End of Transmission` control code (the `0x04` byte) to the
|
||||
channel, encoded according to the appropriate subprotocol.
|
||||
Workhorse will automatically reply to any websocket ping frame
|
||||
sent by the channel, to avoid being disconnected.
|
||||
Workhorse now has two websocket connections, albeit with differing sub-protocols,
|
||||
and then:
|
||||
|
||||
Currently, Workhorse only supports the following subprotocols.
|
||||
Supporting new deployment services will require new subprotocols
|
||||
to be supported:
|
||||
- Decodes incoming frames from the browser, re-encodes them to the channel's
|
||||
sub-protocol, and sends them to the channel.
|
||||
- Decodes incoming frames from the channel, re-encodes them to the browser's
|
||||
sub-protocol, and sends them to the browser.
|
||||
|
||||
When either connection closes or enters an error state, Workhorse detects the error
|
||||
and closes the other connection, terminating the channel session. If the browser
|
||||
is the connection that has disconnected, Workhorse sends an ANSI `End of Transmission`
|
||||
control code (the `0x04` byte) to the channel, encoded according to the appropriate
|
||||
sub-protocol. To avoid being disconnected, Workhorse replies to any websocket ping
|
||||
frame sent by the channel.
|
||||
|
||||
Workhorse only supports the following sub-protocols:
|
||||
|
||||
- [`channel.k8s.io`](#channelk8sio)
|
||||
- [`base64.channel.k8s.io`](#base64channelk8sio)
|
||||
|
||||
Supporting new deployment services requires new sub-protocols to be supported.
|
||||
|
||||
### `channel.k8s.io`

Used by Kubernetes, this subprotocol defines a simple multiplexed
channel.
Used by Kubernetes, this sub-protocol defines a simple multiplexed channel.

Control frames have their usual meanings. `TextMessage` frames are
invalid. `BinaryMessage` frames represent I/O to a specific file
descriptor.

The first byte of each `BinaryMessage` frame represents the file
descriptor (fd) number, as a `uint8` (so the value `0x00` corresponds
to fd 0, `STDIN`, while `0x01` corresponds to fd 1, `STDOUT`).
descriptor (`fd`) number, as a `uint8`. For example:

- `0x00` corresponds to `fd 0`, `STDIN`.
- `0x01` corresponds to `fd 1`, `STDOUT`.

The remaining bytes represent arbitrary data. For frames received
from the server, they are bytes that have been received from that
fd. For frames sent to the server, they are bytes that should be
written to that fd.
`fd`. For frames sent to the server, they are bytes that should be
written to that `fd`.

### `base64.channel.k8s.io`

Also used by Kubernetes, this subprotocol defines a similar multiplexed
Also used by Kubernetes, this sub-protocol defines a similar multiplexed
channel to `channel.k8s.io`. The main differences are:

- `TextMessage` frames are valid, rather than `BinaryMessage` frames.
- The first byte of each `TextMessage` frame represents the file
descriptor as a numeric UTF-8 character, so the character `U+0030`,
or "0", is fd 0, STDIN).
or "0", is `fd 0`, `STDIN`.
- The remaining bytes represent base64-encoded arbitrary data.
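As a rough illustration of the framing rules above (a sketch based on the description, not Workhorse's actual implementation), the following Go helpers build and parse frames for both sub-protocols: the first byte carries the file descriptor, the rest carries the payload, base64-encoded for the text variant:

```go
package channel

import (
	"encoding/base64"
	"errors"
	"fmt"
)

// encodeBinaryFrame builds a channel.k8s.io BinaryMessage payload:
// one fd byte (0 = STDIN, 1 = STDOUT, ...) followed by the raw data.
func encodeBinaryFrame(fd byte, data []byte) []byte {
	return append([]byte{fd}, data...)
}

// decodeBinaryFrame splits a channel.k8s.io frame back into fd and data.
func decodeBinaryFrame(frame []byte) (byte, []byte, error) {
	if len(frame) == 0 {
		return 0, nil, errors.New("empty frame")
	}
	return frame[0], frame[1:], nil
}

// encodeBase64Frame builds a base64.channel.k8s.io TextMessage payload:
// the fd as a numeric UTF-8 character ('0' = STDIN) followed by the
// base64-encoded data.
func encodeBase64Frame(fd byte, data []byte) []byte {
	out := []byte{'0' + fd}
	return append(out, base64.StdEncoding.EncodeToString(data)...)
}

// decodeBase64Frame reverses encodeBase64Frame.
func decodeBase64Frame(frame []byte) (byte, []byte, error) {
	if len(frame) == 0 {
		return 0, nil, errors.New("empty frame")
	}
	data, err := base64.StdEncoding.DecodeString(string(frame[1:]))
	if err != nil {
		return 0, nil, fmt.Errorf("decode payload: %w", err)
	}
	return frame[0] - '0', data, nil
}
```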
|
@ -6,9 +6,13 @@ info: To determine the technical writer assigned to the Stage/Group associated w
|
|||
|
||||
# Workhorse configuration
|
||||
|
||||
For historical reasons Workhorse uses both command line flags, a configuration file and environment variables.
|
||||
For historical reasons, Workhorse uses:
|
||||
|
||||
All new configuration options that get added to Workhorse should go into the configuration file.
|
||||
- Command line flags.
|
||||
- A configuration file.
|
||||
- Environment variables.
|
||||
|
||||
Add any new Workhorse configuration options into the configuration file.
|
||||
|
||||
## CLI options
|
||||
|
||||
|
@ -61,35 +65,32 @@ Options:
|
|||
```
|
||||
|
||||
The 'auth backend' refers to the GitLab Rails application. The name is
|
||||
a holdover from when GitLab Workhorse only handled Git push/pull over
|
||||
a holdover from when GitLab Workhorse only handled `git push` and `git pull` over
|
||||
HTTP.
|
||||
|
||||
GitLab Workhorse can listen on either a TCP or a Unix domain socket. It
|
||||
can also open a second listening TCP listening socket with the Go
|
||||
[`net/http/pprof` profiler server](http://golang.org/pkg/net/http/pprof/).
|
||||
|
||||
GitLab Workhorse can listen on Redis events (currently only builds/register
|
||||
for runners). This requires you to pass a valid TOML configuration file via
|
||||
`-config` flag.
|
||||
For regular setups it only requires the following (replacing the string
|
||||
GitLab Workhorse can listen on Redis build and runner registration events if you
|
||||
pass a valid TOML configuration file through the `-config` flag.
|
||||
A regular setup it only requires the following (replacing the string
|
||||
with the actual socket)
|
||||
|
||||
## Redis
|
||||
|
||||
GitLab Workhorse integrates with Redis to do long polling for CI build
|
||||
requests. This is configured via two things:
|
||||
requests. To configure it:
|
||||
|
||||
- Redis settings in the TOML configuration file
|
||||
- The `-apiCiLongPollingDuration` command line flag to control polling
|
||||
behavior for CI build requests
|
||||
- Configure Redis settings in the TOML configuration file.
|
||||
- Control polling behavior for CI build requests with the `-apiCiLongPollingDuration`
|
||||
command-line flag.
|
||||
|
||||
It is OK to enable Redis in the configuration file but to leave CI polling
|
||||
disabled; this just results in an idle Redis pubsub connection. The
|
||||
opposite is not possible: CI long polling requires a correct Redis
|
||||
configuration.
|
||||
You can enable Redis in the configuration file while leaving CI polling
|
||||
disabled. This configuration results in an idle Redis Pub/Sub connection. The
|
||||
opposite is not possible: CI long polling requires a correct Redis configuration.
|
||||
|
||||
Below we discuss the options for the `[redis]` section in the configuration
|
||||
file.
|
||||
For example, the `[redis]` section in the configuration file could contain:
|
||||
|
||||
```plaintext
|
||||
[redis]
|
||||
|
@ -99,15 +100,13 @@ Sentinel = [ "tcp://sentinel1:23456", "tcp://sentinel2:23456" ]
|
|||
SentinelMaster = "mymaster"
|
||||
```
|
||||
|
||||
- `URL` takes a string in the format `unix://path/to/redis.sock` or
|
||||
`tcp://host:port`.
|
||||
- `Password` is only required if your Redis instance is password-protected
|
||||
- `Sentinel` is used if you are using Sentinel.
|
||||
- `URL` - A string in the format `unix://path/to/redis.sock` or `tcp://host:port`.
|
||||
- `Password` - Required only if your Redis instance is password-protected.
|
||||
- `Sentinel` - Required if you use Sentinel.
|
||||
|
||||
NOTE:
|
||||
If both `Sentinel` and `URL` are given, only `Sentinel` will be used.
|
||||
If both `Sentinel` and `URL` are given, only `Sentinel` is used.
|
||||
|
||||
Optional fields are as follows:
|
||||
Optional fields:
|
||||
|
||||
```plaintext
|
||||
[redis]
|
||||
|
@ -116,15 +115,14 @@ MaxIdle = 1
|
|||
MaxActive = 1
|
||||
```
|
||||
|
||||
- `DB` is the Database to connect to. Defaults to `0`
|
||||
- `MaxIdle` is how many idle connections can be in the Redis pool at once. Defaults to 1
|
||||
- `MaxActive` is how many connections the pool can keep. Defaults to 1
|
||||
- `DB` - The database to connect to. Defaults to `0`.
|
||||
- `MaxIdle` - How many idle connections can be in the Redis pool at once. Defaults to `1`.
|
||||
- `MaxActive` - How many connections the pool can keep. Defaults to `1`.
|
||||
|
||||
## Relative URL support
|
||||
|
||||
If you are mounting GitLab at a relative URL, e.g.
|
||||
`example.com/gitlab`, then you should also use this relative URL in
|
||||
the `authBackend` setting:
|
||||
If you mount GitLab at a relative URL, like `example.com/gitlab`), use this
|
||||
relative URL in the `authBackend` setting:
|
||||
|
||||
```plaintext
|
||||
gitlab-workhorse -authBackend http://localhost:8080/gitlab
|
||||
|
@ -132,33 +130,32 @@ gitlab-workhorse -authBackend http://localhost:8080/gitlab
|
|||
|
||||
## Interaction of authBackend and authSocket
|
||||
|
||||
The interaction between `authBackend` and `authSocket` can be a bit
|
||||
confusing. It comes down to: if `authSocket` is set it overrides the
|
||||
_host_ part of `authBackend` but not the relative path.
|
||||
The interaction between `authBackend` and `authSocket` can be confusing.
|
||||
If `authSocket` is set, it overrides the host portion of `authBackend`, but not
|
||||
the relative path.
|
||||
|
||||
In table form:
|
||||
|
||||
|authBackend|authSocket|Workhorse connects to?|Rails relative URL|
|
||||
|---|---|---|---|
|
||||
|unset|unset|`localhost:8080`|`/`|
|
||||
|`http://localhost:3000`|unset|`localhost:3000`|`/`|
|
||||
|`http://localhost:3000/gitlab`|unset|`localhost:3000`|`/gitlab`|
|
||||
|unset|`/path/to/socket`|`/path/to/socket`|`/`|
|
||||
|`http://localhost:3000`|`/path/to/socket`|`/path/to/socket`|`/`|
|
||||
|`http://localhost:3000/gitlab`|`/path/to/socket`|`/path/to/socket`|`/gitlab`|
|
||||
| authBackend | authSocket | Workhorse connects to | Rails relative URL |
|
||||
|--------------------------------|-------------------|-----------------------|--------------------|
|
||||
| unset | unset | `localhost:8080` | `/` |
|
||||
| `http://localhost:3000` | unset | `localhost:3000` | `/` |
|
||||
| `http://localhost:3000/gitlab` | unset | `localhost:3000` | `/gitlab` |
|
||||
| unset | `/path/to/socket` | `/path/to/socket` | `/` |
|
||||
| `http://localhost:3000` | `/path/to/socket` | `/path/to/socket` | `/` |
|
||||
| `http://localhost:3000/gitlab` | `/path/to/socket` | `/path/to/socket` | `/gitlab` |
|
||||
|
||||
The same applies to `cableBackend` and `cableSocket`.
|
||||
|
||||
## Error tracking
|
||||
|
||||
GitLab-Workhorse supports remote error tracking with
|
||||
[Sentry](https://sentry.io). To enable this feature set the
|
||||
`GITLAB_WORKHORSE_SENTRY_DSN` environment variable.
|
||||
GitLab-Workhorse supports remote error tracking with [Sentry](https://sentry.io).
|
||||
To enable this feature, set the `GITLAB_WORKHORSE_SENTRY_DSN` environment variable.
|
||||
You can also set the `GITLAB_WORKHORSE_SENTRY_ENVIRONMENT` environment variable to
|
||||
use the Sentry environment functionality to separate staging, production and
|
||||
use the Sentry environment feature to separate staging, production and
|
||||
development.
|
||||
|
||||
Omnibus (`/etc/gitlab/gitlab.rb`):
|
||||
Omnibus GitLab (`/etc/gitlab/gitlab.rb`):
|
||||
|
||||
```ruby
|
||||
gitlab_workhorse['env'] = {
|
||||
|
@ -174,46 +171,48 @@ export GITLAB_WORKHORSE_SENTRY_DSN='https://foobar'
|
|||
export GITLAB_WORKHORSE_SENTRY_ENVIRONMENT='production'
|
||||
```
|
||||
|
||||
## Distributed Tracing
|
||||
## Distributed tracing
|
||||
|
||||
Workhorse supports distributed tracing through [LabKit](https://gitlab.com/gitlab-org/labkit/) using [OpenTracing APIs](https://opentracing.io).
|
||||
Workhorse supports distributed tracing through [LabKit](https://gitlab.com/gitlab-org/labkit/)
|
||||
using [OpenTracing APIs](https://opentracing.io).
|
||||
|
||||
By default, no tracing implementation is linked into the binary, but different OpenTracing providers can be linked in using [build tags](https://golang.org/pkg/go/build/#hdr-Build_Constraints) or build constraints. This can be done by setting the `BUILD_TAGS` make variable.
|
||||
By default, no tracing implementation is linked into the binary. You can link in
|
||||
different OpenTracing providers with [build tags](https://golang.org/pkg/go/build/#hdr-Build_Constraints)
|
||||
or build constraints by setting the `BUILD_TAGS` make variable.
|
||||
|
||||
For more details of the supported providers, see LabKit, but as an example, for Jaeger tracing support, include the tags: `BUILD_TAGS="tracer_static tracer_static_jaeger"`.
|
||||
For more details of the supported providers, refer to LabKit. For an example of
|
||||
Jaeger tracing support, include the tags: `BUILD_TAGS="tracer_static tracer_static_jaeger"` like this:
|
||||
|
||||
```shell
|
||||
make BUILD_TAGS="tracer_static tracer_static_jaeger"
|
||||
```
|
||||
|
||||
Once Workhorse is compiled with an opentracing provider, the tracing configuration is configured via the `GITLAB_TRACING` environment variable.
|
||||
|
||||
For example:
|
||||
After you compile Workhorse with an OpenTracing provider, configure the tracing
|
||||
configuration with the `GITLAB_TRACING` environment variable, like this:
|
||||
|
||||
```shell
|
||||
GITLAB_TRACING=opentracing://jaeger ./gitlab-workhorse
|
||||
```
|
||||
|
||||
## Continuous Profiling
|
||||
## Continuous profiling
|
||||
|
||||
Workhorse supports continuous profiling through [LabKit](https://gitlab.com/gitlab-org/labkit/) using [Stackdriver Profiler](https://cloud.google.com/profiler).
|
||||
|
||||
By default, the Stackdriver Profiler implementation is linked in the binary using [build tags](https://golang.org/pkg/go/build/#hdr-Build_Constraints), though it's not
|
||||
required and can be skipped.
|
||||
|
||||
For example:
|
||||
Workhorse supports continuous profiling through [LabKit](https://gitlab.com/gitlab-org/labkit/)
|
||||
using [Stackdriver Profiler](https://cloud.google.com/profiler). By default, the
|
||||
Stackdriver Profiler implementation is linked in the binary using
|
||||
[build tags](https://golang.org/pkg/go/build/#hdr-Build_Constraints), though it's not
|
||||
required and can be skipped. For example:
|
||||
|
||||
```shell
|
||||
make BUILD_TAGS=""
|
||||
```
|
||||
|
||||
Once Workhorse is compiled with Continuous Profiling, the profiler configuration can be set via `GITLAB_CONTINUOUS_PROFILING`
|
||||
environment variable.
|
||||
|
||||
For example:
|
||||
After you compile Workhorse with continuous profiling, set the profiler configuration
|
||||
with the `GITLAB_CONTINUOUS_PROFILING` environment variable. For example:
|
||||
|
||||
```shell
|
||||
GITLAB_CONTINUOUS_PROFILING="stackdriver?service=workhorse&service_version=1.0.1&project_id=test-123 ./gitlab-workhorse"
|
||||
```
|
||||
|
||||
More information about see the [LabKit monitoring documentation](https://gitlab.com/gitlab-org/labkit/-/blob/master/monitoring/doc.go).
|
||||
## Related topics
|
||||
|
||||
- [LabKit monitoring documentation](https://gitlab.com/gitlab-org/labkit/-/blob/master/monitoring/doc.go).
|
||||

@@ -10,7 +10,7 @@ Workhorse itself is not a feature, but there are several features in
GitLab that would not work efficiently without Workhorse.

To put the efficiency benefit in context, consider that in 2020Q3 on
GitLab.com [we see][https://thanos-query.ops.gitlab.net/graph?g0.range_input=1h&g0.max_source_resolution=0s&g0.expr=sum(ruby_process_resident_memory_bytes%7Bapp%3D%22webservice%22%2Cenv%3D%22gprd%22%2Crelease%3D%22gitlab%22%7D)%20%2F%20sum(puma_max_threads%7Bapp%3D%22webservice%22%2Cenv%3D%22gprd%22%2Crelease%3D%22gitlab%22%7D)&g0.tab=1&g1.range_input=1h&g1.max_source_resolution=0s&g1.expr=sum(go_memstats_sys_bytes%7Bapp%3D%22webservice%22%2Cenv%3D%22gprd%22%2Crelease%3D%22gitlab%22%7D)%2Fsum(go_goroutines%7Bapp%3D%22webservice%22%2Cenv%3D%22gprd%22%2Crelease%3D%22gitlab%22%7D)&g1.tab=1]
GitLab.com [we see](https://thanos-query.ops.gitlab.net/graph?g0.range_input=1h&g0.max_source_resolution=0s&g0.expr=sum(ruby_process_resident_memory_bytes%7Bapp%3D%22webservice%22%2Cenv%3D%22gprd%22%2Crelease%3D%22gitlab%22%7D)%20%2F%20sum(puma_max_threads%7Bapp%3D%22webservice%22%2Cenv%3D%22gprd%22%2Crelease%3D%22gitlab%22%7D)&g0.tab=1&g1.range_input=1h&g1.max_source_resolution=0s&g1.expr=sum(go_memstats_sys_bytes%7Bapp%3D%22webservice%22%2Cenv%3D%22gprd%22%2Crelease%3D%22gitlab%22%7D)%2Fsum(go_goroutines%7Bapp%3D%22webservice%22%2Cenv%3D%22gprd%22%2Crelease%3D%22gitlab%22%7D)&g1.tab=1)
Rails application threads using on average
about 200MB of RSS vs about 200KB for Workhorse goroutines.
@@ -7,40 +7,72 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Adding new features to Workhorse

GitLab Workhorse is a smart reverse proxy for GitLab. It handles
"long" HTTP requests such as file downloads, file uploads, Git
push/pull and Git archive downloads.
[long HTTP requests](#what-are-long-requests), such as:

Workhorse itself is not a feature, but there are [several features in GitLab](gitlab_features.md) that would not work efficiently without Workhorse.
- File downloads.
- File uploads.
- Git pushes and pulls.
- Git archive downloads.

At a first glance, it may look like Workhorse is just a pipeline for processing HTTP streams so that you can reduce the amount of logic in your Ruby on Rails controller, but there are good reasons to avoid treating it like that.
Workhorse itself is not a feature, but [several features in GitLab](gitlab_features.md)
would not work efficiently without Workhorse.

Engineers embarking on the quest of offloading a feature to Workhorse often find that the endeavor is much higher than what originally anticipated. In part because of the new programming language (only a few engineers at GitLab are Go developers), in part because of the demanding requirements for Workhorse. Workhorse is stateless, memory and disk usage must be kept under tight control, and the request should not be slowed down in the process.
At a first glance, Workhorse appears to be just a pipeline for processing HTTP
streams to reduce the amount of logic in your Ruby on Rails controller. However,
don't treat it that way. Engineers trying to offload a feature to Workhorse often
find it takes more work than originally anticipated:

## Can I add a new feature to Workhorse?
- It's a new programming language, and only a few engineers at GitLab are Go developers.
- Workhorse has demanding requirements:
  - It's stateless.
  - Memory and disk usage must be kept under tight control.
  - The request should not be slowed down in the process.

We suggest to follow this route only if absolutely necessary and no other options are available.
## Avoid adding new features

Splitting a feature between the Rails code-base and Workhorse is deliberately choosing to introduce technical debt. It adds complexity to the system and coupling between the two components.
We suggest adding new features only if absolutely necessary and no other options exist.
Splitting a feature between the Rails codebase and Workhorse is a deliberate choice
to introduce technical debt. It adds complexity to the system, and coupling between
the two components:

- Building features using Workhorse has a considerable complexity cost, so you should prefer designs based on Rails requests and Sidekiq jobs.
- Even when using Rails+Sidekiq is "more work" than using Rails+Workhorse, Rails+Sidekiq is easier to maintain in the long term because Workhorse is unique to GitLab while Rails+Sidekiq is an industry standard.
- For "global" behaviors around web requests consider using a Rack middleware instead of Workhorse.
- Generally speaking, we should only use Rails+Workhorse if the HTTP client expects behavior that is not reasonable to implement in Rails, like "long" requests.
- Building features using Workhorse has a considerable complexity cost, so you should
  prefer designs based on Rails requests and Sidekiq jobs.
- Even when using Rails-and-Sidekiq is more work than using Rails-and-Workhorse,
  Rails-and-Sidekiq is easier to maintain in the long term. Workhorse is unique
  to GitLab, while Rails-and-Sidekiq is an industry standard.
- For global behaviors around web requests, consider using a Rack middleware
  instead of Workhorse.
- Generally speaking, use Rails-and-Workhorse only if the HTTP client expects
  behavior that is not reasonable to implement in Rails, like long requests.

## What is a "long" request?
## What are long requests?

There is one order of magnitude between Workhorse and Puma RAM usage. Having connection open for a period longer than milliseconds is a problem because of the amount of RAM it monopolizes once it reaches the Ruby on Rails controller.
One order of magnitude exists between Workhorse and Puma RAM usage. Having a connection
open for longer than milliseconds is problematic due to the amount of RAM
it monopolizes after it reaches the Ruby on Rails controller. We've identified two classes
of long requests: data transfers and HTTP long polling. Some examples:

So far we identified two classes of "long" requests: data transfers and HTTP long polling.
- `git push`.
- `git pull`.
- Uploading or downloading an artifact.
- A CI runner waiting for a new job.

`git push`, `git pull`, uploading or downloading an artifact, the CI runner waiting for a new job are all good examples of long requests.
With the rise of cloud-native installations, Workhorse's feature set was extended
to add object storage direct-upload. This change removed the need for the shared
Network File System (NFS) drives.

With the rise of cloud-native installations, Workhorse's feature-set was extended to add object storage direct-upload, to get rid of the shared Network File System (NFS) drives.
If you still think we should add a new feature to Workhorse, open an issue for the
Workhorse maintainers and explain:

In 2020 @nolith presented at FOSDEM a talk titled [_Speed up the monolith. Building a smart reverse proxy in Go_](https://archive.fosdem.org/2020/schedule/event/speedupmonolith/).
You can watch the recording for more details on the history of Workhorse and the NFS removal.
1. What you want to implement.
1. Why it can't be implemented in our Ruby codebase.

[Uploads development documentation](../uploads.md)
contains the most common use-cases for adding a new type of upload and may answer all of your questions.
The Workhorse maintainers can help you assess the situation.

If you still think we should add a new feature to Workhorse, please open an issue explaining **what you want to implement** and **why it can't be implemented in our Ruby code-base**. Workhorse maintainers will be happy to help you assessing the situation.
## Related topics

- In 2020, `@nolith` presented the talk
  ["Speed up the monolith. Building a smart reverse proxy in Go"](https://archive.fosdem.org/2020/schedule/event/speedupmonolith/)
  at FOSDEM. The talk includes more details on the history of Workhorse and the NFS removal.
- The [uploads development documentation](../uploads.md) contains the most common
  use cases for adding a new type of upload.
|
|
@ -381,7 +381,7 @@ sudo -u git -H bundle exec rake gitlab:backup:create GITLAB_BACKUP_MAX_CONCURREN
|
|||
> - [Enabled on self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/355945) in GitLab 14.10.
|
||||
|
||||
FLAG:
|
||||
On self-managed GitLab, by default this feature is available. To hide the feature, ask an administrator to [disable the feature flag](<path to>/administration/feature_flags.md) named `incremental_repository_backup`.
|
||||
On self-managed GitLab, by default this feature is available. To hide the feature, ask an administrator to [disable the feature flag](../administration/feature_flags.md) named `incremental_repository_backup`.
|
||||
On GitLab.com, this feature is not available.
|
||||
|
||||
Incremental backups can be faster than full backups because they only pack changes since the last backup into the backup
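
For example, on a source installation you could request an incremental backup by setting the `INCREMENTAL` variable on the same Rake task shown above (a sketch; Omnibus installations use `sudo gitlab-backup create` instead):

```shell
# Create an incremental repository backup on a source installation.
sudo -u git -H bundle exec rake gitlab:backup:create INCREMENTAL=yes
```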
@@ -74,6 +74,18 @@ For more information about iteration cadences, you can refer to

**Planned removal milestone: 16.0 (2023-04-22)**

### Toggle notes confidentiality on APIs

WARNING:
This feature will be changed or removed in 16.0
as a [breaking change](https://docs.gitlab.com/ee/development/contributing/#breaking-changes).
Before updating GitLab, review the details carefully to determine if you need to make any
changes to your code, settings, or workflow.

Toggling notes confidentiality with the REST and GraphQL APIs is deprecated. Updating a note's confidential attribute is no longer supported by any means. We are changing this to simplify the experience and prevent private information from being unintentionally exposed.
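
As an illustration, update requests of the following shape are the ones affected; the project ID, issue IID, and note ID below are placeholders, and the endpoint is a hedged example based on the notes REST API:

```shell
# Hypothetical request: toggling the confidential flag on an existing issue note.
# After the removal, updating the confidential attribute is no longer supported; set it when creating the note instead.
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
  "https://gitlab.example.com/api/v4/projects/42/issues/7/notes/101?confidential=true"
```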

**Planned removal milestone: 16.0 (2023-05-22)**

## 14.9

### Background upload for object storage

@@ -152,7 +152,8 @@ If an issue or merge request is locked and closed, you cannot reopen it.

## Mark a comment as confidential **(FREE SELF)**

> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/207473) in GitLab 13.9 [with a flag](../../administration/feature_flags.md) named `confidential_notes`. Disabled by default.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/207473) in GitLab 13.9 [with a flag](../../administration/feature_flags.md) named `confidential_notes`. Disabled by default.
> - [Changed](https://gitlab.com/gitlab-org/gitlab/-/issues/351143) in GitLab 14.10: you can only mark comments in issues and epics as confidential. Previously, it was also possible for comments in merge requests and snippets.

FLAG:
On self-managed GitLab, by default this feature is not available. To make it available,
@@ -160,9 +161,25 @@ ask an administrator to [enable the feature flag](../../administration/feature_f
On GitLab.com, this feature is not available.
You should not use this feature for production environments.
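
For evaluation on a self-managed instance, an administrator could enable the flag with a one-line Rails runner call (a sketch; the feature flags documentation linked above describes the supported ways to do this):

```shell
# Enable the confidential_notes feature flag instance-wide (run as an administrator on the GitLab server).
sudo gitlab-rails runner "Feature.enable(:confidential_notes)"
```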

You can make a comment confidential, so that it is visible only to project members
who have at least the Reporter role.
You can make a comment **in an issue or an epic** confidential, so that it is visible only to you (the commenting user) and
the project members who have at least the Reporter role.

Keep in mind:

- You can only mark comments as confidential when you create them.
- You can't change the confidentiality of existing comments.
- Replies to comments use the same confidentiality as the original comment.

Prerequisites:

- You must either:
  - Have at least the Reporter role for the project.
  - Be the issue assignee.
  - Be the issue author.

To mark a comment as confidential:

1. Start adding a new comment.
1. Below the comment, select the **Make this comment confidential** checkbox.
1. Select **Comment**.

@@ -112,7 +112,7 @@ module API
          requires :noteable_id, type: Integer, desc: 'The ID of the noteable'
          requires :note_id, type: Integer, desc: 'The ID of a note'
          optional :body, type: String, allow_blank: false, desc: 'The content of a note'
          optional :confidential, type: Boolean, desc: 'Confidentiality note flag'
          optional :confidential, type: Boolean, desc: '[Deprecated in 14.10] No longer allowed to update confidentiality of notes'
        end
        put ":id/#{noteables_str}/:noteable_id/notes/:note_id", feature_category: feature_category do
          noteable = find_noteable(noteable_type, params[:noteable_id])

@@ -1,14 +0,0 @@
# frozen_string_literal: true

module Backup
  class Artifacts < Backup::Files
    def initialize(progress)
      super(progress, 'artifacts', JobArtifactUploader.root, excludes: ['tmp'])
    end

    override :human_name
    def human_name
      _('artifacts')
    end
  end
end
@@ -1,14 +0,0 @@
# frozen_string_literal: true

module Backup
  class Builds < Backup::Files
    def initialize(progress)
      super(progress, 'builds', Settings.gitlab_ci.builds_path)
    end

    override :human_name
    def human_name
      _('builds')
    end
  end
end
@@ -134,11 +134,6 @@ module Backup
      MSG
    end

    override :human_name
    def human_name
      _('database')
    end

    protected

    def database

@@ -9,12 +9,11 @@ module Backup

    DEFAULT_EXCLUDE = 'lost+found'

    attr_reader :name, :excludes
    attr_reader :excludes

    def initialize(progress, name, app_files_dir, excludes: [])
    def initialize(progress, app_files_dir, excludes: [])
      super(progress)

      @name = name
      @app_files_dir = app_files_dir
      @excludes = [DEFAULT_EXCLUDE].concat(excludes)
    end

@@ -55,7 +54,7 @@ module Backup

    override :restore
    def restore(backup_tarball)
      backup_existing_files_dir
      backup_existing_files_dir(backup_tarball)

      cmd_list = [%w[gzip -cd], %W[#{tar} --unlink-first --recursive-unlink -C #{app_files_realpath} -xf -]]
      status_list, output = run_pipeline!(cmd_list, in: backup_tarball)

@@ -73,11 +72,13 @@ module Backup
      end
    end

    def backup_existing_files_dir
    def backup_existing_files_dir(backup_tarball)
      name = File.basename(backup_tarball, '.tar.gz')

      timestamped_files_path = File.join(Gitlab.config.backup.path, "tmp", "#{name}.#{Time.now.to_i}")
      if File.exist?(app_files_realpath)
        # Move all files in the existing repos directory except . and .. to
        # repositories.old.<timestamp> directory
        # repositories.<timestamp> directory
        FileUtils.mkdir_p(timestamped_files_path, mode: 0700)
        files = Dir.glob(File.join(app_files_realpath, "*"), File::FNM_DOTMATCH) - [File.join(app_files_realpath, "."), File.join(app_files_realpath, "..")]
        begin

@@ -1,14 +0,0 @@
# frozen_string_literal: true

module Backup
  class Lfs < Backup::Files
    def initialize(progress)
      super(progress, 'lfs', Settings.lfs.storage_path)
    end

    override :human_name
    def human_name
      _('lfs objects')
    end
  end
end
@ -5,68 +5,34 @@ module Backup
|
|||
FILE_NAME_SUFFIX = '_gitlab_backup.tar'
|
||||
MANIFEST_NAME = 'backup_information.yml'
|
||||
|
||||
# pages used to deploy tmp files to this path
|
||||
# if some of these files are still there, we don't need them in the backup
|
||||
LEGACY_PAGES_TMP_PATH = '@pages.tmp'
|
||||
|
||||
TaskDefinition = Struct.new(
|
||||
:enabled, # `true` if the task can be used. Treated as `true` when not specified.
|
||||
:human_name, # Name of the task used for logging.
|
||||
:destination_path, # Where the task should put its backup file/dir.
|
||||
:destination_optional, # `true` if the destination might not exist on a successful backup.
|
||||
:cleanup_path, # Path to remove after a successful backup. Uses `destination_path` when not specified.
|
||||
:task,
|
||||
keyword_init: true
|
||||
)
|
||||
) do
|
||||
def enabled?
|
||||
enabled.nil? || enabled
|
||||
end
|
||||
end
|
||||
|
||||
attr_reader :progress
|
||||
|
||||
def initialize(progress, definitions: nil)
|
||||
@progress = progress
|
||||
|
||||
force = ENV['force'] == 'yes'
|
||||
@incremental = Feature.feature_flags_available? &&
|
||||
Feature.enabled?(:incremental_repository_backup, default_enabled: :yaml) &&
|
||||
Gitlab::Utils.to_boolean(ENV['INCREMENTAL'], default: false)
|
||||
|
||||
@definitions = definitions || {
|
||||
'db' => TaskDefinition.new(
|
||||
destination_path: 'db/database.sql.gz',
|
||||
cleanup_path: 'db',
|
||||
task: Database.new(progress, force: force)
|
||||
),
|
||||
'repositories' => TaskDefinition.new(
|
||||
destination_path: 'repositories',
|
||||
destination_optional: true,
|
||||
task: Repositories.new(progress, strategy: repository_backup_strategy)
|
||||
),
|
||||
'uploads' => TaskDefinition.new(
|
||||
destination_path: 'uploads.tar.gz',
|
||||
task: Uploads.new(progress)
|
||||
),
|
||||
'builds' => TaskDefinition.new(
|
||||
destination_path: 'builds.tar.gz',
|
||||
task: Builds.new(progress)
|
||||
),
|
||||
'artifacts' => TaskDefinition.new(
|
||||
destination_path: 'artifacts.tar.gz',
|
||||
task: Artifacts.new(progress)
|
||||
),
|
||||
'pages' => TaskDefinition.new(
|
||||
destination_path: 'pages.tar.gz',
|
||||
task: Pages.new(progress)
|
||||
),
|
||||
'lfs' => TaskDefinition.new(
|
||||
destination_path: 'lfs.tar.gz',
|
||||
task: Lfs.new(progress)
|
||||
),
|
||||
'terraform_state' => TaskDefinition.new(
|
||||
destination_path: 'terraform_state.tar.gz',
|
||||
task: TerraformState.new(progress)
|
||||
),
|
||||
'registry' => TaskDefinition.new(
|
||||
destination_path: 'registry.tar.gz',
|
||||
task: Registry.new(progress)
|
||||
),
|
||||
'packages' => TaskDefinition.new(
|
||||
destination_path: 'packages.tar.gz',
|
||||
task: Packages.new(progress)
|
||||
)
|
||||
}.freeze
|
||||
@definitions = definitions || build_definitions
|
||||
end
|
||||
|
||||
def create
|
||||
|
@ -102,22 +68,22 @@ module Backup
|
|||
|
||||
build_backup_information
|
||||
|
||||
unless definition.task.enabled
|
||||
puts_time "Dumping #{definition.task.human_name} ... ".color(:blue) + "[DISABLED]".color(:cyan)
|
||||
unless definition.enabled?
|
||||
puts_time "Dumping #{definition.human_name} ... ".color(:blue) + "[DISABLED]".color(:cyan)
|
||||
return
|
||||
end
|
||||
|
||||
if skipped?(task_name)
|
||||
puts_time "Dumping #{definition.task.human_name} ... ".color(:blue) + "[SKIPPED]".color(:cyan)
|
||||
puts_time "Dumping #{definition.human_name} ... ".color(:blue) + "[SKIPPED]".color(:cyan)
|
||||
return
|
||||
end
|
||||
|
||||
puts_time "Dumping #{definition.task.human_name} ... ".color(:blue)
|
||||
puts_time "Dumping #{definition.human_name} ... ".color(:blue)
|
||||
definition.task.dump(File.join(Gitlab.config.backup.path, definition.destination_path), backup_id)
|
||||
puts_time "Dumping #{definition.task.human_name} ... ".color(:blue) + "done".color(:green)
|
||||
puts_time "Dumping #{definition.human_name} ... ".color(:blue) + "done".color(:green)
|
||||
|
||||
rescue Backup::DatabaseBackupError, Backup::FileBackupError => e
|
||||
puts_time "Dumping #{definition.task.human_name} failed: #{e.message}".color(:red)
|
||||
puts_time "Dumping #{definition.human_name} failed: #{e.message}".color(:red)
|
||||
end
|
||||
|
||||
def restore
|
||||
|
@ -146,12 +112,12 @@ module Backup
|
|||
def run_restore_task(task_name)
|
||||
definition = @definitions[task_name]
|
||||
|
||||
unless definition.task.enabled
|
||||
puts_time "Restoring #{definition.task.human_name} ... ".color(:blue) + "[DISABLED]".color(:cyan)
|
||||
unless definition.enabled?
|
||||
puts_time "Restoring #{definition.human_name} ... ".color(:blue) + "[DISABLED]".color(:cyan)
|
||||
return
|
||||
end
|
||||
|
||||
puts_time "Restoring #{definition.task.human_name} ... ".color(:blue)
|
||||
puts_time "Restoring #{definition.human_name} ... ".color(:blue)
|
||||
|
||||
warning = definition.task.pre_restore_warning
|
||||
if warning.present?
|
||||
|
@ -161,7 +127,7 @@ module Backup
|
|||
|
||||
definition.task.restore(File.join(Gitlab.config.backup.path, definition.destination_path))
|
||||
|
||||
puts_time "Restoring #{definition.task.human_name} ... ".color(:blue) + "done".color(:green)
|
||||
puts_time "Restoring #{definition.human_name} ... ".color(:blue) + "done".color(:green)
|
||||
|
||||
warning = definition.task.post_restore_warning
|
||||
if warning.present?
|
||||
|
@ -176,6 +142,82 @@ module Backup
|
|||
|
||||
private
|
||||
|
||||
def build_definitions
|
||||
{
|
||||
'db' => TaskDefinition.new(
|
||||
human_name: _('database'),
|
||||
destination_path: 'db/database.sql.gz',
|
||||
cleanup_path: 'db',
|
||||
task: build_db_task
|
||||
),
|
||||
'repositories' => TaskDefinition.new(
|
||||
human_name: _('repositories'),
|
||||
destination_path: 'repositories',
|
||||
destination_optional: true,
|
||||
task: build_repositories_task
|
||||
),
|
||||
'uploads' => TaskDefinition.new(
|
||||
human_name: _('uploads'),
|
||||
destination_path: 'uploads.tar.gz',
|
||||
task: build_files_task(File.join(Gitlab.config.uploads.storage_path, 'uploads'), excludes: ['tmp'])
|
||||
),
|
||||
'builds' => TaskDefinition.new(
|
||||
human_name: _('builds'),
|
||||
destination_path: 'builds.tar.gz',
|
||||
task: build_files_task(Settings.gitlab_ci.builds_path)
|
||||
),
|
||||
'artifacts' => TaskDefinition.new(
|
||||
human_name: _('artifacts'),
|
||||
destination_path: 'artifacts.tar.gz',
|
||||
task: build_files_task(JobArtifactUploader.root, excludes: ['tmp'])
|
||||
),
|
||||
'pages' => TaskDefinition.new(
|
||||
human_name: _('pages'),
|
||||
destination_path: 'pages.tar.gz',
|
||||
task: build_files_task(Gitlab.config.pages.path, excludes: [LEGACY_PAGES_TMP_PATH])
|
||||
),
|
||||
'lfs' => TaskDefinition.new(
|
||||
human_name: _('lfs objects'),
|
||||
destination_path: 'lfs.tar.gz',
|
||||
task: build_files_task(Settings.lfs.storage_path)
|
||||
),
|
||||
'terraform_state' => TaskDefinition.new(
|
||||
human_name: _('terraform states'),
|
||||
destination_path: 'terraform_state.tar.gz',
|
||||
task: build_files_task(Settings.terraform_state.storage_path, excludes: ['tmp'])
|
||||
),
|
||||
'registry' => TaskDefinition.new(
|
||||
enabled: Gitlab.config.registry.enabled,
|
||||
human_name: _('container registry images'),
|
||||
destination_path: 'registry.tar.gz',
|
||||
task: build_files_task(Settings.registry.path)
|
||||
),
|
||||
'packages' => TaskDefinition.new(
|
||||
human_name: _('packages'),
|
||||
destination_path: 'packages.tar.gz',
|
||||
task: build_files_task(Settings.packages.storage_path, excludes: ['tmp'])
|
||||
)
|
||||
}.freeze
|
||||
end
|
||||
|
||||
def build_db_task
|
||||
force = ENV['force'] == 'yes'
|
||||
|
||||
Database.new(progress, force: force)
|
||||
end
|
||||
|
||||
def build_repositories_task
|
||||
max_concurrency = ENV['GITLAB_BACKUP_MAX_CONCURRENCY'].presence
|
||||
max_storage_concurrency = ENV['GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY'].presence
|
||||
strategy = Backup::GitalyBackup.new(progress, incremental: incremental?, max_parallelism: max_concurrency, storage_parallelism: max_storage_concurrency)
|
||||
|
||||
Repositories.new(progress, strategy: strategy)
|
||||
end
|
||||
|
||||
def build_files_task(app_files_dir, excludes: [])
|
||||
Files.new(progress, app_files_dir, excludes: excludes)
|
||||
end
|
||||
|
||||
def incremental?
|
||||
@incremental
|
||||
end
|
||||
|
@ -394,7 +436,7 @@ module Backup
|
|||
end
|
||||
|
||||
def enabled_task?(task_name)
|
||||
@definitions[task_name].task.enabled
|
||||
@definitions[task_name].enabled?
|
||||
end
|
||||
|
||||
def backup_file?(file)
|
||||
|
@ -500,12 +542,6 @@ module Backup
|
|||
Gitlab.config.backup.upload.connection&.provider&.downcase == 'google'
|
||||
end
|
||||
|
||||
def repository_backup_strategy
|
||||
max_concurrency = ENV['GITLAB_BACKUP_MAX_CONCURRENCY'].presence
|
||||
max_storage_concurrency = ENV['GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY'].presence
|
||||
Backup::GitalyBackup.new(progress, incremental: incremental?, max_parallelism: max_concurrency, storage_parallelism: max_storage_concurrency)
|
||||
end
|
||||
|
||||
def puts_time(msg)
|
||||
progress.puts "#{Time.now} -- #{msg}"
|
||||
Gitlab::BackupLogger.info(message: "#{Rainbow.uncolor(msg)}")
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Backup
|
||||
class Packages < Backup::Files
|
||||
def initialize(progress)
|
||||
super(progress, 'packages', Settings.packages.storage_path, excludes: ['tmp'])
|
||||
end
|
||||
|
||||
override :human_name
|
||||
def human_name
|
||||
_('packages')
|
||||
end
|
||||
end
|
||||
end
|
|
@ -1,18 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Backup
|
||||
class Pages < Backup::Files
|
||||
# pages used to deploy tmp files to this path
|
||||
# if some of these files are still there, we don't need them in the backup
|
||||
LEGACY_PAGES_TMP_PATH = '@pages.tmp'
|
||||
|
||||
def initialize(progress)
|
||||
super(progress, 'pages', Gitlab.config.pages.path, excludes: [LEGACY_PAGES_TMP_PATH])
|
||||
end
|
||||
|
||||
override :human_name
|
||||
def human_name
|
||||
_('pages')
|
||||
end
|
||||
end
|
||||
end
|
|
@ -1,19 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Backup
|
||||
class Registry < Backup::Files
|
||||
def initialize(progress)
|
||||
super(progress, 'registry', Settings.registry.path)
|
||||
end
|
||||
|
||||
override :human_name
|
||||
def human_name
|
||||
_('container registry images')
|
||||
end
|
||||
|
||||
override :enabled
|
||||
def enabled
|
||||
Gitlab.config.registry.enabled
|
||||
end
|
||||
end
|
||||
end
|
|
@ -33,11 +33,6 @@ module Backup
|
|||
restore_object_pools
|
||||
end
|
||||
|
||||
override :human_name
|
||||
def human_name
|
||||
_('repositories')
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :strategy
|
||||
|
|
|
@ -6,11 +6,6 @@ module Backup
|
|||
@progress = progress
|
||||
end
|
||||
|
||||
# human readable task name used for logging
|
||||
def human_name
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
# dump task backup to `path`
|
||||
#
|
||||
# @param [String] path fully qualified backup task destination
|
||||
|
@ -32,11 +27,6 @@ module Backup
|
|||
def post_restore_warning
|
||||
end
|
||||
|
||||
# returns `true` when the task should be used
|
||||
def enabled
|
||||
true
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :progress
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Backup
|
||||
class TerraformState < Backup::Files
|
||||
def initialize(progress)
|
||||
super(progress, 'terraform_state', Settings.terraform_state.storage_path, excludes: ['tmp'])
|
||||
end
|
||||
|
||||
override :human_name
|
||||
def human_name
|
||||
_('terraform states')
|
||||
end
|
||||
end
|
||||
end
|
|
@ -1,14 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Backup
|
||||
class Uploads < Backup::Files
|
||||
def initialize(progress)
|
||||
super(progress, 'uploads', File.join(Gitlab.config.uploads.storage_path, "uploads"), excludes: ['tmp'])
|
||||
end
|
||||
|
||||
override :human_name
|
||||
def human_name
|
||||
_('uploads')
|
||||
end
|
||||
end
|
||||
end
|
|
@ -3,19 +3,36 @@
|
|||
# This specific template is located at:
|
||||
# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Security/API-Fuzzing.latest.gitlab-ci.yml
|
||||
|
||||
# To use this template, add the following to your .gitlab-ci.yml file:
|
||||
#
|
||||
# include:
|
||||
# template: API-Fuzzing.latest.gitlab-ci.yml
|
||||
#
|
||||
# You also need to add a `fuzz` stage to your `stages:` configuration. A sample configuration for API Fuzzing:
|
||||
#
|
||||
# stages:
|
||||
# - build
|
||||
# - test
|
||||
# - deploy
|
||||
# - fuzz
|
||||
|
||||
# Read more about this feature here: https://docs.gitlab.com/ee/user/application_security/api_fuzzing/
|
||||
#
|
||||
# Configure API fuzzing with CI/CD variables (https://docs.gitlab.com/ee/ci/variables/index.html).
|
||||
# Configure API Fuzzing with CI/CD variables (https://docs.gitlab.com/ee/ci/variables/index.html).
|
||||
# List of available variables: https://docs.gitlab.com/ee/user/application_security/api_fuzzing/#available-cicd-variables
|
||||
|
||||
variables:
|
||||
FUZZAPI_VERSION: "1"
|
||||
# Setting this variable affects all Security templates
|
||||
# (SAST, Dependency Scanning, ...)
|
||||
SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products"
|
||||
#
|
||||
FUZZAPI_VERSION: "1"
|
||||
FUZZAPI_VERSION_TAG: ""
|
||||
FUZZAPI_IMAGE: api-fuzzing
|
||||
|
||||
apifuzzer_fuzz:
|
||||
stage: fuzz
|
||||
image: $SECURE_ANALYZERS_PREFIX/$FUZZAPI_IMAGE:$FUZZAPI_VERSION
|
||||
image: $SECURE_ANALYZERS_PREFIX/$FUZZAPI_IMAGE:$FUZZAPI_VERSION$FUZZAPI_VERSION_TAG
|
||||
allow_failure: true
|
||||
rules:
|
||||
- if: $API_FUZZING_DISABLED
|
||||
|
@ -23,6 +40,10 @@ apifuzzer_fuzz:
|
|||
- if: $API_FUZZING_DISABLED_FOR_DEFAULT_BRANCH &&
|
||||
$CI_DEFAULT_BRANCH == $CI_COMMIT_REF_NAME
|
||||
when: never
|
||||
- if: $CI_COMMIT_BRANCH &&
|
||||
$CI_GITLAB_FIPS_MODE == "true"
|
||||
variables:
|
||||
FUZZAPI_VERSION_TAG: "-fips"
|
||||
- if: $CI_COMMIT_BRANCH
|
||||
script:
|
||||
- /peach/analyzer-fuzz-api
|
||||
|
|
|
@ -27,11 +27,12 @@ variables:
|
|||
SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products"
|
||||
#
|
||||
DAST_API_VERSION: "1"
|
||||
DAST_API_VERSION_TAG: ""
|
||||
DAST_API_IMAGE: api-fuzzing
|
||||
|
||||
dast_api:
|
||||
stage: dast
|
||||
image: $SECURE_ANALYZERS_PREFIX/$DAST_API_IMAGE:$DAST_API_VERSION
|
||||
image: $SECURE_ANALYZERS_PREFIX/$DAST_API_IMAGE:$DAST_API_VERSION$DAST_API_VERSION_TAG
|
||||
allow_failure: true
|
||||
rules:
|
||||
- if: $DAST_API_DISABLED
|
||||
|
@ -39,6 +40,10 @@ dast_api:
|
|||
- if: $DAST_API_DISABLED_FOR_DEFAULT_BRANCH &&
|
||||
$CI_DEFAULT_BRANCH == $CI_COMMIT_REF_NAME
|
||||
when: never
|
||||
- if: $CI_COMMIT_BRANCH &&
|
||||
$CI_GITLAB_FIPS_MODE == "true"
|
||||
variables:
|
||||
DAST_API_VERSION_TAG: "-fips"
|
||||
- if: $CI_COMMIT_BRANCH
|
||||
script:
|
||||
- /peach/analyzer-dast-api
|
||||
|
@ -50,3 +55,5 @@ dast_api:
|
|||
- gl-*.log
|
||||
reports:
|
||||
dast: gl-dast-api-report.json
|
||||
|
||||
# end
|
||||
|
|
|
@@ -5,7 +5,7 @@ module Gitlab
    class Deprecation
      REASONS = {
        renamed: 'This was renamed.',
        discouraged: 'Use of this is not recommended.'
        alpha: 'This feature is in Alpha, and can be removed or changed at any point.'
      }.freeze

      include ActiveModel::Validations

@ -5266,6 +5266,9 @@ msgstr ""
|
|||
msgid "Authentication Log"
|
||||
msgstr ""
|
||||
|
||||
msgid "Authentication error: enable 2FA in your profile settings to continue using GitLab: %{mfa_help_page}"
|
||||
msgstr ""
|
||||
|
||||
msgid "Authentication failed: %{error_message}"
|
||||
msgstr ""
|
||||
|
||||
|
@ -15460,6 +15463,9 @@ msgstr ""
|
|||
msgid "Failed to load groups, users and deploy keys."
|
||||
msgstr ""
|
||||
|
||||
msgid "Failed to load iteration cadences."
|
||||
msgstr ""
|
||||
|
||||
msgid "Failed to load iterations."
|
||||
msgstr ""
|
||||
|
||||
|
|
|
@@ -31,6 +31,7 @@ module RuboCop
      ].freeze
      SELF_METHODS = %i[
        push_frontend_feature_flag
        push_force_frontend_feature_flag
        limit_feature_flag=
        limit_feature_flag_for_override=
      ].freeze + EXPERIMENT_METHODS + RUGGED_METHODS + WORKER_METHODS

@ -168,7 +168,12 @@ RSpec.describe GraphqlController do
|
|||
post :execute
|
||||
|
||||
expect(response).to have_gitlab_http_status(:unauthorized)
|
||||
expect(json_response).to eq({ 'errors' => [{ 'message' => '2FA required' }] })
|
||||
|
||||
expected_message = "Authentication error: " \
|
||||
"enable 2FA in your profile settings to continue using GitLab: %{mfa_help_page}" %
|
||||
{ mfa_help_page: EnforcesTwoFactorAuthentication::MFA_HELP_PAGE }
|
||||
|
||||
expect(json_response).to eq({ 'errors' => [{ 'message' => expected_message }] })
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -124,7 +124,7 @@ describe('BoardFilteredSearch', () => {
|
|||
{ type: 'milestone', value: { data: 'New Milestone', operator: '=' } },
|
||||
{ type: 'type', value: { data: 'INCIDENT', operator: '=' } },
|
||||
{ type: 'weight', value: { data: '2', operator: '=' } },
|
||||
{ type: 'iteration', value: { data: '3341', operator: '=' } },
|
||||
{ type: 'iteration', value: { data: 'Any&3', operator: '=' } },
|
||||
{ type: 'release', value: { data: 'v1.0.0', operator: '=' } },
|
||||
];
|
||||
jest.spyOn(urlUtility, 'updateHistory');
|
||||
|
@ -134,7 +134,7 @@ describe('BoardFilteredSearch', () => {
|
|||
title: '',
|
||||
replace: true,
|
||||
url:
|
||||
'http://test.host/?author_username=root&label_name[]=label&label_name[]=label%262&assignee_username=root&milestone_title=New%20Milestone&iteration_id=3341&types=INCIDENT&weight=2&release_tag=v1.0.0',
|
||||
'http://test.host/?author_username=root&label_name[]=label&label_name[]=label%262&assignee_username=root&milestone_title=New%20Milestone&iteration_id=Any&iteration_cadence_id=3&types=INCIDENT&weight=2&release_tag=v1.0.0',
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
@@ -1,4 +1,4 @@
import AddSshKeyValidation from '../../../app/assets/javascripts/profile/add_ssh_key_validation';
import AddSshKeyValidation from '~/profile/add_ssh_key_validation';

describe('AddSshKeyValidation', () => {
  describe('submit', () => {

@ -4,7 +4,7 @@ import { redirectTo } from '~/lib/utils/url_utility';
|
|||
import * as actions from '~/user_lists/store/edit/actions';
|
||||
import * as types from '~/user_lists/store/edit/mutation_types';
|
||||
import createState from '~/user_lists/store/edit/state';
|
||||
import { userList } from '../../../feature_flags/mock_data';
|
||||
import { userList } from 'jest/feature_flags/mock_data';
|
||||
|
||||
jest.mock('~/api');
|
||||
jest.mock('~/lib/utils/url_utility');
|
||||
|
|
|
@ -2,7 +2,7 @@ import statuses from '~/user_lists/constants/edit';
|
|||
import * as types from '~/user_lists/store/edit/mutation_types';
|
||||
import mutations from '~/user_lists/store/edit/mutations';
|
||||
import createState from '~/user_lists/store/edit/state';
|
||||
import { userList } from '../../../feature_flags/mock_data';
|
||||
import { userList } from 'jest/feature_flags/mock_data';
|
||||
|
||||
describe('User List Edit Mutations', () => {
|
||||
let state;
|
||||
|
|
|
@ -12,7 +12,7 @@ import {
|
|||
} from '~/user_lists/store/index/actions';
|
||||
import * as types from '~/user_lists/store/index/mutation_types';
|
||||
import createState from '~/user_lists/store/index/state';
|
||||
import { userList } from '../../../feature_flags/mock_data';
|
||||
import { userList } from 'jest/feature_flags/mock_data';
|
||||
|
||||
jest.mock('~/api.js');
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@ import { parseIntPagination, normalizeHeaders } from '~/lib/utils/common_utils';
|
|||
import * as types from '~/user_lists/store/index/mutation_types';
|
||||
import mutations from '~/user_lists/store/index/mutations';
|
||||
import createState from '~/user_lists/store/index/state';
|
||||
import { userList } from '../../../feature_flags/mock_data';
|
||||
import { userList } from 'jest/feature_flags/mock_data';
|
||||
|
||||
describe('~/user_lists/store/index/mutations', () => {
|
||||
let state;
|
||||
|
|
|
@ -4,7 +4,7 @@ import { redirectTo } from '~/lib/utils/url_utility';
|
|||
import * as actions from '~/user_lists/store/new/actions';
|
||||
import * as types from '~/user_lists/store/new/mutation_types';
|
||||
import createState from '~/user_lists/store/new/state';
|
||||
import { userList } from '../../../feature_flags/mock_data';
|
||||
import { userList } from 'jest/feature_flags/mock_data';
|
||||
|
||||
jest.mock('~/api');
|
||||
jest.mock('~/lib/utils/url_utility');
|
||||
|
|
|
@ -10,12 +10,12 @@ import extensionsContainer from '~/vue_merge_request_widget/components/extension
|
|||
import { registerExtension } from '~/vue_merge_request_widget/components/extensions';
|
||||
import httpStatusCodes from '~/lib/utils/http_status';
|
||||
|
||||
import { failedReport } from '../../../reports/mock_data/mock_data';
|
||||
import mixedResultsTestReports from '../../../reports/mock_data/new_and_fixed_failures_report.json';
|
||||
import newErrorsTestReports from '../../../reports/mock_data/new_errors_report.json';
|
||||
import newFailedTestReports from '../../../reports/mock_data/new_failures_report.json';
|
||||
import successTestReports from '../../../reports/mock_data/no_failures_report.json';
|
||||
import resolvedFailures from '../../../reports/mock_data/resolved_failures.json';
|
||||
import { failedReport } from 'jest/reports/mock_data/mock_data';
|
||||
import mixedResultsTestReports from 'jest/reports/mock_data/new_and_fixed_failures_report.json';
|
||||
import newErrorsTestReports from 'jest/reports/mock_data/new_errors_report.json';
|
||||
import newFailedTestReports from 'jest/reports/mock_data/new_failures_report.json';
|
||||
import successTestReports from 'jest/reports/mock_data/no_failures_report.json';
|
||||
import resolvedFailures from 'jest/reports/mock_data/resolved_failures.json';
|
||||
|
||||
const reportWithParsingErrors = failedReport;
|
||||
reportWithParsingErrors.suites[0].suite_errors = {
|
||||
|
|
|
@ -3,7 +3,7 @@ import Vue from 'vue';
|
|||
import Vuex from 'vuex';
|
||||
import IssuePlaceholderNote from '~/vue_shared/components/notes/placeholder_note.vue';
|
||||
import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
|
||||
import { userDataMock } from '../../../notes/mock_data';
|
||||
import { userDataMock } from 'jest/notes/mock_data';
|
||||
|
||||
Vue.use(Vuex);
|
||||
|
||||
|
|
|
@ -1,24 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Backup::Artifacts do
|
||||
let(:progress) { StringIO.new }
|
||||
|
||||
subject(:backup) { described_class.new(progress) }
|
||||
|
||||
describe '#dump' do
|
||||
before do
|
||||
allow(File).to receive(:realpath).with('/var/gitlab-artifacts').and_return('/var/gitlab-artifacts')
|
||||
allow(File).to receive(:realpath).with('/var/gitlab-artifacts/..').and_return('/var')
|
||||
allow(JobArtifactUploader).to receive(:root) { '/var/gitlab-artifacts' }
|
||||
end
|
||||
|
||||
it 'excludes tmp from backup tar' do
|
||||
expect(backup).to receive(:tar).and_return('blabla-tar')
|
||||
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/gitlab-artifacts -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
|
||||
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
|
||||
backup.dump('artifacts.tar.gz', 'backup_id')
|
||||
end
|
||||
end
|
||||
end
|
|
@ -39,7 +39,7 @@ RSpec.describe Backup::Files do
|
|||
end
|
||||
|
||||
describe '#restore' do
|
||||
subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
|
||||
subject { described_class.new(progress, '/var/gitlab-registry') }
|
||||
|
||||
let(:timestamp) { Time.utc(2017, 3, 22) }
|
||||
|
||||
|
@ -110,7 +110,7 @@ RSpec.describe Backup::Files do
|
|||
end
|
||||
|
||||
describe '#dump' do
|
||||
subject { described_class.new(progress, 'pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
|
||||
subject { described_class.new(progress, '/var/gitlab-pages', excludes: ['@pages.tmp']) }
|
||||
|
||||
before do
|
||||
allow(subject).to receive(:run_pipeline!).and_return([[true, true], ''])
|
||||
|
@ -176,7 +176,7 @@ RSpec.describe Backup::Files do
|
|||
end
|
||||
|
||||
describe '#exclude_dirs' do
|
||||
subject { described_class.new(progress, 'pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
|
||||
subject { described_class.new(progress, '/var/gitlab-pages', excludes: ['@pages.tmp']) }
|
||||
|
||||
it 'prepends a leading dot slash to tar excludes' do
|
||||
expect(subject.exclude_dirs(:tar)).to eq(['--exclude=lost+found', '--exclude=./@pages.tmp'])
|
||||
|
@ -188,7 +188,7 @@ RSpec.describe Backup::Files do
|
|||
end
|
||||
|
||||
describe '#run_pipeline!' do
|
||||
subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
|
||||
subject { described_class.new(progress, '/var/gitlab-registry') }
|
||||
|
||||
it 'executes an Open3.pipeline for cmd_list' do
|
||||
expect(Open3).to receive(:pipeline).with(%w[whew command], %w[another cmd], any_args)
|
||||
|
@ -222,7 +222,7 @@ RSpec.describe Backup::Files do
|
|||
end
|
||||
|
||||
describe '#pipeline_succeeded?' do
|
||||
subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
|
||||
subject { described_class.new(progress, '/var/gitlab-registry') }
|
||||
|
||||
it 'returns true if both tar and gzip succeeeded' do
|
||||
expect(
|
||||
|
@ -262,7 +262,7 @@ RSpec.describe Backup::Files do
|
|||
end
|
||||
|
||||
describe '#tar_ignore_non_success?' do
|
||||
subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
|
||||
subject { described_class.new(progress, '/var/gitlab-registry') }
|
||||
|
||||
context 'if `tar` command exits with 1 exitstatus' do
|
||||
it 'returns true' do
|
||||
|
@ -310,7 +310,7 @@ RSpec.describe Backup::Files do
|
|||
end
|
||||
|
||||
describe '#noncritical_warning?' do
|
||||
subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
|
||||
subject { described_class.new(progress, '/var/gitlab-registry') }
|
||||
|
||||
it 'returns true if given text matches noncritical warnings list' do
|
||||
expect(
|
||||
|
|
|
@ -1,26 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Backup::Lfs do
|
||||
let(:progress) { StringIO.new }
|
||||
|
||||
subject(:backup) { described_class.new(progress) }
|
||||
|
||||
describe '#dump' do
|
||||
before do
|
||||
allow(File).to receive(:realpath).and_call_original
|
||||
allow(File).to receive(:realpath).with('/var/lfs-objects').and_return('/var/lfs-objects')
|
||||
allow(File).to receive(:realpath).with('/var/lfs-objects/..').and_return('/var')
|
||||
allow(Settings.lfs).to receive(:storage_path).and_return('/var/lfs-objects')
|
||||
end
|
||||
|
||||
it 'uses the correct lfs dir in tar command', :aggregate_failures do
|
||||
expect(backup).to receive(:tar).and_return('blabla-tar')
|
||||
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found -C /var/lfs-objects -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
|
||||
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
|
||||
|
||||
backup.dump('lfs.tar.gz', 'backup_id')
|
||||
end
|
||||
end
|
||||
end
|
|
@ -22,8 +22,8 @@ RSpec.describe Backup::Manager do
|
|||
|
||||
describe '#run_create_task' do
|
||||
let(:enabled) { true }
|
||||
let(:task) { instance_double(Backup::Task, human_name: 'my task', enabled: enabled) }
|
||||
let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, destination_path: 'my_task.tar.gz') } }
|
||||
let(:task) { instance_double(Backup::Task) }
|
||||
let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, enabled: enabled, destination_path: 'my_task.tar.gz', human_name: 'my task') } }
|
||||
|
||||
it 'calls the named task' do
|
||||
expect(task).to receive(:dump)
|
||||
|
@ -58,12 +58,10 @@ RSpec.describe Backup::Manager do
|
|||
let(:enabled) { true }
|
||||
let(:pre_restore_warning) { nil }
|
||||
let(:post_restore_warning) { nil }
|
||||
let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, destination_path: 'my_task.tar.gz') } }
|
||||
let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, enabled: enabled, human_name: 'my task', destination_path: 'my_task.tar.gz') } }
|
||||
let(:backup_information) { {} }
|
||||
let(:task) do
|
||||
instance_double(Backup::Task,
|
||||
human_name: 'my task',
|
||||
enabled: enabled,
|
||||
pre_restore_warning: pre_restore_warning,
|
||||
post_restore_warning: post_restore_warning)
|
||||
end
|
||||
|
@ -158,12 +156,12 @@ RSpec.describe Backup::Manager do
|
|||
}
|
||||
end
|
||||
|
||||
let(:task1) { instance_double(Backup::Task, human_name: 'task 1', enabled: true) }
|
||||
let(:task2) { instance_double(Backup::Task, human_name: 'task 2', enabled: true) }
|
||||
let(:task1) { instance_double(Backup::Task) }
|
||||
let(:task2) { instance_double(Backup::Task) }
|
||||
let(:definitions) do
|
||||
{
|
||||
'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
|
||||
'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz')
|
||||
'task1' => Backup::Manager::TaskDefinition.new(task: task1, human_name: 'task 1', destination_path: 'task1.tar.gz'),
|
||||
'task2' => Backup::Manager::TaskDefinition.new(task: task2, human_name: 'task 2', destination_path: 'task2.tar.gz')
|
||||
}
|
||||
end
|
||||
|
||||
|
@ -735,12 +733,12 @@ RSpec.describe Backup::Manager do
|
|||
end
|
||||
|
||||
describe '#restore' do
|
||||
let(:task1) { instance_double(Backup::Task, human_name: 'task 1', enabled: true, pre_restore_warning: nil, post_restore_warning: nil) }
|
||||
let(:task2) { instance_double(Backup::Task, human_name: 'task 2', enabled: true, pre_restore_warning: nil, post_restore_warning: nil) }
|
||||
let(:task1) { instance_double(Backup::Task, pre_restore_warning: nil, post_restore_warning: nil) }
|
||||
let(:task2) { instance_double(Backup::Task, pre_restore_warning: nil, post_restore_warning: nil) }
|
||||
let(:definitions) do
|
||||
{
|
||||
'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
|
||||
'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz')
|
||||
'task1' => Backup::Manager::TaskDefinition.new(task: task1, human_name: 'task 1', destination_path: 'task1.tar.gz'),
|
||||
'task2' => Backup::Manager::TaskDefinition.new(task: task2, human_name: 'task 2', destination_path: 'task2.tar.gz')
|
||||
}
|
||||
end
|
||||
|
||||
|
|
|
@ -1,35 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.shared_examples 'backup object' do |setting|
|
||||
let(:progress) { StringIO.new }
|
||||
let(:backup_path) { "/var/#{setting}" }
|
||||
|
||||
subject(:backup) { described_class.new(progress) }
|
||||
|
||||
describe '#dump' do
|
||||
before do
|
||||
allow(File).to receive(:realpath).and_call_original
|
||||
allow(File).to receive(:realpath).with(backup_path).and_return(backup_path)
|
||||
allow(File).to receive(:realpath).with("#{backup_path}/..").and_return('/var')
|
||||
allow(Settings.send(setting)).to receive(:storage_path).and_return(backup_path)
|
||||
end
|
||||
|
||||
it 'uses the correct storage dir in tar command and excludes tmp', :aggregate_failures do
|
||||
expect(backup).to receive(:tar).and_return('blabla-tar')
|
||||
expect(backup).to receive(:run_pipeline!).with([%W(blabla-tar --exclude=lost+found --exclude=./tmp -C #{backup_path} -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
|
||||
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
|
||||
|
||||
backup.dump('backup_object.tar.gz', 'backup_id')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
RSpec.describe Backup::Packages do
|
||||
it_behaves_like 'backup object', 'packages'
|
||||
end
|
||||
|
||||
RSpec.describe Backup::TerraformState do
|
||||
it_behaves_like 'backup object', 'terraform_state'
|
||||
end
|
|
@ -1,25 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Backup::Pages do
|
||||
let(:progress) { StringIO.new }
|
||||
|
||||
subject { described_class.new(progress) }
|
||||
|
||||
before do
|
||||
allow(File).to receive(:realpath).with("/var/gitlab-pages").and_return("/var/gitlab-pages")
|
||||
allow(File).to receive(:realpath).with("/var/gitlab-pages/..").and_return("/var")
|
||||
end
|
||||
|
||||
describe '#dump' do
|
||||
it 'excludes tmp from backup tar' do
|
||||
allow(Gitlab.config.pages).to receive(:path) { '/var/gitlab-pages' }
|
||||
|
||||
expect(subject).to receive(:tar).and_return('blabla-tar')
|
||||
expect(subject).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
|
||||
expect(subject).to receive(:pipeline_succeeded?).and_return(true)
|
||||
subject.dump('pages.tar.gz', 'backup_id')
|
||||
end
|
||||
end
|
||||
end
|
|
@ -7,12 +7,6 @@ RSpec.describe Backup::Task do
|
|||
|
||||
subject { described_class.new(progress) }
|
||||
|
||||
describe '#human_name' do
|
||||
it 'must be implemented by the subclass' do
|
||||
expect { subject.human_name }.to raise_error(NotImplementedError)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#dump' do
|
||||
it 'must be implemented by the subclass' do
|
||||
expect { subject.dump('some/path', 'backup_id') }.to raise_error(NotImplementedError)
|
||||
|
|
|
@ -1,25 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Backup::Uploads do
|
||||
let(:progress) { StringIO.new }
|
||||
|
||||
subject(:backup) { described_class.new(progress) }
|
||||
|
||||
describe '#dump' do
|
||||
before do
|
||||
allow(File).to receive(:realpath).and_call_original
|
||||
allow(File).to receive(:realpath).with('/var/uploads').and_return('/var/uploads')
|
||||
allow(File).to receive(:realpath).with('/var/uploads/..').and_return('/var')
|
||||
allow(Gitlab.config.uploads).to receive(:storage_path) { '/var' }
|
||||
end
|
||||
|
||||
it 'excludes tmp from backup tar' do
|
||||
expect(backup).to receive(:tar).and_return('blabla-tar')
|
||||
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/uploads -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
|
||||
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
|
||||
backup.dump('uploads.tar.gz', 'backup_id')
|
||||
end
|
||||
end
|
||||
end
|
|
@ -3366,44 +3366,9 @@ RSpec.describe Group do
|
|||
end
|
||||
|
||||
describe '#work_items_feature_flag_enabled?' do
|
||||
let_it_be(:root_group) { create(:group) }
|
||||
let_it_be(:group) { create(:group, parent: root_group) }
|
||||
let_it_be(:project) { create(:project, group: group) }
|
||||
|
||||
subject { group.work_items_feature_flag_enabled? }
|
||||
|
||||
context 'when work_items FF is enabled for the root group' do
|
||||
before do
|
||||
stub_feature_flags(work_items: root_group)
|
||||
end
|
||||
|
||||
it { is_expected.to be_truthy }
|
||||
end
|
||||
|
||||
context 'when work_items FF is enabled for the group' do
|
||||
before do
|
||||
stub_feature_flags(work_items: group)
|
||||
end
|
||||
|
||||
it { is_expected.to be_truthy }
|
||||
|
||||
context 'when root_group is the actor' do
|
||||
it 'is not enabled if the FF is enabled for a child' do
|
||||
expect(root_group).not_to be_work_items_feature_flag_enabled
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when work_items FF is disabled globally' do
|
||||
before do
|
||||
stub_feature_flags(work_items: false)
|
||||
end
|
||||
|
||||
it { is_expected.to be_falsey }
|
||||
end
|
||||
|
||||
context 'when work_items FF is enabled globally' do
|
||||
it { is_expected.to be_truthy }
|
||||
it_behaves_like 'checks self and root ancestor feature flag' do
|
||||
let(:feature_flag) { :work_items }
|
||||
let(:feature_flag_method) { :work_items_feature_flag_enabled? }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@@ -47,5 +47,6 @@ RSpec.describe 'PipelineCancel'

      expect(response).to have_gitlab_http_status(:success)
      expect(build.reload).to be_canceled
      expect(pipeline.reload).to be_canceled
    end
  end

@ -62,9 +62,9 @@ RSpec.shared_examples 'Gitlab-style deprecations' do
|
|||
expect(deprecable.deprecation_reason).to include 'This was renamed.'
|
||||
end
|
||||
|
||||
it 'supports named reasons: discouraged' do
|
||||
deprecable = subject(deprecated: { milestone: '1.10', reason: :discouraged })
|
||||
it 'supports named reasons: alpha' do
|
||||
deprecable = subject(deprecated: { milestone: '1.10', reason: :alpha })
|
||||
|
||||
expect(deprecable.deprecation_reason).to include 'Use of this is not recommended.'
|
||||
expect(deprecable.deprecation_reason).to include 'This feature is in Alpha'
|
||||
end
|
||||
end
|
||||
|
|
spec/support/shared_examples/models/group_shared_examples.rb (43 lines, new file)
@@ -0,0 +1,43 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
RSpec.shared_examples 'checks self and root ancestor feature flag' do
|
||||
let_it_be(:root_group) { create(:group) }
|
||||
let_it_be(:group) { create(:group, parent: root_group) }
|
||||
let_it_be(:project) { create(:project, group: group) }
|
||||
|
||||
subject { group.public_send(feature_flag_method) }
|
||||
|
||||
context 'when FF is enabled for the root group' do
|
||||
before do
|
||||
stub_feature_flags(feature_flag => root_group)
|
||||
end
|
||||
|
||||
it { is_expected.to be_truthy }
|
||||
end
|
||||
|
||||
context 'when FF is enabled for the group' do
|
||||
before do
|
||||
stub_feature_flags(feature_flag => group)
|
||||
end
|
||||
|
||||
it { is_expected.to be_truthy }
|
||||
|
||||
context 'when root_group is the actor' do
|
||||
it 'is not enabled if the FF is enabled for a child' do
|
||||
expect(root_group.public_send(feature_flag_method)).to be_falsey
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when FF is disabled globally' do
|
||||
before do
|
||||
stub_feature_flags(feature_flag => false)
|
||||
end
|
||||
|
||||
it { is_expected.to be_falsey }
|
||||
end
|
||||
|
||||
context 'when FF is enabled globally' do
|
||||
it { is_expected.to be_truthy }
|
||||
end
|
||||
end
|
|
@ -235,19 +235,19 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
|
|||
db_backup_error = Backup::DatabaseBackupError.new(config, db_file_name)
|
||||
|
||||
where(:backup_class, :rake_task, :error) do
|
||||
Backup::Database | 'gitlab:backup:db:create' | db_backup_error
|
||||
Backup::Builds | 'gitlab:backup:builds:create' | file_backup_error
|
||||
Backup::Uploads | 'gitlab:backup:uploads:create' | file_backup_error
|
||||
Backup::Artifacts | 'gitlab:backup:artifacts:create' | file_backup_error
|
||||
Backup::Pages | 'gitlab:backup:pages:create' | file_backup_error
|
||||
Backup::Lfs | 'gitlab:backup:lfs:create' | file_backup_error
|
||||
Backup::Registry | 'gitlab:backup:registry:create' | file_backup_error
|
||||
Backup::Database | 'gitlab:backup:db:create' | db_backup_error
|
||||
Backup::Files | 'gitlab:backup:builds:create' | file_backup_error
|
||||
Backup::Files | 'gitlab:backup:uploads:create' | file_backup_error
|
||||
Backup::Files | 'gitlab:backup:artifacts:create' | file_backup_error
|
||||
Backup::Files | 'gitlab:backup:pages:create' | file_backup_error
|
||||
Backup::Files | 'gitlab:backup:lfs:create' | file_backup_error
|
||||
Backup::Files | 'gitlab:backup:registry:create' | file_backup_error
|
||||
end
|
||||
|
||||
with_them do
|
||||
before do
|
||||
expect_next_instance_of(backup_class) do |instance|
|
||||
expect(instance).to receive(:dump).and_raise(error)
|
||||
allow_next_instance_of(backup_class) do |instance|
|
||||
allow(instance).to receive(:dump).and_raise(error)
|
||||
end
|
||||
end
|
||||
|
||||
|
|