Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-12-07 12:10:33 +00:00
parent dc62bfce8b
commit 6dd9e3644e
64 changed files with 1416 additions and 958 deletions

View File

@ -67,10 +67,6 @@ Rails/SaveBang:
- qa/qa/specs/features/browser_ui/3_create/repository/push_mirroring_lfs_over_http_spec.rb
- qa/qa/specs/features/ee/browser_ui/3_create/repository/pull_mirroring_over_http_spec.rb
- qa/qa/specs/features/ee/browser_ui/3_create/repository/pull_mirroring_over_ssh_with_key_spec.rb
- spec/controllers/abuse_reports_controller_spec.rb
- spec/controllers/boards/issues_controller_spec.rb
- spec/controllers/sent_notifications_controller_spec.rb
- spec/controllers/sessions_controller_spec.rb
- spec/lib/backup/manager_spec.rb
- spec/lib/gitlab/alerting/alert_spec.rb
- spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb

View File

@ -1,13 +1,12 @@
#import "~/graphql_shared/fragments/label.fragment.graphql"
mutation issueSetLabels($input: UpdateIssueInput!) {
updateIssue(input: $input) {
issue {
updateIssuableLabels: updateIssue(input: $input) {
issuable: issue {
id
labels {
nodes {
id
title
color
description
...Label
}
}
}

View File

@ -2,7 +2,6 @@ import { s__, __ } from '~/locale';
export const TEST_INTEGRATION_EVENT = 'testIntegration';
export const SAVE_INTEGRATION_EVENT = 'saveIntegration';
export const TOGGLE_INTEGRATION_EVENT = 'toggleIntegration';
export const VALIDATE_INTEGRATION_FORM_EVENT = 'validateIntegrationForm';
export const integrationLevels = {

View File

@ -1,8 +1,6 @@
<script>
import { GlFormGroup, GlFormCheckbox } from '@gitlab/ui';
import { mapGetters } from 'vuex';
import { TOGGLE_INTEGRATION_EVENT } from '~/integrations/constants';
import eventHub from '../event_hub';
export default {
name: 'ActiveCheckbox',
@ -20,14 +18,11 @@ export default {
},
mounted() {
this.activated = this.propsSource.initialActivated;
// Initialize view
this.$nextTick(() => {
this.onChange(this.activated);
});
this.onChange(this.activated);
},
methods: {
onChange(e) {
eventHub.$emit(TOGGLE_INTEGRATION_EVENT, e);
onChange(isChecked) {
this.$emit('toggle-integration-active', isChecked);
},
},
};

View File

@ -37,12 +37,21 @@ export default {
},
mixins: [glFeatureFlagsMixin()],
props: {
formSelector: {
type: String,
required: true,
},
helpHtml: {
type: String,
required: false,
default: '',
},
},
data() {
return {
integrationActive: false,
};
},
computed: {
...mapGetters(['currentKey', 'propsSource', 'isDisabled']),
...mapState([
@ -71,7 +80,7 @@ export default {
},
mounted() {
// this form element is defined in Haml
this.form = document.querySelector('.js-integration-settings-form');
this.form = document.querySelector(this.formSelector);
},
methods: {
...mapActions([
@ -84,11 +93,15 @@ export default {
]),
onSaveClick() {
this.setIsSaving(true);
eventHub.$emit(SAVE_INTEGRATION_EVENT);
const formValid = this.form.checkValidity() || this.integrationActive === false;
eventHub.$emit(SAVE_INTEGRATION_EVENT, formValid);
},
onTestClick() {
this.setIsTesting(true);
eventHub.$emit(TEST_INTEGRATION_EVENT);
const formValid = this.form.checkValidity();
eventHub.$emit(TEST_INTEGRATION_EVENT, formValid);
},
onResetClick() {
this.fetchResetIntegration();
@ -97,6 +110,19 @@ export default {
const formData = new FormData(this.form);
this.requestJiraIssueTypes(formData);
},
onToggleIntegrationState(integrationActive) {
this.integrationActive = integrationActive;
if (!this.form) {
return;
}
// If integration will be active, enable form validation.
if (integrationActive) {
this.form.removeAttribute('novalidate');
} else {
this.form.setAttribute('novalidate', true);
}
},
},
helpHtmlConfig: {
ADD_ATTR: ['target'], // allow external links, can be removed after https://gitlab.com/gitlab-org/gitlab-ui/-/issues/1427 is implemented
@ -123,7 +149,11 @@ export default {
<!-- helpHtml is trusted input -->
<div v-if="helpHtml" v-safe-html:[$options.helpHtmlConfig]="helpHtml"></div>
<active-checkbox v-if="propsSource.showActive" :key="`${currentKey}-active-checkbox`" />
<active-checkbox
v-if="propsSource.showActive"
:key="`${currentKey}-active-checkbox`"
@toggle-integration-active="onToggleIntegrationState"
/>
<jira-trigger-fields
v-if="isJira"
:key="`${currentKey}-jira-trigger-fields`"
@ -167,6 +197,7 @@ export default {
type="submit"
:loading="isSaving"
:disabled="isDisabled"
data-testid="save-button"
data-qa-selector="save_changes_button"
@click.prevent="onSaveClick"
>
@ -180,6 +211,7 @@ export default {
:loading="isTesting"
:disabled="isDisabled"
:href="propsSource.testPath"
data-testid="test-button"
@click.prevent="onTestClick"
>
{{ __('Test settings') }}

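For reference, a minimal sketch of the validity rule `onSaveClick` now applies, written against a plain `HTMLFormElement` (the helper name is hypothetical, not part of this change): an inactive integration always counts as valid so it can be saved without filling in required fields, while an active one must pass native constraint validation.

```javascript
// Sketch only: mirrors the `checkValidity() || integrationActive === false`
// check from onSaveClick. `form` is any HTMLFormElement; `integrationActive`
// is the flag kept up to date by onToggleIntegrationState.
function isFormValidForSave(form, integrationActive) {
  if (integrationActive === false) {
    // Inactive integrations skip validation entirely so they can be saved as-is.
    return true;
  }
  // Active integrations rely on native HTML constraint validation.
  return form.checkValidity();
}
```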
View File

@ -85,7 +85,7 @@ function parseDatasetToProps(data) {
};
}
export default (el, defaultEl) => {
export default (el, defaultEl, formSelector) => {
if (!el) {
return null;
}
@ -112,6 +112,7 @@ export default (el, defaultEl) => {
return createElement(IntegrationForm, {
props: {
helpHtml,
formSelector,
},
});
},

View File

@ -5,7 +5,6 @@ import eventHub from './edit/event_hub';
import {
TEST_INTEGRATION_EVENT,
SAVE_INTEGRATION_EVENT,
TOGGLE_INTEGRATION_EVENT,
VALIDATE_INTEGRATION_FORM_EVENT,
I18N_DEFAULT_ERROR_MESSAGE,
I18N_SUCCESSFUL_CONNECTION_MESSAGE,
@ -14,8 +13,8 @@ import { testIntegrationSettings } from './edit/api';
export default class IntegrationSettingsForm {
constructor(formSelector) {
this.formSelector = formSelector;
this.$form = document.querySelector(formSelector);
this.formActive = false;
this.vue = null;
@ -28,26 +27,22 @@ export default class IntegrationSettingsForm {
this.vue = initForm(
document.querySelector('.js-vue-integration-settings'),
document.querySelector('.js-vue-default-integration-settings'),
this.formSelector,
);
eventHub.$on(TOGGLE_INTEGRATION_EVENT, (active) => {
this.formActive = active;
this.toggleServiceState();
eventHub.$on(TEST_INTEGRATION_EVENT, (formValid) => {
this.testIntegration(formValid);
});
eventHub.$on(TEST_INTEGRATION_EVENT, () => {
this.testIntegration();
});
eventHub.$on(SAVE_INTEGRATION_EVENT, () => {
this.saveIntegration();
eventHub.$on(SAVE_INTEGRATION_EVENT, (formValid) => {
this.saveIntegration(formValid);
});
}
saveIntegration() {
saveIntegration(formValid) {
// Save Service if not active and check the following if active;
// 1) If form contents are valid
// 2) If this service can be saved
// If both conditions are true, we override form submission
// and save the service using provided configuration.
const formValid = this.$form.checkValidity() || this.formActive === false;
if (formValid) {
delay(() => {
@ -59,13 +54,13 @@ export default class IntegrationSettingsForm {
}
}
testIntegration() {
testIntegration(formValid) {
// Service was marked active so now we check;
// 1) If form contents are valid
// 2) If this service can be tested
// If both conditions are true, we override form submission
// and test the service using provided configuration.
if (this.$form.checkValidity()) {
if (formValid) {
this.testSettings(new FormData(this.$form));
} else {
eventHub.$emit(VALIDATE_INTEGRATION_FORM_EVENT);
@ -73,17 +68,6 @@ export default class IntegrationSettingsForm {
}
}
/**
* Change Form's validation enforcement based on service status (active/inactive)
*/
toggleServiceState() {
if (this.formActive) {
this.$form.removeAttribute('novalidate');
} else if (!this.$form.getAttribute('novalidate')) {
this.$form.setAttribute('novalidate', 'novalidate');
}
}
/**
* Get a list of Jira issue types for the currently configured project
*

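A rough, self-contained sketch of the reshaped handshake (the tiny emitter below stands in for `~/integrations/edit/event_hub`, and the listener bodies are placeholders): the Vue form now computes validity and passes it along with the event, so this class no longer queries the DOM or tracks a separate `formActive` flag.

```javascript
// Hand-rolled stand-in for the shared event hub so the sketch runs on its own.
const listeners = new Map();
const eventHub = {
  $on(event, callback) {
    listeners.set(event, [...(listeners.get(event) || []), callback]);
  },
  $emit(event, payload) {
    (listeners.get(event) || []).forEach((callback) => callback(payload));
  },
};

// IntegrationSettingsForm side: act on the validity flag it is handed.
eventHub.$on('testIntegration', (formValid) => {
  if (formValid) {
    console.log('submit the form data to the test endpoint');
  } else {
    eventHub.$emit('validateIntegrationForm');
  }
});
eventHub.$on('validateIntegrationForm', () => {
  console.log('highlight invalid fields');
});

// IntegrationForm (Vue) side: onTestClick computes validity before emitting.
const formValid = false; // would be this.form.checkValidity() in the component
eventHub.$emit('testIntegration', formValid);
// => "highlight invalid fields"
```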
View File

@ -1,3 +0,0 @@
import initUIKit from '~/ui_development_kit';
initUIKit();

View File

@ -1,3 +0,0 @@
import initDeprecatedRemoveRowBehavior from '~/behaviors/deprecated_remove_row_behavior';
document.addEventListener('DOMContentLoaded', initDeprecatedRemoveRowBehavior);

View File

@ -1,27 +1,29 @@
<script>
import { GlButton, GlButtonGroup, GlTooltipDirective } from '@gitlab/ui';
import { GlButton, GlButtonGroup, GlModalDirective, GlTooltipDirective } from '@gitlab/ui';
import createFlash from '~/flash';
import { __, s__ } from '~/locale';
import { __, s__, sprintf } from '~/locale';
import runnerDeleteMutation from '~/runner/graphql/runner_delete.mutation.graphql';
import runnerActionsUpdateMutation from '~/runner/graphql/runner_actions_update.mutation.graphql';
import { captureException } from '~/runner/sentry_utils';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import RunnerDeleteModal from '../runner_delete_modal.vue';
const i18n = {
I18N_EDIT: __('Edit'),
I18N_PAUSE: __('Pause'),
I18N_RESUME: __('Resume'),
I18N_REMOVE: __('Remove'),
I18N_REMOVE_CONFIRMATION: s__('Runners|Are you sure you want to delete this runner?'),
};
const I18N_EDIT = __('Edit');
const I18N_PAUSE = __('Pause');
const I18N_RESUME = __('Resume');
const I18N_DELETE = s__('Runners|Delete runner');
const I18N_DELETED_TOAST = s__('Runners|Runner %{name} was deleted');
export default {
name: 'RunnerActionsCell',
components: {
GlButton,
GlButtonGroup,
RunnerDeleteModal,
},
directives: {
GlTooltip: GlTooltipDirective,
GlModal: GlModalDirective,
},
props: {
runner: {
@ -48,21 +50,29 @@ export default {
// mouseout listeners don't run leaving the tooltip stuck
return '';
}
return this.isActive ? i18n.I18N_PAUSE : i18n.I18N_RESUME;
return this.isActive ? I18N_PAUSE : I18N_RESUME;
},
deleteTitle() {
// Prevent a "sticky" tooltip: If element gets removed,
// mouseout listeners don't run and leaving the tooltip stuck
return this.deleting ? '' : i18n.I18N_REMOVE;
if (this.deleting) {
// Prevent a "sticky" tooltip: If this button is disabled,
// mouseout listeners don't run leaving the tooltip stuck
return '';
}
return I18N_DELETE;
},
runnerId() {
return getIdFromGraphQLId(this.runner.id);
},
runnerName() {
return `#${this.runnerId} (${this.runner.shortSha})`;
},
runnerDeleteModalId() {
return `delete-runner-modal-${this.runnerId}`;
},
},
methods: {
async onToggleActive() {
this.updating = true;
// TODO In HAML iteration we had a confirmation modal via:
// data-confirm="_('Are you sure?')"
// this may not have to ported, this is an easily reversible operation
try {
const toggledActive = !this.runner.active;
@ -91,12 +101,8 @@ export default {
},
async onDelete() {
// TODO Replace confirmation with gl-modal
// eslint-disable-next-line no-alert
if (!window.confirm(i18n.I18N_REMOVE_CONFIRMATION)) {
return;
}
// Deleting stays "true" until this row is removed,
// should only change back if the operation fails.
this.deleting = true;
try {
const {
@ -115,11 +121,13 @@ export default {
});
if (errors && errors.length) {
throw new Error(errors.join(' '));
} else {
// Use $root to have the toast message stay after this element is removed
this.$root.$toast?.show(sprintf(I18N_DELETED_TOAST, { name: this.runnerName }));
}
} catch (e) {
this.onError(e);
} finally {
this.deleting = false;
this.onError(e);
}
},
@ -133,14 +141,15 @@ export default {
captureException({ error, component: this.$options.name });
},
},
i18n,
I18N_EDIT,
I18N_DELETE,
};
</script>
<template>
<gl-button-group>
<!--
This button appears for administratos: those with
This button appears for administrators: those with
access to the adminUrl. More advanced permissions policies
will allow more granular permissions.
@ -148,16 +157,14 @@ export default {
-->
<gl-button
v-if="runner.adminUrl"
v-gl-tooltip.hover.viewport
v-gl-tooltip.hover.viewport="$options.I18N_EDIT"
:href="runner.adminUrl"
:title="$options.i18n.I18N_EDIT"
:aria-label="$options.i18n.I18N_EDIT"
:aria-label="$options.I18N_EDIT"
icon="pencil"
data-testid="edit-runner"
/>
<gl-button
v-gl-tooltip.hover.viewport
:title="toggleActiveTitle"
v-gl-tooltip.hover.viewport="toggleActiveTitle"
:aria-label="toggleActiveTitle"
:icon="toggleActiveIcon"
:loading="updating"
@ -165,14 +172,20 @@ export default {
@click="onToggleActive"
/>
<gl-button
v-gl-tooltip.hover.viewport
:title="deleteTitle"
v-gl-tooltip.hover.viewport="deleteTitle"
v-gl-modal="runnerDeleteModalId"
:aria-label="deleteTitle"
icon="close"
:loading="deleting"
variant="danger"
data-testid="delete-runner"
@click="onDelete"
/>
<runner-delete-modal
:ref="runnerDeleteModalId"
:modal-id="runnerDeleteModalId"
:runner-name="runnerName"
@primary="onDelete"
/>
</gl-button-group>
</template>

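As a side note, a standalone sketch of how the new success toast text is put together after deletion; the `sprintf` and GraphQL-ID helpers below are simplified stand-ins for `~/locale` and `~/graphql_shared/utils`, and the runner values are invented:

```javascript
// Simplified stand-ins so the sketch runs without the GitLab helpers.
const sprintf = (template, params) =>
  template.replace(/%\{(\w+)\}/g, (_, key) => params[key]);
const getIdFromGraphQLId = (gid) => Number(gid.split('/').pop());

const I18N_DELETED_TOAST = 'Runner %{name} was deleted';

const runner = { id: 'gid://gitlab/Ci::Runner/99', shortSha: '4e3dca8f' }; // invented
const runnerName = `#${getIdFromGraphQLId(runner.id)} (${runner.shortSha})`;

// Shown via this.$root.$toast once the delete mutation returns without errors,
// so the message survives the removal of the row that triggered it.
console.log(sprintf(I18N_DELETED_TOAST, { name: runnerName }));
// => Runner #99 (4e3dca8f) was deleted
```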
View File

@ -0,0 +1,51 @@
<script>
import { GlModal } from '@gitlab/ui';
import { __, s__, sprintf } from '~/locale';
const I18N_TITLE = s__('Runners|Delete runner %{name}?');
const I18N_BODY = s__(
'Runners|The runner will be permanently deleted and no longer available for projects or groups in the instance. Are you sure you want to continue?',
);
const I18N_PRIMARY = s__('Runners|Delete runner');
const I18N_CANCEL = __('Cancel');
export default {
components: {
GlModal,
},
props: {
runnerName: {
type: String,
required: true,
},
},
computed: {
title() {
return sprintf(I18N_TITLE, { name: this.runnerName });
},
},
methods: {
onPrimary() {
this.$refs.modal.hide();
},
},
actionPrimary: { text: I18N_PRIMARY, attributes: { variant: 'danger' } },
actionCancel: { text: I18N_CANCEL },
I18N_BODY,
};
</script>
<template>
<gl-modal
ref="modal"
size="sm"
:title="title"
:action-primary="$options.actionPrimary"
:action-cancel="$options.actionCancel"
v-bind="$attrs"
v-on="$listeners"
@primary="onPrimary"
>
{{ $options.I18N_BODY }}
</gl-modal>
</template>

View File

@ -81,6 +81,7 @@ export default {
:tbody-tr-attr="runnerTrAttr"
data-testid="runner-list"
stacked="md"
primary-key="id"
fixed
>
<template v-if="!runners.length" #table-busy>

View File

@ -1,7 +1,7 @@
mutation mergeRequestSetLabels($input: MergeRequestSetLabelsInput!) {
mergeRequestSetLabels(input: $input) {
updateIssuableLabels: mergeRequestSetLabels(input: $input) {
errors
mergeRequest {
issuable: mergeRequest {
id
labels {
nodes {

View File

@ -1,28 +0,0 @@
import $ from 'jquery';
import initDeprecatedJQueryDropdown from '~/deprecated_jquery_dropdown';
import Api from './api';
export default () => {
initDeprecatedJQueryDropdown($('#js-project-dropdown'), {
data: (term, callback) => {
Api.projects(
term,
{
order_by: 'last_activity_at',
},
(data) => {
callback(data);
},
);
},
text: (project) => project.name_with_namespace || project.name,
selectable: true,
fieldName: 'author_id',
filterable: true,
search: {
fields: ['name_with_namespace'],
},
id: (data) => data.id,
isSelected: (data) => data.id === 2,
});
};

View File

@ -1,8 +1,8 @@
#import "~/graphql_shared/fragments/label.fragment.graphql"
mutation updateEpicLabels($input: UpdateEpicInput!) {
updateEpic(input: $input) {
epic {
updateIssuableLabels: updateEpic(input: $input) {
issuable: epic {
id
labels {
nodes {

View File

@ -225,16 +225,13 @@ export default {
variables: { input: inputVariables },
})
.then(({ data }) => {
const { mutationName } = issuableLabelsQueries[this.issuableType];
if (data[mutationName]?.errors?.length) {
if (data.updateIssuableLabels?.errors?.length) {
throw new Error();
}
this.issuableLabels = data[mutationName]?.[this.issuableType]?.labels?.nodes;
this.$emit('updateSelectedLabels', {
id: data[mutationName]?.[this.issuableType]?.id,
labels: this.issuableLabels,
id: data.updateIssuableLabels?.issuable?.id,
labels: data.updateIssuableLabels?.issuable?.labels?.nodes,
});
})
.catch((error) =>

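Because every label mutation now aliases its payload to `updateIssuableLabels` and the record to `issuable`, this component can read the response identically for issues, merge requests, and epics. A small sketch of the assumed response shape, with invented IDs and label fields purely for illustration:

```javascript
// Invented example payload shaped like the aliased mutation responses above.
const data = {
  updateIssuableLabels: {
    issuable: {
      id: 'gid://gitlab/Issue/1',
      labels: {
        nodes: [{ id: 'gid://gitlab/Label/2', title: 'bug', color: '#cc0033' }],
      },
    },
  },
};

// No per-type mutation-name lookup is needed any more:
const labels = data.updateIssuableLabels?.issuable?.labels?.nodes ?? [];
console.log(labels.map((label) => label.title)); // => [ 'bug' ]
```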
View File

@ -904,6 +904,7 @@ $ide-commit-header-height: 48px;
.sidebar-context-title {
white-space: nowrap;
display: block;
color: var(--ide-text-color, $gl-text-color);
&.text-secondary {
font-weight: normal;
@ -964,6 +965,10 @@ $ide-commit-header-height: 48px;
margin: 0;
}
}
.gl-tab-content {
color: var(--ide-text-color, $gl-text-color);
}
}
.ide-pipeline-header {

View File

@ -59,6 +59,8 @@ class GroupsController < Groups::ApplicationController
feature_category :projects, [:projects]
feature_category :importers, [:export, :download_export]
urgency :high, [:unfoldered_environment_names]
def index
redirect_to(current_user ? dashboard_groups_path : explore_groups_path)
end

View File

@ -59,10 +59,6 @@ class HelpController < ApplicationController
@instance_configuration = InstanceConfiguration.new
end
def ui
@user = User.new(id: 0, name: 'John Doe', username: '@johndoe')
end
private
def path_params

View File

@ -162,6 +162,10 @@ class OmniauthCallbacksController < Devise::OmniauthCallbacksController
user = auth_user.find_and_update!
if auth_user.valid_sign_in?
# In this case the `#current_user` would not be set. So we can't fetch it
# from that in `#context_user`. Pushing it manually here makes the information
# available in the logs for this request.
Gitlab::ApplicationContext.push(user: user)
log_audit_event(user, with: oauth['provider'])
set_remember_me(user)
@ -287,10 +291,6 @@ class OmniauthCallbacksController < Devise::OmniauthCallbacksController
def fail_admin_mode_invalid_credentials
redirect_to new_admin_session_path, alert: _('Invalid login or password')
end
def context_user
current_user
end
end
OmniauthCallbacksController.prepend_mod_with('OmniauthCallbacksController')

View File

@ -79,6 +79,8 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
feature_category :infrastructure_as_code, [:terraform_reports]
feature_category :continuous_integration, [:pipeline_status, :pipelines, :exposed_artifacts]
urgency :high, [:export_csv]
def index
@merge_requests = @issuables

View File

@ -51,7 +51,9 @@ class ProjectsController < Projects::ApplicationController
feature_category :team_planning, [:preview_markdown, :new_issuable_address]
feature_category :importers, [:export, :remove_export, :generate_new_export, :download_export]
feature_category :code_review, [:unfoldered_environment_names]
urgency :low, [:refs]
urgency :high, [:unfoldered_environment_names]
def index
redirect_to(current_user ? root_path : explore_root_path)

View File

@ -5,7 +5,7 @@ class SearchController < ApplicationController
include SearchHelper
include RedisTracking
RESCUE_FROM_TIMEOUT_ACTIONS = [:count, :show].freeze
RESCUE_FROM_TIMEOUT_ACTIONS = [:count, :show, :autocomplete].freeze
track_redis_hll_event :show, name: 'i_search_total'
@ -74,11 +74,7 @@ class SearchController < ApplicationController
def autocomplete
term = params[:term]
if params[:project_id].present?
@project = Project.find_by(id: params[:project_id])
@project = nil unless can?(current_user, :read_project, @project)
end
@project = search_service.project
@ref = params[:project_ref] if params[:project_ref].present?
render json: search_autocomplete_opts(term).to_json
@ -189,17 +185,16 @@ class SearchController < ApplicationController
@timeout = true
if count_action_name?
case action_name.to_sym
when :count
render json: {}, status: :request_timeout
when :autocomplete
render json: [], status: :request_timeout
else
render status: :request_timeout
end
end
def count_action_name?
action_name.to_sym == :count
end
def strip_surrounding_whitespace_from_search
%i(term search).each { |param| params[param]&.strip! }
end

View File

@ -12,10 +12,9 @@ module Packages
attr_reader :name, :packages
def initialize(name, packages, include_metadata: false)
def initialize(name, packages)
@name = name
@packages = packages
@include_metadata = include_metadata
end
def versions
@ -24,10 +23,7 @@ module Packages
packages.each_batch do |relation|
batched_packages = relation.including_dependency_links
.preload_files
if @include_metadata
batched_packages = batched_packages.preload_npm_metadatum
end
.preload_npm_metadatum
batched_packages.each do |package|
package_file = package.package_files.last
@ -92,8 +88,6 @@ module Packages
end
def abbreviated_package_json(package)
return {} unless @include_metadata
json = package.npm_metadatum&.package_json || {}
json.slice(*PACKAGE_JSON_ALLOWED_FIELDS)
end

View File

@ -23,9 +23,7 @@ module Packages
::Packages::CreateDependencyService.new(package, package_dependencies).execute
::Packages::Npm::CreateTagService.new(package, dist_tag).execute
if Feature.enabled?(:packages_npm_abbreviated_metadata, project, default_enabled: :yaml)
package.create_npm_metadatum!(package_json: package_json)
end
package.create_npm_metadatum!(package_json: package_json)
package
end

View File

@ -1,8 +0,0 @@
---
name: packages_npm_abbreviated_metadata
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/73639
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/344827
milestone: '14.5'
type: development
group: group::package
default_enabled: true

View File

@ -283,8 +283,7 @@ Gitaly Cluster provides the following features:
- [Replication factor](#replication-factor) of repositories for increased redundancy.
- [Automatic failover](praefect.md#automatic-failover-and-primary-election-strategies) from the
primary Gitaly node to secondary Gitaly nodes.
- Reporting of possible [data loss](praefect.md#check-for-data-loss) if replication queue is
non-empty.
- Reporting of possible [data loss](recovery.md#check-for-data-loss) if replication queue isn't empty.
Follow the [Gitaly Cluster epic](https://gitlab.com/groups/gitlab-org/-/epics/1489) for improvements
including [horizontally distributing reads](https://gitlab.com/groups/gitlab-org/-/epics/2013).
@ -524,6 +523,10 @@ To monitor [strong consistency](#strong-consistency), you can use the following
You can also monitor the [Praefect logs](../logs.md#praefect-logs).
## Recover from failure
Gitaly Cluster can [recover from certain types of failure](recovery.md).
## Do not bypass Gitaly
GitLab doesn't advise directly accessing Gitaly repositories stored on disk with a Git client,

View File

@ -1293,481 +1293,3 @@ Migrate to [repository-specific primary nodes](#repository-specific-primary-node
If a sufficient number of health checks fail for the current primary Gitaly node, a new primary is
elected. **Do not use with multiple Praefect nodes!** Using with multiple Praefect nodes is
likely to result in a split brain.
## Primary Node Failure
Gitaly Cluster recovers from a failing primary Gitaly node by promoting a healthy secondary as the
new primary.
In GitLab 14.1 and later, Gitaly Cluster:
- Elects a healthy secondary with a fully up to date copy of the repository as the new primary.
- Repository becomes unavailable if there are no fully up to date copies of it on healthy secondaries.
To minimize data loss in GitLab 13.0 to 14.0, Gitaly Cluster:
- Switches repositories that are outdated on the new primary to [read-only mode](#read-only-mode).
- Elects the secondary with the least unreplicated writes from the primary to be the new
primary. Because there can still be some unreplicated writes,
[data loss can occur](#check-for-data-loss).
### Read-only mode
> - Introduced in GitLab 13.0 as [generally available](https://about.gitlab.com/handbook/product/gitlab-the-product/#generally-available-ga).
> - Between GitLab 13.0 and GitLab 13.2, read-only mode applied to the whole virtual storage and occurred whenever failover occurred.
> - [In GitLab 13.3 and later](https://gitlab.com/gitlab-org/gitaly/-/issues/2862), read-only mode applies on a per-repository basis and only occurs if a new primary is out of date. If the failed primary contained unreplicated writes, [data loss can occur](#check-for-data-loss).
> - Removed in GitLab 14.1. Instead, repositories [become unavailable](#unavailable-repositories).
When Gitaly Cluster switches to a new primary in GitLab 13.0 to 14.0, repositories enter
read-only mode if they are out of date. This can happen after failing over to an outdated
secondary. Read-only mode eases data recovery efforts by preventing writes that may conflict
with the unreplicated writes on other nodes.
To enable writes again in GitLab 13.0 to 14.0, an administrator can:
1. [Check](#check-for-data-loss) for data loss.
1. Attempt to [recover](#data-recovery) missing data.
1. Either [enable writes](#enable-writes-or-accept-data-loss) in the virtual storage or
[accept data loss](#enable-writes-or-accept-data-loss) if necessary, depending on the version of
GitLab.
## Retrieve repository metadata
> [Introduced](https://gitlab.com/gitlab-org/gitaly/-/issues/3481) in GitLab 14.6.
Gitaly Cluster maintains a [metadata database](index.md#components) about the repositories stored on the cluster. Use the `praefect metadata` subcommand
to inspect the metadata for troubleshooting.
You can retrieve a repository's metadata by its Praefect-assigned repository ID:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml metadata -repository-id <repository-id>
```
You can also retrieve a repository's metadata by its virtual storage and relative path:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml metadata -virtual-storage <virtual-storage> -relative-path <relative-path>
```
### Examples
To retrieve the metadata for a repository with a Praefect-assigned repository ID of 1:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml metadata -repository-id 1
```
To retrieve the metadata for a repository with virtual storage `default` and relative path `@hashed/b1/7e/b17ef6d19c7a5b1ee83b907c595526dcb1eb06db8227d650d5dda0a9f4ce8cd9.git`:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml metadata -virtual-storage default -relative-path @hashed/b1/7e/b17ef6d19c7a5b1ee83b907c595526dcb1eb06db8227d650d5dda0a9f4ce8cd9.git
```
Either of these examples retrieves the following metadata for an example repository:
```plaintext
Repository ID: 54771
Virtual Storage: "default"
Relative Path: "@hashed/b1/7e/b17ef6d19c7a5b1ee83b907c595526dcb1eb06db8227d650d5dda0a9f4ce8cd9.git"
Replica Path: "@hashed/b1/7e/b17ef6d19c7a5b1ee83b907c595526dcb1eb06db8227d650d5dda0a9f4ce8cd9.git"
Primary: "gitaly-1"
Generation: 1
Replicas:
- Storage: "gitaly-1"
Assigned: true
Generation: 1, fully up to date
Healthy: true
Valid Primary: true
- Storage: "gitaly-2"
Assigned: true
Generation: 0, behind by 1 changes
Healthy: true
Valid Primary: false
- Storage: "gitaly-3"
Assigned: true
Generation: replica not yet created
Healthy: false
Valid Primary: false
```
### Available metadata
The metadata retrieved by `praefect metadata` includes the fields in the following tables.
| Field | Description |
|:------------------|:-------------------------------------------------------------------------------------------------------------------|
| `Repository ID` | Permanent unique ID assigned to the repository by Praefect. Different to the ID GitLab uses for repositories. |
| `Virtual Storage` | Name of the virtual storage the repository is stored in. |
| `Relative Path` | Repository's path in the virtual storage. |
| `Replica Path` | Where on the Gitaly node's disk the repository's replicas are stored. |
| `Primary` | Current primary of the repository. |
| `Generation` | Used by Praefect to track repository changes. Each write in the repository increments the repository's generation. |
| `Replicas` | A list of replicas that exist or are expected to exist. |
For each replica, the following metadata is available:
| `Replicas` Field | Description |
|:-----------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `Storage` | Name of the Gitaly storage that contains the replica. |
| `Assigned` | Indicates whether the replica is expected to exist in the storage. Can be `false` if a Gitaly node is removed from the cluster or if the storage contains an extra copy after the repository's replication factor was decreased. |
| `Generation` | Latest confirmed generation of the replica. It indicates:<br><br>- The replica is fully up to date if the generation matches the repository's generation.<br>- The replica is outdated if the replica's generation is less than the repository's generation.<br>- `replica not yet created` if the replica does not yet exist at all on the storage. |
| `Healthy` | Indicates whether the Gitaly node that is hosting this replica is considered healthy by the consensus of Praefect nodes. |
| `Valid Primary` | Indicates whether the replica is fit to serve as the primary node. If the repository's primary is not a valid primary, a failover occurs on the next write to the repository if there is another replica that is a valid primary. A replica is a valid primary if:<br><br>- It is stored on a healthy Gitaly node.<br>- It is fully up to date.<br>- It is not targeted by a pending deletion job from decreasing replication factor.<br>- It is assigned. |
## Unavailable repositories
> - From GitLab 13.0 through 14.0, repositories became read-only if they were outdated on the primary but fully up to date on a healthy secondary. `dataloss` sub-command displays read-only repositories by default through these versions.
> - Since GitLab 14.1, Praefect contains more responsive failover logic which immediately fails over to one of the fully up to date secondaries rather than placing the repository in read-only mode. Since GitLab 14.1, the `dataloss` sub-command displays repositories which are unavailable due to having no fully up to date copies on healthy Gitaly nodes.
A repository is unavailable if all of its up to date replicas are unavailable. Unavailable repositories are
not accessible through Praefect to prevent serving stale data that may break automated tooling.
### Check for data loss
The Praefect `dataloss` subcommand identifies:
- Copies of repositories in GitLab 13.0 to GitLab 14.0 that are likely to be outdated.
This can help identify potential data loss after a failover.
- Repositories in GitLab 14.1 and later that are unavailable. This helps identify potential
data loss and repositories which are no longer accessible because all of their up-to-date
replicas are unavailable.
The following parameters are available:
- `-virtual-storage` that specifies which virtual storage to check. Because they might require
an administrator to intervene, the default behavior is to display:
- In GitLab 13.0 to 14.0, copies of read-only repositories.
- In GitLab 14.1 and later, unavailable repositories.
- In GitLab 14.1 and later, [`-partially-unavailable`](#unavailable-replicas-of-available-repositories)
that specifies whether to include in the output repositories that are available but have
some assigned copies that are not available.
NOTE:
`dataloss` is still in beta and the output format is subject to change.
To check for repositories with outdated primaries or for unavailable repositories, run:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml dataloss [-virtual-storage <virtual-storage>]
```
Every configured virtual storage is checked if none is specified:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml dataloss
```
Repositories are listed in the output that have either:
- An outdated copy of the repository on the primary, in GitLab 13.0 to GitLab 14.0.
- No healthy and fully up-to-date copies available, in GitLab 14.1 and later.
The following information is printed for each repository:
- A repository's relative path to the storage directory identifies each repository and groups the related
information.
- The repository's current status is printed in parentheses next to the disk path:
- In GitLab 13.0 to 14.0, either `(read-only)` if the repository's primary node is outdated
and can't accept writes. Otherwise, `(writable)`.
- In GitLab 14.1 and later, `(unavailable)` is printed next to the disk path if the
repository is unavailable.
- The primary field lists the repository's current primary. If the repository has no primary, the field shows
`No Primary`.
- The In-Sync Storages lists replicas which have replicated the latest successful write and all writes
preceding it.
- The Outdated Storages lists replicas which contain an outdated copy of the repository. Replicas which have no copy
of the repository but should contain it are also listed here. The maximum number of changes the replica is missing
is listed next to the replica. Note that outdated replicas may in fact be fully up to date or contain later changes,
but Praefect can't guarantee it.
Additional information includes:
- Whether a node is assigned to host the repository is listed with each node's status.
`assigned host` is printed next to nodes that are assigned to store the repository. The
text is omitted if the node contains a copy of the repository but is not assigned to store
the repository. Such copies aren't kept in sync by Praefect, but may act as replication
sources to bring assigned copies up to date.
- In GitLab 14.1 and later, `unhealthy` is printed next to the copies that are located
on unhealthy Gitaly nodes.
Example output:
```shell
Virtual storage: default
Outdated repositories:
@hashed/3f/db/3fdba35f04dc8c462986c992bcf875546257113072a909c162f7e470e581e278.git (unavailable):
Primary: gitaly-1
In-Sync Storages:
gitaly-2, assigned host, unhealthy
Outdated Storages:
gitaly-1 is behind by 3 changes or less, assigned host
gitaly-3 is behind by 3 changes or less
```
A confirmation is printed out when every repository is available. For example:
```shell
Virtual storage: default
All repositories are available!
```
#### Unavailable replicas of available repositories
NOTE:
In GitLab 14.0 and earlier, the flag is `-partially-replicated` and the output shows any repositories with assigned nodes with outdated
copies.
To also list information of repositories which are available but are unavailable from some of the assigned nodes,
use the `-partially-unavailable` flag.
A repository is available if there is a healthy, up to date replica available. Some of the assigned secondary
replicas may be temporarily unavailable for access while they are waiting to replicate the latest changes.
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml dataloss [-virtual-storage <virtual-storage>] [-partially-unavailable]
```
Example output:
```shell
Virtual storage: default
Outdated repositories:
@hashed/3f/db/3fdba35f04dc8c462986c992bcf875546257113072a909c162f7e470e581e278.git:
Primary: gitaly-1
In-Sync Storages:
gitaly-1, assigned host
Outdated Storages:
gitaly-2 is behind by 3 changes or less, assigned host
gitaly-3 is behind by 3 changes or less
```
With the `-partially-unavailable` flag set, a confirmation is printed out if every assigned replica is fully up to
date and healthy.
For example:
```shell
Virtual storage: default
All repositories are fully available on all assigned storages!
```
### Check repository checksums
To check a project's repository checksums across all Gitaly nodes, run the
[replicas Rake task](../raketasks/praefect.md#replica-checksums) on the main GitLab node.
### Accept data loss
WARNING:
`accept-dataloss` causes permanent data loss by overwriting other versions of the repository. Data
[recovery efforts](#data-recovery) must be performed before using it.
If it is not possible to bring one of the up to date replicas back online, you may have to accept data
loss. When accepting data loss, Praefect marks the chosen replica of the repository as the latest version
and replicates it to the other assigned Gitaly nodes. This process overwrites any other version of the
repository so care must be taken.
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml accept-dataloss
-virtual-storage <virtual-storage> -repository <relative-path> -authoritative-storage <storage-name>
```
### Enable writes or accept data loss
WARNING:
`accept-dataloss` causes permanent data loss by overwriting other versions of the repository.
Data [recovery efforts](#data-recovery) must be performed before using it.
Praefect provides the following subcommands to re-enable writes or accept data loss:
- In GitLab 13.2 and earlier, `enable-writes` to re-enable virtual storage for writes after
data recovery attempts:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml enable-writes -virtual-storage <virtual-storage>
```
- In GitLab 13.3 and later, if it is not possible to bring one of the up to date nodes back
online, you may have to accept data loss:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml accept-dataloss -virtual-storage <virtual-storage> -repository <relative-path> -authoritative-storage <storage-name>
```
When accepting data loss, Praefect:
1. Marks the chosen copy of the repository as the latest version.
1. Replicates the copy to the other assigned Gitaly nodes.
This process overwrites any other copy of the repository so care must be taken.
## Data recovery
If a Gitaly node fails replication jobs for any reason, it ends up hosting outdated versions of the
affected repositories. Praefect provides tools for:
- [Automatic](#automatic-reconciliation) reconciliation, for GitLab 13.4 and later.
- [Manual](#manual-reconciliation) reconciliation, for:
- GitLab 13.3 and earlier.
- Repositories upgraded to GitLab 13.4 and later without entries in the `repositories` table. In
GitLab 13.6 and later, [a migration is run](https://gitlab.com/gitlab-org/gitaly/-/issues/3033)
when Praefect starts for these repositories.
These tools reconcile the outdated repositories to bring them fully up to date again.
### Automatic reconciliation
> [Introduced](https://gitlab.com/gitlab-org/gitaly/-/issues/2717) in GitLab 13.4.
Praefect automatically reconciles repositories that are not up to date. By default, this is done every
five minutes. For each outdated repository on a healthy Gitaly node, the Praefect picks a
random, fully up-to-date replica of the repository on another healthy Gitaly node to replicate from. A
replication job is scheduled only if there are no other replication jobs pending for the target
repository.
The reconciliation frequency can be changed via the configuration. The value can be any valid
[Go duration value](https://golang.org/pkg/time/#ParseDuration). Values below 0 disable the feature.
Examples:
```ruby
praefect['reconciliation_scheduling_interval'] = '5m' # the default value
```
```ruby
praefect['reconciliation_scheduling_interval'] = '30s' # reconcile every 30 seconds
```
```ruby
praefect['reconciliation_scheduling_interval'] = '0' # disable the feature
```
### Manual reconciliation
WARNING:
The `reconcile` sub-command was removed in GitLab 14.1. Use [automatic reconciliation](#automatic-reconciliation) instead. Manual reconciliation may produce excess replication jobs and is limited in functionality. Manual reconciliation does not work when [repository-specific primary nodes](#repository-specific-primary-nodes) are
enabled.
The Praefect `reconcile` sub-command allows for the manual reconciliation between two Gitaly nodes. The
command replicates every repository on a later version on the reference storage to the target storage.
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml reconcile -virtual <virtual-storage> -reference <up-to-date-storage> -target <outdated-storage> -f
```
- Replace the placeholder `<virtual-storage>` with the virtual storage containing the Gitaly node storage to be checked.
- Replace the placeholder `<up-to-date-storage>` with the Gitaly storage name containing up to date repositories.
- Replace the placeholder `<outdated-storage>` with the Gitaly storage name containing outdated repositories.
### Manually remove repositories
> [Introduced](https://gitlab.com/gitlab-org/gitaly/-/merge_requests/3767) in GitLab 14.3.
The `remove-repository` Praefect sub-command removes repositories from a Gitaly Cluster. It removes
all state associated with a given repository including:
- On-disk repositories on all relevant Gitaly nodes.
- Any database state tracked by Praefect.
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml remove-repository -virtual-storage <virtual-storage> -repository <repository>
```
- `-virtual-storage` is the virtual storage the repository is located in. Virtual storages are configured in `/etc/gitlab/gitlab.rb` under `praefect['virtual_storages']` and look like the following:
```ruby
praefect['virtual_storages'] = {
'default' => {
...
},
'storage-1' => {
...
}
}
```
In this example, the virtual storage to specify is `default` or `storage-1`.
- `-repository` is the repository's relative path in the storage [beginning with `@hashed`](../repository_storage_types.md#hashed-storage).
For example:
```plaintext
@hashed/f5/ca/f5ca38f748a1d6eaf726b8a42fb575c3c71f1864a8143301782de13da2d9202b.git
```
Parts of the repository can continue to exist after running `remove-repository`. This can be because of:
- A deletion error.
- An in-flight RPC call targeting the repository.
If this occurs, run `remove-repository` again.
### Manually list untracked repositories
> [Introduced](https://gitlab.com/gitlab-org/gitaly/-/merge_requests/3926) in GitLab 14.4.
The `list-untracked-repositories` Praefect sub-command lists repositories of the Gitaly Cluster that both:
- Exist for at least one Gitaly storage.
- Aren't tracked in the Praefect database.
The command outputs:
- Result to `STDOUT` and the command's logs.
- Errors to `STDERR`.
Each entry is a complete JSON string with a newline at the end (configurable using the
`-delimiter` flag). For example:
```plaintext
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml list-untracked-repositories
{"virtual_storage":"default","storage":"gitaly-1","relative_path":"@hashed/ab/cd/abcd123456789012345678901234567890123456789012345678901234567890.git"}
{"virtual_storage":"default","storage":"gitaly-1","relative_path":"@hashed/ab/cd/abcd123456789012345678901234567890123456789012345678901234567891.git"}
```
### Manually track repositories
> [Introduced](https://gitlab.com/gitlab-org/omnibus-gitlab/-/merge_requests/5658) in GitLab 14.4.
The `track-repository` Praefect sub-command adds repositories on disk to the Praefect database to be tracked.
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml track-repository -virtual-storage <virtual-storage> -repository <repository>
```
- `-virtual-storage` is the virtual storage the repository is located in. Virtual storages are configured in `/etc/gitlab/gitlab.rb` under `praefect['virtual_storages']` and look like the following:
```ruby
praefect['virtual_storages'] = {
'default' => {
...
},
'storage-1' => {
...
}
}
```
In this example, the virtual storage to specify is `default` or `storage-1`.
- `-repository` is the repository's relative path in the storage [beginning with `@hashed`](../repository_storage_types.md#hashed-storage).
For example:
```plaintext
@hashed/f5/ca/f5ca38f748a1d6eaf726b8a42fb575c3c71f1864a8143301782de13da2d9202b.git
```
- `-authoritative-storage` is the storage we want Praefect to treat as the primary. Required if
[per-repository replication](#configure-replication-factor) is set as the replication strategy.
The command outputs:
- Results to `STDOUT` and the command's logs.
- Errors to `STDERR`.
This command fails if:
- The repository is already being tracked by the Praefect database.
- The repository does not exist on disk.

View File

@ -0,0 +1,405 @@
---
stage: Create
group: Gitaly
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
type: reference
---
# Recovery options
Gitaly Cluster can recover from certain types of failure.
## Primary Node Failure
Gitaly Cluster recovers from a failing primary Gitaly node by promoting a healthy secondary as the
new primary.
In GitLab 14.1 and later, Gitaly Cluster:
- Elects a healthy secondary with a fully up to date copy of the repository as the new primary.
- Repository becomes unavailable if there are no fully up to date copies of it on healthy secondaries.
To minimize data loss in GitLab 13.0 to 14.0, Gitaly Cluster:
- Switches repositories that are outdated on the new primary to [read-only mode](#read-only-mode).
- Elects the secondary with the least unreplicated writes from the primary to be the new
primary. Because there can still be some unreplicated writes,
[data loss can occur](#check-for-data-loss).
### Read-only mode
> - Introduced in GitLab 13.0 as [generally available](https://about.gitlab.com/handbook/product/gitlab-the-product/#generally-available-ga).
> - Between GitLab 13.0 and GitLab 13.2, read-only mode applied to the whole virtual storage and occurred whenever failover occurred.
> - [In GitLab 13.3 and later](https://gitlab.com/gitlab-org/gitaly/-/issues/2862), read-only mode applies on a per-repository basis and only occurs if a new primary is out of date. If the failed primary contained unreplicated writes, [data loss can occur](#check-for-data-loss).
> - Removed in GitLab 14.1. Instead, repositories [become unavailable](#unavailable-repositories).
When Gitaly Cluster switches to a new primary in GitLab 13.0 to 14.0, repositories enter
read-only mode if they are out of date. This can happen after failing over to an outdated
secondary. Read-only mode eases data recovery efforts by preventing writes that may conflict
with the unreplicated writes on other nodes.
To enable writes again in GitLab 13.0 to 14.0, an administrator can:
1. [Check](#check-for-data-loss) for data loss.
1. Attempt to [recover](#data-recovery) missing data.
1. Either [enable writes](#enable-writes-or-accept-data-loss) in the virtual storage or
[accept data loss](#enable-writes-or-accept-data-loss) if necessary, depending on the version of
GitLab.
## Unavailable repositories
> - From GitLab 13.0 through 14.0, repositories became read-only if they were outdated on the primary but fully up to date on a healthy secondary. `dataloss` sub-command displays read-only repositories by default through these versions.
> - Since GitLab 14.1, Praefect contains more responsive failover logic which immediately fails over to one of the fully up to date secondaries rather than placing the repository in read-only mode. Since GitLab 14.1, the `dataloss` sub-command displays repositories which are unavailable due to having no fully up to date copies on healthy Gitaly nodes.
A repository is unavailable if all of its up to date replicas are unavailable. Unavailable repositories are
not accessible through Praefect to prevent serving stale data that may break automated tooling.
### Check for data loss
The Praefect `dataloss` subcommand identifies:
- Copies of repositories in GitLab 13.0 to GitLab 14.0 that are likely to be outdated.
This can help identify potential data loss after a failover.
- Repositories in GitLab 14.1 and later that are unavailable. This helps identify potential
data loss and repositories which are no longer accessible because all of their up-to-date
replicas are unavailable.
The following parameters are available:
- `-virtual-storage` that specifies which virtual storage to check. Because they might require
an administrator to intervene, the default behavior is to display:
- In GitLab 13.0 to 14.0, copies of read-only repositories.
- In GitLab 14.1 and later, unavailable repositories.
- In GitLab 14.1 and later, [`-partially-unavailable`](#unavailable-replicas-of-available-repositories)
that specifies whether to include in the output repositories that are available but have
some assigned copies that are not available.
NOTE:
`dataloss` is still in beta and the output format is subject to change.
To check for repositories with outdated primaries or for unavailable repositories, run:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml dataloss [-virtual-storage <virtual-storage>]
```
Every configured virtual storage is checked if none is specified:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml dataloss
```
Repositories are listed in the output that have either:
- An outdated copy of the repository on the primary, in GitLab 13.0 to GitLab 14.0.
- No healthy and fully up-to-date copies available, in GitLab 14.1 and later.
The following information is printed for each repository:
- A repository's relative path to the storage directory identifies each repository and groups the related
information.
- The repository's current status is printed in parentheses next to the disk path:
- In GitLab 13.0 to 14.0, either `(read-only)` if the repository's primary node is outdated
and can't accept writes. Otherwise, `(writable)`.
- In GitLab 14.1 and later, `(unavailable)` is printed next to the disk path if the
repository is unavailable.
- The primary field lists the repository's current primary. If the repository has no primary, the field shows
`No Primary`.
- The In-Sync Storages lists replicas which have replicated the latest successful write and all writes
preceding it.
- The Outdated Storages lists replicas which contain an outdated copy of the repository. Replicas which have no copy
of the repository but should contain it are also listed here. The maximum number of changes the replica is missing
is listed next to the replica. Note that outdated replicas may in fact be fully up to date or contain later changes,
but Praefect can't guarantee it.
Additional information includes:
- Whether a node is assigned to host the repository is listed with each node's status.
`assigned host` is printed next to nodes that are assigned to store the repository. The
text is omitted if the node contains a copy of the repository but is not assigned to store
the repository. Such copies aren't kept in sync by Praefect, but may act as replication
sources to bring assigned copies up to date.
- In GitLab 14.1 and later, `unhealthy` is printed next to the copies that are located
on unhealthy Gitaly nodes.
Example output:
```shell
Virtual storage: default
Outdated repositories:
@hashed/3f/db/3fdba35f04dc8c462986c992bcf875546257113072a909c162f7e470e581e278.git (unavailable):
Primary: gitaly-1
In-Sync Storages:
gitaly-2, assigned host, unhealthy
Outdated Storages:
gitaly-1 is behind by 3 changes or less, assigned host
gitaly-3 is behind by 3 changes or less
```
A confirmation is printed out when every repository is available. For example:
```shell
Virtual storage: default
All repositories are available!
```
#### Unavailable replicas of available repositories
NOTE:
In GitLab 14.0 and earlier, the flag is `-partially-replicated` and the output shows any repositories with assigned nodes with outdated
copies.
To also list information of repositories which are available but are unavailable from some of the assigned nodes,
use the `-partially-unavailable` flag.
A repository is available if there is a healthy, up to date replica available. Some of the assigned secondary
replicas may be temporarily unavailable for access while they are waiting to replicate the latest changes.
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml dataloss [-virtual-storage <virtual-storage>] [-partially-unavailable]
```
Example output:
```shell
Virtual storage: default
Outdated repositories:
@hashed/3f/db/3fdba35f04dc8c462986c992bcf875546257113072a909c162f7e470e581e278.git:
Primary: gitaly-1
In-Sync Storages:
gitaly-1, assigned host
Outdated Storages:
gitaly-2 is behind by 3 changes or less, assigned host
gitaly-3 is behind by 3 changes or less
```
With the `-partially-unavailable` flag set, a confirmation is printed out if every assigned replica is fully up to
date and healthy.
For example:
```shell
Virtual storage: default
All repositories are fully available on all assigned storages!
```
### Check repository checksums
To check a project's repository checksums across all Gitaly nodes, run the
[replicas Rake task](../raketasks/praefect.md#replica-checksums) on the main GitLab node.
### Accept data loss
WARNING:
`accept-dataloss` causes permanent data loss by overwriting other versions of the repository. Data
[recovery efforts](#data-recovery) must be performed before using it.
If it is not possible to bring one of the up to date replicas back online, you may have to accept data
loss. When accepting data loss, Praefect marks the chosen replica of the repository as the latest version
and replicates it to the other assigned Gitaly nodes. This process overwrites any other version of the
repository so care must be taken.
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml accept-dataloss
-virtual-storage <virtual-storage> -repository <relative-path> -authoritative-storage <storage-name>
```
### Enable writes or accept data loss
WARNING:
`accept-dataloss` causes permanent data loss by overwriting other versions of the repository.
Data [recovery efforts](#data-recovery) must be performed before using it.
Praefect provides the following subcommands to re-enable writes or accept data loss:
- In GitLab 13.2 and earlier, `enable-writes` to re-enable virtual storage for writes after
data recovery attempts:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml enable-writes -virtual-storage <virtual-storage>
```
- In GitLab 13.3 and later, if it is not possible to bring one of the up to date nodes back
online, you may have to accept data loss:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml accept-dataloss -virtual-storage <virtual-storage> -repository <relative-path> -authoritative-storage <storage-name>
```
When accepting data loss, Praefect:
1. Marks the chosen copy of the repository as the latest version.
1. Replicates the copy to the other assigned Gitaly nodes.
This process overwrites any other copy of the repository so care must be taken.
## Data recovery
If a Gitaly node fails replication jobs for any reason, it ends up hosting outdated versions of the
affected repositories. Praefect provides tools for:
- [Automatic](#automatic-reconciliation) reconciliation, for GitLab 13.4 and later.
- [Manual](#manual-reconciliation) reconciliation, for:
- GitLab 13.3 and earlier.
- Repositories upgraded to GitLab 13.4 and later without entries in the `repositories` table. In
GitLab 13.6 and later, [a migration is run](https://gitlab.com/gitlab-org/gitaly/-/issues/3033)
when Praefect starts for these repositories.
These tools reconcile the outdated repositories to bring them fully up to date again.
### Automatic reconciliation
> [Introduced](https://gitlab.com/gitlab-org/gitaly/-/issues/2717) in GitLab 13.4.
Praefect automatically reconciles repositories that are not up to date. By default, this is done every
five minutes. For each outdated repository on a healthy Gitaly node, the Praefect picks a
random, fully up-to-date replica of the repository on another healthy Gitaly node to replicate from. A
replication job is scheduled only if there are no other replication jobs pending for the target
repository.
The reconciliation frequency can be changed via the configuration. The value can be any valid
[Go duration value](https://golang.org/pkg/time/#ParseDuration). Values below 0 disable the feature.
Examples:
```ruby
praefect['reconciliation_scheduling_interval'] = '5m' # the default value
```
```ruby
praefect['reconciliation_scheduling_interval'] = '30s' # reconcile every 30 seconds
```
```ruby
praefect['reconciliation_scheduling_interval'] = '0' # disable the feature
```
### Manual reconciliation
WARNING:
The `reconcile` sub-command was removed in GitLab 14.1. Use [automatic reconciliation](#automatic-reconciliation) instead.
Manual reconciliation may produce excess replication jobs and is limited in functionality. Manual reconciliation does not
work when [repository-specific primary nodes](praefect.md#repository-specific-primary-nodes) are enabled.
The Praefect `reconcile` sub-command allows for the manual reconciliation between two Gitaly nodes. The
command replicates every repository on a later version on the reference storage to the target storage.
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml reconcile -virtual <virtual-storage> -reference <up-to-date-storage> -target <outdated-storage> -f
```
- Replace the placeholder `<virtual-storage>` with the virtual storage containing the Gitaly node storage to be checked.
- Replace the placeholder `<up-to-date-storage>` with the Gitaly storage name containing up to date repositories.
- Replace the placeholder `<outdated-storage>` with the Gitaly storage name containing outdated repositories.
### Manually remove repositories
> [Introduced](https://gitlab.com/gitlab-org/gitaly/-/merge_requests/3767) in GitLab 14.3.
The `remove-repository` Praefect sub-command removes repositories from a Gitaly Cluster. It removes
all state associated with a given repository, including:
- On-disk repositories on all relevant Gitaly nodes.
- Any database state tracked by Praefect.
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml remove-repository -virtual-storage <virtual-storage> -repository <repository>
```
- `-virtual-storage` is the virtual storage the repository is located in. Virtual storages are configured in `/etc/gitlab/gitlab.rb` under `praefect['virtual_storages']` and look like the following:
```ruby
praefect['virtual_storages'] = {
'default' => {
...
},
'storage-1' => {
...
}
}
```
In this example, the virtual storage to specify is `default` or `storage-1`.
- `-repository` is the repository's relative path in the storage [beginning with `@hashed`](../repository_storage_types.md#hashed-storage).
For example:
```plaintext
@hashed/f5/ca/f5ca38f748a1d6eaf726b8a42fb575c3c71f1864a8143301782de13da2d9202b.git
```
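For example, a minimal sketch of a full `remove-repository` invocation using the illustrative values above (the virtual storage name and hashed path are placeholders, not values from a real cluster):
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml remove-repository -virtual-storage default -repository @hashed/f5/ca/f5ca38f748a1d6eaf726b8a42fb575c3c71f1864a8143301782de13da2d9202b.git
```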
Parts of the repository can continue to exist after running `remove-repository`. This can be because of:
- A deletion error.
- An in-flight RPC call targeting the repository.
If this occurs, run `remove-repository` again.
### Manually list untracked repositories
> [Introduced](https://gitlab.com/gitlab-org/gitaly/-/merge_requests/3926) in GitLab 14.4.
The `list-untracked-repositories` Praefect sub-command lists repositories of the Gitaly Cluster that both:
- Exist for at least one Gitaly storage.
- Aren't tracked in the Praefect database.
The command outputs:
- Results to `STDOUT` and the command's logs.
- Errors to `STDERR`.
Each entry is a complete JSON string with a newline at the end (configurable using the
`-delimiter` flag). For example:
```plaintext
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml list-untracked-repositories
{"virtual_storage":"default","storage":"gitaly-1","relative_path":"@hashed/ab/cd/abcd123456789012345678901234567890123456789012345678901234567890.git"}
{"virtual_storage":"default","storage":"gitaly-1","relative_path":"@hashed/ab/cd/abcd123456789012345678901234567890123456789012345678901234567891.git"}
```
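A sketch of overriding the delimiter mentioned above, assuming the `-delimiter` flag accepts an arbitrary string (here a comma), so that entries are separated by commas instead of newlines:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml list-untracked-repositories -delimiter ','
```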
### Manually track repositories
> [Introduced](https://gitlab.com/gitlab-org/omnibus-gitlab/-/merge_requests/5658) in GitLab 14.4.
The `track-repository` Praefect sub-command adds repositories on disk to the Praefect database to be tracked.
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml track-repository -virtual-storage <virtual-storage> -repository <repository>
```
- `-virtual-storage` is the virtual storage the repository is located in. Virtual storages are configured in `/etc/gitlab/gitlab.rb` under `praefect['virtual_storages']` and look like the following:
```ruby
praefect['virtual_storages'] = {
'default' => {
...
},
'storage-1' => {
...
}
}
```
In this example, the virtual storage to specify is `default` or `storage-1`.
- `-repository` is the repository's relative path in the storage [beginning with `@hashed`](../repository_storage_types.md#hashed-storage).
For example:
```plaintext
@hashed/f5/ca/f5ca38f748a1d6eaf726b8a42fb575c3c71f1864a8143301782de13da2d9202b.git
```
- `-authoritative-storage` is the storage we want Praefect to treat as the primary. Required if
[per-repository replication](praefect.md#configure-replication-factor) is set as the replication strategy.
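Putting these options together, a sketch of tracking a repository when per-repository replication is in use (the virtual storage, hashed path, and storage name are illustrative values only):
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml track-repository -virtual-storage default -repository @hashed/f5/ca/f5ca38f748a1d6eaf726b8a42fb575c3c71f1864a8143301782de13da2d9202b.git -authoritative-storage gitaly-1
```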
The command outputs:
- Results to `STDOUT` and the command's logs.
- Errors to `STDERR`.
This command fails if:
- The repository is already being tracked by the Praefect database.
- The repository does not exist on disk.

View File

@ -376,7 +376,7 @@ Here are common errors and potential causes:
To determine the primary node of a repository:
- In GitLab 14.6 and later, use the [`praefect metadata`](praefect.md#retrieve-repository-metadata) subcommand.
- In GitLab 14.6 and later, use the [`praefect metadata`](#view-repository-metadata) subcommand.
- In GitLab 13.12 to GitLab 14.5 with [repository-specific primaries](praefect.md#repository-specific-primary-nodes),
use the [`gitlab:praefect:replicas` Rake task](../raketasks/praefect.md#replica-checksums).
- With legacy election strategies in GitLab 13.12 and earlier, the primary was the same for all repositories in a virtual storage.
@ -392,13 +392,97 @@ To determine the primary node of a repository:
curl localhost:9652/metrics | grep gitaly_praefect_primaries
```
### View repository metadata
> [Introduced](https://gitlab.com/gitlab-org/gitaly/-/issues/3481) in GitLab 14.6.
Gitaly Cluster maintains a [metadata database](index.md#components) about the repositories stored on the cluster. Use the `praefect metadata` subcommand
to inspect the metadata for troubleshooting.
You can retrieve a repository's metadata by its Praefect-assigned repository ID:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml metadata -repository-id <repository-id>
```
You can also retrieve a repository's metadata by its virtual storage and relative path:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml metadata -virtual-storage <virtual-storage> -relative-path <relative-path>
```
#### Examples
To retrieve the metadata for a repository with a Praefect-assigned repository ID of 1:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml metadata -repository-id 1
```
To retrieve the metadata for a repository with virtual storage `default` and relative path `@hashed/b1/7e/b17ef6d19c7a5b1ee83b907c595526dcb1eb06db8227d650d5dda0a9f4ce8cd9.git`:
```shell
sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml metadata -virtual-storage default -relative-path @hashed/b1/7e/b17ef6d19c7a5b1ee83b907c595526dcb1eb06db8227d650d5dda0a9f4ce8cd9.git
```
Either of these examples retrieves the following metadata for an example repository:
```plaintext
Repository ID: 54771
Virtual Storage: "default"
Relative Path: "@hashed/b1/7e/b17ef6d19c7a5b1ee83b907c595526dcb1eb06db8227d650d5dda0a9f4ce8cd9.git"
Replica Path: "@hashed/b1/7e/b17ef6d19c7a5b1ee83b907c595526dcb1eb06db8227d650d5dda0a9f4ce8cd9.git"
Primary: "gitaly-1"
Generation: 1
Replicas:
- Storage: "gitaly-1"
Assigned: true
Generation: 1, fully up to date
Healthy: true
Valid Primary: true
- Storage: "gitaly-2"
Assigned: true
Generation: 0, behind by 1 changes
Healthy: true
Valid Primary: false
- Storage: "gitaly-3"
Assigned: true
Generation: replica not yet created
Healthy: false
Valid Primary: false
```
#### Available metadata
The metadata retrieved by `praefect metadata` includes the fields in the following tables.
| Field | Description |
|:------------------|:-------------------------------------------------------------------------------------------------------------------|
| `Repository ID` | Permanent unique ID assigned to the repository by Praefect. Different to the ID GitLab uses for repositories. |
| `Virtual Storage` | Name of the virtual storage the repository is stored in. |
| `Relative Path` | Repository's path in the virtual storage. |
| `Replica Path` | Where on the Gitaly node's disk the repository's replicas are stored. |
| `Primary` | Current primary of the repository. |
| `Generation` | Used by Praefect to track repository changes. Each write in the repository increments the repository's generation. |
| `Replicas` | A list of replicas that exist or are expected to exist. |
For each replica, the following metadata is available:
| `Replicas` Field | Description |
|:-----------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `Storage` | Name of the Gitaly storage that contains the replica. |
| `Assigned` | Indicates whether the replica is expected to exist in the storage. Can be `false` if a Gitaly node is removed from the cluster or if the storage contains an extra copy after the repository's replication factor was decreased. |
| `Generation` | Latest confirmed generation of the replica. It indicates:<br><br>- The replica is fully up to date if the generation matches the repository's generation.<br>- The replica is outdated if the replica's generation is less than the repository's generation.<br>- `replica not yet created` if the replica does not yet exist at all on the storage. |
| `Healthy` | Indicates whether the Gitaly node that is hosting this replica is considered healthy by the consensus of Praefect nodes. |
| `Valid Primary` | Indicates whether the replica is fit to serve as the primary node. If the repository's primary is not a valid primary, a failover occurs on the next write to the repository if there is another replica that is a valid primary. A replica is a valid primary if:<br><br>- It is stored on a healthy Gitaly node.<br>- It is fully up to date.<br>- It is not targeted by a pending deletion job from decreasing replication factor.<br>- It is assigned. |
### Check that repositories are in sync
In [some cases](index.md#known-issues) the Praefect database can get out of sync with the underlying Gitaly nodes. To check that
a given repository is fully synced on all nodes, run the [`gitlab:praefect:replicas` Rake task](../raketasks/praefect.md#replica-checksums)
that checksums the repository on all Gitaly nodes.
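A minimal sketch of invoking that Rake task, assuming it takes the project ID as its argument (project ID `1` here is illustrative); see the linked Rake task documentation for the exact invocation:
```shell
sudo gitlab-rake "gitlab:praefect:replicas[1]"
```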
The [Praefect dataloss](praefect.md#check-for-data-loss) command only checks the state of the repo in the Praefect database, and cannot
The [Praefect dataloss](recovery.md#check-for-data-loss) command only checks the state of the repo in the Praefect database, and cannot
be relied on to detect sync problems in this scenario.
### Relation does not exist errors

View File

@ -1,6 +1,6 @@
---
stage: Release
group: Release
stage: Configure
group: Configure
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---

View File

@ -121,9 +121,7 @@ module API
not_found!('Packages') if packages.empty?
include_metadata = Feature.enabled?(:packages_npm_abbreviated_metadata, project, default_enabled: :yaml)
present ::Packages::Npm::PackagePresenter.new(package_name, packages, include_metadata: include_metadata),
present ::Packages::Npm::PackagePresenter.new(package_name, packages),
with: ::API::Entities::NpmPackage
end
end

View File

@ -105,7 +105,11 @@ module API
end
def promote_label(parent)
authorize! :admin_label, parent
unless parent.group
render_api_error!('Failed to promote project label to group label', 400)
end
authorize! :admin_label, parent.group
label = find_label(parent, params[:name], include_ancestor_groups: false)

View File

@ -303,7 +303,7 @@ module API
desc 'Get the context commits of a merge request' do
success Entities::Commit
end
get ':id/merge_requests/:merge_request_iid/context_commits', feature_category: :code_review do
get ':id/merge_requests/:merge_request_iid/context_commits', feature_category: :code_review, urgency: :high do
merge_request = find_merge_request_with_access(params[:merge_request_iid])
project = merge_request.project

View File

@ -8,6 +8,10 @@ module API
feature_category :global_search
rescue_from ActiveRecord::QueryCanceled do |e|
render_api_error!({ error: 'Request timed out' }, 408)
end
helpers do
SCOPE_ENTITY = {
merge_requests: Entities::MergeRequestBasic,

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
module BulkImports
module Projects
module Pipelines
class ContainerExpirationPolicyPipeline
include NdjsonPipeline
relation_name 'container_expiration_policy'
extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: relation
end
end
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
module BulkImports
module Projects
module Pipelines
class ServiceDeskSettingPipeline
include NdjsonPipeline
relation_name 'service_desk_setting'
extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: relation
end
end
end
end

View File

@ -59,6 +59,14 @@ module BulkImports
pipeline: BulkImports::Projects::Pipelines::ProjectFeaturePipeline,
stage: 4
},
container_expiration_policy: {
pipeline: BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline,
stage: 4
},
service_desk_setting: {
pipeline: BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline,
stage: 4
},
wiki: {
pipeline: BulkImports::Common::Pipelines::WikiPipeline,
stage: 5

View File

@ -94,11 +94,11 @@ module Gitlab
end
def job_class_name=(class_name)
write_attribute(:job_class_name, class_name.demodulize)
write_attribute(:job_class_name, class_name.delete_prefix("::"))
end
def batch_class_name=(class_name)
write_attribute(:batch_class_name, class_name.demodulize)
write_attribute(:batch_class_name, class_name.delete_prefix("::"))
end
def migrated_tuple_count

View File

@ -30097,9 +30097,6 @@ msgstr ""
msgid "Runners|Architecture"
msgstr ""
msgid "Runners|Are you sure you want to delete this runner?"
msgstr ""
msgid "Runners|Associated with one or more projects"
msgstr ""
@ -30121,6 +30118,12 @@ msgstr ""
msgid "Runners|Copy registration token"
msgstr ""
msgid "Runners|Delete runner"
msgstr ""
msgid "Runners|Delete runner %{name}?"
msgstr ""
msgid "Runners|Deploy GitLab Runner in AWS"
msgstr ""
@ -30241,6 +30244,9 @@ msgstr ""
msgid "Runners|Runner #%{runner_id}"
msgstr ""
msgid "Runners|Runner %{name} was deleted"
msgstr ""
msgid "Runners|Runner ID"
msgstr ""
@ -30298,6 +30304,9 @@ msgstr ""
msgid "Runners|Tags"
msgstr ""
msgid "Runners|The runner will be permanently deleted and no longer available for projects or groups in the instance. Are you sure you want to continue?"
msgstr ""
msgid "Runners|This runner has never connected to this instance"
msgstr ""

View File

@ -19,7 +19,7 @@ RSpec.describe AbuseReportsController do
context 'when the user has already been deleted' do
it 'redirects the reporter to root_path' do
user_id = user.id
user.destroy
user.destroy!
get :new, params: { user_id: user_id }

View File

@ -484,7 +484,7 @@ RSpec.describe Boards::IssuesController do
context 'with guest user' do
context 'in open list' do
it 'returns a successful 200 response' do
open_list = board.lists.create(list_type: :backlog)
open_list = board.lists.create!(list_type: :backlog)
create_issue user: guest, board: board, list: open_list, title: 'New issue'
expect(response).to have_gitlab_http_status(:ok)

View File

@ -479,6 +479,19 @@ RSpec.describe OmniauthCallbacksController, type: :controller do
post :saml, params: { SAMLResponse: mock_saml_response }
end
end
context 'with a blocked user trying to log in when there are hooks set up' do
let(:user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'saml') }
subject(:post_action) { post :saml, params: { SAMLResponse: mock_saml_response } }
before do
create(:system_hook)
user.block!
end
it { expect { post_action }.not_to raise_error }
end
end
describe 'enable admin mode' do

View File

@ -328,6 +328,7 @@ RSpec.describe SearchController do
describe 'GET #autocomplete' do
it_behaves_like 'when the user cannot read cross project', :autocomplete, { term: 'hello' }
it_behaves_like 'with external authorization service enabled', :autocomplete, { term: 'hello' }
it_behaves_like 'support for active record query timeouts', :autocomplete, { term: 'hello' }, :project, :json
end
describe '#append_info_to_payload' do

View File

@ -10,19 +10,19 @@ RSpec.describe SentNotificationsController do
let(:issue) do
create(:issue, project: target_project) do |issue|
issue.subscriptions.create(user: user, project: target_project, subscribed: true)
issue.subscriptions.create!(user: user, project: target_project, subscribed: true)
end
end
let(:confidential_issue) do
create(:issue, project: target_project, confidential: true) do |issue|
issue.subscriptions.create(user: user, project: target_project, subscribed: true)
issue.subscriptions.create!(user: user, project: target_project, subscribed: true)
end
end
let(:merge_request) do
create(:merge_request, source_project: target_project, target_project: target_project) do |mr|
mr.subscriptions.create(user: user, project: target_project, subscribed: true)
mr.subscriptions.create!(user: user, project: target_project, subscribed: true)
end
end
@ -213,7 +213,7 @@ RSpec.describe SentNotificationsController do
context 'when the force param is not passed' do
let(:merge_request) do
create(:merge_request, source_project: project, author: user) do |merge_request|
merge_request.subscriptions.create(user: user, project: project, subscribed: true)
merge_request.subscriptions.create!(user: user, project: project, subscribed: true)
end
end

View File

@ -403,7 +403,7 @@ RSpec.describe SessionsController do
context 'when the user is on their last attempt' do
before do
user.update(failed_attempts: User.maximum_attempts.pred)
user.update!(failed_attempts: User.maximum_attempts.pred)
end
context 'when OTP is valid' do

View File

@ -59,6 +59,42 @@ RSpec.describe "Admin Runners" do
end
end
describe 'delete runner' do
let!(:runner) { create(:ci_runner, description: 'runner-foo') }
before do
visit admin_runners_path
within "[data-testid='runner-row-#{runner.id}']" do
click_on 'Delete runner'
end
end
it 'shows a confirmation modal' do
expect(page).to have_text "Delete runner ##{runner.id} (#{runner.short_sha})?"
expect(page).to have_text "Are you sure you want to continue?"
end
it 'deletes a runner' do
within '.modal' do
click_on 'Delete runner'
end
expect(page.find('.gl-toast')).to have_text(/Runner .+ deleted/)
expect(page).not_to have_content 'runner-foo'
end
it 'cancels runner deletion' do
within '.modal' do
click_on 'Cancel'
end
wait_for_requests
expect(page).to have_content 'runner-foo'
end
end
describe 'search' do
before do
create(:ci_runner, :instance, description: 'runner-foo')

View File

@ -34,16 +34,22 @@ describe('ActiveCheckbox', () => {
});
});
describe('initialActivated is false', () => {
it('renders GlFormCheckbox as unchecked', () => {
describe('initialActivated is `false`', () => {
beforeEach(() => {
createComponent({
initialActivated: false,
});
});
it('renders GlFormCheckbox as unchecked', () => {
expect(findGlFormCheckbox().exists()).toBe(true);
expect(findGlFormCheckbox().vm.$attrs.checked).toBe(false);
expect(findInputInCheckbox().attributes('disabled')).toBeUndefined();
});
it('emits `toggle-integration-active` event with `false` on mount', () => {
expect(wrapper.emitted('toggle-integration-active')[0]).toEqual([false]);
});
});
describe('initialActivated is true', () => {
@ -63,10 +69,21 @@ describe('ActiveCheckbox', () => {
findInputInCheckbox().trigger('click');
await wrapper.vm.$nextTick();
expect(findGlFormCheckbox().vm.$attrs.checked).toBe(false);
});
});
it('emits `toggle-integration-active` event with `true` on mount', () => {
expect(wrapper.emitted('toggle-integration-active')[0]).toEqual([true]);
});
describe('on checkbox `change` event', () => {
it('emits `toggle-integration-active` event', () => {
findGlFormCheckbox().vm.$emit('change', false);
expect(wrapper.emitted('toggle-integration-active')[1]).toEqual([false]);
});
});
});
});
});

View File

@ -11,8 +11,15 @@ import JiraTriggerFields from '~/integrations/edit/components/jira_trigger_field
import OverrideDropdown from '~/integrations/edit/components/override_dropdown.vue';
import ResetConfirmationModal from '~/integrations/edit/components/reset_confirmation_modal.vue';
import TriggerFields from '~/integrations/edit/components/trigger_fields.vue';
import { integrationLevels } from '~/integrations/constants';
import {
integrationLevels,
TEST_INTEGRATION_EVENT,
SAVE_INTEGRATION_EVENT,
} from '~/integrations/constants';
import { createStore } from '~/integrations/edit/store';
import eventHub from '~/integrations/edit/event_hub';
jest.mock('~/integrations/edit/event_hub');
describe('IntegrationForm', () => {
let wrapper;
@ -31,7 +38,7 @@ describe('IntegrationForm', () => {
dispatch = jest.spyOn(store, 'dispatch').mockImplementation();
wrapper = shallowMountExtended(IntegrationForm, {
propsData: { ...props },
propsData: { ...props, formSelector: '.test' },
store,
stubs: {
OverrideDropdown,
@ -55,31 +62,13 @@ describe('IntegrationForm', () => {
const findConfirmationModal = () => wrapper.findComponent(ConfirmationModal);
const findResetConfirmationModal = () => wrapper.findComponent(ResetConfirmationModal);
const findResetButton = () => wrapper.findByTestId('reset-button');
const findSaveButton = () => wrapper.findByTestId('save-button');
const findTestButton = () => wrapper.findByTestId('test-button');
const findJiraTriggerFields = () => wrapper.findComponent(JiraTriggerFields);
const findJiraIssuesFields = () => wrapper.findComponent(JiraIssuesFields);
const findTriggerFields = () => wrapper.findComponent(TriggerFields);
describe('template', () => {
describe('showActive is true', () => {
it('renders ActiveCheckbox', () => {
createComponent();
expect(findActiveCheckbox().exists()).toBe(true);
});
});
describe('showActive is false', () => {
it('does not render ActiveCheckbox', () => {
createComponent({
customStateProps: {
showActive: false,
},
});
expect(findActiveCheckbox().exists()).toBe(false);
});
});
describe('integrationLevel is instance', () => {
it('renders ConfirmationModal', () => {
createComponent({
@ -323,4 +312,122 @@ describe('IntegrationForm', () => {
});
});
});
describe('ActiveCheckbox', () => {
describe.each`
showActive
${true}
${false}
`('when `showActive` is $showActive', ({ showActive }) => {
it(`${showActive ? 'renders' : 'does not render'} ActiveCheckbox`, () => {
createComponent({
customStateProps: {
showActive,
},
});
expect(findActiveCheckbox().exists()).toBe(showActive);
});
});
describe.each`
formActive | novalidate
${true} | ${null}
${false} | ${'true'}
`(
'when `toggle-integration-active` is emitted with $formActive',
({ formActive, novalidate }) => {
let mockForm;
beforeEach(async () => {
mockForm = document.createElement('form');
jest.spyOn(document, 'querySelector').mockReturnValue(mockForm);
createComponent({
customStateProps: {
showActive: true,
initialActivated: false,
},
});
await findActiveCheckbox().vm.$emit('toggle-integration-active', formActive);
});
it(`sets noValidate to ${novalidate}`, () => {
expect(mockForm.getAttribute('novalidate')).toBe(novalidate);
});
},
);
});
describe('when `save` button is clicked', () => {
let mockForm;
describe.each`
checkValidityReturn | integrationActive | formValid
${true} | ${false} | ${true}
${true} | ${true} | ${true}
${false} | ${true} | ${false}
${false} | ${false} | ${true}
`(
'when form checkValidity returns $checkValidityReturn and integrationActive is $integrationActive',
({ formValid, integrationActive, checkValidityReturn }) => {
beforeEach(() => {
mockForm = document.createElement('form');
jest.spyOn(document, 'querySelector').mockReturnValue(mockForm);
jest.spyOn(mockForm, 'checkValidity').mockReturnValue(checkValidityReturn);
createComponent({
customStateProps: {
showActive: true,
initialActivated: integrationActive,
},
});
findSaveButton().vm.$emit('click', new Event('click'));
});
it('dispatches setIsSaving action', () => {
expect(dispatch).toHaveBeenCalledWith('setIsSaving', true);
});
it(`emits \`SAVE_INTEGRATION_EVENT\` event with payload \`${formValid}\``, () => {
expect(eventHub.$emit).toHaveBeenCalledWith(SAVE_INTEGRATION_EVENT, formValid);
});
},
);
});
describe('when `test` button is clicked', () => {
let mockForm;
describe.each`
formValid
${true}
${false}
`('when form checkValidity returns $formValid', ({ formValid }) => {
beforeEach(() => {
mockForm = document.createElement('form');
jest.spyOn(document, 'querySelector').mockReturnValue(mockForm);
jest.spyOn(mockForm, 'checkValidity').mockReturnValue(formValid);
createComponent({
customStateProps: {
showActive: true,
canTest: true,
},
});
findTestButton().vm.$emit('click', new Event('click'));
});
it('dispatches setIsTesting action', () => {
expect(dispatch).toHaveBeenCalledWith('setIsTesting', true);
});
it(`emits \`TEST_INTEGRATION_EVENT\` event with payload \`${formValid}\``, () => {
expect(eventHub.$emit).toHaveBeenCalledWith(TEST_INTEGRATION_EVENT, formValid);
});
});
});
});

View File

@ -6,7 +6,6 @@ import toast from '~/vue_shared/plugins/global_toast';
import {
I18N_SUCCESSFUL_CONNECTION_MESSAGE,
I18N_DEFAULT_ERROR_MESSAGE,
TOGGLE_INTEGRATION_EVENT,
TEST_INTEGRATION_EVENT,
SAVE_INTEGRATION_EVENT,
} from '~/integrations/constants';
@ -16,6 +15,7 @@ jest.mock('~/vue_shared/plugins/global_toast');
jest.mock('lodash/delay', () => (callback) => callback());
const FIXTURE = 'services/edit_service.html';
const mockFormSelector = '.js-integration-settings-form';
describe('IntegrationSettingsForm', () => {
let integrationSettingsForm;
@ -25,7 +25,7 @@ describe('IntegrationSettingsForm', () => {
beforeEach(() => {
loadFixtures(FIXTURE);
integrationSettingsForm = new IntegrationSettingsForm('.js-integration-settings-form');
integrationSettingsForm = new IntegrationSettingsForm(mockFormSelector);
integrationSettingsForm.init();
});
@ -33,7 +33,7 @@ describe('IntegrationSettingsForm', () => {
it('should initialize form element refs on class object', () => {
expect(integrationSettingsForm.$form).toBeDefined();
expect(integrationSettingsForm.$form.nodeName).toBe('FORM');
expect(integrationSettingsForm.formActive).toBeDefined();
expect(integrationSettingsForm.formSelector).toBe(mockFormSelector);
});
it('should initialize form metadata on class object', () => {
@ -47,6 +47,8 @@ describe('IntegrationSettingsForm', () => {
beforeEach(() => {
mockAxios = new MockAdaptor(axios);
jest.spyOn(axios, 'put');
jest.spyOn(integrationSettingsForm, 'testSettings');
jest.spyOn(integrationSettingsForm.$form, 'submit');
});
afterEach(() => {
@ -54,28 +56,10 @@ describe('IntegrationSettingsForm', () => {
eventHub.dispose(); // clear event hub handlers
});
describe('when event hub receives `TOGGLE_INTEGRATION_EVENT`', () => {
it('should remove `novalidate` attribute to form when called with `true`', () => {
eventHub.$emit(TOGGLE_INTEGRATION_EVENT, true);
expect(integrationSettingsForm.$form.getAttribute('novalidate')).toBe(null);
});
it('should set `novalidate` attribute to form when called with `false`', () => {
eventHub.$emit(TOGGLE_INTEGRATION_EVENT, false);
expect(integrationSettingsForm.$form.getAttribute('novalidate')).toBe('novalidate');
});
});
describe('when event hub receives `TEST_INTEGRATION_EVENT`', () => {
describe('when form is valid', () => {
beforeEach(() => {
jest.spyOn(integrationSettingsForm.$form, 'checkValidity').mockReturnValue(true);
});
it('should make an ajax request with provided `formData`', async () => {
eventHub.$emit(TEST_INTEGRATION_EVENT);
eventHub.$emit(TEST_INTEGRATION_EVENT, true);
await waitForPromises();
expect(axios.put).toHaveBeenCalledWith(
@ -91,7 +75,7 @@ describe('IntegrationSettingsForm', () => {
error: false,
});
eventHub.$emit(TEST_INTEGRATION_EVENT);
eventHub.$emit(TEST_INTEGRATION_EVENT, true);
await waitForPromises();
expect(toast).toHaveBeenCalledWith(I18N_SUCCESSFUL_CONNECTION_MESSAGE);
@ -108,7 +92,7 @@ describe('IntegrationSettingsForm', () => {
test_failed: false,
});
eventHub.$emit(TEST_INTEGRATION_EVENT);
eventHub.$emit(TEST_INTEGRATION_EVENT, true);
await waitForPromises();
expect(toast).toHaveBeenCalledWith(`${errorMessage} ${serviceResponse}`);
@ -117,7 +101,7 @@ describe('IntegrationSettingsForm', () => {
it('should show error message if ajax request failed', async () => {
mockAxios.onPut(integrationSettingsForm.testEndPoint).networkError();
eventHub.$emit(TEST_INTEGRATION_EVENT);
eventHub.$emit(TEST_INTEGRATION_EVENT, true);
await waitForPromises();
expect(toast).toHaveBeenCalledWith(I18N_DEFAULT_ERROR_MESSAGE);
@ -127,7 +111,7 @@ describe('IntegrationSettingsForm', () => {
const dispatchSpy = mockStoreDispatch();
mockAxios.onPut(integrationSettingsForm.testEndPoint).networkError();
eventHub.$emit(TEST_INTEGRATION_EVENT);
eventHub.$emit(TEST_INTEGRATION_EVENT, true);
await waitForPromises();
expect(dispatchSpy).toHaveBeenCalledWith('setIsTesting', false);
@ -135,15 +119,10 @@ describe('IntegrationSettingsForm', () => {
});
describe('when form is invalid', () => {
beforeEach(() => {
jest.spyOn(integrationSettingsForm.$form, 'checkValidity').mockReturnValue(false);
jest.spyOn(integrationSettingsForm, 'testSettings');
});
it('should dispatch `setIsTesting` with `false` and not call `testSettings`', async () => {
const dispatchSpy = mockStoreDispatch();
eventHub.$emit(TEST_INTEGRATION_EVENT);
eventHub.$emit(TEST_INTEGRATION_EVENT, false);
await waitForPromises();
expect(dispatchSpy).toHaveBeenCalledWith('setIsTesting', false);
@ -154,13 +133,8 @@ describe('IntegrationSettingsForm', () => {
describe('when event hub receives `SAVE_INTEGRATION_EVENT`', () => {
describe('when form is valid', () => {
beforeEach(() => {
jest.spyOn(integrationSettingsForm.$form, 'checkValidity').mockReturnValue(true);
jest.spyOn(integrationSettingsForm.$form, 'submit');
});
it('should submit the form', async () => {
eventHub.$emit(SAVE_INTEGRATION_EVENT);
eventHub.$emit(SAVE_INTEGRATION_EVENT, true);
await waitForPromises();
expect(integrationSettingsForm.$form.submit).toHaveBeenCalled();
@ -169,15 +143,10 @@ describe('IntegrationSettingsForm', () => {
});
describe('when form is invalid', () => {
beforeEach(() => {
jest.spyOn(integrationSettingsForm.$form, 'checkValidity').mockReturnValue(false);
jest.spyOn(integrationSettingsForm.$form, 'submit');
});
it('should dispatch `setIsSaving` with `false` and not submit form', async () => {
const dispatchSpy = mockStoreDispatch();
eventHub.$emit(SAVE_INTEGRATION_EVENT);
eventHub.$emit(SAVE_INTEGRATION_EVENT, false);
await waitForPromises();

View File

@ -3,13 +3,17 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import createFlash from '~/flash';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { captureException } from '~/runner/sentry_utils';
import RunnerActionCell from '~/runner/components/cells/runner_actions_cell.vue';
import RunnerDeleteModal from '~/runner/components/runner_delete_modal.vue';
import getGroupRunnersQuery from '~/runner/graphql/get_group_runners.query.graphql';
import getRunnersQuery from '~/runner/graphql/get_runners.query.graphql';
import runnerDeleteMutation from '~/runner/graphql/runner_delete.mutation.graphql';
import runnerActionsUpdateMutation from '~/runner/graphql/runner_actions_update.mutation.graphql';
import { captureException } from '~/runner/sentry_utils';
import { runnersData } from '../../mock_data';
const mockRunner = runnersData.data.runners.nodes[0];
@ -25,12 +29,16 @@ jest.mock('~/runner/sentry_utils');
describe('RunnerTypeCell', () => {
let wrapper;
const mockToastShow = jest.fn();
const runnerDeleteMutationHandler = jest.fn();
const runnerActionsUpdateMutationHandler = jest.fn();
const findEditBtn = () => wrapper.findByTestId('edit-runner');
const findToggleActiveBtn = () => wrapper.findByTestId('toggle-active-runner');
const findRunnerDeleteModal = () => wrapper.findComponent(RunnerDeleteModal);
const findDeleteBtn = () => wrapper.findByTestId('delete-runner');
const getTooltip = (w) => getBinding(w.element, 'gl-tooltip')?.value;
const createComponent = ({ active = true } = {}, options) => {
wrapper = extendedWrapper(
@ -38,6 +46,7 @@ describe('RunnerTypeCell', () => {
propsData: {
runner: {
id: mockRunner.id,
shortSha: mockRunner.shortSha,
adminUrl: mockRunner.adminUrl,
active,
},
@ -47,6 +56,15 @@ describe('RunnerTypeCell', () => {
[runnerDeleteMutation, runnerDeleteMutationHandler],
[runnerActionsUpdateMutation, runnerActionsUpdateMutationHandler],
]),
directives: {
GlTooltip: createMockDirective(),
GlModal: createMockDirective(),
},
mocks: {
$toast: {
show: mockToastShow,
},
},
...options,
}),
);
@ -72,197 +90,85 @@ describe('RunnerTypeCell', () => {
});
afterEach(() => {
mockToastShow.mockReset();
runnerDeleteMutationHandler.mockReset();
runnerActionsUpdateMutationHandler.mockReset();
wrapper.destroy();
});
it('Displays the runner edit link with the correct href', () => {
createComponent();
describe('Edit Action', () => {
it('Displays the runner edit link with the correct href', () => {
createComponent();
expect(findEditBtn().attributes('href')).toBe(mockRunner.adminUrl);
expect(findEditBtn().attributes('href')).toBe(mockRunner.adminUrl);
});
});
describe.each`
state | label | icon | isActive | newActiveValue
${'active'} | ${'Pause'} | ${'pause'} | ${true} | ${false}
${'paused'} | ${'Resume'} | ${'play'} | ${false} | ${true}
`('When the runner is $state', ({ label, icon, isActive, newActiveValue }) => {
beforeEach(() => {
createComponent({ active: isActive });
});
it(`Displays a ${icon} button`, () => {
expect(findToggleActiveBtn().props('loading')).toBe(false);
expect(findToggleActiveBtn().props('icon')).toBe(icon);
expect(findToggleActiveBtn().attributes('title')).toBe(label);
expect(findToggleActiveBtn().attributes('aria-label')).toBe(label);
});
it(`After clicking the ${icon} button, the button has a loading state`, async () => {
await findToggleActiveBtn().vm.$emit('click');
expect(findToggleActiveBtn().props('loading')).toBe(true);
});
it(`After the ${icon} button is clicked, stale tooltip is removed`, async () => {
await findToggleActiveBtn().vm.$emit('click');
expect(findToggleActiveBtn().attributes('title')).toBe('');
expect(findToggleActiveBtn().attributes('aria-label')).toBe('');
});
describe(`When clicking on the ${icon} button`, () => {
it(`The apollo mutation to set active to ${newActiveValue} is called`, async () => {
expect(runnerActionsUpdateMutationHandler).toHaveBeenCalledTimes(0);
await findToggleActiveBtn().vm.$emit('click');
expect(runnerActionsUpdateMutationHandler).toHaveBeenCalledTimes(1);
expect(runnerActionsUpdateMutationHandler).toHaveBeenCalledWith({
input: {
id: mockRunner.id,
active: newActiveValue,
},
});
describe('Toggle active action', () => {
describe.each`
state | label | icon | isActive | newActiveValue
${'active'} | ${'Pause'} | ${'pause'} | ${true} | ${false}
${'paused'} | ${'Resume'} | ${'play'} | ${false} | ${true}
`('When the runner is $state', ({ label, icon, isActive, newActiveValue }) => {
beforeEach(() => {
createComponent({ active: isActive });
});
it('The button does not have a loading state after the mutation occurs', async () => {
it(`Displays a ${icon} button`, () => {
expect(findToggleActiveBtn().props('loading')).toBe(false);
expect(findToggleActiveBtn().props('icon')).toBe(icon);
expect(getTooltip(findToggleActiveBtn())).toBe(label);
expect(findToggleActiveBtn().attributes('aria-label')).toBe(label);
});
it(`After clicking the ${icon} button, the button has a loading state`, async () => {
await findToggleActiveBtn().vm.$emit('click');
expect(findToggleActiveBtn().props('loading')).toBe(true);
await waitForPromises();
expect(findToggleActiveBtn().props('loading')).toBe(false);
});
});
describe('When update fails', () => {
describe('On a network error', () => {
const mockErrorMsg = 'Update error!';
it(`After the ${icon} button is clicked, stale tooltip is removed`, async () => {
await findToggleActiveBtn().vm.$emit('click');
beforeEach(async () => {
runnerActionsUpdateMutationHandler.mockRejectedValueOnce(new Error(mockErrorMsg));
expect(getTooltip(findToggleActiveBtn())).toBe('');
expect(findToggleActiveBtn().attributes('aria-label')).toBe('');
});
describe(`When clicking on the ${icon} button`, () => {
it(`The apollo mutation to set active to ${newActiveValue} is called`, async () => {
expect(runnerActionsUpdateMutationHandler).toHaveBeenCalledTimes(0);
await findToggleActiveBtn().vm.$emit('click');
});
it('error is reported to sentry', () => {
expect(captureException).toHaveBeenCalledWith({
error: new Error(`Network error: ${mockErrorMsg}`),
component: 'RunnerActionsCell',
});
});
it('error is shown to the user', () => {
expect(createFlash).toHaveBeenCalledTimes(1);
});
});
describe('On a validation error', () => {
const mockErrorMsg = 'Runner not found!';
const mockErrorMsg2 = 'User not allowed!';
beforeEach(async () => {
runnerActionsUpdateMutationHandler.mockResolvedValue({
data: {
runnerUpdate: {
runner: mockRunner,
errors: [mockErrorMsg, mockErrorMsg2],
},
},
});
await findToggleActiveBtn().vm.$emit('click');
});
it('error is reported to sentry', () => {
expect(captureException).toHaveBeenCalledWith({
error: new Error(`${mockErrorMsg} ${mockErrorMsg2}`),
component: 'RunnerActionsCell',
});
});
it('error is shown to the user', () => {
expect(createFlash).toHaveBeenCalledTimes(1);
});
});
});
});
describe('When the user clicks a runner', () => {
beforeEach(() => {
jest.spyOn(window, 'confirm');
createComponent();
});
afterEach(() => {
window.confirm.mockRestore();
});
describe('When the user confirms deletion', () => {
beforeEach(async () => {
window.confirm.mockReturnValue(true);
await findDeleteBtn().vm.$emit('click');
});
it('The user sees a confirmation alert', () => {
expect(window.confirm).toHaveBeenCalledTimes(1);
expect(window.confirm).toHaveBeenCalledWith(expect.any(String));
});
it('The delete mutation is called correctly', () => {
expect(runnerDeleteMutationHandler).toHaveBeenCalledTimes(1);
expect(runnerDeleteMutationHandler).toHaveBeenCalledWith({
input: { id: mockRunner.id },
});
});
it('When delete mutation is called, current runners are refetched', async () => {
jest.spyOn(wrapper.vm.$apollo, 'mutate');
await findDeleteBtn().vm.$emit('click');
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
mutation: runnerDeleteMutation,
variables: {
expect(runnerActionsUpdateMutationHandler).toHaveBeenCalledTimes(1);
expect(runnerActionsUpdateMutationHandler).toHaveBeenCalledWith({
input: {
id: mockRunner.id,
active: newActiveValue,
},
},
awaitRefetchQueries: true,
refetchQueries: [getRunnersQueryName, getGroupRunnersQueryName],
});
});
it('The button does not have a loading state after the mutation occurs', async () => {
await findToggleActiveBtn().vm.$emit('click');
expect(findToggleActiveBtn().props('loading')).toBe(true);
await waitForPromises();
expect(findToggleActiveBtn().props('loading')).toBe(false);
});
});
it('The delete button does not have a loading state', () => {
expect(findDeleteBtn().props('loading')).toBe(false);
expect(findDeleteBtn().attributes('title')).toBe('Remove');
});
it('After the delete button is clicked, loading state is shown', async () => {
await findDeleteBtn().vm.$emit('click');
expect(findDeleteBtn().props('loading')).toBe(true);
});
it('After the delete button is clicked, stale tooltip is removed', async () => {
await findDeleteBtn().vm.$emit('click');
expect(findDeleteBtn().attributes('title')).toBe('');
});
describe('When delete fails', () => {
describe('When update fails', () => {
describe('On a network error', () => {
const mockErrorMsg = 'Delete error!';
const mockErrorMsg = 'Update error!';
beforeEach(async () => {
runnerDeleteMutationHandler.mockRejectedValueOnce(new Error(mockErrorMsg));
runnerActionsUpdateMutationHandler.mockRejectedValueOnce(new Error(mockErrorMsg));
await findDeleteBtn().vm.$emit('click');
await findToggleActiveBtn().vm.$emit('click');
});
it('error is reported to sentry', () => {
@ -282,15 +188,16 @@ describe('RunnerTypeCell', () => {
const mockErrorMsg2 = 'User not allowed!';
beforeEach(async () => {
runnerDeleteMutationHandler.mockResolvedValue({
runnerActionsUpdateMutationHandler.mockResolvedValue({
data: {
runnerDelete: {
runnerUpdate: {
runner: mockRunner,
errors: [mockErrorMsg, mockErrorMsg2],
},
},
});
await findDeleteBtn().vm.$emit('click');
await findToggleActiveBtn().vm.$emit('click');
});
it('error is reported to sentry', () => {
@ -306,24 +213,129 @@ describe('RunnerTypeCell', () => {
});
});
});
});
describe('When the user does not confirm deletion', () => {
beforeEach(async () => {
window.confirm.mockReturnValue(false);
await findDeleteBtn().vm.$emit('click');
describe('Delete action', () => {
beforeEach(() => {
createComponent(
{},
{
stubs: { RunnerDeleteModal },
},
);
});
it('Delete button opens delete modal', () => {
const modalId = getBinding(findDeleteBtn().element, 'gl-modal').value;
expect(findRunnerDeleteModal().attributes('modal-id')).toBeDefined();
expect(findRunnerDeleteModal().attributes('modal-id')).toBe(modalId);
});
it('Delete modal shows the runner name', () => {
expect(findRunnerDeleteModal().props('runnerName')).toBe(
`#${getIdFromGraphQLId(mockRunner.id)} (${mockRunner.shortSha})`,
);
});
it('The delete button does not have a loading icon', () => {
expect(findDeleteBtn().props('loading')).toBe(false);
expect(getTooltip(findDeleteBtn())).toBe('Delete runner');
});
it('When delete mutation is called, current runners are refetched', () => {
jest.spyOn(wrapper.vm.$apollo, 'mutate');
findRunnerDeleteModal().vm.$emit('primary');
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
mutation: runnerDeleteMutation,
variables: {
input: {
id: mockRunner.id,
},
},
awaitRefetchQueries: true,
refetchQueries: [getRunnersQueryName, getGroupRunnersQueryName],
});
});
describe('When delete is clicked', () => {
beforeEach(() => {
findRunnerDeleteModal().vm.$emit('primary');
});
it('The user sees a confirmation alert', () => {
expect(window.confirm).toHaveBeenCalledTimes(1);
it('The delete mutation is called correctly', () => {
expect(runnerDeleteMutationHandler).toHaveBeenCalledTimes(1);
expect(runnerDeleteMutationHandler).toHaveBeenCalledWith({
input: { id: mockRunner.id },
});
});
it('The delete mutation is not called', () => {
expect(runnerDeleteMutationHandler).toHaveBeenCalledTimes(0);
it('The delete button has a loading icon', () => {
expect(findDeleteBtn().props('loading')).toBe(true);
expect(getTooltip(findDeleteBtn())).toBe('');
});
it('The delete button does not have a loading state', () => {
expect(findDeleteBtn().props('loading')).toBe(false);
expect(findDeleteBtn().attributes('title')).toBe('Remove');
it('The toast notification is shown', () => {
expect(mockToastShow).toHaveBeenCalledTimes(1);
expect(mockToastShow).toHaveBeenCalledWith(
expect.stringContaining(`#${getIdFromGraphQLId(mockRunner.id)} (${mockRunner.shortSha})`),
);
});
});
describe('When delete fails', () => {
describe('On a network error', () => {
const mockErrorMsg = 'Delete error!';
beforeEach(() => {
runnerDeleteMutationHandler.mockRejectedValueOnce(new Error(mockErrorMsg));
findRunnerDeleteModal().vm.$emit('primary');
});
it('error is reported to sentry', () => {
expect(captureException).toHaveBeenCalledWith({
error: new Error(`Network error: ${mockErrorMsg}`),
component: 'RunnerActionsCell',
});
});
it('error is shown to the user', () => {
expect(createFlash).toHaveBeenCalledTimes(1);
});
it('toast notification is not shown', () => {
expect(mockToastShow).not.toHaveBeenCalled();
});
});
describe('On a validation error', () => {
const mockErrorMsg = 'Runner not found!';
const mockErrorMsg2 = 'User not allowed!';
beforeEach(() => {
runnerDeleteMutationHandler.mockResolvedValue({
data: {
runnerDelete: {
errors: [mockErrorMsg, mockErrorMsg2],
},
},
});
findRunnerDeleteModal().vm.$emit('primary');
});
it('error is reported to sentry', () => {
expect(captureException).toHaveBeenCalledWith({
error: new Error(`${mockErrorMsg} ${mockErrorMsg2}`),
component: 'RunnerActionsCell',
});
});
it('error is shown to the user', () => {
expect(createFlash).toHaveBeenCalledTimes(1);
});
});
});
});

View File

@ -0,0 +1,60 @@
import { GlModal } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import RunnerDeleteModal from '~/runner/components/runner_delete_modal.vue';
describe('RunnerDeleteModal', () => {
let wrapper;
const findGlModal = () => wrapper.findComponent(GlModal);
const createComponent = ({ props = {} } = {}, mountFn = shallowMount) => {
wrapper = mountFn(RunnerDeleteModal, {
attachTo: document.body,
propsData: {
runnerName: '#99 (AABBCCDD)',
...props,
},
attrs: {
modalId: 'delete-runner-modal-99',
},
});
};
it('Displays title', () => {
createComponent();
expect(findGlModal().props('title')).toBe('Delete runner #99 (AABBCCDD)?');
});
it('Displays buttons', () => {
createComponent();
expect(findGlModal().props('actionPrimary')).toMatchObject({ text: 'Delete runner' });
expect(findGlModal().props('actionCancel')).toMatchObject({ text: 'Cancel' });
});
it('Displays contents', () => {
createComponent();
expect(findGlModal().html()).toContain(
'The runner will be permanently deleted and no longer available for projects or groups in the instance. Are you sure you want to continue?',
);
});
describe('When modal is confirmed by the user', () => {
let hideModalSpy;
beforeEach(() => {
createComponent({}, mount);
hideModalSpy = jest.spyOn(wrapper.vm.$refs.modal, 'hide').mockImplementation(() => {});
});
it('Modal gets hidden', () => {
expect(hideModalSpy).toHaveBeenCalledTimes(0);
findGlModal().vm.$emit('primary');
expect(hideModalSpy).toHaveBeenCalledTimes(1);
});
});
});

View File

@ -52,6 +52,12 @@ describe('RunnerList', () => {
]);
});
it('Sets runner id as a row key', () => {
createComponent({}, shallowMount);
expect(findTable().attributes('primary-key')).toBe('id');
});
it('Displays a list of runners', () => {
expect(findRows()).toHaveLength(4);

View File

@ -10,16 +10,26 @@ import DropdownContents from '~/vue_shared/components/sidebar/labels_select_widg
import DropdownValue from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_value.vue';
import DropdownValueCollapsed from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_value_collapsed.vue';
import issueLabelsQuery from '~/vue_shared/components/sidebar/labels_select_widget/graphql/issue_labels.query.graphql';
import updateIssueLabelsMutation from '~/boards/graphql/issue_set_labels.mutation.graphql';
import updateMergeRequestLabelsMutation from '~/sidebar/queries/update_merge_request_labels.mutation.graphql';
import updateEpicLabelsMutation from '~/vue_shared/components/sidebar/labels_select_widget/graphql/epic_update_labels.mutation.graphql';
import LabelsSelectRoot from '~/vue_shared/components/sidebar/labels_select_widget/labels_select_root.vue';
import { mockConfig, issuableLabelsQueryResponse } from './mock_data';
import { mockConfig, issuableLabelsQueryResponse, updateLabelsMutationResponse } from './mock_data';
jest.mock('~/flash');
Vue.use(VueApollo);
const successfulQueryHandler = jest.fn().mockResolvedValue(issuableLabelsQueryResponse);
const successfulMutationHandler = jest.fn().mockResolvedValue(updateLabelsMutationResponse);
const errorQueryHandler = jest.fn().mockRejectedValue('Houston, we have a problem');
const updateLabelsMutation = {
[IssuableType.Issue]: updateIssueLabelsMutation,
[IssuableType.MergeRequest]: updateMergeRequestLabelsMutation,
[IssuableType.Epic]: updateEpicLabelsMutation,
};
describe('LabelsSelectRoot', () => {
let wrapper;
@ -31,16 +41,21 @@ describe('LabelsSelectRoot', () => {
const createComponent = ({
config = mockConfig,
slots = {},
issuableType = IssuableType.Issue,
queryHandler = successfulQueryHandler,
mutationHandler = successfulMutationHandler,
} = {}) => {
const mockApollo = createMockApollo([[issueLabelsQuery, queryHandler]]);
const mockApollo = createMockApollo([
[issueLabelsQuery, queryHandler],
[updateLabelsMutation[issuableType], mutationHandler],
]);
wrapper = shallowMount(LabelsSelectRoot, {
slots,
apolloProvider: mockApollo,
propsData: {
...config,
issuableType: IssuableType.Issue,
issuableType,
labelCreateType: 'project',
workspaceType: 'project',
},
@ -133,4 +148,46 @@ describe('LabelsSelectRoot', () => {
findDropdownContents().vm.$emit('setLabels', [label]);
expect(wrapper.emitted('updateSelectedLabels')).toEqual([[{ labels: [label] }]]);
});
describe.each`
issuableType
${IssuableType.Issue}
${IssuableType.MergeRequest}
${IssuableType.Epic}
`('when updating labels for $issuableType', ({ issuableType }) => {
const label = { id: 'gid://gitlab/ProjectLabel/2' };
it('sets the loading state', async () => {
createComponent({ issuableType });
await nextTick();
findDropdownContents().vm.$emit('setLabels', [label]);
await nextTick();
expect(findSidebarEditableItem().props('loading')).toBe(true);
});
it('updates labels correctly after successful mutation', async () => {
createComponent({ issuableType });
await nextTick();
findDropdownContents().vm.$emit('setLabels', [label]);
await waitForPromises();
expect(findDropdownValue().props('selectedLabels')).toEqual(
updateLabelsMutationResponse.data.updateIssuableLabels.issuable.labels.nodes,
);
});
it('displays an error if mutation was rejected', async () => {
createComponent({ issuableType, mutationHandler: errorQueryHandler });
await nextTick();
findDropdownContents().vm.$emit('setLabels', [label]);
await waitForPromises();
expect(createFlash).toHaveBeenCalledWith({
captureError: true,
error: expect.anything(),
message: 'An error occurred while updating labels.',
});
});
});
});

View File

@ -120,6 +120,7 @@ export const issuableLabelsQueryResponse = {
workspace: {
id: 'workspace-1',
issuable: {
__typename: 'Issue',
id: '1',
labels: {
nodes: [
@ -136,3 +137,18 @@ export const issuableLabelsQueryResponse = {
},
},
};
export const updateLabelsMutationResponse = {
data: {
updateIssuableLabels: {
errors: [],
issuable: {
__typename: 'Issue',
id: '1',
labels: {
nodes: [],
},
},
},
},
};

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline do
let_it_be(:project) { create(:project) }
let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let_it_be(:policy) do
{
'created_at' => '2019-12-13 13:45:04 UTC',
'updated_at' => '2019-12-14 13:45:04 UTC',
'next_run_at' => '2019-12-15 13:45:04 UTC',
'name_regex' => 'test',
'name_regex_keep' => 'regex_keep',
'cadence' => '3month',
'older_than' => '1month',
'keep_n' => 100,
'enabled' => true
}
end
subject(:pipeline) { described_class.new(context) }
describe '#run' do
it 'imports project feature', :aggregate_failures do
allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[policy, 0]]))
end
pipeline.run
policy.each_pair do |key, value|
expect(entity.project.container_expiration_policy.public_send(key)).to eq(value)
end
end
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline do
let_it_be(:project) { create(:project) }
let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let_it_be(:setting) { { 'issue_template_key' => 'test', 'project_key' => 'key' } }
subject(:pipeline) { described_class.new(context) }
describe '#run' do
it 'imports project feature', :aggregate_failures do
allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[setting, 0]]))
end
pipeline.run
setting.each_pair do |key, value|
expect(entity.project.service_desk_setting.public_send(key)).to eq(value)
end
end
end
end

View File

@ -20,6 +20,8 @@ RSpec.describe BulkImports::Projects::Stage do
[4, BulkImports::Projects::Pipelines::ProtectedBranchesPipeline],
[4, BulkImports::Projects::Pipelines::CiPipelinesPipeline],
[4, BulkImports::Projects::Pipelines::ProjectFeaturePipeline],
[4, BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline],
[4, BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline],
[5, BulkImports::Common::Pipelines::WikiPipeline],
[5, BulkImports::Common::Pipelines::UploadsPipeline],
[5, BulkImports::Projects::Pipelines::AutoDevopsPipeline],

View File

@ -236,14 +236,20 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
shared_examples_for 'an attr_writer that demodulizes assigned class names' do |attribute_name|
shared_examples_for 'an attr_writer that assigns class names' do |attribute_name|
let(:batched_migration) { build(:batched_background_migration) }
context 'when a module name exists' do
it 'removes the module name' do
it 'keeps the class with module name' do
batched_migration.public_send(:"#{attribute_name}=", 'Foo::Bar')
expect(batched_migration[attribute_name]).to eq('Foo::Bar')
end
it 'removes leading namespace resolution operator' do
batched_migration.public_send(:"#{attribute_name}=", '::Foo::Bar')
expect(batched_migration[attribute_name]).to eq('Bar')
expect(batched_migration[attribute_name]).to eq('Foo::Bar')
end
end
@ -293,11 +299,11 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
describe '#job_class_name=' do
it_behaves_like 'an attr_writer that demodulizes assigned class names', :job_class_name
it_behaves_like 'an attr_writer that assigns class names', :job_class_name
end
describe '#batch_class_name=' do
it_behaves_like 'an attr_writer that demodulizes assigned class names', :batch_class_name
it_behaves_like 'an attr_writer that assigns class names', :batch_class_name
end
describe '#migrated_tuple_count' do

View File

@ -32,22 +32,15 @@ RSpec.describe ::Packages::Npm::PackagePresenter do
}
end
let(:presenter) { described_class.new(package_name, packages, include_metadata: include_metadata) }
let(:presenter) { described_class.new(package_name, packages) }
subject { presenter.versions }
where(:has_dependencies, :has_metadatum, :include_metadata) do
true | true | true
false | true | true
true | false | true
false | false | true
# TODO : to remove along with packages_npm_abbreviated_metadata
# See https://gitlab.com/gitlab-org/gitlab/-/issues/344827
true | true | false
false | true | false
true | false | false
false | false | false
where(:has_dependencies, :has_metadatum) do
true | true
false | true
true | false
false | false
end
with_them do
@ -80,7 +73,7 @@ RSpec.describe ::Packages::Npm::PackagePresenter do
context 'metadatum' do
::Packages::Npm::PackagePresenter::PACKAGE_JSON_ALLOWED_FIELDS.each do |metadata_field|
if params[:has_metadatum] && params[:include_metadata]
if params[:has_metadatum]
it { expect(subject.dig(package1.version, metadata_field)).not_to be nil }
else
it { expect(subject.dig(package1.version, metadata_field)).to be nil }

View File

@ -589,6 +589,15 @@ RSpec.describe API::Labels do
expect(response).to have_gitlab_http_status(:forbidden)
end
it 'returns 403 if reporter promotes label' do
reporter = create(:user)
project.add_reporter(reporter)
put api("/projects/#{project.id}/labels/promote", reporter), params: { name: label1.name }
expect(response).to have_gitlab_http_status(:forbidden)
end
it 'returns 404 if label does not exist' do
put api("/projects/#{project.id}/labels/promote", user), params: { name: 'unknown' }
@ -601,6 +610,13 @@ RSpec.describe API::Labels do
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('name is missing')
end
it 'returns 400 if project does not have a group' do
project = create(:project, creator_id: user.id, namespace: user.namespace)
put api("/projects/#{project.id}/labels/promote", user), params: { name: label1.name }
expect(response).to have_gitlab_http_status(:bad_request)
end
end
describe "POST /projects/:id/labels/:label_id/subscribe" do

View File

@ -122,6 +122,23 @@ RSpec.describe API::Search do
end
end
context 'when DB timeouts occur from global searches', :aggregate_errors do
%w(
issues
merge_requests
milestones
projects
snippet_titles
users
).each do |scope|
it "returns a 408 error if search with scope: #{scope} times out" do
allow(SearchService).to receive(:new).and_raise ActiveRecord::QueryCanceled
get api(endpoint, user), params: { scope: scope, search: 'awesome' }
expect(response).to have_gitlab_http_status(:request_timeout)
end
end
end
context 'when scope is not supported' do
it 'returns 400 error' do
get api(endpoint, user), params: { scope: 'unsupported', search: 'awesome' }

View File

@ -89,17 +89,6 @@ RSpec.describe Packages::Npm::CreatePackageService do
end
end
end
context 'with packages_npm_abbreviated_metadata disabled' do
before do
stub_feature_flags(packages_npm_abbreviated_metadata: false)
end
it 'creates a package without metadatum' do
expect { subject }
.not_to change { Packages::Npm::Metadatum.count }
end
end
end
describe '#execute' do

View File

@ -41,19 +41,6 @@ RSpec.shared_examples 'handling get metadata requests' do |scope: :project|
# query count can slightly change between the examples so we're using a custom threshold
expect { get(url, headers: headers) }.not_to exceed_query_limit(control).with_threshold(4)
end
context 'with packages_npm_abbreviated_metadata disabled' do
before do
stub_feature_flags(packages_npm_abbreviated_metadata: false)
end
it 'calls the presenter without including metadata' do
expect(::Packages::Npm::PackagePresenter)
.to receive(:new).with(anything, anything, include_metadata: false).and_call_original
subject
end
end
end
shared_examples 'reject metadata request' do |status:|