Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-10-05 03:09:43 +00:00
parent 59429d48eb
commit b164696957
68 changed files with 1260 additions and 257 deletions

View File

@ -133,9 +133,13 @@ export default {
return this.designCollection && this.designCollection.copyState === 'IN_PROGRESS';
},
designDropzoneWrapperClass() {
return this.isDesignListEmpty
? 'col-12'
: 'gl-flex-direction-column col-md-6 col-lg-3 gl-mt-5';
if (!this.isDesignListEmpty) {
return 'gl-flex-direction-column col-md-6 col-lg-3 gl-mt-5';
}
if (this.showToolbar) {
return 'col-12 gl-mt-5';
}
return 'col-12';
},
},
mounted() {

View File

@ -1,4 +1,4 @@
import createFlash from '~/flash';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { __ } from '~/locale';
@ -29,7 +29,7 @@ export default class PayloadDownloader {
PayloadDownloader.downloadFile(data);
})
.catch(() => {
createFlash({
createAlert({
message: __('Error fetching payload data.'),
});
})

View File

@ -1,4 +1,4 @@
import createFlash from '~/flash';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { __ } from '~/locale';
@ -43,7 +43,7 @@ export default class PayloadPreviewer {
})
.catch(() => {
this.spinner.classList.remove('gl-display-inline');
createFlash({
createAlert({
message: __('Error fetching payload data.'),
});
});

View File

@ -1,6 +1,6 @@
import $ from 'jquery';
import { debounce } from 'lodash';
import createFlash from '~/flash';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import { __ } from '~/locale';
@ -30,7 +30,7 @@ export default () => {
$jsBroadcastMessagePreview.html(data);
})
.catch(() =>
createFlash({
createAlert({
message: __('An error occurred while rendering preview broadcast message'),
}),
);

View File

@ -1,6 +1,6 @@
<script>
import { GlModal } from '@gitlab/ui';
import createFlash from '~/flash';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { redirectTo } from '~/lib/utils/url_utility';
import { __, s__ } from '~/locale';
@ -31,7 +31,7 @@ export default {
redirectTo(response.request.responseURL);
})
.catch((error) => {
createFlash({
createAlert({
message: s__('AdminArea|Stopping jobs failed'),
});
throw error;

View File

@ -4,7 +4,7 @@ import $ from 'jquery';
import { getGroups } from '~/api/groups_api';
import { getProjects } from '~/api/projects_api';
import initDeprecatedJQueryDropdown from '~/deprecated_jquery_dropdown';
import createFlash from '~/flash';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { isMetaClick } from '~/lib/utils/common_utils';
import { addDelimiter } from '~/lib/utils/text_utility';
@ -119,7 +119,7 @@ export default class Todos {
})
.catch(() => {
this.updateRowState(target, true);
return createFlash({
return createAlert({
message: __('Error updating status of to-do item.'),
});
});
@ -168,7 +168,7 @@ export default class Todos {
this.updateBadges(data);
})
.catch(() =>
createFlash({
createAlert({
message: __('Error updating status for all to-do items.'),
}),
);

View File

@ -1,6 +1,6 @@
import { debounce } from 'lodash';
import createFlash from '~/flash';
import { createAlert } from '~/flash';
import { __ } from '~/locale';
import InputValidator from '~/validators/input_validator';
import { getGroupPathAvailability } from '~/rest_api';
@ -62,7 +62,7 @@ export default class GroupPathValidator extends InputValidator {
}
})
.catch(() =>
createFlash({
createAlert({
message: __('An error occurred while validating group path'),
}),
);

View File

@ -2,7 +2,7 @@
import { GlButton, GlEmptyState, GlLink, GlLoadingIcon, GlTable } from '@gitlab/ui';
import { s__, __ } from '~/locale';
import createFlash from '~/flash';
import { createAlert } from '~/flash';
import { parseIntPagination, normalizeHeaders } from '~/lib/utils/common_utils';
import { joinPaths } from '~/lib/utils/url_utility';
import { getBulkImportsHistory } from '~/rest_api';
@ -107,7 +107,7 @@ export default {
this.pageInfo = parseIntPagination(normalizeHeaders(headers));
this.historyItems = historyItems;
} catch (e) {
createFlash({ message: DEFAULT_ERROR, captureError: true, error: e });
createAlert({ message: DEFAULT_ERROR, captureError: true, error: e });
} finally {
this.loading = false;
}

View File

@ -1,7 +1,7 @@
<script>
import { GlButton, GlEmptyState, GlIcon, GlLink, GlLoadingIcon, GlTable } from '@gitlab/ui';
import { s__, __ } from '~/locale';
import createFlash from '~/flash';
import { createAlert } from '~/flash';
import { parseIntPagination, normalizeHeaders } from '~/lib/utils/common_utils';
import { getProjects } from '~/rest_api';
import ImportStatus from '~/import_entities/components/import_status.vue';
@ -104,7 +104,7 @@ export default {
this.pageInfo = parseIntPagination(normalizeHeaders(headers));
this.historyItems = historyItems;
} catch (e) {
createFlash({ message: DEFAULT_ERROR, captureError: true, error: e });
createAlert({ message: DEFAULT_ERROR, captureError: true, error: e });
} finally {
this.loading = false;
}

View File

@ -4,7 +4,7 @@ import Vue from 'vue';
import loadAwardsHandler from '~/awards_handler';
import ShortcutsNavigation from '~/behaviors/shortcuts/shortcuts_navigation';
import Diff from '~/diff';
import createFlash from '~/flash';
import { createAlert } from '~/flash';
import initDeprecatedNotes from '~/init_deprecated_notes';
import { initDiffStatsDropdown } from '~/init_diff_stats_dropdown';
import axios from '~/lib/utils/axios_utils';
@ -69,7 +69,7 @@ if (filesContainer.length) {
loadDiffStats();
})
.catch(() => {
createFlash({ message: __('An error occurred while retrieving diff files') });
createAlert({ message: __('An error occurred while retrieving diff files') });
});
} else {
new Diff();

View File

@ -12,7 +12,7 @@ import {
} from '@gitlab/ui';
import { kebabCase } from 'lodash';
import { buildApiUrl } from '~/api/api_utils';
import createFlash from '~/flash';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import csrf from '~/lib/utils/csrf';
import { redirectTo } from '~/lib/utils/url_utility';
@ -220,7 +220,7 @@ export default {
redirectTo(data.web_url);
return;
} catch (error) {
createFlash({
createAlert({
message: s__(
'ForkProject|An error occurred while forking the project. Please try again.',
),

View File

@ -1,4 +1,5 @@
<script>
import getPipelineSchedulesQuery from '../graphql/queries/get_pipeline_schedules.query.graphql';
import PipelineSchedulesTable from './table/pipeline_schedules_table.vue';
export default {
@ -10,11 +11,31 @@ export default {
default: '',
},
},
apollo: {
schedules: {
query: getPipelineSchedulesQuery,
variables() {
return {
projectPath: this.fullPath,
};
},
update({ project }) {
return project?.pipelineSchedules?.nodes || [];
},
},
},
data() {
return {
schedules: [],
};
},
};
</script>
<template>
<div>
<pipeline-schedules-table />
<!-- Tabs will be addressed in #371989 -->
<pipeline-schedules-table :schedules="schedules" />
</div>
</template>

View File

@ -0,0 +1,64 @@
<script>
import { GlButton, GlButtonGroup, GlTooltipDirective as GlTooltip } from '@gitlab/ui';
import { s__ } from '~/locale';
export const i18n = {
playTooltip: s__('PipelineSchedules|Run pipeline schedule'),
editTooltip: s__('PipelineSchedules|Edit pipeline schedule'),
deleteTooltip: s__('PipelineSchedules|Delete pipeline schedule'),
takeOwnershipTooltip: s__('PipelineSchedules|Take ownership of pipeline schedule'),
};
export default {
i18n,
components: {
GlButton,
GlButtonGroup,
},
directives: {
GlTooltip,
},
props: {
schedule: {
type: Object,
required: true,
},
},
computed: {
canPlay() {
return this.schedule.userPermissions.playPipelineSchedule;
},
canTakeOwnership() {
return this.schedule.userPermissions.takeOwnershipPipelineSchedule;
},
canUpdate() {
return this.schedule.userPermissions.updatePipelineSchedule;
},
canRemove() {
return this.schedule.userPermissions.adminPipelineSchedule;
},
},
};
</script>
<template>
<div class="gl-display-flex gl-justify-content-end">
<gl-button-group>
<gl-button v-if="canPlay" v-gl-tooltip :title="$options.i18n.playTooltip" icon="play" />
<gl-button
v-if="canTakeOwnership"
v-gl-tooltip
:title="$options.i18n.takeOwnershipTooltip"
icon="user"
/>
<gl-button v-if="canUpdate" v-gl-tooltip :title="$options.i18n.editTooltip" icon="pencil" />
<gl-button
v-if="canRemove"
v-gl-tooltip
:title="$options.i18n.deleteTooltip"
icon="remove"
variant="danger"
/>
</gl-button-group>
</div>
</template>

View File

@ -0,0 +1,32 @@
<script>
import CiBadge from '~/vue_shared/components/ci_badge_link.vue';
export default {
components: {
CiBadge,
},
props: {
schedule: {
type: Object,
required: true,
},
},
computed: {
hasPipeline() {
return this.schedule.lastPipeline;
},
lastPipelineStatus() {
return this.schedule?.lastPipeline?.detailedStatus;
},
},
};
</script>
<template>
<div>
<ci-badge v-if="hasPipeline" :status="lastPipelineStatus" class="gl-vertical-align-middle" />
<span v-else data-testid="pipeline-schedule-status-text">
{{ s__('PipelineSchedules|None') }}
</span>
</div>
</template>

View File

@ -0,0 +1,32 @@
<script>
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
export default {
components: {
TimeAgoTooltip,
},
props: {
schedule: {
type: Object,
required: true,
},
},
computed: {
showTimeAgo() {
return this.schedule.active && this.schedule.nextRunAt;
},
realNextRunTime() {
return this.schedule.realNextRun;
},
},
};
</script>
<template>
<div>
<time-ago-tooltip v-if="showTimeAgo" :time="realNextRunTime" />
<span v-else data-testid="pipeline-schedule-inactive">
{{ s__('PipelineSchedules|Inactive') }}
</span>
</div>
</template>

View File

@ -0,0 +1,29 @@
<script>
import { GlAvatar, GlAvatarLink } from '@gitlab/ui';
export default {
components: {
GlAvatar,
GlAvatarLink,
},
props: {
schedule: {
type: Object,
required: true,
},
},
computed: {
owner() {
return this.schedule.owner;
},
},
};
</script>
<template>
<div>
<gl-avatar-link :href="owner.webPath" :title="owner.name" class="gl-ml-3">
<gl-avatar :size="32" :src="owner.avatarUrl" />
</gl-avatar-link>
</div>
</template>

View File

@ -0,0 +1,36 @@
<script>
import { GlIcon, GlLink } from '@gitlab/ui';
export default {
components: {
GlIcon,
GlLink,
},
props: {
schedule: {
type: Object,
required: true,
},
},
computed: {
iconName() {
return this.schedule.forTag ? 'tag' : 'fork';
},
refPath() {
return this.schedule.refPath;
},
refDisplay() {
return this.schedule.refForDisplay;
},
},
};
</script>
<template>
<div>
<gl-icon :name="iconName" />
<span v-if="refPath">
<gl-link :href="refPath" class="gl-text-gray-900">{{ refDisplay }}</gl-link>
</span>
</div>
</template>

View File

@ -1,13 +1,92 @@
<script>
import { GlTableLite } from '@gitlab/ui';
import { s__ } from '~/locale';
import PipelineScheduleActions from './cells/pipeline_schedule_actions.vue';
import PipelineScheduleLastPipeline from './cells/pipeline_schedule_last_pipeline.vue';
import PipelineScheduleNextRun from './cells/pipeline_schedule_next_run.vue';
import PipelineScheduleOwner from './cells/pipeline_schedule_owner.vue';
import PipelineScheduleTarget from './cells/pipeline_schedule_target.vue';
export default {
fields: [
{
key: 'description',
label: s__('PipelineSchedules|Description'),
columnClass: 'gl-w-40p',
},
{
key: 'target',
label: s__('PipelineSchedules|Target'),
columnClass: 'gl-w-10p',
},
{
key: 'pipeline',
label: s__('PipelineSchedules|Last Pipeline'),
columnClass: 'gl-w-10p',
},
{
key: 'next',
label: s__('PipelineSchedules|Next Run'),
columnClass: 'gl-w-15p',
},
{
key: 'owner',
label: s__('PipelineSchedules|Owner'),
columnClass: 'gl-w-10p',
},
{
key: 'actions',
label: '',
columnClass: 'gl-w-15p',
},
],
components: {
GlTableLite,
PipelineScheduleActions,
PipelineScheduleLastPipeline,
PipelineScheduleNextRun,
PipelineScheduleOwner,
PipelineScheduleTarget,
},
props: {
schedules: {
type: Array,
required: true,
},
},
};
</script>
<template>
<gl-table-lite />
<gl-table-lite :fields="$options.fields" :items="schedules" stacked="md">
<template #table-colgroup="{ fields }">
<col v-for="field in fields" :key="field.key" :class="field.columnClass" />
</template>
<template #cell(description)="{ item }">
<span data-testid="pipeline-schedule-description">
{{ item.description }}
</span>
</template>
<template #cell(target)="{ item }">
<pipeline-schedule-target :schedule="item" />
</template>
<template #cell(pipeline)="{ item }">
<pipeline-schedule-last-pipeline :schedule="item" />
</template>
<template #cell(next)="{ item }">
<pipeline-schedule-next-run :schedule="item" />
</template>
<template #cell(owner)="{ item }">
<pipeline-schedule-owner :schedule="item" />
</template>
<template #cell(actions)="{ item }">
<pipeline-schedule-actions :schedule="item" />
</template>
</gl-table-lite>
</template>

View File

@ -0,0 +1,40 @@
query getPipelineSchedulesQuery($projectPath: ID!) {
project(fullPath: $projectPath) {
id
pipelineSchedules {
nodes {
id
description
forTag
refPath
refForDisplay
lastPipeline {
id
detailedStatus {
id
group
icon
label
text
detailsPath
}
}
active
nextRunAt
realNextRun
owner {
id
avatarUrl
name
webPath
}
userPermissions {
playPipelineSchedule
takeOwnershipPipelineSchedule
updatePipelineSchedule
adminPipelineSchedule
}
}
}
}
}

View File

@ -1,8 +1,6 @@
<script>
import { GlSafeHtmlDirective } from '@gitlab/ui';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { setAttributes } from '~/lib/utils/dom_utils';
import { BIDI_CHARS, BIDI_CHARS_CLASS_LIST, BIDI_CHAR_TOOLTIP } from '../constants';
export default {
directives: {
@ -27,34 +25,6 @@ export default {
required: true,
},
},
computed: {
formattedContent() {
let { content } = this;
BIDI_CHARS.forEach((bidiChar) => {
if (content.includes(bidiChar)) {
content = content.replace(bidiChar, this.wrapBidiChar(bidiChar));
}
});
return content;
},
},
methods: {
wrapBidiChar(bidiChar) {
const span = document.createElement('span');
setAttributes(span, {
class: BIDI_CHARS_CLASS_LIST,
title: BIDI_CHAR_TOOLTIP,
'data-testid': 'bidi-wrapper',
});
span.innerText = bidiChar;
return span.outerHTML;
},
},
};
</script>
<template>
@ -79,6 +49,6 @@ export default {
<pre
class="gl-p-0! gl-w-full gl-overflow-visible! gl-border-none! code highlight gl-line-height-normal"
><code><span :id="`LC${number}`" v-safe-html="formattedContent" :lang="language" class="line" data-testid="content"></span></code></pre>
><code><span :id="`LC${number}`" v-safe-html="content" :lang="language" class="line" data-testid="content"></span></code></pre>
</div>
</template>

View File

@ -1,5 +1,3 @@
import { __ } from '~/locale';
// Language map from Rouge::Lexer to highlight.js
// Rouge::Lexer - We use it on the BE to determine the language of a source file (https://github.com/rouge-ruby/rouge/blob/master/docs/Languages.md).
// Highlight.js - We use it on the FE to highlight the syntax of a source file (https://github.com/highlightjs/highlight.js/tree/main/src/languages).
@ -139,9 +137,7 @@ export const BIDI_CHARS = [
export const BIDI_CHARS_CLASS_LIST = 'unicode-bidi has-tooltip';
export const BIDI_CHAR_TOOLTIP = __(
'Potentially unwanted character detected: Unicode BiDi Control',
);
export const BIDI_CHAR_TOOLTIP = 'Potentially unwanted character detected: Unicode BiDi Control';
export const HLJS_COMMENT_SELECTOR = 'hljs-comment';

View File

@ -1,6 +1,8 @@
import { HLJS_ON_AFTER_HIGHLIGHT } from '../constants';
import wrapComments from './wrap_comments';
import linkDependencies from './link_dependencies';
import wrapBidiChars from './wrap_bidi_chars';
export const HLJS_ON_AFTER_HIGHLIGHT = 'after:highlight';
/**
* Registers our plugins for Highlight.js
@ -10,6 +12,7 @@ import linkDependencies from './link_dependencies';
* @param {Object} hljs - the Highlight.js instance.
*/
export const registerPlugins = (hljs, fileType, rawContent) => {
hljs.addPlugin({ [HLJS_ON_AFTER_HIGHLIGHT]: wrapBidiChars });
hljs.addPlugin({ [HLJS_ON_AFTER_HIGHLIGHT]: wrapComments });
hljs.addPlugin({
[HLJS_ON_AFTER_HIGHLIGHT]: (result) => linkDependencies(result, fileType, rawContent),

View File

@ -0,0 +1,30 @@
import {
BIDI_CHARS,
BIDI_CHARS_CLASS_LIST,
BIDI_CHAR_TOOLTIP,
} from '~/vue_shared/components/source_viewer/constants';
/**
* Highlight.js plugin for wrapping BIDI chars.
* This ensures potentially dangerous BIDI characters are highlighted.
*
* Plugin API: https://github.com/highlightjs/highlight.js/blob/main/docs/plugin-api.rst
*
* @param {Object} Result - an object that represents the highlighted result from Highlight.js
*/
function wrapBidiChar(bidiChar) {
return `<span class="${BIDI_CHARS_CLASS_LIST}" title="${BIDI_CHAR_TOOLTIP}">${bidiChar}</span>`;
}
export default (result) => {
let { value } = result;
BIDI_CHARS.forEach((bidiChar) => {
if (value.includes(bidiChar)) {
value = value.replace(bidiChar, wrapBidiChar(bidiChar));
}
});
// eslint-disable-next-line no-param-reassign
result.value = value; // Highlight.js expects the result param to be mutated for plugins to work
};

View File

@ -24,13 +24,21 @@ module Clusters
# rubocop: disable CodeReuse/ActiveRecord
def project_authorizations
ancestor_ids = project.group ? project.ancestors.select(:id) : project.namespace_id
namespace_ids = if project.group
if include_descendants?
all_namespace_ids
else
ancestor_namespace_ids
end
else
project.namespace_id
end
Clusters::Agents::ProjectAuthorization
.where(project_id: project.id)
.joins(agent: :project)
.preload(agent: :project)
.where(cluster_agents: { projects: { namespace_id: ancestor_ids } })
.where(cluster_agents: { projects: { namespace_id: namespace_ids } })
.with_available_ci_access_fields(project)
.to_a
end
@ -49,17 +57,35 @@ module Clusters
authorizations[:group_id].eq(ordered_ancestors_cte.table[:id])
).join_sources
Clusters::Agents::GroupAuthorization
authorized_groups = Clusters::Agents::GroupAuthorization
.with(ordered_ancestors_cte.to_arel)
.joins(cte_join_sources)
.joins(agent: :project)
.where('projects.namespace_id IN (SELECT id FROM ordered_ancestors)')
.with_available_ci_access_fields(project)
.order(Arel.sql('agent_id, array_position(ARRAY(SELECT id FROM ordered_ancestors)::bigint[], agent_group_authorizations.group_id)'))
.select('DISTINCT ON (agent_id) agent_group_authorizations.*')
.preload(agent: :project)
.to_a
authorized_groups = if include_descendants?
authorized_groups.where(projects: { namespace_id: all_namespace_ids })
else
authorized_groups.where('projects.namespace_id IN (SELECT id FROM ordered_ancestors)')
end
authorized_groups.to_a
end
# rubocop: enable CodeReuse/ActiveRecord
def ancestor_namespace_ids
project.ancestors.select(:id)
end
def all_namespace_ids
project.root_ancestor.self_and_descendants.select(:id)
end
def include_descendants?
Feature.enabled?(:agent_authorization_include_descendants, project)
end
end
end

View File

@ -32,6 +32,14 @@ module GraphqlTriggers
merge_request
)
end
def self.merge_request_merge_status_updated(merge_request)
GitlabSchema.subscriptions.trigger(
'mergeRequestMergeStatusUpdated',
{ issuable_id: merge_request.to_gid },
merge_request
)
end
end
GraphqlTriggers.prepend_mod

View File

@ -245,6 +245,12 @@ class MergeRequest < ApplicationRecord
Gitlab::Timeless.timeless(merge_request, &block)
end
after_transition any => [:unchecked, :cannot_be_merged_recheck, :checking, :cannot_be_merged_rechecking, :can_be_merged, :cannot_be_merged] do |merge_request, transition|
if Feature.enabled?(:trigger_mr_subscription_on_merge_status_change, merge_request.project)
GraphqlTriggers.merge_request_merge_status_updated(merge_request)
end
end
# rubocop: disable CodeReuse/ServiceClass
after_transition [:unchecked, :checking] => :cannot_be_merged do |merge_request, transition|
if merge_request.notify_conflict?

View File

@ -267,7 +267,7 @@ class IssuableBaseService < ::BaseProjectService
end
def after_update(issuable)
handle_description_updated(issuable) if Feature.enabled?(:broadcast_issuable_description_updated)
handle_description_updated(issuable)
end
def handle_description_updated(issuable)

View File

@ -194,13 +194,10 @@ class WebHookService
headers = {
'Content-Type' => 'application/json',
'User-Agent' => "GitLab/#{Gitlab::VERSION}",
Gitlab::WebHooks::GITLAB_EVENT_HEADER => self.class.hook_to_event(hook_name)
Gitlab::WebHooks::GITLAB_EVENT_HEADER => self.class.hook_to_event(hook_name),
Gitlab::WebHooks::GITLAB_INSTANCE_HEADER => Gitlab.config.gitlab.base_url
}
if Feature.enabled?(:webhooks_gitlab_instance_header)
headers[Gitlab::WebHooks::GITLAB_INSTANCE_HEADER] = Gitlab.config.gitlab.base_url
end
headers['X-Gitlab-Token'] = Gitlab::Utils.remove_line_breaks(hook.token) if hook.token.present?
headers.merge!(Gitlab::WebHooks::RecursionDetection.header(hook))
end

View File

@ -1,6 +1,7 @@
- breadcrumb_title _("Schedules")
- page_title _("Pipeline Schedules")
- add_page_specific_style 'page_bundles/pipeline_schedules'
- add_page_specific_style 'page_bundles/ci_status'
#pipeline-schedules-callout{ data: { docs_url: help_page_path('ci/pipelines/schedules'), illustration_url: image_path('illustrations/pipeline_schedule_callout.svg') } }

View File

@ -1,8 +1,8 @@
---
name: webhooks_gitlab_instance_header
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/98624
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/375001
name: agent_authorization_include_descendants
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/95774
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/371310
milestone: '15.5'
type: development
group: group::integrations
group: group::configure
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: broadcast_issuable_description_updated
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/98458
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/375183
milestone: '15.5'
type: development
group: group::project management
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: trigger_mr_subscription_on_merge_status_change
introduced_by_url: "https://gitlab.com/gitlab-org/gitlab/-/merge_requests/99213"
rollout_issue_url: "https://gitlab.com/gitlab-org/gitlab/-/issues/375704"
milestone: '15.5'
type: development
group: group::code review
default_enabled: false

View File

@ -662,9 +662,10 @@ Enterprise Edition instance. This has some implications:
- Regular migrations run before the new code is running on the instance.
- [Post-deployment migrations](database/post_deployment_migrations.md) run _after_
the new code is deployed, when the instance is configured to do that.
- [Background migrations](database/background_migrations.md) run in Sidekiq, and
should only be done for migrations that would take an extreme amount of
time at GitLab.com scale.
- [Batched background migrations](database/batched_background_migrations.md) run in Sidekiq, and
should be used for migrations that
[exceed the post-deployment migration time limit](migration_style_guide.md#how-long-a-migration-should-take)
at GitLab.com scale.
1. **Sidekiq workers** [cannot change in a backwards-incompatible way](sidekiq/compatibility_across_updates.md):
1. Sidekiq queues are not drained before a deploy happens, so there are
workers in the queue from the previous version of GitLab.

View File

@ -126,7 +126,7 @@ Validating the foreign key scans the whole table and makes sure that each relati
Fortunately, this does not lock the source table (`users`) while running.
NOTE:
When using [background migrations](background_migrations.md), foreign key validation should happen in the next GitLab release.
When using [batched background migrations](batched_background_migrations.md), foreign key validation should happen in the next GitLab release.
Migration file for validating the foreign key:
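The migration file itself sits outside this hunk; as a rough sketch of what such a validation migration looks like (the class, table, and column names here are hypothetical, not the file the guide refers to):
```ruby
# Sketch only: names are illustrative.
class ValidateForeignKeyOnEmails < Gitlab::Database::Migration[2.0]
  def up
    # Runs ALTER TABLE ... VALIDATE CONSTRAINT; this scans the whole table but,
    # as noted above, does not lock the source table (`users`) while running.
    validate_foreign_key :emails, :user_id
  end

  def down
    # No-op: validating a constraint cannot be meaningfully reverted.
  end
end
```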

View File

@ -296,7 +296,7 @@ when migrating a column in a large table (for example, `issues`). Background
migrations spread the work / load over a longer time period, without slowing
down deployments.
For more information, see [the documentation on cleaning up background migrations](background_migrations.md#cleaning-up).
For more information, see [the documentation on cleaning up batched background migrations](batched_background_migrations.md#cleaning-up).
## Adding Indexes

View File

@ -670,3 +670,8 @@ You can view failures in two ways:
ON transition_logs.batched_background_migration_job_id = jobs.id
WHERE transition_logs.next_status = '2' AND migration.job_class_name = "CLASS_NAME";
```
## Legacy background migrations
Batched background migrations replaced the [legacy background migrations framework](background_migrations.md).
Refer to that documentation for any changes that involve the legacy framework.

View File

@ -26,7 +26,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
- [Different types of migrations](../migration_style_guide.md#choose-an-appropriate-migration-type)
- [Create a regular migration](../migration_style_guide.md#create-a-regular-schema-migration), including creating new models
- [Post-deployment migrations guidelines](post_deployment_migrations.md) and [how to create one](post_deployment_migrations.md#creating-migrations)
- [Background migrations guidelines](background_migrations.md)
- [Legacy background migrations guidelines](background_migrations.md)
- [Batched background migrations guidelines](batched_background_migrations.md)
- [Deleting migrations](deleting_migrations.md)
- [Running database migrations](database_debugging.md#migration-wrangling)
@ -36,7 +36,6 @@ info: To determine the technical writer assigned to the Stage/Group associated w
- [Migrations style guide](../migration_style_guide.md) for creating safe SQL migrations
- [Testing Rails migrations](../testing_guide/testing_migrations_guide.md) guide
- [Post deployment migrations](post_deployment_migrations.md)
- [Background migrations](background_migrations.md)
- [Swapping tables](swapping_tables.md)
- [Deleting migrations](deleting_migrations.md)
- [SQL guidelines](../sql.md) for working with SQL queries

View File

@ -114,7 +114,7 @@ falling into an endless loop as described in following
When dealing with data migrations the preferred way to iterate over a large volume of data is using
`EachBatch`.
A special case of data migration is a [background migration](background_migrations.md#scheduling)
A special case of data migration is a [batched background migration](batched_background_migrations.md)
where the actual data modification is executed in a background job. The migration code that
determines the data ranges (slices) and schedules the background jobs uses `each_batch`.
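As a rough sketch of that scheduling pattern (the class name, table, and batch size are illustrative; the isolated-model style mirrors the `ScheduleCapTitleLengthOnIssues` example touched later in this commit):
```ruby
# Sketch only: names and sizes are hypothetical.
class ScheduleSomeBackfill < Gitlab::Database::Migration[2.0]
  BATCH_SIZE = 1_000

  disable_ddl_transaction!

  # Isolated model so the migration does not depend on application code.
  class Issue < ::ApplicationRecord
    include EachBatch

    self.table_name = 'issues'
  end

  def up
    Issue.each_batch(of: BATCH_SIZE) do |relation|
      # Each `relation` is one slice of at most BATCH_SIZE rows.
      first_id = relation.minimum(:id)
      last_id = relation.maximum(:id)
      # A real migration would enqueue a background job covering
      # first_id..last_id here; the scheduling call is omitted in this sketch.
    end
  end

  def down
    # no-op
  end
end
```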

View File

@ -202,7 +202,7 @@ end
If you have to clean up a nullable column for a [high-traffic table](../migration_style_guide.md#high-traffic-tables)
(for example, the `artifacts` in `ci_builds`), your background migration goes on for a while and
it needs an additional [background migration cleaning up](background_migrations.md#cleaning-up)
it needs an additional [batched background migration cleaning up](batched_background_migrations.md#cleaning-up)
in the release after adding the data migration.
In that rare case you need 3 releases end-to-end:

View File

@ -196,7 +196,7 @@ to add a background migration for the 13.0 milestone (current),
`db/post_migrate/20200501000002_schedule_cap_title_length_on_issues.rb`:
```ruby
class ScheduleCapTitleLengthOnIssues < Gitlab::Database::Migration[1.0]
class ScheduleCapTitleLengthOnIssues < Gitlab::Database::Migration[2.0]
# Info on how many records will be affected on GitLab.com
# time each batch needs to run on average, etc ...
BATCH_SIZE = 5000
@ -207,30 +207,25 @@ class ScheduleCapTitleLengthOnIssues < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
class Issue < ::ApplicationRecord
include EachBatch
self.table_name = 'issues'
end
def up
queue_background_migration_jobs_by_range_at_intervals(
Issue.where('char_length(title_html) > 1024'),
ISSUES_MIGRATION,
DELAY_INTERVAL,
queue_batched_background_migration(
ISSUES_BACKGROUND_MIGRATION,
:issues,
:id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE
)
end
def down
# no-op : the part of the title_html after the limit is lost forever
delete_batched_background_migration(ISSUES_BACKGROUND_MIGRATION, :issues, :id, [])
end
end
```
To keep this guide short, we skipped the definition of the background migration and only
provided a high level example of the post-deployment migration that is used to schedule the batches.
You can find more information on the guide about [background migrations](background_migrations.md)
You can find more information in the [batched background migrations](batched_background_migrations.md) guide.
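For orientation only, the job class such a scheduling migration points at is usually shaped like the sketch below. The `BatchedMigrationJob` helpers have changed between releases, so the `each_sub_batch` arguments here are an assumption rather than a canonical 15.5 API; the class name and the 1024-character cap follow the example above.
```ruby
# Sketch only: treat the helper signature as an assumption, not a reference.
module Gitlab
  module BackgroundMigration
    class CapTitleLengthOnIssues < BatchedMigrationJob
      def perform
        each_sub_batch(operation_name: :update_all) do |sub_batch|
          # Truncate titles that exceed the new 1024-character limit.
          sub_batch
            .where('char_length(title_html) > 1024')
            .update_all('title_html = LEFT(title_html, 1024)')
        end
      end
    end
  end
end
```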
#### Validate the text limit (next release)
@ -278,7 +273,7 @@ end
If you have to clean up a text column for a really [large table](https://gitlab.com/gitlab-org/gitlab/-/blob/master/rubocop/rubocop-migrations.yml#L3)
(for example, the `artifacts` in `ci_builds`), your background migration goes on for a while and
it needs an additional [background migration cleaning up](background_migrations.md#cleaning-up)
it needs an additional [batched background migration cleaning up](batched_background_migrations.md#cleaning-up)
in the release after adding the data migration.
In that rare case you need 3 releases end-to-end:

View File

@ -240,9 +240,11 @@ Include in the MR description:
- Check queries timing (If any): In a single transaction, cumulative query time executed in a migration
needs to fit comfortably in `15s` - preferably much less than that - on GitLab.com.
- For column removals, make sure the column has been [ignored in a previous release](database/avoiding_downtime_in_migrations.md#dropping-columns)
- Check [background migrations](database/background_migrations.md):
- Check [batched background migrations](database/batched_background_migrations.md):
- Establish a time estimate for execution on GitLab.com. For historical purposes,
it's highly recommended to include this estimate in the merge request description.
This can be the number of expected batches multiplied by the delay interval; for
example, 2,000 batches at a 2-minute interval is roughly 2.8 days.
- Manually trigger the [database testing](database/database_migration_pipeline.md) job (`db:gitlabcom-database-testing`) in the `test` stage.
- If a single `update` takes less than `1s`, the query can be placed
directly in a regular migration (inside `db/migrate`).
- Background migrations are normally used for, but are not limited to:
@ -253,8 +255,6 @@ Include in the MR description:
it's suggested to treat background migrations as
[post migrations](migration_style_guide.md#choose-an-appropriate-migration-type):
place them in `db/post_migrate` instead of `db/migrate`.
- If a migration [has tracking enabled](database/background_migrations.md#background-jobs-tracking),
ensure `mark_all_as_succeeded` is called even if no work is done.
- Check [timing guidelines for migrations](migration_style_guide.md#how-long-a-migration-should-take)
- Check migrations are reversible and implement a `#down` method
- Check new table migrations:

View File

@ -275,7 +275,7 @@ in that limit. Singular query timings should fit within the [standard limit](dat
In case you need to insert, update, or delete a significant amount of data, you:
- Must disable the single transaction with `disable_ddl_transaction!`.
- Should consider doing it in a [Background Migration](database/background_migrations.md).
- Should consider doing it in a [batched background migration](database/batched_background_migrations.md).
## Migration helpers and versioning
@ -1266,7 +1266,7 @@ by an integer. For example: `users` would turn into `users0`
## Using models in migrations (discouraged)
The use of models in migrations is generally discouraged. As such models are
[contraindicated for background migrations](database/background_migrations.md#isolation),
[contraindicated for batched background migrations](database/batched_background_migrations.md#isolation),
the model needs to be declared in the migration.
If using a model in the migrations, you should first

View File

@ -236,12 +236,7 @@ For more information about supported events for Webhooks, go to [Webhook events]
## Delivery headers
> `X-Gitlab-Instance` header introduced in GitLab 15.5 [with a flag](../../../administration/feature_flags.md) named `webhooks_gitlab_instance_header`. Disabled by default.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available,
ask an administrator to [enable the feature flag](../../../administration/feature_flags.md) named `webhooks_gitlab_instance_header`.
The feature is not ready for production use.
> `X-Gitlab-Instance` header [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/31333) in GitLab 15.5.
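These headers are assembled in the `WebHookService` change earlier in this commit; condensed from that hunk (not a new API), the resulting header hash looks roughly like:
```ruby
# Condensed from the WebHookService diff above; GITLAB_EVENT_HEADER and
# GITLAB_INSTANCE_HEADER resolve to 'X-Gitlab-Event' and 'X-Gitlab-Instance'.
headers = {
  'Content-Type' => 'application/json',
  'User-Agent' => "GitLab/#{Gitlab::VERSION}",
  Gitlab::WebHooks::GITLAB_EVENT_HEADER => self.class.hook_to_event(hook_name),
  Gitlab::WebHooks::GITLAB_INSTANCE_HEADER => Gitlab.config.gitlab.base_url
}
headers['X-Gitlab-Token'] = Gitlab::Utils.remove_line_breaks(hook.token) if hook.token.present?
```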
Webhook requests to your endpoint include the following headers:

View File

@ -105,16 +105,25 @@ module Gitlab
def read_write
connection = nil
transaction_open = nil
attempts = 3
if prevent_load_balancer_retries_in_transaction?
attempts = 1 if pool.connection.transaction_open?
end
# In the event of a failover the primary may be briefly unavailable.
# Instead of immediately grinding to a halt we'll retry the operation
# a few times.
retry_with_backoff do
# It is not possible to preserve transaction state during a retry, so we do not retry in that case.
retry_with_backoff(attempts: attempts) do |attempt|
connection = pool.connection
transaction_open = connection.transaction_open?
yield connection
rescue StandardError => e
if transaction_open && connection_error?(e)
# No leaking will happen on the final attempt. Leaks are caused by subsequent retries
not_final_attempt = attempt && attempt < attempts
if transaction_open && connection_error?(e) && not_final_attempt
::Gitlab::Database::LoadBalancing::Logger.warn(
event: :transaction_leak,
message: 'A write transaction has leaked during database fail-over'
@ -171,7 +180,7 @@ module Gitlab
end
# Yields a block, retrying it upon error using an exponential backoff.
def retry_with_backoff(retries = 3, time = 2)
def retry_with_backoff(attempts: 3, time: 2)
# In CI we only use the primary, but databases may not always be
# available (or take a few seconds to become available). Retrying in
# this case can slow down CI jobs. In addition, retrying with _only_
@ -183,12 +192,12 @@ module Gitlab
# replicas were configured.
return yield if primary_only?
retried = 0
attempt = 1
last_error = nil
while retried < retries
while attempt <= attempts
begin
return yield
return yield attempt # Yield the current attempt count
rescue StandardError => error
raise error unless connection_error?(error)
@ -198,7 +207,7 @@ module Gitlab
last_error = error
sleep(time)
retried += 1
attempt += 1
time **= 2
end
end
@ -332,6 +341,10 @@ module Gitlab
row = ar_connection.select_all(sql).first
row['location'] if row
end
def prevent_load_balancer_retries_in_transaction?
Gitlab::Utils.to_boolean(ENV['PREVENT_LOAD_BALANCER_RETRIES_IN_TRANSACTION'], default: false)
end
end
end
end

View File

@ -39,6 +39,7 @@ module Gitlab
incident_management_alerts
issues_edit
kubernetes_agent
manage
pipeline_authoring
quickactions
search

View File

@ -29421,9 +29421,21 @@ msgstr ""
msgid "PipelineSchedules|All"
msgstr ""
msgid "PipelineSchedules|Delete pipeline schedule"
msgstr ""
msgid "PipelineSchedules|Description"
msgstr ""
msgid "PipelineSchedules|Edit pipeline schedule"
msgstr ""
msgid "PipelineSchedules|Inactive"
msgstr ""
msgid "PipelineSchedules|Last Pipeline"
msgstr ""
msgid "PipelineSchedules|Next Run"
msgstr ""
@ -29433,12 +29445,21 @@ msgstr ""
msgid "PipelineSchedules|Only the owner of a pipeline schedule can make changes to it. Do you want to take ownership of this schedule?"
msgstr ""
msgid "PipelineSchedules|Owner"
msgstr ""
msgid "PipelineSchedules|Provide a short description for this pipeline"
msgstr ""
msgid "PipelineSchedules|Run pipeline schedule"
msgstr ""
msgid "PipelineSchedules|Take ownership"
msgstr ""
msgid "PipelineSchedules|Take ownership of pipeline schedule"
msgstr ""
msgid "PipelineSchedules|Target"
msgstr ""
@ -36417,9 +36438,6 @@ msgstr ""
msgid "Select Git revision"
msgstr ""
msgid "Select Page"
msgstr ""
msgid "Select Profile"
msgstr ""
@ -36537,6 +36555,9 @@ msgstr ""
msgid "Select projects"
msgstr ""
msgid "Select report"
msgstr ""
msgid "Select reviewer(s)"
msgstr ""

View File

@ -23,7 +23,7 @@ module Gitlab
h2 :users_over_subscription
table :subscription_history
span :no_valid_license_alert, text: /no longer has a valid license/
div :no_valid_license_alert, text: /no longer has a valid license/
h3 :no_active_subscription_title, text: /do not have an active subscription/
def accept_terms

View File

@ -9,6 +9,10 @@ RSpec.describe Clusters::AgentAuthorizationsFinder do
let_it_be(:subgroup2) { create(:group, parent: subgroup1) }
let_it_be(:bottom_level_group) { create(:group, parent: subgroup2) }
let_it_be(:non_ancestor_group) { create(:group, parent: top_level_group) }
let_it_be(:non_ancestor_project) { create(:project, namespace: non_ancestor_group) }
let_it_be(:non_ancestor_agent) { create(:cluster_agent, project: non_ancestor_project) }
let_it_be(:agent_configuration_project) { create(:project, namespace: subgroup1) }
let_it_be(:requesting_project, reload: true) { create(:project, namespace: bottom_level_group) }
@ -56,6 +60,20 @@ RSpec.describe Clusters::AgentAuthorizationsFinder do
it { is_expected.to be_empty }
end
context 'agent configuration project shares a root namespace, but does not belong to an ancestor of the given project' do
let!(:project_authorization) { create(:agent_project_authorization, agent: non_ancestor_agent, project: requesting_project) }
it { is_expected.to match_array([project_authorization]) }
context 'agent_authorization_include_descendants feature flag is disabled' do
before do
stub_feature_flags(agent_authorization_include_descendants: false)
end
it { is_expected.to be_empty }
end
end
context 'with project authorizations present' do
let!(:authorization) { create(:agent_project_authorization, agent: production_agent, project: requesting_project) }
@ -116,6 +134,20 @@ RSpec.describe Clusters::AgentAuthorizationsFinder do
end
end
context 'agent configuration project does not belong to an ancestor of the authorized group' do
let!(:group_authorization) { create(:agent_group_authorization, agent: non_ancestor_agent, group: bottom_level_group) }
it { is_expected.to match_array([group_authorization]) }
context 'agent_authorization_include_descendants feature flag is disabled' do
before do
stub_feature_flags(agent_authorization_include_descendants: false)
end
it { is_expected.to be_empty }
end
end
it_behaves_like 'access_as' do
let!(:authorization) { create(:agent_group_authorization, agent: production_agent, group: top_level_group, config: config) }
end

View File

@ -2,41 +2,74 @@
require 'spec_helper'
RSpec.describe Projects::PipelineSchedulesController, '(JavaScript fixtures)', type: :controller do
RSpec.describe 'Pipeline schedules (JavaScript fixtures)' do
include ApiHelpers
include JavaScriptFixturesHelpers
include GraphqlHelpers
let(:namespace) { create(:namespace, name: 'frontend-fixtures' ) }
let(:project) { create(:project, :public, :repository) }
let(:user) { project.first_owner }
let!(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: user) }
let!(:pipeline_schedule_inactive) { create(:ci_pipeline_schedule, :inactive, project: project, owner: user) }
let!(:pipeline_schedule_populated) { create(:ci_pipeline_schedule, project: project, owner: user) }
let!(:pipeline_schedule_variable1) { create(:ci_pipeline_schedule_variable, key: 'foo', value: 'foovalue', pipeline_schedule: pipeline_schedule_populated) }
let!(:pipeline_schedule_variable2) { create(:ci_pipeline_schedule_variable, key: 'bar', value: 'barvalue', pipeline_schedule: pipeline_schedule_populated) }
render_views
describe Projects::PipelineSchedulesController, type: :controller do
render_views
before do
sign_in(user)
stub_feature_flags(pipeline_schedules_vue: false)
before do
sign_in(user)
stub_feature_flags(pipeline_schedules_vue: false)
end
it 'pipeline_schedules/edit.html' do
get :edit, params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: pipeline_schedule.id
}
expect(response).to be_successful
end
it 'pipeline_schedules/edit_with_variables.html' do
get :edit, params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: pipeline_schedule_populated.id
}
expect(response).to be_successful
end
end
it 'pipeline_schedules/edit.html' do
get :edit, params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: pipeline_schedule.id
}
describe GraphQL::Query, type: :request do
before do
pipeline_schedule.pipelines << build(:ci_pipeline, project: project)
end
expect(response).to be_successful
end
fixtures_path = 'graphql/pipeline_schedules/'
get_pipeline_schedules_query = 'get_pipeline_schedules.query.graphql'
it 'pipeline_schedules/edit_with_variables.html' do
get :edit, params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: pipeline_schedule_populated.id
}
let_it_be(:query) do
get_graphql_query_as_string("pipeline_schedules/graphql/queries/#{get_pipeline_schedules_query}")
end
expect(response).to be_successful
it "#{fixtures_path}#{get_pipeline_schedules_query}.json" do
post_graphql(query, current_user: user, variables: { projectPath: project.full_path })
expect_graphql_errors_to_be_empty
end
it "#{fixtures_path}#{get_pipeline_schedules_query}.as_guest.json" do
guest = create(:user)
project.add_guest(guest)
post_graphql(query, current_user: guest, variables: { projectPath: project.full_path })
expect_graphql_errors_to_be_empty
end
end
end

View File

@ -6,7 +6,7 @@ import AxiosMockAdapter from 'axios-mock-adapter';
import { kebabCase } from 'lodash';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createFlash from '~/flash';
import { createAlert } from '~/flash';
import * as urlUtility from '~/lib/utils/url_utility';
import ForkForm from '~/pages/projects/forks/new/components/fork_form.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
@ -449,7 +449,7 @@ describe('ForkForm component', () => {
await submitForm();
expect(urlUtility.redirectTo).not.toHaveBeenCalled();
expect(createFlash).toHaveBeenCalledWith({
expect(createAlert).toHaveBeenCalledWith({
message: 'An error occurred while forking the project. Please try again.',
});
});

View File

@ -1,12 +1,33 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import PipelineSchedules from '~/pipeline_schedules/components/pipeline_schedules.vue';
import PipelineSchedulesTable from '~/pipeline_schedules/components/table/pipeline_schedules_table.vue';
import getPipelineSchedulesQuery from '~/pipeline_schedules/graphql/queries/get_pipeline_schedules.query.graphql';
import { mockGetPipelineSchedulesGraphQLResponse, mockPipelineScheduleNodes } from '../mock_data';
Vue.use(VueApollo);
describe('Pipeline schedules app', () => {
let wrapper;
const createComponent = () => {
wrapper = shallowMount(PipelineSchedules);
const successHandler = jest.fn().mockResolvedValue(mockGetPipelineSchedulesGraphQLResponse);
const createMockApolloProvider = (handler) => {
const requestHandlers = [[getPipelineSchedulesQuery, handler]];
return createMockApollo(requestHandlers);
};
const createComponent = (handler = successHandler) => {
wrapper = shallowMount(PipelineSchedules, {
provide: {
fullPath: 'gitlab-org/gitlab',
},
apolloProvider: createMockApolloProvider(handler),
});
};
const findTable = () => wrapper.findComponent(PipelineSchedulesTable);
@ -22,4 +43,12 @@ describe('Pipeline schedules app', () => {
it('displays table', () => {
expect(findTable().exists()).toBe(true);
});
it('fetches query and passes an array of pipeline schedules', async () => {
expect(successHandler).toHaveBeenCalled();
await waitForPromises();
expect(findTable().props('schedules')).toEqual(mockPipelineScheduleNodes);
});
});

View File

@ -0,0 +1,38 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import PipelineScheduleActions from '~/pipeline_schedules/components/table/cells/pipeline_schedule_actions.vue';
import { mockPipelineScheduleNodes, mockPipelineScheduleAsGuestNodes } from '../../../mock_data';
describe('Pipeline schedule actions', () => {
let wrapper;
const defaultProps = {
schedule: mockPipelineScheduleNodes[0],
};
const createComponent = (props = defaultProps) => {
wrapper = shallowMount(PipelineScheduleActions, {
propsData: {
...props,
},
});
};
const findAllButtons = () => wrapper.findAllComponents(GlButton);
afterEach(() => {
wrapper.destroy();
});
it('displays action buttons', () => {
createComponent();
expect(findAllButtons()).toHaveLength(3);
});
it('does not display action buttons', () => {
createComponent({ schedule: mockPipelineScheduleAsGuestNodes[0] });
expect(findAllButtons()).toHaveLength(0);
});
});

View File

@ -0,0 +1,42 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import CiBadge from '~/vue_shared/components/ci_badge_link.vue';
import PipelineScheduleLastPipeline from '~/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline.vue';
import { mockPipelineScheduleNodes } from '../../../mock_data';
describe('Pipeline schedule last pipeline', () => {
let wrapper;
const defaultProps = {
schedule: mockPipelineScheduleNodes[2],
};
const createComponent = (props = defaultProps) => {
wrapper = shallowMountExtended(PipelineScheduleLastPipeline, {
propsData: {
...props,
},
});
};
const findCIBadge = () => wrapper.findComponent(CiBadge);
const findStatusText = () => wrapper.findByTestId('pipeline-schedule-status-text');
afterEach(() => {
wrapper.destroy();
});
it('displays pipeline status', () => {
createComponent();
expect(findCIBadge().exists()).toBe(true);
expect(findCIBadge().props('status')).toBe(defaultProps.schedule.lastPipeline.detailedStatus);
expect(findStatusText().exists()).toBe(false);
});
it('displays "none" status text', () => {
createComponent({ schedule: mockPipelineScheduleNodes[0] });
expect(findStatusText().text()).toBe('None');
expect(findCIBadge().exists()).toBe(false);
});
});

View File

@ -0,0 +1,43 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import PipelineScheduleNextRun from '~/pipeline_schedules/components/table/cells/pipeline_schedule_next_run.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import { mockPipelineScheduleNodes } from '../../../mock_data';
describe('Pipeline schedule next run', () => {
let wrapper;
const defaultProps = {
schedule: mockPipelineScheduleNodes[0],
};
const createComponent = (props = defaultProps) => {
wrapper = shallowMountExtended(PipelineScheduleNextRun, {
propsData: {
...props,
},
});
};
const findTimeAgo = () => wrapper.findComponent(TimeAgoTooltip);
const findInactive = () => wrapper.findByTestId('pipeline-schedule-inactive');
afterEach(() => {
wrapper.destroy();
});
it('displays time ago', () => {
createComponent();
expect(findTimeAgo().exists()).toBe(true);
expect(findInactive().exists()).toBe(false);
expect(findTimeAgo().props('time')).toBe(defaultProps.schedule.realNextRun);
});
it('displays inactive state', () => {
const inactiveSchedule = mockPipelineScheduleNodes[1];
createComponent({ schedule: inactiveSchedule });
expect(findInactive().text()).toBe('Inactive');
expect(findTimeAgo().exists()).toBe(false);
});
});

View File

@ -0,0 +1,40 @@
import { GlAvatar, GlAvatarLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import PipelineScheduleOwner from '~/pipeline_schedules/components/table/cells/pipeline_schedule_owner.vue';
import { mockPipelineScheduleNodes } from '../../../mock_data';
describe('Pipeline schedule owner', () => {
let wrapper;
const defaultProps = {
schedule: mockPipelineScheduleNodes[0],
};
const createComponent = (props = defaultProps) => {
wrapper = shallowMount(PipelineScheduleOwner, {
propsData: {
...props,
},
});
};
const findAvatar = () => wrapper.findComponent(GlAvatar);
const findAvatarLink = () => wrapper.findComponent(GlAvatarLink);
beforeEach(() => {
createComponent();
});
afterEach(() => {
wrapper.destroy();
});
it('displays avatar', () => {
expect(findAvatar().exists()).toBe(true);
expect(findAvatar().props('src')).toBe(defaultProps.schedule.owner.avatarUrl);
});
it('avatar links to user', () => {
expect(findAvatarLink().attributes('href')).toBe(defaultProps.schedule.owner.webPath);
});
});

View File

@ -0,0 +1,41 @@
import { GlIcon, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import PipelineScheduleTarget from '~/pipeline_schedules/components/table/cells/pipeline_schedule_target.vue';
import { mockPipelineScheduleNodes } from '../../../mock_data';
describe('Pipeline schedule target', () => {
let wrapper;
const defaultProps = {
schedule: mockPipelineScheduleNodes[0],
};
const createComponent = (props = defaultProps) => {
wrapper = shallowMount(PipelineScheduleTarget, {
propsData: {
...props,
},
});
};
const findIcon = () => wrapper.findComponent(GlIcon);
const findLink = () => wrapper.findComponent(GlLink);
beforeEach(() => {
createComponent();
});
afterEach(() => {
wrapper.destroy();
});
it('displays icon', () => {
expect(findIcon().exists()).toBe(true);
expect(findIcon().props('name')).toBe('fork');
});
it('displays ref link', () => {
expect(findLink().attributes('href')).toBe(defaultProps.schedule.refPath);
expect(findLink().text()).toBe(defaultProps.schedule.refForDisplay);
});
});

View File

@ -1,15 +1,25 @@
import { shallowMount } from '@vue/test-utils';
import { GlTableLite } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import PipelineSchedulesTable from '~/pipeline_schedules/components/table/pipeline_schedules_table.vue';
import { mockPipelineScheduleNodes } from '../../mock_data';
describe('Pipeline schedules table', () => {
let wrapper;
const createComponent = () => {
wrapper = shallowMount(PipelineSchedulesTable);
const defaultProps = {
schedules: mockPipelineScheduleNodes,
};
const createComponent = (props = defaultProps) => {
wrapper = mountExtended(PipelineSchedulesTable, {
propsData: {
...props,
},
});
};
const findTable = () => wrapper.findComponent(GlTableLite);
const findScheduleDescription = () => wrapper.findByTestId('pipeline-schedule-description');
beforeEach(() => {
createComponent();
@ -22,4 +32,8 @@ describe('Pipeline schedules table', () => {
it('displays table', () => {
expect(findTable().exists()).toBe(true);
});
it('displays schedule description', () => {
expect(findScheduleDescription().text()).toBe('pipeline schedule');
});
});

View File

@ -0,0 +1,24 @@
import mockGetPipelineSchedulesGraphQLResponse from 'test_fixtures/graphql/pipeline_schedules/get_pipeline_schedules.query.graphql.json';
import mockGetPipelineSchedulesAsGuestGraphQLResponse from 'test_fixtures/graphql/pipeline_schedules/get_pipeline_schedules.query.graphql.as_guest.json';
const {
data: {
project: {
pipelineSchedules: { nodes },
},
},
} = mockGetPipelineSchedulesGraphQLResponse;
const {
data: {
project: {
pipelineSchedules: { nodes: guestNodes },
},
},
} = mockGetPipelineSchedulesAsGuestGraphQLResponse;
export const mockPipelineScheduleNodes = nodes;
export const mockPipelineScheduleAsGuestNodes = guestNodes;
export { mockGetPipelineSchedulesGraphQLResponse };

View File

@ -1,10 +1,5 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ChunkLine from '~/vue_shared/components/source_viewer/components/chunk_line.vue';
import {
BIDI_CHARS,
BIDI_CHARS_CLASS_LIST,
BIDI_CHAR_TOOLTIP,
} from '~/vue_shared/components/source_viewer/constants';
const DEFAULT_PROPS = {
number: 2,
@ -31,7 +26,6 @@ describe('Chunk Line component', () => {
const findLineLink = () => wrapper.find('.file-line-num');
const findBlameLink = () => wrapper.find('.file-line-blame');
const findContent = () => wrapper.findByTestId('content');
const findWrappedBidiChars = () => wrapper.findAllByTestId('bidi-wrapper');
beforeEach(() => {
createComponent();
@ -40,22 +34,6 @@ describe('Chunk Line component', () => {
afterEach(() => wrapper.destroy());
describe('rendering', () => {
it('wraps BiDi characters', () => {
const content = `// some content ${BIDI_CHARS.toString()} with BiDi chars`;
createComponent({ content });
const wrappedBidiChars = findWrappedBidiChars();
expect(wrappedBidiChars.length).toBe(BIDI_CHARS.length);
wrappedBidiChars.wrappers.forEach((_, i) => {
expect(wrappedBidiChars.at(i).text()).toBe(BIDI_CHARS[i]);
expect(wrappedBidiChars.at(i).attributes()).toMatchObject({
class: BIDI_CHARS_CLASS_LIST,
title: BIDI_CHAR_TOOLTIP,
});
});
});
it('renders a blame link', () => {
expect(findBlameLink().attributes()).toMatchObject({
href: `${DEFAULT_PROPS.blamePath}#L${DEFAULT_PROPS.number}`,

View File

@ -1,6 +1,9 @@
import { registerPlugins } from '~/vue_shared/components/source_viewer/plugins/index';
import { HLJS_ON_AFTER_HIGHLIGHT } from '~/vue_shared/components/source_viewer/constants';
import {
registerPlugins,
HLJS_ON_AFTER_HIGHLIGHT,
} from '~/vue_shared/components/source_viewer/plugins/index';
import wrapComments from '~/vue_shared/components/source_viewer/plugins/wrap_comments';
import wrapBidiChars from '~/vue_shared/components/source_viewer/plugins/wrap_bidi_chars';
jest.mock('~/vue_shared/components/source_viewer/plugins/wrap_comments');
const hljsMock = { addPlugin: jest.fn() };
@ -9,6 +12,7 @@ describe('Highlight.js plugin registration', () => {
beforeEach(() => registerPlugins(hljsMock));
it('registers our plugins', () => {
expect(hljsMock.addPlugin).toHaveBeenCalledWith({ [HLJS_ON_AFTER_HIGHLIGHT]: wrapBidiChars });
expect(hljsMock.addPlugin).toHaveBeenCalledWith({ [HLJS_ON_AFTER_HIGHLIGHT]: wrapComments });
});
});

View File

@ -0,0 +1,17 @@
import wrapBidiChars from '~/vue_shared/components/source_viewer/plugins/wrap_bidi_chars';
import {
BIDI_CHARS,
BIDI_CHARS_CLASS_LIST,
BIDI_CHAR_TOOLTIP,
} from '~/vue_shared/components/source_viewer/constants';
describe('Highlight.js plugin for wrapping BiDi characters', () => {
it.each(BIDI_CHARS)('wraps %s BiDi char', (bidiChar) => {
const inputValue = `// some content ${bidiChar} with BiDi chars`;
const outputValue = `// some content <span class="${BIDI_CHARS_CLASS_LIST}" title="${BIDI_CHAR_TOOLTIP}">${bidiChar}</span>`;
const hljsResultMock = { value: inputValue };
wrapBidiChars(hljsResultMock);
expect(hljsResultMock.value).toContain(outputValue);
});
});

View File

@ -89,4 +89,18 @@ RSpec.describe GraphqlTriggers do
GraphqlTriggers.merge_request_reviewers_updated(merge_request)
end
end
describe '.merge_request_merge_status_updated' do
it 'triggers the mergeRequestMergeStatusUpdated subscription' do
merge_request = build_stubbed(:merge_request)
expect(GitlabSchema.subscriptions).to receive(:trigger).with(
'mergeRequestMergeStatusUpdated',
{ issuable_id: merge_request.to_gid },
merge_request
).and_call_original
GraphqlTriggers.merge_request_merge_status_updated(merge_request)
end
end
end

View File

@ -210,10 +210,25 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
it 'uses a retry with exponential backoffs' do
expect(lb).to receive(:retry_with_backoff).and_yield
expect(lb).to receive(:retry_with_backoff).and_yield(0)
lb.read_write { 10 }
end
it 'does not raise NoMethodError error when primary_only?' do
connection = ActiveRecord::Base.connection_pool.connection
expected_error = Gitlab::Database::LoadBalancing::CONNECTION_ERRORS.first
allow(lb).to receive(:primary_only?).and_return(true)
expect do
lb.read_write do
connection.transaction do
raise expected_error
end
end
end.to raise_error(expected_error)
end
end
describe '#host' do
@ -330,6 +345,19 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
expect { lb.retry_with_backoff { raise } }.to raise_error(RuntimeError)
end
it 'yields the current retry iteration' do
allow(lb).to receive(:connection_error?).and_return(true)
expect(lb).to receive(:release_primary_connection).exactly(3).times
iterations = []
# time: 0 so that we don't sleep and slow down the test
# rubocop: disable Style/Semicolon
expect { lb.retry_with_backoff(attempts: 3, time: 0) { |i| iterations << i; raise } }.to raise_error(RuntimeError)
# rubocop: enable Style/Semicolon
expect(iterations).to eq([1, 2, 3])
end
end
describe '#connection_error?' do

View File

@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis, :delete do
include StubENV
let(:model) { ApplicationRecord }
let(:db_host) { model.connection_pool.db_config.host }
@@ -19,6 +20,10 @@ RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis
model.connection.execute(<<~SQL)
CREATE TABLE IF NOT EXISTS #{test_table_name} (id SERIAL PRIMARY KEY, value INTEGER)
SQL
# The load balancer sleeps between attempts to retry a query.
# Mocking the sleep call significantly reduces the runtime of this spec file.
allow(model.connection.load_balancer).to receive(:sleep)
end
after do
@@ -46,36 +51,62 @@ RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis
conn.execute("INSERT INTO #{test_table_name} (value) VALUES (2)")
end
it 'logs a warning when violating transaction semantics with writes' do
conn = model.connection
context 'with the PREVENT_LOAD_BALANCER_RETRIES_IN_TRANSACTION environment variable not set' do
it 'logs a warning when violating transaction semantics with writes' do
conn = model.connection
expect(::Gitlab::Database::LoadBalancing::Logger).to receive(:warn).with(hash_including(event: :transaction_leak))
expect(::Gitlab::Database::LoadBalancing::Logger).to receive(:warn).with(hash_including(event: :transaction_leak))
conn.transaction do
expect(conn).to be_transaction_open
conn.transaction do
expect(conn).to be_transaction_open
execute(conn)
expect(conn).not_to be_transaction_open
end
values = conn.execute("SELECT value FROM #{test_table_name}").to_a.map { |row| row['value'] }
expect(values).to contain_exactly(2) # Does not include 1 because the transaction was aborted and leaked
end
it 'does not log a warning when no transaction is open to be leaked' do
conn = model.connection
expect(::Gitlab::Database::LoadBalancing::Logger)
.not_to receive(:warn).with(hash_including(event: :transaction_leak))
expect(conn).not_to be_transaction_open
execute(conn)
expect(conn).not_to be_transaction_open
values = conn.execute("SELECT value FROM #{test_table_name}").to_a.map { |row| row['value'] }
expect(values).to contain_exactly(1, 2) # Includes both rows because there was no transaction to roll back
end
end
context 'with the PREVENT_LOAD_BALANCER_RETRIES_IN_TRANSACTION environment variable set' do
before do
stub_env('PREVENT_LOAD_BALANCER_RETRIES_IN_TRANSACTION' => '1')
end
values = conn.execute("SELECT value FROM #{test_table_name}").to_a.map { |row| row['value'] }
expect(values).to contain_exactly(2) # Does not include 1 because the transaction was aborted and leaked
end
it 'raises an exception when a retry would occur during a transaction' do
expect(::Gitlab::Database::LoadBalancing::Logger)
.not_to receive(:warn).with(hash_including(event: :transaction_leak))
it 'does not log a warning when no transaction is open to be leaked' do
conn = model.connection
expect do
model.transaction do
execute(model.connection)
end
end.to raise_error(ActiveRecord::StatementInvalid) { |e| expect(e.cause).to be_a(PG::ConnectionBad) }
end
expect(::Gitlab::Database::LoadBalancing::Logger)
.not_to receive(:warn).with(hash_including(event: :transaction_leak))
it 'retries when not in a transaction' do
expect(::Gitlab::Database::LoadBalancing::Logger)
.not_to receive(:warn).with(hash_including(event: :transaction_leak))
expect(conn).not_to be_transaction_open
execute(conn)
expect(conn).not_to be_transaction_open
values = conn.execute("SELECT value FROM #{test_table_name}").to_a.map { |row| row['value'] }
expect(values).to contain_exactly(1, 2) # Includes both rows because there was no transaction to roll back
expect { execute(model.connection) }.not_to raise_error
end
end
end
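Taken together, the two contexts above describe a guard rather than a new code path: retries still happen outside transactions, but with the environment variable set a connection error inside an open transaction is surfaced instead of retried. A hedged sketch of such a guard, with an invented method name; only transaction_open? and the environment variable come from the specs:

# Hypothetical guard, not GitLab's implementation.
def retry_allowed?(connection)
  prevent_in_transaction = ENV['PREVENT_LOAD_BALANCER_RETRIES_IN_TRANSACTION'] == '1'

  # Retrying is always fine outside a transaction; inside one it is only
  # allowed when the kill switch is unset (which is what leaks the transaction
  # and produces the :transaction_leak warning asserted above).
  !connection.transaction_open? || !prevent_in_transaction
end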

View File

@@ -4199,6 +4199,45 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'state machine transitions' do
let(:project) { create(:project, :repository) }
shared_examples_for 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription' do
specify do
expect(GraphqlTriggers).not_to receive(:merge_request_merge_status_updated)
transition!
end
end
shared_examples_for 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription' do
specify do
expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(subject).and_call_original
transition!
end
context 'when trigger_mr_subscription_on_merge_status_change is disabled' do
before do
stub_feature_flags(trigger_mr_subscription_on_merge_status_change: false)
end
it_behaves_like 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
end
shared_examples 'for an invalid state transition' do
specify 'is not a valid state transition' do
expect { transition! }.to raise_error(StateMachines::InvalidTransition)
end
end
shared_examples 'for a valid state transition' do
it 'is a valid state transition' do
expect { transition! }
.to change { subject.merge_status }
.from(merge_status.to_s)
.to(expected_merge_status)
end
end
describe '#unlock_mr' do
subject { create(:merge_request, state: 'locked', source_project: project, merge_jid: 123) }
@@ -4213,22 +4252,58 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
describe '#mark_as_preparing' do
subject { create(:merge_request, source_project: project, merge_status: merge_status) }
let(:expected_merge_status) { 'preparing' }
def transition!
subject.mark_as_preparing!
end
context 'when the status is unchecked' do
let(:merge_status) { :unchecked }
include_examples 'for a valid state transition'
it_behaves_like 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is checking' do
let(:merge_status) { :checking }
include_examples 'for an invalid state transition'
end
context 'when the status is can_be_merged' do
let(:merge_status) { :can_be_merged }
include_examples 'for an invalid state transition'
end
context 'when the status is cannot_be_merged_recheck' do
let(:merge_status) { :cannot_be_merged_recheck }
include_examples 'for an invalid state transition'
end
context 'when the status is cannot_be_merged' do
let(:merge_status) { :cannot_be_merged }
include_examples 'for an invalid state transition'
end
context 'when the status is cannot_be_merged_rechecking' do
let(:merge_status) { :cannot_be_merged_rechecking }
include_examples 'for an invalid state transition'
end
end
describe '#mark_as_unchecked' do
subject { create(:merge_request, source_project: project, merge_status: merge_status) }
shared_examples 'for an invalid state transition' do
it 'is not a valid state transition' do
expect { subject.mark_as_unchecked! }.to raise_error(StateMachines::InvalidTransition)
end
end
shared_examples 'for an valid state transition' do
it 'is a valid state transition' do
expect { subject.mark_as_unchecked! }
.to change { subject.merge_status }
.from(merge_status.to_s)
.to(expected_merge_status)
end
def transition!
subject.mark_as_unchecked!
end
context 'when the status is unchecked' do
@@ -4241,14 +4316,16 @@ RSpec.describe MergeRequest, factory_default: :keep do
let(:merge_status) { :checking }
let(:expected_merge_status) { 'unchecked' }
include_examples 'for an valid state transition'
include_examples 'for a valid state transition'
it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is can_be_merged' do
let(:merge_status) { :can_be_merged }
let(:expected_merge_status) { 'unchecked' }
include_examples 'for an valid state transition'
include_examples 'for a valid state transition'
it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is cannot_be_merged_recheck' do
@@ -4261,14 +4338,164 @@ RSpec.describe MergeRequest, factory_default: :keep do
let(:merge_status) { :cannot_be_merged }
let(:expected_merge_status) { 'cannot_be_merged_recheck' }
include_examples 'for an valid state transition'
include_examples 'for a valid state transition'
it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is cannot_be_merged_rechecking' do
let(:merge_status) { :cannot_be_merged_rechecking }
let(:expected_merge_status) { 'cannot_be_merged_recheck' }
include_examples 'for a valid state transition'
it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
end
describe '#mark_as_checking' do
subject { create(:merge_request, source_project: project, merge_status: merge_status) }
def transition!
subject.mark_as_checking!
end
context 'when the status is unchecked' do
let(:merge_status) { :unchecked }
let(:expected_merge_status) { 'checking' }
include_examples 'for a valid state transition'
it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is checking' do
let(:merge_status) { :checking }
include_examples 'for an invalid state transition'
end
context 'when the status is can_be_merged' do
let(:merge_status) { :can_be_merged }
include_examples 'for an invalid state transition'
end
context 'when the status is cannot_be_merged_recheck' do
let(:merge_status) { :cannot_be_merged_recheck }
let(:expected_merge_status) { 'cannot_be_merged_rechecking' }
include_examples 'for a valid state transition'
it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is cannot_be_merged' do
let(:merge_status) { :cannot_be_merged }
let(:expected_merge_status) { 'cannot_be_merged_recheck' }
include_examples 'for an valid state transition'
include_examples 'for an invalid state transition'
end
context 'when the status is cannot_be_merged_rechecking' do
let(:merge_status) { :cannot_be_merged_rechecking }
include_examples 'for an invalid state transition'
end
end
describe '#mark_as_mergeable' do
subject { create(:merge_request, source_project: project, merge_status: merge_status) }
let(:expected_merge_status) { 'can_be_merged' }
def transition!
subject.mark_as_mergeable!
end
context 'when the status is unchecked' do
let(:merge_status) { :unchecked }
include_examples 'for a valid state transition'
it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is checking' do
let(:merge_status) { :checking }
include_examples 'for a valid state transition'
it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is can_be_merged' do
let(:merge_status) { :can_be_merged }
include_examples 'for an invalid state transition'
end
context 'when the status is cannot_be_merged_recheck' do
let(:merge_status) { :cannot_be_merged_recheck }
include_examples 'for a valid state transition'
it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is cannot_be_merged' do
let(:merge_status) { :cannot_be_merged }
include_examples 'for an invalid state transition'
end
context 'when the status is cannot_be_merged_rechecking' do
let(:merge_status) { :cannot_be_merged_rechecking }
include_examples 'for a valid state transition'
it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
end
describe '#mark_as_unmergeable' do
subject { create(:merge_request, source_project: project, merge_status: merge_status) }
let(:expected_merge_status) { 'cannot_be_merged' }
def transition!
subject.mark_as_unmergeable!
end
context 'when the status is unchecked' do
let(:merge_status) { :unchecked }
include_examples 'for a valid state transition'
it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is checking' do
let(:merge_status) { :checking }
include_examples 'for a valid state transition'
it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is can_be_merged' do
let(:merge_status) { :can_be_merged }
include_examples 'for an invalid state transition'
end
context 'when the status is cannot_be_merged_recheck' do
let(:merge_status) { :cannot_be_merged_recheck }
include_examples 'for a valid state transition'
it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is cannot_be_merged' do
let(:merge_status) { :cannot_be_merged }
include_examples 'for an invalid state transition'
end
context 'when the status is cannot_be_merged_rechecking' do
let(:merge_status) { :cannot_be_merged_rechecking }
include_examples 'for a valid state transition'
it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
end
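The shared examples at the top of this hunk encode one pattern: every merge_status transition except the one into preparing broadcasts the subscription, and the broadcast is gated by the trigger_mr_subscription_on_merge_status_change feature flag. A sketch of the kind of state-machine hook that would satisfy those expectations; the hook placement, target-state list, and feature-flag actor are assumptions read off the specs, not quoted from the MergeRequest model:

# Illustrative sketch only.
state_machine :merge_status, initial: :unchecked do
  after_transition any => [:unchecked, :checking, :cannot_be_merged_rechecking,
                           :can_be_merged, :cannot_be_merged, :cannot_be_merged_recheck] do |merge_request|
    if Feature.enabled?(:trigger_mr_subscription_on_merge_status_change, merge_request.project)
      GraphqlTriggers.merge_request_merge_status_updated(merge_request)
    end
  end
end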

View File

@@ -518,18 +518,6 @@ RSpec.describe Issues::UpdateService, :mailer do
update_issue(description: 'Changed description')
end
context 'when broadcast_issuable_description_updated is disabled' do
before do
stub_feature_flags(broadcast_issuable_description_updated: false)
end
it 'does not trigger GraphQL description updated subscription' do
expect(GraphqlTriggers).not_to receive(:issuable_description_updated)
update_issue(title: 'Changed title')
end
end
end
context 'when description is not changed' do

View File

@@ -631,18 +631,6 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
update_merge_request(description: 'updated description')
end
context 'when broadcast_issuable_description_updated is disabled' do
before do
stub_feature_flags(broadcast_issuable_description_updated: false)
end
it 'does not trigger GraphQL description updated subscription' do
expect(GraphqlTriggers).not_to receive(:issuable_description_updated)
update_merge_request(description: 'updated description')
end
end
end
context 'when description is not changed' do

View File

@@ -96,18 +96,6 @@ RSpec.describe WorkItems::UpdateService do
update_work_item
end
context 'when broadcast_issuable_description_updated is disabled' do
before do
stub_feature_flags(broadcast_issuable_description_updated: false)
end
it 'does not trigger GraphQL description updated subscription' do
expect(GraphqlTriggers).not_to receive(:issuable_description_updated)
update_work_item
end
end
end
context 'when description is not changed' do