Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-08-02 15:09:10 +00:00
parent c658e2d292
commit 9c33625b8e
99 changed files with 1249 additions and 547 deletions

View File

@ -564,7 +564,7 @@ lib/gitlab/checks/** @proglottis @toon @zj-gitlab
/doc/development/iterating_tables_in_batches.md @aqualls
/doc/development/kubernetes.md @sselhorn
/doc/development/lfs.md @aqualls
/doc/development/licensed_feature_availability.md @sselhorn
/doc/development/ee_features.md @fneill
/doc/development/logging.md @msedlakjakubowski
/doc/development/maintenance_mode.md @axil
/doc/development/new_fe_guide/modules/widget_extensions.md @aqualls

View File

@ -97,7 +97,7 @@ populate-qa-tests-var:
- tooling/bin/find_change_diffs ${CHANGES_DIFFS_DIR}
script:
- 'echo "QA_TESTS: $QA_TESTS"'
- exit_code=0 && tooling/bin/qa/package_and_qa_check ${CHANGES_DIFFS_DIR} || exit_code=$?
- exit_code=0 && tooling/bin/qa/run_qa_check ${CHANGES_DIFFS_DIR} || exit_code=$?
- echo $exit_code
- |
if [ $exit_code -eq 0 ]; then
@ -105,7 +105,7 @@ populate-qa-tests-var:
elif [ $exit_code -eq 1 ]; then
exit 1
else
echo "Downstream jobs will not be triggered because package_and_qa_check exited with code: $exit_code"
echo "Downstream jobs will not be triggered because run_qa_check exited with code: $exit_code"
fi
# These jobs often time out, so temporarily use private runners and a long timeout: https://gitlab.com/gitlab-org/gitlab/-/issues/238563
tags:

View File

@ -0,0 +1,13 @@
stages:
  - review

include:
  - local: .gitlab/ci/global.gitlab-ci.yml
  - local: .gitlab/ci/rules.gitlab-ci.yml

no-op:
  extends:
    - .review:rules:start-review-app-pipeline
  stage: review
  script:
    - echo "Skip Review App because the MR includes only quarantine changes"

View File

@ -23,12 +23,42 @@ review-cleanup:
- ruby -rrubygems scripts/review_apps/automated_cleanup.rb
- gcp_cleanup
review-app-pipeline-generate:
image: ${GITLAB_DEPENDENCY_PROXY}ruby:${RUBY_VERSION}
stage: prepare
extends:
- .review:rules:start-review-app-pipeline
artifacts:
expire_in: 7d
paths:
- ${CHANGES_DIFFS_DIR}/*
- review-app-pipeline.yml
variables:
CHANGES_DIFFS_DIR: tmp/diffs
before_script:
- source scripts/utils.sh
- install_gitlab_gem
- tooling/bin/find_change_diffs ${CHANGES_DIFFS_DIR}
script:
- exit_code=0 && tooling/bin/qa/run_qa_check ${CHANGES_DIFFS_DIR} || exit_code=$?
- |
if [ $exit_code -eq 0 ]; then
echo "Review App will use the full pipeline"
cp .gitlab/ci/review-apps/main.gitlab-ci.yml review-app-pipeline.yml
elif [ $exit_code -eq 2 ]; then
echo "Skip Review App because the MR includes only quarantine changes"
cp .gitlab/ci/review-apps/skip-qa.gitlab-ci.yml review-app-pipeline.yml
else
exit $exit_code
fi
start-review-app-pipeline:
extends:
- .review:rules:start-review-app-pipeline
resource_group: review/${CI_COMMIT_REF_SLUG}${SCHEDULE_TYPE} # CI_ENVIRONMENT_SLUG is not available here and we want this to be the same as the environment
stage: review
needs:
- review-app-pipeline-generate
- job: build-assets-image
artifacts: false
# These variables are set in the pipeline schedules.
@ -39,7 +69,8 @@ start-review-app-pipeline:
DAST_RUN: $DAST_RUN
trigger:
include:
- local: .gitlab/ci/review-apps/main.gitlab-ci.yml
- artifact: review-app-pipeline.yml
job: review-app-pipeline-generate
strategy: depend
danger-review:

View File

@ -6017,7 +6017,6 @@ Layout/LineLength:
- 'spec/services/repository_archive_clean_up_service_spec.rb'
- 'spec/services/resource_access_tokens/create_service_spec.rb'
- 'spec/services/resource_access_tokens/revoke_service_spec.rb'
- 'spec/services/resource_events/change_labels_service_spec.rb'
- 'spec/services/resource_events/change_state_service_spec.rb'
- 'spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb'
- 'spec/services/search/global_service_spec.rb'

View File

@ -139,15 +139,15 @@ export default {
title,
fingerprint,
fingerprint_sha256,
projects_with_write_access,
created_at,
projects_with_write_access: projects,
created_at: created,
}) => ({
id,
title,
fingerprint,
fingerprint_sha256,
projects: projects_with_write_access,
created: created_at,
projects,
created,
}),
);
} catch (error) {

View File

@ -19,24 +19,22 @@ export const toYmd = (date) => dateFormat(date, dateFormats.isoDate);
* @returns {Object}
*/
export const extractFilterQueryParameters = (url = '') => {
/* eslint-disable camelcase */
const {
source_branch_name = null,
target_branch_name = null,
author_username = null,
milestone_title = null,
assignee_username = [],
label_name = [],
source_branch_name: selectedSourceBranch = null,
target_branch_name: selectedTargetBranch = null,
author_username: selectedAuthor = null,
milestone_title: selectedMilestone = null,
assignee_username: selectedAssigneeList = [],
label_name: selectedLabelList = [],
} = urlQueryToFilter(url);
/* eslint-enable camelcase */
return {
selectedSourceBranch: source_branch_name,
selectedTargetBranch: target_branch_name,
selectedAuthor: author_username,
selectedMilestone: milestone_title,
selectedAssigneeList: assignee_username,
selectedLabelList: label_name,
selectedSourceBranch,
selectedTargetBranch,
selectedAuthor,
selectedMilestone,
selectedAssigneeList,
selectedLabelList,
};
};

View File

@ -4,15 +4,15 @@ export const parsedData = (state) => {
const byAuthorEmail = {};
const total = {};
state.chartData.forEach(({ date, author_name, author_email }) => {
state.chartData.forEach(({ date, author_name: name, author_email: email }) => {
total[date] = total[date] ? total[date] + 1 : 1;
const normalizedEmail = author_email.toLowerCase();
const normalizedEmail = email.toLowerCase();
const authorData = byAuthorEmail[normalizedEmail];
if (!authorData) {
byAuthorEmail[normalizedEmail] = {
name: author_name,
name,
commits: 1,
dates: {
[date]: 1,

View File

@ -175,6 +175,7 @@ export default class Diff {
}
}
// eslint-disable-next-line class-methods-use-this
formatElementToObject = (element) => {
const key = element.attributes['data-file-hash'].value;
const name = element.attributes['data-diff-toggle-entity'].value;
@ -192,6 +193,7 @@ export default class Diff {
return $elements.toArray().map(diff.formatElementToObject).reduce(merge);
};
// eslint-disable-next-line class-methods-use-this
showRawViewer = (fileHash, elements) => {
if (elements === undefined) return;
@ -202,6 +204,7 @@ export default class Diff {
elements.rawViewer.classList.remove('hidden');
};
// eslint-disable-next-line class-methods-use-this
showRenderedViewer = (fileHash, elements) => {
if (elements === undefined) return;

View File

@ -119,10 +119,10 @@ export const fetchDiffFilesBatch = ({ commit, state, dispatch }) => {
const getBatch = (page = startPage) =>
axios
.get(mergeUrlParams({ ...urlParams, page, per_page: perPage }, state.endpointBatch))
.then(({ data: { pagination, diff_files } }) => {
totalLoaded += diff_files.length;
.then(({ data: { pagination, diff_files: diffFiles } }) => {
totalLoaded += diffFiles.length;
commit(types.SET_DIFF_DATA_BATCH, { diff_files });
commit(types.SET_DIFF_DATA_BATCH, { diff_files: diffFiles });
commit(types.SET_BATCH_LOADING_STATE, 'loaded');
if (!scrolledVirtualScroller) {
@ -138,7 +138,7 @@ export const fetchDiffFilesBatch = ({ commit, state, dispatch }) => {
}
if (!isNoteLink && !state.currentDiffFileId) {
commit(types.SET_CURRENT_DIFF_FILE, diff_files[0]?.file_hash);
commit(types.SET_CURRENT_DIFF_FILE, diffFiles[0]?.file_hash);
}
if (isNoteLink) {
@ -293,8 +293,8 @@ export const assignDiscussionsToDiff = (
};
export const removeDiscussionsFromDiff = ({ commit }, removeDiscussion) => {
const { file_hash, line_code, id } = removeDiscussion;
commit(types.REMOVE_LINE_DISCUSSIONS_FOR_FILE, { fileHash: file_hash, lineCode: line_code, id });
const { file_hash: fileHash, line_code: lineCode, id } = removeDiscussion;
commit(types.REMOVE_LINE_DISCUSSIONS_FOR_FILE, { fileHash, lineCode, id });
};
export const toggleLineDiscussions = ({ commit }, options) => {

View File

@ -11,6 +11,7 @@ export const displayAndLogError = (error) =>
const EVENT_ICONS = {
comment: 'comment',
issues: 'issues',
label: 'label',
status: 'status',
default: 'comment',
};

View File

@ -61,7 +61,7 @@ export const decorateLineForInlineView = (line, id, conflict) => {
};
export const getLineForParallelView = (line, id, lineType, isHead) => {
const { old_line, new_line, rich_text } = line;
const { old_line: oldLine, new_line: newLine, rich_text: richText } = line;
const hasConflict = lineType === 'conflict';
return {
@ -71,10 +71,9 @@ export const getLineForParallelView = (line, id, lineType, isHead) => {
isHead: hasConflict && isHead,
isOrigin: hasConflict && !isHead,
hasMatch: lineType === 'match',
// eslint-disable-next-line camelcase
lineNumber: isHead ? new_line : old_line,
lineNumber: isHead ? newLine : oldLine,
section: isHead ? 'head' : 'origin',
richText: rich_text,
richText,
isSelected: false,
isUnselected: false,
};

View File

@ -237,10 +237,10 @@ export default {
recentDeployments() {
return this.deploymentData.reduce((acc, deployment) => {
if (deployment.created_at >= this.earliestDatapoint) {
const { id, created_at, sha, ref, tag } = deployment;
const { id, created_at: createdAt, sha, ref, tag } = deployment;
acc.push({
id,
createdAt: created_at,
createdAt,
sha,
commitUrl: `${this.projectPath}/-/commit/${sha}`,
tag,

View File

@ -16,8 +16,8 @@ export const gqClient = createGqClient(
);
/**
* Metrics loaded from project-defined dashboards do not have a metric_id.
* This method creates a unique ID combining metric_id and id, if either is present.
* Metrics loaded from project-defined dashboards do not have a metricId.
* This method creates a unique ID combining metricId and id, if either is present.
* This is hopefully a temporary solution until BE processes metrics before passing to FE
*
* Related:
@ -25,12 +25,11 @@ export const gqClient = createGqClient(
* https://gitlab.com/gitlab-org/gitlab/-/merge_requests/27447
*
* @param {Object} metric - metric
* @param {Number} metric.metric_id - Database metric id
* @param {Number} metric.metricId - Database metric id
* @param {String} metric.id - User-defined identifier
* @returns {Object} - normalized metric with a uniqueID
*/
// eslint-disable-next-line camelcase
export const uniqMetricsId = ({ metric_id, id }) => `${metric_id || NOT_IN_DB_PREFIX}_${id}`;
export const uniqMetricsId = ({ metricId, id }) => `${metricId || NOT_IN_DB_PREFIX}_${id}`;
/**
* Project path has a leading slash that doesn't work well
@ -100,19 +99,28 @@ export const parseAnnotationsResponse = (response) => {
* @returns {Object}
*/
const mapToMetricsViewModel = (metrics) =>
metrics.map(({ label, id, metric_id, query_range, prometheus_endpoint_path, ...metric }) => ({
label,
queryRange: query_range,
prometheusEndpointPath: prometheus_endpoint_path,
metricId: uniqMetricsId({ metric_id, id }),
metrics.map(
({
label,
id,
metric_id: metricId,
query_range: queryRange,
prometheus_endpoint_path: prometheusEndpointPath,
...metric
}) => ({
label,
queryRange,
prometheusEndpointPath,
metricId: uniqMetricsId({ metricId, id }),
// metric data
loading: false,
result: null,
state: null,
// metric data
loading: false,
result: null,
state: null,
...metric,
}));
...metric,
}),
);
/**
* Maps X-axis view model
@ -169,26 +177,26 @@ export const mapPanelToViewModel = ({
id = null,
title = '',
type,
x_axis = {}, // eslint-disable-line camelcase
x_label,
y_label,
y_axis = {}, // eslint-disable-line camelcase
x_axis: xAxisBase = {},
x_label: xLabel,
y_label: yLabel,
y_axis: yAxisBase = {},
field,
metrics = [],
links = [],
min_value,
max_value,
min_value: minValue,
max_value: maxValue,
split,
thresholds,
format,
}) => {
// Both `x_axis.name` and `x_label` are supported for now
// https://gitlab.com/gitlab-org/gitlab/issues/210521
const xAxis = mapXAxisToViewModel({ name: x_label, ...x_axis }); // eslint-disable-line camelcase
const xAxis = mapXAxisToViewModel({ name: xLabel, ...xAxisBase });
// Both `y_axis.name` and `y_label` are supported for now
// https://gitlab.com/gitlab-org/gitlab/issues/208385
const yAxis = mapYAxisToViewModel({ name: y_label, ...y_axis }); // eslint-disable-line camelcase
const yAxis = mapYAxisToViewModel({ name: yLabel, ...yAxisBase });
return {
id,
@ -199,8 +207,8 @@ export const mapPanelToViewModel = ({
yAxis,
xAxis,
field,
minValue: min_value,
maxValue: max_value,
minValue,
maxValue,
split,
thresholds,
format,
@ -295,13 +303,13 @@ export const mapToDashboardViewModel = ({
dashboard = '',
templating = {},
links = [],
panel_groups = [], // eslint-disable-line camelcase
panel_groups: panelGroups = [],
}) => {
return {
dashboard,
variables: mergeURLVariables(parseTemplatingVariables(templating.variables)),
links: links.map(mapLinksToViewModel),
panelGroups: panel_groups.map(mapToPanelGroupViewModel),
panelGroups: panelGroups.map(mapToPanelGroupViewModel),
};
};

View File

@ -7,7 +7,7 @@ import * as utils from './utils';
export default {
[types.ADD_NEW_NOTE](state, data) {
const note = data.discussion ? data.discussion.notes[0] : data;
const { discussion_id, type } = note;
const { discussion_id: discussionId, type } = note;
const [exists] = state.discussions.filter((n) => n.id === note.discussion_id);
const isDiscussion = type === constants.DISCUSSION_NOTE || type === constants.DIFF_NOTE;
@ -17,9 +17,9 @@ export default {
if (!discussion) {
discussion = {
expanded: true,
id: discussion_id,
id: discussionId,
individual_note: !isDiscussion,
reply_id: discussion_id,
reply_id: discussionId,
};
if (isDiscussion && isInMRPage()) {

View File

@ -26,8 +26,7 @@ export const receivePackagesListSuccess = ({ commit }, { data, headers }) => {
export const requestPackagesList = ({ dispatch, state }, params = {}) => {
dispatch('setLoading', true);
// eslint-disable-next-line camelcase
const { page = DEFAULT_PAGE, per_page = DEFAULT_PAGE_SIZE } = params;
const { page = DEFAULT_PAGE, per_page: perPage = DEFAULT_PAGE_SIZE } = params;
const { sort, orderBy } = state.sorting;
const type = state.config.forceTerraform
? TERRAFORM_SEARCH_TYPE
@ -38,7 +37,7 @@ export const requestPackagesList = ({ dispatch, state }, params = {}) => {
const apiMethod = state.config.isGroupPage ? 'groupPackages' : 'projectPackages';
return Api[apiMethod](state.config.resourceId, {
params: { page, per_page, sort, order_by: orderBy, ...packageFilters },
params: { page, per_page: perPage, sort, order_by: orderBy, ...packageFilters },
})
.then(({ data, headers }) => {
dispatch('receivePackagesListSuccess', { data, headers });

View File

@ -55,6 +55,7 @@ waitForCSSLoaded(() => {
},
attrs: {
height: LANGUAGE_CHART_HEIGHT,
responsive: true,
},
});
},
@ -103,6 +104,9 @@ waitForCSSLoaded(() => {
yAxisTitle: __('No. of commits'),
xAxisType: 'category',
},
attrs: {
responsive: true,
},
});
},
});
@ -136,6 +140,9 @@ waitForCSSLoaded(() => {
yAxisTitle: __('No. of commits'),
xAxisType: 'category',
},
attrs: {
responsive: true,
},
});
},
});
@ -160,6 +167,9 @@ waitForCSSLoaded(() => {
yAxisTitle: __('No. of commits'),
xAxisType: 'category',
},
attrs: {
responsive: true,
},
});
},
});

View File

@ -194,6 +194,7 @@ export default {
:data="chartData"
:option="chartOptions"
:format-tooltip-text="formatTooltipText"
responsive
>
<template v-if="canShowData" #tooltip-title>
{{ tooltipTitle }}

View File

@ -3,10 +3,11 @@ import {
GlAlert,
GlIcon,
GlButton,
GlDropdown,
GlDropdownItem,
GlForm,
GlFormGroup,
GlFormInput,
GlFormSelect,
GlFormTextarea,
GlLink,
GlSprintf,
@ -43,10 +44,10 @@ const i18n = {
};
export default {
typeOptions: [
{ value: VARIABLE_TYPE, text: __('Variable') },
{ value: FILE_TYPE, text: __('File') },
],
typeOptions: {
[VARIABLE_TYPE]: __('Variable'),
[FILE_TYPE]: __('File'),
},
i18n,
formElementClasses: 'gl-mr-3 gl-mb-3 gl-flex-basis-quarter gl-flex-shrink-0 gl-flex-grow-0',
// this height value is used inline on the textarea to match the input field height
@ -56,10 +57,11 @@ export default {
GlAlert,
GlIcon,
GlButton,
GlDropdown,
GlDropdownItem,
GlForm,
GlFormGroup,
GlFormInput,
GlFormSelect,
GlFormTextarea,
GlLink,
GlSprintf,
@ -202,6 +204,11 @@ export default {
});
}
},
setVariableType(key, type) {
const { variables } = this.form[this.refFullName];
const variable = variables.find((v) => v.key === key);
variable.variable_type = type;
},
setVariableParams(refValue, type, paramsObj) {
Object.entries(paramsObj).forEach(([key, value]) => {
this.setVariable(refValue, type, key, value);
@ -401,12 +408,19 @@ export default {
<div
class="gl-display-flex gl-align-items-stretch gl-flex-direction-column gl-md-flex-direction-row"
>
<gl-form-select
v-model="variable.variable_type"
<gl-dropdown
:text="$options.typeOptions[variable.variable_type]"
:class="$options.formElementClasses"
:options="$options.typeOptions"
data-testid="pipeline-form-ci-variable-type"
/>
>
<gl-dropdown-item
v-for="type in Object.keys($options.typeOptions)"
:key="type"
@click="setVariableType(variable.key, type)"
>
{{ $options.typeOptions[type] }}
</gl-dropdown-item>
</gl-dropdown>
<gl-form-input
v-model="variable.key"
:placeholder="s__('CiVariables|Input variable key')"

View File

@ -30,7 +30,6 @@ export const fetchTestSuite = ({ state, commit, dispatch }, index) => {
dispatch('toggleLoading');
// eslint-disable-next-line camelcase
const { build_ids = [] } = state.testReports?.test_suites?.[index] || {};
// Replacing `/:suite_name.json` with the name of the suite. Including the extra characters
// to ensure that we replace exactly the template part of the URL string

View File

@ -262,8 +262,8 @@ export default {
const selectedUsers = this.preselectedItems
.filter(({ type }) => type === LEVEL_TYPES.USER)
.map(({ user_id, name, username, avatar_url, type }) => ({
id: user_id,
.map(({ user_id: id, name, username, avatar_url, type }) => ({
id,
name,
username,
avatar_url,

View File

@ -3,9 +3,7 @@
import $ from 'jquery';
import { setCookie } from '~/lib/utils/common_utils';
import { hide, fixTitle } from '~/tooltips';
import createFlash from './flash';
import axios from './lib/utils/axios_utils';
import { sprintf, s__, __ } from './locale';
import { __ } from './locale';
const updateSidebarClasses = (layoutPage, rightSidebar) => {
if (window.innerWidth >= 992) {
@ -20,7 +18,6 @@ const updateSidebarClasses = (layoutPage, rightSidebar) => {
};
function Sidebar() {
this.toggleTodo = this.toggleTodo.bind(this);
this.sidebar = $('aside');
this.removeListeners();
@ -54,7 +51,6 @@ Sidebar.prototype.addEventListeners = function () {
this.sidebar.on('hiddenGlDropdown', this, this.onSidebarDropdownHidden);
$document.on('click', '.js-sidebar-toggle', this.sidebarToggleClicked);
$(document).off('click', '.js-issuable-todo').on('click', '.js-issuable-todo', this.toggleTodo);
if (window.gon?.features?.movedMrSidebar) {
const layoutPage = document.querySelector('.layout-page');
@ -105,32 +101,6 @@ Sidebar.prototype.sidebarToggleClicked = function (e, triggered) {
}
};
Sidebar.prototype.toggleTodo = function (e) {
const $this = $(e.currentTarget);
const ajaxType = $this.data('deletePath') ? 'delete' : 'post';
const url = String($this.data('deletePath') || $this.data('createPath'));
hide($this);
$('.js-issuable-todo').disable().addClass('is-loading');
axios[ajaxType](url, {
issuable_id: $this.data('issuableId'),
issuable_type: $this.data('issuableType'),
})
.then(({ data }) => {
this.todoUpdateDone(data);
})
.catch(() =>
createFlash({
message: sprintf(__('There was an error %{message} to-do item.'), {
message:
ajaxType === 'post' ? s__('RightSidebar|adding a') : s__('RightSidebar|deleting the'),
}),
}),
);
};
Sidebar.prototype.sidebarCollapseClicked = function (e) {
if ($(e.currentTarget).hasClass('js-dont-change-state')) {
return;

View File

@ -80,22 +80,26 @@ export default {
},
computeGraphData(metrics, deploymentTime) {
this.loadingMetrics = false;
const { memory_before, memory_after, memory_values } = metrics;
const {
memory_before: memoryBefore,
memory_after: memoryAfter,
memory_values: memoryValues,
} = metrics;
// Both `memory_before` and `memory_after` objects
// have peculiar structure where accessing only a specific
// index yields the correct value that we can use to show memory delta.
if (memory_before.length > 0) {
this.memoryFrom = this.getMegabytes(memory_before[0].value[1]);
if (memoryBefore.length > 0) {
this.memoryFrom = this.getMegabytes(memoryBefore[0].value[1]);
}
if (memory_after.length > 0) {
this.memoryTo = this.getMegabytes(memory_after[0].value[1]);
if (memoryAfter.length > 0) {
this.memoryTo = this.getMegabytes(memoryAfter[0].value[1]);
}
if (memory_values.length > 0) {
if (memoryValues.length > 0) {
this.hasMetrics = true;
this.memoryMetrics = memory_values[0].values;
this.memoryMetrics = memoryValues[0].values;
this.deploymentTime = deploymentTime;
}
},

View File

@ -75,9 +75,9 @@ export default {
return sprintf(s__('AccessibilityReport|Message: %{message}'), { message });
},
prepareReports() {
const { new_errors, existing_errors, resolved_errors } = this.collapsedData;
const { collapsedData } = this;
const newErrors = new_errors.map((error) => {
const newErrors = collapsedData.new_errors.map((error) => {
return {
header: __('New'),
id: uniqueId('new-error-'),
@ -91,7 +91,7 @@ export default {
};
});
const existingErrors = existing_errors.map((error) => {
const existingErrors = collapsedData.existing_errors.map((error) => {
return {
id: uniqueId('existing-error-'),
text: this.formatText(error.code),
@ -104,7 +104,7 @@ export default {
};
});
const resolvedErrors = resolved_errors.map((error) => {
const resolvedErrors = collapsedData.resolved_errors.map((error) => {
return {
id: uniqueId('resolved-error-'),
text: this.formatText(error.code),

View File

@ -51,14 +51,14 @@ export const recentFailuresTextBuilder = (summary = {}) => {
return i18n.recentFailureSummary(recentlyFailed, failed);
};
export const reportSubTextBuilder = ({ suite_errors, summary }) => {
if (suite_errors?.head || suite_errors?.base) {
export const reportSubTextBuilder = ({ suite_errors: suiteErrors, summary }) => {
if (suiteErrors?.head || suiteErrors?.base) {
const errors = [];
if (suite_errors?.head) {
errors.push(`${i18n.headReportParsingError} ${suite_errors.head}`);
if (suiteErrors?.head) {
errors.push(`${i18n.headReportParsingError} ${suiteErrors.head}`);
}
if (suite_errors?.base) {
errors.push(`${i18n.baseReportParsingError} ${suite_errors.base}`);
if (suiteErrors?.base) {
errors.push(`${i18n.baseReportParsingError} ${suiteErrors.base}`);
}
return errors.join('<br />');
}

View File

@ -30,12 +30,12 @@ export function fetchBranches({ commit, state }, search = '') {
});
}
export const fetchMilestones = ({ commit, state }, search_title = '') => {
export const fetchMilestones = ({ commit, state }, searchTitle = '') => {
commit(types.REQUEST_MILESTONES);
const { milestonesEndpoint } = state;
return axios
.get(milestonesEndpoint, { params: { search_title } })
.get(milestonesEndpoint, { params: { search_title: searchTitle } })
.then((response) => {
commit(types.RECEIVE_MILESTONES_SUCCESS, response.data);
return response;

View File

@ -46,7 +46,8 @@ class IssuableFinder
requires_cross_project_access unless: -> { params.project? }
FULL_TEXT_SEARCH_TERM_REGEX = /\A[\p{ASCII}|\p{Latin}]+\z/.freeze
FULL_TEXT_SEARCH_TERM_PATTERN = '[\u0000-\u218F]*'
FULL_TEXT_SEARCH_TERM_REGEX = /\A#{FULL_TEXT_SEARCH_TERM_PATTERN}\z/.freeze
NEGATABLE_PARAMS_HELPER_KEYS = %i[project_id scope status include_subgroups].freeze
attr_accessor :current_user, :params

View File

@ -24,7 +24,7 @@ module Mutations
check_spam_action_response!(issue)
{
issue: issue,
issue: issue.reset,
errors: errors_on_object(issue)
}
end

View File

@ -52,7 +52,8 @@ module Ci
cluster_applications: 'gl-cluster-applications.json', # DEPRECATED: https://gitlab.com/gitlab-org/gitlab/-/issues/361094
requirements: 'requirements.json',
coverage_fuzzing: 'gl-coverage-fuzzing.json',
api_fuzzing: 'gl-api-fuzzing-report.json'
api_fuzzing: 'gl-api-fuzzing-report.json',
cyclonedx: 'gl-sbom.cdx.zip'
}.freeze
INTERNAL_TYPES = {
@ -92,7 +93,8 @@ module Ci
terraform: :raw,
requirements: :raw,
coverage_fuzzing: :raw,
api_fuzzing: :raw
api_fuzzing: :raw,
cyclonedx: :zip
}.freeze
DOWNLOADABLE_TYPES = %w[
@ -116,6 +118,7 @@ module Ci
secret_detection
requirements
cluster_image_scanning
cyclonedx
].freeze
TYPE_AND_FORMAT_PAIRS = INTERNAL_TYPES.merge(REPORT_TYPES).freeze
@ -207,7 +210,8 @@ module Ci
browser_performance: 24, ## EE-specific
load_performance: 25, ## EE-specific
api_fuzzing: 26, ## EE-specific
cluster_image_scanning: 27 ## EE-specific
cluster_image_scanning: 27, ## EE-specific
cyclonedx: 28 ## EE-specific
}
# `file_location` indicates where actual files are stored.

View File

@ -101,6 +101,7 @@ class Issue < ApplicationRecord
validates :namespace, presence: true, if: -> { project.present? }
validate :due_date_after_start_date
validate :parent_link_confidentiality
enum issue_type: WorkItems::Type.base_types
@ -291,6 +292,16 @@ class Issue < ApplicationRecord
def pg_full_text_search(search_term)
super.where('issue_search_data.project_id = issues.project_id')
end
override :full_search
def full_search(query, matched_columns: nil, use_minimum_char_limit: true)
return super if query.match?(IssuableFinder::FULL_TEXT_SEARCH_TERM_REGEX)
super.where(
'issues.title NOT SIMILAR TO :pattern OR issues.description NOT SIMILAR TO :pattern',
pattern: IssuableFinder::FULL_TEXT_SEARCH_TERM_PATTERN
)
end
end
def next_object_by_relative_position(ignoring: nil, order: :asc)
@ -670,6 +681,21 @@ class Issue < ApplicationRecord
end
end
# Although parent/child relationship can be set only for WorkItems, we
# still need to validate it for Issue model too, because both models use
# the same table.
def parent_link_confidentiality
return unless persisted?
if confidential? && WorkItems::ParentLink.has_public_children?(id)
errors.add(:confidential, _('confidential parent can not be used if there are non-confidential children.'))
end
if !confidential? && WorkItems::ParentLink.has_confidential_parent?(id)
errors.add(:confidential, _('associated parent is confidential and can not have non-confidential children.'))
end
end
override :persist_pg_full_text_search_vector
def persist_pg_full_text_search_vector(search_vector)
Issues::SearchData.upsert({ project_id: project_id, issue_id: id, search_vector: search_vector }, unique_by: %i(project_id issue_id))

View File

@ -34,6 +34,17 @@ class WorkItem < Issue
private
override :parent_link_confidentiality
def parent_link_confidentiality
if confidential? && work_item_children.public_only.exists?
errors.add(:confidential, _('confidential parent can not be used if there are non-confidential children.'))
end
if !confidential? && work_item_parent&.confidential?
errors.add(:confidential, _('associated parent is confidential and can not have non-confidential children.'))
end
end
def record_create_action
super

View File

@ -16,6 +16,20 @@ module WorkItems
validate :validate_parent_type
validate :validate_same_project
validate :validate_max_children
validate :validate_confidentiality
class << self
def has_public_children?(parent_id)
joins(:work_item).where(work_item_parent_id: parent_id, 'issues.confidential': false).exists?
end
def has_confidential_parent?(id)
link = find_by_work_item_id(id)
return false unless link
link.work_item_parent.confidential?
end
end
private
@ -56,5 +70,14 @@ module WorkItems
errors.add :work_item_parent, _('parent already has maximum number of children.')
end
end
def validate_confidentiality
return unless work_item_parent && work_item
if work_item_parent.confidential? && !work_item.confidential?
errors.add :work_item, _("cannot assign a non-confidential work item to a confidential "\
"parent. Make the work item confidential and try again.")
end
end
end
end

View File

@ -0,0 +1,36 @@
# frozen_string_literal: true

module Ci
  module Runners
    class BulkDeleteRunnersService
      attr_reader :runners

      RUNNER_LIMIT = 50

      # @param runners [Array<Ci::Runner, Integer>] the runners to unregister/destroy
      def initialize(runners:)
        @runners = runners
      end

      def execute
        if @runners
          # Delete a few runners immediately
          return delete_runners
        end

        { deleted_count: 0, deleted_ids: [] }
      end

      private

      def delete_runners
        # rubocop:disable CodeReuse/ActiveRecord
        runners_to_be_deleted = Ci::Runner.where(id: @runners).limit(RUNNER_LIMIT)
        # rubocop:enable CodeReuse/ActiveRecord
        deleted_ids = runners_to_be_deleted.destroy_all.map(&:id) # rubocop: disable Cop/DestroyAll

        { deleted_count: deleted_ids.count, deleted_ids: deleted_ids }
      end
    end
  end
end

View File

@ -48,6 +48,26 @@ module IncidentManagement
new(incident, user, note: note, occurred_at: occurred_at, action: action, auto_created: true).execute
end
def change_labels(incident, user, added_labels: [], removed_labels: [])
return if Feature.disabled?(:incident_timeline_events_from_labels, incident.project)
if added_labels.blank? && removed_labels.blank?
return ServiceResponse.error(message: _('There are no changed labels'))
end
labels_note = -> (verb, labels) {
"#{verb} #{labels.map(&:to_reference).join(' ')} #{'label'.pluralize(labels.count)}" if labels.present?
}
added_note = labels_note.call('added', added_labels)
removed_note = labels_note.call('removed', removed_labels)
note = "@#{user.username} #{[added_note, removed_note].compact.join(' and ')}"
occurred_at = incident.updated_at
action = 'label'
new(incident, user, note: note, occurred_at: occurred_at, action: action, auto_created: true).execute
end
end
def execute

View File

@ -24,6 +24,9 @@ module ResourceEvents
end
ApplicationRecord.legacy_bulk_insert(ResourceLabelEvent.table_name, labels) # rubocop:disable Gitlab/BulkInsert
create_timeline_events_from(added_labels: added_labels, removed_labels: removed_labels)
resource.expire_note_etag_cache
Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_label_changed_action(author: user) if resource.is_a?(Issue)
@ -41,6 +44,17 @@ module ResourceEvents
raise ArgumentError, "Unknown resource type #{resource.class.name}"
end
end
def create_timeline_events_from(added_labels: [], removed_labels: [])
return unless resource.incident?
IncidentManagement::TimelineEvents::CreateService.change_labels(
resource,
user,
added_labels: added_labels,
removed_labels: removed_labels
)
end
end
end

View File

@ -69,6 +69,9 @@ module WorkItems
end
def service_response!(result)
work_item.reload_work_item_parent
work_item.work_item_children.reset
return result unless result[:status] == :error
raise WidgetError, result[:message]

View File

@ -17,11 +17,11 @@
paginate_diffs: true,
paginate_diffs_per_page: Projects::CompareController::COMMIT_DIFFS_PER_PAGE
- else
.card.gl-bg-gray-50.gl-border-none.gl-p-2
.center
= render Pajamas::CardComponent.new(card_options: { class: "gl-bg-gray-50 gl-mb-5 gl-border-none gl-text-center" }) do |c|
- c.body do
%h4
= s_("CompareBranches|There isn't anything to compare.")
%p.slead
%p.gl-mb-4.gl-line-height-24
- if params[:to] == params[:from]
- source_branch = capture do
%span.ref-name= params[:from]

View File

@ -2261,8 +2261,7 @@
:resource_boundary: :unknown
:weight: 2
:idempotent: false
:tags:
- :needs_own_queue
:tags: []
- :name: emails_on_push
:worker_name: EmailsOnPushWorker
:feature_category: :source_code_management
@ -3045,8 +3044,7 @@
:resource_boundary: :unknown
:weight: 2
:idempotent: false
:tags:
- :needs_own_queue
:tags: []
- :name: snippets_schedule_bulk_repository_shard_moves
:worker_name: Snippets::ScheduleBulkRepositoryShardMovesWorker
:feature_category: :gitaly

View File

@ -11,9 +11,6 @@ class EmailReceiverWorker # rubocop:disable Scalability/IdempotentWorker
urgency :high
weight 2
# https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1263
tags :needs_own_queue
attr_accessor :raw
def perform(raw)

View File

@ -9,9 +9,6 @@ class ServiceDeskEmailReceiverWorker < EmailReceiverWorker # rubocop:disable Sca
urgency :high
sidekiq_options retry: 3
# https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1263
tags :needs_own_queue
def should_perform?
::Gitlab::ServiceDeskEmail.enabled?
end

View File

@ -0,0 +1,8 @@
---
name: incident_timeline_events_from_labels
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/93175
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/369416
milestone: '15.3'
type: development
group: group::respond
default_enabled: false

View File

@ -27,6 +27,9 @@ options:
- incident_management_alert_status_changed
- incident_management_alert_assigned
- incident_management_alert_todo
- incident_management_timeline_event_created
- incident_management_timeline_event_edited
- incident_management_timeline_event_deleted
distribution:
- ce
- ee

View File

@ -27,6 +27,9 @@ options:
- incident_management_alert_status_changed
- incident_management_alert_assigned
- incident_management_alert_todo
- incident_management_timeline_event_created
- incident_management_timeline_event_edited
- incident_management_timeline_event_deleted
distribution:
- ce
- ee

View File

@ -0,0 +1,7 @@
# frozen_string_literal: true

class AddPlanLimitsMaxSizeCyclonedxReportColumn < Gitlab::Database::Migration[2.0]
  def change
    add_column :plan_limits, :ci_max_artifact_size_cyclonedx, :integer, null: false, default: 1
  end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true

class AddPartialTrigramIndexForIssueTitle < Gitlab::Database::Migration[2.0]
  disable_ddl_transaction!

  INDEX_NAME = 'index_issues_on_title_trigram_non_latin'

  def up
    add_concurrent_index :issues, :title, name: INDEX_NAME,
      using: :gin, opclass: { title: :gin_trgm_ops },
      where: "title NOT SIMILAR TO '[\\u0000-\\u218F]*' OR description NOT SIMILAR TO '[\\u0000-\\u218F]*'"
  end

  def down
    remove_concurrent_index_by_name :issues, INDEX_NAME
  end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true

class AddPartialTrigramIndexForIssueDescription < Gitlab::Database::Migration[2.0]
  disable_ddl_transaction!

  INDEX_NAME = 'index_issues_on_description_trigram_non_latin'

  def up
    add_concurrent_index :issues, :description, name: INDEX_NAME,
      using: :gin, opclass: { description: :gin_trgm_ops },
      where: "title NOT SIMILAR TO '[\\u0000-\\u218F]*' OR description NOT SIMILAR TO '[\\u0000-\\u218F]*'"
  end

  def down
    remove_concurrent_index_by_name :issues, INDEX_NAME
  end
end

View File

@ -0,0 +1 @@
fd68f63f6ec50233c99b5173b69fa6ff99b1ae9b2d023cdccdff3cd94be35b36

View File

@ -0,0 +1 @@
a332483a18eb46c1c4ce0d93b4269d630978a8c93e307f22ec5b412f3fa8f355

View File

@ -0,0 +1 @@
6515dbfcf8bc32c25e0547ea21af95c1dc2e89e3571abdef0e43e5289cd71c2c

View File

@ -18903,7 +18903,8 @@ CREATE TABLE plan_limits (
web_hook_calls_mid integer DEFAULT 0 NOT NULL,
web_hook_calls_low integer DEFAULT 0 NOT NULL,
project_ci_variables integer DEFAULT 200 NOT NULL,
group_ci_variables integer DEFAULT 200 NOT NULL
group_ci_variables integer DEFAULT 200 NOT NULL,
ci_max_artifact_size_cyclonedx integer DEFAULT 1 NOT NULL
);
CREATE SEQUENCE plan_limits_id_seq
@ -28583,6 +28584,8 @@ CREATE INDEX index_issues_on_confidential ON issues USING btree (confidential);
CREATE INDEX index_issues_on_description_trigram ON issues USING gin (description gin_trgm_ops);
CREATE INDEX index_issues_on_description_trigram_non_latin ON issues USING gin (description gin_trgm_ops) WHERE (((title)::text !~ similar_escape('[\u0000-\u218F]*'::text, NULL::text)) OR (description !~ similar_escape('[\u0000-\u218F]*'::text, NULL::text)));
CREATE INDEX index_issues_on_duplicated_to_id ON issues USING btree (duplicated_to_id) WHERE (duplicated_to_id IS NOT NULL);
CREATE INDEX index_issues_on_id_and_weight ON issues USING btree (id, weight);
@ -28617,6 +28620,8 @@ CREATE INDEX index_issues_on_sprint_id ON issues USING btree (sprint_id);
CREATE INDEX index_issues_on_title_trigram ON issues USING gin (title gin_trgm_ops);
CREATE INDEX index_issues_on_title_trigram_non_latin ON issues USING gin (title gin_trgm_ops) WHERE (((title)::text !~ similar_escape('[\u0000-\u218F]*'::text, NULL::text)) OR (description !~ similar_escape('[\u0000-\u218F]*'::text, NULL::text)));
CREATE INDEX index_issues_on_updated_at ON issues USING btree (updated_at);
CREATE INDEX index_issues_on_updated_by_id ON issues USING btree (updated_by_id) WHERE (updated_by_id IS NOT NULL);

View File

@ -639,6 +639,7 @@ setting is used:
| `ci_max_artifact_size_secret_detection` | 0 |
| `ci_max_artifact_size_terraform` | 5 MB ([introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/37018) in GitLab 13.3) |
| `ci_max_artifact_size_trace` | 0 |
| `ci_max_artifact_size_cyclonedx` | 1 MB |
For example, to set the `ci_max_artifact_size_junit` limit to 10 MB on a self-managed
installation, run the following in the [GitLab Rails console](operations/rails_console.md#starting-a-rails-console-session):
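The command itself falls outside this diff hunk; a minimal sketch of it, assuming the `Plan#actual_limits` API described in the admin docs, is:

```ruby
# Rails console: raise the JUnit artifact size limit to 10 MB on the default plan
Plan.default.actual_limits.update!(ci_max_artifact_size_junit: 10)
```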

View File

@ -179,15 +179,3 @@ sidekiq['routing_rules'] = [
These queues must also be included in at least one [Sidekiq
queue group](extra_sidekiq_processes.md#start-multiple-processes).
The following table shows the workers that should have their own queue:
| Worker name | Queue name | GitLab issue |
| --- | --- | --- |
| `EmailReceiverWorker` | `email_receiver` | [`gitlab-com/gl-infra/scalability#1263`](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1263) |
| `ServiceDeskEmailReceiverWorker` | `service_desk_email_receiver` | [`gitlab-com/gl-infra/scalability#1263`](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1263) |
| `ProjectImportScheduleWorker` | `project_import_schedule` | [`gitlab-org/gitlab#340630`](https://gitlab.com/gitlab-org/gitlab/-/issues/340630) |
| `HashedStorage::MigratorWorker` | `hashed_storage:hashed_storage_migrator` | [`gitlab-org/gitlab#340629`](https://gitlab.com/gitlab-org/gitlab/-/issues/340629) |
| `HashedStorage::ProjectMigrateWorker` | `hashed_storage:hashed_storage_project_migrate` | [`gitlab-org/gitlab#340629`](https://gitlab.com/gitlab-org/gitlab/-/issues/340629) |
| `HashedStorage::ProjectRollbackWorker` | `hashed_storage:hashed_storage_project_rollback` | [`gitlab-org/gitlab#340629`](https://gitlab.com/gitlab-org/gitlab/-/issues/340629) |
| `HashedStorage::RollbackerWorker` | `hashed_storage:hashed_storage_rollbacker` | [`gitlab-org/gitlab#340629`](https://gitlab.com/gitlab-org/gitlab/-/issues/340629) |

View File

@ -246,7 +246,7 @@ automatically, due to differences between the two application servers.
To switch from Unicorn to Puma:
1. Determine suitable Puma [worker and thread settings](../../install/requirements.md#puma-settings).
1. Convert any custom Unicorn settings to Puma.
1. Convert any custom Unicorn settings to Puma in `/etc/gitlab/gitlab.rb`.
The table below summarizes which Unicorn configuration keys correspond to those
in Puma when using the Linux package, and which ones have no corresponding counterpart.
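The mapping table itself is outside this hunk. For illustration, a converted configuration in `/etc/gitlab/gitlab.rb` might look like the following sketch; the `puma[...]` keys are the Linux package's standard Puma settings, and the values are placeholders rather than recommendations:

```ruby
# /etc/gitlab/gitlab.rb: Puma equivalents of previously customized Unicorn settings
puma['worker_processes'] = 4   # replaces unicorn['worker_processes']
puma['min_threads'] = 4
puma['max_threads'] = 4
```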

View File

@ -19801,6 +19801,7 @@ Iteration ID wildcard values.
| <a id="jobartifactfiletypecodequality"></a>`CODEQUALITY` | CODE QUALITY job artifact file type. |
| <a id="jobartifactfiletypecontainer_scanning"></a>`CONTAINER_SCANNING` | CONTAINER SCANNING job artifact file type. |
| <a id="jobartifactfiletypecoverage_fuzzing"></a>`COVERAGE_FUZZING` | COVERAGE FUZZING job artifact file type. |
| <a id="jobartifactfiletypecyclonedx"></a>`CYCLONEDX` | CYCLONEDX job artifact file type. |
| <a id="jobartifactfiletypedast"></a>`DAST` | DAST job artifact file type. |
| <a id="jobartifactfiletypedependency_scanning"></a>`DEPENDENCY_SCANNING` | DEPENDENCY SCANNING job artifact file type. |
| <a id="jobartifactfiletypedotenv"></a>`DOTENV` | DOTENV job artifact file type. |

View File

@ -329,3 +329,27 @@ GitLab can display the results of one or more reports in the merge request
[terraform widget](../../user/infrastructure/iac/mr_integration.md#output-terraform-plan-information-into-a-merge-request).
For more information, see [Output `terraform plan` information into a merge request](../../user/infrastructure/iac/mr_integration.md).
## `artifacts:reports:cyclonedx`
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/360766) in GitLab 15.3
This report is a Software Bill of Materials describing the components of a project
following the [cyclonedx](https://cyclonedx.org/docs/1.4) protocol format.
You can specify multiple cyclonedx reports per job. These can be either supplied
as a list of filenames, a filename pattern, or both:
- List of filenames: `cyclonedx: [gl-sbom-npm-npm.cdx.json, gl-sbom-bundler-gem.cdx.json]`.
- A filename pattern: `cyclonedx: gl-sbom-*.json`.
- Combination of both of the above: `cyclonedx: [gl-sbom-*.json, my-cyclonedx.json]`.
Below is an example of a job exposing cyclonedx artifacts:
```yaml
artifacts:
  reports:
    cyclonedx:
      - gl-sbom-npm-npm.cdx.json
      - gl-sbom-bundler-gem.cdx.json
```

View File

@ -102,7 +102,7 @@ Two scenarios exist where schema items are exempt from the deprecation process,
and can be removed or changed at any time without notice. These are schema items that either:
- Use the [`feature_flag` property](#feature_flag-property) _and_ the flag is disabled by default.
- Are [marked as alpha](#marking-schema-items-as-alpha).
- Are [marked as alpha](#mark-schema-items-as-alpha).
## Global IDs
@ -539,7 +539,7 @@ return value of the field. This can be done in the resolver, in the
type, or even in a model method, depending on your preference and
situation.
Consider also [marking the field as Alpha](#marking-schema-items-as-alpha)
Consider also [marking the field as Alpha](#mark-schema-items-as-alpha)
while the value of the field can be toggled. You can
[change or remove Alpha fields at any time](#breaking-change-exemptions) without needing to deprecate them.
This also signals to consumers of the public GraphQL API that the field is not
@ -586,7 +586,7 @@ To deprecate a schema item in GraphQL:
See also:
- [Aliasing and deprecating mutations](#aliasing-and-deprecating-mutations).
- [Marking schema items as Alpha](#marking-schema-items-as-alpha).
- [Marking schema items as Alpha](#mark-schema-items-as-alpha).
- [How to filter Kibana for queries that used deprecated fields](graphql_guide/monitoring.md#queries-that-used-a-deprecated-field).
### Create a deprecation issue
@ -746,18 +746,22 @@ aware of the support.
The documentation will mention that the old Global ID style is now deprecated.
## Marking schema items as Alpha
## Mark schema items as alpha
Fields, arguments, enum values, and mutations can be marked as being in
You can mark fields, arguments, enum values, and mutations as
[alpha](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha-beta-ga).
An item marked as "alpha" is exempt from the deprecation process and can be removed
at any time without notice.
An item marked as alpha is exempt from the deprecation process and can be removed
at any time without notice. This way, you can add an item that might be
subject to change and is not ready for public use.
This leverages GraphQL deprecations to cause the schema item to appear as deprecated,
and will be described as being in "alpha" in our generated docs and its GraphQL description.
You can only mark a new item as alpha. This item then appears as deprecated
in our generated docs and its GraphQL description. You cannot mark an existing item
as alpha because it's already public.
To mark a schema item as being in "alpha", use the `alpha:` keyword.
Like all deprecated schema items, you can test an `alpha` field in [GraphiQL](../api/graphql/index.md#graphiql). However, be aware that the GraphiQL autocomplete editor doesn't suggest deprecated fields.
To mark a schema item as alpha, use the `alpha:` keyword.
You must provide the `milestone:` that introduced the alpha item.
For example:
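The example itself is outside this hunk; a sketch of a field definition using the keyword, with a hypothetical field name and milestone, looks like this:

```ruby
field :my_alpha_field, GraphQL::Types::String,
      null: true,
      alpha: { milestone: '15.3' },
      description: 'An example alpha field.'
```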

View File

@ -176,7 +176,7 @@ See the [test engineering process](https://about.gitlab.com/handbook/engineering
1. I have tested this MR in [all supported browsers](../install/requirements.md#supported-web-browsers), or determined that this testing is not needed.
1. I have confirmed that this change is [backwards compatible across updates](multi_version_compatibility.md), or I have decided that this does not apply.
1. I have properly separated EE content from FOSS, or this MR is FOSS only.
- [Where should EE code go?](ee_features.md#separation-of-ee-code)
- [Where should EE code go?](ee_features.md)
1. I have considered that existing data may be surprisingly varied. For example, a new model validation can break existing records. Consider making validation on existing data optional rather than required if you haven't confirmed that existing data will pass validation.
##### Performance, reliability, and availability

View File

@ -6,8 +6,10 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Guidelines for implementing Enterprise Edition features
- **Write the code and the tests.**: As with any code, EE features should have
good test coverage to prevent regressions.
- **Place code in `ee/`**: Put all Enterprise Edition (EE) code inside the `ee/` top-level directory. The
rest of the code must be as close to the Community Edition (CE) files as possible.
- **Write tests**: As with any code, EE features must have good test coverage to prevent
regressions. All `ee/` code must have corresponding tests in `ee/`.
- **Write documentation.**: Add documentation to the `doc/` directory. Describe
the feature and include screenshots, if applicable. Indicate [what editions](documentation/styleguide/index.md#product-tier-badges)
the feature applies to.
@ -16,54 +18,72 @@ info: To determine the technical writer assigned to the Stage/Group associated w
[EE features list](https://about.gitlab.com/features/).
<!-- markdownlint-enable MD044 -->
## Act as SaaS
## Implement a new EE feature
When developing locally, there are times when you need your instance to act like the SaaS version of the product.
In those instances, you can simulate SaaS by exporting an environment variable as seen below:
If you're developing a GitLab Starter, GitLab Premium, or GitLab Ultimate licensed feature, use these steps to
add your new feature or extend it.
```shell
export GITLAB_SIMULATE_SAAS=1
```
GitLab license features are added to [`ee/app/models/gitlab_subscriptions/features.rb`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/app/models/gitlab_subscriptions/features.rb). To determine how
to modify this file, first discuss how your feature fits into our licensing with your Product Manager.
There are many ways to pass an environment variable to your local GitLab instance.
For example, you can create a `env.runit` file in the root of your GDK with the above snippet.
Use the following questions to guide you:
## Act as CE when unlicensed
1. Is this a new feature, or are you extending an existing licensed feature?
- If your feature already exists, you don't have to modify `features.rb`, but you
must locate the existing feature identifier to [guard it](#guard-your-ee-feature).
- If this is a new feature, decide on an identifier, such as `my_feature_name`, to add to the
`features.rb` file.
1. Is this a **GitLab Starter**, **GitLab Premium**, or **GitLab Ultimate** feature?
- Based on the plan you choose to use the feature in, add the feature identifier to `STARTER_FEATURES`,
`PREMIUM_FEATURES`, or `ULTIMATE_FEATURES`.
1. Will this feature be available globally (system-wide at the GitLab instance level)?
- Features such as [Geo](../administration/geo/index.md) and
[Database Load Balancing](../administration/postgresql/database_load_balancing.md) are used by the entire instance
and cannot be restricted to individual user namespaces. These features are defined in the instance license.
Add these features to `GLOBAL_FEATURES`.
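Putting the answers to these questions together, registering a new identifier is a one-line change in `ee/app/models/gitlab_subscriptions/features.rb`. A sketch for a Premium feature, with hypothetical identifiers:

```ruby
# ee/app/models/gitlab_subscriptions/features.rb (excerpt; identifiers are hypothetical)
PREMIUM_FEATURES = %i[
  existing_premium_feature
  my_feature_name
].freeze
```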
Since the implementation of
### Guard your EE feature
A licensed feature can only be available to licensed users. You must add a check or guard
to determine if users have access to the feature.
To guard your licensed feature:
1. Locate your feature identifier in `ee/app/models/gitlab_subscriptions/features.rb`.
1. Use the following methods, where `my_feature_name` is your feature
identifier:
- In a project context:
```ruby
my_project.licensed_feature_available?(:my_feature_name) # true if available for my_project
```
- In a group or user namespace context:
```ruby
my_group.licensed_feature_available?(:my_feature_name) # true if available for my_group
```
- For a global (system-wide) feature:
```ruby
License.feature_available?(:my_feature_name) # true if available in this instance
```
1. Optional. If your global feature is also available to namespaces with a paid plan, combine two
feature identifiers to allow both admins and group users. For example:
```ruby
License.feature_available?(:my_feature_name) || group.licensed_feature_available?(:my_feature_name_for_namespace) # Both admins and group members can see this EE feature
```
### Simulate a CE instance when unlicensed
After the implementation of
[GitLab CE features to work with unlicensed EE instance](https://gitlab.com/gitlab-org/gitlab/-/issues/2500)
GitLab Enterprise Edition should work like GitLab Community Edition
when no license is active. So EE features always should be guarded by
`project.feature_available?` or `group.licensed_feature_available?` (or
`License.feature_available?` if it is a system-wide feature).
Frontend features should be guarded by pushing a flag from the backend by [using `push_licensed_feature`](licensed_feature_availability.md#restricting-frontend-features), and checked using `this.glFeatures.someFeature` in the frontend. For example:
```html
<script>
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
export default {
mixins: [glFeatureFlagMixin()],
components: {
EEComponent: () => import('ee_component/components/test.vue'),
},
computed: {
shouldRenderComponent() {
return this.glFeatures.myEEFeature;
}
},
};
</script>
<template>
<div>
<ee-component v-if="shouldRenderComponent"/>
</div>
</template>
```
Look in `ee/app/models/license.rb` for the names of the licensed features.
GitLab Enterprise Edition works like GitLab Community Edition
when no license is active.
CE specs should remain untouched as much as possible and extra specs
should be added for EE. Licensed features can be stubbed using the
@ -74,7 +94,7 @@ setting the [`FOSS_ONLY` environment variable](https://gitlab.com/gitlab-org/git
to something that evaluates as `true`. The same works for running tests
(for example `FOSS_ONLY=1 yarn jest`).
### Running feature specs as CE
#### Run feature specs as CE
When running [feature specs](testing_guide/best_practices.md#system--feature-tests)
as CE, you should ensure that the edition of backend and frontend match.
@ -98,7 +118,28 @@ To do so:
bin/rspec spec/features/<path_to_your_spec>
```
## CI pipelines in a FOSS context
### Simulate a SaaS instance
If you're developing locally and need your instance to act like the SaaS version of the product,
you can simulate SaaS by exporting an environment variable:
```shell
export GITLAB_SIMULATE_SAAS=1
```
There are many ways to pass an environment variable to your local GitLab instance.
For example, you can create a `env.runit` file in the root of your GDK with the above snippet.
#### Allow use of licensed EE feature
To enable plans per namespace, turn on the `Allow use of licensed EE features` option on the settings page.
This makes licensed EE features available to projects only if the project namespace's plan includes the feature,
or if the project is public. To enable it:
1. If you are developing locally, follow the steps in [Simulate a SaaS instance](#simulate-a-saas-instance) to make the option available.
1. Visit Admin > Settings > General > "Account and limit" and enable "Allow use of licensed EE features".
### Run CI pipelines in a FOSS context
By default, merge request pipelines for development run in an EE-context only. If you are
developing features that differ between FOSS and EE, you may wish to run pipelines in a
@ -108,10 +149,7 @@ To run pipelines in both contexts, add the `~"pipeline:run-as-if-foss"` label to
See the [As-if-FOSS jobs](pipelines.md#as-if-foss-jobs) pipelines documentation for more information.
## Separation of EE code
All EE code should be put inside the `ee/` top-level directory. The
rest of the code should be as close to the CE files as possible.
## Separation of EE code in the backend
### EE-only features
@ -144,7 +182,7 @@ To test an EE class that doesn't exist in CE, create the spec file as you normal
would in the `ee/spec` directory, but without the second `ee/` subdirectory.
For example, a class `ee/app/models/vulnerability.rb` would have its tests in `ee/spec/models/vulnerability_spec.rb`.
### EE features based on CE features
### Extend CE features with EE backend code
For features that build on existing CE features, write a module in the `EE`
namespace and inject it in the CE class, on the last line of the file that the
@ -633,7 +671,7 @@ might need different strategies to extend it. To apply different strategies
easily, we would use `extend ActiveSupport::Concern` in the EE module.
Put the EE module files following
[EE features based on CE features](#ee-features-based-on-ce-features).
[Extend CE features with EE backend code](#extend-ce-features-with-ee-backend-code).
#### EE API routes
@ -1009,9 +1047,9 @@ FactoryBot.define do
end
```
## JavaScript code in `assets/javascripts/`
## Separation of EE code in the frontend
To separate EE-specific JS-files we should also move the files into an `ee` folder.
To separate EE-specific JS-files, move the files into an `ee` folder.
For example there can be an
`app/assets/javascripts/protected_branches/protected_branches_bundle.js` and an
@ -1032,40 +1070,123 @@ import bundle from 'ee/protected_branches/protected_branches_bundle.js';
import bundle from 'ee_else_ce/protected_branches/protected_branches_bundle.js';
```
See the frontend guide [performance section](fe_guide/performance.md) for
information on managing page-specific JavaScript within EE.
### Add new EE-only features in the frontend
## Vue code in `assets/javascript`
If the feature being developed is not present in CE, add your entry point in
`ee/`. For example:
### script tag
```shell
# Add HTML element to mount
ee/app/views/admin/geo/designs/index.html.haml
#### Child Component only used in EE
# Init the application
ee/app/assets/javascripts/pages/ee_only_feature/index.js
To separate Vue template differences we should [import the components asynchronously](https://v2.vuejs.org/v2/guide/components-dynamic-async.html#Async-Components).
# Mount the feature
ee/app/assets/javascripts/ee_only_feature/index.js
```
Doing this allows us to load the correct component in EE while in CE
we can load an empty component that renders nothing. This code **should**
exist in the CE repository as well as the EE repository.
Feature guarding with `licensed_feature_available?` and `License.feature_available?` typically
occurs in the controller, as described in the [backend guide](#ee-only-features).
#### Test EE-only features
Add your EE tests to `ee/spec/frontend/` following the same directory structure you use for CE.
### Extend CE features with EE frontend code
Use the [`push_licensed_feature`](#guard-your-ee-feature) to guard frontend features that extend
existing views:
```ruby
# ee/app/controllers/ee/admin/my_controller.rb
before_action do
  push_licensed_feature(:my_feature_name) # for global features
end
```
```ruby
# ee/app/controllers/ee/group/my_controller.rb
before_action do
  push_licensed_feature(:my_feature_name, @group) # for group pages
end
```
```ruby
# ee/app/controllers/ee/project/my_controller.rb
before_action do
  push_licensed_feature(:my_feature_name, @group) # for group pages
  push_licensed_feature(:my_feature_name, @project) # for project pages
end
```
Verify your feature appears in `gon.licensed_features` in the browser console.
#### Extend Vue applications with EE Vue components
EE licensed features that enhance existing functionality in the UI add new
elements or interactions to your Vue application as components.
To separate template differences, use a child EE component to separate Vue template differences.
You must import the EE component [asynchronously](https://vuejs.org/v2/guide/components-dynamic-async.html#Async-Components).
This allows GitLab to load the correct component in EE, while in CE GitLab loads an empty component
that renders nothing. This code **must** exist in the CE repository, in addition to the EE repository.
A CE component acts as the entry point to your EE feature. To add an EE component,
locate it in the `ee/` directory and import it with `import('ee_component/...')`:
```html
<script>
// app/assets/javascripts/feature/components/form.vue
export default {
mixins: [glFeatureFlagMixin()],
components: {
EEComponent: () => import('ee_component/components/test.vue'),
// Import an EE component from CE
MyEeComponent: () => import('ee_component/components/my_ee_component.vue'),
},
};
</script>
<template>
<div>
<ee-component />
<!-- ... -->
<my-ee-component/>
<!-- ... -->
</div>
</template>
```
#### For JS code that is EE only, like props, computed properties, methods, etc
Check `glFeatures` to ensure that the Vue components are guarded. The components render only when
the license is present.
- Please do not use mixins unless ABSOLUTELY NECESSARY. Please try to find an alternative pattern.
```html
<script>
// ee/app/assets/javascripts/feature/components/special_component.vue
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
export default {
mixins: [glFeatureFlagMixin()],
computed: {
shouldRenderComponent() {
// Comes from gon.licensed_features as a camel-case version of `my_feature_name`
return this.glFeatures.myFeatureName;
}
},
};
</script>
<template>
<div v-if="shouldRenderComponent">
<!-- EE licensed feature UI -->
</div>
</template>
```
NOTE:
Do not use mixins unless ABSOLUTELY NECESSARY. Try to find an alternative pattern.
##### Recommended alternative approach (named/scoped slots)
@ -1138,11 +1259,65 @@ export default {
**For EE components that need different results for the same computed values, we can pass in props to the CE wrapper as seen in the example.**
- **EE Child components**
- Since we are using the asynchronous loading to check which component to load, we'd still use the component's name, check [this example](#child-component-only-used-in-ee).
- Since we are using the asynchronous loading to check which component to load, we'd still use the component's name, check [this example](#extend-vue-applications-with-ee-vue-components).
- **EE extra HTML**
- For templates that have extra HTML in EE, move it into a new component and use the `ee_else_ce` dynamic import, as shown in the sketch below.
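A minimal sketch of that pattern, assuming a hypothetical `extra_info.vue` component whose EE version contains the extra markup and whose CE version renders nothing:
```javascript
// Script section of the CE component that needs extra HTML in EE.
// `feature/components/extra_info.vue` is a hypothetical path; in CE the
// `ee_else_ce` alias resolves to the CE file, in EE to the EE file.
export default {
  components: {
    ExtraInfo: () => import('ee_else_ce/feature/components/extra_info.vue'),
  },
};
```
The CE template can then render `<extra-info />` unconditionally; only the EE build ships the extra markup.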
#### Extend other JS code
To extend JS files, complete the following steps:
1. Use the `ee_else_ce` helper; EE-only code must live inside the `ee/` folder.
1. Create an EE file that contains only the EE code, and extend the CE counterpart.
1. For code inside functions that can't be extended, move the code to a new file and use the `ee_else_ce` helper:
```javascript
import eeCode from 'ee_else_ce/ee_code';
function test() {
const test = 'a';
eeCode();
return test;
}
```
In some cases, you'll need to extend other logic in your application. To extend your JS
modules, create an EE version of the file and extend it with your custom logic:
```javascript
// app/assets/javascripts/feature/utils.js
export const myFunction = () => {
// ...
};
// ... other CE functions ...
```
```javascript
// ee/app/assets/javascripts/feature/utils.js
import {
myFunction as ceMyFunction,
} from '~/feature/utils';
/* eslint-disable import/export */
// Export same utils as CE
export * from '~/feature/utils';
// Only override `myFunction`
export const myFunction = () => {
const result = ceMyFunction();
// add EE feature logic
return result;
};
/* eslint-enable import/export */
```
#### Testing modules using EE/CE aliases
When writing Frontend tests, if the module under test imports other modules with `ee_else_ce/...` and these modules are also needed by the relevant test, then the relevant test **must** import these modules with `ee_else_ce/...`. This avoids unexpected EE or FOSS failures, and helps ensure the EE behaves like CE when it is unlicensed.
@ -1185,29 +1360,7 @@ describe('ComponentUnderTest', () => {
```
### Non Vue Files
For regular JS files, the approach is similar.
1. We keep using the [`ee_else_ce`](../development/ee_features.md#javascript-code-in-assetsjavascripts) helper, this means that EE only code should be inside the `ee/` folder.
1. An EE file should be created with the EE only code, and it should extend the CE counterpart.
1. For code inside functions that can't be extended, the code should be moved into a new file and we should use `ee_else_ce` helper:
#### Example
```javascript
import eeCode from 'ee_else_ce/ee_code';
function test() {
const test = 'a';
eeCode();
return test;
}
```
## SCSS code in `assets/stylesheets`
#### SCSS code in `assets/stylesheets`
If a component you're adding styles for is limited to EE, it is better to have a
separate SCSS file in an appropriate directory within `app/assets/stylesheets`.
@ -1218,9 +1371,8 @@ styles are usually kept in a stylesheet that is common for both CE and EE, and i
to isolate such a ruleset from the rest of the CE rules (along with adding a comment describing it)
to avoid conflicts during CE to EE merge.
### Bad
```scss
// Bad
.section-body {
.section-title {
background: $gl-header-color;
@ -1234,9 +1386,8 @@ to avoid conflicts during CE to EE merge.
}
```
### Good
```scss
// Good
.section-body {
.section-title {
background: $gl-header-color;
@ -1252,7 +1403,7 @@ to avoid conflicts during CE to EE merge.
// EE-specific end
```
## GitLab-svgs
### GitLab-svgs
Conflicts in `app/assets/images/icons.json` or `app/assets/images/icons.svg` can
be resolved simply by regenerating those assets with

View File

@ -71,7 +71,7 @@ Consult these topics for information on contributing to specific GitLab features
- [Developing against interacting components or features](interacting_components.md)
- [Manage feature flags](feature_flags/index.md)
- [Licensed feature availability](licensed_feature_availability.md)
- [Implementing Enterprise Edition features](ee_features.md)
- [Accessing session data](session.md)
- [How to dump production data to staging](db_dump.md)
- [Geo development](geo.md)

View File

@ -62,7 +62,8 @@ For features that build on existing CE/EE features, a module in the `JH`
namespace injected in the CE/EE class/module is needed. This aligns with
what we're doing with EE features.
See [EE features based on CE features](ee_features.md#ee-features-based-on-ce-features) for more details.
See [Extend CE features with EE backend code](ee_features.md#extend-ce-features-with-ee-backend-code)
for more details.
For example, to prepend a module into the `User` class you would use
the following approach:

View File

@ -1,72 +1,11 @@
---
stage: Fulfillment
group: Provision
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
redirect_to: 'ee_features.md'
remove_date: '2022-10-08'
---
# Licensed feature availability
This document was moved to [another location](ee_features.md).
As of GitLab 9.4, we've been supporting a simplified version of licensed
feature availability checks via `ee/app/models/license.rb`, both for
on-premise or GitLab.com plans and features.
## Restricting features scoped by namespaces or projects
GitLab.com plans are persisted on user groups and namespaces, therefore, if you're adding a
feature such as [Related issues](../user/project/issues/related_issues.md) or
[Service Desk](../user/project/service_desk.md),
it should be restricted on namespace scope.
1. Add the feature symbol on `STARTER_FEATURES`, `PREMIUM_FEATURES`, or `ULTIMATE_FEATURES` constants in
`ee/app/models/gitlab_subscriptions/features.rb`.
1. Check using:
```ruby
project.licensed_feature_available?(:feature_symbol)
```
or
```ruby
group.licensed_feature_available?(:feature_symbol)
```
For projects, `licensed_feature_available` delegates to its associated `namespace`.
## Restricting global features (instance)
However, for features such as [Geo](../administration/geo/index.md) and
[Database Load Balancing](../administration/postgresql/database_load_balancing.md), which cannot be restricted
to only a subset of projects or namespaces, the check is made directly in
the instance license.
1. Add the feature symbol to `STARTER_FEATURES`, `PREMIUM_FEATURES` or `ULTIMATE_FEATURES` constants in
`ee/app/models/gitlab_subscriptions/features.rb`.
1. Add the same feature symbol to `GLOBAL_FEATURES`.
1. Check using:
```ruby
License.feature_available?(:feature_symbol)
```
## Restricting frontend features
To restrict frontend features based on the license, use `push_licensed_feature`.
The frontend can then access this via `this.glFeatures`:
```ruby
before_action do
push_licensed_feature(:feature_symbol)
# or by project/namespace
push_licensed_feature(:feature_symbol, project)
end
```
## Allow use of licensed EE features
To enable plans per namespace turn on the `Allow use of licensed EE features` option from the settings page.
This will make licensed EE features available to projects only if the project namespace's plan includes the feature
or if the project is public. To enable it:
1. If you are developing locally, follow the steps in [simulate SaaS](ee_features.md#act-as-saas) to make the option available.
1. Select Admin > Settings > General > "Account and limit" and enable "Allow use of licensed EE features".
<!-- This redirect file can be deleted after <2022-10-08>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->
<!-- Redirects that point to docs in a different project or site (link is not relative and starts with `https:`) expire in one year. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->

View File

@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Downgrading from EE to CE
If you ever decide to downgrade your Enterprise Edition back to the
Community Edition, there are a few steps you need take beforehand. On Omnibus GitLab
Community Edition, there are a few steps you need to take beforehand. On Omnibus GitLab
installations, these steps are made before installing the CE package on top of
the current EE package. On installations from source, they are done before
you change remotes and fetch the latest CE code.

View File

@ -166,7 +166,7 @@ install, and upgrade your Docker-based GitLab installation:
GitLab:
```shell
docker-compose up -d
docker compose up -d
```
NOTE:
@ -510,8 +510,8 @@ To upgrade GitLab that was [installed using Docker Compose](#install-gitlab-usin
1. Download the newest release and upgrade your GitLab instance:
```shell
docker-compose pull
docker-compose up -d
docker compose pull
docker compose up -d
```
If you have used [tags](#use-tagged-versions-of-gitlab) instead, you'll need

View File

@ -16,6 +16,8 @@ On this page we detail several best practices to improve performance with these
It's *strongly* recommended in any Git system that binary or blob files (for example, packages, audio, video, graphics, etc.) are stored as Large File Storage (LFS) objects. In such a setup, the objects are stored elsewhere, such as in Object Storage, and this can reduce the repository size significantly, thus improving performance.
To analyze if the repository has these sorts of objects, it's recommended to run [`git-sizer`](https://github.com/github/git-sizer) to get a detailed analysis. This tool shows in detail what makes up the repository as well as highlights any areas of concern.
Refer to the [Git LFS documentation for more information](../../../topics/git/lfs/index.md).
## Gitaly Pack Objects Cache
@ -32,7 +34,7 @@ In these types of setups it's recommended that the GitLab environment used match
## Gitaly Cluster
Gitaly Cluster can notably improve large repository performance as it holds multiple replicas of the repository across several nodes. As a result, Gitaly Cluster can load balance read requests against those repositories and is also fault tolerant.
Gitaly Cluster can notably improve large repository performance as it holds multiple replicas of the repository across several nodes. As a result, Gitaly Cluster can load balance read requests against those repositories and is also fault-tolerant.
It's recommended for large repositories; however, Gitaly Cluster is a large solution with additional setup and management complexity. Refer to the [Gitaly Cluster documentation for more information](../../../administration/gitaly/index.md), specifically the [Before deploying Gitaly Cluster](../../../administration/gitaly/index.md#before-deploying-gitaly-cluster) section.

View File

@ -17,7 +17,7 @@ module Gitlab
dast performance browser_performance load_performance license_scanning metrics lsif
dotenv terraform accessibility
requirements coverage_fuzzing api_fuzzing cluster_image_scanning
coverage_report].freeze
coverage_report cyclonedx].freeze
attributes ALLOWED_KEYS
@ -48,6 +48,7 @@ module Gitlab
validates :terraform, array_of_strings_or_string: true
validates :accessibility, array_of_strings_or_string: true
validates :requirements, array_of_strings_or_string: true
validates :cyclonedx, array_of_strings_or_string: true
end
end

View File

@ -33,11 +33,11 @@ module Gitlab
# fetched from YML config file.
MAILBOX_SPECIFIC_CONFIGS = {
incoming_email: {
queue: 'email_receiver',
queue: 'default',
worker: 'EmailReceiverWorker'
},
service_desk_email: {
queue: 'service_desk_email_receiver',
queue: 'default',
worker: 'ServiceDeskEmailReceiverWorker'
}
}.freeze

View File

@ -32,7 +32,6 @@ module Gitlab
issues_edit
pipeline_authoring
quickactions
search
user_packages
].freeze
@ -43,6 +42,7 @@ module Gitlab
importer
incident_management_alerts
pipeline_authoring
search
secure
snippets
source_code

View File

@ -24,14 +24,6 @@
category: search
redis_slot: search
aggregation: weekly
- name: i_search_advanced
category: search
redis_slot: search
aggregation: weekly
- name: i_search_paid
category: search
redis_slot: search
aggregation: weekly
- name: wiki_action
category: source_code
aggregation: daily

View File

@ -33475,12 +33475,6 @@ msgstr ""
msgid "RightSidebar|Issue email"
msgstr ""
msgid "RightSidebar|adding a"
msgstr ""
msgid "RightSidebar|deleting the"
msgstr ""
msgid "Roadmap"
msgstr ""
@ -39372,6 +39366,9 @@ msgstr ""
msgid "There are no archived test cases"
msgstr ""
msgid "There are no changed labels"
msgstr ""
msgid "There are no changes"
msgstr ""
@ -39549,9 +39546,6 @@ msgstr ""
msgid "There was a problem updating the keep latest artifacts setting."
msgstr ""
msgid "There was an error %{message} to-do item."
msgstr ""
msgid "There was an error adding a To Do."
msgstr ""
@ -45487,6 +45481,9 @@ msgstr ""
msgid "assign yourself"
msgstr ""
msgid "associated parent is confidential and can not have non-confidential children."
msgstr ""
msgid "at"
msgstr ""
@ -45567,6 +45564,9 @@ msgstr ""
msgid "can't reference a branch that does not exist"
msgstr ""
msgid "cannot assign a non-confidential work item to a confidential parent. Make the work item confidential and try again."
msgstr ""
msgid "cannot be a date in the past"
msgstr ""
@ -45981,6 +45981,9 @@ msgstr ""
msgid "compliance violation has already been recorded"
msgstr ""
msgid "confidential parent can not be used if there are non-confidential children."
msgstr ""
msgid "contacts can only be added to root groups"
msgstr ""

View File

@ -196,7 +196,7 @@
"yaml": "^2.0.0-10"
},
"devDependencies": {
"@gitlab/eslint-plugin": "13.1.0",
"@gitlab/eslint-plugin": "14.0.0",
"@gitlab/stylelint-config": "4.1.0",
"@graphql-eslint/eslint-plugin": "3.10.6",
"@testing-library/dom": "^7.16.2",
@ -211,7 +211,7 @@
"cheerio": "^1.0.0-rc.9",
"commander": "^2.20.3",
"custom-jquery-matchers": "^2.1.0",
"eslint": "7.32.0",
"eslint": "8.19.0",
"eslint-import-resolver-jest": "3.0.2",
"eslint-import-resolver-webpack": "0.13.2",
"eslint-plugin-no-jquery": "2.7.0",

View File

@ -5,7 +5,7 @@ FactoryBot.define do
email { generate(:email) }
name { generate(:name) }
username { generate(:username) }
password { "12345678" }
password { User.random_password }
role { 'software_developer' }
confirmed_at { Time.now }
confirmation_token { nil }

View File

@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe 'Cluster agent registration', :js do
let_it_be(:project) { create(:project, :custom_repo, files: { '.gitlab/agents/example-agent-1/config.yaml' => '' }) }
let_it_be(:current_user) { create(:user, maintainer_projects: [project]) }
let_it_be(:token) { Devise.friendly_token }
before do
allow(Gitlab::Kas).to receive(:enabled?).and_return(true)
@ -18,7 +19,7 @@ RSpec.describe 'Cluster agent registration', :js do
allow(client).to receive(:get_connected_agents).and_return([])
end
allow(Devise).to receive(:friendly_token).and_return('example-agent-token')
allow(Devise).to receive(:friendly_token).and_return(token)
sign_in(current_user)
visit project_clusters_path(project)
@ -33,7 +34,7 @@ RSpec.describe 'Cluster agent registration', :js do
click_button('Register')
expect(page).to have_content('You cannot see this token again after you close this window.')
expect(page).to have_content('example-agent-token')
expect(page).to have_content(token)
expect(page).to have_content('helm upgrade --install')
expect(page).to have_content('example-agent-2')

View File

@ -16,7 +16,6 @@ RSpec.describe "Populate new pipeline CI variables with url params", :js do
it "var[key1]=value1 populates env_var variable correctly" do
page.within(all("[data-testid='ci-variable-row']")[0]) do
expect(find("[data-testid='pipeline-form-ci-variable-type']").value).to eq('env_var')
expect(find("[data-testid='pipeline-form-ci-variable-key']").value).to eq('key1')
expect(find("[data-testid='pipeline-form-ci-variable-value']").value).to eq('value1')
end
@ -24,7 +23,6 @@ RSpec.describe "Populate new pipeline CI variables with url params", :js do
it "file_var[key2]=value2 populates file variable correctly" do
page.within(all("[data-testid='ci-variable-row']")[1]) do
expect(find("[data-testid='pipeline-form-ci-variable-type']").value).to eq('file')
expect(find("[data-testid='pipeline-form-ci-variable-key']").value).to eq('key2')
expect(find("[data-testid='pipeline-form-ci-variable-value']").value).to eq('value2')
end

View File

@ -22,14 +22,12 @@ class MockObserver {
takeRecords() {}
// eslint-disable-next-line camelcase
$_triggerObserve(node, { entry = {}, options = {} } = {}) {
if (this.$_hasObserver(node, options)) {
this.$_cb([{ target: node, ...entry }]);
}
}
// eslint-disable-next-line camelcase
$_hasObserver(node, options = {}) {
return this.$_observers.some(
([obvNode, obvOptions]) => node === obvNode && isMatch(options, obvOptions),

View File

@ -88,11 +88,11 @@ describe('Environment item', () => {
it('should render user avatar with link to profile', () => {
const avatarLink = findLastDeploymentAvatarLink();
const avatar = findLastDeploymentAvatar();
const { username, avatar_url, web_url } = environment.last_deployment.user;
const { username, avatar_url: src, web_url } = environment.last_deployment.user;
expect(avatarLink.attributes('href')).toBe(web_url);
expect(avatar.props()).toMatchObject({
src: avatar_url,
src,
entityName: username,
});
expect(avatar.attributes()).toMatchObject({
@ -127,12 +127,12 @@ describe('Environment item', () => {
it('should render the build ID and user', () => {
const avatarLink = findUpcomingDeploymentAvatarLink();
const avatar = findUpcomingDeploymentAvatar();
const { username, avatar_url, web_url } = environment.upcoming_deployment.user;
const { username, avatar_url: src, web_url } = environment.upcoming_deployment.user;
expect(findUpcomingDeploymentContent().text()).toMatchInterpolatedText('#27 by');
expect(avatarLink.attributes('href')).toBe(web_url);
expect(avatar.props()).toMatchObject({
src: avatar_url,
src,
entityName: username,
});
});
@ -166,12 +166,12 @@ describe('Environment item', () => {
it('should still render the build ID and user avatar', () => {
const avatarLink = findUpcomingDeploymentAvatarLink();
const avatar = findUpcomingDeploymentAvatar();
const { username, avatar_url, web_url } = environment.upcoming_deployment.user;
const { username, avatar_url: src, web_url } = environment.upcoming_deployment.user;
expect(findUpcomingDeploymentContent().text()).toMatchInterpolatedText('#27 by');
expect(avatarLink.attributes('href')).toBe(web_url);
expect(avatar.props()).toMatchObject({
src: avatar_url,
src,
entityName: username,
});
});

View File

@ -145,11 +145,16 @@ describe('Frequent Items App Component', () => {
expect(findFrequentItemsList().props()).toEqual(
expect.objectContaining({
items: mockSearchedProjects.data.map(
({ avatar_url, web_url, name_with_namespace, ...item }) => ({
({
avatar_url: avatarUrl,
web_url: webUrl,
name_with_namespace: namespace,
...item
}) => ({
...item,
avatarUrl: avatar_url,
webUrl: web_url,
namespace: name_with_namespace,
avatarUrl,
webUrl,
namespace,
}),
),
namespace: TEST_NAMESPACE,

View File

@ -24,7 +24,7 @@ describe('incident utils', () => {
describe('get event icon', () => {
it('should display a matching event icon name', () => {
['comment', 'issues', 'status'].forEach((name) => {
['comment', 'issues', 'label', 'status'].forEach((name) => {
expect(getEventIcon(name)).toBe(name);
});
});

View File

@ -511,10 +511,10 @@ describe('mapToDashboardViewModel', () => {
describe('uniqMetricsId', () => {
[
{ input: { id: 1 }, expected: `${NOT_IN_DB_PREFIX}_1` },
{ input: { metric_id: 2 }, expected: '2_undefined' },
{ input: { metric_id: 2, id: 21 }, expected: '2_21' },
{ input: { metric_id: 22, id: 1 }, expected: '22_1' },
{ input: { metric_id: 'aaa', id: '_a' }, expected: 'aaa__a' },
{ input: { metricId: 2 }, expected: '2_undefined' },
{ input: { metricId: 2, id: 21 }, expected: '2_21' },
{ input: { metricId: 22, id: 1 }, expected: '22_1' },
{ input: { metricId: 'aaa', id: '_a' }, expected: 'aaa__a' },
].forEach(({ input, expected }) => {
it(`creates unique metric ID with ${JSON.stringify(input)}`, () => {
expect(uniqMetricsId(input)).toEqual(expected);

View File

@ -106,6 +106,7 @@ exports[`Code Coverage when fetching data is successful matches the snapshot 1`]
legendmaxtext="Max"
legendmintext="Min"
option="[object Object]"
responsive=""
thresholds=""
/>
</div>

View File

@ -39,6 +39,7 @@ describe('Pipeline New Form', () => {
const findSubmitButton = () => wrapper.find('[data-testid="run_pipeline_button"]');
const findVariableRows = () => wrapper.findAll('[data-testid="ci-variable-row"]');
const findRemoveIcons = () => wrapper.findAll('[data-testid="remove-ci-variable-row"]');
const findDropdowns = () => wrapper.findAll('[data-testid="pipeline-form-ci-variable-type"]');
const findKeyInputs = () => wrapper.findAll('[data-testid="pipeline-form-ci-variable-key"]');
const findValueInputs = () => wrapper.findAll('[data-testid="pipeline-form-ci-variable-value"]');
const findErrorAlert = () => wrapper.find('[data-testid="run-pipeline-error-alert"]');
@ -102,6 +103,8 @@ describe('Pipeline New Form', () => {
});
it('displays the correct values for the provided query params', async () => {
expect(findDropdowns().at(0).props('text')).toBe('Variable');
expect(findDropdowns().at(1).props('text')).toBe('File');
expect(findRefsDropdown().props('value')).toEqual({ shortName: 'tag-1' });
expect(findVariableRows()).toHaveLength(3);
});
@ -114,6 +117,7 @@ describe('Pipeline New Form', () => {
it('displays an empty variable for the user to fill out', async () => {
expect(findKeyInputs().at(2).element.value).toBe('');
expect(findValueInputs().at(2).element.value).toBe('');
expect(findDropdowns().at(2).props('text')).toBe('Variable');
});
it('does not display remove icon for last row', () => {

View File

@ -61,14 +61,14 @@ describe('Pipeline Url Component', () => {
describe('commit user avatar', () => {
it('renders when commit author exists', () => {
const pipelineBranch = mockPipelineBranch();
const { avatar_url, name, path } = pipelineBranch.pipeline.commit.author;
const { avatar_url: imgSrc, name, path } = pipelineBranch.pipeline.commit.author;
createComponent(pipelineBranch);
const component = wrapper.findComponent(UserAvatarLink);
expect(component.exists()).toBe(true);
expect(component.props()).toMatchObject({
imgSize: 16,
imgSrc: avatar_url,
imgSrc,
imgAlt: name,
linkHref: path,
tooltipText: name,

View File

@ -153,9 +153,9 @@ describe('Author Select', () => {
});
it('has the correct props', async () => {
const [{ avatar_url, username }] = authors;
const [{ avatar_url: avatarUrl, username }] = authors;
const result = {
avatarUrl: avatar_url,
avatarUrl,
secondaryText: username,
isChecked: true,
};

View File

@ -47,6 +47,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports do
:dotenv | 'build.dotenv'
:terraform | 'tfplan.json'
:accessibility | 'gl-accessibility.json'
:cyclonedx | 'gl-sbom.cdx.zip'
end
with_them do

View File

@ -246,7 +246,7 @@ RSpec.describe Gitlab::MailRoom do
redis_url: "localhost",
redis_db: 99,
namespace: "resque:gitlab",
queue: "email_receiver",
queue: "default",
worker: "EmailReceiverWorker",
sentinels: [{ host: "localhost", port: 1234 }]
}
@ -259,7 +259,7 @@ RSpec.describe Gitlab::MailRoom do
redis_url: "localhost",
redis_db: 99,
namespace: "resque:gitlab",
queue: "service_desk_email_receiver",
queue: "default",
worker: "ServiceDeskEmailReceiverWorker",
sentinels: [{ host: "localhost", port: 1234 }]
}

View File

@ -194,7 +194,7 @@ RSpec.describe Gitlab::SidekiqConfig do
queues = described_class.routing_queues
expect(queues).to match_array(%w[
default mailers high_urgency gitaly email_receiver service_desk_email_receiver
default mailers high_urgency gitaly
])
expect(queues).not_to include('not_exist')
end

View File

@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Board do
let(:project) { create(:project) }
let(:other_project) { create(:project) }
let_it_be(:project) { create(:project) }
let_it_be(:other_project) { create(:project) }
describe 'relationships' do
it { is_expected.to belong_to(:project) }

View File

@ -72,7 +72,7 @@ RSpec.describe Ci::Processable do
job_artifacts_network_referee job_artifacts_dotenv
job_artifacts_cobertura needs job_artifacts_accessibility
job_artifacts_requirements job_artifacts_coverage_fuzzing
job_artifacts_api_fuzzing terraform_state_versions].freeze
job_artifacts_api_fuzzing terraform_state_versions job_artifacts_cyclonedx].freeze
end
let(:ignore_accessors) do

View File

@ -126,7 +126,7 @@ RSpec.describe PersonalAccessToken, 'TokenAuthenticatable' do
end
end
let(:token_value) { 'token' }
let(:token_value) { Devise.friendly_token }
let(:token_digest) { Gitlab::CryptoHelper.sha256(token_value) }
let(:user) { create(:user) }
let(:personal_access_token) do

View File

@ -136,6 +136,54 @@ RSpec.describe Issue do
it { is_expected.to eq(false) }
end
end
describe 'confidentiality' do
let_it_be(:project) { create(:project) }
context 'when parent and child are confidential' do
let_it_be(:parent) { create(:work_item, confidential: true, project: project) }
let_it_be(:child) { create(:work_item, :task, confidential: true, project: project) }
let_it_be(:link) { create(:parent_link, work_item: child, work_item_parent: parent) }
it 'does not allow to make child not-confidential' do
issue = Issue.find(child.id)
issue.confidential = false
expect(issue).not_to be_valid
expect(issue.errors[:confidential])
.to include('associated parent is confidential and can not have non-confidential children.')
end
it 'allows to make parent not-confidential' do
issue = Issue.find(parent.id)
issue.confidential = false
expect(issue).to be_valid
end
end
context 'when parent and child are not-confidential' do
let_it_be(:parent) { create(:work_item, project: project) }
let_it_be(:child) { create(:work_item, :task, project: project) }
let_it_be(:link) { create(:parent_link, work_item: child, work_item_parent: parent) }
it 'does not allow to make parent confidential' do
issue = Issue.find(parent.id)
issue.confidential = true
expect(issue).not_to be_valid
expect(issue.errors[:confidential])
.to include('confidential parent can not be used if there are non-confidential children.')
end
it 'allows to make child confidential' do
issue = Issue.find(child.id)
issue.confidential = true
expect(issue).to be_valid
end
end
end
end
subject { create(:issue, project: reusable_project) }
@ -1671,4 +1719,20 @@ RSpec.describe Issue do
end
end
end
describe '#full_search' do
context 'when searching non-english terms' do
[
'abc 中文語',
'中文語cn',
'中文語'
].each do |term|
it 'adds extra where clause to match partial index' do
expect(described_class.full_search(term).to_sql).to include(
"AND (issues.title NOT SIMILAR TO '[\\u0000-\\u218F]*' OR issues.description NOT SIMILAR TO '[\\u0000-\\u218F]*')"
)
end
end
end
end
end

View File

@ -109,5 +109,61 @@ RSpec.describe WorkItem do
it { is_expected.to eq(false) }
end
end
describe 'confidentiality' do
let_it_be(:project) { create(:project) }
context 'when parent and child are confidential' do
let_it_be(:parent) { create(:work_item, confidential: true, project: project) }
let_it_be(:child) { create(:work_item, :task, confidential: true, project: project) }
let_it_be(:link) { create(:parent_link, work_item: child, work_item_parent: parent) }
it 'does not allow to make child non-confidential' do
child.confidential = false
expect(child).not_to be_valid
expect(child.errors[:confidential])
.to include('associated parent is confidential and can not have non-confidential children.')
end
it 'allows to make parent non-confidential' do
parent.confidential = false
expect(parent).to be_valid
end
end
context 'when parent and child are non-confidential' do
let_it_be(:parent) { create(:work_item, project: project) }
let_it_be(:child) { create(:work_item, :task, project: project) }
let_it_be(:link) { create(:parent_link, work_item: child, work_item_parent: parent) }
it 'does not allow to make parent confidential' do
parent.confidential = true
expect(parent).not_to be_valid
expect(parent.errors[:confidential])
.to include('confidential parent can not be used if there are non-confidential children.')
end
it 'allows to make child confidential' do
child.confidential = true
expect(child).to be_valid
end
end
context 'when creating new child' do
let_it_be(:child) { build(:work_item, project: project) }
it 'does not allow to set confidential parent' do
child.work_item_parent = create(:work_item, confidential: true, project: project)
expect(child).not_to be_valid
expect(child.errors[:confidential])
.to include('associated parent is confidential and can not have non-confidential children.')
end
end
end
end
end

View File

@ -69,6 +69,70 @@ RSpec.describe WorkItems::ParentLink do
expect(link1).to be_valid
end
end
context 'when setting confidentiality' do
using RSpec::Parameterized::TableSyntax
where(:confidential_parent, :confidential_child, :valid) do
false | false | true
true | true | true
false | true | true
true | false | false
end
with_them do
before do
issue.confidential = confidential_parent
task1.confidential = confidential_child
end
it 'validates if child confidentiality is compatible with parent' do
link = build(:parent_link, work_item_parent: issue, work_item: task1)
expect(link.valid?).to eq(valid)
end
end
end
end
end
context 'with confidential work items' do
let_it_be(:project) { create(:project) }
let_it_be(:confidential_child) { create(:work_item, :task, confidential: true, project: project) }
let_it_be(:putlic_child) { create(:work_item, :task, project: project) }
let_it_be(:confidential_parent) { create(:work_item, confidential: true, project: project) }
let_it_be(:public_parent) { create(:work_item, project: project) }
describe '.has_public_children?' do
subject { described_class.has_public_children?(public_parent.id) }
context 'with confidential child' do
let_it_be(:link) { create(:parent_link, work_item_parent: public_parent, work_item: confidential_child) }
it { is_expected.to be_falsey }
context 'with also public child' do
let_it_be(:link) { create(:parent_link, work_item_parent: public_parent, work_item: putlic_child) }
it { is_expected.to be_truthy }
end
end
end
describe '.has_confidential_parent?' do
subject { described_class.has_confidential_parent?(confidential_child.id) }
context 'with confidential parent' do
let_it_be(:link) { create(:parent_link, work_item_parent: confidential_parent, work_item: confidential_child) }
it { is_expected.to be_truthy }
end
context 'with public parent' do
let_it_be(:link) { create(:parent_link, work_item_parent: public_parent, work_item: confidential_child) }
it { is_expected.to be_falsey }
end
end
end
end

View File

@ -21,7 +21,7 @@ RSpec.describe WorkItems::Widgets::Hierarchy do
end
describe '#parent' do
let_it_be(:parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item_parent) }
let_it_be(:parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item_parent).reload }
subject { described_class.new(parent_link.work_item).parent }
@ -45,8 +45,8 @@ RSpec.describe WorkItems::Widgets::Hierarchy do
end
describe '#children' do
let_it_be(:parent_link1) { create(:parent_link, work_item_parent: work_item_parent, work_item: task) }
let_it_be(:parent_link2) { create(:parent_link, work_item_parent: work_item_parent) }
let_it_be(:parent_link1) { create(:parent_link, work_item_parent: work_item_parent, work_item: task).reload }
let_it_be(:parent_link2) { create(:parent_link, work_item_parent: work_item_parent).reload }
subject { described_class.new(work_item_parent).children }

View File

@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe API::Branches do
let_it_be(:user) { create(:user) }
let(:project) { create(:project, :repository, creator: user, path: 'my.project') }
let(:project) { create(:project, :repository, creator: user, path: 'my.project', create_branch: 'ends-with.txt') }
let(:guest) { create(:user).tap { |u| project.add_guest(u) } }
let(:branch_name) { 'feature' }
let(:branch_sha) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' }
@ -17,7 +17,6 @@ RSpec.describe API::Branches do
before do
project.add_maintainer(user)
project.repository.add_branch(user, 'ends-with.txt', branch_sha)
stub_feature_flags(branch_list_keyset_pagination: false)
end

View File

@ -3,14 +3,14 @@
require 'spec_helper'
RSpec.describe API::Members do
let(:maintainer) { create(:user, username: 'maintainer_user') }
let(:maintainer2) { create(:user, username: 'user-with-maintainer-role') }
let(:developer) { create(:user) }
let(:access_requester) { create(:user) }
let(:stranger) { create(:user) }
let(:user_with_minimal_access) { create(:user) }
let_it_be(:maintainer) { create(:user, username: 'maintainer_user') }
let_it_be(:maintainer2) { create(:user, username: 'user-with-maintainer-role') }
let_it_be(:developer) { create(:user) }
let_it_be(:access_requester) { create(:user) }
let_it_be(:stranger) { create(:user) }
let_it_be(:user_with_minimal_access) { create(:user) }
let(:project) do
let_it_be(:project, refind: true) do
create(:project, :public, creator_id: maintainer.id, group: create(:group, :public)) do |project|
project.add_maintainer(maintainer)
project.add_developer(developer, current_user: maintainer)
@ -18,7 +18,7 @@ RSpec.describe API::Members do
end
end
let!(:group) do
let_it_be(:group, refind: true) do
create(:group, :public) do |group|
group.add_owner(maintainer)
group.add_developer(developer, maintainer)

View File

@ -0,0 +1,79 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Ci::Runners::BulkDeleteRunnersService, '#execute' do
subject { described_class.new(**service_args).execute }
let(:service_args) { { runners: runners_arg } }
let(:runners_arg) { }
context 'with runners specified' do
let!(:instance_runner) { create(:ci_runner) }
let!(:group_runner) { create(:ci_runner, :group) }
let!(:project_runner) { create(:ci_runner, :project) }
shared_examples 'a service deleting runners in bulk' do
it 'destroys runners', :aggregate_failures do
expect { subject }.to change { Ci::Runner.count }.by(-2)
is_expected.to eq({ deleted_count: 2, deleted_ids: [instance_runner.id, project_runner.id] })
expect(instance_runner[:errors]).to be_nil
expect(project_runner[:errors]).to be_nil
expect { project_runner.runner_projects.first.reload }.to raise_error(ActiveRecord::RecordNotFound)
expect { group_runner.reload }.not_to raise_error
expect { instance_runner.reload }.to raise_error(ActiveRecord::RecordNotFound)
expect { project_runner.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
context 'with some runners already deleted' do
before do
instance_runner.destroy!
end
let(:runners_arg) { [instance_runner.id, project_runner.id] }
it 'destroys runners and returns only deleted runners', :aggregate_failures do
expect { subject }.to change { Ci::Runner.count }.by(-1)
is_expected.to eq({ deleted_count: 1, deleted_ids: [project_runner.id] })
expect(instance_runner[:errors]).to be_nil
expect(project_runner[:errors]).to be_nil
expect { project_runner.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
end
context 'with too many runners specified' do
before do
stub_const("#{described_class}::RUNNER_LIMIT", 1)
end
it 'deletes only first RUNNER_LIMIT runners' do
expect { subject }.to change { Ci::Runner.count }.by(-1)
is_expected.to eq({ deleted_count: 1, deleted_ids: [instance_runner.id] })
end
end
end
context 'with runners specified as relation' do
let(:runners_arg) { Ci::Runner.not_group_type }
include_examples 'a service deleting runners in bulk'
end
context 'with runners specified as array of IDs' do
let(:runners_arg) { Ci::Runner.not_group_type.ids }
include_examples 'a service deleting runners in bulk'
end
context 'with no arguments specified' do
let(:runners_arg) { nil }
it 'returns 0 deleted runners' do
is_expected.to eq({ deleted_count: 0, deleted_ids: [] })
end
end
end
end

View File

@ -244,5 +244,88 @@ RSpec.describe IncidentManagement::TimelineEvents::CreateService do
it_behaves_like 'successfully created timeline event'
end
describe '.change_labels' do
subject(:execute) do
described_class.change_labels(incident, current_user, added_labels: added, removed_labels: removed)
end
let_it_be(:labels) { create_list(:label, 4, project: project) }
let(:expected_action) { 'label' }
context 'when there are neither added nor removed labels' do
let(:added) { [] }
let(:removed) { [] }
it 'responds with error', :aggregate_failures do
expect(execute).to be_error
expect(execute.message).to eq(_('There are no changed labels'))
end
it 'does not create timeline event' do
expect { execute }.not_to change { incident.incident_management_timeline_events.count }
end
end
context 'when there are only added labels' do
let(:added) { [labels[0], labels[1]] }
let(:removed) { [] }
let(:expected_note) { "@#{current_user.username} added #{added.map(&:to_reference).join(' ')} labels" }
it_behaves_like 'successfully created timeline event'
end
context 'when there are only removed labels' do
let(:added) { [] }
let(:removed) { [labels[2], labels[3]] }
let(:expected_note) { "@#{current_user.username} removed #{removed.map(&:to_reference).join(' ')} labels" }
it_behaves_like 'successfully created timeline event'
end
context 'when there are both added and removed labels' do
let(:added) { [labels[0], labels[1]] }
let(:removed) { [labels[2], labels[3]] }
let(:expected_note) do
added_note = "added #{added.map(&:to_reference).join(' ')} labels"
removed_note = "removed #{removed.map(&:to_reference).join(' ')} labels"
"@#{current_user.username} #{added_note} and #{removed_note}"
end
it_behaves_like 'successfully created timeline event'
end
context 'when there is a single added and single removed labels' do
let(:added) { [labels[0]] }
let(:removed) { [labels[3]] }
let(:expected_note) do
added_note = "added #{added.first.to_reference} label"
removed_note = "removed #{removed.first.to_reference} label"
"@#{current_user.username} #{added_note} and #{removed_note}"
end
it_behaves_like 'successfully created timeline event'
end
context 'when feature flag is disabled' do
let(:added) { [labels[0], labels[1]] }
let(:removed) { [labels[2], labels[3]] }
before do
stub_feature_flags(incident_timeline_events_from_labels: false)
end
it 'does not create timeline event' do
expect { execute }.not_to change { incident.incident_management_timeline_events.count }
end
end
end
end
end

View File

@ -5,11 +5,40 @@ require 'spec_helper'
RSpec.describe ResourceEvents::ChangeLabelsService do
let_it_be(:project) { create(:project) }
let_it_be(:author) { create(:user) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:incident) { create(:incident, project: project) }
let(:resource) { create(:issue, project: project) }
let(:resource) { issue }
describe '.change_labels' do
subject { described_class.new(resource, author).execute(added_labels: added, removed_labels: removed) }
describe '#execute' do
shared_examples 'creating timeline events' do
context 'when resource is not an incident' do
let(:resource) { issue }
it 'does not call create timeline events service' do
expect(IncidentManagement::TimelineEvents::CreateService).not_to receive(:change_labels)
change_labels
end
end
context 'when resource is an incident' do
let(:resource) { incident }
it 'calls create timeline events service with correct attributes' do
expect(IncidentManagement::TimelineEvents::CreateService)
.to receive(:change_labels)
.with(resource, author, added_labels: added, removed_labels: removed)
.and_call_original
change_labels
end
end
end
subject(:change_labels) do
described_class.new(resource, author).execute(added_labels: added, removed_labels: removed)
end
let_it_be(:labels) { create_list(:label, 2, project: project) }
@ -20,9 +49,9 @@ RSpec.describe ResourceEvents::ChangeLabelsService do
end
it 'expires resource note etag cache' do
expect_any_instance_of(Gitlab::EtagCaching::Store)
.to receive(:touch)
.with("/#{resource.project.namespace.to_param}/#{resource.project.to_param}/noteable/issue/#{resource.id}/notes")
expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(
"/#{resource.project.namespace.to_param}/#{resource.project.to_param}/noteable/issue/#{resource.id}/notes"
)
described_class.new(resource, author).execute(added_labels: [labels[0]])
end
@ -32,10 +61,12 @@ RSpec.describe ResourceEvents::ChangeLabelsService do
let(:removed) { [] }
it 'creates new label event' do
expect { subject }.to change { resource.resource_label_events.count }.from(0).to(1)
expect { change_labels }.to change { resource.resource_label_events.count }.from(0).to(1)
expect_label_event(resource.resource_label_events.first, labels[0], 'add')
end
it_behaves_like 'creating timeline events'
end
context 'when removing a label' do
@ -43,10 +74,12 @@ RSpec.describe ResourceEvents::ChangeLabelsService do
let(:removed) { [labels[1]] }
it 'creates new label event' do
expect { subject }.to change { resource.resource_label_events.count }.from(0).to(1)
expect { change_labels }.to change { resource.resource_label_events.count }.from(0).to(1)
expect_label_event(resource.resource_label_events.first, labels[1], 'remove')
end
it_behaves_like 'creating timeline events'
end
context 'when both adding and removing labels' do
@ -55,8 +88,10 @@ RSpec.describe ResourceEvents::ChangeLabelsService do
it 'creates all label events in a single query' do
expect(ApplicationRecord).to receive(:legacy_bulk_insert).once.and_call_original
expect { subject }.to change { resource.resource_label_events.count }.from(0).to(2)
expect { change_labels }.to change { resource.resource_label_events.count }.from(0).to(2)
end
it_behaves_like 'creating timeline events'
end
describe 'usage data' do
@ -67,7 +102,7 @@ RSpec.describe ResourceEvents::ChangeLabelsService do
it 'tracks changed labels' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_label_changed_action)
subject
change_labels
end
end
@ -75,9 +110,10 @@ RSpec.describe ResourceEvents::ChangeLabelsService do
let(:resource) { create(:merge_request, source_project: project) }
it 'does not track changed labels' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_label_changed_action)
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter)
.not_to receive(:track_issue_label_changed_action)
subject
change_labels
end
end
end

View File

@ -3,7 +3,7 @@
require 'pathname'
# This script checks if the package-and-qa job should trigger downstream pipelines to run the QA suite.
# This script checks if the code changes justify running the QA suite.
#
# It assumes the first argument is a directory of files containing diffs of changes from an MR
# (e.g., created by tooling/bin/find_change_diffs). It exits with a success code if there are no diffs, or if the diffs
@ -11,14 +11,14 @@ require 'pathname'
#
# The script will abort (exit code 1) if the argument is missing.
#
# The following condition will result in a failure code (2), indicating that package-and-qa should not run:
# The following condition will result in a failure code (2), indicating that QA tests should not run:
#
# - If the changes only include tests being put in quarantine
abort("ERROR: Please specify the directory containing MR diffs.") if ARGV.empty?
diffs_dir = Pathname.new(ARGV.shift).expand_path
# Run package-and-qa if there are no diffs. E.g., in scheduled pipelines
# Run QA tests if there are no diffs. E.g., in scheduled pipelines
exit 0 if diffs_dir.glob('**/*').empty?
files_count = 0
@ -35,11 +35,11 @@ diffs_dir.glob('**/*').each do |path|
quarantine_specs_count += 1 if path.read.match?(/^\+.*, quarantine:/)
end
# Run package-and-qa if there are no specs. E.g., when the MR changes QA framework files.
# Run QA tests if there are no specs. E.g., when the MR changes QA framework files.
exit 0 if specs_count == 0
# Skip package-and-qa if there are only specs being put in quarantine.
# Skip QA tests if there are only specs being put in quarantine.
exit 2 if quarantine_specs_count == specs_count && quarantine_specs_count == files_count
# Run package-and-qa under any other circumstances. E.g., if there are specs being put in quarantine but there are also
# Run QA tests under any other circumstances. E.g., if there are specs being put in quarantine but there are also
# other changes that might need to be tested.

186
yarn.lock
View File

@ -38,13 +38,6 @@
tslib "^2.3.0"
zen-observable-ts "^1.2.0"
"@babel/code-frame@7.12.11":
version "7.12.11"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.11.tgz#f4ad435aa263db935b8f10f2c552d23fb716a63f"
integrity sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==
dependencies:
"@babel/highlight" "^7.10.4"
"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.7":
version "7.16.7"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.16.7.tgz#44416b6bd7624b998f5b1af5d470856c40138789"
@ -300,7 +293,7 @@
"@babel/traverse" "^7.18.2"
"@babel/types" "^7.18.2"
"@babel/highlight@^7.10.4", "@babel/highlight@^7.16.7":
"@babel/highlight@^7.16.7":
version "7.16.10"
resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.16.10.tgz#744f2eb81579d6eea753c227b0f570ad785aba88"
integrity sha512-5FnTQLSLswEj6IkgVw5KusNUUFY9ZGqe/TRFnP/BKYHYgfh7tc+C7mwiy95/yNP7Dh9x580Vv8r7u7ZfTBFxdw==
@ -1006,19 +999,19 @@
ts-node "^9"
tslib "^2"
"@eslint/eslintrc@^0.4.3":
version "0.4.3"
resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.3.tgz#9e42981ef035beb3dd49add17acb96e8ff6f394c"
integrity sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==
"@eslint/eslintrc@^1.3.0":
version "1.3.0"
resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-1.3.0.tgz#29f92c30bb3e771e4a2048c95fa6855392dfac4f"
integrity sha512-UWW0TMTmk2d7hLcWD1/e2g5HDM/HQ3csaLSqXCfqwh4uNDuNqlaKWXmEsL4Cs41Z0KnILNvwbHAah3C2yt06kw==
dependencies:
ajv "^6.12.4"
debug "^4.1.1"
espree "^7.3.0"
globals "^13.9.0"
ignore "^4.0.6"
debug "^4.3.2"
espree "^9.3.2"
globals "^13.15.0"
ignore "^5.2.0"
import-fresh "^3.2.1"
js-yaml "^3.13.1"
minimatch "^3.0.4"
js-yaml "^4.1.0"
minimatch "^3.1.2"
strip-json-comments "^3.1.1"
"@gitlab/at.js@1.5.7":
@ -1026,10 +1019,10 @@
resolved "https://registry.yarnpkg.com/@gitlab/at.js/-/at.js-1.5.7.tgz#1ee6f838cc4410a1d797770934df91d90df8179e"
integrity sha512-c6ySRK/Ma7lxwpIVbSAF3P+xiTLrNTGTLRx4/pHK111AdFxwgUwrYF6aVZFXvmG65jHOJHoa0eQQ21RW6rm0Rg==
"@gitlab/eslint-plugin@13.1.0":
version "13.1.0"
resolved "https://registry.yarnpkg.com/@gitlab/eslint-plugin/-/eslint-plugin-13.1.0.tgz#d0698251e601d8732b6db994c8ebd8c37be404fa"
integrity sha512-/eWbTomAipyp/nbaNLq8aU1IcqG029+clvUOgkckm704q38G0/r6KHgcuXvxWj2erzwcEveEXXyilZAaTQquRA==
"@gitlab/eslint-plugin@14.0.0":
version "14.0.0"
resolved "https://registry.yarnpkg.com/@gitlab/eslint-plugin/-/eslint-plugin-14.0.0.tgz#dc841d83521afdaf86afc943f94ad11d19c37b7c"
integrity sha512-idTZojh+0lvKqdPcNlY4w3c9+qCTS0WYBrFkagWRifUYBqXGHbWw8CRfxCMYZSA3GnFRuxXhodpilRFq2YzURw==
dependencies:
"@babel/core" "^7.17.0"
"@babel/eslint-parser" "^7.17.0"
@ -1255,19 +1248,19 @@
resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.1.0.tgz#0eee6373e11418bfe0b5638f654df7a4ca6a3950"
integrity sha512-wYn6r8zVZyQJ6rQaALBEln5B1pzxb9shV5Ef97kTvn6yVGrqyXVnDqnU24MXnFubR+rZjBY9NWuxX3FB2sTsjg==
"@humanwhocodes/config-array@^0.5.0":
version "0.5.0"
resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.5.0.tgz#1407967d4c6eecd7388f83acf1eaf4d0c6e58ef9"
integrity sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==
"@humanwhocodes/config-array@^0.9.2":
version "0.9.5"
resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.9.5.tgz#2cbaf9a89460da24b5ca6531b8bbfc23e1df50c7"
integrity sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw==
dependencies:
"@humanwhocodes/object-schema" "^1.2.0"
"@humanwhocodes/object-schema" "^1.2.1"
debug "^4.1.1"
minimatch "^3.0.4"
"@humanwhocodes/object-schema@^1.2.0":
version "1.2.0"
resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.0.tgz#87de7af9c231826fdd68ac7258f77c429e0e5fcf"
integrity sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==
"@humanwhocodes/object-schema@^1.2.1":
version "1.2.1"
resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45"
integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==
"@iarna/toml@^2.2.5":
version "2.2.5"
@ -2467,10 +2460,10 @@ acorn-globals@^6.0.0:
acorn "^7.1.1"
acorn-walk "^7.1.1"
acorn-jsx@^5.3.1:
version "5.3.1"
resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.1.tgz#fc8661e11b7ac1539c47dbfea2e72b3af34d267b"
integrity sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng==
acorn-jsx@^5.3.2:
version "5.3.2"
resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937"
integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==
acorn-walk@^7.1.1:
version "7.2.0"
@ -2487,15 +2480,15 @@ acorn@^6.3.0, acorn@^6.4.1:
resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.2.tgz#35866fd710528e92de10cf06016498e47e39e1e6"
integrity sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ==
acorn@^7.1.1, acorn@^7.4.0:
acorn@^7.1.1:
version "7.4.1"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa"
integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==
acorn@^8.0.4, acorn@^8.2.4, acorn@^8.7.0:
version "8.7.1"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30"
integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==
acorn@^8.0.4, acorn@^8.2.4, acorn@^8.7.1:
version "8.8.0"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8"
integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w==
agent-base@6:
version "6.0.2"
@ -2556,11 +2549,6 @@ ajv@^8.0.0, ajv@^8.0.1, ajv@^8.10.0, ajv@^8.8.0:
require-from-string "^2.0.2"
uri-js "^4.2.2"
ansi-colors@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348"
integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==
ansi-escapes@^4.2.1:
version "4.3.2"
resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e"
@ -4536,7 +4524,7 @@ debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.9:
dependencies:
ms "2.0.0"
debug@4, debug@^4.0.0, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4:
debug@4, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4:
version "4.3.4"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
@ -4962,13 +4950,6 @@ enhanced-resolve@^4.5.0:
memory-fs "^0.5.0"
tapable "^1.0.0"
enquirer@^2.3.5:
version "2.3.6"
resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d"
integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==
dependencies:
ansi-colors "^4.1.1"
entities@^2.0.0, entities@~2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/entities/-/entities-2.1.0.tgz#992d3129cf7df6870b96c57858c249a120f8b8b5"
@ -5244,7 +5225,7 @@ eslint-scope@^5.0.0, eslint-scope@^5.1.1:
esrecurse "^4.3.0"
estraverse "^4.1.1"
eslint-scope@^7.0.0:
eslint-scope@^7.0.0, eslint-scope@^7.1.1:
version "7.1.1"
resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642"
integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==
@ -5252,7 +5233,7 @@ eslint-scope@^7.0.0:
esrecurse "^4.3.0"
estraverse "^5.2.0"
eslint-utils@^2.0.0, eslint-utils@^2.1.0:
eslint-utils@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27"
integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==
@ -5266,7 +5247,7 @@ eslint-utils@^3.0.0:
dependencies:
eslint-visitor-keys "^2.0.0"
eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3.0:
eslint-visitor-keys@^1.1.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e"
integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==
@ -5281,68 +5262,54 @@ eslint-visitor-keys@^3.1.0, eslint-visitor-keys@^3.3.0:
resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826"
integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==
eslint@7.32.0:
version "7.32.0"
resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.32.0.tgz#c6d328a14be3fb08c8d1d21e12c02fdb7a2a812d"
integrity sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA==
eslint@8.19.0:
version "8.19.0"
resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.19.0.tgz#7342a3cbc4fbc5c106a1eefe0fd0b50b6b1a7d28"
integrity sha512-SXOPj3x9VKvPe81TjjUJCYlV4oJjQw68Uek+AM0X4p+33dj2HY5bpTZOgnQHcG2eAm1mtCU9uNMnJi7exU/kYw==
dependencies:
"@babel/code-frame" "7.12.11"
"@eslint/eslintrc" "^0.4.3"
"@humanwhocodes/config-array" "^0.5.0"
"@eslint/eslintrc" "^1.3.0"
"@humanwhocodes/config-array" "^0.9.2"
ajv "^6.10.0"
chalk "^4.0.0"
cross-spawn "^7.0.2"
debug "^4.0.1"
debug "^4.3.2"
doctrine "^3.0.0"
enquirer "^2.3.5"
escape-string-regexp "^4.0.0"
eslint-scope "^5.1.1"
eslint-utils "^2.1.0"
eslint-visitor-keys "^2.0.0"
espree "^7.3.1"
eslint-scope "^7.1.1"
eslint-utils "^3.0.0"
eslint-visitor-keys "^3.3.0"
espree "^9.3.2"
esquery "^1.4.0"
esutils "^2.0.2"
fast-deep-equal "^3.1.3"
file-entry-cache "^6.0.1"
functional-red-black-tree "^1.0.1"
glob-parent "^5.1.2"
globals "^13.6.0"
ignore "^4.0.6"
glob-parent "^6.0.1"
globals "^13.15.0"
ignore "^5.2.0"
import-fresh "^3.0.0"
imurmurhash "^0.1.4"
is-glob "^4.0.0"
js-yaml "^3.13.1"
js-yaml "^4.1.0"
json-stable-stringify-without-jsonify "^1.0.1"
levn "^0.4.1"
lodash.merge "^4.6.2"
minimatch "^3.0.4"
minimatch "^3.1.2"
natural-compare "^1.4.0"
optionator "^0.9.1"
progress "^2.0.0"
regexpp "^3.1.0"
semver "^7.2.1"
strip-ansi "^6.0.0"
regexpp "^3.2.0"
strip-ansi "^6.0.1"
strip-json-comments "^3.1.0"
table "^6.0.9"
text-table "^0.2.0"
v8-compile-cache "^2.0.3"
espree@^7.3.0, espree@^7.3.1:
version "7.3.1"
resolved "https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6"
integrity sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==
espree@^9.0.0, espree@^9.3.2:
version "9.3.2"
resolved "https://registry.yarnpkg.com/espree/-/espree-9.3.2.tgz#f58f77bd334731182801ced3380a8cc859091596"
integrity sha512-D211tC7ZwouTIuY5x9XnS0E9sWNChB7IYKX/Xp5eQj3nFXhqmiUDB9q27y76oFl8jTg3pXcQx/bpxMfs3CIZbA==
dependencies:
acorn "^7.4.0"
acorn-jsx "^5.3.1"
eslint-visitor-keys "^1.3.0"
espree@^9.0.0:
version "9.3.1"
resolved "https://registry.yarnpkg.com/espree/-/espree-9.3.1.tgz#8793b4bc27ea4c778c19908e0719e7b8f4115bcd"
integrity sha512-bvdyLmJMfwkV3NCRl5ZhJf22zBFo1y8bYh3VYb+bfzqNB4Je68P2sSuXyuFquzWLebHpNd2/d5uv7yoP9ISnGQ==
dependencies:
acorn "^8.7.0"
acorn-jsx "^5.3.1"
acorn "^8.7.1"
acorn-jsx "^5.3.2"
eslint-visitor-keys "^3.3.0"
esprima@^4.0.0, esprima@^4.0.1:
@@ -5956,6 +5923,13 @@ glob-parent@^5.1.1, glob-parent@^5.1.2, glob-parent@~5.1.2:
dependencies:
is-glob "^4.0.1"
glob-parent@^6.0.1:
version "6.0.2"
resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3"
integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==
dependencies:
is-glob "^4.0.3"
"glob@5 - 7", glob@^7.0.0, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@~7.2.0:
version "7.2.0"
resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023"
@@ -5997,10 +5971,10 @@ globals@^11.1.0:
resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e"
integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==
globals@^13.6.0, globals@^13.9.0:
version "13.9.0"
resolved "https://registry.yarnpkg.com/globals/-/globals-13.9.0.tgz#4bf2bf635b334a173fb1daf7c5e6b218ecdc06cb"
integrity sha512-74/FduwI/JaIrr1H8e71UbDE+5x7pIPs1C2rrwC52SszOo043CsWOZEMW7o2Y58xwm9b+0RBKDxY5n2sUpEFxA==
globals@^13.15.0:
version "13.16.0"
resolved "https://registry.yarnpkg.com/globals/-/globals-13.16.0.tgz#9be4aca28f311aaeb974ea54978ebbb5e35ce46a"
integrity sha512-A1lrQfpNF+McdPOnnFqY3kSN0AFTy485bTi1bkLk4mVPODIUEcSfhHgRqA+QdXPksrSTTztYXx37NFV+GpGk3Q==
dependencies:
type-fest "^0.20.2"
@@ -6491,11 +6465,6 @@ ignore-by-default@^1.0.1:
resolved "https://registry.yarnpkg.com/ignore-by-default/-/ignore-by-default-1.0.1.tgz#48ca6d72f6c6a3af00a9ad4ae6876be3889e2b09"
integrity sha1-SMptcvbGo68Aqa1K5odr44ieKwk=
ignore@^4.0.6:
version "4.0.6"
resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc"
integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==
ignore@^5.2.0, ignore@~5.2.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a"
@@ -9690,11 +9659,6 @@ process@^0.11.10:
resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182"
integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI=
progress@^2.0.0:
version "2.0.3"
resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8"
integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==
promise-inflight@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3"
@@ -10129,7 +10093,7 @@ regexp.prototype.flags@^1.4.3:
define-properties "^1.1.3"
functions-have-names "^1.2.2"
regexpp@^3.1.0:
regexpp@^3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2"
integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==
@@ -10487,7 +10451,7 @@ semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0:
resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==
semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5:
semver@^7.3.2, semver@^7.3.4, semver@^7.3.5:
version "7.3.5"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7"
integrity sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==
@@ -11185,7 +11149,7 @@ sync-fetch@^0.3.1:
buffer "^5.7.0"
node-fetch "^2.6.1"
table@^6.0.9, table@^6.8.0:
table@^6.8.0:
version "6.8.0"
resolved "https://registry.yarnpkg.com/table/-/table-6.8.0.tgz#87e28f14fa4321c3377ba286f07b79b281a3b3ca"
integrity sha512-s/fitrbVeEyHKFa7mFdkuQMWlH1Wgw/yEXMt5xACT4ZpzWFluehAxRtUUQKPuWhaLAWhFcVx6w3oC8VKaUfPGA==