Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-10-18 18:09:22 +00:00
parent b556d0fab7
commit e4220eecca
73 changed files with 918 additions and 293 deletions

View file

@ -29,7 +29,7 @@ cache-workhorse:
variables:
WEBPACK_REPORT: "false"
script:
- !reference [.yarn-install, script]
- yarn_install_script
- export GITLAB_ASSETS_HASH=$(bundle exec rake gitlab:assets:hash_sum)
- source scripts/gitlab_component_helpers.sh
- 'gitlab_assets_archive_doesnt_exist || { echoinfo "INFO: Exiting early as package exists."; exit 0; }'

View file

@ -1,13 +1,3 @@
.yarn-install:
script:
- source scripts/utils.sh
- yarn_install_script
.storybook-yarn-install:
script:
- source scripts/utils.sh
- run_timed_command "retry yarn run storybook:install --frozen-lockfile"
.compile-assets-base:
extends:
- .default-retry
@ -98,20 +88,22 @@ update-assets-compile-test-cache:
update-yarn-cache:
extends:
- .default-retry
- .default-utils-before_script
- .yarn-cache-push
- .shared:rules:update-cache
stage: prepare
script:
- !reference [.yarn-install, script]
- yarn_install_script
update-storybook-yarn-cache:
extends:
- .default-retry
- .default-utils-before_script
- .storybook-yarn-cache-push
- .shared:rules:update-cache
stage: prepare
script:
- !reference [.storybook-yarn-install, script]
- yarn_install_script
.frontend-fixtures-base:
extends:
@ -194,7 +186,7 @@ graphql-schema-dump as-if-foss:
SETUP_DB: "false"
before_script:
- !reference [.default-before_script, before_script]
- !reference [.yarn-install, script]
- yarn_install_script
stage: test
.jest-base:
@ -261,6 +253,7 @@ jest-integration:
coverage-frontend:
extends:
- .default-retry
- .default-utils-before_script
- .yarn-cache
- .frontend:rules:coverage-frontend
needs:
@ -269,9 +262,8 @@ coverage-frontend:
- job: "jest minimal"
optional: true
stage: post-test
before_script:
- !reference [.yarn-install, script]
script:
- yarn_install_script
- run_timed_command "yarn node scripts/frontend/merge_coverage_frontend.js"
# Removing the individual coverage results, as we just merged them.
- if ls coverage-frontend/jest-* > /dev/null 2>&1; then
@ -291,12 +283,13 @@ coverage-frontend:
.qa-frontend-node:
extends:
- .default-retry
- .default-utils-before_script
- .qa-frontend-node-cache
- .frontend:rules:qa-frontend-node
stage: test
needs: []
script:
- !reference [.yarn-install, script]
- yarn_install_script
- run_timed_command "retry yarn run webpack-prod"
qa-frontend-node:14:
@ -316,6 +309,7 @@ qa-frontend-node:latest:
webpack-dev-server:
extends:
- .default-retry
- .default-utils-before_script
- .yarn-cache
- .frontend:rules:default-frontend-jobs
stage: test
@ -324,7 +318,7 @@ webpack-dev-server:
WEBPACK_MEMORY_TEST: "true"
WEBPACK_VENDOR_DLL: "true"
script:
- !reference [.yarn-install, script]
- yarn_install_script
- run_timed_command "retry yarn webpack-vendor"
- run_timed_command "node --expose-gc node_modules/.bin/webpack-dev-server --config config/webpack.config.js"
artifacts:
@ -336,13 +330,14 @@ webpack-dev-server:
bundle-size-review:
extends:
- .default-retry
- .default-utils-before_script
- .assets-compile-cache
- .frontend:rules:bundle-size-review
image: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images:danger
stage: test
needs: []
script:
- !reference [.yarn-install, script]
- yarn_install_script
- scripts/bundle_size_review
artifacts:
when: always
@ -380,8 +375,8 @@ startup-css-check as-if-foss:
- .frontend-test-base
- .storybook-yarn-cache
script:
- !reference [.storybook-yarn-install, script]
- yarn run storybook:build
- run_timed_command "retry yarn run storybook:install --frozen-lockfile"
- run_timed_command "yarn run storybook:build"
needs: ["graphql-schema-dump"]
compile-storybook:

View file

@ -8,13 +8,17 @@
- job_execution_timeout
- stuck_or_timeout_failure
.default-before_script:
.default-utils-before_script:
before_script:
- echo $FOSS_ONLY
- '[ "$FOSS_ONLY" = "1" ] && rm -rf ee/ qa/spec/ee/ qa/qa/specs/features/ee/ qa/qa/ee/ qa/qa/ee.rb'
- export GOPATH=$CI_PROJECT_DIR/.go
- mkdir -p $GOPATH
- source scripts/utils.sh
.default-before_script:
before_script:
- !reference [.default-utils-before_script, before_script]
- source scripts/prepare_build.sh
.ruby-gems-cache: &ruby-gems-cache

View file

@ -462,6 +462,12 @@ RSpec/FactoriesInMigrationSpecs:
- 'spec/lib/ee/gitlab/background_migration/**/*.rb'
- 'ee/spec/lib/ee/gitlab/background_migration/**/*.rb'
RSpec/FactoryBot/AvoidCreate:
Enabled: true
Include:
- 'spec/serializers/**/*.rb'
- 'ee/spec/serializers/**/*.rb'
Cop/IncludeSidekiqWorker:
Enabled: true
Exclude:

View file

@ -0,0 +1,211 @@
---
RSpec/FactoryBot/AvoidCreate:
Details: grace period
Exclude:
- 'ee/spec/serializers/analytics/cycle_analytics/stage_entity_spec.rb'
- 'ee/spec/serializers/analytics/cycle_analytics/value_stream_errors_serializer_spec.rb'
- 'ee/spec/serializers/audit_event_entity_spec.rb'
- 'ee/spec/serializers/audit_event_serializer_spec.rb'
- 'ee/spec/serializers/blocking_merge_request_entity_spec.rb'
- 'ee/spec/serializers/clusters/deployment_entity_spec.rb'
- 'ee/spec/serializers/clusters/environment_entity_spec.rb'
- 'ee/spec/serializers/clusters/environment_serializer_spec.rb'
- 'ee/spec/serializers/dashboard_environment_entity_spec.rb'
- 'ee/spec/serializers/dashboard_environments_project_entity_spec.rb'
- 'ee/spec/serializers/dashboard_environments_serializer_spec.rb'
- 'ee/spec/serializers/dashboard_operations_project_entity_spec.rb'
- 'ee/spec/serializers/dependency_entity_spec.rb'
- 'ee/spec/serializers/dependency_list_serializer_spec.rb'
- 'ee/spec/serializers/ee/blob_entity_spec.rb'
- 'ee/spec/serializers/ee/build_details_entity_spec.rb'
- 'ee/spec/serializers/ee/ci/job_entity_spec.rb'
- 'ee/spec/serializers/ee/deployment_entity_spec.rb'
- 'ee/spec/serializers/ee/environment_serializer_spec.rb'
- 'ee/spec/serializers/ee/group_child_entity_spec.rb'
- 'ee/spec/serializers/ee/issue_board_entity_spec.rb'
- 'ee/spec/serializers/ee/issue_entity_spec.rb'
- 'ee/spec/serializers/ee/issue_sidebar_basic_entity_spec.rb'
- 'ee/spec/serializers/ee/issue_sidebar_extras_entity_spec.rb'
- 'ee/spec/serializers/ee/merge_request_poll_cached_widget_entity_spec.rb'
- 'ee/spec/serializers/ee/note_entity_spec.rb'
- 'ee/spec/serializers/ee/user_serializer_spec.rb'
- 'ee/spec/serializers/environment_entity_spec.rb'
- 'ee/spec/serializers/epic_entity_spec.rb'
- 'ee/spec/serializers/epic_note_entity_spec.rb'
- 'ee/spec/serializers/epics/related_epic_entity_spec.rb'
- 'ee/spec/serializers/fork_namespace_entity_spec.rb'
- 'ee/spec/serializers/geo_project_registry_entity_spec.rb'
- 'ee/spec/serializers/incident_management/escalation_policy_entity_spec.rb'
- 'ee/spec/serializers/incident_management/oncall_schedule_entity_spec.rb'
- 'ee/spec/serializers/integrations/field_entity_spec.rb'
- 'ee/spec/serializers/integrations/jira_serializers/issue_detail_entity_spec.rb'
- 'ee/spec/serializers/integrations/jira_serializers/issue_entity_spec.rb'
- 'ee/spec/serializers/integrations/jira_serializers/issue_serializer_spec.rb'
- 'ee/spec/serializers/integrations/zentao_serializers/issue_entity_spec.rb'
- 'ee/spec/serializers/issuable_sidebar_extras_entity_spec.rb'
- 'ee/spec/serializers/issue_serializer_spec.rb'
- 'ee/spec/serializers/issues/linked_issue_feature_flag_entity_spec.rb'
- 'ee/spec/serializers/license_compliance/collapsed_comparer_entity_spec.rb'
- 'ee/spec/serializers/license_compliance/comparer_entity_spec.rb'
- 'ee/spec/serializers/licenses_list_entity_spec.rb'
- 'ee/spec/serializers/licenses_list_serializer_spec.rb'
- 'ee/spec/serializers/linked_feature_flag_issue_entity_spec.rb'
- 'ee/spec/serializers/member_entity_spec.rb'
- 'ee/spec/serializers/member_user_entity_spec.rb'
- 'ee/spec/serializers/merge_request_poll_widget_entity_spec.rb'
- 'ee/spec/serializers/merge_request_sidebar_basic_entity_spec.rb'
- 'ee/spec/serializers/merge_request_widget_entity_spec.rb'
- 'ee/spec/serializers/pipeline_serializer_spec.rb'
- 'ee/spec/serializers/productivity_analytics_merge_request_entity_spec.rb'
- 'ee/spec/serializers/project_mirror_entity_spec.rb'
- 'ee/spec/serializers/scim_oauth_access_token_entity_spec.rb'
- 'ee/spec/serializers/security/vulnerability_report_data_entity_spec.rb'
- 'ee/spec/serializers/status_page/incident_comment_entity_spec.rb'
- 'ee/spec/serializers/status_page/incident_entity_spec.rb'
- 'ee/spec/serializers/status_page/incident_serializer_spec.rb'
- 'ee/spec/serializers/vulnerabilities/feedback_entity_spec.rb'
- 'ee/spec/serializers/vulnerabilities/finding_entity_spec.rb'
- 'ee/spec/serializers/vulnerabilities/finding_reports_comparer_entity_spec.rb'
- 'ee/spec/serializers/vulnerabilities/finding_serializer_spec.rb'
- 'ee/spec/serializers/vulnerabilities/identifier_entity_spec.rb'
- 'ee/spec/serializers/vulnerabilities/request_entity_spec.rb'
- 'ee/spec/serializers/vulnerabilities/response_entity_spec.rb'
- 'ee/spec/serializers/vulnerabilities/scanner_entity_spec.rb'
- 'ee/spec/serializers/vulnerability_entity_spec.rb'
- 'ee/spec/serializers/vulnerability_note_entity_spec.rb'
- 'spec/serializers/access_token_entity_base_spec.rb'
- 'spec/serializers/analytics_build_entity_spec.rb'
- 'spec/serializers/analytics_build_serializer_spec.rb'
- 'spec/serializers/analytics_issue_entity_spec.rb'
- 'spec/serializers/analytics_issue_serializer_spec.rb'
- 'spec/serializers/analytics_merge_request_serializer_spec.rb'
- 'spec/serializers/analytics_summary_serializer_spec.rb'
- 'spec/serializers/base_discussion_entity_spec.rb'
- 'spec/serializers/blob_entity_spec.rb'
- 'spec/serializers/build_action_entity_spec.rb'
- 'spec/serializers/build_artifact_entity_spec.rb'
- 'spec/serializers/build_details_entity_spec.rb'
- 'spec/serializers/ci/dag_job_entity_spec.rb'
- 'spec/serializers/ci/dag_job_group_entity_spec.rb'
- 'spec/serializers/ci/dag_pipeline_entity_spec.rb'
- 'spec/serializers/ci/dag_pipeline_serializer_spec.rb'
- 'spec/serializers/ci/dag_stage_entity_spec.rb'
- 'spec/serializers/ci/downloadable_artifact_entity_spec.rb'
- 'spec/serializers/ci/downloadable_artifact_serializer_spec.rb'
- 'spec/serializers/ci/group_variable_entity_spec.rb'
- 'spec/serializers/ci/job_entity_spec.rb'
- 'spec/serializers/ci/job_serializer_spec.rb'
- 'spec/serializers/ci/lint/result_serializer_spec.rb'
- 'spec/serializers/ci/pipeline_entity_spec.rb'
- 'spec/serializers/ci/trigger_entity_spec.rb'
- 'spec/serializers/ci/variable_entity_spec.rb'
- 'spec/serializers/cluster_application_entity_spec.rb'
- 'spec/serializers/cluster_entity_spec.rb'
- 'spec/serializers/cluster_serializer_spec.rb'
- 'spec/serializers/clusters/kubernetes_error_entity_spec.rb'
- 'spec/serializers/commit_entity_spec.rb'
- 'spec/serializers/container_repositories_serializer_spec.rb'
- 'spec/serializers/container_repository_entity_spec.rb'
- 'spec/serializers/container_tag_entity_spec.rb'
- 'spec/serializers/context_commits_diff_entity_spec.rb'
- 'spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb'
- 'spec/serializers/deploy_keys/deploy_key_entity_spec.rb'
- 'spec/serializers/deployment_cluster_entity_spec.rb'
- 'spec/serializers/deployment_entity_spec.rb'
- 'spec/serializers/deployment_serializer_spec.rb'
- 'spec/serializers/diff_file_base_entity_spec.rb'
- 'spec/serializers/diff_file_entity_spec.rb'
- 'spec/serializers/diff_file_metadata_entity_spec.rb'
- 'spec/serializers/diff_viewer_entity_spec.rb'
- 'spec/serializers/diffs_entity_spec.rb'
- 'spec/serializers/diffs_metadata_entity_spec.rb'
- 'spec/serializers/discussion_diff_file_entity_spec.rb'
- 'spec/serializers/discussion_entity_spec.rb'
- 'spec/serializers/environment_entity_spec.rb'
- 'spec/serializers/environment_serializer_spec.rb'
- 'spec/serializers/environment_status_entity_spec.rb'
- 'spec/serializers/evidences/evidence_entity_spec.rb'
- 'spec/serializers/evidences/release_entity_spec.rb'
- 'spec/serializers/feature_flag_entity_spec.rb'
- 'spec/serializers/feature_flag_serializer_spec.rb'
- 'spec/serializers/feature_flag_summary_entity_spec.rb'
- 'spec/serializers/feature_flag_summary_serializer_spec.rb'
- 'spec/serializers/feature_flags_client_serializer_spec.rb'
- 'spec/serializers/fork_namespace_entity_spec.rb'
- 'spec/serializers/group_access_token_entity_spec.rb'
- 'spec/serializers/group_access_token_serializer_spec.rb'
- 'spec/serializers/group_child_entity_spec.rb'
- 'spec/serializers/group_child_serializer_spec.rb'
- 'spec/serializers/group_deploy_key_entity_spec.rb'
- 'spec/serializers/group_link/group_group_link_entity_spec.rb'
- 'spec/serializers/group_link/project_group_link_entity_spec.rb'
- 'spec/serializers/group_link/project_group_link_serializer_spec.rb'
- 'spec/serializers/impersonation_access_token_entity_spec.rb'
- 'spec/serializers/impersonation_access_token_serializer_spec.rb'
- 'spec/serializers/import/manifest_provider_repo_entity_spec.rb'
- 'spec/serializers/integrations/event_entity_spec.rb'
- 'spec/serializers/integrations/field_entity_spec.rb'
- 'spec/serializers/integrations/harbor_serializers/artifact_entity_spec.rb'
- 'spec/serializers/integrations/harbor_serializers/repository_entity_spec.rb'
- 'spec/serializers/integrations/harbor_serializers/tag_entity_spec.rb'
- 'spec/serializers/integrations/project_entity_spec.rb'
- 'spec/serializers/issuable_sidebar_extras_entity_spec.rb'
- 'spec/serializers/issue_board_entity_spec.rb'
- 'spec/serializers/issue_entity_spec.rb'
- 'spec/serializers/issue_serializer_spec.rb'
- 'spec/serializers/issue_sidebar_basic_entity_spec.rb'
- 'spec/serializers/jira_connect/app_data_serializer_spec.rb'
- 'spec/serializers/jira_connect/group_entity_spec.rb'
- 'spec/serializers/jira_connect/subscription_entity_spec.rb'
- 'spec/serializers/job_artifact_report_entity_spec.rb'
- 'spec/serializers/label_serializer_spec.rb'
- 'spec/serializers/lfs_file_lock_entity_spec.rb'
- 'spec/serializers/linked_project_issue_entity_spec.rb'
- 'spec/serializers/member_entity_spec.rb'
- 'spec/serializers/member_serializer_spec.rb'
- 'spec/serializers/member_user_entity_spec.rb'
- 'spec/serializers/merge_request_current_user_entity_spec.rb'
- 'spec/serializers/merge_request_diff_entity_spec.rb'
- 'spec/serializers/merge_request_for_pipeline_entity_spec.rb'
- 'spec/serializers/merge_request_metrics_helper_spec.rb'
- 'spec/serializers/merge_request_poll_cached_widget_entity_spec.rb'
- 'spec/serializers/merge_request_poll_widget_entity_spec.rb'
- 'spec/serializers/merge_request_serializer_spec.rb'
- 'spec/serializers/merge_request_sidebar_basic_entity_spec.rb'
- 'spec/serializers/merge_request_sidebar_extras_entity_spec.rb'
- 'spec/serializers/merge_request_user_entity_spec.rb'
- 'spec/serializers/merge_request_widget_commit_entity_spec.rb'
- 'spec/serializers/merge_request_widget_entity_spec.rb'
- 'spec/serializers/merge_requests/pipeline_entity_spec.rb'
- 'spec/serializers/namespace_basic_entity_spec.rb'
- 'spec/serializers/note_entity_spec.rb'
- 'spec/serializers/paginated_diff_entity_spec.rb'
- 'spec/serializers/personal_access_token_entity_spec.rb'
- 'spec/serializers/personal_access_token_serializer_spec.rb'
- 'spec/serializers/pipeline_details_entity_spec.rb'
- 'spec/serializers/pipeline_serializer_spec.rb'
- 'spec/serializers/project_access_token_entity_spec.rb'
- 'spec/serializers/project_access_token_serializer_spec.rb'
- 'spec/serializers/project_import_entity_spec.rb'
- 'spec/serializers/project_mirror_entity_spec.rb'
- 'spec/serializers/project_note_entity_spec.rb'
- 'spec/serializers/project_serializer_spec.rb'
- 'spec/serializers/prometheus_alert_entity_spec.rb'
- 'spec/serializers/release_serializer_spec.rb'
- 'spec/serializers/remote_mirror_entity_spec.rb'
- 'spec/serializers/review_app_setup_entity_spec.rb'
- 'spec/serializers/runner_entity_spec.rb'
- 'spec/serializers/serverless/domain_entity_spec.rb'
- 'spec/serializers/stage_entity_spec.rb'
- 'spec/serializers/stage_serializer_spec.rb'
- 'spec/serializers/suggestion_entity_spec.rb'
- 'spec/serializers/test_case_entity_spec.rb'
- 'spec/serializers/test_report_entity_spec.rb'
- 'spec/serializers/test_report_summary_entity_spec.rb'
- 'spec/serializers/test_suite_entity_spec.rb'
- 'spec/serializers/test_suite_summary_entity_spec.rb'
- 'spec/serializers/trigger_variable_entity_spec.rb'
- 'spec/serializers/user_entity_spec.rb'
- 'spec/serializers/user_serializer_spec.rb'
- 'spec/serializers/web_ide_terminal_entity_spec.rb'
- 'spec/serializers/web_ide_terminal_serializer_spec.rb'

View file

@ -1,21 +1,112 @@
<script>
import { GlPagination } from '@gitlab/ui';
import { redirectTo } from '~/lib/utils/url_utility';
import { buildUrlWithCurrentLocation } from '~/lib/utils/common_utils';
import { createAlert, VARIANT_DANGER } from '~/flash';
import { s__ } from '~/locale';
import axios from '~/lib/utils/axios_utils';
import MessagesTable from './messages_table.vue';
const PER_PAGE = 20;
export default {
name: 'BroadcastMessagesBase',
components: {
GlPagination,
MessagesTable,
},
props: {
page: {
type: Number,
required: true,
},
messagesCount: {
type: Number,
required: true,
},
messages: {
type: Array,
required: true,
},
},
i18n: {
deleteError: s__(
'BroadcastMessages|There was an issue deleting this message, please try again later.',
),
},
data() {
return {
currentPage: this.page,
totalMessages: this.messagesCount,
visibleMessages: this.messages.map((message) => ({
...message,
disable_delete: false,
})),
};
},
computed: {
hasVisibleMessages() {
return this.visibleMessages.length > 0;
},
},
watch: {
totalMessages(newVal, oldVal) {
// Pagination controls disappear when there is only
// one page worth of messages. Since we're relying on static data,
// this could hide messages on the next page, or leave the user
// stranded on page 2 when deleting the last message.
// Force a page reload to avoid this edge case.
if (newVal === PER_PAGE && oldVal === PER_PAGE + 1) {
redirectTo(this.buildPageUrl(1));
}
},
},
methods: {
buildPageUrl(newPage) {
return buildUrlWithCurrentLocation(`?page=${newPage}`);
},
async deleteMessage(messageId) {
const index = this.visibleMessages.findIndex((m) => m.id === messageId);
if (!index === -1) return;
const message = this.visibleMessages[index];
this.$set(this.visibleMessages, index, { ...message, disable_delete: true });
try {
await axios.delete(message.delete_path);
} catch (e) {
this.$set(this.visibleMessages, index, { ...message, disable_delete: false });
createAlert({ message: this.$options.i18n.deleteError, variant: VARIANT_DANGER });
return;
}
// Remove the message from the table
this.visibleMessages = this.visibleMessages.filter((m) => m.id !== messageId);
this.totalMessages -= 1;
},
},
};
</script>
<template>
<div>
<messages-table v-if="messages.length > 0" :messages="messages" />
<messages-table
v-if="hasVisibleMessages"
:messages="visibleMessages"
@delete-message="deleteMessage"
/>
<gl-pagination
v-model="currentPage"
:total-items="totalMessages"
:link-gen="buildPageUrl"
align="center"
/>
</div>
</template>

View file

@ -1,10 +1,23 @@
<script>
import MessagesTableRow from './messages_table_row.vue';
import { GlButton, GlTableLite, GlSafeHtmlDirective } from '@gitlab/ui';
import { __ } from '~/locale';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
const DEFAULT_TD_CLASSES = 'gl-vertical-align-middle!';
export default {
name: 'MessagesTable',
components: {
MessagesTableRow,
GlButton,
GlTableLite,
},
directives: {
SafeHtml: GlSafeHtmlDirective,
},
mixins: [glFeatureFlagsMixin()],
i18n: {
edit: __('Edit'),
delete: __('Delete'),
},
props: {
messages: {
@ -12,10 +25,89 @@ export default {
required: true,
},
},
computed: {
fields() {
if (this.glFeatures.roleTargetedBroadcastMessages) return this.$options.allFields;
return this.$options.allFields.filter((f) => f.key !== 'target_roles');
},
},
allFields: [
{
key: 'status',
label: __('Status'),
tdClass: DEFAULT_TD_CLASSES,
},
{
key: 'preview',
label: __('Preview'),
tdClass: DEFAULT_TD_CLASSES,
},
{
key: 'starts_at',
label: __('Starts'),
tdClass: DEFAULT_TD_CLASSES,
},
{
key: 'ends_at',
label: __('Ends'),
tdClass: DEFAULT_TD_CLASSES,
},
{
key: 'target_roles',
label: __('Target roles'),
tdClass: DEFAULT_TD_CLASSES,
thAttr: { 'data-testid': 'target-roles-th' },
},
{
key: 'target_path',
label: __('Target Path'),
tdClass: DEFAULT_TD_CLASSES,
},
{
key: 'type',
label: __('Type'),
tdClass: DEFAULT_TD_CLASSES,
},
{
key: 'buttons',
label: '',
tdClass: `${DEFAULT_TD_CLASSES} gl-white-space-nowrap`,
},
],
safeHtmlConfig: {
ADD_TAGS: ['use'],
},
};
</script>
<template>
<div>
<messages-table-row v-for="message in messages" :key="message.id" :message="message" />
</div>
<gl-table-lite
:items="messages"
:fields="fields"
:tbody-tr-attr="{ 'data-testid': 'message-row' }"
stacked="md"
>
<template #cell(preview)="{ item: { preview } }">
<div v-safe-html:[$options.safeHtmlConfig]="preview"></div>
</template>
<template #cell(buttons)="{ item: { id, edit_path, disable_delete } }">
<gl-button
icon="pencil"
:aria-label="$options.i18n.edit"
:href="edit_path"
data-testid="edit-message"
/>
<gl-button
class="gl-ml-3"
icon="remove"
variant="danger"
:aria-label="$options.i18n.delete"
rel="nofollow"
:disabled="disable_delete"
:data-testid="`delete-message-${id}`"
@click="$emit('delete-message', id)"
/>
</template>
</gl-table-lite>
</template>

View file

@ -1,16 +0,0 @@
<script>
export default {
name: 'MessagesTableRow',
props: {
message: {
type: Object,
required: true,
},
},
};
</script>
<template>
<div>
{{ message.id }}
</div>
</template>

View file

@ -3,14 +3,16 @@ import BroadcastMessagesBase from './components/base.vue';
export default () => {
const el = document.querySelector('#js-broadcast-messages');
const { messages } = el.dataset;
const { page, messagesCount, messages } = el.dataset;
return new Vue({
el,
name: 'BroadcastMessagesBase',
name: 'BroadcastMessages',
render(createElement) {
return createElement(BroadcastMessagesBase, {
props: {
page: Number(page),
messagesCount: Number(messagesCount),
messages: JSON.parse(messages),
},
});

View file

@ -11,6 +11,7 @@ class Admin::BroadcastMessagesController < Admin::ApplicationController
# rubocop: disable CodeReuse/ActiveRecord
def index
push_frontend_feature_flag(:vue_broadcast_messages, current_user)
push_frontend_feature_flag(:role_targeted_broadcast_messages, current_user)
@broadcast_messages = BroadcastMessage.order(ends_at: :desc).page(params[:page])
@broadcast_message = BroadcastMessage.new

View file

@ -22,7 +22,7 @@ module Packages
def packages_for_group_projects(installable_only: false)
packages = ::Packages::Package
.including_project_route
.including_project_namespace_route
.including_tags
.for_projects(group_projects_visible_to_current_user.select(:id))
.sort_by_attribute("#{params[:order_by]}_#{params[:sort]}")

View file

@ -10,7 +10,7 @@ module Packages
@project
.packages
.preload_pipelines
.including_project_route
.including_project_namespace_route
.including_tags
.displayable
.find(@package_id)

View file

@ -14,7 +14,7 @@ module Packages
def execute
packages = project.packages
.including_project_route
.including_project_namespace_route
.including_tags
packages = packages.preload_pipelines if preload_pipelines

View file

@ -124,8 +124,8 @@ class Packages::Package < ApplicationRecord
scope :with_package_type, ->(package_type) { where(package_type: package_type) }
scope :without_package_type, ->(package_type) { where.not(package_type: package_type) }
scope :displayable, -> { with_status(DISPLAYABLE_STATUSES) }
scope :including_project_full_path, -> { includes(project: :route) }
scope :including_project_route, -> { includes(project: { namespace: :route }) }
scope :including_project_route, -> { includes(project: :route) }
scope :including_project_namespace_route, -> { includes(project: { namespace: :route }) }
scope :including_tags, -> { includes(:tags) }
scope :including_dependency_links, -> { includes(dependency_links: :dependency) }

View file

@ -36,6 +36,8 @@ class EventPresenter < Gitlab::View::Presenter::Delegated
'Design'
elsif wiki_page?
'Wiki Page'
elsif issue? || work_item?
target.issue_type
elsif target_type.present?
target_type.titleize
else

View file

@ -23,6 +23,7 @@ module MergeRequests
cleanup_environments(merge_request)
abort_auto_merge(merge_request, 'merge request was closed')
cleanup_refs(merge_request)
trigger_merge_request_merge_status_updated(merge_request)
end
merge_request
@ -38,5 +39,9 @@ module MergeRequests
merge_request_metrics_service(merge_request).close(close_event)
end
end
def trigger_merge_request_merge_status_updated(merge_request)
GraphqlTriggers.merge_request_merge_status_updated(merge_request)
end
end
end

View file

@ -33,7 +33,7 @@ module Packages
min_batch_size = [batch_size, BATCH_SIZE].min
@packages.each_batch(of: min_batch_size) do |batched_packages|
loaded_packages = batched_packages.including_project_full_path.to_a
loaded_packages = batched_packages.including_project_route.to_a
break no_access = true unless can_destroy_packages?(loaded_packages)

View file

@ -8,7 +8,22 @@
= _('Use banners and notifications to notify your users about scheduled maintenance, recent upgrades, and more.')
- if vue_app_enabled
#js-broadcast-messages{ data: { messages: @broadcast_messages.to_json } }
#js-broadcast-messages{ data: {
page: params[:page] || 1,
messages_count: @broadcast_messages.total_count,
messages: @broadcast_messages.map { |message| {
id: message.id,
status: broadcast_message_status(message),
preview: broadcast_message(message, preview: true),
starts_at: message.starts_at.to_s,
ends_at: message.ends_at.to_s,
target_roles: target_access_levels_display(message.target_access_levels),
target_path: message.target_path,
type: message.broadcast_type.capitalize,
edit_path: edit_admin_broadcast_message_path(message),
delete_path: admin_broadcast_message_path(message) + '.js'
} }.to_json
} }
- else
= render 'form'
%br.clearfix

View file

@ -657,7 +657,7 @@ Settings.cron_jobs['ci_runner_versions_reconciliation_worker'] ||= Settingslogic
Settings.cron_jobs['ci_runner_versions_reconciliation_worker']['cron'] ||= '@daily'
Settings.cron_jobs['ci_runner_versions_reconciliation_worker']['job_class'] = 'Ci::Runners::ReconcileExistingRunnerVersionsCronWorker'
Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker']['cron'] ||= '*/2 * * * *'
Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker']['job_class'] = 'Users::MigrateRecordsToGhostUserInBatchesWorker'
Gitlab.ee do

View file

@ -10,8 +10,11 @@ value_type: number
status: active
time_frame: 28d
data_source: redis_hll
instrumentation_class: RedisHLLMetric
instrumentation_class: AggregatedMetric
options:
aggregate:
operator: OR
attribute: user_id
events:
- g_edit_by_web_ide
- g_edit_by_sfe

View file

@ -10,8 +10,11 @@ value_type: number
status: active
time_frame: 28d
data_source: redis_hll
instrumentation_class: RedisHLLMetric
instrumentation_class: AggregatedMetric
options:
aggregate:
operator: OR
attribute: user_id
events:
- incident_management_incident_created
- incident_management_incident_reopened

View file

@ -13,8 +13,11 @@ value_type: number
status: active
time_frame: 28d
data_source: redis_hll
instrumentation_class: RedisHLLMetric
instrumentation_class: AggregatedMetric
options:
aggregate:
operator: OR
attribute: user_id
events:
- i_search_total
- i_search_advanced

View file

@ -10,8 +10,11 @@ value_type: number
status: active
time_frame: 28d
data_source: redis_hll
instrumentation_class: RedisHLLMetric
instrumentation_class: AggregatedMetric
options:
aggregate:
operator: OR
attribute: user_id
events:
- i_quickactions_approve
- i_quickactions_assign_single

View file

@ -10,8 +10,11 @@ value_type: number
status: active
time_frame: 7d
data_source: redis_hll
instrumentation_class: RedisHLLMetric
instrumentation_class: AggregatedMetric
options:
aggregate:
operator: OR
attribute: user_id
events:
- i_quickactions_approve
- i_quickactions_assign_single

View file

@ -105,6 +105,8 @@
- 1
- - compliance_management_merge_requests_compliance_violations
- 1
- - compliance_management_update_default_framework
- 1
- - container_repository
- 1
- - create_commit_signature

View file

@ -98,7 +98,7 @@ As an additional layer of HA resilience you can deploy select components in Kube
Note that this is an alternative and more **advanced** setup compared to a standard Reference Architecture. Running services in Kubernetes is well known to be complex. **This setup is only recommended** if you have strong working knowledge and experience in Kubernetes.
### GitLab Geo (Cross Regional Distribution / Disaster Recovery)
With [GitLab Geo](../geo/index.md) you can have both distributed environments in different regions and a full Disaster Recovery (DR) setup in place. With this setup you would have 2 or more separate environments, with one being a primary that gets replicated to the others. In the rare event the primary site went down completely you could fail over to one of the other environments.
This is an **advanced and involved** setup and should only be undertaken if you have DR as a key requirement. Decisions then on how each environment are configured would also need to be taken, such as if each environment itself would be the full size and / or have HA.

View file

@ -75,7 +75,7 @@ To start multiple processes:
]
```
`*` which matches all workers.
`*` which matches all workers.
As a result, the wildcard query must stay at the end of the list or the rules after it are ignored.
`*` cannot be combined with concrete queue names - `*, mailers`

View file

@ -341,7 +341,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://git
It schedules for deletion multiple environments that have already been
[stopped](../ci/environments/index.md#stop-an-environment) and
are [in the review app folder](../ci/review_apps/index.md).
The actual deletion is performed after 1 week from the time of execution.
The actual deletion is performed after 1 week from the time of execution.
By default, it only deletes environments 30 days or older. You can change this default using the `before` parameter.
```plaintext

View file

@ -66,7 +66,7 @@ There is more than two billion rows in `ci_builds` table. We store many
terabytes of data in that table, and the total size of indexes is measured in
terabytes as well.
This amount of data contributes to a significant number of performance
This amount of data contributes to a significant number of performance
problems we experience on our CI PostgreSQL database.
Most of the problems are related to how PostgreSQL database works internally,

View file

@ -148,7 +148,7 @@ At this moment, GitLab.com has "social-network"-like capabilities that may not f
1. How will existing `gitlab-org` contributors contribute to the namespace?
1. How do we move existing top-level namespaces into the new model (effectively breaking their social features)?
We should evaluate if the SMB and mid market segment is interested in these features, or if not having them is acceptable in most cases.
We should evaluate if the SMB and mid market segment is interested in these features, or if not having them is acceptable in most cases.
## High-level architecture problems to solve

View file

@ -265,7 +265,7 @@ test, maintain and extend.
A primary design decision will be which concerns to externalize to the plugin
and which should remain with the runner system. The current implementation
has several abstractions internally which could be used as cut points for a
new abstraction.
new abstraction.
For example the [`Build`](https://gitlab.com/gitlab-org/gitlab-runner/-/blob/267f40d871cd260dd063f7fbd36a921fedc62241/common/build.go#L125)
type uses the [`GetExecutorProvider`](https://gitlab.com/gitlab-org/gitlab-runner/-/blob/267f40d871cd260dd063f7fbd36a921fedc62241/common/executor.go#L171)

View file

@ -139,7 +139,7 @@ Premium license:
If you use `13,000` minutes during the month, the next month your additional minutes become
`2,000`. If you use `9,000` minutes during the month, your additional minutes remain the same.
If you bought additional CI/CD minutes while on a trial subscription those minutes will be available after the trial ends or you upgrade to a paid plan.
If you bought additional CI/CD minutes while on a trial subscription those minutes will be available after the trial ends or you upgrade to a paid plan.
You can find pricing for additional CI/CD minutes on the
[GitLab Pricing page](https://about.gitlab.com/pricing/).

View file

@ -155,26 +155,38 @@ The pipeline now executes the jobs as configured.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/30101) in GitLab 13.7.
You can use the [`value` and `description`](../yaml/index.md#variablesdescription)
keywords to define
[pipeline-level (global) variables](../variables/index.md#create-a-custom-cicd-variable-in-the-gitlab-ciyml-file)
that are prefilled when running a pipeline manually.
You can use the [`description` and `value`](../yaml/index.md#variablesdescription)
keywords to define [pipeline-level (global) variables](../variables/index.md#create-a-custom-cicd-variable-in-the-gitlab-ciyml-file)
that are prefilled when running a pipeline manually. Use the description to explain
what the variable is used for, what the acceptable values are, and so on.
In pipelines triggered manually, the **Run pipelines** page displays all top-level variables
with a `description` and `value` defined in the `.gitlab-ci.yml` file. The values
can then be modified if needed, which overrides the value for that single pipeline run.
Job-level variables cannot be pre-filled.
The description is displayed next to the variable. It can be used to explain what
the variable is used for, what the acceptable values are, and so on:
In manually-triggered pipelines, the **Run pipeline** page displays all pipeline-level variables
with a `description` defined in the `.gitlab-ci.yml` file. The description displays
below the variable.
You can change the prefilled value, which overrides the value for that single pipeline run.
If you do not define a `value` for the variable in the configuration file, the variable still displays,
but the value field is blank.
For example:
```yaml
variables:
TEST_SUITE:
description: "The test suite that will run. Valid options are: 'default', 'short', 'full'."
value: "default"
DEPLOY_ENVIRONMENT:
value: "staging" # Deploy to staging by default
description: "The deployment target. Change this variable to 'canary' or 'production' if needed."
description: "Select the deployment target. Valid options are: 'canary', 'staging', 'production', or a stable branch of your choice."
```
You cannot set job-level variables to be pre-filled when you run a pipeline manually.
In this example:
- `TEST_SUITE` is pre-filled in the **Run pipeline** page with `default`,
and the message explains the other options.
- `DEPLOY_ENVIRONMENT` is listed in the **Run pipeline** page, but with no value set.
The user is expected to define the value each time the pipeline is run manually.
### Run a pipeline by using a URL query string

View file

@ -374,7 +374,7 @@ For this solution to work, you must:
- Use [the networking mode that creates a new network for each job](https://docs.gitlab.com/runner/executors/docker.html#create-a-network-for-each-job).
- [Not use the Docker executor with Docker socket binding](../docker/using_docker_build.md#use-the-docker-executor-with-docker-socket-binding).
If you must, then in the above example, instead of `host`, use the dynamic network name created for this job.
If you must, then in the above example, instead of `host`, use the dynamic network name created for this job.
## How Docker integration works

View file

@ -193,7 +193,7 @@ The output is:
> Support for environment scopes [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/2874) in GitLab Premium 13.11
To make a CI/CD variable available to all projects in a group, define a group CI/CD variable. Only group owners can add or update group-level CI/CD variables.
To make a CI/CD variable available to all projects in a group, define a group CI/CD variable. Only group owners can add or update group-level CI/CD variables.
Use group variables to store secrets like passwords, SSH keys, and credentials, if you:

View file

@ -4200,9 +4200,9 @@ deploy_review_job:
Use the `description` keyword to define a [pipeline-level (global) variable that is prefilled](../pipelines/index.md#prefill-variables-in-manual-pipelines)
when [running a pipeline manually](../pipelines/index.md#run-a-pipeline-manually).
Must be used with `value`, for the variable value.
If used with `value`, the variable value is also prefilled when running a pipeline manually.
**Keyword type**: Global keyword. You cannot set job-level variables to be pre-filled when you run a pipeline manually.
**Keyword type**: Global keyword. You cannot use it for job-level variables.
**Possible inputs**:
@ -4213,10 +4213,15 @@ Must be used with `value`, for the variable value.
```yaml
variables:
DEPLOY_ENVIRONMENT:
value: "staging"
description: "The deployment target. Change this variable to 'canary' or 'production' if needed."
value: "staging"
```
**Additional details**:
- A global variable defined with `value` but no `description` behaves the same as
[`variables`](#variables).
### `when`
Use `when` to configure the conditions for when jobs run. If not defined in a job,

View file

@ -77,7 +77,7 @@ Reviewer roulette is an internal tool for use on GitLab.com, and not available f
The [Danger bot](dangerbot.md) randomly picks a reviewer and a maintainer for
each area of the codebase that your merge request seems to touch. It makes
**recommendations** for developer reviewers and you should override it if you think someone else is a better
fit. User-facing changes are required to have a UX review, too. Default to the recommended UX reviewer suggested.
fit. User-facing changes are required to have a UX review, too. Default to the recommended UX reviewer suggested.
It picks reviewers and maintainers from the list at the
[engineering projects](https://about.gitlab.com/handbook/engineering/projects/)

View file

@ -149,7 +149,7 @@ between your computer and GitLab.
1. GitLab requests your username and password.
If you have enabled two-factor authentication (2FA) on your account, you cannot use your account password. Instead, you can do one of the following:
If you have enabled two-factor authentication (2FA) on your account, you cannot use your account password. Instead, you can do one of the following:
- [Clone using a token](#clone-using-a-token) with `read_repository` or `write_repository` permissions.
- Install [Git Credential Manager](../user/profile/account/two_factor_authentication.md#git-credential-manager).

View file

@ -17,7 +17,7 @@ You can restore a backup only to _the exact same version and type (CE/EE)_ of
GitLab that you created it on (for example CE 9.1.0).
If your backup is a different version than the current installation, you must
[downgrade your GitLab installation](../update/package/downgrade.md)
[downgrade](../update/package/downgrade.md) or [upgrade](../update/package/index.md#upgrade-to-a-specific-version-using-the-official-repositories) your GitLab installation
before restoring the backup.
## Restore prerequisites

Binary file not shown.

Before

Width:  |  Height:  |  Size: 54 KiB

View file

@ -202,7 +202,7 @@ Next, take screenshots of your project to confirm that project's eligibility. Yo
- [Publicly visible settings](#screenshot-3-publicly-visible-settings)
NOTE:
Benefits of the GitLab Open Source Program apply to all projects in a GitLab namespace. All projects in an eligible namespace must meet program requirements.
Benefits of the GitLab Open Source Program apply to all projects in a GitLab namespace. All projects in an eligible namespace must meet program requirements.
##### Screenshot 1: License overview

View file

@ -6,49 +6,50 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Multiple Kubernetes clusters for Auto DevOps **(FREE)**
When using Auto DevOps, you can deploy different environments to
different Kubernetes clusters, due to the 1:1 connection
[existing between them](../../user/project/clusters/multiple_kubernetes_clusters.md).
When using Auto DevOps, you can deploy different environments to different Kubernetes clusters.
The [Deploy Job template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml)
used by Auto DevOps defines 3 environment names:
The [Deploy Job template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml) used by Auto DevOps defines three environment names:
- `review/` (every environment starting with `review/`)
- `staging`
- `production`
Those environments are tied to jobs using [Auto Deploy](stages.md#auto-deploy), so
except for the environment scope, they must have a different deployment domain.
You must define a separate `KUBE_INGRESS_BASE_DOMAIN` variable for each of the above
[based on the environment](../../ci/variables/index.md#limit-the-environment-scope-of-a-cicd-variable).
These environments are tied to jobs using [Auto Deploy](stages.md#auto-deploy), so they must have different deployment domains. You must define separate [`KUBE_CONTEXT`](../../user/clusters/agent/ci_cd_workflow.md#using-the-agent-with-auto-devops) and [`KUBE_INGRESS_BASE_DOMAIN`](requirements.md#auto-devops-base-domain) variables for each of the three environments.
The following table is an example of how to configure the three different clusters:
## Deploy to different clusters
| Cluster name | Cluster environment scope | `KUBE_INGRESS_BASE_DOMAIN` variable value | Variable environment scope | Notes |
|--------------|---------------------------|-------------------------------------------|----------------------------|---|
| review | `review/*` | `review.example.com` | `review/*` | The review cluster which runs all [Review Apps](../../ci/review_apps/index.md). `*` is a wildcard, used by every environment name starting with `review/`. |
| staging | `staging` | `staging.example.com` | `staging` | Optional. The staging cluster that runs the deployments of the staging environments. You must [enable it first](customize.md#deploy-policy-for-staging-and-production-environments). |
| production | `production` | `example.com` | `production` | The production cluster which runs the production environment deployments. You can use [incremental rollouts](customize.md#incremental-rollout-to-production). |
To deploy your environments to different Kubernetes clusters:
To add a different cluster for each environment:
1. [Create Kubernetes clusters](../../user/infrastructure/clusters/connect/new_gke_cluster.md).
1. Associate the clusters to your project:
1. [Install a GitLab Agent on each cluster](../../user/clusters/agent/index.md).
1. [Configure each agent to access your project](../../user/clusters/agent/install/index.md#configure-your-agent).
1. [Install NGINX Ingress Controller](cloud_deployments/auto_devops_with_gke.md#install-ingress) in each cluster. Save the IP address and Kubernetes namespace for the next step.
1. [Configure the Auto DevOps CI/CD Pipeline variables](customize.md#build-and-deployment)
- Set up a `KUBE_CONTEXT` variable [for each environment](../../ci/variables/index.md#limit-the-environment-scope-of-a-cicd-variable). The value must point to the agent of the relevant cluster.
- Set up a `KUBE_INGRESS_BASE_DOMAIN`. You must [configure the base domain](requirements.md#auto-devops-base-domain) for each environment to point to the Ingress of the relevant cluster.
- Add a `KUBE_NAMESPACE` variable with a value of the Kubernetes namespace you want your deployments to target. You can scope the variable to multiple environments.
1. Navigate to your project's **Infrastructure > Kubernetes clusters**.
1. Create the Kubernetes clusters with their respective environment scope, as
described from the table above.
1. After creating the clusters, navigate to each cluster and
[install Ingress](cloud_deployments/auto_devops_with_gke.md#install-ingress).
Wait for the Ingress IP address to be assigned.
1. Make sure you've [configured your DNS](requirements.md#auto-devops-base-domain) with the
specified Auto DevOps domains.
1. Navigate to each cluster's page, through **Infrastructure > Kubernetes clusters**,
and add the domain based on its Ingress IP address.
For deprecated, [certificate-based clusters](../../user/infrastructure/clusters/index.md#certificate-based-kubernetes-integration-deprecated):
1. Go to the project and select **Infrastructure > Kubernetes clusters** from the left sidebar.
1. [Set the environment scope of each cluster](../../user/project/clusters/multiple_kubernetes_clusters.md#setting-the-environment-scope).
1. For each cluster, [add a domain based on its Ingress IP address](../../user/project/clusters/gitlab_managed_clusters.md#base-domain).
NOTE:
[Cluster environment scope is not respected when checking for active Kubernetes clusters](https://gitlab.com/gitlab-org/gitlab/-/issues/20351). For a multi-cluster setup to work with Auto DevOps, you must create a fallback cluster with **Cluster environment scope** set to `*`. You can set any of the clusters you've already added as a fallback cluster.
### Example configurations
| Cluster name | Cluster environment scope | `KUBE_INGRESS_BASE_DOMAIN` value | `KUBE_CONTEXT` value | Variable environment scope | Notes |
| :------------| :-------------------------| :------------------------------- | :--------------------------------- | :--------------------------|:--|
| review | `review/*` | `review.example.com` | `path/to/project:review-agent` | `review/*` | A review cluster that runs all [Review Apps](../../ci/review_apps/index.md).|
| staging | `staging` | `staging.example.com` | `path/to/project:staging-agent` | `staging` | Optional. A staging cluster that runs the deployments of the staging environments. You must [enable it first](customize.md#deploy-policy-for-staging-and-production-environments). |
| production | `production` | `example.com` | `path/to/project:production-agent` | `production` | A production cluster that runs the production environment deployments. You can use [incremental rollouts](customize.md#incremental-rollout-to-production). |
## Test your configuration
After completing configuration, test your setup by creating a merge request.
Verify whether your application deployed as a Review App in the Kubernetes
cluster with the `review/*` environment scope. Similarly, check the
other environments.
[Cluster environment scope isn't respected](https://gitlab.com/gitlab-org/gitlab/-/issues/20351)
when checking for active Kubernetes clusters. For multi-cluster setup to work with Auto DevOps,
create a fallback cluster with **Cluster environment scope** set to `*`. A new cluster isn't
required. You can use any of the clusters already added.

View file

@ -93,7 +93,7 @@ Lead time for changes displays in several charts:
To retrieve metrics for lead time for changes, use the [GraphQL](../../api/graphql/reference/index.md) or the [REST](../../api/dora/metrics.md) APIs.
- The definition of lead time for change can vary widely, which often creates confusion within the industry.
- The definition of lead time for change can vary widely, which often creates confusion within the industry.
- "Lead time for changes" is not the same as "Lead time". In the value stream, "Lead time" measures the time it takes for work on issue to move from the moment it's requested (Issue created) to the time it's fulfilled and delivered (Issue closed).
### Time to restore service

View file

@ -26,7 +26,7 @@ If GitLab finds a CI/CD pipeline, then it inspects each job in the `.gitlab-ci.y
- If a job defines an [`artifacts:reports` keyword](../../../ci/yaml/artifacts_reports.md)
for a security scanner, then GitLab considers the security scanner enabled and shows the **Enabled** status.
- If no jobs define an `artifacts:reports` keyword for a security scanner, then GitLab considers
- If no jobs define an `artifacts:reports` keyword for a security scanner, then GitLab considers
the security scanner disabled and shows the **Not enabled** status.
If GitLab does not find a CI/CD pipeline, then it considers all security scanners disabled and shows the **Not enabled** status.

View file

@ -196,7 +196,7 @@ The modules that can be configured for logging are as follows:
### Artifacts
DAST's browser-based analyzer generates artifacts that can help you understand how the scanner works.
DAST's browser-based analyzer generates artifacts that can help you understand how the scanner works.
Using the latest version of the DAST [template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Security/DAST.latest.gitlab-ci.yml) these artifacts are exposed for download by default.
The list of artifacts includes the following files:

View file

@ -736,7 +736,7 @@ After DAST has authenticated with the application, all cookies are collected fro
For each cookie a matching session token is created for use by ZAP. This ensures ZAP is recognized
by the application as correctly authenticated.
Authentication supports single form logins, multi-step login forms, and authenticating to URLs outside of the configured target URL.
Authentication supports single form logins, multi-step login forms, and authenticating to URLs outside of the configured target URL.
WARNING:
**Never** run an authenticated scan against a production server. When an authenticated
@ -752,7 +752,7 @@ DAST can authenticate to websites making use of SSO, with the following restrict
- DAST cannot handle multi-factor authentication like one-time passwords (OTP) by using SMS or authenticator apps.
- DAST must get a cookie, or a local or session storage, with a sufficiently random value.
The [authentication debug output](index.md#configure-the-authentication-debug-output) can be helpful for troubleshooting SSO authentication
The [authentication debug output](index.md#configure-the-authentication-debug-output) can be helpful for troubleshooting SSO authentication
with DAST.
### Log in using automatic detection of the login form

View file

@ -1322,7 +1322,7 @@ for a Go project will contain dependencies that are compatible with this environ
`linux/amd64`, the final list of dependencies might contain additional incompatible
modules. The dependency list might also omit modules that are only compatible with your deployment environment. To prevent
this issue, you can configure the build process to target the operating system and architecture of the deployment
environment by setting the `GOOS` and `GOARCH` [environment variables](https://go.dev/ref/mod#minimal-version-selection)
environment by setting the `GOOS` and `GOARCH` [environment variables](https://go.dev/ref/mod#minimal-version-selection)
of your `.gitlab-ci.yml` file.
For example:

View file

@ -7,13 +7,13 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Generate test vulnerabilities
You can generate test vulnerabilities for the [Vulnerability Report](../vulnerability_report/index.md) to test GitLab
You can generate test vulnerabilities for the [Vulnerability Report](../vulnerability_report/index.md) to test GitLab
vulnerability management features without running a pipeline.
1. Log in to GitLab.
1. Go to `/-/profile/personal_access_tokens` and generate a personal access token with `api` permissions.
1. Go to your project page and find the project ID. You can find the project ID below the project title.
1. [Clone the GitLab repository](../../../gitlab-basics/start-using-git.md#clone-a-repository) to your local machine.
1. [Clone the GitLab repository](../../../gitlab-basics/start-using-git.md#clone-a-repository) to your local machine.
1. Open a terminal and go to `gitlab/qa` directory.
1. Run `bundle install`
1. Run the following command:

View file

@ -100,7 +100,7 @@ Findings are all potential vulnerability items scanners identify in MRs/feature
A flexible and non-destructive way to visually organize vulnerabilities in groups when there are multiple findings
that are likely related but do not qualify for deduplication. For example, you can include findings that should be
evaluated together, would be fixed by the same action, or come from the same source. Grouping behavior for vulnerabilities is
evaluated together, would be fixed by the same action, or come from the same source. Grouping behavior for vulnerabilities is
under development and tracked in issue [267588](https://gitlab.com/gitlab-org/gitlab/-/issues/267588).
### Insignificant finding

View file

@ -146,7 +146,7 @@ the timeline header represent the days of the week.
The timeline bar indicates the approximate position of an epic or milestone based on its start and
due dates.
## Blocked epics
## Blocked epics **(ULTIMATE)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/33587) in GitLab 15.5: View blocking epics when hovering over the “blocked” icon.

View file

@ -25,21 +25,21 @@ review merge requests in Visual Studio Code.
> [Introduced](https://gitlab.com/groups/gitlab-org/modelops/applied-ml/review-recommender/-/epics/3) in GitLab 15.4.
GitLab can recommend reviewers with Suggested Reviewers. Using the changes in a merge request and a project's contribution graph, machine learning powered suggestions appear in the reviewer section of the right merge request sidebar.
GitLab can recommend reviewers with Suggested Reviewers. Using the changes in a merge request and a project's contribution graph, machine learning powered suggestions appear in the reviewer section of the right merge request sidebar.
![Suggested Reviewers](img/suggested_reviewers_v15_4.png)
This feature is currently in [Open Beta](https://about.gitlab.com/handbook/product/gitlab-the-product/#open-beta) behind a [feature flag](https://gitlab.com/gitlab-org/gitlab/-/issues/368356).
Learn more about [how suggested reviewers works and data privacy](data_usage.md).
Learn more about [how suggested reviewers works and data privacy](data_usage.md).
### Enable suggested reviewers
Project Maintainers or Owners can enable suggested reviewers by visiting the [project settings](../../settings/index.md).
Project Maintainers or Owners can enable suggested reviewers by visiting the [project settings](../../settings/index.md).
Enabling suggested reviewers will trigger GitLab to create an ML model for your project that will be used to generate reviewers. The larger your project, the longer this can take, but usually, the model will be ready to generate suggestions within a few hours.
No action is required once the feature is enabled. Once the model is ready, recommendations will populate the Reviewer dropdown in the right-hand sidebar of a merge request with new commits.
No action is required once the feature is enabled. Once the model is ready, recommendations will populate the Reviewer dropdown in the right-hand sidebar of a merge request with new commits.
## Review a merge request

View file

@ -180,7 +180,7 @@ Prerequisites:
To promote a project milestone:
1. On the top bar, select **Main menu > Projects** and find your project.
1. On the left sidebar, select **Issues > Milestones**.
1. On the left sidebar, select **Issues > Milestones**.
1. Either:
- Select **Promote to Group Milestone** (**{level-up}**) next to the milestone you want to promote.
- Select the milestone title, and then select **Promote**.

View file

@ -161,9 +161,7 @@ module API
end
end
desc 'Delete deploy key for a project' do
success Key
end
desc 'Delete deploy key for a project'
params do
requires :key_id, type: Integer, desc: 'The ID of the deploy key'
end

View file

@ -54,7 +54,7 @@ module API
params do
requires :path, type: String, desc: 'Group path'
requires :name, type: String, desc: 'Group name'
requires :file, type: ::API::Validations::Types::WorkhorseFile, desc: 'The group export file to be imported'
requires :file, type: ::API::Validations::Types::WorkhorseFile, desc: 'The group export file to be imported', documentation: { type: 'file' }
optional :parent_id, type: Integer, desc: "The ID of the parent group that the group will be imported into. Defaults to the current user's namespace."
end
post 'import' do

View file

@ -91,7 +91,7 @@ module API
end
params do
requires :channel, type: String, desc: 'Helm channel', regexp: Gitlab::Regex.helm_channel_regex
requires :chart, type: ::API::Validations::Types::WorkhorseFile, desc: 'The chart file to be published (generated by Multipart middleware)'
requires :chart, type: ::API::Validations::Types::WorkhorseFile, desc: 'The chart file to be published (generated by Multipart middleware)', documentation: { type: 'file' }
end
post "api/:channel/charts" do
authorize_upload!(authorized_user_project)

View file

@ -11,7 +11,7 @@ module API
optional :visibility, type: String,
values: Gitlab::VisibilityLevel.string_values,
desc: 'The visibility of the group'
optional :avatar, type: ::API::Validations::Types::WorkhorseFile, desc: 'Avatar image for the group'
optional :avatar, type: ::API::Validations::Types::WorkhorseFile, desc: 'Avatar image for the group', documentation: { type: 'file' }
optional :share_with_group_lock, type: Boolean, desc: 'Prevent sharing a project with another group within this group'
optional :require_two_factor_authentication, type: Boolean, desc: 'Require all users in this group to setup Two-factor authentication'
optional :two_factor_grace_period, type: Integer, desc: 'Time before Two-factor authentication is enforced'

View file

@ -3,7 +3,7 @@
module Gitlab
module Utils
class ExecutionTracker
MAX_RUNTIME = 30.seconds
MAX_RUNTIME = 60.seconds
ExecutionTimeOutError = Class.new(StandardError)

View file

@ -7056,6 +7056,9 @@ msgstr ""
msgid "Broadcast Messages"
msgstr ""
msgid "BroadcastMessages|There was an issue deleting this message, please try again later."
msgstr ""
msgid "Browse Directory"
msgstr ""
@ -12254,6 +12257,9 @@ msgstr ""
msgid "DastProfiles|Save profile"
msgstr ""
msgid "DastProfiles|Scan Method"
msgstr ""
msgid "DastProfiles|Scan method"
msgstr ""

View file

@ -0,0 +1,46 @@
# frozen_string_literal: true
require 'rubocop-rspec'
module RuboCop
  module Cop
    module RSpec
      module FactoryBot
        # This cop checks for the creation of ActiveRecord objects in serializer specs.
        #
        # Persisting records with `create`/`create_list` is far slower than building
        # in-memory objects, so serializer specs should prefer the stubbed variants.
        #
        # @example
        #
        #   # bad
        #   let(:user) { create(:user) }
        #   let(:users) { create_list(:user, 2) }
        #
        #   # good
        #   let(:user) { build_stubbed(:user) }
        #   let(:user) { build(:user) }
        #   let(:users) { build_stubbed_list(:user, 2) }
        #   let(:users) { build_list(:user, 2) }
        class AvoidCreate < RuboCop::Cop::Base
          # %{method_name} is interpolated with the offending factory method.
          MESSAGE = "Prefer using `build_stubbed` or similar over `%{method_name}`. See https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#optimize-factory-usage"
          FORBIDDEN_METHODS = %i[create create_list].freeze
          # Restricts `on_send` callbacks to the forbidden methods only, for speed.
          RESTRICT_ON_SEND = FORBIDDEN_METHODS

          # Matches `create`/`create_list` calls with either no explicit receiver
          # or an explicit `FactoryBot` receiver; captures the method name symbol.
          def_node_matcher :forbidden_factory_usage, <<~PATTERN
            (
              send
              {(const nil? :FactoryBot) nil?}
              ${ #{FORBIDDEN_METHODS.map(&:inspect).join(' ')} }
              ...
            )
          PATTERN

          # Registers an offense on every persisted-factory call site.
          def on_send(node)
            method_name = forbidden_factory_usage(node)
            return unless method_name

            add_offense(node, message: format(MESSAGE, method_name: method_name))
          end
        end
      end
    end
  end
end

View file

@ -30,12 +30,42 @@ require_relative '../config/bundler_setup'
require 'rubocop'
require 'optparse'
def print_ast(file, source, version)
version ||= RuboCop::ConfigStore.new.for_file(file).target_ruby_version
puts RuboCop::AST::ProcessedSource.new(source, version).ast.to_s
module Helper
extend self
class << self
attr_writer :ruby_version
end
def ast(source, file: '', version: nil)
version ||= ruby_version
puts RuboCop::AST::ProcessedSource.new(source, version).ast.to_s
end
def ruby_version
@ruby_version ||= rubocop_target_ruby_version
end
def rubocop_target_ruby_version
@rubocop_target_ruby_version ||= RuboCop::ConfigStore.new.for_file('.').target_ruby_version
end
end
options = Struct.new(:eval, :ruby_version, :print_help, keyword_init: true).new
def start_irb
require 'irb'
include Helper # rubocop:disable Style/MixinUsage
puts "Ruby version: #{ruby_version}"
puts
puts "Use `ast(source_string, version: nil)` method to parse code and output AST. For example:"
puts " ast('puts :hello')"
puts
IRB.start
end
options = Struct.new(:eval, :interactive, :print_help, keyword_init: true).new
parser = OptionParser.new do |opts|
opts.banner = "Usage: #{$PROGRAM_NAME} [-e code] [FILE...]"
@ -44,9 +74,13 @@ parser = OptionParser.new do |opts|
options.eval = code
end
opts.on('-i', '--interactive', '') do
options.interactive = true
end
opts.on('-v RUBY_VERSION', '--ruby-version RUBY_VERSION',
'Parse as Ruby would. Defaults to RuboCop TargetRubyVersion setting.') do |ruby_version|
options.ruby_version = Float(ruby_version)
Helper.ruby_version = Float(ruby_version)
end
opts.on('-h', '--help') do
@ -54,20 +88,31 @@ parser = OptionParser.new do |opts|
end
end
args = parser.parse!
files = parser.parse!
if options.print_help
puts parser
exit
end
elsif options.interactive
if options.eval || files.any?
puts "Cannot combine `--interactive` with `--eval` or passing files. Aborting..."
puts
print_ast('', options.eval, options.ruby_version) if options.eval
args.each do |arg|
if File.file?(arg)
source = File.read(arg)
print_ast(arg, source, options.ruby_version)
puts parser
exit 1
else
warn "Skipping non-file #{arg.inspect}"
start_irb
end
elsif options.eval
Helper.ast(options.eval)
elsif files.any?
files.each do |file|
if File.file?(file)
source = File.read(file)
Helper.ast(source, file: file)
else
warn "Skipping non-file #{file.inspect}"
end
end
else
puts parser
end

View file

@ -54,6 +54,16 @@ FactoryBot.define do
target { note }
end
trait :for_issue do
target { association(:issue, issue_type: :issue) }
target_type { 'Issue' }
end
trait :for_work_item do
target { association(:work_item, :task) }
target_type { 'WorkItem' }
end
factory :design_event, traits: [:has_design] do
action { :created }
target { design }

View file

@ -1,35 +1,112 @@
import { shallowMount } from '@vue/test-utils';
import { GlPagination } from '@gitlab/ui';
import AxiosMockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { redirectTo } from '~/lib/utils/url_utility';
import BroadcastMessagesBase from '~/admin/broadcast_messages/components/base.vue';
import MessagesTable from '~/admin/broadcast_messages/components/messages_table.vue';
import { MOCK_MESSAGES } from '../mock_data';
import { generateMockMessages, MOCK_MESSAGES } from '../mock_data';
jest.mock('~/flash');
jest.mock('~/lib/utils/url_utility');
describe('BroadcastMessagesBase', () => {
let wrapper;
let axiosMock;
useMockLocationHelper();
const findTable = () => wrapper.findComponent(MessagesTable);
const findPagination = () => wrapper.findComponent(GlPagination);
function createComponent(props = {}) {
wrapper = shallowMount(BroadcastMessagesBase, {
propsData: {
page: 1,
messagesCount: MOCK_MESSAGES.length,
messages: MOCK_MESSAGES,
...props,
},
});
}
beforeEach(() => {
axiosMock = new AxiosMockAdapter(axios);
});
afterEach(() => {
axiosMock.restore();
wrapper.destroy();
});
it('renders the table when there are existing messages', () => {
it('renders the table and pagination when there are existing messages', () => {
createComponent();
expect(findTable().exists()).toBe(true);
expect(findPagination().exists()).toBe(true);
});
it('does not render the table when there are no existing messages', () => {
it('does not render the table when there are no visible messages', () => {
createComponent({ messages: [] });
expect(findTable().exists()).toBe(false);
expect(findPagination().exists()).toBe(true);
});
it('does not remove a deleted message if it was not in visibleMessages', async () => {
createComponent();
findTable().vm.$emit('delete-message', -1);
await waitForPromises();
expect(axiosMock.history.delete).toHaveLength(0);
expect(wrapper.vm.visibleMessages.length).toBe(MOCK_MESSAGES.length);
});
it('does not remove a deleted message if the request fails', async () => {
createComponent();
const { id, delete_path } = MOCK_MESSAGES[0];
axiosMock.onDelete(delete_path).replyOnce(500);
findTable().vm.$emit('delete-message', id);
await waitForPromises();
expect(wrapper.vm.visibleMessages.find((m) => m.id === id)).not.toBeUndefined();
expect(createAlert).toHaveBeenCalledWith(
expect.objectContaining({
message: BroadcastMessagesBase.i18n.deleteError,
}),
);
});
it('removes a deleted message from visibleMessages on success', async () => {
createComponent();
const { id, delete_path } = MOCK_MESSAGES[0];
axiosMock.onDelete(delete_path).replyOnce(200);
findTable().vm.$emit('delete-message', id);
await waitForPromises();
expect(wrapper.vm.visibleMessages.find((m) => m.id === id)).toBeUndefined();
expect(wrapper.vm.totalMessages).toBe(MOCK_MESSAGES.length - 1);
});
it('redirects to the first page when totalMessages changes from 21 to 20', async () => {
window.location.pathname = `${TEST_HOST}/admin/broadcast_messages`;
const messages = generateMockMessages(21);
const { id, delete_path } = messages[0];
createComponent({ messages, messagesCount: messages.length });
axiosMock.onDelete(delete_path).replyOnce(200);
findTable().vm.$emit('delete-message', id);
await waitForPromises();
expect(redirectTo).toHaveBeenCalledWith(`${TEST_HOST}/admin/broadcast_messages?page=1`);
});
});

View file

@ -1,26 +0,0 @@
import { shallowMount } from '@vue/test-utils';
import MessagesTableRow from '~/admin/broadcast_messages/components/messages_table_row.vue';
import { MOCK_MESSAGE } from '../mock_data';
describe('MessagesTableRow', () => {
let wrapper;
function createComponent(props = {}) {
wrapper = shallowMount(MessagesTableRow, {
propsData: {
message: MOCK_MESSAGE,
...props,
},
});
}
afterEach(() => {
wrapper.destroy();
});
it('renders the message ID', () => {
createComponent();
expect(wrapper.text()).toBe(`${MOCK_MESSAGE.id}`);
});
});

View file

@ -1,15 +1,19 @@
import { shallowMount } from '@vue/test-utils';
import { mount } from '@vue/test-utils';
import MessagesTable from '~/admin/broadcast_messages/components/messages_table.vue';
import MessagesTableRow from '~/admin/broadcast_messages/components/messages_table_row.vue';
import { MOCK_MESSAGES } from '../mock_data';
describe('MessagesTable', () => {
let wrapper;
const findRows = () => wrapper.findAllComponents(MessagesTableRow);
const findRows = () => wrapper.findAll('[data-testid="message-row"]');
const findTargetRoles = () => wrapper.find('[data-testid="target-roles-th"]');
const findDeleteButton = (id) => wrapper.find(`[data-testid="delete-message-${id}"]`);
function createComponent(props = {}) {
wrapper = shallowMount(MessagesTable, {
function createComponent(props = {}, glFeatures = {}) {
wrapper = mount(MessagesTable, {
provide: {
glFeatures,
},
propsData: {
messages: MOCK_MESSAGES,
...props,
@ -26,4 +30,22 @@ describe('MessagesTable', () => {
expect(findRows()).toHaveLength(MOCK_MESSAGES.length);
});
it('renders the "Target Roles" column when roleTargetedBroadcastMessages is enabled', () => {
createComponent({}, { roleTargetedBroadcastMessages: true });
expect(findTargetRoles().exists()).toBe(true);
});
it('does not render the "Target Roles" column when roleTargetedBroadcastMessages is disabled', () => {
createComponent();
expect(findTargetRoles().exists()).toBe(false);
});
it('emits a delete-message event when a delete button is clicked', () => {
const { id } = MOCK_MESSAGES[0];
createComponent();
findDeleteButton(id).element.click();
expect(wrapper.emitted('delete-message')).toHaveLength(1);
expect(wrapper.emitted('delete-message')[0]).toEqual([id]);
});
});

View file

@ -1,5 +1,17 @@
export const MOCK_MESSAGE = {
id: 100,
};
const generateMockMessage = (id) => ({
id,
delete_path: `/admin/broadcast_messages/${id}.js`,
edit_path: `/admin/broadcast_messages/${id}/edit`,
starts_at: new Date().toISOString(),
ends_at: new Date().toISOString(),
preview: '<div>YEET</div>',
status: 'Expired',
target_path: '*/welcome',
target_roles: 'Maintainer, Owner',
type: 'Banner',
});
export const MOCK_MESSAGES = [MOCK_MESSAGE, { id: 200 }, { id: 300 }];
export const generateMockMessages = (n) =>
[...Array(n).keys()].map((id) => generateMockMessage(id + 1));
export const MOCK_MESSAGES = generateMockMessages(5).map((id) => generateMockMessage(id));

View file

@ -314,15 +314,15 @@ RSpec.describe GitlabSchema do
end
describe '.parse_gids' do
let_it_be(:global_ids) { %w[gid://gitlab/TestOne/123 gid://gitlab/TestOne/456] }
let_it_be(:global_ids) { %w[gid://gitlab/TestOne/123 gid://gitlab/TestTwo/456] }
subject(:parse_gids) { described_class.parse_gids(global_ids, expected_type: TestOne) }
subject(:parse_gids) { described_class.parse_gids(global_ids, expected_type: [TestOne, TestTwo]) }
it 'parses the gids' do
expect(described_class).to receive(:parse_gid).with('gid://gitlab/TestOne/123', expected_type: TestOne).and_call_original
expect(described_class).to receive(:parse_gid).with('gid://gitlab/TestOne/456', expected_type: TestOne).and_call_original
expect(described_class).to receive(:parse_gid).with('gid://gitlab/TestOne/123', expected_type: [TestOne, TestTwo]).and_call_original
expect(described_class).to receive(:parse_gid).with('gid://gitlab/TestTwo/456', expected_type: [TestOne, TestTwo]).and_call_original
expect(parse_gids.map(&:model_id)).to eq %w[123 456]
expect(parse_gids.map(&:model_class)).to match_array [TestOne, TestOne]
expect(parse_gids.map(&:model_class)).to eq [TestOne, TestTwo]
end
end
end

View file

@ -51,6 +51,14 @@ RSpec.describe EventPresenter do
it 'returns milestone for a milestone event' do
expect(group_event.present).to have_attributes(target_type_name: 'milestone')
end
it 'returns the issue_type for issue events' do
expect(build(:event, :for_issue, :created).present).to have_attributes(target_type_name: 'issue')
end
it 'returns the issue_type for work item events' do
expect(build(:event, :for_work_item, :created).present).to have_attributes(target_type_name: 'task')
end
end
describe '#note_target_type_name' do

View file

@ -438,21 +438,7 @@ RSpec.describe API::MavenPackages do
it_behaves_like 'processing HEAD requests', instance_level: true
end
context 'with check_maven_path_first enabled' do
before do
stub_feature_flags(check_maven_path_first: true)
end
it_behaves_like 'handling groups, subgroups and user namespaces for', 'heading a file'
end
context 'with check_maven_path_first disabled' do
before do
stub_feature_flags(check_maven_path_first: false)
end
it_behaves_like 'handling groups, subgroups and user namespaces for', 'heading a file'
end
it_behaves_like 'handling groups, subgroups and user namespaces for', 'heading a file'
end
describe 'GET /api/v4/groups/:id/-/packages/maven/*path/:file_name' do
@ -668,21 +654,7 @@ RSpec.describe API::MavenPackages do
let(:path) { package.maven_metadatum.path }
let(:url) { "/groups/#{group.id}/-/packages/maven/#{path}/#{package_file.file_name}" }
context 'with check_maven_path_first enabled' do
before do
stub_feature_flags(check_maven_path_first: true)
end
it_behaves_like 'handling groups and subgroups for', 'processing HEAD requests'
end
context 'with check_maven_path_first disabled' do
before do
stub_feature_flags(check_maven_path_first: false)
end
it_behaves_like 'handling groups and subgroups for', 'processing HEAD requests'
end
it_behaves_like 'handling groups and subgroups for', 'processing HEAD requests'
end
describe 'GET /api/v4/projects/:id/packages/maven/*path/:file_name' do
@ -774,21 +746,7 @@ RSpec.describe API::MavenPackages do
let(:path) { package.maven_metadatum.path }
let(:url) { "/projects/#{project.id}/packages/maven/#{path}/#{package_file.file_name}" }
context 'with check_maven_path_first enabled' do
before do
stub_feature_flags(check_maven_path_first: true)
end
it_behaves_like 'processing HEAD requests'
end
context 'with check_maven_path_first disabled' do
before do
stub_feature_flags(check_maven_path_first: false)
end
it_behaves_like 'processing HEAD requests'
end
it_behaves_like 'processing HEAD requests'
end
describe 'PUT /api/v4/projects/:id/packages/maven/*path/:file_name/authorize' do

View file

@ -0,0 +1,25 @@
# frozen_string_literal: true
require 'rubocop_spec_helper'
require_relative '../../../../../rubocop/cop/rspec/factory_bot/avoid_create'
RSpec.describe RuboCop::Cop::RSpec::FactoryBot::AvoidCreate do
shared_examples 'an offensive factory call' do |namespace|
%i[create create_list].each do |forbidden_method|
namespaced_forbidden_method = "#{namespace}#{forbidden_method}(:user)"
it "registers an offense for #{namespaced_forbidden_method}" do
expect_offense(<<-RUBY)
describe 'foo' do
let(:user) { #{namespaced_forbidden_method} }
#{'^' * namespaced_forbidden_method.size} Prefer using `build_stubbed` or similar over `#{forbidden_method}`. See https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#optimize-factory-usage
end
RUBY
end
end
end
it_behaves_like 'an offensive factory call', ''
it_behaves_like 'an offensive factory call', 'FactoryBot.'
end

View file

@ -9,6 +9,7 @@ RSpec.describe MergeRequests::CloseService do
let(:merge_request) { create(:merge_request, assignees: [user2], author: create(:user)) }
let(:project) { merge_request.project }
let!(:todo) { create(:todo, :assigned, user: user, project: project, target: merge_request, author: user2) }
let(:service) { described_class.new(project: project, current_user: user) }
before do
project.add_maintainer(user)
@ -16,18 +17,20 @@ RSpec.describe MergeRequests::CloseService do
project.add_guest(guest)
end
def execute
service.execute(merge_request)
end
describe '#execute' do
it_behaves_like 'cache counters invalidator'
it_behaves_like 'merge request reviewers cache counters invalidator'
context 'valid params' do
let(:service) { described_class.new(project: project, current_user: user) }
before do
allow(service).to receive(:execute_hooks)
perform_enqueued_jobs do
@merge_request = service.execute(merge_request)
@merge_request = execute
end
end
@ -73,7 +76,7 @@ RSpec.describe MergeRequests::CloseService do
expect(metrics_service).to receive(:close)
described_class.new(project: project, current_user: user).execute(merge_request)
execute
end
it 'calls the merge request activity counter' do
@ -81,13 +84,11 @@ RSpec.describe MergeRequests::CloseService do
.to receive(:track_close_mr_action)
.with(user: user)
described_class.new(project: project, current_user: user).execute(merge_request)
execute
end
it 'refreshes the number of open merge requests for a valid MR', :use_clean_rails_memory_store_caching do
service = described_class.new(project: project, current_user: user)
expect { service.execute(merge_request) }
expect { execute }
.to change { project.open_merge_requests_count }.from(1).to(0)
end
@ -96,25 +97,39 @@ RSpec.describe MergeRequests::CloseService do
expect(service).to receive(:execute_for_merge_request_pipeline).with(merge_request)
end
described_class.new(project: project, current_user: user).execute(merge_request)
execute
end
it 'schedules CleanupRefsService' do
expect(MergeRequests::CleanupRefsService).to receive(:schedule).with(merge_request)
described_class.new(project: project, current_user: user).execute(merge_request)
execute
end
it 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(merge_request)
execute
end
context 'current user is not authorized to close merge request' do
let(:user) { guest }
before do
perform_enqueued_jobs do
@merge_request = described_class.new(project: project, current_user: guest).execute(merge_request)
@merge_request = execute
end
end
it 'does not close the merge request' do
expect(@merge_request).to be_open
end
it 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
expect(GraphqlTriggers).not_to receive(:merge_request_merge_status_updated)
execute
end
end
end
end

View file

@ -425,16 +425,10 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
create(:merge_request, :simple, source_project: project, reviewer_ids: [user2.id])
end
context 'when merge_request_reviewer feature is enabled' do
before do
stub_feature_flags(merge_request_reviewer: true)
end
let(:opts) { { reviewer_ids: [IssuableFinder::Params::NONE] } }
let(:opts) { { reviewer_ids: [IssuableFinder::Params::NONE] } }
it 'removes reviewers' do
expect(update_merge_request(opts).reviewers).to eq []
end
it 'removes reviewers' do
expect(update_merge_request(opts).reviewers).to eq []
end
end
end

View file

@ -19,29 +19,13 @@ RSpec.shared_examples 'reviewer_ids filter' do
let(:reviewer2) { create(:user) }
context 'when the current user can admin the merge_request' do
context 'when merge_request_reviewer feature is enabled' do
context 'with a reviewer who can read the merge_request' do
before do
stub_feature_flags(merge_request_reviewer: true)
project.add_developer(reviewer1)
end
context 'with a reviewer who can read the merge_request' do
before do
project.add_developer(reviewer1)
end
it 'contains reviewers who can read the merge_request' do
expect(execute.reviewers).to contain_exactly(reviewer1)
end
end
end
context 'when merge_request_reviewer feature is disabled' do
before do
stub_feature_flags(merge_request_reviewer: false)
end
it 'contains no reviewers' do
expect(execute.reviewers).to eq []
it 'contains reviewers who can read the merge_request' do
expect(execute.reviewers).to contain_exactly(reviewer1)
end
end
end

View file

@ -7,33 +7,41 @@ RSpec.describe 'events/event/_common.html.haml' do
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:user) { create(:user) }
context 'when it is a work item event' do
let(:work_item) { create(:work_item, project: project) }
before do
render partial: 'events/event/common', locals: { event: event.present }
end
let(:event) do
context 'when it is a work item event' do
let_it_be(:work_item) { create(:work_item, :task, project: project) }
let_it_be(:event) do
create(:event, :created, project: project, target: work_item, target_type: 'WorkItem', author: user)
end
it 'renders the correct url' do
render partial: 'events/event/common', locals: { event: event.present }
expect(rendered).to have_link(
work_item.reference_link_text, href: "/#{project.full_path}/-/work_items/#{work_item.id}"
)
end
it 'uses issue_type for the target_name' do
expect(rendered).to have_content("#{s_('Event|opened')} task #{work_item.to_reference}")
end
end
context 'when it is an isssue event' do
let(:issue) { create(:issue, project: project) }
context 'when it is an issue event' do
let_it_be(:issue) { create(:issue, project: project) }
let(:event) do
let_it_be(:event) do
create(:event, :created, project: project, target: issue, author: user)
end
it 'renders the correct url' do
render partial: 'events/event/common', locals: { event: event.present }
expect(rendered).to have_link(issue.reference_link_text, href: "/#{project.full_path}/-/issues/#{issue.iid}")
end
it 'uses issue_type for the target_name' do
expect(rendered).to have_content("#{s_('Event|opened')} issue #{issue.to_reference}")
end
end
end