Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent
bd746eebdc
commit
1f992463a9
|
@ -49,6 +49,7 @@ workflow:
|
||||||
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE == "schedule" && $SCHEDULE_TYPE == "maintenance"'
|
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE == "schedule" && $SCHEDULE_TYPE == "maintenance"'
|
||||||
variables:
|
variables:
|
||||||
CRYSTALBALL: "true"
|
CRYSTALBALL: "true"
|
||||||
|
CREATE_INCIDENT_FOR_PIPELINE_FAILURE: "true"
|
||||||
NOTIFY_PIPELINE_FAILURE_CHANNEL: "master-broken"
|
NOTIFY_PIPELINE_FAILURE_CHANNEL: "master-broken"
|
||||||
# Run pipelines for ruby3 branch
|
# Run pipelines for ruby3 branch
|
||||||
- if: '$CI_COMMIT_BRANCH == "ruby3"'
|
- if: '$CI_COMMIT_BRANCH == "ruby3"'
|
||||||
|
@ -62,9 +63,12 @@ workflow:
|
||||||
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $GITLAB_USER_LOGIN =~ /project_\d+_bot\d*/'
|
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $GITLAB_USER_LOGIN =~ /project_\d+_bot\d*/'
|
||||||
variables:
|
variables:
|
||||||
GITLAB_DEPENDENCY_PROXY_ADDRESS: ""
|
GITLAB_DEPENDENCY_PROXY_ADDRESS: ""
|
||||||
|
CREATE_INCIDENT_FOR_PIPELINE_FAILURE: "true"
|
||||||
|
NOTIFY_PIPELINE_FAILURE_CHANNEL: "master-broken"
|
||||||
# For `$CI_DEFAULT_BRANCH` branch, create a pipeline (this includes on schedules, pushes, merges, etc.).
|
# For `$CI_DEFAULT_BRANCH` branch, create a pipeline (this includes on schedules, pushes, merges, etc.).
|
||||||
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
|
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
|
||||||
variables:
|
variables:
|
||||||
|
CREATE_INCIDENT_FOR_PIPELINE_FAILURE: "true"
|
||||||
NOTIFY_PIPELINE_FAILURE_CHANNEL: "master-broken"
|
NOTIFY_PIPELINE_FAILURE_CHANNEL: "master-broken"
|
||||||
# For tags, create a pipeline.
|
# For tags, create a pipeline.
|
||||||
- if: '$CI_COMMIT_TAG'
|
- if: '$CI_COMMIT_TAG'
|
||||||
|
|
|
@ -1,8 +1,12 @@
|
||||||
.notify-slack:
|
.notify-defaults:
|
||||||
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}alpine/curl
|
|
||||||
stage: notify
|
stage: notify
|
||||||
dependencies: []
|
dependencies: []
|
||||||
cache: {}
|
cache: {}
|
||||||
|
|
||||||
|
.notify-slack:
|
||||||
|
extends:
|
||||||
|
- .notify-defaults
|
||||||
|
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}alpine/curl
|
||||||
variables:
|
variables:
|
||||||
MERGE_REQUEST_URL: ${CI_MERGE_REQUEST_PROJECT_URL}/-/merge_requests/${CI_MERGE_REQUEST_IID}
|
MERGE_REQUEST_URL: ${CI_MERGE_REQUEST_PROJECT_URL}/-/merge_requests/${CI_MERGE_REQUEST_IID}
|
||||||
before_script:
|
before_script:
|
||||||
|
@ -34,28 +38,31 @@ notify-security-pipeline:
|
||||||
- scripts/slack ${NOTIFY_CHANNEL} "<!subteam^S0127FU8PDE> ☠️ Pipeline for merged result failed! ☠️ See ${CI_PIPELINE_URL} (triggered from ${MERGE_REQUEST_URL})" ci_failing "GitLab Release Tools Bot"
|
- scripts/slack ${NOTIFY_CHANNEL} "<!subteam^S0127FU8PDE> ☠️ Pipeline for merged result failed! ☠️ See ${CI_PIPELINE_URL} (triggered from ${MERGE_REQUEST_URL})" ci_failing "GitLab Release Tools Bot"
|
||||||
|
|
||||||
notify-pipeline-failure:
|
notify-pipeline-failure:
|
||||||
extends: .notify-slack
|
extends:
|
||||||
|
- .notify-defaults
|
||||||
|
- .notify:rules:notify-pipeline-failure
|
||||||
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}
|
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}
|
||||||
rules:
|
|
||||||
# Don't report child pipeline failures
|
|
||||||
- if: '$CI_PIPELINE_SOURCE == "parent_pipeline"'
|
|
||||||
when: never
|
|
||||||
- if: '$CI_SLACK_WEBHOOK_URL && $NOTIFY_PIPELINE_FAILURE_CHANNEL'
|
|
||||||
when: on_failure
|
|
||||||
allow_failure: true
|
|
||||||
variables:
|
variables:
|
||||||
|
BROKEN_MASTER_INCIDENTS_PROJECT: "gitlab-org/quality/engineering-productivity/master-broken-incidents"
|
||||||
|
BROKEN_MASTER_INCIDENT_JSON: "${CI_PROJECT_DIR}/incident.json"
|
||||||
SLACK_CHANNEL: "${NOTIFY_PIPELINE_FAILURE_CHANNEL}"
|
SLACK_CHANNEL: "${NOTIFY_PIPELINE_FAILURE_CHANNEL}"
|
||||||
FAILED_PIPELINE_REPORT_FILE: "failed_pipeline_report.json"
|
FAILED_PIPELINE_SLACK_MESSAGE_FILE: "${CI_PROJECT_DIR}/failed_pipeline_slack_message.json"
|
||||||
before_script:
|
before_script:
|
||||||
- source scripts/utils.sh
|
- source scripts/utils.sh
|
||||||
- apt-get update && apt-get install -y jq
|
- apt-get update && apt-get install -y jq
|
||||||
- install_gitlab_gem
|
- install_gitlab_gem
|
||||||
script:
|
script:
|
||||||
- scripts/generate-failed-pipeline-slack-message.rb
|
|
||||||
- |
|
- |
|
||||||
curl -X POST -H 'Content-Type: application/json' --data @${FAILED_PIPELINE_REPORT_FILE} "$CI_SLACK_WEBHOOK_URL"
|
if [[ "${CREATE_INCIDENT_FOR_PIPELINE_FAILURE}" == "true" ]]; then
|
||||||
|
scripts/create-pipeline-failure-incident.rb -p ${BROKEN_MASTER_INCIDENTS_PROJECT} -f ${BROKEN_MASTER_INCIDENT_JSON} -t ${BROKEN_MASTER_INCIDENTS_PROJECT_TOKEN};
|
||||||
|
echosuccess "Created incident $(jq '.web_url' ${BROKEN_MASTER_INCIDENT_JSON})";
|
||||||
|
fi
|
||||||
|
- |
|
||||||
|
scripts/generate-failed-pipeline-slack-message.rb -i ${BROKEN_MASTER_INCIDENT_JSON} -f ${FAILED_PIPELINE_SLACK_MESSAGE_FILE};
|
||||||
|
curl -X POST -H 'Content-Type: application/json' --data @${FAILED_PIPELINE_SLACK_MESSAGE_FILE} "$CI_SLACK_WEBHOOK_URL";
|
||||||
artifacts:
|
artifacts:
|
||||||
paths:
|
paths:
|
||||||
- ${FAILED_PIPELINE_REPORT_FILE}
|
- ${BROKEN_MASTER_INCIDENT_JSON}
|
||||||
|
- ${FAILED_PIPELINE_SLACK_MESSAGE_FILE}
|
||||||
when: always
|
when: always
|
||||||
expire_in: 2 days
|
expire_in: 2 days
|
||||||
|
|
|
@ -997,6 +997,18 @@
|
||||||
- <<: *if-default-refs
|
- <<: *if-default-refs
|
||||||
changes: *code-patterns
|
changes: *code-patterns
|
||||||
|
|
||||||
|
##########
|
||||||
|
# Notify #
|
||||||
|
##########
|
||||||
|
.notify:rules:notify-pipeline-failure:
|
||||||
|
rules:
|
||||||
|
# Don't report child pipeline failures
|
||||||
|
- if: '$CI_PIPELINE_SOURCE == "parent_pipeline"'
|
||||||
|
when: never
|
||||||
|
- if: '$CI_SLACK_WEBHOOK_URL && $NOTIFY_PIPELINE_FAILURE_CHANNEL'
|
||||||
|
when: on_failure
|
||||||
|
allow_failure: true
|
||||||
|
|
||||||
###############
|
###############
|
||||||
# Pages rules #
|
# Pages rules #
|
||||||
###############
|
###############
|
||||||
|
|
|
@ -249,28 +249,6 @@ Layout/SpaceInsideParens:
|
||||||
- 'spec/requests/search_controller_spec.rb'
|
- 'spec/requests/search_controller_spec.rb'
|
||||||
- 'spec/serializers/analytics_build_entity_spec.rb'
|
- 'spec/serializers/analytics_build_entity_spec.rb'
|
||||||
- 'spec/serializers/merge_request_user_entity_spec.rb'
|
- 'spec/serializers/merge_request_user_entity_spec.rb'
|
||||||
- 'spec/services/boards/issues/list_service_spec.rb'
|
|
||||||
- 'spec/services/ci/compare_test_reports_service_spec.rb'
|
|
||||||
- 'spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb'
|
|
||||||
- 'spec/services/ci/retry_job_service_spec.rb'
|
|
||||||
- 'spec/services/clusters/gcp/provision_service_spec.rb'
|
|
||||||
- 'spec/services/clusters/gcp/verify_provision_status_service_spec.rb'
|
|
||||||
- 'spec/services/groups/destroy_service_spec.rb'
|
|
||||||
- 'spec/services/groups/update_shared_runners_service_spec.rb'
|
|
||||||
- 'spec/services/import/gitlab_projects/file_acquisition_strategies/file_upload_spec.rb'
|
|
||||||
- 'spec/services/issues/export_csv_service_spec.rb'
|
|
||||||
- 'spec/services/labels/promote_service_spec.rb'
|
|
||||||
- 'spec/services/members/invite_service_spec.rb'
|
|
||||||
- 'spec/services/notes/update_service_spec.rb'
|
|
||||||
- 'spec/services/packages/composer/composer_json_service_spec.rb'
|
|
||||||
- 'spec/services/packages/npm/create_package_service_spec.rb'
|
|
||||||
- 'spec/services/projects/lfs_pointers/lfs_download_service_spec.rb'
|
|
||||||
- 'spec/services/search/group_service_spec.rb'
|
|
||||||
- 'spec/services/security/merge_reports_service_spec.rb'
|
|
||||||
- 'spec/services/suggestions/apply_service_spec.rb'
|
|
||||||
- 'spec/services/system_notes/issuables_service_spec.rb'
|
|
||||||
- 'spec/services/users/destroy_service_spec.rb'
|
|
||||||
- 'spec/services/x509_certificate_revoke_service_spec.rb'
|
|
||||||
- 'spec/support/helpers/database/partitioning_helpers.rb'
|
- 'spec/support/helpers/database/partitioning_helpers.rb'
|
||||||
- 'spec/support/helpers/dependency_proxy_helpers.rb'
|
- 'spec/support/helpers/dependency_proxy_helpers.rb'
|
||||||
- 'spec/support/helpers/javascript_fixtures_helpers.rb'
|
- 'spec/support/helpers/javascript_fixtures_helpers.rb'
|
||||||
|
|
4
Gemfile
4
Gemfile
|
@ -168,9 +168,9 @@ gem 'seed-fu', '~> 2.3.7'
|
||||||
gem 'elasticsearch-model', '~> 7.2'
|
gem 'elasticsearch-model', '~> 7.2'
|
||||||
gem 'elasticsearch-rails', '~> 7.2', require: 'elasticsearch/rails/instrumentation'
|
gem 'elasticsearch-rails', '~> 7.2', require: 'elasticsearch/rails/instrumentation'
|
||||||
gem 'elasticsearch-api', '7.13.3'
|
gem 'elasticsearch-api', '7.13.3'
|
||||||
gem 'aws-sdk-core', '~> 3.159.0'
|
gem 'aws-sdk-core', '~> 3.166.0'
|
||||||
gem 'aws-sdk-cloudformation', '~> 1'
|
gem 'aws-sdk-cloudformation', '~> 1'
|
||||||
gem 'aws-sdk-s3', '~> 1.114.0'
|
gem 'aws-sdk-s3', '~> 1.117.1'
|
||||||
gem 'faraday_middleware-aws-sigv4', '~>0.3.0'
|
gem 'faraday_middleware-aws-sigv4', '~>0.3.0'
|
||||||
gem 'typhoeus', '~> 1.4.0' # Used with Elasticsearch to support http keep-alive connections
|
gem 'typhoeus', '~> 1.4.0' # Used with Elasticsearch to support http keep-alive connections
|
||||||
|
|
||||||
|
|
|
@ -33,11 +33,11 @@
|
||||||
{"name":"awesome_print","version":"1.9.2","platform":"ruby","checksum":"e99b32b704acff16d768b3468680793ced40bfdc4537eb07e06a4be11133786e"},
|
{"name":"awesome_print","version":"1.9.2","platform":"ruby","checksum":"e99b32b704acff16d768b3468680793ced40bfdc4537eb07e06a4be11133786e"},
|
||||||
{"name":"awrence","version":"1.1.1","platform":"ruby","checksum":"9be584c97408ed92d5e1ca11740853646fe270de675f2f8dd44e8233226dfc97"},
|
{"name":"awrence","version":"1.1.1","platform":"ruby","checksum":"9be584c97408ed92d5e1ca11740853646fe270de675f2f8dd44e8233226dfc97"},
|
||||||
{"name":"aws-eventstream","version":"1.2.0","platform":"ruby","checksum":"ffa53482c92880b001ff2fb06919b9bb82fd847cbb0fa244985d2ebb6dd0d1df"},
|
{"name":"aws-eventstream","version":"1.2.0","platform":"ruby","checksum":"ffa53482c92880b001ff2fb06919b9bb82fd847cbb0fa244985d2ebb6dd0d1df"},
|
||||||
{"name":"aws-partitions","version":"1.644.0","platform":"ruby","checksum":"63791750839afff110c5b5a8805018c4275720d7a5c7ec79319d4d520c7da874"},
|
{"name":"aws-partitions","version":"1.651.0","platform":"ruby","checksum":"61f354049eb2c10bf0aa96b115f7443d181d79ec5508f7a34b8724c4cfa95dda"},
|
||||||
{"name":"aws-sdk-cloudformation","version":"1.41.0","platform":"ruby","checksum":"31e47539719734413671edf9b1a31f8673fbf9688549f50c41affabbcb1c6b26"},
|
{"name":"aws-sdk-cloudformation","version":"1.41.0","platform":"ruby","checksum":"31e47539719734413671edf9b1a31f8673fbf9688549f50c41affabbcb1c6b26"},
|
||||||
{"name":"aws-sdk-core","version":"3.159.0","platform":"ruby","checksum":"8863e2cdfd6816a0532ad9eb82a07b81b3d12667da747e9e82554e4dd7adb231"},
|
{"name":"aws-sdk-core","version":"3.166.0","platform":"ruby","checksum":"827b82a31f13007fbd3ce78801949019ad3b6fa0c658270d5caa6095cb4945fa"},
|
||||||
{"name":"aws-sdk-kms","version":"1.57.0","platform":"ruby","checksum":"ffd7dbb9b4251f29d4f508af761d0addd7035a346a88e3481cdb4dc548e51bd5"},
|
{"name":"aws-sdk-kms","version":"1.59.0","platform":"ruby","checksum":"6c002ebf8e404625c8338ca12ae69b1329399f9dc1b0ebca474e00ff06700153"},
|
||||||
{"name":"aws-sdk-s3","version":"1.114.0","platform":"ruby","checksum":"ce0f71df1a7b0fb1f88d40a70636ef1a9b08e69fb560694c5dab3f4ac7efcde4"},
|
{"name":"aws-sdk-s3","version":"1.117.1","platform":"ruby","checksum":"76f6dac5baeb2b78616eb34c6af650c1b7a15c1078b169d1b27e8421904c509d"},
|
||||||
{"name":"aws-sigv4","version":"1.5.1","platform":"ruby","checksum":"d68c87fff4ee843b4b92b23c7f31f957f254ec6eb064181f7119124aab8b8bb4"},
|
{"name":"aws-sigv4","version":"1.5.1","platform":"ruby","checksum":"d68c87fff4ee843b4b92b23c7f31f957f254ec6eb064181f7119124aab8b8bb4"},
|
||||||
{"name":"azure-storage-blob","version":"2.0.3","platform":"ruby","checksum":"61b76118843c91776bd24bee22c74adafeb7c4bb3a858a325047dae3b59d0363"},
|
{"name":"azure-storage-blob","version":"2.0.3","platform":"ruby","checksum":"61b76118843c91776bd24bee22c74adafeb7c4bb3a858a325047dae3b59d0363"},
|
||||||
{"name":"azure-storage-common","version":"2.0.4","platform":"ruby","checksum":"608f4daab0e06b583b73dcffd3246ea39e78056de31630286b0cf97af7d6956b"},
|
{"name":"azure-storage-common","version":"2.0.4","platform":"ruby","checksum":"608f4daab0e06b583b73dcffd3246ea39e78056de31630286b0cf97af7d6956b"},
|
||||||
|
|
20
Gemfile.lock
20
Gemfile.lock
|
@ -185,20 +185,20 @@ GEM
|
||||||
awesome_print (1.9.2)
|
awesome_print (1.9.2)
|
||||||
awrence (1.1.1)
|
awrence (1.1.1)
|
||||||
aws-eventstream (1.2.0)
|
aws-eventstream (1.2.0)
|
||||||
aws-partitions (1.644.0)
|
aws-partitions (1.651.0)
|
||||||
aws-sdk-cloudformation (1.41.0)
|
aws-sdk-cloudformation (1.41.0)
|
||||||
aws-sdk-core (~> 3, >= 3.99.0)
|
aws-sdk-core (~> 3, >= 3.99.0)
|
||||||
aws-sigv4 (~> 1.1)
|
aws-sigv4 (~> 1.1)
|
||||||
aws-sdk-core (3.159.0)
|
aws-sdk-core (3.166.0)
|
||||||
aws-eventstream (~> 1, >= 1.0.2)
|
aws-eventstream (~> 1, >= 1.0.2)
|
||||||
aws-partitions (~> 1, >= 1.525.0)
|
aws-partitions (~> 1, >= 1.651.0)
|
||||||
aws-sigv4 (~> 1.1)
|
aws-sigv4 (~> 1.5)
|
||||||
jmespath (~> 1, >= 1.6.1)
|
jmespath (~> 1, >= 1.6.1)
|
||||||
aws-sdk-kms (1.57.0)
|
aws-sdk-kms (1.59.0)
|
||||||
aws-sdk-core (~> 3, >= 3.127.0)
|
aws-sdk-core (~> 3, >= 3.165.0)
|
||||||
aws-sigv4 (~> 1.1)
|
aws-sigv4 (~> 1.1)
|
||||||
aws-sdk-s3 (1.114.0)
|
aws-sdk-s3 (1.117.1)
|
||||||
aws-sdk-core (~> 3, >= 3.127.0)
|
aws-sdk-core (~> 3, >= 3.165.0)
|
||||||
aws-sdk-kms (~> 1)
|
aws-sdk-kms (~> 1)
|
||||||
aws-sigv4 (~> 1.4)
|
aws-sigv4 (~> 1.4)
|
||||||
aws-sigv4 (1.5.1)
|
aws-sigv4 (1.5.1)
|
||||||
|
@ -1558,8 +1558,8 @@ DEPENDENCIES
|
||||||
autoprefixer-rails (= 10.2.5.1)
|
autoprefixer-rails (= 10.2.5.1)
|
||||||
awesome_print
|
awesome_print
|
||||||
aws-sdk-cloudformation (~> 1)
|
aws-sdk-cloudformation (~> 1)
|
||||||
aws-sdk-core (~> 3.159.0)
|
aws-sdk-core (~> 3.166.0)
|
||||||
aws-sdk-s3 (~> 1.114.0)
|
aws-sdk-s3 (~> 1.117.1)
|
||||||
babosa (~> 1.0.4)
|
babosa (~> 1.0.4)
|
||||||
base32 (~> 0.3.0)
|
base32 (~> 0.3.0)
|
||||||
batch-loader (~> 2.0.1)
|
batch-loader (~> 2.0.1)
|
||||||
|
|
|
@ -0,0 +1,54 @@
|
||||||
|
import { GlButton } from '@gitlab/ui';
|
||||||
|
import { MOCK_HTML } from '../../../../../../spec/frontend/vue_shared/components/markdown_drawer/mock_data';
|
||||||
|
import MarkdownDrawer from './markdown_drawer.vue';
|
||||||
|
|
||||||
|
export default {
|
||||||
|
component: MarkdownDrawer,
|
||||||
|
title: 'vue_shared/markdown_drawer',
|
||||||
|
parameters: {
|
||||||
|
mirage: {
|
||||||
|
timing: 1000,
|
||||||
|
handlers: {
|
||||||
|
get: {
|
||||||
|
'/help/user/search/global_search/advanced_search_syntax.json': [
|
||||||
|
200,
|
||||||
|
{},
|
||||||
|
{ html: MOCK_HTML },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const createStory = ({ ...options }) => (_, { argTypes }) => ({
|
||||||
|
components: { MarkdownDrawer, GlButton },
|
||||||
|
props: Object.keys(argTypes),
|
||||||
|
data() {
|
||||||
|
return {
|
||||||
|
render: false,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
methods: {
|
||||||
|
toggleDrawer() {
|
||||||
|
this.$refs.drawer.toggleDrawer();
|
||||||
|
},
|
||||||
|
},
|
||||||
|
mounted() {
|
||||||
|
window.requestAnimationFrame(() => {
|
||||||
|
this.render = true;
|
||||||
|
});
|
||||||
|
},
|
||||||
|
template: `
|
||||||
|
<div v-if="render">
|
||||||
|
<gl-button @click="toggleDrawer">Open Drawer</gl-button>
|
||||||
|
<markdown-drawer
|
||||||
|
:documentPath="'user/search/global_search/advanced_search_syntax.json'"
|
||||||
|
ref="drawer"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
`,
|
||||||
|
...options,
|
||||||
|
});
|
||||||
|
|
||||||
|
export const Default = createStory({});
|
|
@ -0,0 +1,117 @@
|
||||||
|
<script>
|
||||||
|
import { GlSafeHtmlDirective as SafeHtml, GlDrawer, GlAlert, GlSkeletonLoader } from '@gitlab/ui';
|
||||||
|
import $ from 'jquery';
|
||||||
|
import '~/behaviors/markdown/render_gfm';
|
||||||
|
import { s__ } from '~/locale';
|
||||||
|
import { contentTop } from '~/lib/utils/common_utils';
|
||||||
|
import { getRenderedMarkdown } from './utils/fetch';
|
||||||
|
|
||||||
|
export const cache = {};
|
||||||
|
|
||||||
|
export default {
|
||||||
|
name: 'MarkdownDrawer',
|
||||||
|
components: {
|
||||||
|
GlDrawer,
|
||||||
|
GlAlert,
|
||||||
|
GlSkeletonLoader,
|
||||||
|
},
|
||||||
|
directives: {
|
||||||
|
SafeHtml,
|
||||||
|
},
|
||||||
|
i18n: {
|
||||||
|
alert: s__('MardownDrawer|Could not fetch help contents.'),
|
||||||
|
},
|
||||||
|
props: {
|
||||||
|
documentPath: {
|
||||||
|
type: String,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
data() {
|
||||||
|
return {
|
||||||
|
loading: false,
|
||||||
|
hasFetchError: false,
|
||||||
|
title: '',
|
||||||
|
body: null,
|
||||||
|
open: false,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
computed: {
|
||||||
|
drawerOffsetTop() {
|
||||||
|
return `${contentTop()}px`;
|
||||||
|
},
|
||||||
|
},
|
||||||
|
watch: {
|
||||||
|
documentPath: {
|
||||||
|
immediate: true,
|
||||||
|
handler: 'fetchMarkdown',
|
||||||
|
},
|
||||||
|
open(open) {
|
||||||
|
if (open && this.body) {
|
||||||
|
this.renderGLFM();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
methods: {
|
||||||
|
async fetchMarkdown() {
|
||||||
|
const cached = cache[this.documentPath];
|
||||||
|
this.hasFetchError = false;
|
||||||
|
this.title = '';
|
||||||
|
if (cached) {
|
||||||
|
this.title = cached.title;
|
||||||
|
this.body = cached.body;
|
||||||
|
if (this.open) {
|
||||||
|
this.renderGLFM();
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
this.loading = true;
|
||||||
|
const { body, title, hasFetchError } = await getRenderedMarkdown(this.documentPath);
|
||||||
|
this.title = title;
|
||||||
|
this.body = body;
|
||||||
|
this.loading = false;
|
||||||
|
this.hasFetchError = hasFetchError;
|
||||||
|
if (this.open) {
|
||||||
|
this.renderGLFM();
|
||||||
|
}
|
||||||
|
cache[this.documentPath] = { title, body };
|
||||||
|
}
|
||||||
|
},
|
||||||
|
renderGLFM() {
|
||||||
|
this.$nextTick(() => {
|
||||||
|
$(this.$refs['content-element']).renderGFM();
|
||||||
|
});
|
||||||
|
},
|
||||||
|
closeDrawer() {
|
||||||
|
this.open = false;
|
||||||
|
},
|
||||||
|
toggleDrawer() {
|
||||||
|
this.open = !this.open;
|
||||||
|
},
|
||||||
|
openDrawer() {
|
||||||
|
this.open = true;
|
||||||
|
},
|
||||||
|
},
|
||||||
|
safeHtmlConfig: {
|
||||||
|
ADD_TAGS: ['copy-code'],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
</script>
|
||||||
|
<template>
|
||||||
|
<gl-drawer :header-height="drawerOffsetTop" :open="open" header-sticky @close="closeDrawer">
|
||||||
|
<template #title>
|
||||||
|
<h4 data-testid="title-element" class="gl-m-0">{{ title }}</h4>
|
||||||
|
</template>
|
||||||
|
<template #default>
|
||||||
|
<div v-if="hasFetchError">
|
||||||
|
<gl-alert :dismissible="false" variant="danger">{{ $options.i18n.alert }}</gl-alert>
|
||||||
|
</div>
|
||||||
|
<gl-skeleton-loader v-else-if="loading" />
|
||||||
|
<div
|
||||||
|
v-else
|
||||||
|
ref="content-element"
|
||||||
|
v-safe-html:[$options.safeHtmlConfig]="body"
|
||||||
|
class="md"
|
||||||
|
></div>
|
||||||
|
</template>
|
||||||
|
</gl-drawer>
|
||||||
|
</template>
|
|
@ -0,0 +1,32 @@
|
||||||
|
import * as Sentry from '@sentry/browser';
|
||||||
|
import { helpPagePath } from '~/helpers/help_page_helper';
|
||||||
|
import axios from '~/lib/utils/axios_utils';
|
||||||
|
|
||||||
|
export const splitDocument = (htmlString) => {
|
||||||
|
const htmlDocument = new DOMParser().parseFromString(htmlString, 'text/html');
|
||||||
|
const title = htmlDocument.querySelector('h1')?.innerText;
|
||||||
|
htmlDocument.querySelector('h1')?.remove();
|
||||||
|
return {
|
||||||
|
title,
|
||||||
|
body: htmlDocument.querySelector('body').innerHTML.toString(),
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
export const getRenderedMarkdown = (documentPath) => {
|
||||||
|
return axios
|
||||||
|
.get(helpPagePath(documentPath))
|
||||||
|
.then(({ data }) => {
|
||||||
|
const { body, title } = splitDocument(data.html);
|
||||||
|
return {
|
||||||
|
body,
|
||||||
|
title,
|
||||||
|
hasFetchError: false,
|
||||||
|
};
|
||||||
|
})
|
||||||
|
.catch((e) => {
|
||||||
|
Sentry.captureException(e);
|
||||||
|
return {
|
||||||
|
hasFetchError: true,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
};
|
|
@ -33,9 +33,8 @@ module Ci
|
||||||
end
|
end
|
||||||
|
|
||||||
def runner_variables
|
def runner_variables
|
||||||
stop_expanding_file_vars = ::Feature.enabled?(:ci_stop_expanding_file_vars_for_runners, project)
|
|
||||||
variables
|
variables
|
||||||
.sort_and_expand_all(keep_undefined: true, expand_file_vars: !stop_expanding_file_vars, project: project)
|
.sort_and_expand_all(keep_undefined: true, expand_file_vars: false, project: project)
|
||||||
.to_runner_variables
|
.to_runner_variables
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
- if can?(current_user, :create_deployment, deployment)
|
- if can?(current_user, :read_deployment, deployment)
|
||||||
- actions = deployment.manual_actions
|
- actions = deployment.manual_actions
|
||||||
- if actions.present?
|
- if actions.present?
|
||||||
.btn-group
|
.btn-group
|
||||||
|
@ -8,7 +8,7 @@
|
||||||
= sprite_icon('chevron-down')
|
= sprite_icon('chevron-down')
|
||||||
%ul.dropdown-menu.dropdown-menu-right
|
%ul.dropdown-menu.dropdown-menu-right
|
||||||
- actions.each do |action|
|
- actions.each do |action|
|
||||||
- next unless can?(current_user, :update_build, action)
|
- next unless can?(current_user, :play_job, action)
|
||||||
%li
|
%li
|
||||||
= link_to [:play, @project, action], method: :post, rel: 'nofollow' do
|
= link_to [:play, @project, action], method: :post, rel: 'nofollow' do
|
||||||
%span= action.name
|
%span= action.name
|
||||||
|
|
|
@ -1,8 +0,0 @@
|
||||||
---
|
|
||||||
name: ci_stop_expanding_file_vars_for_runners
|
|
||||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/94198
|
|
||||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/369907
|
|
||||||
milestone: '15.3'
|
|
||||||
type: development
|
|
||||||
group: group::pipeline authoring
|
|
||||||
default_enabled: true
|
|
|
@ -145,6 +145,7 @@ help benefit translation. For example, we:
|
||||||
- Avoid words that can be translated incorrectly, like:
|
- Avoid words that can be translated incorrectly, like:
|
||||||
- [since and because](word_list.md#since)
|
- [since and because](word_list.md#since)
|
||||||
- [once and after](word_list.md#once)
|
- [once and after](word_list.md#once)
|
||||||
|
- [it](word_list.md#it)
|
||||||
- Avoid [ing](word_list.md#-ing-words) words.
|
- Avoid [ing](word_list.md#-ing-words) words.
|
||||||
|
|
||||||
[The GitLab voice](#the-gitlab-voice) dictates that we write clearly and directly,
|
[The GitLab voice](#the-gitlab-voice) dictates that we write clearly and directly,
|
||||||
|
|
|
@ -612,6 +612,21 @@ Use lowercase for **issue board**.
|
||||||
|
|
||||||
Use lowercase for **issue weights**.
|
Use lowercase for **issue weights**.
|
||||||
|
|
||||||
|
## it
|
||||||
|
|
||||||
|
When you use the word **it**, ensure the word it refers to is obvious.
|
||||||
|
If it's not obvious, repeat the word rather than using **it**.
|
||||||
|
|
||||||
|
Use:
|
||||||
|
|
||||||
|
- The field returns a connection. The field accepts four arguments.
|
||||||
|
|
||||||
|
Instead of:
|
||||||
|
|
||||||
|
- The field returns a connection. It accepts four arguments.
|
||||||
|
|
||||||
|
See also [this, these, that, those](#this-these-that-those).
|
||||||
|
|
||||||
## job
|
## job
|
||||||
|
|
||||||
Do not use **build** to be synonymous with **job**. A job is defined in the `.gitlab-ci.yml` file and runs as part of a pipeline.
|
Do not use **build** to be synonymous with **job**. A job is defined in the `.gitlab-ci.yml` file and runs as part of a pipeline.
|
||||||
|
|
|
@ -24768,6 +24768,9 @@ msgstr ""
|
||||||
msgid "March"
|
msgid "March"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
|
msgid "MardownDrawer|Could not fetch help contents."
|
||||||
|
msgstr ""
|
||||||
|
|
||||||
msgid "Mark as done"
|
msgid "Mark as done"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
|
|
|
@ -96,9 +96,14 @@ module QA
|
||||||
return {} unless Env.admin_personal_access_token || Env.personal_access_token
|
return {} unless Env.admin_personal_access_token || Env.personal_access_token
|
||||||
|
|
||||||
client = Env.admin_personal_access_token ? API::Client.as_admin : API::Client.new
|
client = Env.admin_personal_access_token ? API::Client.as_admin : API::Client.new
|
||||||
response = get(API::Request.new(client, '/version').url)
|
response = get(API::Request.new(client, '/metadata').url)
|
||||||
|
|
||||||
JSON.parse(response.body, symbolize_names: true)
|
JSON.parse(response.body, symbolize_names: true).then do |metadata|
|
||||||
|
{
|
||||||
|
**metadata.slice(:version, :revision),
|
||||||
|
kas_version: metadata.dig(:kas, :version)
|
||||||
|
}.compact
|
||||||
|
end
|
||||||
rescue StandardError, ArgumentError => e
|
rescue StandardError, ArgumentError => e
|
||||||
Logger.error("Failed to attach version info to allure report: #{e}")
|
Logger.error("Failed to attach version info to allure report: #{e}")
|
||||||
{}
|
{}
|
||||||
|
|
|
@ -1,10 +1,7 @@
|
||||||
# frozen_string_literal: true
|
# frozen_string_literal: true
|
||||||
|
|
||||||
module QA
|
module QA
|
||||||
RSpec.describe 'Verify', :runner, product_group: :pipeline_authoring, feature_flag: {
|
RSpec.describe 'Verify', :runner, product_group: :pipeline_authoring do
|
||||||
name: 'ci_stop_expanding_file_vars_for_runners',
|
|
||||||
scope: :project
|
|
||||||
} do
|
|
||||||
describe 'Pipeline with project file variables' do
|
describe 'Pipeline with project file variables' do
|
||||||
let(:executor) { "qa-runner-#{Faker::Alphanumeric.alphanumeric(number: 8)}" }
|
let(:executor) { "qa-runner-#{Faker::Alphanumeric.alphanumeric(number: 8)}" }
|
||||||
|
|
||||||
|
@ -14,7 +11,7 @@ module QA
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
let(:runner) do
|
let!(:runner) do
|
||||||
Resource::Runner.fabricate! do |runner|
|
Resource::Runner.fabricate! do |runner|
|
||||||
runner.project = project
|
runner.project = project
|
||||||
runner.name = executor
|
runner.name = executor
|
||||||
|
@ -60,59 +57,30 @@ module QA
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
before do
|
||||||
|
add_file_variables
|
||||||
|
add_ci_file
|
||||||
|
trigger_pipeline
|
||||||
|
wait_for_pipeline
|
||||||
|
end
|
||||||
|
|
||||||
after do
|
after do
|
||||||
runner.remove_via_api!
|
runner.remove_via_api!
|
||||||
end
|
end
|
||||||
|
|
||||||
shared_examples 'variables are read correctly' do
|
it 'shows in job log accordingly', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/370791' do
|
||||||
it 'shows in job log accordingly' do
|
job = Resource::Job.fabricate_via_api! do |job|
|
||||||
job = Resource::Job.fabricate_via_api! do |job|
|
job.project = project
|
||||||
job.project = project
|
job.id = project.job_by_name('test')[:id]
|
||||||
job.id = project.job_by_name('test')[:id]
|
|
||||||
end
|
|
||||||
|
|
||||||
aggregate_failures do
|
|
||||||
trace = job.trace
|
|
||||||
expect(trace).to have_content('run something -f hello, this is test')
|
|
||||||
expect(trace).to have_content('docker run --tlscacert="This is secret"')
|
|
||||||
expect(trace).to have_content('run --output=This is secret.crt')
|
|
||||||
expect(trace).to have_content('Will read private key from hello, this is test')
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
# FF does not change current behavior
|
|
||||||
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/94198#note_1057609893
|
|
||||||
#
|
|
||||||
# TODO: Remove when FF is removed
|
|
||||||
# TODO: Archive testcase issue when FF is removed
|
|
||||||
# Rollout issue: https://gitlab.com/gitlab-org/gitlab/-/issues/369907
|
|
||||||
context 'when FF is on', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/370787' do
|
|
||||||
before do
|
|
||||||
Runtime::Feature.enable(:ci_stop_expanding_file_vars_for_runners, project: project)
|
|
||||||
|
|
||||||
runner
|
|
||||||
add_file_variables
|
|
||||||
add_ci_file
|
|
||||||
trigger_pipeline
|
|
||||||
wait_for_pipeline
|
|
||||||
end
|
end
|
||||||
|
|
||||||
it_behaves_like 'variables are read correctly'
|
aggregate_failures do
|
||||||
end
|
trace = job.trace
|
||||||
|
expect(trace).to have_content('run something -f hello, this is test')
|
||||||
# TODO: Refactor when FF is removed
|
expect(trace).to have_content('docker run --tlscacert="This is secret"')
|
||||||
# TODO: Update testcase issue title and description to not refer to FF status
|
expect(trace).to have_content('run --output=This is secret.crt')
|
||||||
context 'when FF is off', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/370791' do
|
expect(trace).to have_content('Will read private key from hello, this is test')
|
||||||
before do
|
|
||||||
runner
|
|
||||||
add_file_variables
|
|
||||||
add_ci_file
|
|
||||||
trigger_pipeline
|
|
||||||
wait_for_pipeline
|
|
||||||
end
|
end
|
||||||
|
|
||||||
it_behaves_like 'variables are read correctly'
|
|
||||||
end
|
end
|
||||||
|
|
||||||
private
|
private
|
||||||
|
|
|
@ -0,0 +1,29 @@
|
||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
require 'gitlab'
|
||||||
|
require_relative 'default_options'
|
||||||
|
|
||||||
|
class CreateIssue
|
||||||
|
def initialize(options)
|
||||||
|
@project = options.fetch(:project)
|
||||||
|
|
||||||
|
# Force the token to be a string so that if api_token is nil, it's set to '',
|
||||||
|
# allowing unauthenticated requests (for forks).
|
||||||
|
api_token = options.delete(:api_token).to_s
|
||||||
|
|
||||||
|
warn "No API token given." if api_token.empty?
|
||||||
|
|
||||||
|
@client = Gitlab.client(
|
||||||
|
endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
|
||||||
|
private_token: api_token
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
|
def execute(issue_data)
|
||||||
|
client.create_issue(project, issue_data.delete(:title), issue_data)
|
||||||
|
end
|
||||||
|
|
||||||
|
private
|
||||||
|
|
||||||
|
attr_reader :project, :client
|
||||||
|
end
|
|
@ -1,7 +1,7 @@
|
||||||
# frozen_string_literal: true
|
# frozen_string_literal: true
|
||||||
|
|
||||||
require 'gitlab'
|
require 'gitlab'
|
||||||
require 'optparse'
|
|
||||||
require_relative 'default_options'
|
require_relative 'default_options'
|
||||||
|
|
||||||
class PipelineFailedJobs
|
class PipelineFailedJobs
|
||||||
|
|
|
@ -0,0 +1,172 @@
|
||||||
|
#!/usr/bin/env ruby
|
||||||
|
|
||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
require 'optparse'
|
||||||
|
require 'json'
|
||||||
|
|
||||||
|
require_relative 'api/pipeline_failed_jobs'
|
||||||
|
require_relative 'api/create_issue'
|
||||||
|
|
||||||
|
# Creates a GitLab incident issue describing a failed CI pipeline, built
# from CI_* environment variables and the list of failed jobs fetched via
# the API. The incident is created through CreateIssue.
class CreatePipelineFailureIncident
  # Defaults merged with CLI options by the script driver at the bottom
  # of this file.
  DEFAULT_OPTIONS = {
    project: nil,
    incident_json_file: 'incident.json'
  }.freeze
  # Label(s) applied to every incident, in addition to the broken-master label.
  DEFAULT_LABELS = ['Engineering Productivity'].freeze

  def initialize(options)
    @project = options.delete(:project)
    @api_token = options.delete(:api_token)
  end

  # Builds the incident payload and creates it; returns whatever
  # CreateIssue#execute returns (the API response for the created issue).
  def execute
    payload = {
      issue_type: 'incident',
      title: title,
      description: description,
      labels: incident_labels
    }

    CreateIssue.new(project: project, api_token: api_token).execute(payload)
  end

  private

  attr_reader :project, :api_token

  # Failed jobs of the current pipeline, excluding allowed-to-fail jobs.
  # Memoized: the API is hit at most once.
  def failed_jobs
    @failed_jobs ||= PipelineFailedJobs.new(API::DEFAULT_OPTIONS.dup.merge(exclude_allowed_to_fail_jobs: true)).execute
  end

  # Captured once so title/description agree on the timestamp.
  def now
    @now ||= Time.now.utc
  end

  def title
    "#{now.strftime('%A %F %R UTC')} - `#{ENV['CI_PROJECT_PATH']}` broken `#{ENV['CI_COMMIT_REF_NAME']}` " \
      "with #{failed_jobs.size} failed jobs"
  end

  # Markdown body of the incident. NOTE: heredoc indentation is part of the
  # rendered output (nested list items must stay indented under their "1." item).
  def description
    <<~MARKDOWN
      ## #{project_link} pipeline #{pipeline_link} failed

      **Branch: #{branch_link}**

      **Commit: #{commit_link}**

      **Triggered by** #{triggered_by_link} • **Source:** #{source} • **Duration:** #{pipeline_duration} minutes

      **Failed jobs (#{failed_jobs.size}):**

      #{failed_jobs_list}

      ### General guidelines

      Follow the [Broken `master` handbook guide](https://about.gitlab.com/handbook/engineering/workflow/#broken-master).

      ### Investigation

      **Be sure to fill the `Timeline` for this incident.**

      1. If the failure is new, and looks like a potential flaky failure, you can retry the failing job.
         Make sure to mention the retry in the `Timeline` and leave a link to the retried job.
      1. If the failure looks like a broken `master`, communicate the broken `master` in Slack using the "Broadcast Master Broken" workflow:
         - Click the Shortcut lightning bolt icon in the `#master-broken` channel and select "Broadcast Master Broken".
         - Click "Continue the broadcast" after the automated message in `#master-broken`.

      ### Pre-resolution

      If you believe that there's an easy resolution by either:

      - Reverting a particular merge request. Make sure to add the ~"pipeline:revert" label in that case, to speed up the revert pipeline.
      - Making a quick fix (for example, one line or a few similar simple changes in a few lines).
        You can create a merge request, assign to any available maintainer, and ping people that were involved/related to the introduction of the failure.
        Additionally, a message can be posted in `#backend_maintainers` or `#frontend_maintainers` to get a maintainer take a look at the fix ASAP.

      ### Resolution

      Follow [the Resolution steps from the handbook](https://about.gitlab.com/handbook/engineering/workflow/#responsibilities-of-the-resolution-dri).
    MARKDOWN
  end

  # Chooses the FOSS or EE/CE broken-master label based on the project name.
  def incident_labels
    master_broken_label =
      if ENV['CI_PROJECT_NAME'] == 'gitlab-foss'
        'master:foss-broken'
      else
        'master:broken'
      end

    DEFAULT_LABELS.dup << master_broken_label
  end

  def pipeline_link
    "[##{ENV['CI_PIPELINE_ID']}](#{ENV['CI_PIPELINE_URL']})"
  end

  def branch_link
    "[`#{ENV['CI_COMMIT_REF_NAME']}`](#{ENV['CI_PROJECT_URL']}/-/commits/#{ENV['CI_COMMIT_REF_NAME']})"
  end

  # Minutes elapsed since the pipeline was created, rounded to 2 decimals.
  # Relies on Time.parse, i.e. on `require 'time'` being loaded.
  def pipeline_duration
    ((Time.now - Time.parse(ENV['CI_PIPELINE_CREATED_AT'])) / 60.to_f).round(2)
  end

  def commit_link
    "[#{ENV['CI_COMMIT_TITLE']}](#{ENV['CI_PROJECT_URL']}/-/commit/#{ENV['CI_COMMIT_SHA']})"
  end

  def source
    "`#{ENV['CI_PIPELINE_SOURCE']}`"
  end

  def project_link
    "[#{ENV['CI_PROJECT_PATH']}](#{ENV['CI_PROJECT_URL']})"
  end

  def triggered_by_link
    "[#{ENV['GITLAB_USER_NAME']}](#{ENV['CI_SERVER_URL']}/#{ENV['GITLAB_USER_LOGIN']})"
  end

  # NOTE(review): not referenced anywhere in this file — presumably kept for
  # an alternate title format; confirm before removing.
  def failed_jobs_list_for_title
    failed_jobs.map(&:name).join(', ')
  end

  # One Markdown bullet per failed job, linking to the job page.
  def failed_jobs_list
    failed_jobs.map { |job| "- [#{job.name}](#{job.web_url})" }.join("\n")
  end
end
|
||||||
|
|
||||||
|
# CLI entry point: parse options, create the incident, and optionally dump
# the created incident as pretty-printed JSON for later pipeline steps.
if $PROGRAM_NAME == __FILE__
  options = CreatePipelineFailureIncident::DEFAULT_OPTIONS.dup

  parser = OptionParser.new do |opts|
    opts.on("-p", "--project PROJECT", String, "Project where to create the incident (defaults to "\
      "`#{CreatePipelineFailureIncident::DEFAULT_OPTIONS[:project]}`)") do |v|
      options[:project] = v
    end

    opts.on("-f", "--incident-json-file file_path", String, "Path to a file where to save the incident JSON data "\
      "(defaults to `#{CreatePipelineFailureIncident::DEFAULT_OPTIONS[:incident_json_file]}`)") do |v|
      options[:incident_json_file] = v
    end

    opts.on("-t", "--api-token API_TOKEN", String, "A valid Project token with the `Reporter` role and `api` scope "\
      "to create the incident") do |v|
      options[:api_token] = v
    end

    opts.on("-h", "--help", "Prints this help") do
      puts opts
      exit
    end
  end
  parser.parse!

  # Not an option of CreatePipelineFailureIncident itself — pull it out first.
  json_dump_path = options.delete(:incident_json_file)

  incident = CreatePipelineFailureIncident.new(options).execute
  File.write(json_dump_path, JSON.pretty_generate(incident.to_h)) if json_dump_path
end
|
|
@ -2,21 +2,23 @@
|
||||||
|
|
||||||
# frozen_string_literal: true
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
require 'optparse'
|
||||||
|
require 'json'
|
||||||
|
|
||||||
require_relative 'api/pipeline_failed_jobs'
|
require_relative 'api/pipeline_failed_jobs'
|
||||||
|
|
||||||
finder_options = API::DEFAULT_OPTIONS.dup.merge(exclude_allowed_to_fail_jobs: true)
|
class GenerateFailedPipelineSlackMessage
|
||||||
failed_jobs = PipelineFailedJobs.new(finder_options).execute
|
DEFAULT_OPTIONS = {
|
||||||
|
failed_pipeline_slack_message_file: 'failed_pipeline_slack_message.json',
|
||||||
|
incident_json_file: 'incident.json'
|
||||||
|
}.freeze
|
||||||
|
|
||||||
class SlackReporter
|
def initialize(options)
|
||||||
DEFAULT_FAILED_PIPELINE_REPORT_FILE = 'failed_pipeline_report.json'
|
@incident_json_file = options.delete(:incident_json_file)
|
||||||
|
|
||||||
def initialize(failed_jobs)
|
|
||||||
@failed_jobs = failed_jobs
|
|
||||||
@failed_pipeline_report_file = ENV.fetch('FAILED_PIPELINE_REPORT_FILE', DEFAULT_FAILED_PIPELINE_REPORT_FILE)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
def report
|
def execute
|
||||||
payload = {
|
{
|
||||||
channel: ENV['SLACK_CHANNEL'],
|
channel: ENV['SLACK_CHANNEL'],
|
||||||
username: "Failed pipeline reporter",
|
username: "Failed pipeline reporter",
|
||||||
icon_emoji: ":boom:",
|
icon_emoji: ":boom:",
|
||||||
|
@ -27,33 +29,36 @@ class SlackReporter
|
||||||
text: {
|
text: {
|
||||||
type: "mrkdwn",
|
type: "mrkdwn",
|
||||||
text: "*#{title}*"
|
text: "*#{title}*"
|
||||||
|
},
|
||||||
|
accessory: {
|
||||||
|
type: "button",
|
||||||
|
text: {
|
||||||
|
type: "plain_text",
|
||||||
|
text: incident_button_text
|
||||||
|
},
|
||||||
|
url: incident_button_link
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
type: "section",
|
type: "section",
|
||||||
fields: [
|
text: {
|
||||||
{
|
type: "mrkdwn",
|
||||||
type: "mrkdwn",
|
text: "*Branch*: #{branch_link}"
|
||||||
text: "*Commit*\n#{commit_link}"
|
}
|
||||||
},
|
|
||||||
{
|
|
||||||
type: "mrkdwn",
|
|
||||||
text: "*Triggered by*\n#{triggered_by_link}"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
type: "section",
|
type: "section",
|
||||||
fields: [
|
text: {
|
||||||
{
|
type: "mrkdwn",
|
||||||
type: "mrkdwn",
|
text: "*Commit*: #{commit_link}"
|
||||||
text: "*Source*\n#{source} from #{project_link}"
|
}
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
type: "mrkdwn",
|
type: "section",
|
||||||
text: "*Duration*\n#{pipeline_duration} minutes"
|
text: {
|
||||||
}
|
type: "mrkdwn",
|
||||||
]
|
text: "*Triggered by* #{triggered_by_link} • *Source:* #{source} • *Duration:* #{pipeline_duration} minutes"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
type: "section",
|
type: "section",
|
||||||
|
@ -64,16 +69,41 @@ class SlackReporter
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
File.write(failed_pipeline_report_file, JSON.pretty_generate(payload))
|
|
||||||
end
|
end
|
||||||
|
|
||||||
private
|
private
|
||||||
|
|
||||||
attr_reader :failed_jobs, :failed_pipeline_report_file
|
attr_reader :incident_json_file
|
||||||
|
|
||||||
|
def failed_jobs
|
||||||
|
@failed_jobs ||= PipelineFailedJobs.new(API::DEFAULT_OPTIONS.dup.merge(exclude_allowed_to_fail_jobs: true)).execute
|
||||||
|
end
|
||||||
|
|
||||||
def title
|
def title
|
||||||
"Pipeline #{pipeline_link} for #{branch_link} failed"
|
"#{project_link} pipeline #{pipeline_link} failed"
|
||||||
|
end
|
||||||
|
|
||||||
|
def incident_exist?
|
||||||
|
return @incident_exist if defined?(@incident_exist)
|
||||||
|
|
||||||
|
@incident_exist = File.exist?(incident_json_file)
|
||||||
|
end
|
||||||
|
|
||||||
|
def incident_button_text
|
||||||
|
if incident_exist?
|
||||||
|
'View incident'
|
||||||
|
else
|
||||||
|
'Create incident'
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def incident_button_link
|
||||||
|
if incident_exist?
|
||||||
|
JSON.parse(File.read(incident_json_file))['web_url']
|
||||||
|
else
|
||||||
|
"#{ENV['CI_SERVER_URL']}/#{ENV['BROKEN_MASTER_INCIDENTS_PROJECT']}/-/issues/new?" \
|
||||||
|
"issuable_template=incident&issue%5Bissue_type%5D=incident"
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def pipeline_link
|
def pipeline_link
|
||||||
|
@ -101,7 +131,7 @@ class SlackReporter
|
||||||
end
|
end
|
||||||
|
|
||||||
def project_link
|
def project_link
|
||||||
"<#{ENV['CI_PROJECT_URL']}|#{ENV['CI_PROJECT_NAME']}>"
|
"<#{ENV['CI_PROJECT_URL']}|#{ENV['CI_PROJECT_PATH']}>"
|
||||||
end
|
end
|
||||||
|
|
||||||
def triggered_by_link
|
def triggered_by_link
|
||||||
|
@ -113,4 +143,33 @@ class SlackReporter
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
SlackReporter.new(failed_jobs).report
|
# CLI entry point: parse options, build the Slack message payload for the
# failed pipeline, and optionally write it to a JSON file for a later
# notification step.
if $PROGRAM_NAME == __FILE__
  options = GenerateFailedPipelineSlackMessage::DEFAULT_OPTIONS.dup

  OptionParser.new do |opts|
    opts.on("-i", "--incident-json-file file_path", String, "Path to a file where the incident JSON data "\
      "can be found (defaults to "\
      "`#{GenerateFailedPipelineSlackMessage::DEFAULT_OPTIONS[:incident_json_file]}`)") do |value|
      options[:incident_json_file] = value
    end

    opts.on("-f", "--failed-pipeline-slack-message-file file_path", String, "Path to a file where to save the Slack "\
      "message (defaults to "\
      "`#{GenerateFailedPipelineSlackMessage::DEFAULT_OPTIONS[:failed_pipeline_slack_message_file]}`)") do |value|
      options[:failed_pipeline_slack_message_file] = value
    end

    opts.on("-h", "--help", "Prints this help") do
      puts opts
      exit
    end
  end.parse!

  # The output path is a script-level concern, not a class option.
  failed_pipeline_slack_message_file = options.delete(:failed_pipeline_slack_message_file)

  GenerateFailedPipelineSlackMessage.new(options).execute.tap do |message_payload|
    if failed_pipeline_slack_message_file
      File.write(failed_pipeline_slack_message_file, JSON.pretty_generate(message_payload))
    end
  end
end
|
||||||
|
|
|
@ -0,0 +1,205 @@
|
||||||
|
import { GlDrawer, GlAlert, GlSkeletonLoader } from '@gitlab/ui';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import MarkdownDrawer, { cache } from '~/vue_shared/components/markdown_drawer/markdown_drawer.vue';
import { getRenderedMarkdown } from '~/vue_shared/components/markdown_drawer/utils/fetch';
import { contentTop } from '~/lib/utils/common_utils';

// Stub the fetch util so no network request happens; returns a fixed
// title/body pair unless a test overrides the mock.
jest.mock('~/vue_shared/components/markdown_drawer/utils/fetch', () => ({
  getRenderedMarkdown: jest.fn().mockReturnValue({
    title: 'test title test',
    body: `<div id="content-body">
        <div class="documentation md gl-mt-3">
          test body
        </div>
      </div>`,
  }),
}));

jest.mock('~/lib/utils/common_utils', () => ({
  contentTop: jest.fn(),
}));

describe('MarkdownDrawer', () => {
  let wrapper;
  const defaultProps = {
    documentPath: 'user/search/global_search/advanced_search_syntax.json',
  };

  const createComponent = (props) => {
    wrapper = shallowMountExtended(MarkdownDrawer, {
      propsData: {
        ...defaultProps,
        ...props,
      },
    });
  };

  afterEach(() => {
    wrapper.destroy();
    wrapper = null;
    // The component caches fetched documents module-wide; reset between tests.
    Object.keys(cache).forEach((key) => delete cache[key]);
  });

  const findDrawer = () => wrapper.findComponent(GlDrawer);
  const findAlert = () => wrapper.findComponent(GlAlert);
  const findSkeleton = () => wrapper.findComponent(GlSkeletonLoader);
  const findDrawerTitle = () => wrapper.findComponent('[data-testid="title-element"]');
  const findDrawerBody = () => wrapper.findComponent({ ref: 'content-element' });

  describe('component', () => {
    beforeEach(() => {
      createComponent();
    });

    it('renders correctly', () => {
      expect(findDrawer().exists()).toBe(true);
      expect(findDrawerTitle().text()).toBe('test title test');
      expect(findDrawerBody().text()).toBe('test body');
    });
  });

  // The drawer's header height depends on whether a navbar element exists.
  describe.each`
    hasNavbar | navbarHeight
    ${false}  | ${0}
    ${true}   | ${100}
  `('computes offsetTop', ({ hasNavbar, navbarHeight }) => {
    beforeEach(() => {
      global.document.querySelector = jest.fn(() =>
        hasNavbar
          ? {
              dataset: {
                page: 'test',
              },
            }
          : undefined,
      );
      contentTop.mockReturnValue(navbarHeight);
      createComponent();
    });

    afterEach(() => {
      contentTop.mockClear();
    });

    it(`computes offsetTop ${hasNavbar ? 'with' : 'without'} .navbar-gitlab`, () => {
      expect(findDrawer().attributes('headerheight')).toBe(`${navbarHeight}px`);
    });
  });

  describe('watcher', () => {
    let renderGLFMSpy;
    let fetchMarkdownSpy;

    beforeEach(async () => {
      renderGLFMSpy = jest.spyOn(MarkdownDrawer.methods, 'renderGLFM');
      fetchMarkdownSpy = jest.spyOn(MarkdownDrawer.methods, 'fetchMarkdown');
      global.document.querySelector = jest.fn(() => ({
        getBoundingClientRect: jest.fn(() => ({ bottom: 100 })),
        dataset: {
          page: 'test',
        },
      }));
      createComponent();
      await nextTick();
    });

    afterEach(() => {
      renderGLFMSpy.mockClear();
      fetchMarkdownSpy.mockClear();
    });

    it('for documentPath triggers fetch', async () => {
      expect(fetchMarkdownSpy).toHaveBeenCalledTimes(1);

      await wrapper.setProps({ documentPath: '/test/me' });
      await nextTick();

      expect(fetchMarkdownSpy).toHaveBeenCalledTimes(2);
    });

    it('for open triggers renderGLFM', async () => {
      wrapper.vm.fetchMarkdown();
      wrapper.vm.openDrawer();
      await nextTick();
      expect(renderGLFMSpy).toHaveBeenCalled();
    });
  });

  describe('Markdown fetching', () => {
    let renderGLFMSpy;

    beforeEach(async () => {
      renderGLFMSpy = jest.spyOn(MarkdownDrawer.methods, 'renderGLFM');
      createComponent();
      await nextTick();
    });

    afterEach(() => {
      renderGLFMSpy.mockClear();
    });

    it('fetches the Markdown and caches it', async () => {
      expect(getRenderedMarkdown).toHaveBeenCalledTimes(1);
      expect(Object.keys(cache)).toHaveLength(1);
    });

    it('when the document changes, fetches it and caches it as well', async () => {
      expect(getRenderedMarkdown).toHaveBeenCalledTimes(1);
      expect(Object.keys(cache)).toHaveLength(1);

      await wrapper.setProps({ documentPath: '/test/me2' });
      await nextTick();

      expect(getRenderedMarkdown).toHaveBeenCalledTimes(2);
      expect(Object.keys(cache)).toHaveLength(2);
    });

    it('when re-using an already fetched document, gets it from the cache', async () => {
      await wrapper.setProps({ documentPath: '/test/me2' });
      await nextTick();

      expect(getRenderedMarkdown).toHaveBeenCalledTimes(2);
      expect(Object.keys(cache)).toHaveLength(2);

      // Switching back to a cached path must not trigger another fetch.
      await wrapper.setProps({ documentPath: defaultProps.documentPath });
      await nextTick();

      expect(getRenderedMarkdown).toHaveBeenCalledTimes(2);
      expect(Object.keys(cache)).toHaveLength(2);
    });
  });

  describe('Markdown fetching returns error', () => {
    beforeEach(async () => {
      getRenderedMarkdown.mockReturnValue({
        hasFetchError: true,
      });

      createComponent();
      await nextTick();
    });
    afterEach(() => {
      getRenderedMarkdown.mockClear();
    });
    it('shows alert', () => {
      expect(findAlert().exists()).toBe(true);
    });
  });

  describe('While Markdown is fetching', () => {
    beforeEach(async () => {
      // A never-resolving promise keeps the component in its loading state.
      getRenderedMarkdown.mockReturnValue(new Promise(() => {}));

      createComponent();
    });

    afterEach(() => {
      getRenderedMarkdown.mockClear();
    });

    it('shows skeleton', async () => {
      expect(findSkeleton().exists()).toBe(true);
    });
  });
});
|
|
@ -0,0 +1,42 @@
|
||||||
|
// Raw HTML page as returned by the rendered-markdown endpoint; newlines are
// stripped so the fixture is a single-line string like the real response.
// NOTE(review): leading whitespace inside this template literal is part of the
// resulting string (only \n is removed) and appears to have been lost in this
// paste — verify it still matches MOCK_DRAWER_DATA.body below.
export const MOCK_HTML = `<!DOCTYPE html>
<html>
<body>
<div id="content-body">
<h1>test title <strong>test</strong></h1>
<div class="documentation md gl-mt-3">
<a href="../advanced_search.md">Advanced Search</a>
<a href="../advanced_search2.md">Advanced Search2</a>
<h2>test header h2</h2>
<table class="testClass">
<tr>
<td>Emil</td>
<td>Tobias</td>
<td>Linus</td>
</tr>
<tr>
<td>16</td>
<td>14</td>
<td>10</td>
</tr>
</table>
</div>
</div>
</body>
</html>`.replace(/\n/g, '');

// Expected parsed result for MOCK_HTML: extracted title text plus the
// content-body markup (browser-normalized, e.g. <tbody> inserted).
export const MOCK_DRAWER_DATA = {
  hasFetchError: false,
  title: 'test title test',
  body: ` <div id="content-body"> <div class="documentation md gl-mt-3"> <a href="../advanced_search.md">Advanced Search</a> <a href="../advanced_search2.md">Advanced Search2</a> <h2>test header h2</h2> <table class="testClass"> <tbody><tr> <td>Emil</td> <td>Tobias</td> <td>Linus</td> </tr> <tr> <td>16</td> <td>14</td> <td>10</td> </tr> </tbody></table> </div> </div>`,
};

// Expected result when the fetch fails.
export const MOCK_DRAWER_DATA_ERROR = {
  hasFetchError: true,
};

// Input/output pair for splitDocument(): title comes from <h1>, body keeps
// the table markup.
export const MOCK_TABLE_DATA_BEFORE = `<head></head><body><h1>test</h1></test><table><tbody><tr><td></td></tr></tbody></table></body>`;

export const MOCK_HTML_DATA_AFTER = {
  body: '<table><tbody><tr><td></td></tr></tbody></table>',
  title: 'test',
};
|
|
@ -0,0 +1,43 @@
|
||||||
|
import MockAdapter from 'axios-mock-adapter';
import {
  getRenderedMarkdown,
  splitDocument,
} from '~/vue_shared/components/markdown_drawer/utils/fetch';
import axios from '~/lib/utils/axios_utils';
import {
  MOCK_HTML,
  MOCK_DRAWER_DATA,
  MOCK_DRAWER_DATA_ERROR,
  MOCK_TABLE_DATA_BEFORE,
  MOCK_HTML_DATA_AFTER,
} from '../mock_data';

describe('utils/fetch', () => {
  let mock;

  afterEach(() => {
    mock.restore();
  });

  // Drives getRenderedMarkdown through both a successful (200) and a failed
  // (500) HTTP response and checks the shape it resolves to.
  describe.each`
    axiosMock                                  | type         | toExpect
    ${{ code: 200, res: { html: MOCK_HTML } }} | ${'success'} | ${MOCK_DRAWER_DATA}
    ${{ code: 500, res: null }}                | ${'error'}   | ${MOCK_DRAWER_DATA_ERROR}
  `('process markdown data', ({ axiosMock, type, toExpect }) => {
    describe(`if api fetch responds with ${type}`, () => {
      beforeEach(() => {
        mock = new MockAdapter(axios);
        mock.onGet().reply(axiosMock.code, axiosMock.res);
      });
      it(`should update drawer correctly`, async () => {
        expect(await getRenderedMarkdown('/any/path')).toStrictEqual(toExpect);
      });
    });
  });

  describe('splitDocument', () => {
    it(`should update tables correctly`, () => {
      expect(splitDocument(MOCK_TABLE_DATA_BEFORE)).toStrictEqual(MOCK_HTML_DATA_AFTER);
    });
  });
});
|
|
@ -359,23 +359,6 @@ RSpec.describe Ci::BuildRunnerPresenter do
|
||||||
|
|
||||||
runner_variables
|
runner_variables
|
||||||
end
|
end
|
||||||
|
|
||||||
context 'when the FF ci_stop_expanding_file_vars_for_runners is disabled' do
|
|
||||||
before do
|
|
||||||
stub_feature_flags(ci_stop_expanding_file_vars_for_runners: false)
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'returns variables with expanded' do
|
|
||||||
expect(runner_variables).to include(
|
|
||||||
{ key: 'regular_var', value: 'value 1',
|
|
||||||
public: false, masked: false },
|
|
||||||
{ key: 'file_var', value: 'value 2',
|
|
||||||
public: false, masked: false, file: true },
|
|
||||||
{ key: 'var_with_variables', value: 'value 3 and value 1 and value 2 and $undefined_var',
|
|
||||||
public: false, masked: false }
|
|
||||||
)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -225,7 +225,7 @@ RSpec.describe AutoMerge::MergeWhenPipelineSucceedsService do
|
||||||
|
|
||||||
let!(:build) do
|
let!(:build) do
|
||||||
create(:ci_build, :created, pipeline: pipeline, ref: ref,
|
create(:ci_build, :created, pipeline: pipeline, ref: ref,
|
||||||
name: 'build', ci_stage: build_stage )
|
name: 'build', ci_stage: build_stage)
|
||||||
end
|
end
|
||||||
|
|
||||||
let!(:test) do
|
let!(:test) do
|
||||||
|
|
|
@ -27,7 +27,7 @@ RSpec.describe Boards::Issues::ListService do
|
||||||
|
|
||||||
let_it_be(:opened_issue1) { create(:labeled_issue, project: project, milestone: m1, title: 'Issue 1', labels: [bug]) }
|
let_it_be(:opened_issue1) { create(:labeled_issue, project: project, milestone: m1, title: 'Issue 1', labels: [bug]) }
|
||||||
let_it_be(:opened_issue2) { create(:labeled_issue, project: project, milestone: m2, title: 'Issue 2', labels: [p2]) }
|
let_it_be(:opened_issue2) { create(:labeled_issue, project: project, milestone: m2, title: 'Issue 2', labels: [p2]) }
|
||||||
let_it_be(:reopened_issue1) { create(:issue, :opened, project: project, title: 'Reopened Issue 1' ) }
|
let_it_be(:reopened_issue1) { create(:issue, :opened, project: project, title: 'Reopened Issue 1') }
|
||||||
|
|
||||||
let_it_be(:list1_issue1) { create(:labeled_issue, project: project, milestone: m1, labels: [p2, development]) }
|
let_it_be(:list1_issue1) { create(:labeled_issue, project: project, milestone: m1, labels: [p2, development]) }
|
||||||
let_it_be(:list1_issue2) { create(:labeled_issue, project: project, milestone: m2, labels: [development]) }
|
let_it_be(:list1_issue2) { create(:labeled_issue, project: project, milestone: m2, labels: [development]) }
|
||||||
|
@ -110,7 +110,7 @@ RSpec.describe Boards::Issues::ListService do
|
||||||
let!(:opened_issue1) { create(:labeled_issue, project: project, milestone: m1, title: 'Issue 1', labels: [bug]) }
|
let!(:opened_issue1) { create(:labeled_issue, project: project, milestone: m1, title: 'Issue 1', labels: [bug]) }
|
||||||
let!(:opened_issue2) { create(:labeled_issue, project: project, milestone: m2, title: 'Issue 2', labels: [p2, p2_project]) }
|
let!(:opened_issue2) { create(:labeled_issue, project: project, milestone: m2, title: 'Issue 2', labels: [p2, p2_project]) }
|
||||||
let!(:opened_issue3) { create(:labeled_issue, project: project_archived, milestone: m1, title: 'Issue 3', labels: [bug]) }
|
let!(:opened_issue3) { create(:labeled_issue, project: project_archived, milestone: m1, title: 'Issue 3', labels: [bug]) }
|
||||||
let!(:reopened_issue1) { create(:issue, state: 'opened', project: project, title: 'Reopened Issue 1', closed_at: Time.current ) }
|
let!(:reopened_issue1) { create(:issue, state: 'opened', project: project, title: 'Reopened Issue 1', closed_at: Time.current) }
|
||||||
|
|
||||||
let!(:list1_issue1) { create(:labeled_issue, project: project, milestone: m1, labels: [p2, p2_project, development]) }
|
let!(:list1_issue1) { create(:labeled_issue, project: project, milestone: m1, labels: [p2, p2_project, development]) }
|
||||||
let!(:list1_issue2) { create(:labeled_issue, project: project, milestone: m2, labels: [development]) }
|
let!(:list1_issue2) { create(:labeled_issue, project: project, milestone: m2, labels: [development]) }
|
||||||
|
|
|
@ -41,7 +41,7 @@ RSpec.describe Ci::CompareTestReportsService do
|
||||||
it 'returns a parsed TestReports success status and failure on the individual suite' do
|
it 'returns a parsed TestReports success status and failure on the individual suite' do
|
||||||
expect(comparison[:status]).to eq(:parsed)
|
expect(comparison[:status]).to eq(:parsed)
|
||||||
expect(comparison.dig(:data, 'status')).to eq('success')
|
expect(comparison.dig(:data, 'status')).to eq('success')
|
||||||
expect(comparison.dig(:data, 'suites', 0, 'status') ).to eq('error')
|
expect(comparison.dig(:data, 'suites', 0, 'status')).to eq('error')
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -104,7 +104,7 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection
|
||||||
|
|
||||||
describe '#processing_processables' do
|
describe '#processing_processables' do
|
||||||
it 'returns processables marked as processing' do
|
it 'returns processables marked as processing' do
|
||||||
expect(collection.processing_processables.map { |processable| processable[:id] } )
|
expect(collection.processing_processables.map { |processable| processable[:id] })
|
||||||
.to contain_exactly(build_a.id, build_b.id, test_a.id, test_b.id, deploy.id)
|
.to contain_exactly(build_a.id, build_b.id, test_a.id, test_b.id, deploy.id)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -153,7 +153,7 @@ RSpec.describe Ci::RetryJobService do
|
||||||
|
|
||||||
context 'when the pipeline has other jobs' do
|
context 'when the pipeline has other jobs' do
|
||||||
let!(:stage2) { create(:ci_stage, project: project, pipeline: pipeline, name: 'deploy') }
|
let!(:stage2) { create(:ci_stage, project: project, pipeline: pipeline, name: 'deploy') }
|
||||||
let!(:build2) { create(:ci_build, pipeline: pipeline, ci_stage: stage ) }
|
let!(:build2) { create(:ci_build, pipeline: pipeline, ci_stage: stage) }
|
||||||
let!(:deploy) { create(:ci_build, pipeline: pipeline, ci_stage: stage2) }
|
let!(:deploy) { create(:ci_build, pipeline: pipeline, ci_stage: stage2) }
|
||||||
let!(:deploy_needs_build2) { create(:ci_build_need, build: deploy, name: build2.name) }
|
let!(:deploy_needs_build2) { create(:ci_build_need, build: deploy, name: build2.name) }
|
||||||
|
|
||||||
|
|
|
@ -42,7 +42,7 @@ RSpec.describe Clusters::Gcp::ProvisionService do
|
||||||
gcp_project_id, zone,
|
gcp_project_id, zone,
|
||||||
{
|
{
|
||||||
"status": 'unexpected'
|
"status": 'unexpected'
|
||||||
} )
|
})
|
||||||
end
|
end
|
||||||
|
|
||||||
it_behaves_like 'error'
|
it_behaves_like 'error'
|
||||||
|
|
|
@ -44,7 +44,7 @@ RSpec.describe Clusters::Gcp::VerifyProvisionStatusService do
|
||||||
{
|
{
|
||||||
"status": 'RUNNING',
|
"status": 'RUNNING',
|
||||||
"startTime": 1.minute.ago.strftime("%FT%TZ")
|
"startTime": 1.minute.ago.strftime("%FT%TZ")
|
||||||
} )
|
})
|
||||||
end
|
end
|
||||||
|
|
||||||
it_behaves_like 'continue_creation'
|
it_behaves_like 'continue_creation'
|
||||||
|
@ -56,7 +56,7 @@ RSpec.describe Clusters::Gcp::VerifyProvisionStatusService do
|
||||||
{
|
{
|
||||||
"status": 'RUNNING',
|
"status": 'RUNNING',
|
||||||
"startTime": 30.minutes.ago.strftime("%FT%TZ")
|
"startTime": 30.minutes.ago.strftime("%FT%TZ")
|
||||||
} )
|
})
|
||||||
end
|
end
|
||||||
|
|
||||||
it_behaves_like 'error'
|
it_behaves_like 'error'
|
||||||
|
@ -70,7 +70,7 @@ RSpec.describe Clusters::Gcp::VerifyProvisionStatusService do
|
||||||
{
|
{
|
||||||
"status": 'PENDING',
|
"status": 'PENDING',
|
||||||
"startTime": 1.minute.ago.strftime("%FT%TZ")
|
"startTime": 1.minute.ago.strftime("%FT%TZ")
|
||||||
} )
|
})
|
||||||
end
|
end
|
||||||
|
|
||||||
it_behaves_like 'continue_creation'
|
it_behaves_like 'continue_creation'
|
||||||
|
@ -82,7 +82,7 @@ RSpec.describe Clusters::Gcp::VerifyProvisionStatusService do
|
||||||
gcp_project_id, zone, operation_id,
|
gcp_project_id, zone, operation_id,
|
||||||
{
|
{
|
||||||
"status": 'DONE'
|
"status": 'DONE'
|
||||||
} )
|
})
|
||||||
end
|
end
|
||||||
|
|
||||||
it_behaves_like 'finalize_creation'
|
it_behaves_like 'finalize_creation'
|
||||||
|
@ -94,7 +94,7 @@ RSpec.describe Clusters::Gcp::VerifyProvisionStatusService do
|
||||||
gcp_project_id, zone, operation_id,
|
gcp_project_id, zone, operation_id,
|
||||||
{
|
{
|
||||||
"status": 'unexpected'
|
"status": 'unexpected'
|
||||||
} )
|
})
|
||||||
end
|
end
|
||||||
|
|
||||||
it_behaves_like 'error'
|
it_behaves_like 'error'
|
||||||
|
|
|
@ -146,7 +146,7 @@ RSpec.describe Groups::DestroyService do
|
||||||
end
|
end
|
||||||
|
|
||||||
expect { destroy_group(group, user, false) }
|
expect { destroy_group(group, user, false) }
|
||||||
.to raise_error(Groups::DestroyService::DestroyError, "Project #{project.id} can't be deleted" )
|
.to raise_error(Groups::DestroyService::DestroyError, "Project #{project.id} can't be deleted")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -127,7 +127,7 @@ RSpec.describe Groups::UpdateSharedRunnersService do
|
||||||
end
|
end
|
||||||
|
|
||||||
context 'when parent does not allow' do
|
context 'when parent does not allow' do
|
||||||
let_it_be(:parent) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false ) }
|
let_it_be(:parent) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false) }
|
||||||
let_it_be(:group) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
|
let_it_be(:group) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
|
||||||
|
|
||||||
it 'results error' do
|
it 'results error' do
|
||||||
|
|
|
@ -119,7 +119,7 @@ RSpec.describe Import::FogbugzService do
|
||||||
let(:error_messages_array) { instance_double(Array, join: "something went wrong") }
|
let(:error_messages_array) { instance_double(Array, join: "something went wrong") }
|
||||||
let(:errors_double) { instance_double(ActiveModel::Errors, full_messages: error_messages_array, :[] => nil) }
|
let(:errors_double) { instance_double(ActiveModel::Errors, full_messages: error_messages_array, :[] => nil) }
|
||||||
let(:project_double) { instance_double(Project, persisted?: false, errors: errors_double) }
|
let(:project_double) { instance_double(Project, persisted?: false, errors: errors_double) }
|
||||||
let(:project_creator) { instance_double(Gitlab::FogbugzImport::ProjectCreator, execute: project_double ) }
|
let(:project_creator) { instance_double(Gitlab::FogbugzImport::ProjectCreator, execute: project_double) }
|
||||||
|
|
||||||
before do
|
before do
|
||||||
allow(Gitlab::FogbugzImport::ProjectCreator).to receive(:new).and_return(project_creator)
|
allow(Gitlab::FogbugzImport::ProjectCreator).to receive(:new).and_return(project_creator)
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
require 'spec_helper'
|
require 'spec_helper'
|
||||||
|
|
||||||
RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::FileUpload, :aggregate_failures do
|
RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::FileUpload, :aggregate_failures do
|
||||||
let(:file) { UploadedFile.new( File.join('spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz') ) }
|
let(:file) { UploadedFile.new(File.join('spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz')) }
|
||||||
|
|
||||||
describe 'validation' do
|
describe 'validation' do
|
||||||
it 'validates presence of file' do
|
it 'validates presence of file' do
|
||||||
|
|
|
@ -185,7 +185,7 @@ RSpec.describe Issues::ExportCsvService do
|
||||||
|
|
||||||
labeled_rows = csv.select { |entry| labeled_issues.map(&:iid).include?(entry['Issue ID'].to_i) }
|
labeled_rows = csv.select { |entry| labeled_issues.map(&:iid).include?(entry['Issue ID'].to_i) }
|
||||||
expect(labeled_rows.count).to eq(2)
|
expect(labeled_rows.count).to eq(2)
|
||||||
expect(labeled_rows.map { |entry| entry['Labels'] }).to all( eq("Feature,Idea") )
|
expect(labeled_rows.map { |entry| entry['Labels'] }).to all(eq("Feature,Idea"))
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -171,7 +171,7 @@ RSpec.describe Labels::PromoteService do
|
||||||
end
|
end
|
||||||
|
|
||||||
context 'when there is an existing identical group label' do
|
context 'when there is an existing identical group label' do
|
||||||
let!(:existing_group_label) { create(:group_label, group: group_1, title: project_label_1_1.title ) }
|
let!(:existing_group_label) { create(:group_label, group: group_1, title: project_label_1_1.title) }
|
||||||
|
|
||||||
it 'uses the existing group label' do
|
it 'uses the existing group label' do
|
||||||
expect { service.execute(project_label_1_1) }
|
expect { service.execute(project_label_1_1) }
|
||||||
|
|
|
@ -12,7 +12,7 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
|
||||||
let(:params) { {} }
|
let(:params) { {} }
|
||||||
let(:base_params) { { access_level: Gitlab::Access::GUEST, source: project, invite_source: '_invite_source_' } }
|
let(:base_params) { { access_level: Gitlab::Access::GUEST, source: project, invite_source: '_invite_source_' } }
|
||||||
|
|
||||||
subject(:result) { described_class.new(user, base_params.merge(params) ).execute }
|
subject(:result) { described_class.new(user, base_params.merge(params)).execute }
|
||||||
|
|
||||||
context 'when there is a valid member invited' do
|
context 'when there is a valid member invited' do
|
||||||
let(:params) { { email: 'email@example.org' } }
|
let(:params) { { email: 'email@example.org' } }
|
||||||
|
|
|
@ -93,7 +93,7 @@ RSpec.describe MergeRequests::BuildService do
|
||||||
|
|
||||||
shared_examples 'with a Default.md template' do
|
shared_examples 'with a Default.md template' do
|
||||||
let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
|
let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
|
||||||
let(:project) { create(:project, :custom_repo, files: files ) }
|
let(:project) { create(:project, :custom_repo, files: files) }
|
||||||
|
|
||||||
it 'the template description is preferred' do
|
it 'the template description is preferred' do
|
||||||
expect(merge_request.description).to eq('Default template contents')
|
expect(merge_request.description).to eq('Default template contents')
|
||||||
|
@ -306,7 +306,7 @@ RSpec.describe MergeRequests::BuildService do
|
||||||
|
|
||||||
context 'a Default.md template is defined' do
|
context 'a Default.md template is defined' do
|
||||||
let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
|
let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
|
||||||
let(:project) { create(:project, :custom_repo, files: files ) }
|
let(:project) { create(:project, :custom_repo, files: files) }
|
||||||
|
|
||||||
it 'appends the closing description to a Default.md template' do
|
it 'appends the closing description to a Default.md template' do
|
||||||
expected_description = ['Default template contents', closing_message].compact.join("\n\n")
|
expected_description = ['Default template contents', closing_message].compact.join("\n\n")
|
||||||
|
@ -386,7 +386,7 @@ RSpec.describe MergeRequests::BuildService do
|
||||||
|
|
||||||
context 'a Default.md template is defined' do
|
context 'a Default.md template is defined' do
|
||||||
let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
|
let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
|
||||||
let(:project) { create(:project, :custom_repo, files: files ) }
|
let(:project) { create(:project, :custom_repo, files: files) }
|
||||||
|
|
||||||
it 'keeps the description from the initial params' do
|
it 'keeps the description from the initial params' do
|
||||||
expect(merge_request.description).to eq(description)
|
expect(merge_request.description).to eq(description)
|
||||||
|
@ -425,7 +425,7 @@ RSpec.describe MergeRequests::BuildService do
|
||||||
|
|
||||||
context 'a Default.md template is defined' do
|
context 'a Default.md template is defined' do
|
||||||
let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
|
let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
|
||||||
let(:project) { create(:project, :custom_repo, files: files ) }
|
let(:project) { create(:project, :custom_repo, files: files) }
|
||||||
|
|
||||||
it 'appends the closing description to a Default.md template' do
|
it 'appends the closing description to a Default.md template' do
|
||||||
expected_description = ['Default template contents', closing_message].compact.join("\n\n")
|
expected_description = ['Default template contents', closing_message].compact.join("\n\n")
|
||||||
|
@ -486,7 +486,7 @@ RSpec.describe MergeRequests::BuildService do
|
||||||
|
|
||||||
context 'a Default.md template is defined' do
|
context 'a Default.md template is defined' do
|
||||||
let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
|
let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
|
||||||
let(:project) { create(:project, :custom_repo, files: files ) }
|
let(:project) { create(:project, :custom_repo, files: files) }
|
||||||
|
|
||||||
it 'appends the closing description to a Default.md template' do
|
it 'appends the closing description to a Default.md template' do
|
||||||
expected_description = ['Default template contents', closing_message].compact.join("\n\n")
|
expected_description = ['Default template contents', closing_message].compact.join("\n\n")
|
||||||
|
@ -715,7 +715,7 @@ RSpec.describe MergeRequests::BuildService do
|
||||||
context 'when a Default template is found' do
|
context 'when a Default template is found' do
|
||||||
context 'when its contents cannot be retrieved' do
|
context 'when its contents cannot be retrieved' do
|
||||||
let(:files) { { '.gitlab/merge_request_templates/OtherTemplate.md' => 'Other template contents' } }
|
let(:files) { { '.gitlab/merge_request_templates/OtherTemplate.md' => 'Other template contents' } }
|
||||||
let(:project) { create(:project, :custom_repo, files: files ) }
|
let(:project) { create(:project, :custom_repo, files: files) }
|
||||||
|
|
||||||
it 'does not modify the merge request description' do
|
it 'does not modify the merge request description' do
|
||||||
allow(TemplateFinder).to receive(:all_template_names).and_return({
|
allow(TemplateFinder).to receive(:all_template_names).and_return({
|
||||||
|
@ -732,7 +732,7 @@ RSpec.describe MergeRequests::BuildService do
|
||||||
|
|
||||||
context 'when its contents can be retrieved' do
|
context 'when its contents can be retrieved' do
|
||||||
let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
|
let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
|
||||||
let(:project) { create(:project, :custom_repo, files: files ) }
|
let(:project) { create(:project, :custom_repo, files: files) }
|
||||||
|
|
||||||
it 'modifies the merge request description' do
|
it 'modifies the merge request description' do
|
||||||
merge_request.description = nil
|
merge_request.description = nil
|
||||||
|
|
|
@ -245,7 +245,7 @@ RSpec.describe Notes::UpdateService do
|
||||||
context 'for a personal snippet' do
|
context 'for a personal snippet' do
|
||||||
let_it_be(:snippet) { create(:personal_snippet, :public) }
|
let_it_be(:snippet) { create(:personal_snippet, :public) }
|
||||||
|
|
||||||
let(:note) { create(:note, project: nil, noteable: snippet, author: user, note: "Note on a snippet with reference #{issue.to_reference}" ) }
|
let(:note) { create(:note, project: nil, noteable: snippet, author: user, note: "Note on a snippet with reference #{issue.to_reference}") }
|
||||||
|
|
||||||
it 'does not create todos' do
|
it 'does not create todos' do
|
||||||
expect { update_note({ note: "Mentioning user #{user2}" }) }.not_to change { note.todos.count }
|
expect { update_note({ note: "Mentioning user #{user2}" }) }.not_to change { note.todos.count }
|
||||||
|
|
|
@ -9,7 +9,7 @@ RSpec.describe Packages::Composer::ComposerJsonService do
|
||||||
subject { described_class.new(project, target).execute }
|
subject { described_class.new(project, target).execute }
|
||||||
|
|
||||||
context 'with an existing file' do
|
context 'with an existing file' do
|
||||||
let(:project) { create(:project, :custom_repo, files: { 'composer.json' => json } ) }
|
let(:project) { create(:project, :custom_repo, files: { 'composer.json' => json }) }
|
||||||
|
|
||||||
context 'with a valid file' do
|
context 'with a valid file' do
|
||||||
let(:json) { '{ "name": "package-name"}' }
|
let(:json) { '{ "name": "package-name"}' }
|
||||||
|
|
|
@ -148,7 +148,7 @@ RSpec.describe Packages::Npm::CreatePackageService do
|
||||||
end
|
end
|
||||||
|
|
||||||
context 'when file size is faked by setting the attachment length param to a lower size' do
|
context 'when file size is faked by setting the attachment length param to a lower size' do
|
||||||
let(:params) { super().deep_merge!( { _attachments: { "#{package_name}-#{version}.tgz" => { data: encoded_package_data, length: 1 } } }) }
|
let(:params) { super().deep_merge!({ _attachments: { "#{package_name}-#{version}.tgz" => { data: encoded_package_data, length: 1 } } }) }
|
||||||
|
|
||||||
# TODO (technical debt): Extract the package size calculation outside the service and add separate specs for it.
|
# TODO (technical debt): Extract the package size calculation outside the service and add separate specs for it.
|
||||||
# Right now we have several contexts here to test the calculation's different scenarios.
|
# Right now we have several contexts here to test the calculation's different scenarios.
|
||||||
|
@ -193,7 +193,7 @@ RSpec.describe Packages::Npm::CreatePackageService do
|
||||||
end
|
end
|
||||||
|
|
||||||
context 'with empty versions' do
|
context 'with empty versions' do
|
||||||
let(:params) { super().merge!({ versions: {} } ) }
|
let(:params) { super().merge!({ versions: {} }) }
|
||||||
|
|
||||||
it { expect(subject[:http_status]).to eq 400 }
|
it { expect(subject[:http_status]).to eq 400 }
|
||||||
it { expect(subject[:message]).to eq 'Version is empty.' }
|
it { expect(subject[:message]).to eq 'Version is empty.' }
|
||||||
|
|
|
@ -126,7 +126,7 @@ RSpec.describe Projects::LfsPointers::LfsDownloadService do
|
||||||
let(:redirect_link) { 'http://external-link' }
|
let(:redirect_link) { 'http://external-link' }
|
||||||
|
|
||||||
before do
|
before do
|
||||||
stub_full_request(download_link).to_return(status: 301, body: 'You are being redirected', headers: { 'Location' => redirect_link } )
|
stub_full_request(download_link).to_return(status: 301, body: 'You are being redirected', headers: { 'Location' => redirect_link })
|
||||||
stub_full_request(redirect_link).to_return(body: lfs_content)
|
stub_full_request(redirect_link).to_return(body: lfs_content)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -11,7 +11,7 @@ RSpec.describe Search::GroupService do
|
||||||
|
|
||||||
# These projects shouldn't be found
|
# These projects shouldn't be found
|
||||||
let!(:outside_project) { create(:project, :public, name: "Outside #{term}") }
|
let!(:outside_project) { create(:project, :public, name: "Outside #{term}") }
|
||||||
let!(:private_project) { create(:project, :private, namespace: nested_group, name: "Private #{term}" ) }
|
let!(:private_project) { create(:project, :private, namespace: nested_group, name: "Private #{term}") }
|
||||||
let!(:other_project) { create(:project, :public, namespace: nested_group, name: term.reverse) }
|
let!(:other_project) { create(:project, :public, namespace: nested_group, name: term.reverse) }
|
||||||
|
|
||||||
# These projects should be found
|
# These projects should be found
|
||||||
|
|
|
@ -219,10 +219,10 @@ RSpec.describe Security::MergeReportsService, '#execute' do
|
||||||
|
|
||||||
let(:finding_id_1) { build(:ci_reports_security_finding, identifiers: [identifier_bandit, identifier_cve], scanner: bandit_scanner, report_type: :sast) }
|
let(:finding_id_1) { build(:ci_reports_security_finding, identifiers: [identifier_bandit, identifier_cve], scanner: bandit_scanner, report_type: :sast) }
|
||||||
let(:finding_id_2) { build(:ci_reports_security_finding, identifiers: [identifier_cve], scanner: semgrep_scanner, report_type: :sast) }
|
let(:finding_id_2) { build(:ci_reports_security_finding, identifiers: [identifier_cve], scanner: semgrep_scanner, report_type: :sast) }
|
||||||
let(:finding_id_3) { build(:ci_reports_security_finding, identifiers: [identifier_semgrep], scanner: semgrep_scanner, report_type: :sast ) }
|
let(:finding_id_3) { build(:ci_reports_security_finding, identifiers: [identifier_semgrep], scanner: semgrep_scanner, report_type: :sast) }
|
||||||
|
|
||||||
let(:bandit_report) do
|
let(:bandit_report) do
|
||||||
build( :ci_reports_security_report,
|
build(:ci_reports_security_report,
|
||||||
type: :sast,
|
type: :sast,
|
||||||
scanners: [bandit_scanner],
|
scanners: [bandit_scanner],
|
||||||
findings: [finding_id_1],
|
findings: [finding_id_1],
|
||||||
|
|
|
@ -175,7 +175,7 @@ RSpec.describe ::SystemNotes::IssuablesService do
|
||||||
|
|
||||||
it 'builds a correct phrase when one reviewer removed from a set' do
|
it 'builds a correct phrase when one reviewer removed from a set' do
|
||||||
expect(build_note([reviewer, reviewer1, reviewer2], [reviewer, reviewer1])).to(
|
expect(build_note([reviewer, reviewer1, reviewer2], [reviewer, reviewer1])).to(
|
||||||
eq( "removed review request for @#{reviewer2.username}")
|
eq("removed review request for @#{reviewer2.username}")
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -681,7 +681,7 @@ RSpec.describe ::SystemNotes::IssuablesService do
|
||||||
|
|
||||||
it 'tracks usage' do
|
it 'tracks usage' do
|
||||||
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter)
|
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter)
|
||||||
.to receive(:track_issue_cloned_action).with(author: author, project: project )
|
.to receive(:track_issue_cloned_action).with(author: author, project: project)
|
||||||
|
|
||||||
subject
|
subject
|
||||||
end
|
end
|
||||||
|
|
|
@ -17,7 +17,7 @@ RSpec.describe Todos::Destroy::EntityLeaveService do
|
||||||
let!(:todo_issue_user) { create(:todo, user: user, target: issue, project: project) }
|
let!(:todo_issue_user) { create(:todo, user: user, target: issue, project: project) }
|
||||||
let!(:todo_issue_c_user) { create(:todo, user: user, target: issue_c, project: project) }
|
let!(:todo_issue_c_user) { create(:todo, user: user, target: issue_c, project: project) }
|
||||||
let!(:todo_issue_c_user2) { create(:todo, user: user2, target: issue_c, project: project) }
|
let!(:todo_issue_c_user2) { create(:todo, user: user2, target: issue_c, project: project) }
|
||||||
let(:internal_note) { create(:note, noteable: issue, project: project, confidential: true ) }
|
let(:internal_note) { create(:note, noteable: issue, project: project, confidential: true) }
|
||||||
let!(:todo_for_internal_note) do
|
let!(:todo_for_internal_note) do
|
||||||
create(:todo, user: user, target: issue, project: project, note: internal_note)
|
create(:todo, user: user, target: issue, project: project, note: internal_note)
|
||||||
end
|
end
|
||||||
|
@ -250,7 +250,7 @@ RSpec.describe Todos::Destroy::EntityLeaveService do
|
||||||
let!(:todo_subproject_user2) { create(:todo, user: user2, project: subproject) }
|
let!(:todo_subproject_user2) { create(:todo, user: user2, project: subproject) }
|
||||||
let!(:todo_subpgroup_user2) { create(:todo, user: user2, group: subgroup) }
|
let!(:todo_subpgroup_user2) { create(:todo, user: user2, group: subgroup) }
|
||||||
let!(:todo_parent_group_user) { create(:todo, user: user, group: parent_group) }
|
let!(:todo_parent_group_user) { create(:todo, user: user, group: parent_group) }
|
||||||
let(:subproject_internal_note) { create(:note, noteable: issue, project: project, confidential: true ) }
|
let(:subproject_internal_note) { create(:note, noteable: issue, project: project, confidential: true) }
|
||||||
let!(:todo_for_internal_subproject_note) do
|
let!(:todo_for_internal_subproject_note) do
|
||||||
create(:todo, user: user, target: issue, project: project, note: subproject_internal_note)
|
create(:todo, user: user, target: issue, project: project, note: subproject_internal_note)
|
||||||
end
|
end
|
||||||
|
|
|
@ -5,10 +5,10 @@ require 'spec_helper'
|
||||||
RSpec.describe Topics::MergeService do
|
RSpec.describe Topics::MergeService do
|
||||||
let_it_be(:source_topic) { create(:topic, name: 'source_topic') }
|
let_it_be(:source_topic) { create(:topic, name: 'source_topic') }
|
||||||
let_it_be(:target_topic) { create(:topic, name: 'target_topic') }
|
let_it_be(:target_topic) { create(:topic, name: 'target_topic') }
|
||||||
let_it_be(:project_1) { create(:project, :public, topic_list: source_topic.name ) }
|
let_it_be(:project_1) { create(:project, :public, topic_list: source_topic.name) }
|
||||||
let_it_be(:project_2) { create(:project, :private, topic_list: source_topic.name ) }
|
let_it_be(:project_2) { create(:project, :private, topic_list: source_topic.name) }
|
||||||
let_it_be(:project_3) { create(:project, :public, topic_list: target_topic.name ) }
|
let_it_be(:project_3) { create(:project, :public, topic_list: target_topic.name) }
|
||||||
let_it_be(:project_4) { create(:project, :public, topic_list: [source_topic.name, target_topic.name] ) }
|
let_it_be(:project_4) { create(:project, :public, topic_list: [source_topic.name, target_topic.name]) }
|
||||||
|
|
||||||
subject { described_class.new(source_topic, target_topic).execute }
|
subject { described_class.new(source_topic, target_topic).execute }
|
||||||
|
|
||||||
|
|
|
@ -129,7 +129,7 @@ RSpec.describe Users::DestroyService do
|
||||||
|
|
||||||
expect { service.execute(user) }
|
expect { service.execute(user) }
|
||||||
.to raise_error(Users::DestroyService::DestroyError,
|
.to raise_error(Users::DestroyService::DestroyError,
|
||||||
"Project #{project.id} can't be deleted" )
|
"Project #{project.id} can't be deleted")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
@ -243,7 +243,7 @@ RSpec.describe Users::DestroyService do
|
||||||
|
|
||||||
aggregate_failures do
|
aggregate_failures do
|
||||||
expect { service.execute(user) }
|
expect { service.execute(user) }
|
||||||
.to raise_error(Users::DestroyService::DestroyError, 'foo' )
|
.to raise_error(Users::DestroyService::DestroyError, 'foo')
|
||||||
expect(snippet.reload).not_to be_nil
|
expect(snippet.reload).not_to be_nil
|
||||||
expect(
|
expect(
|
||||||
gitlab_shell.repository_exists?(snippet.repository_storage,
|
gitlab_shell.repository_exists?(snippet.repository_storage,
|
||||||
|
@ -546,7 +546,7 @@ RSpec.describe Users::DestroyService do
|
||||||
end.to(
|
end.to(
|
||||||
change do
|
change do
|
||||||
Users::GhostUserMigration.where(user: other_user,
|
Users::GhostUserMigration.where(user: other_user,
|
||||||
initiator_user: user )
|
initiator_user: user)
|
||||||
.exists?
|
.exists?
|
||||||
end.from(false).to(true))
|
end.from(false).to(true))
|
||||||
end
|
end
|
||||||
|
|
|
@ -235,7 +235,7 @@ RSpec.describe Users::MigrateRecordsToGhostUserService do
|
||||||
|
|
||||||
aggregate_failures do
|
aggregate_failures do
|
||||||
expect { service.execute }.to(
|
expect { service.execute }.to(
|
||||||
raise_error(Users::MigrateRecordsToGhostUserService::DestroyError, 'foo' ))
|
raise_error(Users::MigrateRecordsToGhostUserService::DestroyError, 'foo'))
|
||||||
expect(snippet.reload).not_to be_nil
|
expect(snippet.reload).not_to be_nil
|
||||||
expect(
|
expect(
|
||||||
gitlab_shell.repository_exists?(snippet.repository_storage,
|
gitlab_shell.repository_exists?(snippet.repository_storage,
|
||||||
|
|
|
@ -5,11 +5,11 @@ require 'spec_helper'
|
||||||
RSpec.describe X509CertificateRevokeService do
|
RSpec.describe X509CertificateRevokeService do
|
||||||
describe '#execute' do
|
describe '#execute' do
|
||||||
let(:service) { described_class.new }
|
let(:service) { described_class.new }
|
||||||
let!(:x509_signature_1) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified ) }
|
let!(:x509_signature_1) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified) }
|
||||||
let!(:x509_signature_2) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified ) }
|
let!(:x509_signature_2) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified) }
|
||||||
|
|
||||||
context 'for revoked certificates' do
|
context 'for revoked certificates' do
|
||||||
let(:x509_certificate) { create(:x509_certificate, certificate_status: :revoked ) }
|
let(:x509_certificate) { create(:x509_certificate, certificate_status: :revoked) }
|
||||||
|
|
||||||
it 'update all commit signatures' do
|
it 'update all commit signatures' do
|
||||||
expect do
|
expect do
|
||||||
|
|
|
@ -3,6 +3,10 @@ import Vue from 'vue';
|
||||||
import { createMockServer } from 'test_helpers/mock_server';
|
import { createMockServer } from 'test_helpers/mock_server';
|
||||||
import translateMixin from '~/vue_shared/translate';
|
import translateMixin from '~/vue_shared/translate';
|
||||||
|
|
||||||
|
// fixing toJSON error
|
||||||
|
// https://github.com/storybookjs/storybook/issues/14933
|
||||||
|
Vue.prototype.toJSON = () => {};
|
||||||
|
|
||||||
const stylesheetsRequireCtx = require.context(
|
const stylesheetsRequireCtx = require.context(
|
||||||
'../../app/assets/stylesheets',
|
'../../app/assets/stylesheets',
|
||||||
true,
|
true,
|
||||||
|
|
Loading…
Reference in New Issue