diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index a02d8d83ff3..0c9e0391c06 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -49,6 +49,7 @@ workflow: - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE == "schedule" && $SCHEDULE_TYPE == "maintenance"' variables: CRYSTALBALL: "true" + CREATE_INCIDENT_FOR_PIPELINE_FAILURE: "true" NOTIFY_PIPELINE_FAILURE_CHANNEL: "master-broken" # Run pipelines for ruby3 branch - if: '$CI_COMMIT_BRANCH == "ruby3"' @@ -62,9 +63,12 @@ workflow: - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $GITLAB_USER_LOGIN =~ /project_\d+_bot\d*/' variables: GITLAB_DEPENDENCY_PROXY_ADDRESS: "" + CREATE_INCIDENT_FOR_PIPELINE_FAILURE: "true" + NOTIFY_PIPELINE_FAILURE_CHANNEL: "master-broken" # For `$CI_DEFAULT_BRANCH` branch, create a pipeline (this includes on schedules, pushes, merges, etc.). - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH' variables: + CREATE_INCIDENT_FOR_PIPELINE_FAILURE: "true" NOTIFY_PIPELINE_FAILURE_CHANNEL: "master-broken" # For tags, create a pipeline. - if: '$CI_COMMIT_TAG' diff --git a/.gitlab/ci/notify.gitlab-ci.yml b/.gitlab/ci/notify.gitlab-ci.yml index 51b0f4071eb..ae77caa140a 100644 --- a/.gitlab/ci/notify.gitlab-ci.yml +++ b/.gitlab/ci/notify.gitlab-ci.yml @@ -1,8 +1,12 @@ -.notify-slack: - image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}alpine/curl +.notify-defaults: stage: notify dependencies: [] cache: {} + +.notify-slack: + extends: + - .notify-defaults + image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}alpine/curl variables: MERGE_REQUEST_URL: ${CI_MERGE_REQUEST_PROJECT_URL}/-/merge_requests/${CI_MERGE_REQUEST_IID} before_script: @@ -34,28 +38,31 @@ notify-security-pipeline: - scripts/slack ${NOTIFY_CHANNEL} " ☠️ Pipeline for merged result failed! ☠️ See ${CI_PIPELINE_URL} (triggered from ${MERGE_REQUEST_URL})" ci_failing "GitLab Release Tools Bot" notify-pipeline-failure: - extends: .notify-slack + extends: + - .notify-defaults + - .notify:rules:notify-pipeline-failure image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION} - rules: - # Don't report child pipeline failures - - if: '$CI_PIPELINE_SOURCE == "parent_pipeline"' - when: never - - if: '$CI_SLACK_WEBHOOK_URL && $NOTIFY_PIPELINE_FAILURE_CHANNEL' - when: on_failure - allow_failure: true variables: + BROKEN_MASTER_INCIDENTS_PROJECT: "gitlab-org/quality/engineering-productivity/master-broken-incidents" + BROKEN_MASTER_INCIDENT_JSON: "${CI_PROJECT_DIR}/incident.json" SLACK_CHANNEL: "${NOTIFY_PIPELINE_FAILURE_CHANNEL}" - FAILED_PIPELINE_REPORT_FILE: "failed_pipeline_report.json" + FAILED_PIPELINE_SLACK_MESSAGE_FILE: "${CI_PROJECT_DIR}/failed_pipeline_slack_message.json" before_script: - source scripts/utils.sh - apt-get update && apt-get install -y jq - install_gitlab_gem script: - - scripts/generate-failed-pipeline-slack-message.rb - | - curl -X POST -H 'Content-Type: application/json' --data @${FAILED_PIPELINE_REPORT_FILE} "$CI_SLACK_WEBHOOK_URL" + if [[ "${CREATE_INCIDENT_FOR_PIPELINE_FAILURE}" == "true" ]]; then + scripts/create-pipeline-failure-incident.rb -p ${BROKEN_MASTER_INCIDENTS_PROJECT} -f ${BROKEN_MASTER_INCIDENT_JSON} -t ${BROKEN_MASTER_INCIDENTS_PROJECT_TOKEN}; + echosuccess "Created incident $(jq '.web_url' ${BROKEN_MASTER_INCIDENT_JSON})"; + fi + - | + scripts/generate-failed-pipeline-slack-message.rb -i ${BROKEN_MASTER_INCIDENT_JSON} -f ${FAILED_PIPELINE_SLACK_MESSAGE_FILE}; + curl -X POST -H 'Content-Type: application/json' --data @${FAILED_PIPELINE_SLACK_MESSAGE_FILE} "$CI_SLACK_WEBHOOK_URL"; artifacts: paths: - - ${FAILED_PIPELINE_REPORT_FILE} + - 
${BROKEN_MASTER_INCIDENT_JSON} + - ${FAILED_PIPELINE_SLACK_MESSAGE_FILE} when: always expire_in: 2 days diff --git a/.gitlab/ci/rules.gitlab-ci.yml b/.gitlab/ci/rules.gitlab-ci.yml index 75035a9062b..553debc0fdb 100644 --- a/.gitlab/ci/rules.gitlab-ci.yml +++ b/.gitlab/ci/rules.gitlab-ci.yml @@ -997,6 +997,18 @@ - <<: *if-default-refs changes: *code-patterns +########## +# Notify # +########## +.notify:rules:notify-pipeline-failure: + rules: + # Don't report child pipeline failures + - if: '$CI_PIPELINE_SOURCE == "parent_pipeline"' + when: never + - if: '$CI_SLACK_WEBHOOK_URL && $NOTIFY_PIPELINE_FAILURE_CHANNEL' + when: on_failure + allow_failure: true + ############### # Pages rules # ############### diff --git a/.rubocop_todo/layout/space_inside_parens.yml b/.rubocop_todo/layout/space_inside_parens.yml index b22a58fe459..2875c7fea54 100644 --- a/.rubocop_todo/layout/space_inside_parens.yml +++ b/.rubocop_todo/layout/space_inside_parens.yml @@ -249,28 +249,6 @@ Layout/SpaceInsideParens: - 'spec/requests/search_controller_spec.rb' - 'spec/serializers/analytics_build_entity_spec.rb' - 'spec/serializers/merge_request_user_entity_spec.rb' - - 'spec/services/boards/issues/list_service_spec.rb' - - 'spec/services/ci/compare_test_reports_service_spec.rb' - - 'spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb' - - 'spec/services/ci/retry_job_service_spec.rb' - - 'spec/services/clusters/gcp/provision_service_spec.rb' - - 'spec/services/clusters/gcp/verify_provision_status_service_spec.rb' - - 'spec/services/groups/destroy_service_spec.rb' - - 'spec/services/groups/update_shared_runners_service_spec.rb' - - 'spec/services/import/gitlab_projects/file_acquisition_strategies/file_upload_spec.rb' - - 'spec/services/issues/export_csv_service_spec.rb' - - 'spec/services/labels/promote_service_spec.rb' - - 'spec/services/members/invite_service_spec.rb' - - 'spec/services/notes/update_service_spec.rb' - - 'spec/services/packages/composer/composer_json_service_spec.rb' - - 'spec/services/packages/npm/create_package_service_spec.rb' - - 'spec/services/projects/lfs_pointers/lfs_download_service_spec.rb' - - 'spec/services/search/group_service_spec.rb' - - 'spec/services/security/merge_reports_service_spec.rb' - - 'spec/services/suggestions/apply_service_spec.rb' - - 'spec/services/system_notes/issuables_service_spec.rb' - - 'spec/services/users/destroy_service_spec.rb' - - 'spec/services/x509_certificate_revoke_service_spec.rb' - 'spec/support/helpers/database/partitioning_helpers.rb' - 'spec/support/helpers/dependency_proxy_helpers.rb' - 'spec/support/helpers/javascript_fixtures_helpers.rb' diff --git a/Gemfile b/Gemfile index ed250fa16ea..7851769e477 100644 --- a/Gemfile +++ b/Gemfile @@ -168,9 +168,9 @@ gem 'seed-fu', '~> 2.3.7' gem 'elasticsearch-model', '~> 7.2' gem 'elasticsearch-rails', '~> 7.2', require: 'elasticsearch/rails/instrumentation' gem 'elasticsearch-api', '7.13.3' -gem 'aws-sdk-core', '~> 3.159.0' +gem 'aws-sdk-core', '~> 3.166.0' gem 'aws-sdk-cloudformation', '~> 1' -gem 'aws-sdk-s3', '~> 1.114.0' +gem 'aws-sdk-s3', '~> 1.117.1' gem 'faraday_middleware-aws-sigv4', '~>0.3.0' gem 'typhoeus', '~> 1.4.0' # Used with Elasticsearch to support http keep-alive connections diff --git a/Gemfile.checksum b/Gemfile.checksum index 642d3f89a92..8d20aa8cd28 100644 --- a/Gemfile.checksum +++ b/Gemfile.checksum @@ -33,11 +33,11 @@ {"name":"awesome_print","version":"1.9.2","platform":"ruby","checksum":"e99b32b704acff16d768b3468680793ced40bfdc4537eb07e06a4be11133786e"}, 
{"name":"awrence","version":"1.1.1","platform":"ruby","checksum":"9be584c97408ed92d5e1ca11740853646fe270de675f2f8dd44e8233226dfc97"}, {"name":"aws-eventstream","version":"1.2.0","platform":"ruby","checksum":"ffa53482c92880b001ff2fb06919b9bb82fd847cbb0fa244985d2ebb6dd0d1df"}, -{"name":"aws-partitions","version":"1.644.0","platform":"ruby","checksum":"63791750839afff110c5b5a8805018c4275720d7a5c7ec79319d4d520c7da874"}, +{"name":"aws-partitions","version":"1.651.0","platform":"ruby","checksum":"61f354049eb2c10bf0aa96b115f7443d181d79ec5508f7a34b8724c4cfa95dda"}, {"name":"aws-sdk-cloudformation","version":"1.41.0","platform":"ruby","checksum":"31e47539719734413671edf9b1a31f8673fbf9688549f50c41affabbcb1c6b26"}, -{"name":"aws-sdk-core","version":"3.159.0","platform":"ruby","checksum":"8863e2cdfd6816a0532ad9eb82a07b81b3d12667da747e9e82554e4dd7adb231"}, -{"name":"aws-sdk-kms","version":"1.57.0","platform":"ruby","checksum":"ffd7dbb9b4251f29d4f508af761d0addd7035a346a88e3481cdb4dc548e51bd5"}, -{"name":"aws-sdk-s3","version":"1.114.0","platform":"ruby","checksum":"ce0f71df1a7b0fb1f88d40a70636ef1a9b08e69fb560694c5dab3f4ac7efcde4"}, +{"name":"aws-sdk-core","version":"3.166.0","platform":"ruby","checksum":"827b82a31f13007fbd3ce78801949019ad3b6fa0c658270d5caa6095cb4945fa"}, +{"name":"aws-sdk-kms","version":"1.59.0","platform":"ruby","checksum":"6c002ebf8e404625c8338ca12ae69b1329399f9dc1b0ebca474e00ff06700153"}, +{"name":"aws-sdk-s3","version":"1.117.1","platform":"ruby","checksum":"76f6dac5baeb2b78616eb34c6af650c1b7a15c1078b169d1b27e8421904c509d"}, {"name":"aws-sigv4","version":"1.5.1","platform":"ruby","checksum":"d68c87fff4ee843b4b92b23c7f31f957f254ec6eb064181f7119124aab8b8bb4"}, {"name":"azure-storage-blob","version":"2.0.3","platform":"ruby","checksum":"61b76118843c91776bd24bee22c74adafeb7c4bb3a858a325047dae3b59d0363"}, {"name":"azure-storage-common","version":"2.0.4","platform":"ruby","checksum":"608f4daab0e06b583b73dcffd3246ea39e78056de31630286b0cf97af7d6956b"}, diff --git a/Gemfile.lock b/Gemfile.lock index 654c5bbe340..d63719e07a7 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -185,20 +185,20 @@ GEM awesome_print (1.9.2) awrence (1.1.1) aws-eventstream (1.2.0) - aws-partitions (1.644.0) + aws-partitions (1.651.0) aws-sdk-cloudformation (1.41.0) aws-sdk-core (~> 3, >= 3.99.0) aws-sigv4 (~> 1.1) - aws-sdk-core (3.159.0) + aws-sdk-core (3.166.0) aws-eventstream (~> 1, >= 1.0.2) - aws-partitions (~> 1, >= 1.525.0) - aws-sigv4 (~> 1.1) + aws-partitions (~> 1, >= 1.651.0) + aws-sigv4 (~> 1.5) jmespath (~> 1, >= 1.6.1) - aws-sdk-kms (1.57.0) - aws-sdk-core (~> 3, >= 3.127.0) + aws-sdk-kms (1.59.0) + aws-sdk-core (~> 3, >= 3.165.0) aws-sigv4 (~> 1.1) - aws-sdk-s3 (1.114.0) - aws-sdk-core (~> 3, >= 3.127.0) + aws-sdk-s3 (1.117.1) + aws-sdk-core (~> 3, >= 3.165.0) aws-sdk-kms (~> 1) aws-sigv4 (~> 1.4) aws-sigv4 (1.5.1) @@ -1558,8 +1558,8 @@ DEPENDENCIES autoprefixer-rails (= 10.2.5.1) awesome_print aws-sdk-cloudformation (~> 1) - aws-sdk-core (~> 3.159.0) - aws-sdk-s3 (~> 1.114.0) + aws-sdk-core (~> 3.166.0) + aws-sdk-s3 (~> 1.117.1) babosa (~> 1.0.4) base32 (~> 0.3.0) batch-loader (~> 2.0.1) diff --git a/app/assets/javascripts/vue_shared/components/markdown_drawer/makrdown_drawer.stories.js b/app/assets/javascripts/vue_shared/components/markdown_drawer/makrdown_drawer.stories.js new file mode 100644 index 00000000000..03bd64e2a57 --- /dev/null +++ b/app/assets/javascripts/vue_shared/components/markdown_drawer/makrdown_drawer.stories.js @@ -0,0 +1,54 @@ +import { GlButton } from '@gitlab/ui'; +import { 
MOCK_HTML } from '../../../../../../spec/frontend/vue_shared/components/markdown_drawer/mock_data'; +import MarkdownDrawer from './markdown_drawer.vue'; + +export default { + component: MarkdownDrawer, + title: 'vue_shared/markdown_drawer', + parameters: { + mirage: { + timing: 1000, + handlers: { + get: { + '/help/user/search/global_search/advanced_search_syntax.json': [ + 200, + {}, + { html: MOCK_HTML }, + ], + }, + }, + }, + }, +}; + +const createStory = ({ ...options }) => (_, { argTypes }) => ({ + components: { MarkdownDrawer, GlButton }, + props: Object.keys(argTypes), + data() { + return { + render: false, + }; + }, + methods: { + toggleDrawer() { + this.$refs.drawer.toggleDrawer(); + }, + }, + mounted() { + window.requestAnimationFrame(() => { + this.render = true; + }); + }, + template: ` +
+ Open Drawer + +
+ `, + ...options, +}); + +export const Default = createStory({}); diff --git a/app/assets/javascripts/vue_shared/components/markdown_drawer/markdown_drawer.vue b/app/assets/javascripts/vue_shared/components/markdown_drawer/markdown_drawer.vue new file mode 100644 index 00000000000..a4b509f8656 --- /dev/null +++ b/app/assets/javascripts/vue_shared/components/markdown_drawer/markdown_drawer.vue @@ -0,0 +1,117 @@ + + diff --git a/app/assets/javascripts/vue_shared/components/markdown_drawer/utils/fetch.js b/app/assets/javascripts/vue_shared/components/markdown_drawer/utils/fetch.js new file mode 100644 index 00000000000..7c8e1bc160a --- /dev/null +++ b/app/assets/javascripts/vue_shared/components/markdown_drawer/utils/fetch.js @@ -0,0 +1,32 @@ +import * as Sentry from '@sentry/browser'; +import { helpPagePath } from '~/helpers/help_page_helper'; +import axios from '~/lib/utils/axios_utils'; + +export const splitDocument = (htmlString) => { + const htmlDocument = new DOMParser().parseFromString(htmlString, 'text/html'); + const title = htmlDocument.querySelector('h1')?.innerText; + htmlDocument.querySelector('h1')?.remove(); + return { + title, + body: htmlDocument.querySelector('body').innerHTML.toString(), + }; +}; + +export const getRenderedMarkdown = (documentPath) => { + return axios + .get(helpPagePath(documentPath)) + .then(({ data }) => { + const { body, title } = splitDocument(data.html); + return { + body, + title, + hasFetchError: false, + }; + }) + .catch((e) => { + Sentry.captureException(e); + return { + hasFetchError: true, + }; + }); +}; diff --git a/app/presenters/ci/build_runner_presenter.rb b/app/presenters/ci/build_runner_presenter.rb index 706608e3029..e3b992ff49e 100644 --- a/app/presenters/ci/build_runner_presenter.rb +++ b/app/presenters/ci/build_runner_presenter.rb @@ -33,9 +33,8 @@ module Ci end def runner_variables - stop_expanding_file_vars = ::Feature.enabled?(:ci_stop_expanding_file_vars_for_runners, project) variables - .sort_and_expand_all(keep_undefined: true, expand_file_vars: !stop_expanding_file_vars, project: project) + .sort_and_expand_all(keep_undefined: true, expand_file_vars: false, project: project) .to_runner_variables end diff --git a/app/views/projects/deployments/_actions.haml b/app/views/projects/deployments/_actions.haml index c0fe143020a..e1c8992a28c 100644 --- a/app/views/projects/deployments/_actions.haml +++ b/app/views/projects/deployments/_actions.haml @@ -1,4 +1,4 @@ -- if can?(current_user, :create_deployment, deployment) +- if can?(current_user, :read_deployment, deployment) - actions = deployment.manual_actions - if actions.present? 
.btn-group @@ -8,7 +8,7 @@ = sprite_icon('chevron-down') %ul.dropdown-menu.dropdown-menu-right - actions.each do |action| - - next unless can?(current_user, :update_build, action) + - next unless can?(current_user, :play_job, action) %li = link_to [:play, @project, action], method: :post, rel: 'nofollow' do %span= action.name diff --git a/config/feature_flags/development/ci_stop_expanding_file_vars_for_runners.yml b/config/feature_flags/development/ci_stop_expanding_file_vars_for_runners.yml deleted file mode 100644 index a78290b65d6..00000000000 --- a/config/feature_flags/development/ci_stop_expanding_file_vars_for_runners.yml +++ /dev/null @@ -1,8 +0,0 @@ ---- -name: ci_stop_expanding_file_vars_for_runners -introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/94198 -rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/369907 -milestone: '15.3' -type: development -group: group::pipeline authoring -default_enabled: true diff --git a/doc/development/documentation/styleguide/index.md b/doc/development/documentation/styleguide/index.md index a3ad8a6c0f7..adc999d2a18 100644 --- a/doc/development/documentation/styleguide/index.md +++ b/doc/development/documentation/styleguide/index.md @@ -145,6 +145,7 @@ help benefit translation. For example, we: - Avoid words that can be translated incorrectly, like: - [since and because](word_list.md#since) - [once and after](word_list.md#once) + - [it](word_list.md#it) - Avoid [ing](word_list.md#-ing-words) words. [The GitLab voice](#the-gitlab-voice) dictates that we write clearly and directly, diff --git a/doc/development/documentation/styleguide/word_list.md b/doc/development/documentation/styleguide/word_list.md index adf3f9bcb92..21981d858f3 100644 --- a/doc/development/documentation/styleguide/word_list.md +++ b/doc/development/documentation/styleguide/word_list.md @@ -612,6 +612,21 @@ Use lowercase for **issue board**. Use lowercase for **issue weights**. +## it + +When you use the word **it**, ensure the word it refers to is obvious. +If it's not obvious, repeat the word rather than using **it**. + +Use: + +- The field returns a connection. The field accepts four arguments. + +Instead of: + +- The field returns a connection. It accepts four arguments. + +See also [this, these, that, those](#this-these-that-those). + ## job Do not use **build** to be synonymous with **job**. A job is defined in the `.gitlab-ci.yml` file and runs as part of a pipeline. diff --git a/locale/gitlab.pot b/locale/gitlab.pot index ad8f7182367..efbbf7d49ca 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -24768,6 +24768,9 @@ msgstr "" msgid "March" msgstr "" +msgid "MardownDrawer|Could not fetch help contents." +msgstr "" + msgid "Mark as done" msgstr "" diff --git a/qa/qa/runtime/allure_report.rb b/qa/qa/runtime/allure_report.rb index 10f47ca56ba..a9152a5555c 100644 --- a/qa/qa/runtime/allure_report.rb +++ b/qa/qa/runtime/allure_report.rb @@ -96,9 +96,14 @@ module QA return {} unless Env.admin_personal_access_token || Env.personal_access_token client = Env.admin_personal_access_token ? 
API::Client.as_admin : API::Client.new - response = get(API::Request.new(client, '/version').url) + response = get(API::Request.new(client, '/metadata').url) - JSON.parse(response.body, symbolize_names: true) + JSON.parse(response.body, symbolize_names: true).then do |metadata| + { + **metadata.slice(:version, :revision), + kas_version: metadata.dig(:kas, :version) + }.compact + end rescue StandardError, ArgumentError => e Logger.error("Failed to attach version info to allure report: #{e}") {} diff --git a/qa/qa/specs/features/api/4_verify/file_variable_spec.rb b/qa/qa/specs/features/api/4_verify/file_variable_spec.rb index 5d6bc5a44f7..4ae97f589cf 100644 --- a/qa/qa/specs/features/api/4_verify/file_variable_spec.rb +++ b/qa/qa/specs/features/api/4_verify/file_variable_spec.rb @@ -1,10 +1,7 @@ # frozen_string_literal: true module QA - RSpec.describe 'Verify', :runner, product_group: :pipeline_authoring, feature_flag: { - name: 'ci_stop_expanding_file_vars_for_runners', - scope: :project - } do + RSpec.describe 'Verify', :runner, product_group: :pipeline_authoring do describe 'Pipeline with project file variables' do let(:executor) { "qa-runner-#{Faker::Alphanumeric.alphanumeric(number: 8)}" } @@ -14,7 +11,7 @@ module QA end end - let(:runner) do + let!(:runner) do Resource::Runner.fabricate! do |runner| runner.project = project runner.name = executor @@ -60,59 +57,30 @@ module QA end end + before do + add_file_variables + add_ci_file + trigger_pipeline + wait_for_pipeline + end + after do runner.remove_via_api! end - shared_examples 'variables are read correctly' do - it 'shows in job log accordingly' do - job = Resource::Job.fabricate_via_api! do |job| - job.project = project - job.id = project.job_by_name('test')[:id] - end - - aggregate_failures do - trace = job.trace - expect(trace).to have_content('run something -f hello, this is test') - expect(trace).to have_content('docker run --tlscacert="This is secret"') - expect(trace).to have_content('run --output=This is secret.crt') - expect(trace).to have_content('Will read private key from hello, this is test') - end - end - end - - # FF does not change current behavior - # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/94198#note_1057609893 - # - # TODO: Remove when FF is removed - # TODO: Archive testcase issue when FF is removed - # Rollout issue: https://gitlab.com/gitlab-org/gitlab/-/issues/369907 - context 'when FF is on', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/370787' do - before do - Runtime::Feature.enable(:ci_stop_expanding_file_vars_for_runners, project: project) - - runner - add_file_variables - add_ci_file - trigger_pipeline - wait_for_pipeline + it 'shows in job log accordingly', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/370791' do + job = Resource::Job.fabricate_via_api! 
do |job| + job.project = project + job.id = project.job_by_name('test')[:id] end - it_behaves_like 'variables are read correctly' - end - - # TODO: Refactor when FF is removed - # TODO: Update testcase issue title and description to not refer to FF status - context 'when FF is off', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/370791' do - before do - runner - add_file_variables - add_ci_file - trigger_pipeline - wait_for_pipeline + aggregate_failures do + trace = job.trace + expect(trace).to have_content('run something -f hello, this is test') + expect(trace).to have_content('docker run --tlscacert="This is secret"') + expect(trace).to have_content('run --output=This is secret.crt') + expect(trace).to have_content('Will read private key from hello, this is test') end - - it_behaves_like 'variables are read correctly' end private diff --git a/scripts/api/create_issue.rb b/scripts/api/create_issue.rb new file mode 100644 index 00000000000..2117c285771 --- /dev/null +++ b/scripts/api/create_issue.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +require 'gitlab' +require_relative 'default_options' + +class CreateIssue + def initialize(options) + @project = options.fetch(:project) + + # Force the token to be a string so that if api_token is nil, it's set to '', + # allowing unauthenticated requests (for forks). + api_token = options.delete(:api_token).to_s + + warn "No API token given." if api_token.empty? + + @client = Gitlab.client( + endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint], + private_token: api_token + ) + end + + def execute(issue_data) + client.create_issue(project, issue_data.delete(:title), issue_data) + end + + private + + attr_reader :project, :client +end diff --git a/scripts/api/pipeline_failed_jobs.rb b/scripts/api/pipeline_failed_jobs.rb index c25567af698..df9a7e76dcd 100644 --- a/scripts/api/pipeline_failed_jobs.rb +++ b/scripts/api/pipeline_failed_jobs.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true require 'gitlab' -require 'optparse' + require_relative 'default_options' class PipelineFailedJobs diff --git a/scripts/create-pipeline-failure-incident.rb b/scripts/create-pipeline-failure-incident.rb new file mode 100755 index 00000000000..c6c9851fa16 --- /dev/null +++ b/scripts/create-pipeline-failure-incident.rb @@ -0,0 +1,172 @@ +#!/usr/bin/env ruby + +# frozen_string_literal: true + +require 'optparse' +require 'json' + +require_relative 'api/pipeline_failed_jobs' +require_relative 'api/create_issue' + +class CreatePipelineFailureIncident + DEFAULT_OPTIONS = { + project: nil, + incident_json_file: 'incident.json' + }.freeze + DEFAULT_LABELS = ['Engineering Productivity'].freeze + + def initialize(options) + @project = options.delete(:project) + @api_token = options.delete(:api_token) + end + + def execute + payload = { + issue_type: 'incident', + title: title, + description: description, + labels: incident_labels + } + + CreateIssue.new(project: project, api_token: api_token).execute(payload) + end + + private + + attr_reader :project, :api_token + + def failed_jobs + @failed_jobs ||= PipelineFailedJobs.new(API::DEFAULT_OPTIONS.dup.merge(exclude_allowed_to_fail_jobs: true)).execute + end + + def now + @now ||= Time.now.utc + end + + def title + "#{now.strftime('%A %F %R UTC')} - `#{ENV['CI_PROJECT_PATH']}` broken `#{ENV['CI_COMMIT_REF_NAME']}` " \ + "with #{failed_jobs.size} failed jobs" + end + + def description + <<~MARKDOWN + ## #{project_link} pipeline #{pipeline_link} failed + + **Branch: #{branch_link}** + + 
**Commit: #{commit_link}** + + **Triggered by** #{triggered_by_link} • **Source:** #{source} • **Duration:** #{pipeline_duration} minutes + + **Failed jobs (#{failed_jobs.size}):** + + #{failed_jobs_list} + + ### General guidelines + + Follow the [Broken `master` handbook guide](https://about.gitlab.com/handbook/engineering/workflow/#broken-master). + + ### Investigation + + **Be sure to fill in the `Timeline` for this incident.** + + 1. If the failure is new, and looks like a potential flaky failure, you can retry the failing job. + Make sure to mention the retry in the `Timeline` and leave a link to the retried job. + 1. If the failure looks like a broken `master`, communicate the broken `master` in Slack using the "Broadcast Master Broken" workflow: + - Click the Shortcut lightning bolt icon in the `#master-broken` channel and select "Broadcast Master Broken". + - Click "Continue the broadcast" after the automated message in `#master-broken`. + + ### Pre-resolution + + If you believe that there's an easy resolution by either: + + - Reverting a particular merge request. Make sure to add the ~"pipeline:revert" label in that case, to speed up the revert pipeline. + - Making a quick fix (for example, one line or a few similar simple changes in a few lines). + You can create a merge request, assign to any available maintainer, and ping people that were involved/related to the introduction of the failure. + Additionally, a message can be posted in `#backend_maintainers` or `#frontend_maintainers` to get a maintainer to take a look at the fix ASAP. + + ### Resolution + + Follow [the Resolution steps from the handbook](https://about.gitlab.com/handbook/engineering/workflow/#responsibilities-of-the-resolution-dri). + MARKDOWN + end + + def incident_labels + master_broken_label = + if ENV['CI_PROJECT_NAME'] == 'gitlab-foss' + 'master:foss-broken' + else + 'master:broken' + end + + DEFAULT_LABELS.dup << master_broken_label + end + + def pipeline_link + "[##{ENV['CI_PIPELINE_ID']}](#{ENV['CI_PIPELINE_URL']})" + end + + def branch_link + "[`#{ENV['CI_COMMIT_REF_NAME']}`](#{ENV['CI_PROJECT_URL']}/-/commits/#{ENV['CI_COMMIT_REF_NAME']})" + end + + def pipeline_duration + ((Time.now - Time.parse(ENV['CI_PIPELINE_CREATED_AT'])) / 60.to_f).round(2) + end + + def commit_link + "[#{ENV['CI_COMMIT_TITLE']}](#{ENV['CI_PROJECT_URL']}/-/commit/#{ENV['CI_COMMIT_SHA']})" + end + + def source + "`#{ENV['CI_PIPELINE_SOURCE']}`" + end + + def project_link + "[#{ENV['CI_PROJECT_PATH']}](#{ENV['CI_PROJECT_URL']})" + end + + def triggered_by_link + "[#{ENV['GITLAB_USER_NAME']}](#{ENV['CI_SERVER_URL']}/#{ENV['GITLAB_USER_LOGIN']})" + end + + def failed_jobs_list_for_title + failed_jobs.map(&:name).join(', ') + end + + def failed_jobs_list + failed_jobs.map { |job| "- [#{job.name}](#{job.web_url})" }.join("\n") + end +end + +if $PROGRAM_NAME == __FILE__ + options = CreatePipelineFailureIncident::DEFAULT_OPTIONS.dup + + OptionParser.new do |opts| + opts.on("-p", "--project PROJECT", String, "Project where to create the incident (defaults to "\ + "`#{CreatePipelineFailureIncident::DEFAULT_OPTIONS[:project]}`)") do |value| + options[:project] = value + end + + opts.on("-f", "--incident-json-file file_path", String, "Path to a file where to save the incident JSON data "\ + "(defaults to `#{CreatePipelineFailureIncident::DEFAULT_OPTIONS[:incident_json_file]}`)") do |value| + options[:incident_json_file] = value + end + + opts.on("-t", "--api-token API_TOKEN", String, "A valid Project token with the `Reporter` role and `api` scope
"\ + "to create the incident") do |value| + options[:api_token] = value + end + + opts.on("-h", "--help", "Prints this help") do + puts opts + exit + end + end.parse! + + incident_json_file = options.delete(:incident_json_file) + + CreatePipelineFailureIncident.new(options).execute.tap do |incident| + File.write(incident_json_file, JSON.pretty_generate(incident.to_h)) if incident_json_file + end +end diff --git a/scripts/generate-failed-pipeline-slack-message.rb b/scripts/generate-failed-pipeline-slack-message.rb index 5dfa74e89ea..1adae56cceb 100755 --- a/scripts/generate-failed-pipeline-slack-message.rb +++ b/scripts/generate-failed-pipeline-slack-message.rb @@ -2,21 +2,23 @@ # frozen_string_literal: true +require 'optparse' +require 'json' + require_relative 'api/pipeline_failed_jobs' -finder_options = API::DEFAULT_OPTIONS.dup.merge(exclude_allowed_to_fail_jobs: true) -failed_jobs = PipelineFailedJobs.new(finder_options).execute +class GenerateFailedPipelineSlackMessage + DEFAULT_OPTIONS = { + failed_pipeline_slack_message_file: 'failed_pipeline_slack_message.json', + incident_json_file: 'incident.json' + }.freeze -class SlackReporter - DEFAULT_FAILED_PIPELINE_REPORT_FILE = 'failed_pipeline_report.json' - - def initialize(failed_jobs) - @failed_jobs = failed_jobs - @failed_pipeline_report_file = ENV.fetch('FAILED_PIPELINE_REPORT_FILE', DEFAULT_FAILED_PIPELINE_REPORT_FILE) + def initialize(options) + @incident_json_file = options.delete(:incident_json_file) end - def report - payload = { + def execute + { channel: ENV['SLACK_CHANNEL'], username: "Failed pipeline reporter", icon_emoji: ":boom:", @@ -27,33 +29,36 @@ class SlackReporter text: { type: "mrkdwn", text: "*#{title}*" + }, + accessory: { + type: "button", + text: { + type: "plain_text", + text: incident_button_text + }, + url: incident_button_link } }, { type: "section", - fields: [ - { - type: "mrkdwn", - text: "*Commit*\n#{commit_link}" - }, - { - type: "mrkdwn", - text: "*Triggered by*\n#{triggered_by_link}" - } - ] + text: { + type: "mrkdwn", + text: "*Branch*: #{branch_link}" + } }, { type: "section", - fields: [ - { - type: "mrkdwn", - text: "*Source*\n#{source} from #{project_link}" - }, - { - type: "mrkdwn", - text: "*Duration*\n#{pipeline_duration} minutes" - } - ] + text: { + type: "mrkdwn", + text: "*Commit*: #{commit_link}" + } + }, + { + type: "section", + text: { + type: "mrkdwn", + text: "*Triggered by* #{triggered_by_link} • *Source:* #{source} • *Duration:* #{pipeline_duration} minutes" + } }, { type: "section", @@ -64,16 +69,41 @@ class SlackReporter } ] } - - File.write(failed_pipeline_report_file, JSON.pretty_generate(payload)) end private - attr_reader :failed_jobs, :failed_pipeline_report_file + attr_reader :incident_json_file + + def failed_jobs + @failed_jobs ||= PipelineFailedJobs.new(API::DEFAULT_OPTIONS.dup.merge(exclude_allowed_to_fail_jobs: true)).execute + end def title - "Pipeline #{pipeline_link} for #{branch_link} failed" + "#{project_link} pipeline #{pipeline_link} failed" + end + + def incident_exist? + return @incident_exist if defined?(@incident_exist) + + @incident_exist = File.exist?(incident_json_file) + end + + def incident_button_text + if incident_exist? + 'View incident' + else + 'Create incident' + end + end + + def incident_button_link + if incident_exist? + JSON.parse(File.read(incident_json_file))['web_url'] + else + "#{ENV['CI_SERVER_URL']}/#{ENV['BROKEN_MASTER_INCIDENTS_PROJECT']}/-/issues/new?" 
\ + "issuable_template=incident&issue%5Bissue_type%5D=incident" + end end def pipeline_link @@ -101,7 +131,7 @@ class SlackReporter end def project_link - "<#{ENV['CI_PROJECT_URL']}|#{ENV['CI_PROJECT_NAME']}>" + "<#{ENV['CI_PROJECT_URL']}|#{ENV['CI_PROJECT_PATH']}>" end def triggered_by_link @@ -113,4 +143,33 @@ class SlackReporter end end -SlackReporter.new(failed_jobs).report +if $PROGRAM_NAME == __FILE__ + options = GenerateFailedPipelineSlackMessage::DEFAULT_OPTIONS.dup + + OptionParser.new do |opts| + opts.on("-i", "--incident-json-file file_path", String, "Path to a file where the incident JSON data "\ + "can be found (defaults to "\ + "`#{GenerateFailedPipelineSlackMessage::DEFAULT_OPTIONS[:incident_json_file]}`)") do |value| + options[:incident_json_file] = value + end + + opts.on("-f", "--failed-pipeline-slack-message-file file_path", String, "Path to a file where to save the Slack "\ + "message (defaults to "\ + "`#{GenerateFailedPipelineSlackMessage::DEFAULT_OPTIONS[:failed_pipeline_slack_message_file]}`)") do |value| + options[:failed_pipeline_slack_message_file] = value + end + + opts.on("-h", "--help", "Prints this help") do + puts opts + exit + end + end.parse! + + failed_pipeline_slack_message_file = options.delete(:failed_pipeline_slack_message_file) + + GenerateFailedPipelineSlackMessage.new(options).execute.tap do |message_payload| + if failed_pipeline_slack_message_file + File.write(failed_pipeline_slack_message_file, JSON.pretty_generate(message_payload)) + end + end +end diff --git a/spec/frontend/vue_shared/components/markdown_drawer/markdown_drawer_spec.js b/spec/frontend/vue_shared/components/markdown_drawer/markdown_drawer_spec.js new file mode 100644 index 00000000000..8edcb905096 --- /dev/null +++ b/spec/frontend/vue_shared/components/markdown_drawer/markdown_drawer_spec.js @@ -0,0 +1,205 @@ +import { GlDrawer, GlAlert, GlSkeletonLoader } from '@gitlab/ui'; +import { nextTick } from 'vue'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import MarkdownDrawer, { cache } from '~/vue_shared/components/markdown_drawer/markdown_drawer.vue'; +import { getRenderedMarkdown } from '~/vue_shared/components/markdown_drawer/utils/fetch'; +import { contentTop } from '~/lib/utils/common_utils'; + +jest.mock('~/vue_shared/components/markdown_drawer/utils/fetch', () => ({ + getRenderedMarkdown: jest.fn().mockReturnValue({ + title: 'test title test', + body: `
+
+ test body +
+
`, + }), +})); + +jest.mock('~/lib/utils/common_utils', () => ({ + contentTop: jest.fn(), +})); + +describe('MarkdownDrawer', () => { + let wrapper; + const defaultProps = { + documentPath: 'user/search/global_search/advanced_search_syntax.json', + }; + + const createComponent = (props) => { + wrapper = shallowMountExtended(MarkdownDrawer, { + propsData: { + ...defaultProps, + ...props, + }, + }); + }; + + afterEach(() => { + wrapper.destroy(); + wrapper = null; + Object.keys(cache).forEach((key) => delete cache[key]); + }); + + const findDrawer = () => wrapper.findComponent(GlDrawer); + const findAlert = () => wrapper.findComponent(GlAlert); + const findSkeleton = () => wrapper.findComponent(GlSkeletonLoader); + const findDrawerTitle = () => wrapper.findComponent('[data-testid="title-element"]'); + const findDrawerBody = () => wrapper.findComponent({ ref: 'content-element' }); + + describe('component', () => { + beforeEach(() => { + createComponent(); + }); + + it('renders correctly', () => { + expect(findDrawer().exists()).toBe(true); + expect(findDrawerTitle().text()).toBe('test title test'); + expect(findDrawerBody().text()).toBe('test body'); + }); + }); + + describe.each` + hasNavbar | navbarHeight + ${false} | ${0} + ${true} | ${100} + `('computes offsetTop', ({ hasNavbar, navbarHeight }) => { + beforeEach(() => { + global.document.querySelector = jest.fn(() => + hasNavbar + ? { + dataset: { + page: 'test', + }, + } + : undefined, + ); + contentTop.mockReturnValue(navbarHeight); + createComponent(); + }); + + afterEach(() => { + contentTop.mockClear(); + }); + + it(`computes offsetTop ${hasNavbar ? 'with' : 'without'} .navbar-gitlab`, () => { + expect(findDrawer().attributes('headerheight')).toBe(`${navbarHeight}px`); + }); + }); + + describe('watcher', () => { + let renderGLFMSpy; + let fetchMarkdownSpy; + + beforeEach(async () => { + renderGLFMSpy = jest.spyOn(MarkdownDrawer.methods, 'renderGLFM'); + fetchMarkdownSpy = jest.spyOn(MarkdownDrawer.methods, 'fetchMarkdown'); + global.document.querySelector = jest.fn(() => ({ + getBoundingClientRect: jest.fn(() => ({ bottom: 100 })), + dataset: { + page: 'test', + }, + })); + createComponent(); + await nextTick(); + }); + + afterEach(() => { + renderGLFMSpy.mockClear(); + fetchMarkdownSpy.mockClear(); + }); + + it('for documentPath triggers fetch', async () => { + expect(fetchMarkdownSpy).toHaveBeenCalledTimes(1); + + await wrapper.setProps({ documentPath: '/test/me' }); + await nextTick(); + + expect(fetchMarkdownSpy).toHaveBeenCalledTimes(2); + }); + + it('for open triggers renderGLFM', async () => { + wrapper.vm.fetchMarkdown(); + wrapper.vm.openDrawer(); + await nextTick(); + expect(renderGLFMSpy).toHaveBeenCalled(); + }); + }); + + describe('Markdown fetching', () => { + let renderGLFMSpy; + + beforeEach(async () => { + renderGLFMSpy = jest.spyOn(MarkdownDrawer.methods, 'renderGLFM'); + createComponent(); + await nextTick(); + }); + + afterEach(() => { + renderGLFMSpy.mockClear(); + }); + + it('fetches the Markdown and caches it', async () => { + expect(getRenderedMarkdown).toHaveBeenCalledTimes(1); + expect(Object.keys(cache)).toHaveLength(1); + }); + + it('when the document changes, fetches it and caches it as well', async () => { + expect(getRenderedMarkdown).toHaveBeenCalledTimes(1); + expect(Object.keys(cache)).toHaveLength(1); + + await wrapper.setProps({ documentPath: '/test/me2' }); + await nextTick(); + + expect(getRenderedMarkdown).toHaveBeenCalledTimes(2); + expect(Object.keys(cache)).toHaveLength(2); + }); + + it('when 
re-using an already fetched document, gets it from the cache', async () => { + await wrapper.setProps({ documentPath: '/test/me2' }); + await nextTick(); + + expect(getRenderedMarkdown).toHaveBeenCalledTimes(2); + expect(Object.keys(cache)).toHaveLength(2); + + await wrapper.setProps({ documentPath: defaultProps.documentPath }); + await nextTick(); + + expect(getRenderedMarkdown).toHaveBeenCalledTimes(2); + expect(Object.keys(cache)).toHaveLength(2); + }); + }); + + describe('Markdown fetching returns error', () => { + beforeEach(async () => { + getRenderedMarkdown.mockReturnValue({ + hasFetchError: true, + }); + + createComponent(); + await nextTick(); + }); + afterEach(() => { + getRenderedMarkdown.mockClear(); + }); + it('shows alert', () => { + expect(findAlert().exists()).toBe(true); + }); + }); + + describe('While Markdown is fetching', () => { + beforeEach(async () => { + getRenderedMarkdown.mockReturnValue(new Promise(() => {})); + + createComponent(); + }); + + afterEach(() => { + getRenderedMarkdown.mockClear(); + }); + + it('shows skeleton', async () => { + expect(findSkeleton().exists()).toBe(true); + }); + }); +}); diff --git a/spec/frontend/vue_shared/components/markdown_drawer/mock_data.js b/spec/frontend/vue_shared/components/markdown_drawer/mock_data.js new file mode 100644 index 00000000000..53b40407556 --- /dev/null +++ b/spec/frontend/vue_shared/components/markdown_drawer/mock_data.js @@ -0,0 +1,42 @@ +export const MOCK_HTML = ` + + +
+

test title test

+
+ Advanced Search + Advanced Search2 +

test header h2

+ + + + + + + + + + + +
Emil Tobias Linus
16 14 10
+
+
+ +`.replace(/\n/g, ''); + +export const MOCK_DRAWER_DATA = { + hasFetchError: false, + title: 'test title test', + body: `
Advanced Search Advanced Search2

test header h2

Emil Tobias Linus
16 14 10
`, +}; + +export const MOCK_DRAWER_DATA_ERROR = { + hasFetchError: true, +}; + +export const MOCK_TABLE_DATA_BEFORE = `

test

`; + +export const MOCK_HTML_DATA_AFTER = { + body: '
', + title: 'test', +}; diff --git a/spec/frontend/vue_shared/components/markdown_drawer/utils/fetch_spec.js b/spec/frontend/vue_shared/components/markdown_drawer/utils/fetch_spec.js new file mode 100644 index 00000000000..ff07b2cf838 --- /dev/null +++ b/spec/frontend/vue_shared/components/markdown_drawer/utils/fetch_spec.js @@ -0,0 +1,43 @@ +import MockAdapter from 'axios-mock-adapter'; +import { + getRenderedMarkdown, + splitDocument, +} from '~/vue_shared/components/markdown_drawer/utils/fetch'; +import axios from '~/lib/utils/axios_utils'; +import { + MOCK_HTML, + MOCK_DRAWER_DATA, + MOCK_DRAWER_DATA_ERROR, + MOCK_TABLE_DATA_BEFORE, + MOCK_HTML_DATA_AFTER, +} from '../mock_data'; + +describe('utils/fetch', () => { + let mock; + + afterEach(() => { + mock.restore(); + }); + + describe.each` + axiosMock | type | toExpect + ${{ code: 200, res: { html: MOCK_HTML } }} | ${'success'} | ${MOCK_DRAWER_DATA} + ${{ code: 500, res: null }} | ${'error'} | ${MOCK_DRAWER_DATA_ERROR} + `('process markdown data', ({ axiosMock, type, toExpect }) => { + describe(`if api fetch responds with ${type}`, () => { + beforeEach(() => { + mock = new MockAdapter(axios); + mock.onGet().reply(axiosMock.code, axiosMock.res); + }); + it(`should update drawer correctly`, async () => { + expect(await getRenderedMarkdown('/any/path')).toStrictEqual(toExpect); + }); + }); + }); + + describe('splitDocument', () => { + it(`should update tables correctly`, () => { + expect(splitDocument(MOCK_TABLE_DATA_BEFORE)).toStrictEqual(MOCK_HTML_DATA_AFTER); + }); + }); +}); diff --git a/spec/presenters/ci/build_runner_presenter_spec.rb b/spec/presenters/ci/build_runner_presenter_spec.rb index 8d6278ab1d7..845ae79c497 100644 --- a/spec/presenters/ci/build_runner_presenter_spec.rb +++ b/spec/presenters/ci/build_runner_presenter_spec.rb @@ -359,23 +359,6 @@ RSpec.describe Ci::BuildRunnerPresenter do runner_variables end - - context 'when the FF ci_stop_expanding_file_vars_for_runners is disabled' do - before do - stub_feature_flags(ci_stop_expanding_file_vars_for_runners: false) - end - - it 'returns variables with expanded' do - expect(runner_variables).to include( - { key: 'regular_var', value: 'value 1', - public: false, masked: false }, - { key: 'file_var', value: 'value 2', - public: false, masked: false, file: true }, - { key: 'var_with_variables', value: 'value 3 and value 1 and value 2 and $undefined_var', - public: false, masked: false } - ) - end - end end end diff --git a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb index 73d185283b6..676f55be28a 100644 --- a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb +++ b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb @@ -225,7 +225,7 @@ RSpec.describe AutoMerge::MergeWhenPipelineSucceedsService do let!(:build) do create(:ci_build, :created, pipeline: pipeline, ref: ref, - name: 'build', ci_stage: build_stage ) + name: 'build', ci_stage: build_stage) end let!(:test) do diff --git a/spec/services/boards/issues/list_service_spec.rb b/spec/services/boards/issues/list_service_spec.rb index 72027911e51..1959710bb0c 100644 --- a/spec/services/boards/issues/list_service_spec.rb +++ b/spec/services/boards/issues/list_service_spec.rb @@ -27,7 +27,7 @@ RSpec.describe Boards::Issues::ListService do let_it_be(:opened_issue1) { create(:labeled_issue, project: project, milestone: m1, title: 'Issue 1', labels: [bug]) } let_it_be(:opened_issue2) { 
create(:labeled_issue, project: project, milestone: m2, title: 'Issue 2', labels: [p2]) } - let_it_be(:reopened_issue1) { create(:issue, :opened, project: project, title: 'Reopened Issue 1' ) } + let_it_be(:reopened_issue1) { create(:issue, :opened, project: project, title: 'Reopened Issue 1') } let_it_be(:list1_issue1) { create(:labeled_issue, project: project, milestone: m1, labels: [p2, development]) } let_it_be(:list1_issue2) { create(:labeled_issue, project: project, milestone: m2, labels: [development]) } @@ -110,7 +110,7 @@ RSpec.describe Boards::Issues::ListService do let!(:opened_issue1) { create(:labeled_issue, project: project, milestone: m1, title: 'Issue 1', labels: [bug]) } let!(:opened_issue2) { create(:labeled_issue, project: project, milestone: m2, title: 'Issue 2', labels: [p2, p2_project]) } let!(:opened_issue3) { create(:labeled_issue, project: project_archived, milestone: m1, title: 'Issue 3', labels: [bug]) } - let!(:reopened_issue1) { create(:issue, state: 'opened', project: project, title: 'Reopened Issue 1', closed_at: Time.current ) } + let!(:reopened_issue1) { create(:issue, state: 'opened', project: project, title: 'Reopened Issue 1', closed_at: Time.current) } let!(:list1_issue1) { create(:labeled_issue, project: project, milestone: m1, labels: [p2, p2_project, development]) } let!(:list1_issue2) { create(:labeled_issue, project: project, milestone: m2, labels: [development]) } diff --git a/spec/services/ci/compare_test_reports_service_spec.rb b/spec/services/ci/compare_test_reports_service_spec.rb index 6d3df0f5383..f259072fe87 100644 --- a/spec/services/ci/compare_test_reports_service_spec.rb +++ b/spec/services/ci/compare_test_reports_service_spec.rb @@ -41,7 +41,7 @@ RSpec.describe Ci::CompareTestReportsService do it 'returns a parsed TestReports success status and failure on the individual suite' do expect(comparison[:status]).to eq(:parsed) expect(comparison.dig(:data, 'status')).to eq('success') - expect(comparison.dig(:data, 'suites', 0, 'status') ).to eq('error') + expect(comparison.dig(:data, 'suites', 0, 'status')).to eq('error') end end diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb index 7578afa7c50..d0aa1ba4c6c 100644 --- a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb +++ b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb @@ -104,7 +104,7 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection describe '#processing_processables' do it 'returns processables marked as processing' do - expect(collection.processing_processables.map { |processable| processable[:id] } ) + expect(collection.processing_processables.map { |processable| processable[:id] }) .to contain_exactly(build_a.id, build_b.id, test_a.id, test_b.id, deploy.id) end end diff --git a/spec/services/ci/retry_job_service_spec.rb b/spec/services/ci/retry_job_service_spec.rb index 69f19c5acf2..1c9ec610bc6 100644 --- a/spec/services/ci/retry_job_service_spec.rb +++ b/spec/services/ci/retry_job_service_spec.rb @@ -153,7 +153,7 @@ RSpec.describe Ci::RetryJobService do context 'when the pipeline has other jobs' do let!(:stage2) { create(:ci_stage, project: project, pipeline: pipeline, name: 'deploy') } - let!(:build2) { create(:ci_build, pipeline: pipeline, ci_stage: stage ) } + let!(:build2) { create(:ci_build, pipeline: pipeline, ci_stage: 
stage) } let!(:deploy) { create(:ci_build, pipeline: pipeline, ci_stage: stage2) } let!(:deploy_needs_build2) { create(:ci_build_need, build: deploy, name: build2.name) } diff --git a/spec/services/clusters/gcp/provision_service_spec.rb b/spec/services/clusters/gcp/provision_service_spec.rb index c5778db6001..c8b7f628e5b 100644 --- a/spec/services/clusters/gcp/provision_service_spec.rb +++ b/spec/services/clusters/gcp/provision_service_spec.rb @@ -42,7 +42,7 @@ RSpec.describe Clusters::Gcp::ProvisionService do gcp_project_id, zone, { "status": 'unexpected' - } ) + }) end it_behaves_like 'error' diff --git a/spec/services/clusters/gcp/verify_provision_status_service_spec.rb b/spec/services/clusters/gcp/verify_provision_status_service_spec.rb index ccb4b3b6c15..ffe4516c02b 100644 --- a/spec/services/clusters/gcp/verify_provision_status_service_spec.rb +++ b/spec/services/clusters/gcp/verify_provision_status_service_spec.rb @@ -44,7 +44,7 @@ RSpec.describe Clusters::Gcp::VerifyProvisionStatusService do { "status": 'RUNNING', "startTime": 1.minute.ago.strftime("%FT%TZ") - } ) + }) end it_behaves_like 'continue_creation' @@ -56,7 +56,7 @@ RSpec.describe Clusters::Gcp::VerifyProvisionStatusService do { "status": 'RUNNING', "startTime": 30.minutes.ago.strftime("%FT%TZ") - } ) + }) end it_behaves_like 'error' @@ -70,7 +70,7 @@ RSpec.describe Clusters::Gcp::VerifyProvisionStatusService do { "status": 'PENDING', "startTime": 1.minute.ago.strftime("%FT%TZ") - } ) + }) end it_behaves_like 'continue_creation' @@ -82,7 +82,7 @@ RSpec.describe Clusters::Gcp::VerifyProvisionStatusService do gcp_project_id, zone, operation_id, { "status": 'DONE' - } ) + }) end it_behaves_like 'finalize_creation' @@ -94,7 +94,7 @@ RSpec.describe Clusters::Gcp::VerifyProvisionStatusService do gcp_project_id, zone, operation_id, { "status": 'unexpected' - } ) + }) end it_behaves_like 'error' diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb index 36e868fa5f1..0b552602204 100644 --- a/spec/services/groups/destroy_service_spec.rb +++ b/spec/services/groups/destroy_service_spec.rb @@ -146,7 +146,7 @@ RSpec.describe Groups::DestroyService do end expect { destroy_group(group, user, false) } - .to raise_error(Groups::DestroyService::DestroyError, "Project #{project.id} can't be deleted" ) + .to raise_error(Groups::DestroyService::DestroyError, "Project #{project.id} can't be deleted") end end diff --git a/spec/services/groups/update_shared_runners_service_spec.rb b/spec/services/groups/update_shared_runners_service_spec.rb index 6e938984052..98eccedeace 100644 --- a/spec/services/groups/update_shared_runners_service_spec.rb +++ b/spec/services/groups/update_shared_runners_service_spec.rb @@ -127,7 +127,7 @@ RSpec.describe Groups::UpdateSharedRunnersService do end context 'when parent does not allow' do - let_it_be(:parent) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false ) } + let_it_be(:parent) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false) } let_it_be(:group) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) } it 'results error' do diff --git a/spec/services/import/fogbugz_service_spec.rb b/spec/services/import/fogbugz_service_spec.rb index 7b86c5c45b0..027d0240a7a 100644 --- a/spec/services/import/fogbugz_service_spec.rb +++ b/spec/services/import/fogbugz_service_spec.rb @@ -119,7 +119,7 @@ 
RSpec.describe Import::FogbugzService do let(:error_messages_array) { instance_double(Array, join: "something went wrong") } let(:errors_double) { instance_double(ActiveModel::Errors, full_messages: error_messages_array, :[] => nil) } let(:project_double) { instance_double(Project, persisted?: false, errors: errors_double) } - let(:project_creator) { instance_double(Gitlab::FogbugzImport::ProjectCreator, execute: project_double ) } + let(:project_creator) { instance_double(Gitlab::FogbugzImport::ProjectCreator, execute: project_double) } before do allow(Gitlab::FogbugzImport::ProjectCreator).to receive(:new).and_return(project_creator) diff --git a/spec/services/import/gitlab_projects/file_acquisition_strategies/file_upload_spec.rb b/spec/services/import/gitlab_projects/file_acquisition_strategies/file_upload_spec.rb index 28af6219812..3c788138157 100644 --- a/spec/services/import/gitlab_projects/file_acquisition_strategies/file_upload_spec.rb +++ b/spec/services/import/gitlab_projects/file_acquisition_strategies/file_upload_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::FileUpload, :aggregate_failures do - let(:file) { UploadedFile.new( File.join('spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz') ) } + let(:file) { UploadedFile.new(File.join('spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz')) } describe 'validation' do it 'validates presence of file' do diff --git a/spec/services/issues/export_csv_service_spec.rb b/spec/services/issues/export_csv_service_spec.rb index d04480bec18..66d017464bf 100644 --- a/spec/services/issues/export_csv_service_spec.rb +++ b/spec/services/issues/export_csv_service_spec.rb @@ -185,7 +185,7 @@ RSpec.describe Issues::ExportCsvService do labeled_rows = csv.select { |entry| labeled_issues.map(&:iid).include?(entry['Issue ID'].to_i) } expect(labeled_rows.count).to eq(2) - expect(labeled_rows.map { |entry| entry['Labels'] }).to all( eq("Feature,Idea") ) + expect(labeled_rows.map { |entry| entry['Labels'] }).to all(eq("Feature,Idea")) end end end diff --git a/spec/services/labels/promote_service_spec.rb b/spec/services/labels/promote_service_spec.rb index a10aaa14030..3af6cf4c8f4 100644 --- a/spec/services/labels/promote_service_spec.rb +++ b/spec/services/labels/promote_service_spec.rb @@ -171,7 +171,7 @@ RSpec.describe Labels::PromoteService do end context 'when there is an existing identical group label' do - let!(:existing_group_label) { create(:group_label, group: group_1, title: project_label_1_1.title ) } + let!(:existing_group_label) { create(:group_label, group: group_1, title: project_label_1_1.title) } it 'uses the existing group label' do expect { service.execute(project_label_1_1) } diff --git a/spec/services/members/invite_service_spec.rb b/spec/services/members/invite_service_spec.rb index 6dbe161ee02..6fa781a84c2 100644 --- a/spec/services/members/invite_service_spec.rb +++ b/spec/services/members/invite_service_spec.rb @@ -12,7 +12,7 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_ let(:params) { {} } let(:base_params) { { access_level: Gitlab::Access::GUEST, source: project, invite_source: '_invite_source_' } } - subject(:result) { described_class.new(user, base_params.merge(params) ).execute } + subject(:result) { described_class.new(user, base_params.merge(params)).execute } context 'when there is a valid member invited' do let(:params) { { email: 'email@example.org' } } diff --git 
a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb index 6a6f01e6a95..4f27ff30da7 100644 --- a/spec/services/merge_requests/build_service_spec.rb +++ b/spec/services/merge_requests/build_service_spec.rb @@ -93,7 +93,7 @@ RSpec.describe MergeRequests::BuildService do shared_examples 'with a Default.md template' do let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } } - let(:project) { create(:project, :custom_repo, files: files ) } + let(:project) { create(:project, :custom_repo, files: files) } it 'the template description is preferred' do expect(merge_request.description).to eq('Default template contents') @@ -306,7 +306,7 @@ RSpec.describe MergeRequests::BuildService do context 'a Default.md template is defined' do let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } } - let(:project) { create(:project, :custom_repo, files: files ) } + let(:project) { create(:project, :custom_repo, files: files) } it 'appends the closing description to a Default.md template' do expected_description = ['Default template contents', closing_message].compact.join("\n\n") @@ -386,7 +386,7 @@ RSpec.describe MergeRequests::BuildService do context 'a Default.md template is defined' do let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } } - let(:project) { create(:project, :custom_repo, files: files ) } + let(:project) { create(:project, :custom_repo, files: files) } it 'keeps the description from the initial params' do expect(merge_request.description).to eq(description) @@ -425,7 +425,7 @@ RSpec.describe MergeRequests::BuildService do context 'a Default.md template is defined' do let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } } - let(:project) { create(:project, :custom_repo, files: files ) } + let(:project) { create(:project, :custom_repo, files: files) } it 'appends the closing description to a Default.md template' do expected_description = ['Default template contents', closing_message].compact.join("\n\n") @@ -486,7 +486,7 @@ RSpec.describe MergeRequests::BuildService do context 'a Default.md template is defined' do let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } } - let(:project) { create(:project, :custom_repo, files: files ) } + let(:project) { create(:project, :custom_repo, files: files) } it 'appends the closing description to a Default.md template' do expected_description = ['Default template contents', closing_message].compact.join("\n\n") @@ -715,7 +715,7 @@ RSpec.describe MergeRequests::BuildService do context 'when a Default template is found' do context 'when its contents cannot be retrieved' do let(:files) { { '.gitlab/merge_request_templates/OtherTemplate.md' => 'Other template contents' } } - let(:project) { create(:project, :custom_repo, files: files ) } + let(:project) { create(:project, :custom_repo, files: files) } it 'does not modify the merge request description' do allow(TemplateFinder).to receive(:all_template_names).and_return({ @@ -732,7 +732,7 @@ RSpec.describe MergeRequests::BuildService do context 'when its contents can be retrieved' do let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } } - let(:project) { create(:project, :custom_repo, files: files ) } + let(:project) { create(:project, :custom_repo, files: files) } it 'modifies the merge request description' do 
 merge_request.description = nil
diff --git a/spec/services/notes/update_service_spec.rb b/spec/services/notes/update_service_spec.rb
index 989ca7b8df1..05703ac548d 100644
--- a/spec/services/notes/update_service_spec.rb
+++ b/spec/services/notes/update_service_spec.rb
@@ -245,7 +245,7 @@ RSpec.describe Notes::UpdateService do
 context 'for a personal snippet' do
 let_it_be(:snippet) { create(:personal_snippet, :public) }
- let(:note) { create(:note, project: nil, noteable: snippet, author: user, note: "Note on a snippet with reference #{issue.to_reference}" ) }
+ let(:note) { create(:note, project: nil, noteable: snippet, author: user, note: "Note on a snippet with reference #{issue.to_reference}") }
 it 'does not create todos' do
 expect { update_note({ note: "Mentioning user #{user2}" }) }.not_to change { note.todos.count }
diff --git a/spec/services/packages/composer/composer_json_service_spec.rb b/spec/services/packages/composer/composer_json_service_spec.rb
index 378016a6ffb..d2187688c4c 100644
--- a/spec/services/packages/composer/composer_json_service_spec.rb
+++ b/spec/services/packages/composer/composer_json_service_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Packages::Composer::ComposerJsonService do
 subject { described_class.new(project, target).execute }
 context 'with an existing file' do
- let(:project) { create(:project, :custom_repo, files: { 'composer.json' => json } ) }
+ let(:project) { create(:project, :custom_repo, files: { 'composer.json' => json }) }
 context 'with a valid file' do
 let(:json) { '{ "name": "package-name"}' }
diff --git a/spec/services/packages/npm/create_package_service_spec.rb b/spec/services/packages/npm/create_package_service_spec.rb
index a3e59913918..ef8cdf2e8ab 100644
--- a/spec/services/packages/npm/create_package_service_spec.rb
+++ b/spec/services/packages/npm/create_package_service_spec.rb
@@ -148,7 +148,7 @@ RSpec.describe Packages::Npm::CreatePackageService do
 end
 context 'when file size is faked by setting the attachment length param to a lower size' do
- let(:params) { super().deep_merge!( { _attachments: { "#{package_name}-#{version}.tgz" => { data: encoded_package_data, length: 1 } } }) }
+ let(:params) { super().deep_merge!({ _attachments: { "#{package_name}-#{version}.tgz" => { data: encoded_package_data, length: 1 } } }) }
 # TODO (technical debt): Extract the package size calculation outside the service and add separate specs for it.
 # Right now we have several contexts here to test the calculation's different scenarios.
@@ -193,7 +193,7 @@ RSpec.describe Packages::Npm::CreatePackageService do
 end
 context 'with empty versions' do
- let(:params) { super().merge!({ versions: {} } ) }
+ let(:params) { super().merge!({ versions: {} }) }
 it { expect(subject[:http_status]).to eq 400 }
 it { expect(subject[:message]).to eq 'Version is empty.' }
diff --git a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
index b67b4d64c1d..6c7164c5e06 100644
--- a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
@@ -126,7 +126,7 @@ RSpec.describe Projects::LfsPointers::LfsDownloadService do
 let(:redirect_link) { 'http://external-link' }
 before do
- stub_full_request(download_link).to_return(status: 301, body: 'You are being redirected', headers: { 'Location' => redirect_link } )
+ stub_full_request(download_link).to_return(status: 301, body: 'You are being redirected', headers: { 'Location' => redirect_link })
 stub_full_request(redirect_link).to_return(body: lfs_content)
 end
diff --git a/spec/services/search/group_service_spec.rb b/spec/services/search/group_service_spec.rb
index 152d0700cc1..c9bfa7cb7b4 100644
--- a/spec/services/search/group_service_spec.rb
+++ b/spec/services/search/group_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Search::GroupService do
 # These projects shouldn't be found
 let!(:outside_project) { create(:project, :public, name: "Outside #{term}") }
- let!(:private_project) { create(:project, :private, namespace: nested_group, name: "Private #{term}" ) }
+ let!(:private_project) { create(:project, :private, namespace: nested_group, name: "Private #{term}") }
 let!(:other_project) { create(:project, :public, namespace: nested_group, name: term.reverse) }
 # These projects should be found
diff --git a/spec/services/security/merge_reports_service_spec.rb b/spec/services/security/merge_reports_service_spec.rb
index e61977297c5..8415ed8a22f 100644
--- a/spec/services/security/merge_reports_service_spec.rb
+++ b/spec/services/security/merge_reports_service_spec.rb
@@ -219,10 +219,10 @@ RSpec.describe Security::MergeReportsService, '#execute' do
 let(:finding_id_1) { build(:ci_reports_security_finding, identifiers: [identifier_bandit, identifier_cve], scanner: bandit_scanner, report_type: :sast) }
 let(:finding_id_2) { build(:ci_reports_security_finding, identifiers: [identifier_cve], scanner: semgrep_scanner, report_type: :sast) }
- let(:finding_id_3) { build(:ci_reports_security_finding, identifiers: [identifier_semgrep], scanner: semgrep_scanner, report_type: :sast ) }
+ let(:finding_id_3) { build(:ci_reports_security_finding, identifiers: [identifier_semgrep], scanner: semgrep_scanner, report_type: :sast) }
 let(:bandit_report) do
- build( :ci_reports_security_report,
+ build(:ci_reports_security_report,
 type: :sast,
 scanners: [bandit_scanner],
 findings: [finding_id_1],
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index b2ccd9dba52..3263e410d3c 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -175,7 +175,7 @@ RSpec.describe ::SystemNotes::IssuablesService do
 it 'builds a correct phrase when one reviewer removed from a set' do
 expect(build_note([reviewer, reviewer1, reviewer2], [reviewer, reviewer1])).to(
- eq( "removed review request for @#{reviewer2.username}")
+ eq("removed review request for @#{reviewer2.username}")
 )
 end
@@ -681,7 +681,7 @@ RSpec.describe ::SystemNotes::IssuablesService do
 it 'tracks usage' do
 expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter)
- .to receive(:track_issue_cloned_action).with(author: author, project: project )
+ .to receive(:track_issue_cloned_action).with(author: author, project: project)
 subject
 end
diff --git a/spec/services/todos/destroy/entity_leave_service_spec.rb b/spec/services/todos/destroy/entity_leave_service_spec.rb
index 225e7933d79..9942bd8688b 100644
--- a/spec/services/todos/destroy/entity_leave_service_spec.rb
+++ b/spec/services/todos/destroy/entity_leave_service_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Todos::Destroy::EntityLeaveService do
 let!(:todo_issue_user) { create(:todo, user: user, target: issue, project: project) }
 let!(:todo_issue_c_user) { create(:todo, user: user, target: issue_c, project: project) }
 let!(:todo_issue_c_user2) { create(:todo, user: user2, target: issue_c, project: project) }
- let(:internal_note) { create(:note, noteable: issue, project: project, confidential: true ) }
+ let(:internal_note) { create(:note, noteable: issue, project: project, confidential: true) }
 let!(:todo_for_internal_note) do
 create(:todo, user: user, target: issue, project: project, note: internal_note)
 end
@@ -250,7 +250,7 @@ RSpec.describe Todos::Destroy::EntityLeaveService do
 let!(:todo_subproject_user2) { create(:todo, user: user2, project: subproject) }
 let!(:todo_subpgroup_user2) { create(:todo, user: user2, group: subgroup) }
 let!(:todo_parent_group_user) { create(:todo, user: user, group: parent_group) }
- let(:subproject_internal_note) { create(:note, noteable: issue, project: project, confidential: true ) }
+ let(:subproject_internal_note) { create(:note, noteable: issue, project: project, confidential: true) }
 let!(:todo_for_internal_subproject_note) do
 create(:todo, user: user, target: issue, project: project, note: subproject_internal_note)
 end
diff --git a/spec/services/topics/merge_service_spec.rb b/spec/services/topics/merge_service_spec.rb
index eef31817aa8..98247250a61 100644
--- a/spec/services/topics/merge_service_spec.rb
+++ b/spec/services/topics/merge_service_spec.rb
@@ -5,10 +5,10 @@ require 'spec_helper'
 RSpec.describe Topics::MergeService do
 let_it_be(:source_topic) { create(:topic, name: 'source_topic') }
 let_it_be(:target_topic) { create(:topic, name: 'target_topic') }
- let_it_be(:project_1) { create(:project, :public, topic_list: source_topic.name ) }
- let_it_be(:project_2) { create(:project, :private, topic_list: source_topic.name ) }
- let_it_be(:project_3) { create(:project, :public, topic_list: target_topic.name ) }
- let_it_be(:project_4) { create(:project, :public, topic_list: [source_topic.name, target_topic.name] ) }
+ let_it_be(:project_1) { create(:project, :public, topic_list: source_topic.name) }
+ let_it_be(:project_2) { create(:project, :private, topic_list: source_topic.name) }
+ let_it_be(:project_3) { create(:project, :public, topic_list: target_topic.name) }
+ let_it_be(:project_4) { create(:project, :public, topic_list: [source_topic.name, target_topic.name]) }
 subject { described_class.new(source_topic, target_topic).execute }
diff --git a/spec/services/users/destroy_service_spec.rb b/spec/services/users/destroy_service_spec.rb
index 03e1811c8a5..30532e929a6 100644
--- a/spec/services/users/destroy_service_spec.rb
+++ b/spec/services/users/destroy_service_spec.rb
@@ -129,7 +129,7 @@ RSpec.describe Users::DestroyService do
 expect { service.execute(user) }
 .to raise_error(Users::DestroyService::DestroyError,
- "Project #{project.id} can't be deleted" )
+ "Project #{project.id} can't be deleted")
 end
 end
 end
@@ -243,7 +243,7 @@ RSpec.describe Users::DestroyService do
 aggregate_failures do
 expect { service.execute(user) }
- .to raise_error(Users::DestroyService::DestroyError, 'foo' )
+ .to raise_error(Users::DestroyService::DestroyError, 'foo')
 expect(snippet.reload).not_to be_nil
 expect(
 gitlab_shell.repository_exists?(snippet.repository_storage,
@@ -546,7 +546,7 @@ RSpec.describe Users::DestroyService do
 end.to(
 change do
 Users::GhostUserMigration.where(user: other_user,
- initiator_user: user )
+ initiator_user: user)
 .exists?
 end.from(false).to(true))
 end
diff --git a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
index 766be51ae13..a2495eb8e2b 100644
--- a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
+++ b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
@@ -235,7 +235,7 @@ RSpec.describe Users::MigrateRecordsToGhostUserService do
 aggregate_failures do
 expect { service.execute }.to(
- raise_error(Users::MigrateRecordsToGhostUserService::DestroyError, 'foo' ))
+ raise_error(Users::MigrateRecordsToGhostUserService::DestroyError, 'foo'))
 expect(snippet.reload).not_to be_nil
 expect(
 gitlab_shell.repository_exists?(snippet.repository_storage,
diff --git a/spec/services/x509_certificate_revoke_service_spec.rb b/spec/services/x509_certificate_revoke_service_spec.rb
index adad3281c13..ff5d2dc058b 100644
--- a/spec/services/x509_certificate_revoke_service_spec.rb
+++ b/spec/services/x509_certificate_revoke_service_spec.rb
@@ -5,11 +5,11 @@ require 'spec_helper'
 RSpec.describe X509CertificateRevokeService do
 describe '#execute' do
 let(:service) { described_class.new }
- let!(:x509_signature_1) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified ) }
- let!(:x509_signature_2) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified ) }
+ let!(:x509_signature_1) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified) }
+ let!(:x509_signature_2) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified) }
 context 'for revoked certificates' do
- let(:x509_certificate) { create(:x509_certificate, certificate_status: :revoked ) }
+ let(:x509_certificate) { create(:x509_certificate, certificate_status: :revoked) }
 it 'update all commit signatures' do
 expect do
diff --git a/storybook/config/preview.js b/storybook/config/preview.js
index 6f3b8190742..70bd9873833 100644
--- a/storybook/config/preview.js
+++ b/storybook/config/preview.js
@@ -3,6 +3,10 @@ import Vue from 'vue';
 import { createMockServer } from 'test_helpers/mock_server';
 import translateMixin from '~/vue_shared/translate';
+// fixing toJSON error
+// https://github.com/storybookjs/storybook/issues/14933
+Vue.prototype.toJSON = () => {};
+
 const stylesheetsRequireCtx = require.context(
 '../../app/assets/stylesheets',
 true,