Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-10-27 21:09:58 +00:00
parent bd746eebdc
commit 1f992463a9
54 changed files with 944 additions and 220 deletions

View File

@@ -49,6 +49,7 @@ workflow:
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE == "schedule" && $SCHEDULE_TYPE == "maintenance"'
variables:
CRYSTALBALL: "true"
CREATE_INCIDENT_FOR_PIPELINE_FAILURE: "true"
NOTIFY_PIPELINE_FAILURE_CHANNEL: "master-broken"
# Run pipelines for ruby3 branch
- if: '$CI_COMMIT_BRANCH == "ruby3"'
@@ -62,9 +63,12 @@ workflow:
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $GITLAB_USER_LOGIN =~ /project_\d+_bot\d*/'
variables:
GITLAB_DEPENDENCY_PROXY_ADDRESS: ""
CREATE_INCIDENT_FOR_PIPELINE_FAILURE: "true"
NOTIFY_PIPELINE_FAILURE_CHANNEL: "master-broken"
# For `$CI_DEFAULT_BRANCH` branch, create a pipeline (this includes on schedules, pushes, merges, etc.).
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
variables:
CREATE_INCIDENT_FOR_PIPELINE_FAILURE: "true"
NOTIFY_PIPELINE_FAILURE_CHANNEL: "master-broken"
# For tags, create a pipeline.
- if: '$CI_COMMIT_TAG'

View File

@@ -1,8 +1,12 @@
.notify-slack:
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}alpine/curl
.notify-defaults:
stage: notify
dependencies: []
cache: {}
.notify-slack:
extends:
- .notify-defaults
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}alpine/curl
variables:
MERGE_REQUEST_URL: ${CI_MERGE_REQUEST_PROJECT_URL}/-/merge_requests/${CI_MERGE_REQUEST_IID}
before_script:
@@ -34,28 +38,31 @@ notify-security-pipeline:
- scripts/slack ${NOTIFY_CHANNEL} "<!subteam^S0127FU8PDE> ☠️ Pipeline for merged result failed! ☠️ See ${CI_PIPELINE_URL} (triggered from ${MERGE_REQUEST_URL})" ci_failing "GitLab Release Tools Bot"
notify-pipeline-failure:
extends: .notify-slack
extends:
- .notify-defaults
- .notify:rules:notify-pipeline-failure
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}
rules:
# Don't report child pipeline failures
- if: '$CI_PIPELINE_SOURCE == "parent_pipeline"'
when: never
- if: '$CI_SLACK_WEBHOOK_URL && $NOTIFY_PIPELINE_FAILURE_CHANNEL'
when: on_failure
allow_failure: true
variables:
BROKEN_MASTER_INCIDENTS_PROJECT: "gitlab-org/quality/engineering-productivity/master-broken-incidents"
BROKEN_MASTER_INCIDENT_JSON: "${CI_PROJECT_DIR}/incident.json"
SLACK_CHANNEL: "${NOTIFY_PIPELINE_FAILURE_CHANNEL}"
FAILED_PIPELINE_REPORT_FILE: "failed_pipeline_report.json"
FAILED_PIPELINE_SLACK_MESSAGE_FILE: "${CI_PROJECT_DIR}/failed_pipeline_slack_message.json"
before_script:
- source scripts/utils.sh
- apt-get update && apt-get install -y jq
- install_gitlab_gem
script:
- scripts/generate-failed-pipeline-slack-message.rb
- |
curl -X POST -H 'Content-Type: application/json' --data @${FAILED_PIPELINE_REPORT_FILE} "$CI_SLACK_WEBHOOK_URL"
if [[ "${CREATE_INCIDENT_FOR_PIPELINE_FAILURE}" == "true" ]]; then
scripts/create-pipeline-failure-incident.rb -p ${BROKEN_MASTER_INCIDENTS_PROJECT} -f ${BROKEN_MASTER_INCIDENT_JSON} -t ${BROKEN_MASTER_INCIDENTS_PROJECT_TOKEN};
echosuccess "Created incident $(jq '.web_url' ${BROKEN_MASTER_INCIDENT_JSON})";
fi
- |
scripts/generate-failed-pipeline-slack-message.rb -i ${BROKEN_MASTER_INCIDENT_JSON} -f ${FAILED_PIPELINE_SLACK_MESSAGE_FILE};
curl -X POST -H 'Content-Type: application/json' --data @${FAILED_PIPELINE_SLACK_MESSAGE_FILE} "$CI_SLACK_WEBHOOK_URL";
artifacts:
paths:
- ${FAILED_PIPELINE_REPORT_FILE}
- ${BROKEN_MASTER_INCIDENT_JSON}
- ${FAILED_PIPELINE_SLACK_MESSAGE_FILE}
when: always
expire_in: 2 days

View File

@@ -997,6 +997,18 @@
- <<: *if-default-refs
changes: *code-patterns
##########
# Notify #
##########
.notify:rules:notify-pipeline-failure:
rules:
# Don't report child pipeline failures
- if: '$CI_PIPELINE_SOURCE == "parent_pipeline"'
when: never
- if: '$CI_SLACK_WEBHOOK_URL && $NOTIFY_PIPELINE_FAILURE_CHANNEL'
when: on_failure
allow_failure: true
###############
# Pages rules #
###############

View File

@@ -249,28 +249,6 @@ Layout/SpaceInsideParens:
- 'spec/requests/search_controller_spec.rb'
- 'spec/serializers/analytics_build_entity_spec.rb'
- 'spec/serializers/merge_request_user_entity_spec.rb'
- 'spec/services/boards/issues/list_service_spec.rb'
- 'spec/services/ci/compare_test_reports_service_spec.rb'
- 'spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb'
- 'spec/services/ci/retry_job_service_spec.rb'
- 'spec/services/clusters/gcp/provision_service_spec.rb'
- 'spec/services/clusters/gcp/verify_provision_status_service_spec.rb'
- 'spec/services/groups/destroy_service_spec.rb'
- 'spec/services/groups/update_shared_runners_service_spec.rb'
- 'spec/services/import/gitlab_projects/file_acquisition_strategies/file_upload_spec.rb'
- 'spec/services/issues/export_csv_service_spec.rb'
- 'spec/services/labels/promote_service_spec.rb'
- 'spec/services/members/invite_service_spec.rb'
- 'spec/services/notes/update_service_spec.rb'
- 'spec/services/packages/composer/composer_json_service_spec.rb'
- 'spec/services/packages/npm/create_package_service_spec.rb'
- 'spec/services/projects/lfs_pointers/lfs_download_service_spec.rb'
- 'spec/services/search/group_service_spec.rb'
- 'spec/services/security/merge_reports_service_spec.rb'
- 'spec/services/suggestions/apply_service_spec.rb'
- 'spec/services/system_notes/issuables_service_spec.rb'
- 'spec/services/users/destroy_service_spec.rb'
- 'spec/services/x509_certificate_revoke_service_spec.rb'
- 'spec/support/helpers/database/partitioning_helpers.rb'
- 'spec/support/helpers/dependency_proxy_helpers.rb'
- 'spec/support/helpers/javascript_fixtures_helpers.rb'

View File

@@ -168,9 +168,9 @@ gem 'seed-fu', '~> 2.3.7'
gem 'elasticsearch-model', '~> 7.2'
gem 'elasticsearch-rails', '~> 7.2', require: 'elasticsearch/rails/instrumentation'
gem 'elasticsearch-api', '7.13.3'
gem 'aws-sdk-core', '~> 3.159.0'
gem 'aws-sdk-core', '~> 3.166.0'
gem 'aws-sdk-cloudformation', '~> 1'
gem 'aws-sdk-s3', '~> 1.114.0'
gem 'aws-sdk-s3', '~> 1.117.1'
gem 'faraday_middleware-aws-sigv4', '~>0.3.0'
gem 'typhoeus', '~> 1.4.0' # Used with Elasticsearch to support http keep-alive connections

View File

@@ -33,11 +33,11 @@
{"name":"awesome_print","version":"1.9.2","platform":"ruby","checksum":"e99b32b704acff16d768b3468680793ced40bfdc4537eb07e06a4be11133786e"},
{"name":"awrence","version":"1.1.1","platform":"ruby","checksum":"9be584c97408ed92d5e1ca11740853646fe270de675f2f8dd44e8233226dfc97"},
{"name":"aws-eventstream","version":"1.2.0","platform":"ruby","checksum":"ffa53482c92880b001ff2fb06919b9bb82fd847cbb0fa244985d2ebb6dd0d1df"},
{"name":"aws-partitions","version":"1.644.0","platform":"ruby","checksum":"63791750839afff110c5b5a8805018c4275720d7a5c7ec79319d4d520c7da874"},
{"name":"aws-partitions","version":"1.651.0","platform":"ruby","checksum":"61f354049eb2c10bf0aa96b115f7443d181d79ec5508f7a34b8724c4cfa95dda"},
{"name":"aws-sdk-cloudformation","version":"1.41.0","platform":"ruby","checksum":"31e47539719734413671edf9b1a31f8673fbf9688549f50c41affabbcb1c6b26"},
{"name":"aws-sdk-core","version":"3.159.0","platform":"ruby","checksum":"8863e2cdfd6816a0532ad9eb82a07b81b3d12667da747e9e82554e4dd7adb231"},
{"name":"aws-sdk-kms","version":"1.57.0","platform":"ruby","checksum":"ffd7dbb9b4251f29d4f508af761d0addd7035a346a88e3481cdb4dc548e51bd5"},
{"name":"aws-sdk-s3","version":"1.114.0","platform":"ruby","checksum":"ce0f71df1a7b0fb1f88d40a70636ef1a9b08e69fb560694c5dab3f4ac7efcde4"},
{"name":"aws-sdk-core","version":"3.166.0","platform":"ruby","checksum":"827b82a31f13007fbd3ce78801949019ad3b6fa0c658270d5caa6095cb4945fa"},
{"name":"aws-sdk-kms","version":"1.59.0","platform":"ruby","checksum":"6c002ebf8e404625c8338ca12ae69b1329399f9dc1b0ebca474e00ff06700153"},
{"name":"aws-sdk-s3","version":"1.117.1","platform":"ruby","checksum":"76f6dac5baeb2b78616eb34c6af650c1b7a15c1078b169d1b27e8421904c509d"},
{"name":"aws-sigv4","version":"1.5.1","platform":"ruby","checksum":"d68c87fff4ee843b4b92b23c7f31f957f254ec6eb064181f7119124aab8b8bb4"},
{"name":"azure-storage-blob","version":"2.0.3","platform":"ruby","checksum":"61b76118843c91776bd24bee22c74adafeb7c4bb3a858a325047dae3b59d0363"},
{"name":"azure-storage-common","version":"2.0.4","platform":"ruby","checksum":"608f4daab0e06b583b73dcffd3246ea39e78056de31630286b0cf97af7d6956b"},

View File

@@ -185,20 +185,20 @@ GEM
awesome_print (1.9.2)
awrence (1.1.1)
aws-eventstream (1.2.0)
aws-partitions (1.644.0)
aws-partitions (1.651.0)
aws-sdk-cloudformation (1.41.0)
aws-sdk-core (~> 3, >= 3.99.0)
aws-sigv4 (~> 1.1)
aws-sdk-core (3.159.0)
aws-sdk-core (3.166.0)
aws-eventstream (~> 1, >= 1.0.2)
aws-partitions (~> 1, >= 1.525.0)
aws-sigv4 (~> 1.1)
aws-partitions (~> 1, >= 1.651.0)
aws-sigv4 (~> 1.5)
jmespath (~> 1, >= 1.6.1)
aws-sdk-kms (1.57.0)
aws-sdk-core (~> 3, >= 3.127.0)
aws-sdk-kms (1.59.0)
aws-sdk-core (~> 3, >= 3.165.0)
aws-sigv4 (~> 1.1)
aws-sdk-s3 (1.114.0)
aws-sdk-core (~> 3, >= 3.127.0)
aws-sdk-s3 (1.117.1)
aws-sdk-core (~> 3, >= 3.165.0)
aws-sdk-kms (~> 1)
aws-sigv4 (~> 1.4)
aws-sigv4 (1.5.1)
@@ -1558,8 +1558,8 @@ DEPENDENCIES
autoprefixer-rails (= 10.2.5.1)
awesome_print
aws-sdk-cloudformation (~> 1)
aws-sdk-core (~> 3.159.0)
aws-sdk-s3 (~> 1.114.0)
aws-sdk-core (~> 3.166.0)
aws-sdk-s3 (~> 1.117.1)
babosa (~> 1.0.4)
base32 (~> 0.3.0)
batch-loader (~> 2.0.1)

View File

@@ -0,0 +1,54 @@
import { GlButton } from '@gitlab/ui';
import { MOCK_HTML } from '../../../../../../spec/frontend/vue_shared/components/markdown_drawer/mock_data';
import MarkdownDrawer from './markdown_drawer.vue';
export default {
component: MarkdownDrawer,
title: 'vue_shared/markdown_drawer',
parameters: {
mirage: {
timing: 1000,
handlers: {
get: {
'/help/user/search/global_search/advanced_search_syntax.json': [
200,
{},
{ html: MOCK_HTML },
],
},
},
},
},
};
const createStory = ({ ...options }) => (_, { argTypes }) => ({
components: { MarkdownDrawer, GlButton },
props: Object.keys(argTypes),
data() {
return {
render: false,
};
},
methods: {
toggleDrawer() {
this.$refs.drawer.toggleDrawer();
},
},
mounted() {
window.requestAnimationFrame(() => {
this.render = true;
});
},
template: `
<div v-if="render">
<gl-button @click="toggleDrawer">Open Drawer</gl-button>
<markdown-drawer
:documentPath="'user/search/global_search/advanced_search_syntax.json'"
ref="drawer"
/>
</div>
`,
...options,
});
export const Default = createStory({});

View File

@@ -0,0 +1,117 @@
<script>
import { GlSafeHtmlDirective as SafeHtml, GlDrawer, GlAlert, GlSkeletonLoader } from '@gitlab/ui';
import $ from 'jquery';
import '~/behaviors/markdown/render_gfm';
import { s__ } from '~/locale';
import { contentTop } from '~/lib/utils/common_utils';
import { getRenderedMarkdown } from './utils/fetch';
export const cache = {};
export default {
name: 'MarkdownDrawer',
components: {
GlDrawer,
GlAlert,
GlSkeletonLoader,
},
directives: {
SafeHtml,
},
i18n: {
alert: s__('MardownDrawer|Could not fetch help contents.'),
},
props: {
documentPath: {
type: String,
required: true,
},
},
data() {
return {
loading: false,
hasFetchError: false,
title: '',
body: null,
open: false,
};
},
computed: {
drawerOffsetTop() {
return `${contentTop()}px`;
},
},
watch: {
documentPath: {
immediate: true,
handler: 'fetchMarkdown',
},
open(open) {
if (open && this.body) {
this.renderGLFM();
}
},
},
methods: {
async fetchMarkdown() {
const cached = cache[this.documentPath];
this.hasFetchError = false;
this.title = '';
if (cached) {
this.title = cached.title;
this.body = cached.body;
if (this.open) {
this.renderGLFM();
}
} else {
this.loading = true;
const { body, title, hasFetchError } = await getRenderedMarkdown(this.documentPath);
this.title = title;
this.body = body;
this.loading = false;
this.hasFetchError = hasFetchError;
if (this.open) {
this.renderGLFM();
}
cache[this.documentPath] = { title, body };
}
},
renderGLFM() {
this.$nextTick(() => {
$(this.$refs['content-element']).renderGFM();
});
},
closeDrawer() {
this.open = false;
},
toggleDrawer() {
this.open = !this.open;
},
openDrawer() {
this.open = true;
},
},
safeHtmlConfig: {
ADD_TAGS: ['copy-code'],
},
};
</script>
<template>
<gl-drawer :header-height="drawerOffsetTop" :open="open" header-sticky @close="closeDrawer">
<template #title>
<h4 data-testid="title-element" class="gl-m-0">{{ title }}</h4>
</template>
<template #default>
<div v-if="hasFetchError">
<gl-alert :dismissible="false" variant="danger">{{ $options.i18n.alert }}</gl-alert>
</div>
<gl-skeleton-loader v-else-if="loading" />
<div
v-else
ref="content-element"
v-safe-html:[$options.safeHtmlConfig]="body"
class="md"
></div>
</template>
</gl-drawer>
</template>

View File

@@ -0,0 +1,32 @@
import * as Sentry from '@sentry/browser';
import { helpPagePath } from '~/helpers/help_page_helper';
import axios from '~/lib/utils/axios_utils';
export const splitDocument = (htmlString) => {
const htmlDocument = new DOMParser().parseFromString(htmlString, 'text/html');
const title = htmlDocument.querySelector('h1')?.innerText;
htmlDocument.querySelector('h1')?.remove();
return {
title,
body: htmlDocument.querySelector('body').innerHTML.toString(),
};
};
export const getRenderedMarkdown = (documentPath) => {
return axios
.get(helpPagePath(documentPath))
.then(({ data }) => {
const { body, title } = splitDocument(data.html);
return {
body,
title,
hasFetchError: false,
};
})
.catch((e) => {
Sentry.captureException(e);
return {
hasFetchError: true,
};
});
};

View File

@@ -33,9 +33,8 @@ module Ci
end
def runner_variables
stop_expanding_file_vars = ::Feature.enabled?(:ci_stop_expanding_file_vars_for_runners, project)
variables
.sort_and_expand_all(keep_undefined: true, expand_file_vars: !stop_expanding_file_vars, project: project)
.sort_and_expand_all(keep_undefined: true, expand_file_vars: false, project: project)
.to_runner_variables
end
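For illustration, here is a minimal, self-contained Ruby sketch of what hard-coding `expand_file_vars: false` means in practice. This is not GitLab's implementation; the toy expander and the variable names below are made up. With file-variable expansion disabled, a reference to a file-type variable is left as a literal `$NAME` token for the runner to resolve, while regular variables are still interpolated.

# Toy expander, illustrative only: mimics the effect of expand_file_vars: false.
VARIABLES = {
  'KNOWN_FILE' => { value: 'contents of the file', file: true },
  'REGULAR'    => { value: 'plain value', file: false }
}.freeze

def expand(value, expand_file_vars:)
  value.gsub(/\$(\w+)/) do |match|
    var = VARIABLES[Regexp.last_match(1)]
    next match if var.nil?                        # undefined vars stay as-is (keep_undefined: true)
    next match if var[:file] && !expand_file_vars # file variables stay as literal references
    var[:value]
  end
end

expand('key=$KNOWN_FILE other=$REGULAR missing=$UNDEFINED', expand_file_vars: false)
# => "key=$KNOWN_FILE other=plain value missing=$UNDEFINED"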

View File

@@ -1,4 +1,4 @@
- if can?(current_user, :create_deployment, deployment)
- if can?(current_user, :read_deployment, deployment)
- actions = deployment.manual_actions
- if actions.present?
.btn-group
@@ -8,7 +8,7 @@
= sprite_icon('chevron-down')
%ul.dropdown-menu.dropdown-menu-right
- actions.each do |action|
- next unless can?(current_user, :update_build, action)
- next unless can?(current_user, :play_job, action)
%li
= link_to [:play, @project, action], method: :post, rel: 'nofollow' do
%span= action.name

View File

@@ -1,8 +0,0 @@
---
name: ci_stop_expanding_file_vars_for_runners
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/94198
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/369907
milestone: '15.3'
type: development
group: group::pipeline authoring
default_enabled: true

View File

@@ -145,6 +145,7 @@ help benefit translation. For example, we:
- Avoid words that can be translated incorrectly, like:
- [since and because](word_list.md#since)
- [once and after](word_list.md#once)
- [it](word_list.md#it)
- Avoid [-ing](word_list.md#-ing-words) words.
[The GitLab voice](#the-gitlab-voice) dictates that we write clearly and directly,

View File

@@ -612,6 +612,21 @@
Use lowercase for **issue weights**.
## it
When you use the word **it**, ensure the word it refers to is obvious.
If it's not obvious, repeat the word rather than using **it**.
Use:
- The field returns a connection. The field accepts four arguments.
Instead of:
- The field returns a connection. It accepts four arguments.
See also [this, these, that, those](#this-these-that-those).
## job
Do not use **build** to be synonymous with **job**. A job is defined in the `.gitlab-ci.yml` file and runs as part of a pipeline.

View File

@@ -24768,6 +24768,9 @@
msgid "March"
msgstr ""
msgid "MardownDrawer|Could not fetch help contents."
msgstr ""
msgid "Mark as done"
msgstr ""

View File

@@ -96,9 +96,14 @@ module QA
return {} unless Env.admin_personal_access_token || Env.personal_access_token
client = Env.admin_personal_access_token ? API::Client.as_admin : API::Client.new
response = get(API::Request.new(client, '/version').url)
response = get(API::Request.new(client, '/metadata').url)
JSON.parse(response.body, symbolize_names: true)
JSON.parse(response.body, symbolize_names: true).then do |metadata|
{
**metadata.slice(:version, :revision),
kas_version: metadata.dig(:kas, :version)
}.compact
end
rescue StandardError, ArgumentError => e
Logger.error("Failed to attach version info to allure report: #{e}")
{}
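For clarity, a small Ruby sketch of the transformation above, using a made-up `/metadata` payload (the field values are placeholders, not real API output):

# Placeholder payload shaped like GET /metadata; only the fields used above matter here.
metadata = {
  version: '15.6.0-pre',
  revision: '1f992463a9',
  enterprise: true,
  kas: { enabled: true, version: '15.6.0-rc1' }
}

{
  **metadata.slice(:version, :revision),
  kas_version: metadata.dig(:kas, :version)
}.compact
# => {:version=>"15.6.0-pre", :revision=>"1f992463a9", :kas_version=>"15.6.0-rc1"}
# If kas.version were nil, compact would drop the kas_version key entirely.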

View File

@@ -1,10 +1,7 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Verify', :runner, product_group: :pipeline_authoring, feature_flag: {
name: 'ci_stop_expanding_file_vars_for_runners',
scope: :project
} do
RSpec.describe 'Verify', :runner, product_group: :pipeline_authoring do
describe 'Pipeline with project file variables' do
let(:executor) { "qa-runner-#{Faker::Alphanumeric.alphanumeric(number: 8)}" }
@@ -14,7 +11,7 @@ module QA
end
end
let(:runner) do
let!(:runner) do
Resource::Runner.fabricate! do |runner|
runner.project = project
runner.name = executor
@@ -60,12 +57,18 @@ module QA
end
end
before do
add_file_variables
add_ci_file
trigger_pipeline
wait_for_pipeline
end
after do
runner.remove_via_api!
end
shared_examples 'variables are read correctly' do
it 'shows in job log accordingly' do
it 'shows in job log accordingly', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/370791' do
job = Resource::Job.fabricate_via_api! do |job|
job.project = project
job.id = project.job_by_name('test')[:id]
@@ -79,41 +82,6 @@ module QA
expect(trace).to have_content('Will read private key from hello, this is test')
end
end
end
# FF does not change current behavior
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/94198#note_1057609893
#
# TODO: Remove when FF is removed
# TODO: Archive testcase issue when FF is removed
# Rollout issue: https://gitlab.com/gitlab-org/gitlab/-/issues/369907
context 'when FF is on', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/370787' do
before do
Runtime::Feature.enable(:ci_stop_expanding_file_vars_for_runners, project: project)
runner
add_file_variables
add_ci_file
trigger_pipeline
wait_for_pipeline
end
it_behaves_like 'variables are read correctly'
end
# TODO: Refactor when FF is removed
# TODO: Update testcase issue title and description to not refer to FF status
context 'when FF is off', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/370791' do
before do
runner
add_file_variables
add_ci_file
trigger_pipeline
wait_for_pipeline
end
it_behaves_like 'variables are read correctly'
end
private

View File

@@ -0,0 +1,29 @@
# frozen_string_literal: true
require 'gitlab'
require_relative 'default_options'
class CreateIssue
def initialize(options)
@project = options.fetch(:project)
# Force the token to be a string so that if api_token is nil, it's set to '',
# allowing unauthenticated requests (for forks).
api_token = options.delete(:api_token).to_s
warn "No API token given." if api_token.empty?
@client = Gitlab.client(
endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
private_token: api_token
)
end
def execute(issue_data)
client.create_issue(project, issue_data.delete(:title), issue_data)
end
private
attr_reader :project, :client
end
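A hypothetical usage sketch of the helper above (the project path, token variable, and issue fields are placeholders; the real caller is the incident script added later in this commit):

# Assumes the gitlab gem is available and the token has the `api` scope.
issue = CreateIssue.new(
  project: 'group/incident-project',         # placeholder project path
  api_token: ENV['INCIDENT_PROJECT_TOKEN']   # placeholder token variable
).execute(
  issue_type: 'incident',
  title: 'Example broken master incident',
  description: 'Filled in by the caller',
  labels: 'master:broken'
)

puts issue.web_url # the gitlab gem returns an ObjectifiedHash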

View File

@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'gitlab'
require 'optparse'
require_relative 'default_options'
class PipelineFailedJobs

View File

@@ -0,0 +1,172 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
require 'optparse'
require 'json'
require_relative 'api/pipeline_failed_jobs'
require_relative 'api/create_issue'
class CreatePipelineFailureIncident
DEFAULT_OPTIONS = {
project: nil,
incident_json_file: 'incident.json'
}.freeze
DEFAULT_LABELS = ['Engineering Productivity'].freeze
def initialize(options)
@project = options.delete(:project)
@api_token = options.delete(:api_token)
end
def execute
payload = {
issue_type: 'incident',
title: title,
description: description,
labels: incident_labels
}
CreateIssue.new(project: project, api_token: api_token).execute(payload)
end
private
attr_reader :project, :api_token
def failed_jobs
@failed_jobs ||= PipelineFailedJobs.new(API::DEFAULT_OPTIONS.dup.merge(exclude_allowed_to_fail_jobs: true)).execute
end
def now
@now ||= Time.now.utc
end
def title
"#{now.strftime('%A %F %R UTC')} - `#{ENV['CI_PROJECT_PATH']}` broken `#{ENV['CI_COMMIT_REF_NAME']}` " \
"with #{failed_jobs.size} failed jobs"
end
def description
<<~MARKDOWN
## #{project_link} pipeline #{pipeline_link} failed
**Branch: #{branch_link}**
**Commit: #{commit_link}**
**Triggered by** #{triggered_by_link} • **Source:** #{source} • **Duration:** #{pipeline_duration} minutes
**Failed jobs (#{failed_jobs.size}):**
#{failed_jobs_list}
### General guidelines
Follow the [Broken `master` handbook guide](https://about.gitlab.com/handbook/engineering/workflow/#broken-master).
### Investigation
**Be sure to fill the `Timeline` for this incident.**
1. If the failure is new, and looks like a potential flaky failure, you can retry the failing job.
Make sure to mention the retry in the `Timeline` and leave a link to the retried job.
1. If the failure looks like a broken `master`, communicate the broken `master` in Slack using the "Broadcast Master Broken" workflow:
- Click the Shortcut lightning bolt icon in the `#master-broken` channel and select "Broadcast Master Broken".
- Click "Continue the broadcast" after the automated message in `#master-broken`.
### Pre-resolution
If you believe that there's an easy resolution by either:
- Reverting a particular merge request. Make sure to add the ~"pipeline:revert" label in that case, to speed up the revert pipeline.
- Making a quick fix (for example, one line or a few similar simple changes in a few lines).
You can create a merge request, assign it to any available maintainer, and ping the people who were involved in or related to the introduction of the failure.
Additionally, a message can be posted in `#backend_maintainers` or `#frontend_maintainers` to get a maintainer to take a look at the fix ASAP.
### Resolution
Follow [the Resolution steps from the handbook](https://about.gitlab.com/handbook/engineering/workflow/#responsibilities-of-the-resolution-dri).
MARKDOWN
end
def incident_labels
master_broken_label =
if ENV['CI_PROJECT_NAME'] == 'gitlab-foss'
'master:foss-broken'
else
'master:broken'
end
DEFAULT_LABELS.dup << master_broken_label
end
def pipeline_link
"[##{ENV['CI_PIPELINE_ID']}](#{ENV['CI_PIPELINE_URL']})"
end
def branch_link
"[`#{ENV['CI_COMMIT_REF_NAME']}`](#{ENV['CI_PROJECT_URL']}/-/commits/#{ENV['CI_COMMIT_REF_NAME']})"
end
def pipeline_duration
((Time.now - Time.parse(ENV['CI_PIPELINE_CREATED_AT'])) / 60.to_f).round(2)
end
def commit_link
"[#{ENV['CI_COMMIT_TITLE']}](#{ENV['CI_PROJECT_URL']}/-/commit/#{ENV['CI_COMMIT_SHA']})"
end
def source
"`#{ENV['CI_PIPELINE_SOURCE']}`"
end
def project_link
"[#{ENV['CI_PROJECT_PATH']}](#{ENV['CI_PROJECT_URL']})"
end
def triggered_by_link
"[#{ENV['GITLAB_USER_NAME']}](#{ENV['CI_SERVER_URL']}/#{ENV['GITLAB_USER_LOGIN']})"
end
def failed_jobs_list_for_title
failed_jobs.map(&:name).join(', ')
end
def failed_jobs_list
failed_jobs.map { |job| "- [#{job.name}](#{job.web_url})" }.join("\n")
end
end
if $PROGRAM_NAME == __FILE__
options = CreatePipelineFailureIncident::DEFAULT_OPTIONS.dup
OptionParser.new do |opts|
opts.on("-p", "--project PROJECT", String, "Project where to create the incident (defaults to "\
"`#{CreatePipelineFailureIncident::DEFAULT_OPTIONS[:project]}`)") do |value|
options[:project] = value
end
opts.on("-f", "--incident-json-file file_path", String, "Path to a file where to save the incident JSON data "\
"(defaults to `#{CreatePipelineFailureIncident::DEFAULT_OPTIONS[:incident_json_file]}`)") do |value|
options[:incident_json_file] = value
end
opts.on("-t", "--api-token API_TOKEN", String, "A valid Project token with the `Reporter` role and `api` scope "\
"to create the incident") do |value|
options[:api_token] = value
end
opts.on("-h", "--help", "Prints this help") do
puts opts
exit
end
end.parse!
incident_json_file = options.delete(:incident_json_file)
CreatePipelineFailureIncident.new(options).execute.tap do |incident|
File.write(incident_json_file, JSON.pretty_generate(incident.to_h)) if incident_json_file
end
end

View File

@@ -2,21 +2,23 @@
# frozen_string_literal: true
require 'optparse'
require 'json'
require_relative 'api/pipeline_failed_jobs'
finder_options = API::DEFAULT_OPTIONS.dup.merge(exclude_allowed_to_fail_jobs: true)
failed_jobs = PipelineFailedJobs.new(finder_options).execute
class GenerateFailedPipelineSlackMessage
DEFAULT_OPTIONS = {
failed_pipeline_slack_message_file: 'failed_pipeline_slack_message.json',
incident_json_file: 'incident.json'
}.freeze
class SlackReporter
DEFAULT_FAILED_PIPELINE_REPORT_FILE = 'failed_pipeline_report.json'
def initialize(failed_jobs)
@failed_jobs = failed_jobs
@failed_pipeline_report_file = ENV.fetch('FAILED_PIPELINE_REPORT_FILE', DEFAULT_FAILED_PIPELINE_REPORT_FILE)
def initialize(options)
@incident_json_file = options.delete(:incident_json_file)
end
def report
payload = {
def execute
{
channel: ENV['SLACK_CHANNEL'],
username: "Failed pipeline reporter",
icon_emoji: ":boom:",
@@ -27,33 +29,36 @@ class SlackReporter
text: {
type: "mrkdwn",
text: "*#{title}*"
},
accessory: {
type: "button",
text: {
type: "plain_text",
text: incident_button_text
},
url: incident_button_link
}
},
{
type: "section",
fields: [
{
text: {
type: "mrkdwn",
text: "*Commit*\n#{commit_link}"
},
{
type: "mrkdwn",
text: "*Triggered by*\n#{triggered_by_link}"
text: "*Branch*: #{branch_link}"
}
]
},
{
type: "section",
fields: [
{
text: {
type: "mrkdwn",
text: "*Source*\n#{source} from #{project_link}"
text: "*Commit*: #{commit_link}"
}
},
{
type: "section",
text: {
type: "mrkdwn",
text: "*Duration*\n#{pipeline_duration} minutes"
text: "*Triggered by* #{triggered_by_link} • *Source:* #{source} • *Duration:* #{pipeline_duration} minutes"
}
]
},
{
type: "section",
@@ -64,16 +69,41 @@ class SlackReporter
}
]
}
File.write(failed_pipeline_report_file, JSON.pretty_generate(payload))
end
private
attr_reader :failed_jobs, :failed_pipeline_report_file
attr_reader :incident_json_file
def failed_jobs
@failed_jobs ||= PipelineFailedJobs.new(API::DEFAULT_OPTIONS.dup.merge(exclude_allowed_to_fail_jobs: true)).execute
end
def title
"Pipeline #{pipeline_link} for #{branch_link} failed"
"#{project_link} pipeline #{pipeline_link} failed"
end
def incident_exist?
return @incident_exist if defined?(@incident_exist)
@incident_exist = File.exist?(incident_json_file)
end
def incident_button_text
if incident_exist?
'View incident'
else
'Create incident'
end
end
def incident_button_link
if incident_exist?
JSON.parse(File.read(incident_json_file))['web_url']
else
"#{ENV['CI_SERVER_URL']}/#{ENV['BROKEN_MASTER_INCIDENTS_PROJECT']}/-/issues/new?" \
"issuable_template=incident&issue%5Bissue_type%5D=incident"
end
end
def pipeline_link
@@ -101,7 +131,7 @@ class SlackReporter
end
def project_link
"<#{ENV['CI_PROJECT_URL']}|#{ENV['CI_PROJECT_NAME']}>"
"<#{ENV['CI_PROJECT_URL']}|#{ENV['CI_PROJECT_PATH']}>"
end
def triggered_by_link
@@ -113,4 +143,33 @@ class SlackReporter
end
end
SlackReporter.new(failed_jobs).report
if $PROGRAM_NAME == __FILE__
options = GenerateFailedPipelineSlackMessage::DEFAULT_OPTIONS.dup
OptionParser.new do |opts|
opts.on("-i", "--incident-json-file file_path", String, "Path to a file where the incident JSON data "\
"can be found (defaults to "\
"`#{GenerateFailedPipelineSlackMessage::DEFAULT_OPTIONS[:incident_json_file]}`)") do |value|
options[:incident_json_file] = value
end
opts.on("-f", "--failed-pipeline-slack-message-file file_path", String, "Path to a file where to save the Slack "\
"message (defaults to "\
"`#{GenerateFailedPipelineSlackMessage::DEFAULT_OPTIONS[:failed_pipeline_slack_message_file]}`)") do |value|
options[:failed_pipeline_slack_message_file] = value
end
opts.on("-h", "--help", "Prints this help") do
puts opts
exit
end
end.parse!
failed_pipeline_slack_message_file = options.delete(:failed_pipeline_slack_message_file)
GenerateFailedPipelineSlackMessage.new(options).execute.tap do |message_payload|
if failed_pipeline_slack_message_file
File.write(failed_pipeline_slack_message_file, JSON.pretty_generate(message_payload))
end
end
end
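Taken together, the two scripts above communicate through the incident JSON artifact: create-pipeline-failure-incident.rb writes it, and generate-failed-pipeline-slack-message.rb reads `web_url` back from it to decide between the "View incident" and "Create incident" buttons. A minimal Ruby sketch of that handoff (the file name matches the scripts' default; the issue data is a placeholder):

require 'json'

# What create-pipeline-failure-incident.rb writes (shape assumed from the scripts above).
incident = { 'iid' => 123, 'web_url' => 'https://gitlab.example.com/group/project/-/issues/123' }
File.write('incident.json', JSON.pretty_generate(incident))

# What generate-failed-pipeline-slack-message.rb reads back for the incident button.
if File.exist?('incident.json')
  puts JSON.parse(File.read('incident.json'))['web_url']
end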

View File

@@ -0,0 +1,205 @@
import { GlDrawer, GlAlert, GlSkeletonLoader } from '@gitlab/ui';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import MarkdownDrawer, { cache } from '~/vue_shared/components/markdown_drawer/markdown_drawer.vue';
import { getRenderedMarkdown } from '~/vue_shared/components/markdown_drawer/utils/fetch';
import { contentTop } from '~/lib/utils/common_utils';
jest.mock('~/vue_shared/components/markdown_drawer/utils/fetch', () => ({
getRenderedMarkdown: jest.fn().mockReturnValue({
title: 'test title test',
body: `<div id="content-body">
<div class="documentation md gl-mt-3">
test body
</div>
</div>`,
}),
}));
jest.mock('~/lib/utils/common_utils', () => ({
contentTop: jest.fn(),
}));
describe('MarkdownDrawer', () => {
let wrapper;
const defaultProps = {
documentPath: 'user/search/global_search/advanced_search_syntax.json',
};
const createComponent = (props) => {
wrapper = shallowMountExtended(MarkdownDrawer, {
propsData: {
...defaultProps,
...props,
},
});
};
afterEach(() => {
wrapper.destroy();
wrapper = null;
Object.keys(cache).forEach((key) => delete cache[key]);
});
const findDrawer = () => wrapper.findComponent(GlDrawer);
const findAlert = () => wrapper.findComponent(GlAlert);
const findSkeleton = () => wrapper.findComponent(GlSkeletonLoader);
const findDrawerTitle = () => wrapper.findComponent('[data-testid="title-element"]');
const findDrawerBody = () => wrapper.findComponent({ ref: 'content-element' });
describe('component', () => {
beforeEach(() => {
createComponent();
});
it('renders correctly', () => {
expect(findDrawer().exists()).toBe(true);
expect(findDrawerTitle().text()).toBe('test title test');
expect(findDrawerBody().text()).toBe('test body');
});
});
describe.each`
hasNavbar | navbarHeight
${false} | ${0}
${true} | ${100}
`('computes offsetTop', ({ hasNavbar, navbarHeight }) => {
beforeEach(() => {
global.document.querySelector = jest.fn(() =>
hasNavbar
? {
dataset: {
page: 'test',
},
}
: undefined,
);
contentTop.mockReturnValue(navbarHeight);
createComponent();
});
afterEach(() => {
contentTop.mockClear();
});
it(`computes offsetTop ${hasNavbar ? 'with' : 'without'} .navbar-gitlab`, () => {
expect(findDrawer().attributes('headerheight')).toBe(`${navbarHeight}px`);
});
});
describe('watcher', () => {
let renderGLFMSpy;
let fetchMarkdownSpy;
beforeEach(async () => {
renderGLFMSpy = jest.spyOn(MarkdownDrawer.methods, 'renderGLFM');
fetchMarkdownSpy = jest.spyOn(MarkdownDrawer.methods, 'fetchMarkdown');
global.document.querySelector = jest.fn(() => ({
getBoundingClientRect: jest.fn(() => ({ bottom: 100 })),
dataset: {
page: 'test',
},
}));
createComponent();
await nextTick();
});
afterEach(() => {
renderGLFMSpy.mockClear();
fetchMarkdownSpy.mockClear();
});
it('for documentPath triggers fetch', async () => {
expect(fetchMarkdownSpy).toHaveBeenCalledTimes(1);
await wrapper.setProps({ documentPath: '/test/me' });
await nextTick();
expect(fetchMarkdownSpy).toHaveBeenCalledTimes(2);
});
it('for open triggers renderGLFM', async () => {
wrapper.vm.fetchMarkdown();
wrapper.vm.openDrawer();
await nextTick();
expect(renderGLFMSpy).toHaveBeenCalled();
});
});
describe('Markdown fetching', () => {
let renderGLFMSpy;
beforeEach(async () => {
renderGLFMSpy = jest.spyOn(MarkdownDrawer.methods, 'renderGLFM');
createComponent();
await nextTick();
});
afterEach(() => {
renderGLFMSpy.mockClear();
});
it('fetches the Markdown and caches it', async () => {
expect(getRenderedMarkdown).toHaveBeenCalledTimes(1);
expect(Object.keys(cache)).toHaveLength(1);
});
it('when the document changes, fetches it and caches it as well', async () => {
expect(getRenderedMarkdown).toHaveBeenCalledTimes(1);
expect(Object.keys(cache)).toHaveLength(1);
await wrapper.setProps({ documentPath: '/test/me2' });
await nextTick();
expect(getRenderedMarkdown).toHaveBeenCalledTimes(2);
expect(Object.keys(cache)).toHaveLength(2);
});
it('when re-using an already fetched document, gets it from the cache', async () => {
await wrapper.setProps({ documentPath: '/test/me2' });
await nextTick();
expect(getRenderedMarkdown).toHaveBeenCalledTimes(2);
expect(Object.keys(cache)).toHaveLength(2);
await wrapper.setProps({ documentPath: defaultProps.documentPath });
await nextTick();
expect(getRenderedMarkdown).toHaveBeenCalledTimes(2);
expect(Object.keys(cache)).toHaveLength(2);
});
});
describe('Markdown fetching returns error', () => {
beforeEach(async () => {
getRenderedMarkdown.mockReturnValue({
hasFetchError: true,
});
createComponent();
await nextTick();
});
afterEach(() => {
getRenderedMarkdown.mockClear();
});
it('shows alert', () => {
expect(findAlert().exists()).toBe(true);
});
});
describe('While Markdown is fetching', () => {
beforeEach(async () => {
getRenderedMarkdown.mockReturnValue(new Promise(() => {}));
createComponent();
});
afterEach(() => {
getRenderedMarkdown.mockClear();
});
it('shows skeleton', async () => {
expect(findSkeleton().exists()).toBe(true);
});
});
});

View File

@@ -0,0 +1,42 @@
export const MOCK_HTML = `<!DOCTYPE html>
<html>
<body>
<div id="content-body">
<h1>test title <strong>test</strong></h1>
<div class="documentation md gl-mt-3">
<a href="../advanced_search.md">Advanced Search</a>
<a href="../advanced_search2.md">Advanced Search2</a>
<h2>test header h2</h2>
<table class="testClass">
<tr>
<td>Emil</td>
<td>Tobias</td>
<td>Linus</td>
</tr>
<tr>
<td>16</td>
<td>14</td>
<td>10</td>
</tr>
</table>
</div>
</div>
</body>
</html>`.replace(/\n/g, '');
export const MOCK_DRAWER_DATA = {
hasFetchError: false,
title: 'test title test',
body: ` <div id="content-body"> <div class="documentation md gl-mt-3"> <a href="../advanced_search.md">Advanced Search</a> <a href="../advanced_search2.md">Advanced Search2</a> <h2>test header h2</h2> <table class="testClass"> <tbody><tr> <td>Emil</td> <td>Tobias</td> <td>Linus</td> </tr> <tr> <td>16</td> <td>14</td> <td>10</td> </tr> </tbody></table> </div> </div>`,
};
export const MOCK_DRAWER_DATA_ERROR = {
hasFetchError: true,
};
export const MOCK_TABLE_DATA_BEFORE = `<head></head><body><h1>test</h1></test><table><tbody><tr><td></td></tr></tbody></table></body>`;
export const MOCK_HTML_DATA_AFTER = {
body: '<table><tbody><tr><td></td></tr></tbody></table>',
title: 'test',
};

View File

@@ -0,0 +1,43 @@
import MockAdapter from 'axios-mock-adapter';
import {
getRenderedMarkdown,
splitDocument,
} from '~/vue_shared/components/markdown_drawer/utils/fetch';
import axios from '~/lib/utils/axios_utils';
import {
MOCK_HTML,
MOCK_DRAWER_DATA,
MOCK_DRAWER_DATA_ERROR,
MOCK_TABLE_DATA_BEFORE,
MOCK_HTML_DATA_AFTER,
} from '../mock_data';
describe('utils/fetch', () => {
let mock;
afterEach(() => {
mock.restore();
});
describe.each`
axiosMock | type | toExpect
${{ code: 200, res: { html: MOCK_HTML } }} | ${'success'} | ${MOCK_DRAWER_DATA}
${{ code: 500, res: null }} | ${'error'} | ${MOCK_DRAWER_DATA_ERROR}
`('process markdown data', ({ axiosMock, type, toExpect }) => {
describe(`if api fetch responds with ${type}`, () => {
beforeEach(() => {
mock = new MockAdapter(axios);
mock.onGet().reply(axiosMock.code, axiosMock.res);
});
it(`should update drawer correctly`, async () => {
expect(await getRenderedMarkdown('/any/path')).toStrictEqual(toExpect);
});
});
});
describe('splitDocument', () => {
it(`should update tables correctly`, () => {
expect(splitDocument(MOCK_TABLE_DATA_BEFORE)).toStrictEqual(MOCK_HTML_DATA_AFTER);
});
});
});

View File

@@ -359,23 +359,6 @@ RSpec.describe Ci::BuildRunnerPresenter do
runner_variables
end
context 'when the FF ci_stop_expanding_file_vars_for_runners is disabled' do
before do
stub_feature_flags(ci_stop_expanding_file_vars_for_runners: false)
end
it 'returns variables with expanded' do
expect(runner_variables).to include(
{ key: 'regular_var', value: 'value 1',
public: false, masked: false },
{ key: 'file_var', value: 'value 2',
public: false, masked: false, file: true },
{ key: 'var_with_variables', value: 'value 3 and value 1 and value 2 and $undefined_var',
public: false, masked: false }
)
end
end
end
end

View File

@@ -3,6 +3,10 @@ import Vue from 'vue';
import { createMockServer } from 'test_helpers/mock_server';
import translateMixin from '~/vue_shared/translate';
// fixing toJSON error
// https://github.com/storybookjs/storybook/issues/14933
Vue.prototype.toJSON = () => {};
const stylesheetsRequireCtx = require.context(
'../../app/assets/stylesheets',
true,