Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-09-08 09:09:10 +00:00
parent 1758f49bb5
commit 5cd68a92fa
78 changed files with 1084 additions and 297 deletions

View File

@ -172,6 +172,8 @@ graphql-schema-dump:
# Disable warnings in browserslist which can break on backports
# https://github.com/browserslist/browserslist/blob/a287ec6/node.js#L367-L384
BROWSERSLIST_IGNORE_OLD_DATA: "true"
before_script:
- *yarn-install
stage: test
eslint-as-if-foss:
@ -181,20 +183,19 @@ eslint-as-if-foss:
- .as-if-foss
needs: []
script:
- *yarn-install
- run_timed_command "yarn run lint:eslint:all"
.jest-base:
extends: .frontend-test-base
script:
- *yarn-install
- run_timed_command "yarn jest --ci --coverage --testSequencer ./scripts/frontend/parallel_ci_sequencer.js"
- run_timed_command "yarn jest:ci"
jest:
extends:
- .jest-base
- .frontend:rules:default-frontend-jobs
- .frontend:rules:jest
needs:
- job: "detect-tests"
- job: "rspec frontend_fixture"
- job: "rspec-ee frontend_fixture"
optional: true
@ -210,12 +211,18 @@ jest:
junit: junit_jest.xml
parallel: 5
jest minimal:
extends:
- jest
- .frontend:rules:jest:minimal
script:
- run_timed_command "yarn jest:ci:minimal"
jest-integration:
extends:
- .frontend-test-base
- .frontend:rules:default-frontend-jobs
script:
- *yarn-install
- run_timed_command "yarn jest:integration --ci"
needs:
- job: "rspec frontend_fixture"
@ -236,7 +243,11 @@ coverage-frontend:
- .default-retry
- .yarn-cache
- .frontend:rules:ee-mr-and-default-branch-only
needs: ["jest"]
needs:
- job: "jest"
optional: true
- job: "jest minimal"
optional: true
stage: post-test
before_script:
- *yarn-install
@ -321,7 +332,6 @@ bundle-size-review:
extends:
- .frontend-test-base
script:
- *yarn-install
- run_timed_command "yarn generate:startup_css"
- yarn check:startup_css
@ -349,7 +359,6 @@ startup-css-check as-if-foss:
- .frontend-test-base
- .storybook-yarn-cache
script:
- *yarn-install # storybook depends on the global webpack config, so we must install global deps.
- *storybook-yarn-install
- yarn run storybook:build

View File

@ -46,6 +46,9 @@
.if-merge-request-title-run-all-rspec: &if-merge-request-title-run-all-rspec
if: '$CI_MERGE_REQUEST_LABELS =~ /pipeline:run-all-rspec/'
.if-merge-request-title-run-all-jest: &if-merge-request-title-run-all-jest
if: '$CI_MERGE_REQUEST_LABELS =~ /pipeline:run-all-jest/'
.if-merge-request-run-decomposed: &if-merge-request-run-decomposed
if: '$CI_MERGE_REQUEST_LABELS =~ /pipeline:run-decomposed/'
@ -365,6 +368,14 @@
- "danger/**/*"
- "tooling/danger/**/*"
.core-frontend-patterns: &core-frontend-patterns
- "{package.json,yarn.lock}"
- "babel.config.js"
- "jest.config.{base,integration,unit}.js"
- "config/helpers/**/*.js"
- "vendor/assets/javascripts/**/*"
- "{,ee}/app/assets/**/*.graphql"
################
# Shared rules #
################
@ -525,6 +536,37 @@
- <<: *if-merge-request
changes: *ci-patterns
.frontend:rules:jest:
rules:
- <<: *if-merge-request-title-run-all-jest
- <<: *if-default-refs
changes: *core-frontend-patterns
- <<: *if-merge-request
changes: *ci-patterns
- <<: *if-automated-merge-request
changes: *code-backstage-patterns
- <<: *if-merge-request-not-approved
when: never
- <<: *if-default-refs
changes: *code-backstage-patterns
.frontend:rules:jest:minimal:
rules:
- <<: *if-merge-request-approved
when: never
- <<: *if-automated-merge-request
when: never
- <<: *if-merge-request-title-run-all-jest
when: never
- <<: *if-default-refs
changes: *core-frontend-patterns
when: never
- <<: *if-merge-request
changes: *ci-patterns
when: never
- <<: *if-merge-request
changes: *code-backstage-patterns
.frontend:rules:eslint-as-if-foss:
rules:
- <<: *if-not-ee

View File

@ -70,11 +70,16 @@ verify-tests-yml:
- install_gitlab_gem
- install_tff_gem
- retrieve_tests_mapping
- 'if [ -n "$CI_MERGE_REQUEST_IID" ]; then tooling/bin/find_tests ${MATCHED_TESTS_FILE}; fi'
- 'if [ -n "$CI_MERGE_REQUEST_IID" ]; then echo "test files affected: $(cat $MATCHED_TESTS_FILE)"; fi'
- |
if [ -n "$CI_MERGE_REQUEST_IID" ]; then
tooling/bin/find_changes ${CHANGES_FILE};
tooling/bin/find_tests ${CHANGES_FILE} ${MATCHED_TESTS_FILE};
echo "related rspec tests: $(cat $MATCHED_TESTS_FILE)";
fi
artifacts:
expire_in: 7d
paths:
- ${CHANGES_FILE}
- ${MATCHED_TESTS_FILE}
detect-tests:
@ -83,6 +88,7 @@ detect-tests:
- .rails:rules:detect-tests
variables:
RSPEC_TESTS_MAPPING_ENABLED: "true"
CHANGES_FILE: tmp/changed_files.txt
MATCHED_TESTS_FILE: tmp/matching_tests.txt
detect-tests as-if-foss:
@ -91,6 +97,7 @@ detect-tests as-if-foss:
- .rails:rules:detect-tests
- .as-if-foss
variables:
CHANGES_FILE: tmp/changed_foss_files.txt
MATCHED_TESTS_FILE: tmp/matching_foss_tests.txt
before_script:
- '[ "$FOSS_ONLY" = "1" ] && rm -rf ee/ qa/spec/ee/ qa/qa/specs/features/ee/ qa/qa/ee/ qa/qa/ee.rb'

View File

@ -1,14 +1,195 @@
<script>
import { GlButton, GlButtonGroup, GlModal, GlModalDirective, GlSprintf } from '@gitlab/ui';
import GlCountdown from '~/vue_shared/components/gl_countdown.vue';
import {
ACTIONS_DOWNLOAD_ARTIFACTS,
ACTIONS_START_NOW,
ACTIONS_UNSCHEDULE,
ACTIONS_PLAY,
ACTIONS_RETRY,
CANCEL,
GENERIC_ERROR,
JOB_SCHEDULED,
PLAY_JOB_CONFIRMATION_MESSAGE,
RUN_JOB_NOW_HEADER_TITLE,
} from '../constants';
import eventHub from '../event_hub';
import cancelJobMutation from '../graphql/mutations/job_cancel.mutation.graphql';
import playJobMutation from '../graphql/mutations/job_play.mutation.graphql';
import retryJobMutation from '../graphql/mutations/job_retry.mutation.graphql';
import unscheduleJobMutation from '../graphql/mutations/job_unschedule.mutation.graphql';
export default {
ACTIONS_DOWNLOAD_ARTIFACTS,
ACTIONS_START_NOW,
ACTIONS_UNSCHEDULE,
ACTIONS_PLAY,
ACTIONS_RETRY,
CANCEL,
GENERIC_ERROR,
PLAY_JOB_CONFIRMATION_MESSAGE,
RUN_JOB_NOW_HEADER_TITLE,
jobRetry: 'jobRetry',
jobCancel: 'jobCancel',
jobPlay: 'jobPlay',
jobUnschedule: 'jobUnschedule',
playJobModalId: 'play-job-modal',
components: {
GlButton,
GlButtonGroup,
GlCountdown,
GlModal,
GlSprintf,
},
directives: {
GlModalDirective,
},
inject: {
admin: {
default: false,
},
},
props: {
job: {
type: Object,
required: true,
},
},
computed: {
artifactDownloadPath() {
return this.job.artifacts?.nodes[0]?.downloadPath;
},
canReadJob() {
return this.job.userPermissions?.readBuild;
},
isActive() {
return this.job.active;
},
manualJobPlayable() {
return this.job.playable && !this.admin && this.job.manualJob;
},
isRetryable() {
return this.job.retryable;
},
isScheduled() {
return this.job.status === JOB_SCHEDULED;
},
scheduledAt() {
return this.job.scheduledAt;
},
currentJobActionPath() {
return this.job.detailedStatus?.action?.path;
},
currentJobMethod() {
return this.job.detailedStatus?.action?.method;
},
shouldDisplayArtifacts() {
return this.job.userPermissions?.readJobArtifacts && this.job.artifacts?.nodes.length > 0;
},
},
methods: {
async postJobAction(name, mutation) {
try {
const {
data: {
[name]: { errors },
},
} = await this.$apollo.mutate({
mutation,
variables: { id: this.job.id },
});
if (errors.length > 0) {
this.reportFailure();
} else {
eventHub.$emit('jobActionPerformed');
}
} catch {
this.reportFailure();
}
},
reportFailure() {
const toastProps = {
text: this.$options.GENERIC_ERROR,
variant: 'danger',
};
this.$toast.show(toastProps.text, {
variant: toastProps.variant,
});
},
cancelJob() {
this.postJobAction(this.$options.jobCancel, cancelJobMutation);
},
retryJob() {
this.postJobAction(this.$options.jobRetry, retryJobMutation);
},
playJob() {
this.postJobAction(this.$options.jobPlay, playJobMutation);
},
unscheduleJob() {
this.postJobAction(this.$options.jobUnschedule, unscheduleJobMutation);
},
},
};
</script>
<template>
<div></div>
<gl-button-group>
<gl-button
v-if="shouldDisplayArtifacts"
icon="download"
:title="$options.ACTIONS_DOWNLOAD_ARTIFACTS"
:href="artifactDownloadPath"
rel="nofollow"
download
data-testid="download-artifacts"
/>
<template v-if="canReadJob">
<gl-button v-if="isActive" icon="cancel" :title="$options.CANCEL" @click="cancelJob()" />
<template v-else-if="isScheduled">
<gl-button icon="planning" disabled data-testid="countdown">
<gl-countdown :end-date-string="scheduledAt" />
</gl-button>
<gl-button
v-gl-modal-directive="$options.playJobModalId"
icon="play"
:title="$options.ACTIONS_START_NOW"
data-testid="play-scheduled"
/>
<gl-modal
:modal-id="$options.playJobModalId"
:title="$options.RUN_JOB_NOW_HEADER_TITLE"
@primary="playJob()"
>
<gl-sprintf :message="$options.PLAY_JOB_CONFIRMATION_MESSAGE">
<template #job_name>{{ job.name }}</template>
</gl-sprintf>
</gl-modal>
<gl-button
icon="time-out"
:title="$options.ACTIONS_UNSCHEDULE"
data-testid="unschedule"
@click="unscheduleJob()"
/>
</template>
<template v-else>
<!--Note: This is the manual job play button -->
<gl-button
v-if="manualJobPlayable"
icon="play"
:title="$options.ACTIONS_PLAY"
data-testid="play"
@click="playJob()"
/>
<gl-button
v-else-if="isRetryable"
icon="repeat"
:title="$options.ACTIONS_RETRY"
:method="currentJobMethod"
data-testid="retry"
@click="retryJob()"
/>
</template>
</template>
</gl-button-group>
</template>

View File

@ -1,3 +1,5 @@
import { s__, __ } from '~/locale';
export const GRAPHQL_PAGE_SIZE = 30;
export const initialPaginationState = {
@ -7,3 +9,24 @@ export const initialPaginationState = {
first: GRAPHQL_PAGE_SIZE,
last: null,
};
/* Error constants */
export const POST_FAILURE = 'post_failure';
export const DEFAULT = 'default';
/* Job Status Constants */
export const JOB_SCHEDULED = 'SCHEDULED';
/* i18n */
export const ACTIONS_DOWNLOAD_ARTIFACTS = __('Download artifacts');
export const ACTIONS_START_NOW = s__('DelayedJobs|Start now');
export const ACTIONS_UNSCHEDULE = s__('DelayedJobs|Unschedule');
export const ACTIONS_PLAY = __('Play');
export const ACTIONS_RETRY = __('Retry');
export const CANCEL = __('Cancel');
export const GENERIC_ERROR = __('An error occurred while making the request.');
export const PLAY_JOB_CONFIRMATION_MESSAGE = s__(
`DelayedJobs|Are you sure you want to run %{job_name} immediately? This job will run automatically after its timer finishes.`,
);
export const RUN_JOB_NOW_HEADER_TITLE = s__('DelayedJobs|Run the delayed job now?');

View File

@ -0,0 +1,3 @@
import createEventHub from '~/helpers/event_hub_factory';
export default createEventHub();

View File

@ -0,0 +1,3 @@
fragment Job on CiJob {
id
}

View File

@ -0,0 +1,10 @@
#import "../fragments/job.fragment.graphql"
mutation cancelJob($id: CiBuildID!) {
jobCancel(input: { id: $id }) {
job {
...Job
}
errors
}
}

View File

@ -0,0 +1,10 @@
#import "../fragments/job.fragment.graphql"
mutation playJob($id: CiBuildID!) {
jobPlay(input: { id: $id }) {
job {
...Job
}
errors
}
}

View File

@ -0,0 +1,10 @@
#import "../fragments/job.fragment.graphql"
mutation retryJob($id: CiBuildID!) {
jobRetry(input: { id: $id }) {
job {
...Job
}
errors
}
}

View File

@ -0,0 +1,10 @@
#import "../fragments/job.fragment.graphql"
mutation unscheduleJob($id: CiBuildID!) {
jobUnschedule(input: { id: $id }) {
job {
...Job
}
errors
}
}

View File

@ -69,6 +69,7 @@ query getJobs(
stuck
userPermissions {
readBuild
readJobArtifacts
}
}
}

View File

@ -1,9 +1,12 @@
import { GlToast } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import JobsTableApp from '~/jobs/components/table/jobs_table_app.vue';
import createDefaultClient from '~/lib/graphql';
import { parseBoolean } from '~/lib/utils/common_utils';
Vue.use(VueApollo);
Vue.use(GlToast);
const apolloProvider = new VueApollo({
defaultClient: createDefaultClient(),
@ -22,6 +25,7 @@ export default (containerId = 'js-jobs-table') => {
jobStatuses,
pipelineEditorPath,
emptyStateSvgPath,
admin,
} = containerEl.dataset;
return new Vue({
@ -33,6 +37,7 @@ export default (containerId = 'js-jobs-table') => {
pipelineEditorPath,
jobStatuses: JSON.parse(jobStatuses),
jobCounts: JSON.parse(jobCounts),
admin: parseBoolean(admin),
},
render(createElement) {
return createElement(JobsTableApp);

View File

@ -2,6 +2,7 @@
import { GlAlert, GlPagination, GlSkeletonLoader } from '@gitlab/ui';
import { __ } from '~/locale';
import { GRAPHQL_PAGE_SIZE, initialPaginationState } from './constants';
import eventHub from './event_hub';
import GetJobs from './graphql/queries/get_jobs.query.graphql';
import JobsTable from './jobs_table.vue';
import JobsTableEmptyState from './jobs_table_empty_state.vue';
@ -74,7 +75,16 @@ export default {
return Boolean(this.prevPage || this.nextPage) && !this.$apollo.loading;
},
},
mounted() {
eventHub.$on('jobActionPerformed', this.handleJobAction);
},
beforeDestroy() {
eventHub.$off('jobActionPerformed', this.handleJobAction);
},
methods: {
handleJobAction() {
this.$apollo.queries.jobs.refetch({ statuses: this.scope });
},
fetchJobsByStatus(scope) {
this.scope = scope;

View File

@ -0,0 +1,29 @@
import { ApolloLink } from 'apollo-link';
import { memoize } from 'lodash';
export const FEATURE_CATEGORY_HEADER = 'x-gitlab-feature-category';
/**
* Returns the ApolloLink (or null) used to add instrumentation metadata to the GraphQL request.
*
* - The result will be null if the `feature_category` cannot be found.
* - The result is memoized since the `feature_category` is the same for the entire page.
*/
export const getInstrumentationLink = memoize(() => {
const { feature_category: featureCategory } = gon;
if (!featureCategory) {
return null;
}
return new ApolloLink((operation, forward) => {
operation.setContext(({ headers = {} }) => ({
headers: {
...headers,
[FEATURE_CATEGORY_HEADER]: featureCategory,
},
}));
return forward(operation);
});
});

View File

@ -10,6 +10,7 @@ import { StartupJSLink } from '~/lib/utils/apollo_startup_js_link';
import csrf from '~/lib/utils/csrf';
import { objectToQuery, queryToObject } from '~/lib/utils/url_utility';
import PerformanceBarService from '~/performance_bar/services/performance_bar_service';
import { getInstrumentationLink } from './apollo/instrumentation_link';
export const fetchPolicies = {
CACHE_FIRST: 'cache-first',
@ -140,14 +141,17 @@ export default (resolvers = {}, config = {}) => {
const appLink = ApolloLink.split(
hasSubscriptionOperation,
new ActionCableLink(),
ApolloLink.from([
requestCounterLink,
performanceBarLink,
new StartupJSLink(),
apolloCaptchaLink,
uploadsLink,
requestLink,
]),
ApolloLink.from(
[
getInstrumentationLink(),
requestCounterLink,
performanceBarLink,
new StartupJSLink(),
apolloCaptchaLink,
uploadsLink,
requestLink,
].filter(Boolean),
),
);
return new ApolloClient({

View File

@ -173,8 +173,6 @@ export default {
footerManageLabelTitle: this.footerManageLabelTitle,
});
setTimeout(() => this.updateLabelsSetState(), 100);
this.$store.subscribeAction({
after: this.handleVuexActionDispatch,
});

View File

@ -2,6 +2,7 @@
class NamespaceSetting < ApplicationRecord
include CascadingNamespaceSettingAttribute
include Sanitizable
cascading_attr :delayed_project_removal
@ -25,6 +26,8 @@ class NamespaceSetting < ApplicationRecord
self.primary_key = :namespace_id
sanitizes! :default_branch_name
def prevent_sharing_groups_outside_hierarchy
return super if namespace.root?
@ -34,11 +37,7 @@ class NamespaceSetting < ApplicationRecord
private
def normalize_default_branch_name
self.default_branch_name = if default_branch_name.blank?
nil
else
Sanitize.fragment(self.default_branch_name)
end
self.default_branch_name = default_branch_name.presence
end
def default_branch_name_content

View File

@ -1,8 +1,9 @@
- page_title _("Jobs")
- add_page_specific_style 'page_bundles/ci_status'
- admin = local_assigns.fetch(:admin, false)
- if Feature.enabled?(:jobs_table_vue, @project, default_enabled: :yaml)
#js-jobs-table{ data: { full_path: @project.full_path, job_counts: job_counts.to_json, job_statuses: job_statuses.to_json, pipeline_editor_path: project_ci_pipeline_editor_path(@project), empty_state_svg_path: image_path('jobs-empty-state.svg') } }
#js-jobs-table{ data: { admin: admin, full_path: @project.full_path, job_counts: job_counts.to_json, job_statuses: job_statuses.to_json, pipeline_editor_path: project_ci_pipeline_editor_path(@project), empty_state_svg_path: image_path('jobs-empty-state.svg') } }
- else
.top-area
- build_path_proc = ->(scope) { project_jobs_path(@project, scope: scope) }

View File

@ -7,6 +7,8 @@ module WebHooks
data_consistency :always
feature_category :integrations
urgency :low
sidekiq_options retry: 3
loggable_arguments 0, 2, 3
idempotent!

View File

@ -1,9 +1,80 @@
#!/usr/bin/env bash
cd $(dirname $0)/.. || exit 1
cd $(dirname $0)/..
app_root=$(pwd)
sidekiq_workers=${SIDEKIQ_WORKERS:-1}
sidekiq_pidfile="$app_root/tmp/pids/sidekiq-cluster.pid"
sidekiq_logfile="$app_root/log/sidekiq.log"
gitlab_user=$(ls -l config.ru | awk '{print $3}')
if [ -n "$SIDEKIQ_WORKERS" ] ; then
exec bin/background_jobs_sk_cluster "$@"
else
exec bin/background_jobs_sk "$@"
fi
warn()
{
echo "$@" 1>&2
}
get_sidekiq_pid()
{
if [ ! -f $sidekiq_pidfile ]; then
warn "No pidfile found at $sidekiq_pidfile; is Sidekiq running?"
return
fi
cat $sidekiq_pidfile
}
stop()
{
sidekiq_pid=$(get_sidekiq_pid)
if [ $sidekiq_pid ]; then
kill -TERM $sidekiq_pid
fi
}
restart()
{
if [ -f $sidekiq_pidfile ]; then
stop
fi
warn "Sidekiq output will be written to $sidekiq_logfile"
start_sidekiq "$@" >> $sidekiq_logfile 2>&1
}
start_sidekiq()
{
cmd="exec"
chpst=$(command -v chpst)
if [ -n "$chpst" ]; then
cmd="${cmd} ${chpst} -P"
fi
# sidekiq-cluster expects '*' '*' arguments (one wildcard for each process).
for (( i=1; i<=$sidekiq_workers; i++ ))
do
processes_args+=("*")
done
${cmd} bin/sidekiq-cluster "${processes_args[@]}" -P $sidekiq_pidfile -e $RAILS_ENV "$@"
}
action="$1"
shift
case "$action" in
stop)
stop
;;
start)
restart "$@" &
;;
start_foreground)
start_sidekiq "$@"
;;
restart)
restart "$@" &
;;
*)
echo "Usage: RAILS_ENV=<env> SIDEKIQ_WORKERS=<n> $0 {stop|start|start_foreground|restart}"
esac

View File

@ -1,67 +0,0 @@
#!/usr/bin/env bash
cd $(dirname $0)/..
app_root=$(pwd)
sidekiq_pidfile="$app_root/tmp/pids/sidekiq.pid"
sidekiq_logfile="$app_root/log/sidekiq.log"
sidekiq_config="$app_root/config/sidekiq_queues.yml"
gitlab_user=$(ls -l config.ru | awk '{print $3}')
warn()
{
echo "$@" 1>&2
}
stop()
{
bundle exec sidekiqctl stop $sidekiq_pidfile >> $sidekiq_logfile 2>&1
}
restart()
{
if [ -f $sidekiq_pidfile ]; then
stop
fi
pkill -u $gitlab_user -f 'sidekiq [0-9]'
start_sidekiq -P $sidekiq_pidfile -d -L $sidekiq_logfile "$@" >> $sidekiq_logfile 2>&1
}
# Starts on foreground but output to the logfile instead stdout.
start_silent()
{
start_sidekiq "$@" >> $sidekiq_logfile 2>&1
}
start_sidekiq()
{
cmd="exec"
chpst=$(command -v chpst)
if [ -n "$chpst" ]; then
cmd="${cmd} ${chpst} -P"
fi
${cmd} bundle exec sidekiq -C "${sidekiq_config}" -e $RAILS_ENV "$@"
}
case "$1" in
stop)
stop
;;
start)
restart "$@"
;;
start_silent)
warn "Deprecated: Will be removed at 13.0 (see https://gitlab.com/gitlab-org/gitlab/-/issues/196731)."
start_silent
;;
start_foreground)
start_sidekiq "$@"
;;
restart)
restart "$@"
;;
*)
echo "Usage: RAILS_ENV=<env> $0 {stop|start|start_silent|start_foreground|restart}"
esac

View File

@ -1,76 +0,0 @@
#!/usr/bin/env bash
cd $(dirname $0)/..
app_root=$(pwd)
sidekiq_pidfile="$app_root/tmp/pids/sidekiq-cluster.pid"
sidekiq_logfile="$app_root/log/sidekiq.log"
gitlab_user=$(ls -l config.ru | awk '{print $3}')
warn()
{
echo "$@" 1>&2
}
get_sidekiq_pid()
{
if [ ! -f $sidekiq_pidfile ]; then
warn "No pidfile found at $sidekiq_pidfile; is Sidekiq running?"
return
fi
cat $sidekiq_pidfile
}
stop()
{
sidekiq_pid=$(get_sidekiq_pid)
if [ $sidekiq_pid ]; then
kill -TERM $sidekiq_pid
fi
}
restart()
{
if [ -f $sidekiq_pidfile ]; then
stop
fi
warn "Sidekiq output will be written to $sidekiq_logfile"
start_sidekiq "$@" >> $sidekiq_logfile 2>&1
}
start_sidekiq()
{
cmd="exec"
chpst=$(command -v chpst)
if [ -n "$chpst" ]; then
cmd="${cmd} ${chpst} -P"
fi
# sidekiq-cluster expects '*' '*' arguments (one wildcard for each process).
for (( i=1; i<=$SIDEKIQ_WORKERS; i++ ))
do
processes_args+=("*")
done
${cmd} bin/sidekiq-cluster "${processes_args[@]}" -P $sidekiq_pidfile -e $RAILS_ENV "$@"
}
case "$1" in
stop)
stop
;;
start)
restart "$@" &
;;
start_foreground)
start_sidekiq "$@"
;;
restart)
restart "$@" &
;;
*)
echo "Usage: RAILS_ENV=<env> SIDEKIQ_WORKERS=<n> $0 {stop|start|start_foreground|restart}"
esac

View File

@ -1,8 +1,5 @@
#!/usr/bin/env ruby
require 'bundler'
ENV['BUNDLE_GEMFILE'] ||=
Bundler.settings[:gemfile] || File.expand_path('../Gemfile', __dir__)
require_relative '../config/bundler_setup'
load Gem.bin_path('bundler', 'bundle')

View File

@ -1,10 +1,4 @@
# frozen_string_literal: true
require 'bundler'
ENV['BUNDLE_GEMFILE'] ||=
Bundler.settings[:gemfile] || File.expand_path('../Gemfile', __dir__)
# Set up gems listed in the Gemfile.
require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE'])
require_relative 'bundler_setup'
require 'bootsnap/setup' if ENV['RAILS_ENV'] != 'production' || %w(1 yes true).include?(ENV['ENABLE_BOOTSNAP'])

View File

@ -1,5 +1,24 @@
# frozen_string_literal: true
# Instead of requiring 'bundler/setup' directly, we need the following
# to make bundler more consistent when it comes to loading from
# bundler config. See the following links for more context:
# https://gitlab.com/gitlab-org/gitlab/-/issues/339939
# https://github.com/rubygems/rubygems/pull/4892
# https://github.com/rubygems/rubygems/issues/3363
require 'bundler'
ENV['BUNDLE_GEMFILE'] ||= Bundler.settings[:gemfile]
ENV['BUNDLE_GEMFILE'] ||= Bundler.settings[:gemfile] || File.expand_path('../Gemfile', __dir__)
Bundler::SharedHelpers.set_env('BUNDLE_GEMFILE', ENV['BUNDLE_GEMFILE'])
if Bundler.respond_to?(:reset_settings_and_root!)
Bundler.reset_settings_and_root!
else
# Bundler 2.1.4 does not have this method. Do the same as Bundler 2.2.26
# https://github.com/rubygems/rubygems/blob/bundler-v2.2.26/bundler/lib/bundler.rb#L612-L615
Bundler.instance_eval do
@settings = nil
@root = nil
end
end
require 'bundler/setup'

View File

@ -9,12 +9,14 @@ product_category:
value_type: number
status: data_available
time_frame: 28d
data_source:
data_source: redis_hll
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
performance_indicator_type:
- smau
- gmau

View File

@ -16,8 +16,10 @@ options:
- p_analytics_pipelines
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
performance_indicator_type: []
milestone: "<13.9"

View File

@ -16,8 +16,10 @@ options:
- p_analytics_valuestream
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
performance_indicator_type: []
milestone: "<13.9"

View File

@ -1,7 +1,7 @@
---
data_category: optional
key_path: redis_hll_counters.analytics.i_analytics_cohorts_monthly
description:
description: "Unique visitors to /-/instance_statistics/cohorts"
product_section: fulfillment
product_stage: fulfillment
product_group: group::utilization
@ -16,8 +16,10 @@ options:
- i_analytics_cohorts
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
performance_indicator_type: []
milestone: "<13.9"

View File

@ -16,8 +16,10 @@ options:
- i_analytics_dev_ops_score
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
performance_indicator_type: []
milestone: "<13.9"

View File

@ -16,8 +16,10 @@ options:
- g_analytics_merge_request
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
performance_indicator_type: []
milestone: "<13.9"

View File

@ -16,8 +16,10 @@ options:
- i_analytics_instance_statistics
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
performance_indicator_type: []
milestone: "<13.9"

View File

@ -32,8 +32,10 @@ options:
- i_analytics_cohorts
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
performance_indicator_type: []
milestone: "<13.9"

View File

@ -16,7 +16,10 @@ options:
- i_analytics_instance_statistics
distribution:
- ee
tier: []
skip_validation: true
- ee
tier:
- free
- premium
- ultimate
performance_indicator_type: []
milestone: "<13.9"

View File

@ -31,8 +31,11 @@ options:
- p_analytics_repo
- i_analytics_cohorts
distribution:
- ce
- ee
tier: []
skip_validation: true
tier:
- free
- premium
- ultimate
performance_indicator_type: []
milestone: "<13.9"

View File

@ -9,10 +9,12 @@ product_category:
value_type: number
status: data_available
time_frame: all
data_source:
data_source: redis_hll
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
milestone: "<13.9"

View File

@ -9,10 +9,12 @@ product_category:
value_type: number
status: data_available
time_frame: all
data_source:
data_source: redis_hll
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
milestone: "<13.9"

View File

@ -9,10 +9,13 @@ product_category:
value_type: number
status: data_available
time_frame: all
data_source:
data_source: redis_hll
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
skip_validation: true
milestone: "<13.9"

View File

@ -9,10 +9,12 @@ product_category:
value_type: number
status: data_available
time_frame: all
data_source:
data_source: redis_hll
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
milestone: "<13.9"

View File

@ -9,10 +9,12 @@ product_category:
value_type: number
status: data_available
time_frame: all
data_source:
data_source: redis_hll
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
milestone: "<13.9"

View File

@ -9,10 +9,12 @@ product_category:
value_type: number
status: removed
time_frame: 7d
data_source:
data_source: redis_hll
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
milestone: "<13.9"

View File

@ -9,10 +9,12 @@ product_category:
value_type: number
status: data_available
time_frame: all
data_source:
data_source: redis_hll
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
milestone: "<13.9"

View File

@ -9,11 +9,13 @@ product_category:
value_type: number
status: data_available
time_frame: all
data_source:
data_source: redis_hll
distribution:
- ce
- ee
tier:
- free
skip_validation: true
- premium
- ultimate
performance_indicator_type: []
milestone: "<13.9"

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class CleanupBigintConversionForDeployments < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
TABLE = :deployments
# rubocop:disable Migration/WithLockRetriesDisallowedMethod
def up
with_lock_retries do
cleanup_conversion_of_integer_to_bigint(TABLE, :deployable_id)
end
end
# rubocop:enable Migration/WithLockRetriesDisallowedMethod
def down
restore_conversion_of_integer_to_bigint(TABLE, :deployable_id)
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class CleanupBigintConversionForGeoJobArtifactDeletedEvents < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
TABLE = :geo_job_artifact_deleted_events
# rubocop:disable Migration/WithLockRetriesDisallowedMethod
def up
with_lock_retries do
cleanup_conversion_of_integer_to_bigint(TABLE, :job_artifact_id)
end
end
# rubocop:enable Migration/WithLockRetriesDisallowedMethod
def down
restore_conversion_of_integer_to_bigint(TABLE, :job_artifact_id)
end
end

View File

@ -0,0 +1 @@
d02cc8e136f1d761a34c7c585a3fe2b8c3bc3bca67e0e45f950248a5fad36a24

View File

@ -0,0 +1 @@
23d4d2d037cd70c5b810824a837b45f016a3be5d112938123c1da08416f667cd

View File

@ -89,15 +89,6 @@ BEGIN
END;
$$;
CREATE FUNCTION trigger_77f5e1d20482() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
NEW."deployable_id_convert_to_bigint" := NEW."deployable_id";
RETURN NEW;
END;
$$;
CREATE FUNCTION trigger_8487d4de3e7b() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -135,15 +126,6 @@ BEGIN
END;
$$;
CREATE FUNCTION trigger_f1ca8ec18d78() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
NEW."job_artifact_id_convert_to_bigint" := NEW."job_artifact_id";
RETURN NEW;
END;
$$;
CREATE TABLE audit_events (
id bigint NOT NULL,
author_id integer NOT NULL,
@ -13225,7 +13207,6 @@ CREATE TABLE deployments (
tag boolean NOT NULL,
sha character varying NOT NULL,
user_id integer,
deployable_id_convert_to_bigint integer,
deployable_type character varying,
created_at timestamp without time zone,
updated_at timestamp without time zone,
@ -14104,7 +14085,6 @@ ALTER SEQUENCE geo_hashed_storage_migrated_events_id_seq OWNED BY geo_hashed_sto
CREATE TABLE geo_job_artifact_deleted_events (
id bigint NOT NULL,
job_artifact_id_convert_to_bigint integer DEFAULT 0 NOT NULL,
file_path character varying NOT NULL,
job_artifact_id bigint NOT NULL
);
@ -27312,8 +27292,6 @@ CREATE TRIGGER trigger_51ab7cef8934 BEFORE INSERT OR UPDATE ON ci_builds_runner_
CREATE TRIGGER trigger_542d6c2ad72e BEFORE INSERT OR UPDATE ON ci_builds_metadata FOR EACH ROW EXECUTE FUNCTION trigger_542d6c2ad72e();
CREATE TRIGGER trigger_77f5e1d20482 BEFORE INSERT OR UPDATE ON deployments FOR EACH ROW EXECUTE FUNCTION trigger_77f5e1d20482();
CREATE TRIGGER trigger_8487d4de3e7b BEFORE INSERT OR UPDATE ON ci_builds_metadata FOR EACH ROW EXECUTE FUNCTION trigger_8487d4de3e7b();
CREATE TRIGGER trigger_91dc388a5fe6 BEFORE INSERT OR UPDATE ON dep_ci_build_trace_sections FOR EACH ROW EXECUTE FUNCTION trigger_91dc388a5fe6();
@ -27322,8 +27300,6 @@ CREATE TRIGGER trigger_aebe8b822ad3 BEFORE INSERT OR UPDATE ON taggings FOR EACH
CREATE TRIGGER trigger_cf2f9e35f002 BEFORE INSERT OR UPDATE ON ci_build_trace_chunks FOR EACH ROW EXECUTE FUNCTION trigger_cf2f9e35f002();
CREATE TRIGGER trigger_f1ca8ec18d78 BEFORE INSERT OR UPDATE ON geo_job_artifact_deleted_events FOR EACH ROW EXECUTE FUNCTION trigger_f1ca8ec18d78();
CREATE TRIGGER trigger_has_external_issue_tracker_on_delete AFTER DELETE ON integrations FOR EACH ROW WHEN ((((old.category)::text = 'issue_tracker'::text) AND (old.active = true) AND (old.project_id IS NOT NULL))) EXECUTE FUNCTION set_has_external_issue_tracker();
CREATE TRIGGER trigger_has_external_issue_tracker_on_insert AFTER INSERT ON integrations FOR EACH ROW WHEN ((((new.category)::text = 'issue_tracker'::text) AND (new.active = true) AND (new.project_id IS NOT NULL))) EXECUTE FUNCTION set_has_external_issue_tracker();

View File

@ -10,4 +10,4 @@ link: https://docs.gitlab.com/ee/development/documentation/restful_api_styleguid
level: error
scope: code
raw:
- 'curl.*[^"=]https?://.*'
- 'curl [^"]+://.*'

View File

@ -10,4 +10,4 @@ link: https://docs.gitlab.com/ee/development/documentation/styleguide/index.html
level: error
scope: raw
raw:
- '\[.+\]\((https?:){0}[\w\/\.-]+(\.html).*?\)'
- '\[.+\]\([\w\/\.-]+\.html[^)]*\)'

View File

@ -10,4 +10,4 @@ link: https://docs.gitlab.com/ee/development/documentation/styleguide/index.html
level: error
scope: raw
raw:
- '\[.+\]\(\.\/.+?\)'
- '\[.+\]\(\.\/.*?\)'

View File

@ -9,9 +9,9 @@
# - `> Introduced` (version text without a link)
# - `> [Introduced` (version text with a link)
#
# Because it excludes `-`, it doesn't look for multi-line version text, for which content
# immediately on the next line is ok. However, this will often highlight where multi-line version
# text is attempted without `-` characters.
# Because it excludes the prefix `> - `, it doesn't look for multi-line version text, for which
# content immediately on the next line is ok. However, this will often highlight where multi-line
# version text is attempted without `-` characters.
#
# For a list of all options, see https://errata-ai.gitbook.io/vale/getting-started/styles
extends: existence
@ -20,4 +20,4 @@ link: https://docs.gitlab.com/ee/development/documentation/styleguide/index.html
level: error
scope: raw
raw:
- '> (- ){0}\[?Introduced.+\n[^\n`]'
- '> \[?Introduced.+\n[^\n]'

View File

@ -5,7 +5,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
type: concepts, howto
---
# Environments API
# Environments API **(FREE)**
## List environments

View File

@ -15,7 +15,7 @@ Every API call to `epic_links` must be authenticated.
If a user is not a member of a private group, a `GET` request on that
group results in a `404` status code.
Multi-level Epics are available only in GitLab [Ultimate](https://about.gitlab.com/pricing/).
Multi-level Epics are available only in [GitLab Ultimate](https://about.gitlab.com/pricing/).
If the Multi-level Epics feature is not available, a `403` status code is returned.
## List epics related to a given epic

View File

@ -51,7 +51,7 @@ PATCH /projects/:id/error_tracking/settings
| ------------ | ------- | -------- | --------------------- |
| `id` | integer | yes | The ID or [URL-encoded path of the project](index.md#namespaced-path-encoding) owned by the authenticated user. |
| `active` | boolean | yes | Pass `true` to enable the already configured error tracking settings or `false` to disable it. |
| `integrated` | boolean | no | Pass `true` to enable the integrated error tracking backend. Available in [GitLab 14.2](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68260) and later. |
| `integrated` | boolean | no | Pass `true` to enable the integrated error tracking backend. [Available in](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68260) GitLab 14.2 and later. |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/error_tracking/settings?active=true"

View File

@ -4,7 +4,7 @@ group: Compliance
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Events API
# Events API **(FREE)**
## Filter parameters
@ -15,7 +15,7 @@ Available types for the `action` parameter, and the resources that might be affe
- `approved`
- Merge request
- `closed`
- Epic
- Epic **(PREMIUM)**
- Issue
- Merge request
- Milestone
@ -28,7 +28,7 @@ Available types for the `action` parameter, and the resources that might be affe
- Snippet
- `created`
- Design
- Epic
- Epic **(PREMIUM)**
- Issue
- Merge request
- Milestone
@ -49,7 +49,7 @@ Available types for the `action` parameter, and the resources that might be affe
- `pushed` commits to (or deleted commits from) a repository, individually or in bulk.
- Project
- `reopened`
- Epic
- Epic **(PREMIUM)**
- Issue
- Merge request
- Milestone

View File

@ -38,14 +38,14 @@ GET projects/:id/packages/helm/:channel/index.yaml
```shell
curl --user <username>:<personal_access_token> \
https://gitlab.example.com/api/v4/projects/1/packages/helm/stable/index.yaml
"https://gitlab.example.com/api/v4/projects/1/packages/helm/stable/index.yaml"
```
Write the output to a file:
```shell
curl --user <username>:<personal_access_token> \
https://gitlab.example.com/api/v4/projects/1/packages/helm/stable/index.yaml \
"https://gitlab.example.com/api/v4/projects/1/packages/helm/stable/index.yaml" \
--remote-name
```
@ -67,7 +67,7 @@ GET projects/:id/packages/helm/:channel/charts/:file_name.tgz
```shell
curl --user <username>:<personal_access_token> \
https://gitlab.example.com/api/v4/projects/1/packages/helm/stable/charts/mychart.tgz \
"https://gitlab.example.com/api/v4/projects/1/packages/helm/stable/charts/mychart.tgz" \
--remote-name
```
@ -91,5 +91,5 @@ POST projects/:id/packages/helm/api/:channel/charts
curl --request POST \
--form 'chart=@mychart.tgz' \
--user <username>:<personal_access_token> \
https://gitlab.example.com/api/v4/projects/1/packages/helm/api/stable/charts
"https://gitlab.example.com/api/v4/projects/1/packages/helm/api/stable/charts"
```

View File

@ -37,7 +37,7 @@ sequenceDiagram
1. The CI/CD job generates a document in an LSIF format (usually `dump.lsif`) using [an
indexer](https://lsif.dev) for the language of a project. The format
[describes](https://github.com/sourcegraph/sourcegraph/blob/master/doc/user/code_intelligence/writing_an_indexer.md)
[describes](https://github.com/sourcegraph/sourcegraph/blob/main/doc/code_intelligence/explanations/writing_an_indexer.md)
interactions between a method or function and its definition(s) or references. The
document is marked to be stored as an LSIF report artifact.

View File

@ -24,8 +24,10 @@ There are two types of redirects:
for users who view the docs on [`docs.gitlab.com`](https://docs.gitlab.com).
The Technical Writing team manages the [process](https://gitlab.com/gitlab-org/technical-writing/-/blob/main/.gitlab/issue_templates/tw-monthly-tasks.md)
to regularly update the [`redirects.yaml`](https://gitlab.com/gitlab-org/gitlab-docs/-/blob/main/content/_data/redirects.yaml)
file.
to regularly update and [clean up the redirects](https://gitlab.com/gitlab-org/gitlab-docs/-/blob/main/doc/raketasks.md#clean-up-redirects).
If you're a contributor, you may add a new redirect, but you don't need to delete
the old ones. This process is automatic and handled by the Technical
Writing team.
To add a redirect:

View File

@ -725,7 +725,7 @@ Example request:
```shell
curl --request POST \
--url http://localhost:3000/api/v4/namespaces/123/minutes \
--url "http://localhost:3000/api/v4/namespaces/123/minutes" \
--header 'Content-Type: application/json' \
--header 'PRIVATE-TOKEN: <admin access token>' \
--data '{
@ -769,7 +769,7 @@ Example request:
```shell
curl --request PATCH \
--url http://localhost:3000/api/v4/namespaces/123/minutes/move/321 \
--url "http://localhost:3000/api/v4/namespaces/123/minutes/move/321" \
--header 'PRIVATE-TOKEN: <admin access token>'
```

View File

@ -430,6 +430,23 @@ In the `detect-tests` job, we use this mapping to identify the minimal tests nee
After a merge request has been approved, the pipeline would contain the full RSpec tests. This will ensure that all tests
have been run before a merge request is merged.
### Jest minimal jobs
Before a merge request is approved, the pipeline will run a minimal set of Jest tests that are related to the merge request changes.
This is to reduce the pipeline cost and shorten the job duration.
To identify the minimal set of tests needed, we pass a list of all the changed files into `jest` using the [`--findRelatedTests`](https://jestjs.io/docs/cli#--findrelatedtests-spaceseparatedlistofsourcefiles) option.
In this mode, `jest` resolves all the dependencies related to the changed files, including test files that have these files in their dependency chain.
After a merge request has been approved, the pipeline would contain the full Jest tests. This will ensure that all tests
have been run before a merge request is merged.
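For illustration, a rough sketch of the two steps this wiring boils down to, using the `CHANGES_FILE` path and the `jest:ci:minimal` script introduced elsewhere in this commit (the exact invocation lives in `.gitlab/ci/frontend.gitlab-ci.yml` and `package.json`; the first step only works inside a merge-request pipeline, because `tooling/bin/find_changes` reads the MR from CI variables):

```shell
# 1. Ask the GitLab API for the files changed in the merge request and write
#    them, space-separated, to the changes file (CHANGES_FILE in the CI config).
tooling/bin/find_changes tmp/changed_files.txt

# 2. Run only the Jest specs that depend on those files.
yarn jest:ci:minimal
# roughly equivalent to:
#   jest --config jest.config.js --ci --coverage \
#     --findRelatedTests $(cat tmp/changed_files.txt) --passWithNoTests
```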
In addition, there are a few circumstances where we would always run the full Jest tests:
- when `package.json`, `yarn.lock`, or the `jest` configuration changes
- when vendored JavaScript is changed
- when `.graphql` files are changed
### PostgreSQL versions testing
Our test suite runs against PG12 as GitLab.com runs on PG12 and

View File

@ -125,7 +125,7 @@ Example request that uses HTTP Basic authentication:
```shell
curl --user "user:<your_access_token>" \
https://gitlab.example.com/api/v4/projects/24/packages/generic/my_package/0.0.1/file.txt
"https://gitlab.example.com/api/v4/projects/24/packages/generic/my_package/0.0.1/file.txt"
```
## Publish a generic package by using CI/CD

View File

@ -275,7 +275,7 @@ might look like this example:
job1:
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"' # Run job1 in merge request pipelines
- if: '$CI_COMMIT_BRANCH == "master"' # Run job1 in pipelines on the master branch (but not in other branch pipelines)
- if: '$CI_COMMIT_BRANCH == "main"' # Run job1 in pipelines on the main branch (but not in other branch pipelines)
- if: '$CI_COMMIT_TAG' # Run job1 in pipelines for tags
```
@ -291,7 +291,7 @@ code_quality:
- if: '$CODE_QUALITY_DISABLED'
when: never
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"' # Run code quality job in merge request pipelines
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH' # Run code quality job in pipelines on the master branch (but not in other branch pipelines)
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH' # Run code quality job in pipelines on the default branch (but not in other branch pipelines)
- if: '$CI_COMMIT_TAG' # Run code quality job in pipelines for tags
```

View File

@ -392,9 +392,9 @@ upload:
- if: $CI_COMMIT_TAG
script:
- |
curl --header "JOB-TOKEN: ${CI_JOB_TOKEN}" --upload-file bin/${DARWIN_AMD64_BINARY} ${PACKAGE_REGISTRY_URL}/${DARWIN_AMD64_BINARY}
curl --header "JOB-TOKEN: ${CI_JOB_TOKEN}" --upload-file bin/${DARWIN_AMD64_BINARY} "${PACKAGE_REGISTRY_URL}/${DARWIN_AMD64_BINARY}"
- |
curl --header "JOB-TOKEN: ${CI_JOB_TOKEN}" --upload-file bin/${LINUX_AMD64_BINARY} ${PACKAGE_REGISTRY_URL}/${LINUX_AMD64_BINARY}
curl --header "JOB-TOKEN: ${CI_JOB_TOKEN}" --upload-file bin/${LINUX_AMD64_BINARY} "${PACKAGE_REGISTRY_URL}/${LINUX_AMD64_BINARY}"
release:
# Caution, as of 2021-02-02 these assets links require a login, see:

View File

@ -10776,6 +10776,12 @@ msgstr ""
msgid "DelayedJobs|Are you sure you want to run %{job_name} immediately? This job will run automatically after it's timer finishes."
msgstr ""
msgid "DelayedJobs|Are you sure you want to run %{job_name} immediately? This job will run automatically after its timer finishes."
msgstr ""
msgid "DelayedJobs|Run the delayed job now?"
msgstr ""
msgid "DelayedJobs|Start now"
msgstr ""
@ -16009,6 +16015,9 @@ msgstr ""
msgid "GroupRoadmap|Within 3 years"
msgstr ""
msgid "GroupSAML|\"persistent\" recommended"
msgstr ""
msgid "GroupSAML|%{strongOpen}Warning%{strongClose} - Enabling %{linkStart}SSO enforcement%{linkEnd} can reduce security risks."
msgstr ""
@ -16156,13 +16165,10 @@ msgstr ""
msgid "GroupSAML|as %{access_level}"
msgstr ""
msgid "GroupSAML|must match stored NameID of \"%{extern_uid}\" as we use this to identify users. If the NameID changes users will be unable to sign in."
msgid "GroupSAML|must match stored NameID of \"%{extern_uid}\" to identify user and allow sign in"
msgstr ""
msgid "GroupSAML|should be \"persistent\""
msgstr ""
msgid "GroupSAML|should be a random persistent ID, emails are discouraged"
msgid "GroupSAML|recommend persistent ID instead of email"
msgstr ""
msgid "GroupSelect|No matching results"

View File

@ -13,6 +13,8 @@
"prejest": "yarn check-dependencies",
"jest": "jest --config jest.config.js",
"jest-debug": "node --inspect-brk node_modules/.bin/jest --runInBand",
"jest:ci": "jest --config jest.config.js --ci --coverage --testSequencer ./scripts/frontend/parallel_ci_sequencer.js",
"jest:ci:minimal": "jest --config jest.config.js --ci --coverage --findRelatedTests $(cat tmp/changed_files.txt) --passWithNoTests --testSequencer ./scripts/frontend/parallel_ci_sequencer.js",
"jest:integration": "jest --config jest.config.integration.js",
"jsdoc": "jsdoc -c config/jsdocs.config.js",
"lint:eslint": "yarn run internal:eslint",

View File

@ -0,0 +1,46 @@
import { InMemoryCache } from 'apollo-cache-inmemory';
import { ApolloClient } from 'apollo-client';
import { ApolloLink } from 'apollo-link';
import gql from 'graphql-tag';
const FOO_QUERY = gql`
query {
foo
}
`;
/**
* This function returns a promise that resolves to the final operation after
* running an ApolloClient query with the given ApolloLink
*
* @typedef {Object} TestApolloLinkOptions
* @property {Object} context the default context object sent along the ApolloLink chain
*
* @param {ApolloLink} subjectLink the ApolloLink which is under test
* @param {TestApolloLinkOptions} options contains options to send along with the query
*
* @returns Promise resolving to the resulting operation after running the subjectLink
*/
export const testApolloLink = (subjectLink, options = {}) =>
new Promise((resolve) => {
const { context = {} } = options;
// Use the terminating link to capture the final operation and resolve with this.
const terminatingLink = new ApolloLink((operation) => {
resolve(operation);
return null;
});
const client = new ApolloClient({
link: ApolloLink.from([subjectLink, terminatingLink]),
// cache is a required option
cache: new InMemoryCache(),
});
// Trigger a query so the ApolloLink chain will be executed.
client.query({
context,
query: FOO_QUERY,
});
});

View File

@ -0,0 +1,126 @@
import { GlModal } from '@gitlab/ui';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ActionsCell from '~/jobs/components/table/cells/actions_cell.vue';
import JobPlayMutation from '~/jobs/components/table/graphql/mutations/job_play.mutation.graphql';
import JobRetryMutation from '~/jobs/components/table/graphql/mutations/job_retry.mutation.graphql';
import JobUnscheduleMutation from '~/jobs/components/table/graphql/mutations/job_unschedule.mutation.graphql';
import { playableJob, retryableJob, scheduledJob } from '../../../mock_data';
describe('Job actions cell', () => {
let wrapper;
let mutate;
const findRetryButton = () => wrapper.findByTestId('retry');
const findPlayButton = () => wrapper.findByTestId('play');
const findDownloadArtifactsButton = () => wrapper.findByTestId('download-artifacts');
const findCountdownButton = () => wrapper.findByTestId('countdown');
const findPlayScheduledJobButton = () => wrapper.findByTestId('play-scheduled');
const findUnscheduleButton = () => wrapper.findByTestId('unschedule');
const findModal = () => wrapper.findComponent(GlModal);
const MUTATION_SUCCESS = { data: { JobRetryMutation: { jobId: retryableJob.id } } };
const MUTATION_SUCCESS_UNSCHEDULE = {
data: { JobUnscheduleMutation: { jobId: scheduledJob.id } },
};
const MUTATION_SUCCESS_PLAY = { data: { JobPlayMutation: { jobId: playableJob.id } } };
const $toast = {
show: jest.fn(),
};
const createComponent = (jobType, mutationType = MUTATION_SUCCESS, props = {}) => {
mutate = jest.fn().mockResolvedValue(mutationType);
wrapper = shallowMountExtended(ActionsCell, {
propsData: {
job: jobType,
...props,
},
mocks: {
$apollo: {
mutate,
},
$toast,
},
});
};
afterEach(() => {
wrapper.destroy();
});
it('does not display an artifacts download button', () => {
createComponent(retryableJob);
expect(findDownloadArtifactsButton().exists()).toBe(false);
});
it.each`
button | action | jobType
${findPlayButton} | ${'play'} | ${playableJob}
${findRetryButton} | ${'retry'} | ${retryableJob}
${findDownloadArtifactsButton} | ${'download artifacts'} | ${playableJob}
`('displays the $action button', ({ button, jobType }) => {
createComponent(jobType);
expect(button().exists()).toBe(true);
});
it.each`
button | mutationResult | action | jobType | mutationFile
${findPlayButton} | ${MUTATION_SUCCESS_PLAY} | ${'play'} | ${playableJob} | ${JobPlayMutation}
${findRetryButton} | ${MUTATION_SUCCESS} | ${'retry'} | ${retryableJob} | ${JobRetryMutation}
`('performs the $action mutation', ({ button, mutationResult, jobType, mutationFile }) => {
createComponent(jobType, mutationResult);
button().vm.$emit('click');
expect(mutate).toHaveBeenCalledWith({
mutation: mutationFile,
variables: {
id: jobType.id,
},
});
});
describe('Scheduled Jobs', () => {
const today = () => new Date('2021-08-31');
beforeEach(() => {
jest.spyOn(Date, 'now').mockImplementation(today);
});
it('displays the countdown, play and unschedule buttons', () => {
createComponent(scheduledJob);
expect(findCountdownButton().exists()).toBe(true);
expect(findPlayScheduledJobButton().exists()).toBe(true);
expect(findUnscheduleButton().exists()).toBe(true);
});
it('unschedules a job', () => {
createComponent(scheduledJob, MUTATION_SUCCESS_UNSCHEDULE);
findUnscheduleButton().vm.$emit('click');
expect(mutate).toHaveBeenCalledWith({
mutation: JobUnscheduleMutation,
variables: {
id: scheduledJob.id,
},
});
});
it('shows the play job confirmation modal', async () => {
createComponent(scheduledJob, MUTATION_SUCCESS);
findPlayScheduledJobButton().vm.$emit('click');
await nextTick();
expect(findModal().exists()).toBe(true);
});
});
});

View File

@ -1555,7 +1555,11 @@ export const mockJobsQueryResponse = {
cancelable: false,
active: false,
stuck: false,
userPermissions: { readBuild: true, __typename: 'JobPermissions' },
userPermissions: {
readBuild: true,
readJobArtifacts: true,
__typename: 'JobPermissions',
},
__typename: 'CiJob',
},
],
@ -1573,3 +1577,179 @@ export const mockJobsQueryEmptyResponse = {
},
},
};
export const retryableJob = {
artifacts: { nodes: [], __typename: 'CiJobArtifactConnection' },
allowFailure: false,
status: 'SUCCESS',
scheduledAt: null,
manualJob: false,
triggered: null,
createdByTag: false,
detailedStatus: {
detailsPath: '/root/test-job-artifacts/-/jobs/1981',
group: 'success',
icon: 'status_success',
label: 'passed',
text: 'passed',
tooltip: 'passed',
action: {
buttonTitle: 'Retry this job',
icon: 'retry',
method: 'post',
path: '/root/test-job-artifacts/-/jobs/1981/retry',
title: 'Retry',
__typename: 'StatusAction',
},
__typename: 'DetailedStatus',
},
id: 'gid://gitlab/Ci::Build/1981',
refName: 'main',
refPath: '/root/test-job-artifacts/-/commits/main',
tags: [],
shortSha: '75daf01b',
commitPath: '/root/test-job-artifacts/-/commit/75daf01b465e7eab5a04a315e44660c9a17c8055',
pipeline: {
id: 'gid://gitlab/Ci::Pipeline/288',
path: '/root/test-job-artifacts/-/pipelines/288',
user: {
webPath: '/root',
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
__typename: 'UserCore',
},
__typename: 'Pipeline',
},
stage: { name: 'test', __typename: 'CiStage' },
name: 'hello_world',
duration: 7,
finishedAt: '2021-08-30T20:33:56Z',
coverage: null,
retryable: true,
playable: false,
cancelable: false,
active: false,
stuck: false,
userPermissions: { readBuild: true, __typename: 'JobPermissions' },
__typename: 'CiJob',
};
export const playableJob = {
artifacts: {
nodes: [
{
downloadPath: '/root/test-job-artifacts/-/jobs/1982/artifacts/download?file_type=trace',
__typename: 'CiJobArtifact',
},
],
__typename: 'CiJobArtifactConnection',
},
allowFailure: false,
status: 'SUCCESS',
scheduledAt: null,
manualJob: true,
triggered: null,
createdByTag: false,
detailedStatus: {
detailsPath: '/root/test-job-artifacts/-/jobs/1982',
group: 'success',
icon: 'status_success',
label: 'manual play action',
text: 'passed',
tooltip: 'passed',
action: {
buttonTitle: 'Trigger this manual action',
icon: 'play',
method: 'post',
path: '/root/test-job-artifacts/-/jobs/1982/play',
title: 'Play',
__typename: 'StatusAction',
},
__typename: 'DetailedStatus',
},
id: 'gid://gitlab/Ci::Build/1982',
refName: 'main',
refPath: '/root/test-job-artifacts/-/commits/main',
tags: [],
shortSha: '75daf01b',
commitPath: '/root/test-job-artifacts/-/commit/75daf01b465e7eab5a04a315e44660c9a17c8055',
pipeline: {
id: 'gid://gitlab/Ci::Pipeline/288',
path: '/root/test-job-artifacts/-/pipelines/288',
user: {
webPath: '/root',
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
__typename: 'UserCore',
},
__typename: 'Pipeline',
},
stage: { name: 'test', __typename: 'CiStage' },
name: 'hello_world_delayed',
duration: 6,
finishedAt: '2021-08-30T20:36:12Z',
coverage: null,
retryable: true,
playable: true,
cancelable: false,
active: false,
stuck: false,
userPermissions: { readBuild: true, readJobArtifacts: true, __typename: 'JobPermissions' },
__typename: 'CiJob',
};
export const scheduledJob = {
artifacts: { nodes: [], __typename: 'CiJobArtifactConnection' },
allowFailure: false,
status: 'SCHEDULED',
scheduledAt: '2021-08-31T22:36:05Z',
manualJob: true,
triggered: null,
createdByTag: false,
detailedStatus: {
detailsPath: '/root/test-job-artifacts/-/jobs/1986',
group: 'scheduled',
icon: 'status_scheduled',
label: 'unschedule action',
text: 'delayed',
tooltip: 'delayed manual action (%{remainingTime})',
action: {
buttonTitle: 'Unschedule job',
icon: 'time-out',
method: 'post',
path: '/root/test-job-artifacts/-/jobs/1986/unschedule',
title: 'Unschedule',
__typename: 'StatusAction',
},
__typename: 'DetailedStatus',
},
id: 'gid://gitlab/Ci::Build/1986',
refName: 'main',
refPath: '/root/test-job-artifacts/-/commits/main',
tags: [],
shortSha: '75daf01b',
commitPath: '/root/test-job-artifacts/-/commit/75daf01b465e7eab5a04a315e44660c9a17c8055',
pipeline: {
id: 'gid://gitlab/Ci::Pipeline/290',
path: '/root/test-job-artifacts/-/pipelines/290',
user: {
webPath: '/root',
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
__typename: 'UserCore',
},
__typename: 'Pipeline',
},
stage: { name: 'test', __typename: 'CiStage' },
name: 'hello_world_delayed',
duration: null,
finishedAt: null,
coverage: null,
retryable: false,
playable: true,
cancelable: false,
active: false,
stuck: false,
userPermissions: { readBuild: true, __typename: 'JobPermissions' },
__typename: 'CiJob',
};

View File

@ -0,0 +1,54 @@
import { testApolloLink } from 'helpers/test_apollo_link';
import { getInstrumentationLink, FEATURE_CATEGORY_HEADER } from '~/lib/apollo/instrumentation_link';
const TEST_FEATURE_CATEGORY = 'foo_feature';
describe('~/lib/apollo/instrumentation_link', () => {
const setFeatureCategory = (val) => {
window.gon.feature_category = val;
};
afterEach(() => {
getInstrumentationLink.cache.clear();
});
describe('getInstrumentationLink', () => {
describe('with no gon.feature_category', () => {
beforeEach(() => {
setFeatureCategory(null);
});
it('returns null', () => {
expect(getInstrumentationLink()).toBe(null);
});
});
describe('with gon.feature_category', () => {
beforeEach(() => {
setFeatureCategory(TEST_FEATURE_CATEGORY);
});
it('returns memoized apollo link', () => {
const result = getInstrumentationLink();
// expect.any(ApolloLink) doesn't work for some reason...
expect(result).toHaveProp('request');
expect(result).toBe(getInstrumentationLink());
});
it('adds a feature category header from the returned apollo link', async () => {
const defaultHeaders = { Authorization: 'foo' };
const operation = await testApolloLink(getInstrumentationLink(), {
context: { headers: defaultHeaders },
});
const { headers } = operation.getContext();
expect(headers).toEqual({
...defaultHeaders,
[FEATURE_CATEGORY_HEADER]: TEST_FEATURE_CATEGORY,
});
});
});
});
});

View File

@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe NamespaceSetting, type: :model do
it_behaves_like 'sanitizable', :namespace_settings, %i[default_branch_name]
# Relationships
#
describe "Associations" do
@ -41,14 +43,6 @@ RSpec.describe NamespaceSetting, type: :model do
it_behaves_like "doesn't return an error"
end
context "when it contains javascript tags" do
it "gets sanitized properly" do
namespace_settings.update!(default_branch_name: "hello<script>alert(1)</script>")
expect(namespace_settings.default_branch_name).to eq('hello')
end
end
end
describe '#allow_mfa_for_group' do

View File

@ -452,6 +452,7 @@ RSpec.describe 'Every Sidekiq worker' do
'WaitForClusterCreationWorker' => 3,
'WebHookWorker' => 4,
'WebHooks::DestroyWorker' => 3,
'WebHooks::LogExecutionWorker' => 3,
'Wikis::GitGarbageCollectWorker' => false,
'X509CertificateRevokeWorker' => 3
}

tooling/bin/find_changes Executable file
View File

@ -0,0 +1,21 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
require 'gitlab'
gitlab_token = ENV.fetch('DANGER_GITLAB_API_TOKEN', '')
gitlab_endpoint = ENV.fetch('CI_API_V4_URL')
mr_project_path = ENV.fetch('CI_MERGE_REQUEST_PROJECT_PATH')
mr_iid = ENV.fetch('CI_MERGE_REQUEST_IID')
output_file = ARGV.shift
Gitlab.configure do |config|
config.endpoint = gitlab_endpoint
config.private_token = gitlab_token
end
mr_changes = Gitlab.merge_request_changes(mr_project_path, mr_iid)
file_changes = mr_changes.changes.map { |change| change['new_path'] }
File.write(output_file, file_changes.join(' '))

View File

@ -1,24 +1,12 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
require 'gitlab'
require 'test_file_finder'
gitlab_token = ENV.fetch('DANGER_GITLAB_API_TOKEN', '')
gitlab_endpoint = ENV.fetch('CI_API_V4_URL')
Gitlab.configure do |config|
config.endpoint = gitlab_endpoint
config.private_token = gitlab_token
end
changes = ARGV.shift
output_file = ARGV.shift
mr_project_path = ENV.fetch('CI_MERGE_REQUEST_PROJECT_PATH')
mr_iid = ENV.fetch('CI_MERGE_REQUEST_IID')
mr_changes = Gitlab.merge_request_changes(mr_project_path, mr_iid)
changed_files = mr_changes.changes.map { |change| change['new_path'] }
changed_files = File.read(changes).split(' ')
tff = TestFileFinder::FileFinder.new(paths: changed_files).tap do |file_finder|
file_finder.use TestFileFinder::MappingStrategies::PatternMatching.load('tests.yml')