Add latest changes from gitlab-org/gitlab@master
This commit is contained in: parent cddf2db96b, commit 28b15b6b1c
40 changed files with 742 additions and 665 deletions
@@ -2589,7 +2589,6 @@ Rails/IncludeUrlHelper:
- 'app/models/integrations/youtrack.rb'
- 'app/presenters/alert_management/alert_presenter.rb'
- 'app/presenters/ci/pipeline_presenter.rb'
- 'app/presenters/clusters/cluster_presenter.rb'
- 'app/presenters/environment_presenter.rb'
- 'app/presenters/gitlab/blame_presenter.rb'
- 'app/presenters/group_clusterable_presenter.rb'
@@ -1089,6 +1089,12 @@ entry.
- [Add helpful text to URL group validation and limit text](gitlab-org/gitlab@59a5a6266cb0d5434596170ffa36e4e74b8d2c2c) ([merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/65369)) **GitLab Enterprise Edition**
- [Refactor external storage admin area configuration UI and docs](gitlab-org/gitlab@497ba4fc8f4ec1d234c9f5f1ec5c69712b8c7cb3) ([merge request](gitlab-org/gitlab!66219))

## 14.1.6 (2021-09-27)

### Fixed (1 change)

- [Fix Elastic::MigrationWorker current_migration (2nd attempt)](gitlab-org/gitlab@f07c7a5f173a2fc053247664f21c03d29df543a4) ([merge request](gitlab-org/gitlab!71187)) **GitLab Enterprise Edition**

## 14.1.5 (2021-09-02)

### Fixed (1 change)
@ -29,6 +29,7 @@ import {
|
|||
WARNING,
|
||||
MT_MERGE_STRATEGY,
|
||||
PIPELINE_FAILED_STATE,
|
||||
STATE_MACHINE,
|
||||
} from '../../constants';
|
||||
import eventHub from '../../event_hub';
|
||||
import mergeRequestQueryVariablesMixin from '../../mixins/merge_request_query_variables';
|
||||
|
@ -47,6 +48,9 @@ const MERGE_FAILED_STATUS = 'failed';
|
|||
const MERGE_SUCCESS_STATUS = 'success';
|
||||
const MERGE_HOOK_VALIDATION_ERROR_STATUS = 'hook_validation_error';
|
||||
|
||||
const { transitions } = STATE_MACHINE;
|
||||
const { MERGE, MERGED, MERGE_FAILURE } = transitions;
|
||||
|
||||
export default {
|
||||
name: 'ReadyToMerge',
|
||||
apollo: {
|
||||
|
@ -361,6 +365,7 @@ export default {
|
|||
}
|
||||
|
||||
this.isMakingRequest = true;
|
||||
this.mr.transitionStateMachine({ transition: MERGE });
|
||||
this.service
|
||||
.merge(options)
|
||||
.then((res) => res.data)
|
||||
|
@ -375,6 +380,7 @@ export default {
|
|||
this.initiateMergePolling();
|
||||
} else if (hasError) {
|
||||
eventHub.$emit('FailedToMerge', data.merge_error);
|
||||
this.mr.transitionStateMachine({ transition: MERGE_FAILURE });
|
||||
}
|
||||
|
||||
if (this.glFeatures.mergeRequestWidgetGraphql) {
|
||||
|
@ -383,6 +389,7 @@ export default {
|
|||
})
|
||||
.catch(() => {
|
||||
this.isMakingRequest = false;
|
||||
this.mr.transitionStateMachine({ transition: MERGE_FAILURE });
|
||||
createFlash({
|
||||
message: __('Something went wrong. Please try again.'),
|
||||
});
|
||||
|
@ -417,6 +424,7 @@ export default {
|
|||
eventHub.$emit('FetchActionsContent');
|
||||
MergeRequest.hideCloseButton();
|
||||
MergeRequest.decreaseCounter();
|
||||
this.mr.transitionStateMachine({ transition: MERGED });
|
||||
stopPolling();
|
||||
|
||||
refreshUserMergeRequestCounts();
|
||||
|
@ -428,6 +436,7 @@ export default {
|
|||
}
|
||||
} else if (data.merge_error) {
|
||||
eventHub.$emit('FailedToMerge', data.merge_error);
|
||||
this.mr.transitionStateMachine({ transition: MERGE_FAILURE });
|
||||
stopPolling();
|
||||
} else {
|
||||
// MR is not merged yet, continue polling until the state becomes 'merged'
|
||||
|
@ -438,6 +447,7 @@ export default {
|
|||
createFlash({
|
||||
message: __('Something went wrong while merging this merge request. Please try again.'),
|
||||
});
|
||||
this.mr.transitionStateMachine({ transition: MERGE_FAILURE });
|
||||
stopPolling();
|
||||
});
|
||||
},
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import { s__ } from '~/locale';
|
||||
import { stateToComponentMap as classStateMap, stateKey } from './stores/state_maps';
|
||||
|
||||
export const SUCCESS = 'success';
|
||||
export const WARNING = 'warning';
|
||||
|
@ -52,3 +53,42 @@ export const MERGE_ACTIVE_STATUS_PHRASES = [
|
|||
emoji: 'heart_eyes',
|
||||
},
|
||||
];
|
||||
|
||||
const STATE_MACHINE = {
|
||||
states: {
|
||||
IDLE: 'IDLE',
|
||||
MERGING: 'MERGING',
|
||||
},
|
||||
transitions: {
|
||||
MERGE: 'start-merge',
|
||||
MERGE_FAILURE: 'merge-failed',
|
||||
MERGED: 'merge-done',
|
||||
},
|
||||
};
|
||||
const { states, transitions } = STATE_MACHINE;
|
||||
|
||||
STATE_MACHINE.definition = {
|
||||
initial: states.IDLE,
|
||||
states: {
|
||||
[states.IDLE]: {
|
||||
on: {
|
||||
[transitions.MERGE]: states.MERGING,
|
||||
},
|
||||
},
|
||||
[states.MERGING]: {
|
||||
on: {
|
||||
[transitions.MERGED]: states.IDLE,
|
||||
[transitions.MERGE_FAILURE]: states.IDLE,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export const stateToTransitionMap = {
|
||||
[stateKey.merging]: transitions.MERGE,
|
||||
[stateKey.merged]: transitions.MERGED,
|
||||
};
|
||||
export const stateToComponentMap = {
|
||||
[states.MERGING]: classStateMap[stateKey.merging],
|
||||
};
|
||||
export { STATE_MACHINE };
|
||||
|
|
|
@ -4,7 +4,7 @@ import { isEmpty } from 'lodash';
|
|||
import MrWidgetApprovals from 'ee_else_ce/vue_merge_request_widget/components/approvals/approvals.vue';
|
||||
import MRWidgetService from 'ee_else_ce/vue_merge_request_widget/services/mr_widget_service';
|
||||
import MRWidgetStore from 'ee_else_ce/vue_merge_request_widget/stores/mr_widget_store';
|
||||
import stateMaps from 'ee_else_ce/vue_merge_request_widget/stores/state_maps';
|
||||
import { stateToComponentMap as classState } from 'ee_else_ce/vue_merge_request_widget/stores/state_maps';
|
||||
import createFlash from '~/flash';
|
||||
import { secondsToMilliseconds } from '~/lib/utils/datetime_utility';
|
||||
import notify from '~/lib/utils/notify';
|
||||
|
@ -39,6 +39,7 @@ import ShaMismatch from './components/states/sha_mismatch.vue';
|
|||
import UnresolvedDiscussionsState from './components/states/unresolved_discussions.vue';
|
||||
import WorkInProgressState from './components/states/work_in_progress.vue';
|
||||
import ExtensionsContainer from './components/extensions/container';
|
||||
import { STATE_MACHINE, stateToComponentMap } from './constants';
|
||||
import eventHub from './event_hub';
|
||||
import mergeRequestQueryVariablesMixin from './mixins/merge_request_query_variables';
|
||||
import getStateQuery from './queries/get_state.query.graphql';
|
||||
|
@ -124,7 +125,9 @@ export default {
|
|||
mr: store,
|
||||
state: store && store.state,
|
||||
service: store && this.createService(store),
|
||||
machineState: store?.machineValue || STATE_MACHINE.definition.initial,
|
||||
loading: true,
|
||||
recomputeComponentName: 0,
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
|
@ -139,7 +142,7 @@ export default {
|
|||
return this.mr.state !== 'nothingToMerge';
|
||||
},
|
||||
componentName() {
|
||||
return stateMaps.stateToComponentMap[this.mr.state];
|
||||
return stateToComponentMap[this.machineState] || classState[this.mr.state];
|
||||
},
|
||||
hasPipelineMustSucceedConflict() {
|
||||
return !this.mr.hasCI && this.mr.onlyAllowMergeIfPipelineSucceeds;
|
||||
|
@ -206,6 +209,11 @@ export default {
|
|||
},
|
||||
},
|
||||
watch: {
|
||||
'mr.machineValue': {
|
||||
handler(newValue) {
|
||||
this.machineState = newValue;
|
||||
},
|
||||
},
|
||||
state(newVal, oldVal) {
|
||||
if (newVal !== oldVal && this.shouldRenderMergedPipeline) {
|
||||
// init polling
|
||||
|
@ -247,6 +255,8 @@ export default {
|
|||
this.mr = new MRWidgetStore({ ...window.gl.mrWidgetData, ...data });
|
||||
}
|
||||
|
||||
this.machineState = this.mr.machineValue;
|
||||
|
||||
if (!this.state) {
|
||||
this.state = this.mr.state;
|
||||
}
|
||||
|
|
|
@ -1,11 +1,21 @@
|
|||
import getStateKey from 'ee_else_ce/vue_merge_request_widget/stores/get_state_key';
|
||||
import { statusBoxState } from '~/issuable/components/status_box.vue';
|
||||
import { formatDate, getTimeago } from '~/lib/utils/datetime_utility';
|
||||
import { MTWPS_MERGE_STRATEGY, MT_MERGE_STRATEGY, MWPS_MERGE_STRATEGY } from '../constants';
|
||||
import { machine } from '~/lib/utils/finite_state_machine';
|
||||
import {
|
||||
MTWPS_MERGE_STRATEGY,
|
||||
MT_MERGE_STRATEGY,
|
||||
MWPS_MERGE_STRATEGY,
|
||||
STATE_MACHINE,
|
||||
stateToTransitionMap,
|
||||
} from '../constants';
|
||||
import { stateKey } from './state_maps';
|
||||
|
||||
const { format } = getTimeago();
|
||||
|
||||
const { states } = STATE_MACHINE;
|
||||
const { IDLE } = states;
|
||||
|
||||
export default class MergeRequestStore {
|
||||
constructor(data) {
|
||||
this.sha = data.diff_head_sha;
|
||||
|
@ -16,6 +26,9 @@ export default class MergeRequestStore {
|
|||
this.apiUnapprovePath = data.api_unapprove_path;
|
||||
this.hasApprovalsAvailable = data.has_approvals_available;
|
||||
|
||||
this.stateMachine = machine(STATE_MACHINE.definition);
|
||||
this.machineValue = this.stateMachine.value;
|
||||
|
||||
this.setPaths(data);
|
||||
|
||||
this.setData(data);
|
||||
|
@ -215,10 +228,7 @@ export default class MergeRequestStore {
|
|||
setState() {
|
||||
if (this.mergeOngoing) {
|
||||
this.state = 'merging';
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.isOpen) {
|
||||
} else if (this.isOpen) {
|
||||
this.state = getStateKey.call(this);
|
||||
} else {
|
||||
switch (this.mergeRequestState) {
|
||||
|
@ -232,6 +242,8 @@ export default class MergeRequestStore {
|
|||
this.state = null;
|
||||
}
|
||||
}
|
||||
|
||||
this.translateStateToMachine();
|
||||
}
|
||||
|
||||
setPaths(data) {
|
||||
|
@ -356,4 +368,32 @@ export default class MergeRequestStore {
|
|||
(this.onlyAllowMergeIfPipelineSucceeds && this.isPipelineFailed)
|
||||
);
|
||||
}
|
||||
|
||||
// Because the state machine doesn't yet handle every state and transition,
|
||||
// some use-cases will need to force a state that can't be reached by
|
||||
// a known transition. This is undesirable long-term (as it subverts
|
||||
// the intent of a state machine), but is necessary until the machine
|
||||
// can handle all possible combinations. (unsafeForce)
|
||||
transitionStateMachine({ transition, state, unsafeForce = false } = {}) {
|
||||
if (unsafeForce && state) {
|
||||
this.stateMachine.value = state;
|
||||
} else {
|
||||
this.stateMachine.send(transition);
|
||||
}
|
||||
|
||||
this.machineValue = this.stateMachine.value;
|
||||
}
|
||||
translateStateToMachine() {
|
||||
const transition = stateToTransitionMap[this.state];
|
||||
let transitionOptions = {
|
||||
state: IDLE,
|
||||
unsafeForce: true,
|
||||
};
|
||||
|
||||
if (transition) {
|
||||
transitionOptions = { transition };
|
||||
}
|
||||
|
||||
this.transitionStateMachine(transitionOptions);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
const stateToComponentMap = {
|
||||
export const stateToComponentMap = {
|
||||
merged: 'mr-widget-merged',
|
||||
closed: 'mr-widget-closed',
|
||||
merging: 'mr-widget-merging',
|
||||
|
@ -21,7 +21,7 @@ const stateToComponentMap = {
|
|||
mergeChecksFailed: 'mergeChecksFailed',
|
||||
};
|
||||
|
||||
const statesToShowHelpWidget = [
|
||||
export const statesToShowHelpWidget = [
|
||||
'merging',
|
||||
'conflicts',
|
||||
'workInProgress',
|
||||
|
@ -50,11 +50,7 @@ export const stateKey = {
|
|||
notAllowedToMerge: 'notAllowedToMerge',
|
||||
readyToMerge: 'readyToMerge',
|
||||
rebase: 'rebase',
|
||||
merging: 'merging',
|
||||
merged: 'merged',
|
||||
mergeChecksFailed: 'mergeChecksFailed',
|
||||
};
|
||||
|
||||
export default {
|
||||
stateToComponentMap,
|
||||
statesToShowHelpWidget,
|
||||
};
|
||||
|
|
|
@ -38,8 +38,13 @@ module GroupTree
|
|||
#
|
||||
# Pagination needs to be applied before loading the ancestors to
|
||||
# make sure ancestors are not cut off by pagination.
|
||||
Gitlab::ObjectHierarchy.new(Group.where(id: filtered_groups.select(:id)))
|
||||
.base_and_ancestors
|
||||
filtered_groups_relation = Group.where(id: filtered_groups.select(:id))
|
||||
|
||||
if Feature.enabled?(:linear_group_tree_ancestor_scopes, current_user, default_enabled: :yaml)
|
||||
filtered_groups_relation.self_and_ancestors
|
||||
else
|
||||
Gitlab::ObjectHierarchy.new(filtered_groups_relation).base_and_ancestors
|
||||
end
|
||||
end
|
||||
# rubocop: enable CodeReuse/ActiveRecord
|
||||
end
|
||||
|
|
|
@ -8,8 +8,12 @@ module Checksummable
|
|||
Zlib.crc32(data)
|
||||
end
|
||||
|
||||
def hexdigest(path)
|
||||
def sha256_hexdigest(path)
|
||||
::Digest::SHA256.file(path).hexdigest
|
||||
end
|
||||
|
||||
def md5_hexdigest(path)
|
||||
::Digest::MD5.file(path).hexdigest
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -192,9 +192,15 @@ class Group < Namespace
|
|||
# Returns the ids of the passed group models where the `emails_disabled`
|
||||
# column is set to true anywhere in the ancestor hierarchy.
|
||||
def ids_with_disabled_email(groups)
|
||||
innner_query = Gitlab::ObjectHierarchy
|
||||
.new(Group.where('id = namespaces_with_emails_disabled.id'))
|
||||
.base_and_ancestors
|
||||
inner_groups = Group.where('id = namespaces_with_emails_disabled.id')
|
||||
|
||||
inner_ancestors = if Feature.enabled?(:linear_group_ancestor_scopes, default_enabled: :yaml)
|
||||
inner_groups.self_and_ancestors
|
||||
else
|
||||
Gitlab::ObjectHierarchy.new(inner_groups).base_and_ancestors
|
||||
end
|
||||
|
||||
inner_query = inner_ancestors
|
||||
.where(emails_disabled: true)
|
||||
.select('1')
|
||||
.limit(1)
|
||||
|
@ -202,7 +208,7 @@ class Group < Namespace
|
|||
group_ids = Namespace
|
||||
.from('(SELECT * FROM namespaces) as namespaces_with_emails_disabled')
|
||||
.where(namespaces_with_emails_disabled: { id: groups })
|
||||
.where('EXISTS (?)', innner_query)
|
||||
.where('EXISTS (?)', inner_query)
|
||||
.pluck(:id)
|
||||
|
||||
Set.new(group_ids)
|
||||
|
|
|
@ -49,7 +49,7 @@ class LfsObject < ApplicationRecord
|
|||
end
|
||||
|
||||
def self.calculate_oid(path)
|
||||
self.hexdigest(path)
|
||||
self.sha256_hexdigest(path)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -67,7 +67,7 @@ class Upload < ApplicationRecord
|
|||
self.checksum = nil
|
||||
return unless needs_checksum?
|
||||
|
||||
self.checksum = self.class.hexdigest(absolute_path)
|
||||
self.checksum = self.class.sha256_hexdigest(absolute_path)
|
||||
end
|
||||
|
||||
# Initialize the associated Uploader class with current model
|
||||
|
|
|
@ -3,24 +3,11 @@
|
|||
module Clusters
|
||||
class ClusterPresenter < Gitlab::View::Presenter::Delegated
|
||||
include ::Gitlab::Utils::StrongMemoize
|
||||
include ActionView::Helpers::SanitizeHelper
|
||||
include ActionView::Helpers::UrlHelper
|
||||
include IconsHelper
|
||||
|
||||
delegator_override_with ::Gitlab::Utils::StrongMemoize # TODO: Remove `::Gitlab::Utils::StrongMemoize` inclusion as it's duplicate
|
||||
|
||||
presents ::Clusters::Cluster, as: :cluster
|
||||
|
||||
# We do not want to show the group path for clusters belonging to the
|
||||
# clusterable, only for the ancestor clusters.
|
||||
def item_link(clusterable_presenter, *html_options)
|
||||
if cluster.group_type? && clusterable != clusterable_presenter.subject
|
||||
contracted_group_name(cluster.group) + ' / ' + link_to_cluster
|
||||
else
|
||||
link_to_cluster(*html_options)
|
||||
end
|
||||
end
|
||||
|
||||
def provider_label
|
||||
if aws?
|
||||
s_('ClusterIntegration|Elastic Kubernetes Service')
|
||||
|
@ -41,16 +28,6 @@ module Clusters
|
|||
can?(current_user, :read_cluster, cluster)
|
||||
end
|
||||
|
||||
def cluster_type_description
|
||||
if cluster.project_type?
|
||||
s_("ClusterIntegration|Project cluster")
|
||||
elsif cluster.group_type?
|
||||
s_("ClusterIntegration|Group cluster")
|
||||
elsif cluster.instance_type?
|
||||
s_("ClusterIntegration|Instance cluster")
|
||||
end
|
||||
end
|
||||
|
||||
def show_path(params: {})
|
||||
if cluster.project_type?
|
||||
project_cluster_path(project, cluster, params)
|
||||
|
@ -109,7 +86,7 @@ module Clusters
|
|||
private
|
||||
|
||||
def image_path(path)
|
||||
ActionController::Base.helpers.image_path(path)
|
||||
ApplicationController.helpers.image_path(path)
|
||||
end
|
||||
|
||||
# currently log explorer is only available in the scope of the project
|
||||
|
@ -129,20 +106,6 @@ module Clusters
|
|||
cluster.project
|
||||
end
|
||||
end
|
||||
|
||||
def contracted_group_name(group)
|
||||
sanitize(group.full_name)
|
||||
.sub(%r{\/.*\/}, "/ #{contracted_icon} /")
|
||||
.html_safe
|
||||
end
|
||||
|
||||
def contracted_icon
|
||||
sprite_icon('ellipsis_h', size: 12, css_class: 'vertical-align-middle')
|
||||
end
|
||||
|
||||
def link_to_cluster(html_options: {})
|
||||
link_to_if(can_read_cluster?, cluster.name, show_path, html_options)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -2,6 +2,11 @@
|
|||
= form_errors(@application_setting)
|
||||
|
||||
%fieldset
|
||||
%h5
|
||||
= _('reCAPTCHA')
|
||||
%p
|
||||
= _('reCAPTCHA helps prevent credential stuffing.')
|
||||
= link_to _('Only reCAPTCHA v2 is supported:'), 'https://developers.google.com/recaptcha/docs/versions', target: '_blank', rel: 'noopener noreferrer'
|
||||
.form-group
|
||||
.form-check
|
||||
= f.check_box :recaptcha_enabled, class: 'form-check-input'
|
||||
|
@ -9,25 +14,31 @@
|
|||
= _("Enable reCAPTCHA")
|
||||
%span.form-text.text-muted#recaptcha_help_block
|
||||
= _('Helps prevent bots from creating accounts.')
|
||||
= link_to _('How do I configure it?'), help_page_path('integration/recaptcha.md'), target: '_blank', rel: 'noopener noreferrer'
|
||||
.form-group
|
||||
.form-check
|
||||
= f.check_box :login_recaptcha_protection_enabled, class: 'form-check-input'
|
||||
= f.label :login_recaptcha_protection_enabled, class: 'form-check-label' do
|
||||
= _("Enable reCAPTCHA for login")
|
||||
= _('Enable reCAPTCHA for login.')
|
||||
%span.form-text.text-muted#recaptcha_help_block
|
||||
= _('Helps prevent bots from brute-force attacks.')
|
||||
.form-group
|
||||
= f.label :recaptcha_site_key, _('reCAPTCHA Site Key'), class: 'label-bold'
|
||||
= f.label :recaptcha_site_key, _('reCAPTCHA site key'), class: 'label-bold'
|
||||
= f.text_field :recaptcha_site_key, class: 'form-control gl-form-input'
|
||||
.form-text.text-muted
|
||||
= _("Generate site and private keys at")
|
||||
%a{ href: 'http://www.google.com/recaptcha', target: 'blank' } http://www.google.com/recaptcha
|
||||
|
||||
.form-group
|
||||
= f.label :recaptcha_private_key, _('reCAPTCHA Private Key'), class: 'label-bold'
|
||||
.form-group
|
||||
= f.label :recaptcha_private_key, _('reCAPTCHA private key'), class: 'label-bold'
|
||||
= f.text_field :recaptcha_private_key, class: 'form-control gl-form-input'
|
||||
|
||||
%h5
|
||||
= _('Invisible Captcha')
|
||||
%p
|
||||
= _('Invisible Captcha helps prevent the creation of spam accounts. It adds a honeypot field and time-sensitive form submission to the account signup form.')
|
||||
= link_to _('Read their documentation.'), 'https://github.com/markets/invisible_captcha', target: '_blank', rel: 'noopener noreferrer'
|
||||
|
||||
.form-group
|
||||
.form-check
|
||||
= f.check_box :invisible_captcha_enabled, class: 'form-check-input'
|
||||
|
@ -36,12 +47,18 @@
|
|||
%span.form-text.text-muted
|
||||
= _('Helps prevent bots from creating accounts.')
|
||||
|
||||
%h5
|
||||
= _('Akismet')
|
||||
%p
|
||||
= _('Akismet helps prevent the creation of spam issues in public projects.')
|
||||
= link_to _('How do I configure Akismet?'), help_page_path('integration/akismet.md'), target: '_blank', rel: 'noopener noreferrer'
|
||||
|
||||
.form-group
|
||||
.form-check
|
||||
= f.check_box :akismet_enabled, class: 'form-check-input'
|
||||
= f.label :akismet_enabled, class: 'form-check-label' do
|
||||
Enable Akismet
|
||||
%span.form-text.text-muted#akismet_help_block= _("Helps prevent bots from creating issues")
|
||||
%span.form-text.text-muted#akismet_help_block= _("Helps prevent bots from creating issues.")
|
||||
|
||||
.form-group
|
||||
= f.label :akismet_api_key, _('Akismet API Key'), class: 'label-bold'
|
||||
|
@ -50,25 +67,31 @@
|
|||
Generate API key at
|
||||
%a{ href: 'http://www.akismet.com', target: 'blank' } http://www.akismet.com
|
||||
|
||||
%h5
|
||||
= _('IP address restrictions')
|
||||
|
||||
.form-group
|
||||
.form-check
|
||||
= f.check_box :unique_ips_limit_enabled, class: 'form-check-input'
|
||||
= f.label :unique_ips_limit_enabled, class: 'form-check-label' do
|
||||
= _("Limit sign in from multiple ips")
|
||||
= _("Limit sign in from multiple IP addresses")
|
||||
%span.form-text.text-muted#unique_ip_help_block
|
||||
= _("Helps prevent malicious users hide their activity")
|
||||
= _("Helps prevent malicious users hide their activity.")
|
||||
|
||||
.form-group
|
||||
= f.label :unique_ips_limit_per_user, _('IPs per user'), class: 'label-bold'
|
||||
= f.label :unique_ips_limit_per_user, _('IP addresses per user'), class: 'label-bold'
|
||||
= f.number_field :unique_ips_limit_per_user, class: 'form-control gl-form-input'
|
||||
.form-text.text-muted
|
||||
= _("Maximum number of unique IPs per user")
|
||||
= _("Maximum number of unique IP addresses per user.")
|
||||
|
||||
.form-group
|
||||
= f.label :unique_ips_limit_time_window, _('IP expiration time'), class: 'label-bold'
|
||||
= f.label :unique_ips_limit_time_window, _('IP address expiration time'), class: 'label-bold'
|
||||
= f.number_field :unique_ips_limit_time_window, class: 'form-control gl-form-input'
|
||||
.form-text.text-muted
|
||||
= _("How many seconds an IP will be counted towards the limit")
|
||||
= _("How many seconds an IP counts toward the IP address limit.")
|
||||
|
||||
%h5
|
||||
= _('Spam Check')
|
||||
|
||||
.form-group
|
||||
.form-check
|
||||
|
@ -79,8 +102,8 @@
|
|||
= f.label :spam_check_endpoint_url, _('URL of the external Spam Check endpoint'), class: 'label-bold'
|
||||
= f.text_field :spam_check_endpoint_url, class: 'form-control gl-form-input'
|
||||
.form-group
|
||||
= f.label :spam_check_api_key, _('Spam Check API Key'), class: 'gl-font-weight-bold'
|
||||
= f.label :spam_check_api_key, _('Spam Check API key'), class: 'gl-font-weight-bold'
|
||||
= f.text_field :spam_check_api_key, class: 'form-control gl-form-input'
|
||||
.form-text.text-muted= _('The API key used by GitLab for accessing the Spam Check service endpoint')
|
||||
.form-text.text-muted= _('The API key used by GitLab for accessing the Spam Check service endpoint.')
|
||||
|
||||
= f.submit _('Save changes'), class: "gl-button btn btn-confirm"
|
||||
|
|
|
@ -9,9 +9,7 @@
|
|||
%button.btn.gl-button.btn-default.js-settings-toggle{ type: 'button' }
|
||||
= expanded_by_default? ? _('Collapse') : _('Expand')
|
||||
%p
|
||||
- recaptcha_v2_link_url = 'https://developers.google.com/recaptcha/docs/versions'
|
||||
- recaptcha_v2_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: recaptcha_v2_link_url }
|
||||
= _('Enable reCAPTCHA, Invisible Captcha, Akismet and set IP limits. For reCAPTCHA, we currently only support %{recaptcha_v2_link_start}v2%{recaptcha_v2_link_end}').html_safe % { recaptcha_v2_link_start: recaptcha_v2_link_start, recaptcha_v2_link_end: '</a>'.html_safe }
|
||||
= _('Configure CAPTCHAs, IP address limits, and other anti-spam measures.')
|
||||
.settings-content
|
||||
= render 'spam'
|
||||
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
---
|
||||
name: linear_group_ancestor_scopes
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/70495
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/341115
|
||||
milestone: '14.4'
|
||||
type: development
|
||||
group: group::access
|
||||
default_enabled: false
|
|
@ -0,0 +1,8 @@
|
|||
---
|
||||
name: linear_group_tree_ancestor_scopes
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/70503
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/341117
|
||||
milestone: '14.4'
|
||||
type: development
|
||||
group: group::access
|
||||
default_enabled: false
|
|
@ -0,0 +1,15 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class RemoveRedundantTaggingsIndex < Gitlab::Database::Migration[1.0]
|
||||
disable_ddl_transaction!
|
||||
|
||||
INDEX_NAME = :index_taggings_on_taggable_id_and_taggable_type
|
||||
|
||||
def up
|
||||
remove_concurrent_index_by_name :taggings, INDEX_NAME
|
||||
end
|
||||
|
||||
def down
|
||||
add_concurrent_index :taggings, [:taggable_id, :taggable_type], name: INDEX_NAME
|
||||
end
|
||||
end
|
db/schema_migrations/20210923133143 (new file)
@@ -0,0 +1 @@
d2736a06009d6232d832a03d6842a81b1de2ce79b901331a0e09ac40fc51a463
|
@ -26508,8 +26508,6 @@ CREATE UNIQUE INDEX index_system_note_metadata_on_note_id ON system_note_metadat
|
|||
|
||||
CREATE INDEX index_taggings_on_tag_id ON taggings USING btree (tag_id);
|
||||
|
||||
CREATE INDEX index_taggings_on_taggable_id_and_taggable_type ON taggings USING btree (taggable_id, taggable_type);
|
||||
|
||||
CREATE INDEX index_taggings_on_taggable_id_and_taggable_type_and_context ON taggings USING btree (taggable_id, taggable_type, context);
|
||||
|
||||
CREATE UNIQUE INDEX index_tags_on_name ON tags USING btree (name);
|
||||
|
|
|
@ -387,7 +387,7 @@ the following are true:
|
|||
- The configured `admin_group` in the `gitlab.rb` is a CN, rather than a DN or an array.
|
||||
- This CN falls under the scope of the configured `group_base`.
|
||||
- The members of the `admin_group` have already signed into GitLab with their LDAP
|
||||
credentials. GitLab only grants this administrator access to the users whose
|
||||
credentials. GitLab only grants the Administrator role to the users whose
|
||||
accounts are already connected to LDAP.
|
||||
|
||||
If all the above are true and the users are still not getting access, [run a manual
|
||||
|
|
|
@ -104,7 +104,7 @@ In Omnibus GitLab, find the logs in `/var/log/gitlab/gitlab-kas/`.
|
|||
See also the [user documentation](../../user/clusters/agent/index.md#troubleshooting)
|
||||
for troubleshooting problems with individual agents.
|
||||
|
||||
### KAS logs - GitOps: failed to get project info
|
||||
### KAS logs - GitOps: failed to get project information
|
||||
|
||||
If you get the following error message:
|
||||
|
||||
|
|
|
@ -393,7 +393,7 @@ $ sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config
|
|||
praefect sql-migrate: OK (applied 21 migrations)
|
||||
```
|
||||
|
||||
### Requests fail with 'repo scoped: invalid Repository' errors
|
||||
### Requests fail with 'repository scoped: invalid Repository' errors
|
||||
|
||||
This indicates that the virtual storage name used in the
|
||||
[Praefect configuration](praefect.md#praefect) does not match the storage name used in
|
||||
|
|
|
@ -12,7 +12,7 @@ If you run a medium-sized self-managed instance (50+ users) of a free version of
|
|||
[either Community Edition or unlicensed Enterprise Edition](https://about.gitlab.com/install/ce-or-ee/),
|
||||
you qualify for a free Instance Review.
|
||||
|
||||
1. Sign in as a user with administrator [permissions](../user/permissions.md).
|
||||
1. Sign in as a user with Administrator [role](../user/permissions.md).
|
||||
1. In the top menu, click your user icon, and select
|
||||
**Get a free instance review**:
|
||||
|
||||
|
|
|
@ -1271,7 +1271,7 @@ the details of each Gitaly node that makes up the cluster. Each storage is also
|
|||
and this name is used in several areas of the configuration. In this guide, the name of the storage will be
|
||||
`default`. Also, this guide is geared towards new installs; if you are upgrading an existing environment
to use Gitaly Cluster, you may need to use a different name.
|
||||
Refer to the [Praefect documentation](../gitaly/praefect.md#praefect) for more info.
|
||||
Refer to the [Praefect documentation](../gitaly/praefect.md#praefect) for more information.
|
||||
|
||||
The following IPs will be used as an example:
|
||||
|
||||
|
|
|
@ -1277,7 +1277,7 @@ the details of each Gitaly node that makes up the cluster. Each storage is also
|
|||
and this name is used in several areas of the configuration. In this guide, the name of the storage will be
|
||||
`default`. Also, this guide is geared towards new installs; if you are upgrading an existing environment
to use Gitaly Cluster, you may need to use a different name.
|
||||
Refer to the [Praefect documentation](../gitaly/praefect.md#praefect) for more info.
|
||||
Refer to the [Praefect documentation](../gitaly/praefect.md#praefect) for more information.
|
||||
|
||||
The following IPs will be used as an example:
|
||||
|
||||
|
|
|
@ -6,8 +6,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
|
|||
|
||||
# Pipelines for the GitLab project
|
||||
|
||||
Pipelines for [`gitlab-org/gitlab`](https://gitlab.com/gitlab-org/gitlab) and [`gitlab-org/gitlab-foss`](https://gitlab.com/gitlab-org/gitlab-foss) (as well as the
|
||||
`dev` instance's mirrors) are configured in the usual
|
||||
Pipelines for [`gitlab-org/gitlab`](https://gitlab.com/gitlab-org/gitlab) (as well as the `dev` instance's) are configured in the usual
|
||||
[`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml)
|
||||
which itself includes files under
|
||||
[`.gitlab/ci/`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/.gitlab/ci)
|
||||
|
@ -17,29 +16,159 @@ We're striving to [dogfood](https://about.gitlab.com/handbook/engineering/#dogfo
|
|||
GitLab [CI/CD features and best-practices](../ci/yaml/index.md)
|
||||
as much as possible.
|
||||
|
||||
## Overview
|
||||
## Minimal test jobs before a merge request is approved
|
||||
|
||||
Pipelines for the GitLab project are created using the [`workflow:rules` keyword](../ci/yaml/index.md#workflow)
|
||||
feature of the GitLab CI/CD.
|
||||
**To reduce the pipeline cost and shorten the job duration, before a merge request is approved, the pipeline will run a minimal set of RSpec & Jest tests that are related to the merge request changes.**
|
||||
|
||||
Pipelines are always created for the following scenarios:
|
||||
After a merge request has been approved, the pipeline contains the full RSpec & Jest tests. This ensures that all tests
|
||||
have been run before a merge request is merged.
|
||||
|
||||
- `main` branch, including on schedules, pushes, merges, and so on.
|
||||
- Merge requests.
|
||||
- Tags.
|
||||
- Stable, `auto-deploy`, and security branches.
|
||||
### RSpec minimal jobs
|
||||
|
||||
Pipeline creation is also affected by the following CI/CD variables:
|
||||
#### Determining related RSpec test files in a merge request
|
||||
|
||||
- If `$FORCE_GITLAB_CI` is set, pipelines are created.
|
||||
- If `$GITLAB_INTERNAL` is not set, pipelines are not created.
|
||||
To identify the minimal set of tests needed, we use the [`test_file_finder` gem](https://gitlab.com/gitlab-org/ci-cd/test_file_finder), with two strategies:
|
||||
|
||||
No pipeline is created in any other cases (for example, when pushing a branch with no
|
||||
MR for it).
|
||||
- dynamic mapping from test coverage tracing (generated via the [Crystalball gem](https://github.com/toptal/crystalball))
|
||||
([see where it's used](https://gitlab.com/gitlab-org/gitlab/-/blob/47d507c93779675d73a05002e2ec9c3c467cd698/tooling/bin/find_tests#L15))
|
||||
- static mapping maintained in the [`tests.yml` file](https://gitlab.com/gitlab-org/gitlab/-/blob/master/tests.yml) for special cases that cannot
|
||||
be mapped via coverage tracing ([see where it's used](https://gitlab.com/gitlab-org/gitlab/-/blob/47d507c93779675d73a05002e2ec9c3c467cd698/tooling/bin/find_tests#L12))
|
||||
|
||||
The source of truth for these workflow rules is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
|
||||
The test mappings map each source file to a list of test files that depend on that source file.
|
||||
|
||||
### Pipelines for Merge Requests
|
||||
In the `detect-tests` job, we use this mapping to identify the minimal tests needed for the current merge request.
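
For illustration, a static mapping entry pairs a source file with the spec files that must run whenever it changes. The paths below are hypothetical and the exact schema is defined by `test_file_finder`; see the linked `tests.yml` for the real entries.

```yaml
# Hypothetical sketch of a tests.yml static mapping entry.
# Each source pattern is matched against the changed files; the matching
# test files are added to the minimal RSpec run by the detect-tests job.
mapping:
  - source: 'lib/gitlab/ci/templates/.+\.yml'
    test: 'spec/lib/gitlab/ci/templates/templates_spec.rb'
  - source: 'app/models/user.rb'
    test: 'spec/models/user_spec.rb'
```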
|
||||
|
||||
#### Exceptional cases
|
||||
|
||||
In addition, there are a few circumstances where we would always run the full RSpec tests:
|
||||
|
||||
- when the `pipeline:run-all-rspec` label is set on the merge request
|
||||
- when the merge request is created by an automation (e.g. Gitaly update or MR targeting a stable branch)
|
||||
- when any CI config file is changed (i.e. `.gitlab-ci.yml` or `.gitlab/ci/**/*`)
|
||||
|
||||
### Jest minimal jobs
|
||||
|
||||
#### Determining related Jest test files in a merge request
|
||||
|
||||
To identify the minimal set of tests needed, we pass a list of all the changed files into `jest` using the [`--findRelatedTests`](https://jestjs.io/docs/cli#--findrelatedtests-spaceseparatedlistofsourcefiles) option.
|
||||
In this mode, `jest` resolves all the dependencies related to the changed files, which include test files that have these files in their dependency chain.
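
As a rough sketch of how this could look in a CI job (the job name, file list source, and script are assumptions, not the pipeline's actual definitions):

```yaml
# Hypothetical sketch: run only the Jest specs related to the changed files.
jest minimal:
  stage: test
  script:
    # changed_files.txt is assumed to be produced by an earlier job such as detect-tests.
    - yarn jest --findRelatedTests $(cat changed_files.txt) --passWithNoTests
```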
|
||||
|
||||
#### Exceptional cases
|
||||
|
||||
In addition, there are a few circumstances where we would always run the full Jest tests:
|
||||
|
||||
- when the `pipeline:run-all-rspec` label is set on the merge request
|
||||
- when the merge request is created by an automation (e.g. Gitaly update or MR targeting a stable branch)
|
||||
- when any CI config file is changed (i.e. `.gitlab-ci.yml` or `.gitlab/ci/**/*`)
|
||||
- when any frontend "core" file is changed (i.e. `package.json`, `yarn.lock`, `babel.config.js`, `jest.config.*.js`, `config/helpers/**/*.js`)
|
||||
- when any vendored JavaScript file is changed (i.e. `vendor/assets/javascripts/**/*`)
|
||||
- when any backend file is changed ([see the patterns list for details](https://gitlab.com/gitlab-org/gitlab/-/blob/3616946936c1adbd9e754c1bd06f86ba670796d8/.gitlab/ci/rules.gitlab-ci.yml#L205-216))
|
||||
|
||||
## Fail-fast job in merge request pipelines
|
||||
|
||||
To provide faster feedback when a merge request breaks existing tests, we are experimenting with a
|
||||
fail-fast mechanism.
|
||||
|
||||
An `rspec fail-fast` job is added in parallel to all other `rspec` jobs in a merge
|
||||
request pipeline. This job runs the tests that are directly related to the changes
|
||||
in the merge request.
|
||||
|
||||
If any of these tests fail, the `rspec fail-fast` job fails, triggering a
|
||||
`fail-pipeline-early` job to run. The `fail-pipeline-early` job:
|
||||
|
||||
- Cancels the currently running pipeline and all in-progress jobs.
|
||||
- Sets pipeline to have status `failed`.
|
||||
|
||||
For example:
|
||||
|
||||
```mermaid
|
||||
graph LR
|
||||
subgraph "prepare stage";
|
||||
A["detect-tests"]
|
||||
end
|
||||
|
||||
subgraph "test stage";
|
||||
B["jest"];
|
||||
C["rspec migration"];
|
||||
D["rspec unit"];
|
||||
E["rspec integration"];
|
||||
F["rspec system"];
|
||||
G["rspec fail-fast"];
|
||||
end
|
||||
|
||||
subgraph "post-test stage";
|
||||
Z["fail-pipeline-early"];
|
||||
end
|
||||
|
||||
A --"artifact: list of test files"--> G
|
||||
G --"on failure"--> Z
|
||||
```
|
||||
|
||||
The `rspec fail-fast` is a no-op if there are more than 10 test files related to the
|
||||
merge request. This prevents `rspec fail-fast` duration from exceeding the average
|
||||
`rspec` job duration and defeating its purpose.
|
||||
|
||||
This number can be overridden by setting a CI/CD variable named `RSPEC_FAIL_FAST_TEST_FILE_COUNT_THRESHOLD`.
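
For example, a pipeline could raise the threshold with a CI/CD variable (the value is only illustrative):

```yaml
variables:
  # Let rspec fail-fast handle up to 20 related test files instead of the default 10.
  RSPEC_FAIL_FAST_TEST_FILE_COUNT_THRESHOLD: "20"
```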
|
||||
|
||||
## Test jobs
|
||||
|
||||
Consult [GitLab tests in the Continuous Integration (CI) context](testing_guide/ci.md)
|
||||
for more information.
|
||||
|
||||
We have dedicated jobs for each [testing level](testing_guide/testing_levels.md) and each job runs depending on the
|
||||
changes made in your merge request.
|
||||
If you want to force all the RSpec jobs to run regardless of your changes, you can add the `pipeline:run-all-rspec` label to the merge request.
|
||||
|
||||
> Forcing all RSpec jobs on docs-only MRs would lead to errors, because the prerequisite jobs would not be created.
|
||||
|
||||
## Review app jobs
|
||||
|
||||
Consult the [Review Apps](testing_guide/review_apps.md) dedicated page for more information.
|
||||
|
||||
## As-if-FOSS jobs
|
||||
|
||||
The `* as-if-foss` jobs run the GitLab test suite "as-if-FOSS", meaning as if the jobs would run in the context
|
||||
of the `gitlab-org/gitlab-foss` project. These jobs are only created in the following cases:
|
||||
|
||||
- when the `pipeline:run-as-if-foss` label is set on the merge request
|
||||
- when the merge request is created in the `gitlab-org/security/gitlab` project
|
||||
- when any CI config file is changed (i.e. `.gitlab-ci.yml` or `.gitlab/ci/**/*`)
|
||||
|
||||
The `* as-if-foss` jobs are run in addition to the regular EE-context jobs. They have the `FOSS_ONLY='1'` variable
|
||||
set and get their EE-specific folders removed before the tests start running.
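
A minimal sketch of that pattern, assuming a base job named `rspec unit` (the job names are illustrative; only the `FOSS_ONLY` variable comes from the actual setup):

```yaml
# Hypothetical as-if-FOSS variant of an EE test job.
rspec unit as-if-foss:
  extends: rspec unit     # assumed base job
  variables:
    FOSS_ONLY: '1'        # run the suite as if in gitlab-org/gitlab-foss
```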
|
||||
|
||||
The intent is to ensure that a change doesn't introduce a failure after the `gitlab-org/gitlab` project is synced to
|
||||
the `gitlab-org/gitlab-foss` project.
|
||||
|
||||
## PostgreSQL versions testing
|
||||
|
||||
Our test suite runs against PG12 as GitLab.com runs on PG12 and
|
||||
[Omnibus defaults to PG12 for new installs and upgrades](../administration/package_information/postgresql_versions.md).
|
||||
|
||||
We do run our test suite against PG11 and PG13 on nightly scheduled pipelines.
|
||||
|
||||
We also run our test suite against PG11 upon specific database library changes in MRs and `main` pipelines (with the `rspec db-library-code pg11` job).
|
||||
|
||||
### Current versions testing
|
||||
|
||||
| Where? | PostgreSQL version |
|
||||
| ------ | ------------------ |
|
||||
| MRs | 12, 11 for DB library changes |
|
||||
| `main` (non-scheduled pipelines) | 12, 11 for DB library changes |
|
||||
| 2-hourly scheduled pipelines | 12, 11 for DB library changes |
|
||||
| `nightly` scheduled pipelines | 12, 11, 13 |
|
||||
|
||||
### Long-term plan
|
||||
|
||||
We follow the [PostgreSQL versions shipped with Omnibus GitLab](../administration/package_information/postgresql_versions.md):
|
||||
|
||||
| PostgreSQL version | 14.1 (July 2021) | 14.2 (August 2021) | 14.3 (September 2021) | 14.4 (October 2021) | 14.5 (November 2021) | 14.6 (December 2021) |
|
||||
| -------------------| ---------------------- | ---------------------- | ---------------------- | ---------------------- | ---------------------- | ---------------------- |
|
||||
| PG12 | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` |
|
||||
| PG11 | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` |
|
||||
| PG13 | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` |
|
||||
|
||||
## Pipelines types for merge requests
|
||||
|
||||
In general, pipelines for an MR fall into one or more of the following types,
|
||||
depending on the changes made in the MR:
|
||||
|
@ -53,7 +182,7 @@ We use the [`rules:`](../ci/yaml/index.md#rules) and [`needs:`](../ci/yaml/index
|
|||
to determine the jobs that need to be run in a pipeline. Note that an MR that includes multiple types of changes would
|
||||
have a pipeline that includes jobs from multiple types (for example, a combination of docs-only and code-only pipelines).
|
||||
|
||||
#### Documentation only MR pipeline
|
||||
### Documentation only MR pipeline
|
||||
|
||||
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/-/pipelines/250546928):
|
||||
|
||||
|
@ -71,7 +200,7 @@ graph LR
|
|||
end
|
||||
```
|
||||
|
||||
#### Code-only MR pipeline
|
||||
### Code-only MR pipeline
|
||||
|
||||
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/pipelines/136295694)
|
||||
|
||||
|
@ -173,7 +302,7 @@ graph RL;
|
|||
end
|
||||
```
|
||||
|
||||
#### Frontend-only MR pipeline
|
||||
### Frontend-only MR pipeline
|
||||
|
||||
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/pipelines/134661039):
|
||||
|
||||
|
@ -299,7 +428,7 @@ graph RL;
|
|||
end
|
||||
```
|
||||
|
||||
#### QA-only MR pipeline
|
||||
### QA-only MR pipeline
|
||||
|
||||
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/pipelines/134645109):
|
||||
|
||||
|
@ -358,151 +487,196 @@ graph RL;
|
|||
end
|
||||
```
|
||||
|
||||
### Fail-fast pipeline in Merge Requests
|
||||
## CI configuration internals
|
||||
|
||||
To provide faster feedback when a Merge Request breaks existing tests, we are experimenting with a
|
||||
fail-fast mechanism.
|
||||
### Workflow rules
|
||||
|
||||
An `rspec fail-fast` job is added in parallel to all other `rspec` jobs in a Merge
|
||||
Request pipeline. This job runs the tests that are directly related to the changes
|
||||
in the Merge Request.
|
||||
Pipelines for the GitLab project are created using the [`workflow:rules` keyword](../ci/yaml/index.md#workflow)
|
||||
feature of the GitLab CI/CD.
|
||||
|
||||
If any of these tests fail, the `rspec fail-fast` job fails, triggering a
|
||||
`fail-pipeline-early` job to run. The `fail-pipeline-early` job:
|
||||
Pipelines are always created for the following scenarios:
|
||||
|
||||
- Cancels the currently running pipeline and all in-progress jobs.
|
||||
- Sets pipeline to have status `failed`.
|
||||
- `main` branch, including on schedules, pushes, merges, and so on.
|
||||
- Merge requests.
|
||||
- Tags.
|
||||
- Stable, `auto-deploy`, and security branches.
|
||||
|
||||
For example:
|
||||
Pipeline creation is also affected by the following CI/CD variables:
|
||||
|
||||
```mermaid
|
||||
graph LR
|
||||
subgraph "prepare stage";
|
||||
A["detect-tests"]
|
||||
end
|
||||
- If `$FORCE_GITLAB_CI` is set, pipelines are created.
|
||||
- If `$GITLAB_INTERNAL` is not set, pipelines are not created.
|
||||
|
||||
subgraph "test stage";
|
||||
B["jest"];
|
||||
C["rspec migration"];
|
||||
D["rspec unit"];
|
||||
E["rspec integration"];
|
||||
F["rspec system"];
|
||||
G["rspec fail-fast"];
|
||||
end
|
||||
No pipeline is created in any other cases (for example, when pushing a branch with no
|
||||
MR for it).
|
||||
|
||||
subgraph "post-test stage";
|
||||
Z["fail-pipeline-early"];
|
||||
end
|
||||
The source of truth for these workflow rules is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
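
A simplified sketch of how such `workflow:rules` can be expressed (illustrative only; the real rule set lives in `.gitlab-ci.yml`):

```yaml
workflow:
  rules:
    # No pipelines outside GitLab's own instances.
    - if: '$GITLAB_INTERNAL == null'
      when: never
    # Always create a pipeline when explicitly forced.
    - if: '$FORCE_GITLAB_CI'
    # Merge requests, tags, and the default branch get pipelines.
    - if: '$CI_MERGE_REQUEST_IID'
    - if: '$CI_COMMIT_TAG'
    - if: '$CI_COMMIT_BRANCH == "main"'
```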
|
||||
|
||||
A --"artifact: list of test files"--> G
|
||||
G --"on failure"--> Z
|
||||
### Default image
|
||||
|
||||
The default image is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
|
||||
|
||||
<!-- vale gitlab.Spelling = NO -->
|
||||
|
||||
It includes Ruby, Go, Git, Git LFS, Chrome, Node, Yarn, PostgreSQL, and Graphics Magick.
|
||||
|
||||
<!-- vale gitlab.Spelling = YES -->
|
||||
|
||||
The images used in our pipelines are configured in the
|
||||
[`gitlab-org/gitlab-build-images`](https://gitlab.com/gitlab-org/gitlab-build-images)
|
||||
project, which is push-mirrored to [`gitlab/gitlab-build-images`](https://dev.gitlab.org/gitlab/gitlab-build-images)
|
||||
for redundancy.
|
||||
|
||||
The current version of the build images can be found in the
|
||||
["Used by GitLab section"](https://gitlab.com/gitlab-org/gitlab-build-images/blob/master/.gitlab-ci.yml).
|
||||
|
||||
### Default variables
|
||||
|
||||
In addition to the [predefined CI/CD variables](../ci/variables/predefined_variables.md),
|
||||
each pipeline includes default variables defined in
|
||||
[`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
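
They are plain top-level `variables:` entries; the values below are only representative examples, not the actual list:

```yaml
variables:
  RAILS_ENV: "test"   # representative example only
  NODE_ENV: "test"    # representative example only
```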
|
||||
|
||||
### Stages
|
||||
|
||||
The current stages are:
|
||||
|
||||
- `sync`: This stage is used to synchronize changes from [`gitlab-org/gitlab`](https://gitlab.com/gitlab-org/gitlab) to
|
||||
[`gitlab-org/gitlab-foss`](https://gitlab.com/gitlab-org/gitlab-foss).
|
||||
- `prepare`: This stage includes jobs that prepare artifacts that are needed by
|
||||
jobs in subsequent stages.
|
||||
- `build-images`: This stage includes jobs that prepare Docker images
|
||||
that are needed by jobs in subsequent stages or downstream pipelines.
|
||||
- `fixtures`: This stage includes jobs that prepare fixtures needed by frontend tests.
|
||||
- `test`: This stage includes most of the tests, DB/migration jobs, and static analysis jobs.
|
||||
- `post-test`: This stage includes jobs that build reports or gather data from
|
||||
the `test` stage's jobs (for example, coverage, Knapsack metadata, and so on).
|
||||
- `review-prepare`: This stage includes a job that builds the CNG images that are
|
||||
later used by the (Helm) Review App deployment (see
|
||||
[Review Apps](testing_guide/review_apps.md) for details).
|
||||
- `review`: This stage includes jobs that deploy the GitLab and Docs Review Apps.
|
||||
- `dast`: This stage includes jobs that run a DAST full scan against the Review App
|
||||
that is deployed in stage `review`.
|
||||
- `qa`: This stage includes jobs that perform QA tasks against the Review App
|
||||
that is deployed in stage `review`.
|
||||
- `post-qa`: This stage includes jobs that build reports or gather data from
|
||||
the `qa` stage's jobs (for example, Review App performance report).
|
||||
- `pages`: This stage includes a job that deploys the various reports as
|
||||
GitLab Pages (for example, [`coverage-ruby`](https://gitlab-org.gitlab.io/gitlab/coverage-ruby/),
|
||||
and `webpack-report` (found at `https://gitlab-org.gitlab.io/gitlab/webpack-report/`, but there is
|
||||
[an issue with the deployment](https://gitlab.com/gitlab-org/gitlab/-/issues/233458)).
|
||||
- `notify`: This stage includes jobs that notify various failures to Slack.
|
||||
|
||||
### Dependency Proxy
|
||||
|
||||
Some of the jobs are using images from Docker Hub, where we also use
|
||||
`${GITLAB_DEPENDENCY_PROXY}` as a prefix to the image path, so that we pull
|
||||
images from our [Dependency Proxy](../user/packages/dependency_proxy/index.md).
|
||||
|
||||
`${GITLAB_DEPENDENCY_PROXY}` is a group CI/CD variable defined in
|
||||
[`gitlab-org`](https://gitlab.com/gitlab-org) as
|
||||
`${CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX}/`. This means when we use an image
|
||||
defined as:
|
||||
|
||||
```yaml
|
||||
image: ${GITLAB_DEPENDENCY_PROXY}alpine:edge
|
||||
```
|
||||
|
||||
A Merge Request author may choose to opt-out of the fail fast mechanism by doing one of the following:
|
||||
Projects in the `gitlab-org` group pull from the Dependency Proxy, while
|
||||
forks that reside on any other personal namespaces or groups fall back to
|
||||
Docker Hub unless `${GITLAB_DEPENDENCY_PROXY}` is also defined there.
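
In other words, the group-level variable simply prefixes the image path, so the same line works with or without the proxy:

```yaml
# With GITLAB_DEPENDENCY_PROXY set to "${CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX}/"
# this resolves through the Dependency Proxy; when the variable is unset,
# it falls back to pulling alpine:edge straight from Docker Hub.
image: ${GITLAB_DEPENDENCY_PROXY}alpine:edge
```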
|
||||
|
||||
- Adding the `pipeline:skip-rspec-fail-fast` label to the merge request
|
||||
- Starting the `dont-interrupt-me` job found in the `sync` stage of a Merge Request pipeline.
|
||||
### Common job definitions
|
||||
|
||||
The `rspec fail-fast` is a no-op if there are more than 10 test files related to the
|
||||
Merge Request. This prevents `rspec fail-fast` duration from exceeding the average
|
||||
`rspec` job duration and defeating its purpose.
|
||||
Most of the jobs [extend from a few CI definitions](../ci/yaml/index.md#extends)
|
||||
defined in [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml)
|
||||
that are scoped to a single [configuration keyword](../ci/yaml/index.md#job-keywords).
|
||||
|
||||
This number can be overridden by setting a CI/CD variable named `RSPEC_FAIL_FAST_TEST_FILE_COUNT_THRESHOLD`.
|
||||
| Job definitions | Description |
|
||||
|------------------|-------------|
|
||||
| `.default-retry` | Allows a job to [retry](../ci/yaml/index.md#retry) upon `unknown_failure`, `api_failure`, `runner_system_failure`, `job_execution_timeout`, or `stuck_or_timeout_failure`. |
|
||||
| `.default-before_script` | Allows a job to use a default `before_script` definition suitable for Ruby/Rails tasks that may need a database running (for example, tests). |
|
||||
| `.setup-test-env-cache` | Allows a job to use a default `cache` definition suitable for setting up test environment for subsequent Ruby/Rails tasks. |
|
||||
| `.rails-cache` | Allows a job to use a default `cache` definition suitable for Ruby/Rails tasks. |
|
||||
| `.static-analysis-cache` | Allows a job to use a default `cache` definition suitable for static analysis tasks. |
|
||||
| `.coverage-cache` | Allows a job to use a default `cache` definition suitable for coverage tasks. |
|
||||
| `.qa-cache` | Allows a job to use a default `cache` definition suitable for QA tasks. |
|
||||
| `.yarn-cache` | Allows a job to use a default `cache` definition suitable for frontend jobs that do a `yarn install`. |
|
||||
| `.assets-compile-cache` | Allows a job to use a default `cache` definition suitable for frontend jobs that compile assets. |
|
||||
| `.use-pg11` | Allows a job to run the `postgres` 11 and `redis` services (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific versions of the services). |
|
||||
| `.use-pg11-ee` | Same as `.use-pg11` but also use an `elasticsearch` service (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific version of the service). |
|
||||
| `.use-pg12` | Allows a job to use the `postgres` 12 and `redis` services (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific versions of the services). |
|
||||
| `.use-pg12-ee` | Same as `.use-pg12` but also use an `elasticsearch` service (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific version of the service). |
|
||||
| `.use-kaniko` | Allows a job to use the `kaniko` tool to build Docker images. |
|
||||
| `.as-if-foss` | Simulate the FOSS project by setting the `FOSS_ONLY='1'` CI/CD variable. |
|
||||
| `.use-docker-in-docker` | Allows a job to use Docker in Docker. |
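
As a sketch of how these definitions are consumed (the job name and script are hypothetical), a test job typically composes several of them via `extends`:

```yaml
# Hypothetical job composing the shared definitions above.
rspec unit pg12:
  extends:
    - .default-retry
    - .default-before_script
    - .rails-cache
    - .use-pg12
  stage: test
  script:
    - bundle exec rspec
```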
|
||||
|
||||
NOTE:
|
||||
This experiment is only enabled when the CI/CD variable `RSPEC_FAIL_FAST_ENABLED=true` is set.
|
||||
### `rules`, `if:` conditions and `changes:` patterns
|
||||
|
||||
#### Determining related test files in a Merge Request
|
||||
We're using the [`rules` keyword](../ci/yaml/index.md#rules) extensively.
|
||||
|
||||
The test files related to the Merge Request are determined using the [`test_file_finder`](https://gitlab.com/gitlab-org/ci-cd/test_file_finder) gem.
|
||||
We use a custom mapping from source files to test files, maintained in the `tests.yml` file.
|
||||
All `rules` definitions are defined in
|
||||
[`rules.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rules.gitlab-ci.yml),
|
||||
then included in individual jobs via [`extends`](../ci/yaml/index.md#extends).
|
||||
|
||||
### RSpec minimal jobs
|
||||
The `rules` definitions are composed of `if:` conditions and `changes:` patterns,
|
||||
which are also defined in
|
||||
[`rules.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rules.gitlab-ci.yml)
|
||||
and included in `rules` definitions via [YAML anchors](../ci/yaml/index.md#anchors)
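
Schematically, a job's `rules` combine an `if:` condition with `changes:` patterns through YAML anchors (all names below are illustrative; the real definitions live in `rules.gitlab-ci.yml`):

```yaml
# Illustrative sketch only.
.if-merge-request: &if-merge-request
  if: '$CI_PIPELINE_SOURCE == "merge_request_event"'

.code-patterns: &code-patterns
  - "app/**/*"
  - "lib/**/*"

.rails:rules:code-changes:
  rules:
    - <<: *if-merge-request
      changes: *code-patterns
```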
|
||||
|
||||
Before a merge request is approved, the pipeline will run a minimal set of RSpec tests that are related to the merge request changes.
|
||||
This is to reduce the pipeline cost and shorten the job duration.
|
||||
#### `if:` conditions
|
||||
|
||||
To identify the minimal set of tests needed, we use [Crystalball gem](https://github.com/toptal/crystalball) to create a test mapping.
|
||||
The test mapping maps each source file to a list of test files that depend on that source file.
|
||||
This mapping is currently generated using a combination of test coverage tracing and a static mapping.
|
||||
In the `detect-tests` job, we use this mapping to identify the minimal tests needed for the current Merge Request.
|
||||
<!-- vale gitlab.Substitutions = NO -->
|
||||
|
||||
After a merge request has been approved, the pipeline would contain the full RSpec tests. This will ensure that all tests
|
||||
have been run before a merge request is merged.
|
||||
| `if:` conditions | Description | Notes |
|
||||
|------------------|-------------|-------|
|
||||
| `if-not-canonical-namespace` | Matches if the project isn't in the canonical (`gitlab-org/`) or security (`gitlab-org/security`) namespace. | Use to create a job for forks (by using `when: on_success|manual`), or **not** create a job for forks (by using `when: never`). |
|
||||
| `if-not-ee` | Matches if the project isn't EE (that is, project name isn't `gitlab` or `gitlab-ee`). | Use to create a job only in the FOSS project (by using `when: on_success|manual`), or **not** create a job if the project is EE (by using `when: never`). |
|
||||
| `if-not-foss` | Matches if the project isn't FOSS (that is, project name isn't `gitlab-foss`, `gitlab-ce`, or `gitlabhq`). | Use to create a job only in the EE project (by using `when: on_success|manual`), or **not** create a job if the project is FOSS (by using `when: never`). |
|
||||
| `if-default-refs` | Matches if the pipeline is for `master`, `main`, `/^[\d-]+-stable(-ee)?$/` (stable branches), `/^\d+-\d+-auto-deploy-\d+$/` (auto-deploy branches), `/^security\//` (security branches), merge requests, and tags. | Note that jobs aren't created for branches with this default configuration. |
|
||||
| `if-master-refs` | Matches if the current branch is `master` or `main`. | |
|
||||
| `if-master-push` | Matches if the current branch is `master` or `main` and pipeline source is `push`. | |
|
||||
| `if-master-schedule-2-hourly` | Matches if the current branch is `master` or `main` and pipeline runs on a 2-hourly schedule. | |
|
||||
| `if-master-schedule-nightly` | Matches if the current branch is `master` or `main` and pipeline runs on a nightly schedule. | |
|
||||
| `if-auto-deploy-branches` | Matches if the current branch is an auto-deploy one. | |
|
||||
| `if-master-or-tag` | Matches if the pipeline is for the `master` or `main` branch or for a tag. | |
|
||||
| `if-merge-request` | Matches if the pipeline is for a merge request. | |
|
||||
| `if-merge-request-title-as-if-foss` | Matches if the pipeline is for a merge request and the MR has label ~"pipeline:run-as-if-foss" | |
|
||||
| `if-merge-request-title-update-caches` | Matches if the pipeline is for a merge request and the MR has label ~"pipeline:update-cache". | |
|
||||
| `if-merge-request-title-run-all-rspec` | Matches if the pipeline is for a merge request and the MR has label ~"pipeline:run-all-rspec". | |
|
||||
| `if-security-merge-request` | Matches if the pipeline is for a security merge request. | |
|
||||
| `if-security-schedule` | Matches if the pipeline is for a security scheduled pipeline. | |
|
||||
| `if-nightly-master-schedule` | Matches if the pipeline is for a `master` scheduled pipeline with `$NIGHTLY` set. | |
|
||||
| `if-dot-com-gitlab-org-schedule` | Limits jobs creation to scheduled pipelines for the `gitlab-org` group on GitLab.com. | |
|
||||
| `if-dot-com-gitlab-org-master` | Limits jobs creation to the `master` or `main` branch for the `gitlab-org` group on GitLab.com. | |
|
||||
| `if-dot-com-gitlab-org-merge-request` | Limits jobs creation to merge requests for the `gitlab-org` group on GitLab.com. | |
|
||||
| `if-dot-com-gitlab-org-and-security-tag` | Limits job creation to tags for the `gitlab-org` and `gitlab-org/security` groups on GitLab.com. | |
|
||||
| `if-dot-com-gitlab-org-and-security-merge-request` | Limit jobs creation to merge requests for the `gitlab-org` and `gitlab-org/security` groups on GitLab.com. | |
|
||||
| `if-dot-com-gitlab-org-and-security-tag` | Limit jobs creation to tags for the `gitlab-org` and `gitlab-org/security` groups on GitLab.com. | |
|
||||
| `if-dot-com-ee-schedule` | Limits jobs to scheduled pipelines for the `gitlab-org/gitlab` project on GitLab.com. | |
|
||||
| `if-cache-credentials-schedule` | Limits jobs to scheduled pipelines with the `$CI_REPO_CACHE_CREDENTIALS` variable set. | |
|
||||
| `if-security-pipeline-merge-result` | Matches if the pipeline is for a security merge request triggered by `@gitlab-release-tools-bot`. | |
|
||||
|
||||
### Jest minimal jobs

<!-- vale gitlab.Substitutions = YES -->

Before a merge request is approved, the pipeline will run a minimal set of Jest tests that are related to the merge request changes.
This is to reduce the pipeline cost and shorten the job duration.

#### `changes:` patterns

To identify the minimal set of tests needed, we pass a list of all the changed files into `jest` using the [`--findRelatedTests`](https://jestjs.io/docs/cli#--findrelatedtests-spaceseparatedlistofsourcefiles) option.
In this mode, `jest` resolves all the dependencies of the changed files, which include the test files that have these files in the dependency chain.
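
For illustration only, a minimal job following this approach could look like the sketch below. The job name and the way the changed files are collected are assumptions, not the actual definition from the GitLab CI configuration:

```yaml
# Hypothetical "related tests only" Jest job; a sketch, not the real definition.
jest-minimal:
  stage: test
  script:
    # Collect the files changed against the merge request's target branch.
    - CHANGED_FILES=$(git diff --name-only "origin/${CI_MERGE_REQUEST_TARGET_BRANCH_NAME}...HEAD")
    # Run only the Jest specs that have these files in their dependency chain.
    - yarn jest --findRelatedTests ${CHANGED_FILES} --passWithNoTests
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
```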

After a merge request has been approved, the pipeline contains the full Jest tests. This ensures that all tests
have been run before a merge request is merged.

In addition, there are a few circumstances where we would always run the full Jest tests:

- when `package.json`, `yarn.lock`, or the `jest` configuration changes
- when vendored JavaScript is changed
- when `.graphql` files are changed

### PostgreSQL versions testing

Our test suite runs against PG12 as GitLab.com runs on PG12 and
[Omnibus defaults to PG12 for new installs and upgrades](../administration/package_information/postgresql_versions.md).

We do run our test suite against PG11 on nightly scheduled pipelines as well as upon specific
database library changes in MRs and `main` pipelines (with the `rspec db-library-code pg11` job).
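
As a rough sketch of how such a PG11 variant could be wired up from the definitions described later in this document: the job name, script path, and rule details below are assumptions for illustration, not the real `rspec db-library-code pg11` definition.

```yaml
# Hypothetical PG11 variant of a database-related job; not the actual
# definition from the GitLab CI configuration.
rspec db-library pg11:
  extends:
    - .default-retry   # retry on transient infrastructure failures
    - .use-pg11        # run against the postgres 11 and redis services
  stage: test
  script:
    - bundle exec rspec spec/lib/gitlab/database
  rules:
    # Assumed conditions: nightly schedules, or database library changes in MRs.
    - if: '$CI_PIPELINE_SOURCE == "schedule" && $NIGHTLY'
    - changes:
        - "db/**/*"
        - "lib/gitlab/database/**/*"
```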

#### Current versions testing

| Where? | PostgreSQL version |
| ------ | ------------------ |
| MRs | 12, 11 for DB library changes |
| `main` (non-scheduled pipelines) | 12, 11 for DB library changes |
| 2-hourly scheduled pipelines | 12, 11 for DB library changes |
| `nightly` scheduled pipelines | 12, 11 |

#### Long-term plan

We follow the [PostgreSQL versions shipped with Omnibus GitLab](../administration/package_information/postgresql_versions.md):

| PostgreSQL version | 13.11 (April 2021) | 13.12 (May 2021) | 14.0 (June 2021?) |
| -------------------| ---------------------- | ---------------------- | ---------------------- |
| PG12 | `nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` |
| PG11 | MRs/`2-hour`/`nightly` | `nightly` | `nightly` |

### Test jobs

Consult [GitLab tests in the Continuous Integration (CI) context](testing_guide/ci.md)
for more information.

We have dedicated jobs for each [testing level](testing_guide/testing_levels.md) and each job runs depending on the
changes made in your merge request.
If you want to force all the RSpec jobs to run regardless of your changes, you can add the `pipeline:run-all-rspec` label to the merge request.

> Forcing all jobs on docs-only MRs would not have the prerequisite jobs and would lead to errors.

### Review app jobs

Consult the [Review Apps](testing_guide/review_apps.md) dedicated page for more information.

### As-if-FOSS jobs

The `* as-if-foss` jobs run the GitLab test suite "as-if-FOSS", meaning as if the jobs were running in the context
of the `gitlab-org/gitlab-foss` project. These jobs are only created in the following cases:

- `gitlab-org/security/gitlab` merge requests.
- Merge requests with the `pipeline:run-as-if-foss` label.
- Merge requests that change the CI configuration.

The `* as-if-foss` jobs are run in addition to the regular EE-context jobs. They have the `FOSS_ONLY='1'` variable
set and get their EE-specific folders removed before the tests start running.

The intent is to ensure that a change doesn't introduce a failure after the `gitlab-org/gitlab` project is synced to
the `gitlab-org/gitlab-foss` project.
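
As a rough sketch (not the actual job definitions, which live in the CI configuration), an as-if-FOSS variant is typically just the regular job with the `.as-if-foss` definition mixed in. The job name `rspec unit` below is a placeholder:

```yaml
# Hypothetical example: "rspec unit" is a placeholder job, not a real
# definition from the GitLab CI configuration.
rspec unit as-if-foss:
  extends:
    - rspec unit          # the regular EE-context job
    - .as-if-foss         # sets FOSS_ONLY: '1' so EE-only code paths are skipped
  rules:
    - if: '$CI_MERGE_REQUEST_LABELS =~ /pipeline:run-as-if-foss/'
```
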
| `changes:` patterns | Description |
|------------------------------|--------------------------------------------------------------------------|
| `ci-patterns` | Only create job for CI configuration-related changes. |
| `ci-build-images-patterns` | Only create job for CI configuration-related changes related to the `build-images` stage. |
| `ci-review-patterns` | Only create job for CI configuration-related changes related to the `review` stage. |
| `ci-qa-patterns` | Only create job for CI configuration-related changes related to the `qa` stage. |
| `yaml-lint-patterns` | Only create job for YAML-related changes. |
| `docs-patterns` | Only create job for docs-related changes. |
| `frontend-dependency-patterns` | Only create job when frontend dependencies are updated (that is, `package.json` and `yarn.lock` changes). |
| `frontend-patterns` | Only create job for frontend-related changes. |
| `backend-patterns` | Only create job for backend-related changes. |
| `db-patterns` | Only create job for DB-related changes. |
| `backstage-patterns` | Only create job for backstage-related changes (that is, Danger, fixtures, RuboCop, specs). |
| `code-patterns` | Only create job for code-related changes. |
| `qa-patterns` | Only create job for QA-related changes. |
| `code-backstage-patterns` | Combination of `code-patterns` and `backstage-patterns`. |
| `code-qa-patterns` | Combination of `code-patterns` and `qa-patterns`. |
| `code-backstage-qa-patterns` | Combination of `code-patterns`, `backstage-patterns`, and `qa-patterns`. |

## Performance

@ -612,177 +786,6 @@ GitLab Team Member, find credentials in the
Note that this bucket should be located in the same continent as the
runner, or [you can incur network egress charges](https://cloud.google.com/storage/pricing).

## CI configuration internals

### Stages

The current stages are (also summarized in the `stages:` sketch after this list):

- `sync`: This stage is used to synchronize changes from [`gitlab-org/gitlab`](https://gitlab.com/gitlab-org/gitlab) to
  [`gitlab-org/gitlab-foss`](https://gitlab.com/gitlab-org/gitlab-foss).
- `prepare`: This stage includes jobs that prepare artifacts that are needed by
  jobs in subsequent stages.
- `build-images`: This stage includes jobs that prepare Docker images
  that are needed by jobs in subsequent stages or downstream pipelines.
- `fixtures`: This stage includes jobs that prepare fixtures needed by frontend tests.
- `test`: This stage includes most of the tests, DB/migration jobs, and static analysis jobs.
- `post-test`: This stage includes jobs that build reports or gather data from
  the `test` stage's jobs (for example, coverage, Knapsack metadata, and so on).
- `review-prepare`: This stage includes a job that builds the CNG images that are
  later used by the (Helm) Review App deployment (see
  [Review Apps](testing_guide/review_apps.md) for details).
- `review`: This stage includes jobs that deploy the GitLab and Docs Review Apps.
- `dast`: This stage includes jobs that run a DAST full scan against the Review App
  that is deployed in stage `review`.
- `qa`: This stage includes jobs that perform QA tasks against the Review App
  that is deployed in stage `review`.
- `post-qa`: This stage includes jobs that build reports or gather data from
  the `qa` stage's jobs (for example, Review App performance report).
- `pages`: This stage includes a job that deploys the various reports as
  GitLab Pages (for example, [`coverage-ruby`](https://gitlab-org.gitlab.io/gitlab/coverage-ruby/),
  and `webpack-report` (found at `https://gitlab-org.gitlab.io/gitlab/webpack-report/`, but there is
  [an issue with the deployment](https://gitlab.com/gitlab-org/gitlab/-/issues/233458)).
- `notify`: This stage includes jobs that notify various failures to Slack.
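
Condensed into the top-level `stages:` declaration, the order above corresponds roughly to the following sketch; the authoritative list lives in `.gitlab-ci.yml`:

```yaml
# Sketch of the stage ordering described above.
stages:
  - sync
  - prepare
  - build-images
  - fixtures
  - test
  - post-test
  - review-prepare
  - review
  - dast
  - qa
  - post-qa
  - pages
  - notify
```
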
### Default image

The default image is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).

<!-- vale gitlab.Spelling = NO -->

It includes Ruby, Go, Git, Git LFS, Chrome, Node, Yarn, PostgreSQL, and Graphics Magick.

<!-- vale gitlab.Spelling = YES -->

The images used in our pipelines are configured in the
[`gitlab-org/gitlab-build-images`](https://gitlab.com/gitlab-org/gitlab-build-images)
project, which is push-mirrored to [`gitlab/gitlab-build-images`](https://dev.gitlab.org/gitlab/gitlab-build-images)
for redundancy.

The current version of the build images can be found in the
["Used by GitLab section"](https://gitlab.com/gitlab-org/gitlab-build-images/blob/master/.gitlab-ci.yml).

### Dependency Proxy

Some of the jobs use images from Docker Hub, where we also use
`${GITLAB_DEPENDENCY_PROXY}` as a prefix to the image path, so that we pull
images from our [Dependency Proxy](../user/packages/dependency_proxy/index.md).

`${GITLAB_DEPENDENCY_PROXY}` is a group CI/CD variable defined in
[`gitlab-org`](https://gitlab.com/gitlab-org) as
`${CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX}/`. This means when we use an image
defined as:

```yaml
image: ${GITLAB_DEPENDENCY_PROXY}alpine:edge
```

Projects in the `gitlab-org` group pull from the Dependency Proxy, while
forks that reside on any other personal namespaces or groups fall back to
Docker Hub unless `${GITLAB_DEPENDENCY_PROXY}` is also defined there.

### Default variables

In addition to the [predefined CI/CD variables](../ci/variables/predefined_variables.md),
each pipeline includes default variables defined in
[`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).

### Common job definitions

Most of the jobs [extend from a few CI definitions](../ci/yaml/index.md#extends)
defined in [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml)
that are scoped to a single [configuration keyword](../ci/yaml/index.md#job-keywords).

| Job definitions | Description |
|------------------|-------------|
| `.default-retry` | Allows a job to [retry](../ci/yaml/index.md#retry) upon `unknown_failure`, `api_failure`, `runner_system_failure`, `job_execution_timeout`, or `stuck_or_timeout_failure`. |
| `.default-before_script` | Allows a job to use a default `before_script` definition suitable for Ruby/Rails tasks that may need a database running (for example, tests). |
| `.setup-test-env-cache` | Allows a job to use a default `cache` definition suitable for setting up the test environment for subsequent Ruby/Rails tasks. |
| `.rails-cache` | Allows a job to use a default `cache` definition suitable for Ruby/Rails tasks. |
| `.static-analysis-cache` | Allows a job to use a default `cache` definition suitable for static analysis tasks. |
| `.coverage-cache` | Allows a job to use a default `cache` definition suitable for coverage tasks. |
| `.qa-cache` | Allows a job to use a default `cache` definition suitable for QA tasks. |
| `.yarn-cache` | Allows a job to use a default `cache` definition suitable for frontend jobs that do a `yarn install`. |
| `.assets-compile-cache` | Allows a job to use a default `cache` definition suitable for frontend jobs that compile assets. |
| `.use-pg11` | Allows a job to use the `postgres` 11 and `redis` services (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific versions of the services). |
| `.use-pg11-ee` | Same as `.use-pg11` but also uses an `elasticsearch` service (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific version of the service). |
| `.use-pg12` | Allows a job to use the `postgres` 12 and `redis` services (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific versions of the services). |
| `.use-pg12-ee` | Same as `.use-pg12` but also uses an `elasticsearch` service (see [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml) for the specific version of the service). |
| `.use-kaniko` | Allows a job to use the `kaniko` tool to build Docker images. |
| `.as-if-foss` | Simulates the FOSS project by setting the `FOSS_ONLY='1'` CI/CD variable. |
| `.use-docker-in-docker` | Allows a job to use Docker in Docker. |
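
For illustration, a job that needs a database-backed Rails environment typically combines several of these definitions through `extends`. The sketch below uses a made-up job name and script; it is not taken from the real configuration:

```yaml
# Hypothetical job showing how the common definitions compose.
rspec-example:
  extends:
    - .default-retry           # retry on transient infrastructure failures
    - .default-before_script   # standard Ruby/Rails setup with a database
    - .rails-cache             # shared Ruby/Rails cache definition
    - .use-pg12                # postgres 12 and redis services
  stage: test
  script:
    - bundle exec rspec spec/models
```
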
### `rules`, `if:` conditions and `changes:` patterns

We're using the [`rules` keyword](../ci/yaml/index.md#rules) extensively.

All `rules` definitions are defined in
[`rules.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rules.gitlab-ci.yml),
then included in individual jobs via [`extends`](../ci/yaml/index.md#extends).

The `rules` definitions are composed of `if:` conditions and `changes:` patterns,
which are also defined in
[`rules.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rules.gitlab-ci.yml)
and included in `rules` definitions via [YAML anchors](../ci/yaml/index.md#anchors).
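
A heavily condensed sketch of this composition follows. The `if-merge-request` and `code-patterns` names mirror the tables in this document, but the concrete conditions, the rule-group name, and the job are illustrative, not copied from `rules.gitlab-ci.yml`:

```yaml
# Hypothetical, condensed version of the anchor/extends pattern described above.
.if-merge-request: &if-merge-request
  if: '$CI_PIPELINE_SOURCE == "merge_request_event"'

.code-patterns: &code-patterns
  - "{app,lib,spec}/**/*"

.rails:rules:unit:                 # a reusable rules definition
  rules:
    - <<: *if-merge-request       # merge the if: condition via a YAML anchor
      changes: *code-patterns     # reuse the changes: pattern via a YAML anchor

rspec unit:
  extends:
    - .rails:rules:unit           # the job picks up the rules via extends
  script:
    - bundle exec rspec
```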

#### `if:` conditions

<!-- vale gitlab.Substitutions = NO -->

| `if:` conditions | Description | Notes |
|------------------|-------------|-------|
| `if-not-canonical-namespace` | Matches if the project isn't in the canonical (`gitlab-org/`) or security (`gitlab-org/security`) namespace. | Use to create a job for forks (by using `when: on_success|manual`), or **not** create a job for forks (by using `when: never`). |
| `if-not-ee` | Matches if the project isn't EE (that is, project name isn't `gitlab` or `gitlab-ee`). | Use to create a job only in the FOSS project (by using `when: on_success|manual`), or **not** create a job if the project is EE (by using `when: never`). |
| `if-not-foss` | Matches if the project isn't FOSS (that is, project name isn't `gitlab-foss`, `gitlab-ce`, or `gitlabhq`). | Use to create a job only in the EE project (by using `when: on_success|manual`), or **not** create a job if the project is FOSS (by using `when: never`). |
| `if-default-refs` | Matches if the pipeline is for `master`, `main`, `/^[\d-]+-stable(-ee)?$/` (stable branches), `/^\d+-\d+-auto-deploy-\d+$/` (auto-deploy branches), `/^security\//` (security branches), merge requests, and tags. | Note that jobs aren't created for branches with this default configuration. |
| `if-master-refs` | Matches if the current branch is `master` or `main`. | |
| `if-master-push` | Matches if the current branch is `master` or `main` and pipeline source is `push`. | |
| `if-master-schedule-2-hourly` | Matches if the current branch is `master` or `main` and pipeline runs on a 2-hourly schedule. | |
| `if-master-schedule-nightly` | Matches if the current branch is `master` or `main` and pipeline runs on a nightly schedule. | |
| `if-auto-deploy-branches` | Matches if the current branch is an auto-deploy one. | |
| `if-master-or-tag` | Matches if the pipeline is for the `master` or `main` branch or for a tag. | |
| `if-merge-request` | Matches if the pipeline is for a merge request. | |
| `if-merge-request-title-as-if-foss` | Matches if the pipeline is for a merge request and the MR has label ~"pipeline:run-as-if-foss". | |
| `if-merge-request-title-update-caches` | Matches if the pipeline is for a merge request and the MR has label ~"pipeline:update-cache". | |
| `if-merge-request-title-run-all-rspec` | Matches if the pipeline is for a merge request and the MR has label ~"pipeline:run-all-rspec". | |
| `if-security-merge-request` | Matches if the pipeline is for a security merge request. | |
| `if-security-schedule` | Matches if the pipeline is for a security scheduled pipeline. | |
| `if-nightly-master-schedule` | Matches if the pipeline is for a `master` scheduled pipeline with `$NIGHTLY` set. | |
| `if-dot-com-gitlab-org-schedule` | Limits job creation to scheduled pipelines for the `gitlab-org` group on GitLab.com. | |
| `if-dot-com-gitlab-org-master` | Limits job creation to the `master` or `main` branch for the `gitlab-org` group on GitLab.com. | |
| `if-dot-com-gitlab-org-merge-request` | Limits job creation to merge requests for the `gitlab-org` group on GitLab.com. | |
| `if-dot-com-gitlab-org-and-security-tag` | Limits job creation to tags for the `gitlab-org` and `gitlab-org/security` groups on GitLab.com. | |
| `if-dot-com-gitlab-org-and-security-merge-request` | Limits job creation to merge requests for the `gitlab-org` and `gitlab-org/security` groups on GitLab.com. | |
| `if-dot-com-ee-schedule` | Limits jobs to scheduled pipelines for the `gitlab-org/gitlab` project on GitLab.com. | |
| `if-cache-credentials-schedule` | Limits jobs to scheduled pipelines with the `$CI_REPO_CACHE_CREDENTIALS` variable set. | |
| `if-rspec-fail-fast-disabled` | Limits jobs to pipelines with `$RSPEC_FAIL_FAST_ENABLED` CI/CD variable not set to `"true"`. | |
| `if-rspec-fail-fast-skipped` | Matches if the pipeline is for a merge request and the MR has label ~"pipeline:skip-rspec-fail-fast". | |
| `if-security-pipeline-merge-result` | Matches if the pipeline is for a security merge request triggered by `@gitlab-release-tools-bot`. | |

<!-- vale gitlab.Substitutions = YES -->

#### `changes:` patterns

| `changes:` patterns | Description |
|------------------------------|--------------------------------------------------------------------------|
| `ci-patterns` | Only create job for CI configuration-related changes. |
| `ci-build-images-patterns` | Only create job for CI configuration-related changes related to the `build-images` stage. |
| `ci-review-patterns` | Only create job for CI configuration-related changes related to the `review` stage. |
| `ci-qa-patterns` | Only create job for CI configuration-related changes related to the `qa` stage. |
| `yaml-lint-patterns` | Only create job for YAML-related changes. |
| `docs-patterns` | Only create job for docs-related changes. |
| `frontend-dependency-patterns` | Only create job when frontend dependencies are updated (that is, `package.json` and `yarn.lock` changes). |
| `frontend-patterns` | Only create job for frontend-related changes. |
| `backend-patterns` | Only create job for backend-related changes. |
| `db-patterns` | Only create job for DB-related changes. |
| `backstage-patterns` | Only create job for backstage-related changes (that is, Danger, fixtures, RuboCop, specs). |
| `code-patterns` | Only create job for code-related changes. |
| `qa-patterns` | Only create job for QA-related changes. |
| `code-backstage-patterns` | Combination of `code-patterns` and `backstage-patterns`. |
| `code-qa-patterns` | Combination of `code-patterns` and `qa-patterns`. |
| `code-backstage-qa-patterns` | Combination of `code-patterns`, `backstage-patterns`, and `qa-patterns`. |

---

[Return to Development documentation](index.md)

@ -261,7 +261,7 @@ module Gitlab
|
|||
project: job.project,
|
||||
file_type: :trace,
|
||||
file: stream,
|
||||
file_sha256: self.class.hexdigest(path))
|
||||
file_sha256: self.class.sha256_hexdigest(path))
|
||||
|
||||
trace_metadata.track_archival!(trace_artifact.id)
|
||||
end
|
||||
|
|
|
@ -28,7 +28,7 @@ module Gitlab
|
|||
end
|
||||
|
||||
def actual_checksum(upload)
|
||||
Upload.hexdigest(upload.absolute_path)
|
||||
Upload.sha256_hexdigest(upload.absolute_path)
|
||||
end
|
||||
|
||||
def remote_object_exists?(upload)
|
||||
|
|
|
@ -2915,9 +2915,15 @@ msgstr ""
|
|||
msgid "After you've reviewed these contribution guidelines, you'll be all set to"
|
||||
msgstr ""
|
||||
|
||||
msgid "Akismet"
|
||||
msgstr ""
|
||||
|
||||
msgid "Akismet API Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Akismet helps prevent the creation of spam issues in public projects."
|
||||
msgstr ""
|
||||
|
||||
msgid "AlertManagement|Acknowledged"
|
||||
msgstr ""
|
||||
|
||||
|
@ -8533,6 +8539,9 @@ msgstr ""
|
|||
msgid "Configure %{repository_checks_link_start}repository checks%{link_end} and %{housekeeping_link_start}housekeeping%{link_end} on repositories."
|
||||
msgstr ""
|
||||
|
||||
msgid "Configure CAPTCHAs, IP address limits, and other anti-spam measures."
|
||||
msgstr ""
|
||||
|
||||
msgid "Configure Dependency Scanning in `.gitlab-ci.yml` using the GitLab managed template. You can [add variable overrides](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings) to customize Dependency Scanning settings."
|
||||
msgstr ""
|
||||
|
||||
|
@ -12629,10 +12638,7 @@ msgstr ""
|
|||
msgid "Enable reCAPTCHA"
|
||||
msgstr ""
|
||||
|
||||
msgid "Enable reCAPTCHA for login"
|
||||
msgstr ""
|
||||
|
||||
msgid "Enable reCAPTCHA, Invisible Captcha, Akismet and set IP limits. For reCAPTCHA, we currently only support %{recaptcha_v2_link_start}v2%{recaptcha_v2_link_end}"
|
||||
msgid "Enable reCAPTCHA for login."
|
||||
msgstr ""
|
||||
|
||||
msgid "Enable repository checks"
|
||||
|
@ -16706,10 +16712,10 @@ msgstr ""
|
|||
msgid "Helps prevent bots from creating accounts."
|
||||
msgstr ""
|
||||
|
||||
msgid "Helps prevent bots from creating issues"
|
||||
msgid "Helps prevent bots from creating issues."
|
||||
msgstr ""
|
||||
|
||||
msgid "Helps prevent malicious users hide their activity"
|
||||
msgid "Helps prevent malicious users hide their activity."
|
||||
msgstr ""
|
||||
|
||||
msgid "Helps reduce request volume (for example, from crawlers or abusive bots)"
|
||||
|
@ -16833,6 +16839,12 @@ msgstr ""
|
|||
msgid "Housekeeping, export, path, transfer, remove, archive."
|
||||
msgstr ""
|
||||
|
||||
msgid "How do I configure Akismet?"
|
||||
msgstr ""
|
||||
|
||||
msgid "How do I configure it?"
|
||||
msgstr ""
|
||||
|
||||
msgid "How do I configure runners?"
|
||||
msgstr ""
|
||||
|
||||
|
@ -16866,7 +16878,7 @@ msgstr ""
|
|||
msgid "How many days need to pass between marking entity for deletion and actual removing it."
|
||||
msgstr ""
|
||||
|
||||
msgid "How many seconds an IP will be counted towards the limit"
|
||||
msgid "How many seconds an IP counts toward the IP address limit."
|
||||
msgstr ""
|
||||
|
||||
msgid "How the job limiter handles jobs exceeding the thresholds specified below. The 'track' mode only logs the jobs. The 'compress' mode compresses the jobs and raises an exception if the compressed size exceeds the limit."
|
||||
|
@ -16956,15 +16968,18 @@ msgstr ""
|
|||
msgid "IP Address"
|
||||
msgstr ""
|
||||
|
||||
msgid "IP expiration time"
|
||||
msgid "IP address expiration time"
|
||||
msgstr ""
|
||||
|
||||
msgid "IP address restrictions"
|
||||
msgstr ""
|
||||
|
||||
msgid "IP addresses per user"
|
||||
msgstr ""
|
||||
|
||||
msgid "IP subnet restriction only allowed for top-level groups"
|
||||
msgstr ""
|
||||
|
||||
msgid "IPs per user"
|
||||
msgstr ""
|
||||
|
||||
msgid "Identifier"
|
||||
msgstr ""
|
||||
|
||||
|
@ -18455,6 +18470,12 @@ msgstr ""
|
|||
msgid "Investigate vulnerability: %{title}"
|
||||
msgstr ""
|
||||
|
||||
msgid "Invisible Captcha"
|
||||
msgstr ""
|
||||
|
||||
msgid "Invisible Captcha helps prevent the creation of spam accounts. It adds a honeypot field and time-sensitive form submission to the account signup form."
|
||||
msgstr ""
|
||||
|
||||
msgid "Invitation"
|
||||
msgstr ""
|
||||
|
||||
|
@ -20155,10 +20176,10 @@ msgstr ""
|
|||
msgid "LicenseCompliance|Acceptable license to be used in the project"
|
||||
msgstr ""
|
||||
|
||||
msgid "LicenseCompliance|Add a license"
|
||||
msgid "LicenseCompliance|Add license and related policy"
|
||||
msgstr ""
|
||||
|
||||
msgid "LicenseCompliance|Add license and related policy"
|
||||
msgid "LicenseCompliance|Add license policy"
|
||||
msgstr ""
|
||||
|
||||
msgid "LicenseCompliance|Allow"
|
||||
|
@ -20314,7 +20335,7 @@ msgstr ""
|
|||
msgid "Limit namespaces and projects that can be indexed"
|
||||
msgstr ""
|
||||
|
||||
msgid "Limit sign in from multiple ips"
|
||||
msgid "Limit sign in from multiple IP addresses"
|
||||
msgstr ""
|
||||
|
||||
msgid "Limit the number of concurrent operations this secondary site can run in the background."
|
||||
|
@ -20958,7 +20979,7 @@ msgstr ""
|
|||
msgid "Maximum number of projects."
|
||||
msgstr ""
|
||||
|
||||
msgid "Maximum number of unique IPs per user"
|
||||
msgid "Maximum number of unique IP addresses per user."
|
||||
msgstr ""
|
||||
|
||||
msgid "Maximum page reached"
|
||||
|
@ -23738,6 +23759,9 @@ msgstr ""
|
|||
msgid "Only projects created under a Ultimate license are available in Security Dashboards."
|
||||
msgstr ""
|
||||
|
||||
msgid "Only reCAPTCHA v2 is supported:"
|
||||
msgstr ""
|
||||
|
||||
msgid "Only verified users with an email address in any of these domains can be added to the group."
|
||||
msgstr ""
|
||||
|
||||
|
@ -27782,6 +27806,9 @@ msgstr ""
|
|||
msgid "Read more about related issues"
|
||||
msgstr ""
|
||||
|
||||
msgid "Read their documentation."
|
||||
msgstr ""
|
||||
|
||||
msgid "Ready to get started with GitLab? Follow these steps to set up your workspace, plan and commit changes, and deploy your project."
|
||||
msgstr ""
|
||||
|
||||
|
@ -31965,7 +31992,10 @@ msgstr ""
|
|||
msgid "SourcegraphPreferences|Uses a custom %{linkStart}Sourcegraph instance%{linkEnd}."
|
||||
msgstr ""
|
||||
|
||||
msgid "Spam Check API Key"
|
||||
msgid "Spam Check"
|
||||
msgstr ""
|
||||
|
||||
msgid "Spam Check API key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Spam Logs"
|
||||
|
@ -33508,7 +33538,7 @@ msgid_plural "The %{type} contains the following errors:"
|
|||
msgstr[0] ""
|
||||
msgstr[1] ""
|
||||
|
||||
msgid "The API key used by GitLab for accessing the Spam Check service endpoint"
|
||||
msgid "The API key used by GitLab for accessing the Spam Check service endpoint."
|
||||
msgstr ""
|
||||
|
||||
msgid "The GitLab subscription service (customers.gitlab.com) is currently experiencing an outage. You can monitor the status and get updates at %{linkStart}status.gitlab.com%{linkEnd}."
|
||||
|
@ -40764,10 +40794,16 @@ msgstr ""
|
|||
msgid "quick actions"
|
||||
msgstr ""
|
||||
|
||||
msgid "reCAPTCHA Private Key"
|
||||
msgid "reCAPTCHA"
|
||||
msgstr ""
|
||||
|
||||
msgid "reCAPTCHA Site Key"
|
||||
msgid "reCAPTCHA helps prevent credential stuffing."
|
||||
msgstr ""
|
||||
|
||||
msgid "reCAPTCHA private key"
|
||||
msgstr ""
|
||||
|
||||
msgid "reCAPTCHA site key"
|
||||
msgstr ""
|
||||
|
||||
msgid "recent activity"
|
||||
|
|
|
@ -8,7 +8,7 @@ module QA
|
|||
attr_accessor :name
|
||||
|
||||
# The user for which the personal access token is to be created
|
||||
# This *could* be different than the api_client.user or the api_user provided by the QA::Resource::ApiFabricator module
|
||||
# This *could* be different than the api_client.user or the api_user provided by the QA::Resource::ApiFabricator
|
||||
attr_writer :user
|
||||
|
||||
attribute :token
|
||||
|
@ -17,7 +17,9 @@ module QA
|
|||
# If Runtime::Env.admin_personal_access_token is provided, fabricate via the API,
|
||||
# else, fabricate via the browser.
|
||||
def fabricate_via_api!
|
||||
@token = QA::Resource::PersonalAccessTokenCache.get_token_for_username(user.username)
|
||||
QA::Resource::PersonalAccessTokenCache.get_token_for_username(user.username).tap do |cached_token|
|
||||
@token = cached_token if cached_token
|
||||
end
|
||||
return if @token
|
||||
|
||||
resource = if Runtime::Env.admin_personal_access_token && !@user.nil?
|
||||
|
@ -28,7 +30,7 @@ module QA
|
|||
fabricate!
|
||||
end
|
||||
|
||||
QA::Resource::PersonalAccessTokenCache.set_token_for_username(user.username, self.token)
|
||||
QA::Resource::PersonalAccessTokenCache.set_token_for_username(user.username, token)
|
||||
resource
|
||||
end
|
||||
|
||||
|
|
|
@ -16,17 +16,21 @@ module QA
|
|||
enable_ip_limits if ip_limits
|
||||
end
|
||||
|
||||
# Personal access token
|
||||
#
|
||||
# It is possible to set the environment variable GITLAB_QA_ACCESS_TOKEN
|
||||
# to use a specific access token rather than create one from the UI
|
||||
# unless a specific user has been passed
|
||||
#
|
||||
# @return [String]
|
||||
def personal_access_token
|
||||
@personal_access_token ||= begin
|
||||
# you can set the environment variable GITLAB_QA_ACCESS_TOKEN
|
||||
# to use a specific access token rather than create one from the UI
|
||||
# unless a specific user has been passed
|
||||
@user.nil? ? Runtime::Env.personal_access_token ||= create_personal_access_token : create_personal_access_token
|
||||
end
|
||||
@personal_access_token ||= if user.nil?
|
||||
Runtime::Env.personal_access_token ||= create_personal_access_token
|
||||
else
|
||||
create_personal_access_token
|
||||
end
|
||||
|
||||
if @user&.admin?
|
||||
Runtime::Env.admin_personal_access_token = @personal_access_token
|
||||
end
|
||||
Runtime::Env.admin_personal_access_token = @personal_access_token if user&.admin? # rubocop:disable Cop/UserAdmin
|
||||
|
||||
@personal_access_token
|
||||
end
|
||||
|
@ -82,27 +86,38 @@ module QA
|
|||
Page::Main::Menu.perform(&:sign_out)
|
||||
end
|
||||
|
||||
# Create PAT
|
||||
#
|
||||
# Use api if admin personal access token is present and skip any UI actions otherwise perform creation via UI
|
||||
#
|
||||
# @return [String]
|
||||
def create_personal_access_token
|
||||
signed_in_initially = Page::Main::Menu.perform(&:signed_in?)
|
||||
if Runtime::Env.admin_personal_access_token
|
||||
Resource::PersonalAccessToken.fabricate_via_api! do |pat|
|
||||
pat.user = user
|
||||
end.token
|
||||
else
|
||||
signed_in_initially = Page::Main::Menu.perform(&:signed_in?)
|
||||
|
||||
Page::Main::Menu.perform(&:sign_out) if @is_new_session && signed_in_initially
|
||||
Page::Main::Menu.perform(&:sign_out) if @is_new_session && signed_in_initially
|
||||
|
||||
token = Resource::PersonalAccessToken.fabricate! do |pat|
|
||||
pat.user = user
|
||||
end.token
|
||||
token = Resource::PersonalAccessToken.fabricate! do |pat|
|
||||
pat.user = user
|
||||
end.token
|
||||
|
||||
# If this is a new session, that tests that follow could fail if they
|
||||
# try to sign in without starting a new session.
|
||||
# Also, if the browser wasn't already signed in, leaving it
|
||||
# signed in could cause tests to fail when they try to sign
|
||||
# in again. For example, that would happen if a test has a
|
||||
# before(:context) block that fabricates via the API, and
|
||||
# it's the first test to run so it creates an access token
|
||||
#
|
||||
# Sign out so the tests can successfully sign in
|
||||
Page::Main::Menu.perform(&:sign_out) if @is_new_session || !signed_in_initially
|
||||
# If this is a new session, that tests that follow could fail if they
|
||||
# try to sign in without starting a new session.
|
||||
# Also, if the browser wasn't already signed in, leaving it
|
||||
# signed in could cause tests to fail when they try to sign
|
||||
# in again. For example, that would happen if a test has a
|
||||
# before(:context) block that fabricates via the API, and
|
||||
# it's the first test to run so it creates an access token
|
||||
#
|
||||
# Sign out so the tests can successfully sign in
|
||||
Page::Main::Menu.perform(&:sign_out) if @is_new_session || !signed_in_initially
|
||||
|
||||
token
|
||||
token
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -21,82 +21,94 @@ RSpec.describe GroupTree do
|
|||
end
|
||||
|
||||
describe 'GET #index' do
|
||||
it 'filters groups' do
|
||||
other_group = create(:group, name: 'filter')
|
||||
other_group.add_owner(user)
|
||||
|
||||
get :index, params: { filter: 'filt' }, format: :json
|
||||
|
||||
expect(assigns(:groups)).to contain_exactly(other_group)
|
||||
end
|
||||
|
||||
context 'for subgroups' do
|
||||
it 'only renders root groups when no parent was given' do
|
||||
create(:group, :public, parent: group)
|
||||
|
||||
get :index, format: :json
|
||||
|
||||
expect(assigns(:groups)).to contain_exactly(group)
|
||||
end
|
||||
|
||||
it 'contains only the subgroup when a parent was given' do
|
||||
subgroup = create(:group, :public, parent: group)
|
||||
|
||||
get :index, params: { parent_id: group.id }, format: :json
|
||||
|
||||
expect(assigns(:groups)).to contain_exactly(subgroup)
|
||||
end
|
||||
|
||||
it 'allows filtering for subgroups and includes the parents for rendering' do
|
||||
subgroup = create(:group, :public, parent: group, name: 'filter')
|
||||
shared_examples 'returns filtered groups' do
|
||||
it 'filters groups' do
|
||||
other_group = create(:group, name: 'filter')
|
||||
other_group.add_owner(user)
|
||||
|
||||
get :index, params: { filter: 'filt' }, format: :json
|
||||
|
||||
expect(assigns(:groups)).to contain_exactly(group, subgroup)
|
||||
expect(assigns(:groups)).to contain_exactly(other_group)
|
||||
end
|
||||
|
||||
it 'does not include groups the user does not have access to' do
|
||||
parent = create(:group, :private)
|
||||
subgroup = create(:group, :private, parent: parent, name: 'filter')
|
||||
subgroup.add_developer(user)
|
||||
_other_subgroup = create(:group, :private, parent: parent, name: 'filte')
|
||||
context 'for subgroups' do
|
||||
it 'only renders root groups when no parent was given' do
|
||||
create(:group, :public, parent: group)
|
||||
|
||||
get :index, params: { filter: 'filt' }, format: :json
|
||||
get :index, format: :json
|
||||
|
||||
expect(assigns(:groups)).to contain_exactly(parent, subgroup)
|
||||
end
|
||||
expect(assigns(:groups)).to contain_exactly(group)
|
||||
end
|
||||
|
||||
it 'preloads parents regardless of pagination' do
|
||||
allow(Kaminari.config).to receive(:default_per_page).and_return(1)
|
||||
group = create(:group, :public)
|
||||
subgroup = create(:group, :public, parent: group)
|
||||
search_result = create(:group, :public, name: 'result', parent: subgroup)
|
||||
it 'contains only the subgroup when a parent was given' do
|
||||
subgroup = create(:group, :public, parent: group)
|
||||
|
||||
get :index, params: { filter: 'resu' }, format: :json
|
||||
get :index, params: { parent_id: group.id }, format: :json
|
||||
|
||||
expect(assigns(:groups)).to contain_exactly(group, subgroup, search_result)
|
||||
end
|
||||
end
|
||||
expect(assigns(:groups)).to contain_exactly(subgroup)
|
||||
end
|
||||
|
||||
context 'json content' do
|
||||
it 'shows groups as json' do
|
||||
get :index, format: :json
|
||||
|
||||
expect(json_response.first['id']).to eq(group.id)
|
||||
end
|
||||
|
||||
context 'nested groups' do
|
||||
it 'expands the tree when filtering' do
|
||||
it 'allows filtering for subgroups and includes the parents for rendering' do
|
||||
subgroup = create(:group, :public, parent: group, name: 'filter')
|
||||
|
||||
get :index, params: { filter: 'filt' }, format: :json
|
||||
|
||||
children_response = json_response.first['children']
|
||||
expect(assigns(:groups)).to contain_exactly(group, subgroup)
|
||||
end
|
||||
|
||||
it 'does not include groups the user does not have access to' do
|
||||
parent = create(:group, :private)
|
||||
subgroup = create(:group, :private, parent: parent, name: 'filter')
|
||||
subgroup.add_developer(user)
|
||||
_other_subgroup = create(:group, :private, parent: parent, name: 'filte')
|
||||
|
||||
get :index, params: { filter: 'filt' }, format: :json
|
||||
|
||||
expect(assigns(:groups)).to contain_exactly(parent, subgroup)
|
||||
end
|
||||
|
||||
it 'preloads parents regardless of pagination' do
|
||||
allow(Kaminari.config).to receive(:default_per_page).and_return(1)
|
||||
group = create(:group, :public)
|
||||
subgroup = create(:group, :public, parent: group)
|
||||
search_result = create(:group, :public, name: 'result', parent: subgroup)
|
||||
|
||||
get :index, params: { filter: 'resu' }, format: :json
|
||||
|
||||
expect(assigns(:groups)).to contain_exactly(group, subgroup, search_result)
|
||||
end
|
||||
end
|
||||
|
||||
context 'json content' do
|
||||
it 'shows groups as json' do
|
||||
get :index, format: :json
|
||||
|
||||
expect(json_response.first['id']).to eq(group.id)
|
||||
expect(children_response.first['id']).to eq(subgroup.id)
|
||||
end
|
||||
|
||||
context 'nested groups' do
|
||||
it 'expands the tree when filtering' do
|
||||
subgroup = create(:group, :public, parent: group, name: 'filter')
|
||||
|
||||
get :index, params: { filter: 'filt' }, format: :json
|
||||
|
||||
children_response = json_response.first['children']
|
||||
|
||||
expect(json_response.first['id']).to eq(group.id)
|
||||
expect(children_response.first['id']).to eq(subgroup.id)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
it_behaves_like 'returns filtered groups'
|
||||
|
||||
context 'when feature flag :linear_group_tree_ancestor_scopes is disabled' do
|
||||
before do
|
||||
stub_feature_flags(linear_group_tree_ancestor_scopes: false)
|
||||
end
|
||||
|
||||
it_behaves_like 'returns filtered groups'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -450,14 +450,14 @@ RSpec.describe 'Admin updates settings' do
|
|||
visit reporting_admin_application_settings_path
|
||||
|
||||
page.within('.as-spam') do
|
||||
fill_in 'reCAPTCHA Site Key', with: 'key'
|
||||
fill_in 'reCAPTCHA Private Key', with: 'key'
|
||||
fill_in 'reCAPTCHA site key', with: 'key'
|
||||
fill_in 'reCAPTCHA private key', with: 'key'
|
||||
check 'Enable reCAPTCHA'
|
||||
check 'Enable reCAPTCHA for login'
|
||||
fill_in 'IPs per user', with: 15
|
||||
fill_in 'IP addresses per user', with: 15
|
||||
check 'Enable Spam Check via external API endpoint'
|
||||
fill_in 'URL of the external Spam Check endpoint', with: 'grpc://www.example.com/spamcheck'
|
||||
fill_in 'Spam Check API Key', with: 'SPAM_CHECK_API_KEY'
|
||||
fill_in 'Spam Check API key', with: 'SPAM_CHECK_API_KEY'
|
||||
click_button 'Save changes'
|
||||
end
|
||||
|
||||
|
|
|
@ -36,7 +36,7 @@ RSpec.describe 'Merge requests > User merges immediately', :js do
|
|||
Sidekiq::Testing.fake! do
|
||||
click_button 'Merge immediately'
|
||||
|
||||
expect(find('.accept-merge-request.btn-confirm')).to have_content('Merge in progress')
|
||||
expect(find('.media-body h4')).to have_content('Merging!')
|
||||
|
||||
wait_for_requests
|
||||
end
|
||||
|
|
|
@ -45,6 +45,8 @@ const createTestMr = (customConfig) => {
|
|||
preferredAutoMergeStrategy: MWPS_MERGE_STRATEGY,
|
||||
availableAutoMergeStrategies: [MWPS_MERGE_STRATEGY],
|
||||
mergeImmediatelyDocsPath: 'path/to/merge/immediately/docs',
|
||||
transitionStateMachine: () => eventHub.$emit('StateMachineValueChanged', { value: 'value' }),
|
||||
translateStateToMachine: () => this.transitionStateMachine(),
|
||||
};
|
||||
|
||||
Object.assign(mr, customConfig.mr);
|
||||
|
|
|
@ -13,11 +13,19 @@ RSpec.describe Checksummable do
|
|||
end
|
||||
end
|
||||
|
||||
describe ".hexdigest" do
|
||||
describe ".sha256_hexdigest" do
|
||||
it 'returns the SHA256 sum of the file' do
|
||||
expected = Digest::SHA256.file(__FILE__).hexdigest
|
||||
|
||||
expect(subject.hexdigest(__FILE__)).to eq(expected)
|
||||
expect(subject.sha256_hexdigest(__FILE__)).to eq(expected)
|
||||
end
|
||||
end
|
||||
|
||||
describe ".md5_hexdigest" do
|
||||
it 'returns the MD5 sum of the file' do
|
||||
expected = Digest::MD5.file(__FILE__).hexdigest
|
||||
|
||||
expect(subject.md5_hexdigest(__FILE__)).to eq(expected)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -2608,17 +2608,29 @@ RSpec.describe Group do
|
|||
end
|
||||
|
||||
describe '.ids_with_disabled_email' do
|
||||
let!(:parent_1) { create(:group, emails_disabled: true) }
|
||||
let!(:child_1) { create(:group, parent: parent_1) }
|
||||
let_it_be(:parent_1) { create(:group, emails_disabled: true) }
|
||||
let_it_be(:child_1) { create(:group, parent: parent_1) }
|
||||
|
||||
let!(:parent_2) { create(:group, emails_disabled: false) }
|
||||
let!(:child_2) { create(:group, parent: parent_2) }
|
||||
let_it_be(:parent_2) { create(:group, emails_disabled: false) }
|
||||
let_it_be(:child_2) { create(:group, parent: parent_2) }
|
||||
|
||||
let!(:other_group) { create(:group, emails_disabled: false) }
|
||||
let_it_be(:other_group) { create(:group, emails_disabled: false) }
|
||||
|
||||
subject(:group_ids_where_email_is_disabled) { described_class.ids_with_disabled_email([child_1, child_2, other_group]) }
|
||||
shared_examples 'returns namespaces with disabled email' do
|
||||
subject(:group_ids_where_email_is_disabled) { described_class.ids_with_disabled_email([child_1, child_2, other_group]) }
|
||||
|
||||
it { is_expected.to eq(Set.new([child_1.id])) }
|
||||
it { is_expected.to eq(Set.new([child_1.id])) }
|
||||
end
|
||||
|
||||
it_behaves_like 'returns namespaces with disabled email'
|
||||
|
||||
context 'when feature flag :linear_group_ancestor_scopes is disabled' do
|
||||
before do
|
||||
stub_feature_flags(linear_group_ancestor_scopes: false)
|
||||
end
|
||||
|
||||
it_behaves_like 'returns namespaces with disabled email'
|
||||
end
|
||||
end
|
||||
|
||||
describe '.timelogs' do
|
||||
|
|
|
@ -30,129 +30,6 @@ RSpec.describe Clusters::ClusterPresenter do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#item_link' do
|
||||
let(:clusterable_presenter) { double('ClusterablePresenter', subject: clusterable) }
|
||||
|
||||
subject { presenter.item_link(clusterable_presenter) }
|
||||
|
||||
context 'for a group cluster' do
|
||||
let(:cluster) { create(:cluster, cluster_type: :group_type, groups: [group]) }
|
||||
let(:group) { create(:group, name: 'Foo') }
|
||||
let(:cluster_link) { "<a href=\"#{group_cluster_path(cluster.group, cluster)}\">#{cluster.name}</a>" }
|
||||
|
||||
before do
|
||||
group.add_maintainer(user)
|
||||
end
|
||||
|
||||
shared_examples 'ancestor clusters' do
|
||||
context 'ancestor clusters' do
|
||||
let(:root_group) { create(:group, name: 'Root Group') }
|
||||
let(:parent) { create(:group, name: 'parent', parent: root_group) }
|
||||
let(:child) { create(:group, name: 'child', parent: parent) }
|
||||
let(:group) { create(:group, name: 'group', parent: child) }
|
||||
|
||||
before do
|
||||
root_group.add_maintainer(user)
|
||||
end
|
||||
|
||||
context 'top level group cluster' do
|
||||
let(:cluster) { create(:cluster, cluster_type: :group_type, groups: [root_group]) }
|
||||
|
||||
it 'returns full group names and link for cluster' do
|
||||
expect(subject).to eq("Root Group / #{cluster_link}")
|
||||
end
|
||||
|
||||
it 'is html safe' do
|
||||
expect(presenter).to receive(:sanitize).with('Root Group').and_call_original
|
||||
|
||||
expect(subject).to be_html_safe
|
||||
end
|
||||
end
|
||||
|
||||
context 'first level group cluster' do
|
||||
let(:cluster) { create(:cluster, cluster_type: :group_type, groups: [parent]) }
|
||||
|
||||
it 'returns full group names and link for cluster' do
|
||||
expect(subject).to eq("Root Group / parent / #{cluster_link}")
|
||||
end
|
||||
|
||||
it 'is html safe' do
|
||||
expect(presenter).to receive(:sanitize).with('Root Group / parent').and_call_original
|
||||
|
||||
expect(subject).to be_html_safe
|
||||
end
|
||||
end
|
||||
|
||||
context 'second level group cluster' do
|
||||
let(:cluster) { create(:cluster, cluster_type: :group_type, groups: [child]) }
|
||||
|
||||
let(:ellipsis_h) do
|
||||
/.*ellipsis_h.*/
|
||||
end
|
||||
|
||||
it 'returns clipped group names and link for cluster' do
|
||||
expect(subject).to match("Root Group / #{ellipsis_h} / child / #{cluster_link}")
|
||||
end
|
||||
|
||||
it 'is html safe' do
|
||||
expect(presenter).to receive(:sanitize).with('Root Group / parent / child').and_call_original
|
||||
|
||||
expect(subject).to be_html_safe
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'for a project clusterable' do
|
||||
let(:clusterable) { project }
|
||||
let(:project) { create(:project, group: group) }
|
||||
|
||||
it 'returns the group name and the link for cluster' do
|
||||
expect(subject).to eq("Foo / #{cluster_link}")
|
||||
end
|
||||
|
||||
it 'is html safe' do
|
||||
expect(presenter).to receive(:sanitize).with('Foo').and_call_original
|
||||
|
||||
expect(subject).to be_html_safe
|
||||
end
|
||||
|
||||
include_examples 'ancestor clusters'
|
||||
end
|
||||
|
||||
context 'for the group clusterable for the cluster' do
|
||||
let(:clusterable) { group }
|
||||
|
||||
it 'returns link for cluster' do
|
||||
expect(subject).to eq(cluster_link)
|
||||
end
|
||||
|
||||
include_examples 'ancestor clusters'
|
||||
|
||||
it 'is html safe' do
|
||||
expect(subject).to be_html_safe
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'for a project cluster' do
|
||||
let(:cluster) { create(:cluster, :project) }
|
||||
let(:cluster_link) { "<a href=\"#{project_cluster_path(cluster.project, cluster)}\">#{cluster.name}</a>" }
|
||||
|
||||
before do
|
||||
cluster.project.add_maintainer(user)
|
||||
end
|
||||
|
||||
context 'for the project clusterable' do
|
||||
let(:clusterable) { cluster.project }
|
||||
|
||||
it 'returns link for cluster' do
|
||||
expect(subject).to eq(cluster_link)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#provider_label' do
|
||||
let(:cluster) { create(:cluster, provider_type: provider_type) }
|
||||
|
||||
|
@ -191,26 +68,6 @@ RSpec.describe Clusters::ClusterPresenter do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#cluster_type_description' do
|
||||
subject { described_class.new(cluster).cluster_type_description }
|
||||
|
||||
context 'project_type cluster' do
|
||||
it { is_expected.to eq('Project cluster') }
|
||||
end
|
||||
|
||||
context 'group_type cluster' do
|
||||
let(:cluster) { create(:cluster, :provided_by_gcp, :group) }
|
||||
|
||||
it { is_expected.to eq('Group cluster') }
|
||||
end
|
||||
|
||||
context 'instance_type cluster' do
|
||||
let(:cluster) { create(:cluster, :provided_by_gcp, :instance) }
|
||||
|
||||
it { is_expected.to eq('Instance cluster') }
|
||||
end
|
||||
end
|
||||
|
||||
describe '#show_path' do
|
||||
subject { described_class.new(cluster).show_path }
|
||||
|
||||
|
|
|
@ -497,7 +497,7 @@ RSpec.shared_examples 'trace with disabled live trace feature' do
|
|||
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
|
||||
expect(File.exist?(src_path)).to be_falsy
|
||||
expect(src_checksum)
|
||||
.to eq(described_class.hexdigest(build.job_artifacts_trace.file.path))
|
||||
.to eq(described_class.sha256_hexdigest(build.job_artifacts_trace.file.path))
|
||||
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
|
||||
end
|
||||
end
|
||||
|
@ -523,7 +523,7 @@ RSpec.shared_examples 'trace with disabled live trace feature' do
|
|||
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
|
||||
expect(build.old_trace).to be_nil
|
||||
expect(src_checksum)
|
||||
.to eq(described_class.hexdigest(build.job_artifacts_trace.file.path))
|
||||
.to eq(described_class.sha256_hexdigest(build.job_artifacts_trace.file.path))
|
||||
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
|
||||
end
|
||||
end
|
||||
|
@ -861,7 +861,7 @@ RSpec.shared_examples 'trace with enabled live trace feature' do
|
|||
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
|
||||
expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
|
||||
expect(src_checksum)
|
||||
.to eq(described_class.hexdigest(build.job_artifacts_trace.file.path))
|
||||
.to eq(described_class.sha256_hexdigest(build.job_artifacts_trace.file.path))
|
||||
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
|
||||
end
|
||||
end