Add latest changes from gitlab-org/gitlab@master

Parent: d5d47b45dd
Commit: 770adf9251

161 changed files with 1059 additions and 696 deletions
@@ -382,7 +382,6 @@ Performance/DeleteSuffix:
- 'app/workers/concerns/application_worker.rb'
- 'ee/app/models/geo/upload_registry.rb'
- 'ee/app/workers/geo/file_download_dispatch_worker/attachment_job_finder.rb'
- 'lib/sentry/client/issue.rb'

# Offense count: 13
# Cop supports --auto-correct.

@@ -1042,4 +1041,3 @@ Style/StringLiteralsInInterpolation:
# IgnoredMethods: respond_to, define_method
Style/SymbolProc:
Enabled: false

@@ -1 +1 @@
55bed7acf3bb27ab627272d903d99573c5f009e7
cdb02af5b322de1f4091b39c349579b2e335b914
@@ -42,6 +42,7 @@ export default {
"AlertManagement|There was an error displaying the alerts. Confirm your endpoint's configuration details to ensure alerts appear.",
),
unassigned: __('Unassigned'),
closed: __('closed'),
},
fields: [
{

@@ -75,7 +76,7 @@ export default {
{
key: 'issue',
label: s__('AlertManagement|Incident'),
thClass: 'gl-w-12 gl-pointer-events-none',
thClass: 'gl-w-15p gl-pointer-events-none',
tdClass,
},
{

@@ -221,8 +222,11 @@ export default {
hasAssignees(assignees) {
return Boolean(assignees.nodes?.length);
},
getIssueLink(item) {
return joinPaths('/', this.projectPath, '-', 'issues', item.issueIid);
getIssueMeta({ issue: { iid, state } }) {
return {
state: state === 'closed' ? `(${this.$options.i18n.closed})` : '',
link: joinPaths('/', this.projectPath, '-', 'issues', iid),
};
},
tbodyTrClass(item) {
return {

@@ -343,8 +347,14 @@ export default {
</template>

<template #cell(issue)="{ item }">
<gl-link v-if="item.issueIid" data-testid="issueField" :href="getIssueLink(item)">
#{{ item.issueIid }}
<gl-link
v-if="item.issue"
v-gl-tooltip
:title="item.issue.title"
data-testid="issueField"
:href="getIssueMeta(item).link"
>
#{{ item.issue.iid }} {{ getIssueMeta(item).state }}
</gl-link>
<div v-else data-testid="issueField">{{ s__('AlertManagement|None') }}</div>
</template>

@@ -5,7 +5,11 @@ fragment AlertListItem on AlertManagementAlert {
status
startedAt
eventCount
issueIid
issue {
iid
state
title
}
assignees {
nodes {
name
@@ -13,7 +13,7 @@ export default {
return {
text: __('Save'),
attributes: [
{ variant: 'success' },
{ variant: 'confirm' },
{ category: 'primary' },
{ disabled: this.isDisabled },
],

@@ -138,7 +138,7 @@ export default {
<gl-button
v-gl-modal.confirmSaveIntegration
category="primary"
variant="success"
variant="confirm"
:loading="isSaving"
:disabled="isDisabled"
data-qa-selector="save_changes_button"

@@ -162,6 +162,8 @@ export default {

<gl-button
v-if="propsSource.canTest"
category="secondary"
variant="confirm"
:loading="isTesting"
:disabled="isDisabled"
:href="propsSource.testPath"

@@ -174,7 +176,7 @@ export default {
<gl-button
v-gl-modal.confirmResetIntegration
category="secondary"
variant="default"
variant="confirm"
:loading="isResetting"
:disabled="isDisabled"
data-testid="reset-button"

@@ -184,9 +186,7 @@ export default {
<reset-confirmation-modal @reset="onResetClick" />
</template>

<gl-button class="btn-cancel" :href="propsSource.cancelPath">{{
__('Cancel')
}}</gl-button>
<gl-button :href="propsSource.cancelPath">{{ __('Cancel') }}</gl-button>
</div>
</div>
</div>
@@ -1,5 +1,3 @@
import { initSearchApp } from '~/search';

document.addEventListener('DOMContentLoaded', () => {
initSearchApp();
});
initSearchApp();
@@ -31,6 +31,10 @@ export default {
},
mixins: [glFeatureFlagsMixin()],
props: {
ciFileContent: {
type: String,
required: true,
},
ciConfigData: {
type: Object,
required: true,

@@ -60,6 +64,7 @@ export default {
<validation-segment
:class="validationStyling"
:loading="isCiConfigDataLoading"
:ci-file-content="ciFileContent"
:ci-config="ciConfigData"
/>
</div>

@@ -5,6 +5,9 @@ import TooltipOnTruncate from '~/vue_shared/components/tooltip_on_truncate.vue';
import { CI_CONFIG_STATUS_VALID } from '../../constants';

export const i18n = {
empty: __(
"We'll continuously validate your pipeline configuration. The validation results will appear here.",
),
learnMore: __('Learn more'),
loading: s__('Pipelines|Validating GitLab CI configuration…'),
invalid: s__('Pipelines|This GitLab CI configuration is invalid.'),

@@ -26,6 +29,10 @@ export default {
},
},
props: {
ciFileContent: {
type: String,
required: true,
},
ciConfig: {
type: Object,
required: false,

@@ -38,17 +45,22 @@ export default {
},
},
computed: {
isEmpty() {
return !this.ciFileContent;
},
isValid() {
return this.ciConfig?.status === CI_CONFIG_STATUS_VALID;
},
icon() {
if (this.isValid) {
if (this.isValid || this.isEmpty) {
return 'check';
}
return 'warning-solid';
},
message() {
if (this.isValid) {
if (this.isEmpty) {
return this.$options.i18n.empty;
} else if (this.isValid) {
return this.$options.i18n.valid;
}

@@ -74,7 +86,7 @@ export default {
<tooltip-on-truncate :title="message" class="gl-text-truncate">
<gl-icon :name="icon" /> <span data-testid="validationMsg">{{ message }}</span>
</tooltip-on-truncate>
<span class="gl-flex-shrink-0 gl-pl-2">
<span v-if="!isEmpty" class="gl-flex-shrink-0 gl-pl-2">
<gl-link data-testid="learnMoreLink" :href="ymlHelpPagePath">
{{ $options.i18n.learnMore }}
</gl-link>
@@ -1,9 +1,11 @@
<script>
import { GlSprintf } from '@gitlab/ui';
import { GlButton, GlSprintf } from '@gitlab/ui';
import { __ } from '~/locale';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';

export default {
components: {
GlButton,
GlSprintf,
},
i18n: {

@@ -11,24 +13,44 @@ export default {
body: __(
'Create a new %{codeStart}.gitlab-ci.yml%{codeEnd} file at the root of the repository to get started.',
),
btnText: __('Create new CI/CD pipeline'),
},
mixins: [glFeatureFlagsMixin()],
inject: {
emptyStateIllustrationPath: {
default: '',
},
},
computed: {
showCTAButton() {
return this.glFeatures.pipelineEditorEmptyStateAction;
},
},
methods: {
createEmptyConfigFile() {
this.$emit('createEmptyConfigFile');
},
},
};
</script>
<template>
<div class="gl-display-flex gl-flex-direction-column gl-align-items-center gl-mt-11">
<img :src="emptyStateIllustrationPath" />
<h1 class="gl-font-size-h1">{{ $options.i18n.title }}</h1>
<p>
<p class="gl-mt-3">
<gl-sprintf :message="$options.i18n.body">
<template #code="{ content }">
<code>{{ content }}</code>
</template>
</gl-sprintf>
</p>
<gl-button
v-if="showCTAButton"
variant="confirm"
class="gl-mt-3"
@click="createEmptyConfigFile"
>
{{ $options.i18n.btnText }}
</gl-button>
</div>
</template>
@@ -36,7 +36,8 @@ export default {
// Success and failure state
failureType: null,
failureReasons: [],
hasNoCiConfigFile: false,
showStartScreen: false,
isNewConfigFile: false,
initialCiFileContent: '',
lastCommittedContent: '',
currentCiFileContent: '',

@@ -48,6 +49,11 @@ export default {
apollo: {
initialCiFileContent: {
query: getBlobContent,
// If we are working off a new file, we don't want to fetch
// the base data as there is nothing to fetch.
skip({ isNewConfigFile }) {
return isNewConfigFile;
},
variables() {
return {
projectPath: this.projectFullPath,

@@ -157,7 +163,7 @@ export default {
response?.status === httpStatusCodes.NOT_FOUND ||
response?.status === httpStatusCodes.BAD_REQUEST
) {
this.hasNoCiConfigFile = true;
this.showStartScreen = true;
} else {
this.reportFailure(LOAD_FAILURE_UNKNOWN);
}

@@ -183,6 +189,10 @@ export default {
resetContent() {
this.currentCiFileContent = this.lastCommittedContent;
},
setNewEmptyCiConfigFile() {
this.showStartScreen = false;
this.isNewConfigFile = true;
},
showErrorAlert({ type, reasons = [] }) {
this.reportFailure(type, reasons);
},

@@ -202,7 +212,10 @@ export default {
<template>
<div class="gl-mt-4 gl-relative">
<gl-loading-icon v-if="isBlobContentLoading" size="lg" class="gl-m-3" />
<pipeline-editor-empty-state v-else-if="hasNoCiConfigFile" />
<pipeline-editor-empty-state
v-else-if="showStartScreen"
@createEmptyConfigFile="setNewEmptyCiConfigFile"
/>
<div v-else>
<gl-alert v-if="showSuccessAlert" :variant="success.variant" @dismiss="dismissSuccess">
{{ success.text }}
@@ -45,6 +45,7 @@ export default {
<template>
<div>
<pipeline-editor-header
:ci-file-content="ciFileContent"
:ci-config-data="ciConfigData"
:is-ci-config-data-loading="isCiConfigDataLoading"
/>
@@ -268,10 +268,10 @@ export default {
</span>
</div>
<gl-button
v-if="alert.issueIid"
v-if="alert.issue"
class="gl-mt-3 mt-sm-0 align-self-center align-self-sm-baseline alert-details-incident-button"
data-testid="viewIncidentBtn"
:href="incidentPath(alert.issueIid)"
:href="incidentPath(alert.issue.iid)"
category="primary"
variant="success"
>
@@ -6,6 +6,7 @@ class Projects::Ci::PipelineEditorController < Projects::ApplicationController
push_frontend_feature_flag(:ci_config_visualization_tab, @project, default_enabled: :yaml)
push_frontend_feature_flag(:ci_config_merged_tab, @project, default_enabled: :yaml)
push_frontend_feature_flag(:pipeline_status_for_pipeline_editor, @project, default_enabled: :yaml)
push_frontend_feature_flag(:pipeline_editor_empty_state_action, @project, default_enabled: :yaml)
end

feature_category :pipeline_authoring
@@ -15,19 +15,45 @@ module MergeRequests
# Returns a Hash that maps a commit ID to the oldest merge request that
# introduced that commit.
def execute(commits)
mapping = {}
shas = commits.map(&:id)

# To include merge requests by the commit SHA, we don't need to go through
# any diff rows.
#
# We can't squeeze all this into a single query, as the diff based data
# relies on a GROUP BY. On the other hand, retrieving MRs by their merge
# SHAs separately is much easier, and plenty fast.
@project
.merge_requests
.preload_target_project
.by_merge_commit_sha(shas)
.each do |mr|
# Merge SHAs can't be in the merge request itself. It _is_ possible a
# newer merge request includes the merge commit, but in that case we
# still want the oldest merge request.
mapping[mr.merge_commit_sha] = mr
end

remaining = shas - mapping.keys

return mapping if remaining.empty?

id_rows = MergeRequestDiffCommit
.oldest_merge_request_id_per_commit(@project.id, commits.map(&:id))
.oldest_merge_request_id_per_commit(@project.id, remaining)

mrs = MergeRequest
.preload_target_project
.id_in(id_rows.map { |r| r[:merge_request_id] })
.index_by(&:id)

id_rows.each_with_object({}) do |row, hash|
id_rows.each do |row|
if (mr = mrs[row[:merge_request_id]])
hash[row[:sha]] = mr
mapping[row[:sha]] = mr
end
end

mapping
end
end
end
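Note: the reworked finder above resolves commits in two passes. Merge commits are matched directly by `merge_commit_sha`, and only the leftover SHAs fall back to the diff-row query. A minimal Ruby sketch of that idea, with hypothetical `find_by_merge_sha` and `find_by_diff_rows` helpers standing in for the scopes used above:

```ruby
# Illustrative sketch only; the helper names are hypothetical stand-ins for the
# scopes used in MergeRequests::OldestPerCommitFinder above.
def oldest_merge_requests_for(shas)
  mapping = find_by_merge_sha(shas)             # cheap pass: match merge commit SHAs directly
  remaining = shas - mapping.keys               # only unmatched SHAs need the diff-row lookup
  mapping.merge(find_by_diff_rows(remaining))   # expensive GROUP BY query runs on the smaller set
end
```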
@@ -43,7 +43,8 @@ module Resolvers
def preloads
{
assignees: [:assignees],
notes: [:ordered_notes, { ordered_notes: [:system_note_metadata, :project, :noteable] }]
notes: [:ordered_notes, { ordered_notes: [:system_note_metadata, :project, :noteable] }],
issue: [:issue]
}
end
end
@@ -20,8 +20,14 @@ module Types
field :issue_iid,
GraphQL::ID_TYPE,
null: true,
deprecated: { reason: 'Use issue field', milestone: '13.10' },
description: 'Internal ID of the GitLab issue attached to the alert.'

field :issue,
Types::IssueType,
null: true,
description: 'Issue attached to the alert.'

field :title,
GraphQL::STRING_TYPE,
null: true,
@@ -66,16 +66,6 @@ class BulkImports::Entity < ApplicationRecord
event :fail_op do
transition any => :failed
end

after_transition any => [:finished, :failed] do |entity|
Gitlab::Redis::Cache.with do |redis|
pattern = "bulk_import:#{entity.bulk_import.id}:entity:#{entity.id}:*"

redis.scan_each(match: pattern).each do |key|
redis.del(key)
end
end
end
end

def update_tracker_for(relation:, has_next_page:, next_page: nil)
@@ -77,7 +77,7 @@ module ErrorTracking

def sentry_client
strong_memoize(:sentry_client) do
Sentry::Client.new(api_url, token)
ErrorTracking::SentryClient.new(api_url, token)
end
end

@@ -168,13 +168,13 @@ module ErrorTracking

def handle_exceptions
yield
rescue Sentry::Client::Error => e
rescue ErrorTracking::SentryClient::Error => e
{ error: e.message, error_type: SENTRY_API_ERROR_TYPE_NON_20X_RESPONSE }
rescue Sentry::Client::MissingKeysError => e
rescue ErrorTracking::SentryClient::MissingKeysError => e
{ error: e.message, error_type: SENTRY_API_ERROR_TYPE_MISSING_KEYS }
rescue Sentry::Client::ResponseInvalidSizeError => e
rescue ErrorTracking::SentryClient::ResponseInvalidSizeError => e
{ error: e.message, error_type: SENTRY_API_ERROR_INVALID_SIZE }
rescue Sentry::Client::BadRequestError => e
rescue ErrorTracking::SentryClient::BadRequestError => e
{ error: e.message, error_type: SENTRY_API_ERROR_TYPE_BAD_REQUEST }
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e)
@@ -191,12 +191,8 @@ class MergeRequest < ApplicationRecord
end

state_machine :merge_status, initial: :unchecked do
event :mark_as_preparing do
transition unchecked: :preparing
end

event :mark_as_unchecked do
transition [:preparing, :can_be_merged, :checking] => :unchecked
transition [:can_be_merged, :checking] => :unchecked
transition [:cannot_be_merged, :cannot_be_merged_rechecking] => :cannot_be_merged_recheck
end

@@ -241,7 +237,7 @@ class MergeRequest < ApplicationRecord
# Returns current merge_status except it returns `cannot_be_merged_rechecking` as `checking`
# to avoid exposing unnecessary internal state
def public_merge_status
cannot_be_merged_rechecking? || preparing? ? 'checking' : merge_status
cannot_be_merged_rechecking? ? 'checking' : merge_status
end

validates :source_project, presence: true, unless: [:allow_broken, :importing?, :closed_or_merged_without_fork?]

@@ -1058,8 +1054,6 @@ class MergeRequest < ApplicationRecord
end

def mergeable?(skip_ci_check: false, skip_discussions_check: false)
return false if preparing?

return false unless mergeable_state?(skip_ci_check: skip_ci_check,
skip_discussions_check: skip_discussions_check)
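Note: with the `preparing` state reverted, `public_merge_status` only masks the internal rechecking state. A standalone sketch of the resulting mapping (not the ActiveRecord model itself):

```ruby
# Standalone illustration of the post-revert behaviour of public_merge_status;
# the real method lives on the MergeRequest model shown above.
def public_merge_status(merge_status)
  # The internal rechecking state is reported as plain 'checking' to API consumers.
  merge_status == 'cannot_be_merged_rechecking' ? 'checking' : merge_status
end

public_merge_status('cannot_be_merged_rechecking') # => "checking"
public_merge_status('can_be_merged')               # => "can_be_merged"
```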
@@ -2,112 +2,97 @@

module Members
class InviteService < Members::BaseService
DEFAULT_LIMIT = 100
BlankEmailsError = Class.new(StandardError)
TooManyEmailsError = Class.new(StandardError)

attr_reader :errors
def initialize(*args)
super

def initialize(current_user, params)
@current_user, @params = current_user, params.dup
@errors = {}
@emails = params[:email]&.split(',')&.uniq&.flatten
end

def execute(source)
return error(s_('Email cannot be blank')) if params[:email].blank?
validate_emails!

emails = params[:email].split(',').uniq.flatten
return error(s_("Too many users specified (limit is %{user_limit})") % { user_limit: user_limit }) if
user_limit && emails.size > user_limit

emails.each do |email|
next if existing_member?(source, email)
next if existing_invite?(source, email)
next if existing_request?(source, email)

if existing_user?(email)
add_existing_user_as_member(current_user, source, params, email)
next
end

invite_new_member_and_user(current_user, source, params, email)
end

return success unless errors.any?

error(errors)
@source = source
emails.each(&method(:process_email))
result
rescue BlankEmailsError, TooManyEmailsError => e
error(e.message)
end

private

def invite_new_member_and_user(current_user, source, params, email)
new_member = (source.class.name + 'Member').constantize.create(source_id: source.id,
user_id: nil,
access_level: params[:access_level],
invite_email: email,
created_by_id: current_user.id,
expires_at: params[:expires_at])
attr_reader :source, :errors, :emails

unless new_member.valid? && new_member.persisted?
errors[params[:email]] = new_member.errors.full_messages.to_sentence
def validate_emails!
raise BlankEmailsError, s_('AddMember|Email cannot be blank') if emails.blank?

if user_limit && emails.size > user_limit
raise TooManyEmailsError, s_("AddMember|Too many users specified (limit is %{user_limit})") % { user_limit: user_limit }
end
end

def add_existing_user_as_member(current_user, source, params, email)
new_member = create_member(current_user, existing_user(email), source, params.merge({ invite_email: email }))

unless new_member.valid? && new_member.persisted?
errors[email] = new_member.errors.full_messages.to_sentence
end
end

def create_member(current_user, user, source, params)
source.add_user(user, params[:access_level], current_user: current_user, expires_at: params[:expires_at])
end

def user_limit
limit = params.fetch(:limit, DEFAULT_LIMIT)
limit = params.fetch(:limit, Members::CreateService::DEFAULT_LIMIT)

limit && limit < 0 ? nil : limit
limit < 0 ? nil : limit
end

def existing_member?(source, email)
def process_email(email)
return if existing_member?(email)
return if existing_invite?(email)
return if existing_request?(email)

add_member(email)
end

def existing_member?(email)
existing_member = source.members.with_user_by_email(email).exists?

if existing_member
errors[email] = "Already a member of #{source.name}"
errors[email] = s_("AddMember|Already a member of %{source_name}") % { source_name: source.name }
return true
end

false
end

def existing_invite?(source, email)
def existing_invite?(email)
existing_invite = source.members.search_invite_email(email).exists?

if existing_invite
errors[email] = "Member already invited to #{source.name}"
errors[email] = s_("AddMember|Member already invited to %{source_name}") % { source_name: source.name }
return true
end

false
end

def existing_request?(source, email)
def existing_request?(email)
existing_request = source.requesters.with_user_by_email(email).exists?

if existing_request
errors[email] = "Member cannot be invited because they already requested to join #{source.name}"
errors[email] = s_("AddMember|Member cannot be invited because they already requested to join %{source_name}") % { source_name: source.name }
return true
end

false
end

def existing_user(email)
User.find_by_email(email)
def add_member(email)
new_member = source.add_user(email, params[:access_level], current_user: current_user, expires_at: params[:expires_at])

errors[email] = new_member.errors.full_messages.to_sentence if new_member.invalid?
end

def existing_user?(email)
existing_user(email).present?
def result
if errors.any?
error(errors)
else
success
end
end
end
end
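Note: the refactored `Members::InviteService` raises on blank or over-limit email lists, handles each address in `process_email`, and collects per-email failures into the `errors` hash. A hedged sketch of a call site (the `current_user`, `group`, and error-handling helper are placeholders for whatever the caller already has):

```ruby
# Assumed usage sketch; current_user, group, and handle_invite_errors are
# placeholders, not part of this diff.
params = { email: 'a@example.com,b@example.com', access_level: Gitlab::Access::DEVELOPER }

result = Members::InviteService.new(current_user, params).execute(group)

if result[:status] == :error
  # result[:message] is a single string for blank/over-limit input, or a
  # hash of email => error message collected by process_email.
  handle_invite_errors(result[:message])
end
```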
@@ -3,13 +3,6 @@
module MergeRequests
class AfterCreateService < MergeRequests::BaseService
def execute(merge_request)
prepare_merge_request(merge_request)
merge_request.mark_as_unchecked! if merge_request.preparing?
end

private

def prepare_merge_request(merge_request)
event_service.open_mr(merge_request, current_user)
merge_request_activity_counter.track_create_mr_action(user: current_user)
notification_service.new_merge_request(merge_request, current_user)
@@ -14,8 +14,6 @@ module MergeRequests
end

def after_create(issuable)
issuable.mark_as_preparing

# Add new items to MergeRequests::AfterCreateService if they can
# be performed in Sidekiq
NewMergeRequestWorker.perform_async(issuable.id, current_user.id)
@@ -6,3 +6,4 @@ Merge Request URL: #{project_merge_request_url(@merge_request.target_project, @m

Author: #{sanitize_name(@merge_request.author_name)}
= assignees_label(@merge_request)
= reviewers_label(@merge_request)
@@ -26,7 +26,7 @@ class ErrorTrackingIssueLinkWorker # rubocop:disable Scalability/IdempotentWorke
logger.info("Linking Sentry issue #{sentry_issue_id} to GitLab issue #{issue.id}")

sentry_client.create_issue_link(integration_id, sentry_issue_id, issue)
rescue Sentry::Client::Error => e
rescue ErrorTracking::SentryClient::Error => e
logger.info("Failed to link Sentry issue #{sentry_issue_id} to GitLab issue #{issue.id} with error: #{e.message}")
end
end

@@ -63,7 +63,7 @@ class ErrorTrackingIssueLinkWorker # rubocop:disable Scalability/IdempotentWorke
sentry_client
.repos(organization_slug)
.find { |repo| repo.project_id == issue.project_id && repo.status == 'active' }
rescue Sentry::Client::Error => e
rescue ErrorTracking::SentryClient::Error => e
logger.info("Unable to retrieve Sentry repo for organization #{organization_slug}, id #{sentry_issue_id}, with error: #{e.message}")

nil

changelogs/unreleased/228733-alert-issue-status.yml (new file, 5 lines)
@@ -0,0 +1,5 @@
---
title: 'Incident management: add issue state to alerts table'
merge_request: 55185
author:
type: added

@@ -1,5 +0,0 @@
---
title: Implement new preparing internal merge_status
merge_request: 54900
author:
type: other

@@ -0,0 +1,5 @@
---
title: Add reviewers detail to merged merge request email
merge_request: 55589
author:
type: added

@@ -0,0 +1,5 @@
---
title: Include MRs for merge commits for changelogs
merge_request: 55371
author:
type: fixed

changelogs/unreleased/move-cancel-btn-integrations.yml (new file, 5 lines)
@@ -0,0 +1,5 @@
---
title: Group integration settings buttons to the left
merge_request: 55139
author:
type: changed

@@ -0,0 +1,8 @@
---
name: pipeline_editor_empty_state_action
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/55414
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/323229
milestone: '13.10'
type: development
group: group::pipeline authoring
default_enabled: false
@@ -1198,7 +1198,7 @@ Confirm the following are all true:
successfully creates the project, but doesn't create the README.
- When [tailing the logs](https://docs.gitlab.com/omnibus/settings/logs.html#tail-logs-in-a-console-on-the-server)
on a Gitaly client and reproducing the error, you get `401` errors
when reaching the `/api/v4/internal/allowed` endpoint:
when reaching the [`/api/v4/internal/allowed`](../../development/internal_api.md) endpoint:

```shell
# api_json.log

@@ -301,7 +301,7 @@ Confirm the following are all true:
- Creating a new project and [initializing it with a README](../../user/project/working_with_projects.md#blank-projects)
successfully creates the project but doesn't create the README.
- When [tailing the logs](https://docs.gitlab.com/omnibus/settings/logs.html#tail-logs-in-a-console-on-the-server) on an app node and reproducing the error, you get `401` errors
when reaching the `/api/v4/internal/allowed` endpoint:
when reaching the [`/api/v4/internal/allowed`](../../development/internal_api.md) endpoint:

```shell
# api_json.log
@@ -255,7 +255,7 @@ separate Rails process to debug the issue:
### GitLab: API is not accessible

This often occurs when GitLab Shell attempts to request authorization via the
internal API (e.g., `http://localhost:8080/api/v4/internal/allowed`), and
[internal API](../../development/internal_api.md) (e.g., `http://localhost:8080/api/v4/internal/allowed`), and
something in the check fails. There are many reasons why this may happen:

1. Timeout connecting to a database (e.g., PostgreSQL or Redis)

@@ -271,8 +271,8 @@ strace -ttTfyyy -s 1024 -p <PID of unicorn worker> -o /tmp/unicorn.txt
```

If you cannot isolate which Unicorn worker is the issue, try to run `strace`
on all the Unicorn workers to see where the `/internal/allowed` endpoint gets
stuck:
on all the Unicorn workers to see where the
[`/internal/allowed`](../../development/internal_api.md) endpoint gets stuck:

```shell
ps auwx | grep unicorn | awk '{ print " -p " $2}' | xargs strace -ttTfyyy -s 1024 -o /tmp/unicorn.txt
@@ -427,7 +427,8 @@ Describes an alert from the project's Alert Management.
| `eventCount` | Int | Number of events of this alert. |
| `hosts` | String! => Array | List of hosts the alert came from. |
| `iid` | ID! | Internal ID of the alert. |
| `issueIid` | ID | Internal ID of the GitLab issue attached to the alert. |
| `issue` | Issue | Issue attached to the alert. |
| `issueIid` **{warning-solid}** | ID | **Deprecated:** Use issue field. Deprecated in 13.10. |
| `metricsDashboardUrl` | String | URL for metrics embed for the alert. |
| `monitoringTool` | String | Monitoring tool the alert came from. |
| `notes` | NoteConnection! | All notes on this noteable. |
@@ -627,6 +627,35 @@ POST /projects/:id/external_approval_rules
| `external_url` | string | yes | URL of external approval resource |
| `protected_branch_ids` | array<Integer> | no | The ids of protected branches to scope the rule by |

### Delete external approval rule **(ULTIMATE)**

You can delete an external approval rule for a project using the following endpoint:

```plaintext
DELETE /projects/:id/external_approval_rules/:rule_id
```

| Attribute | Type | Required | Description |
|------------------------|----------------|----------|----------------------------------------------------|
| `rule_id` | integer | yes | The ID of an approval rule |
| `id` | integer | yes | The ID of a project |

### Update external approval rule **(ULTIMATE)**

You can update an existing external approval rule for a project using the following endpoint:

```plaintext
PATCH /projects/:id/external_approval_rules/:rule_id
```

| Attribute | Type | Required | Description |
|------------------------|----------------|----------|----------------------------------------------------|
| `id` | integer | yes | The ID of a project |
| `rule_id` | integer | yes | The ID of an external approval rule |
| `name` | string | no | Display name of approval rule |
| `external_url` | string | no | URL of external approval resource |
| `protected_branch_ids` | array<Integer> | no | The ids of protected branches to scope the rule by |

### Enable or disable External Project-level MR approvals **(ULTIMATE SELF)**

Enable or disable External Project-level MR approvals is under development and not ready for production use. It is
@@ -7,32 +7,16 @@ type: concepts, howto

# Use Docker to build Docker images

You can use GitLab CI/CD with Docker to build and test Docker images.

For example, you might want to:

1. Create a Docker image of your application.
1. Run tests against the image.
1. Push the image to a remote registry.
1. Use the image to deploy your application to a server.

Or, if your application already has a `Dockerfile`, you can
use it to create and test an image:

```shell
docker build -t my-image dockerfiles/
docker run my-image /script/to/run/tests
docker tag my-image my-registry:5000/my-image
docker push my-registry:5000/my-image
```
You can use GitLab CI/CD with Docker to create Docker images.
For example, you can create a Docker image of your application,
test it, and publish it to a container registry.

To run Docker commands in your CI/CD jobs, you must configure
GitLab Runner to support `docker` commands.

## Enable Docker commands in your CI/CD jobs

There are three ways to enable the use of `docker build` and `docker run`
during jobs, each with their own tradeoffs. You can use:
To enable Docker commands for your CI/CD jobs, you can use:

- [The shell executor](#use-the-shell-executor)
- [The Docker executor with the Docker image (Docker-in-Docker)](#use-the-docker-executor-with-the-docker-image-docker-in-docker)

@@ -47,12 +31,9 @@ to learn more about how these runners are configured.

### Use the shell executor

One way to configure GitLab Runner for `docker` support is to use the
`shell` executor.

After you register a runner and select the `shell` executor,
your job scripts are executed as the `gitlab-runner` user.
This user needs permission to run Docker commands.
You can include Docker commands in your CI/CD jobs if your runner is configured to
use the `shell` executor. The `gitlab-runner` user runs the Docker commands, but
needs permission to run them.

1. [Install](https://gitlab.com/gitlab-org/gitlab-runner/#installation) GitLab Runner.
1. [Register](https://docs.gitlab.com/runner/register/) a runner.

@@ -100,9 +81,11 @@ Learn more about the [security of the `docker` group](https://blog.zopyx.com/on-

### Use the Docker executor with the Docker image (Docker-in-Docker)

Another way to configure GitLab Runner for `docker` support is to
register a runner with the Docker executor and use the [Docker image](https://hub.docker.com/_/docker/)
to run your job scripts. This configuration is referred to as "Docker-in-Docker."
You can use "Docker-in-Docker" to run commands in your CI/CD jobs:

- Register a runner that uses the Docker executor.
- Use the [Docker image](https://hub.docker.com/_/docker/) provided by Docker to
run the jobs that need Docker commands.

The Docker image has all of the `docker` tools installed
and can run the job script in context of the image in privileged mode.

@@ -111,14 +94,18 @@ The `docker-compose` command is not available in this configuration by default.
To use `docker-compose` in your job scripts, follow the `docker-compose`
[installation instructions](https://docs.docker.com/compose/install/).

An example project that uses this approach can be found here: <https://gitlab.com/gitlab-examples/docker>.

WARNING:
When you enable `--docker-privileged`, you are effectively disabling all of
the security mechanisms of containers and exposing your host to privilege
escalation which can lead to container breakout. For more information, check
escalation. Doing this can lead to container breakout. For more information, check
out the official Docker documentation on
[runtime privilege and Linux capabilities](https://docs.docker.com/engine/reference/run/#runtime-privilege-and-linux-capabilities).

Docker-in-Docker works well, and is the recommended configuration, but it is
#### Limitations of Docker-in-Docker

Docker-in-Docker is the recommended configuration, but it is
not without its own challenges:

- When using Docker-in-Docker, each job is in a clean environment without the past

@@ -144,8 +131,6 @@ not without its own challenges:
- docker run -v "$MOUNT_POINT:/mnt" my-docker-image
```

An example project using this approach can be found here: <https://gitlab.com/gitlab-examples/docker>.

In the examples below, we are using Docker image tags to specify a
specific version, such as `docker:19.03.12`. If tags like `docker:stable`
are used, you have no control over what version is used. This can lead to

@@ -373,9 +358,8 @@ build:

### Use Docker socket binding

Another way to configure GitLab Runner for `docker` support is to
bind-mount `/var/run/docker.sock` into the
container so that Docker is available in the context of the image.
To use Docker commands in your CI/CD jobs, you can bind-mount `/var/run/docker.sock` into the
container. Docker is then available in the context of the image.

NOTE:
If you bind the Docker socket and you are

@@ -478,13 +462,10 @@ services:

> [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27173) in GitLab Runner 13.6.

If you are an administrator of GitLab Runner and you have the `dind`
service defined for the [Docker
executor](https://docs.gitlab.com/runner/configuration/advanced-configuration.html#the-runnersdockerservices-section),
or the [Kubernetes
executor](https://docs.gitlab.com/runner/executors/kubernetes.html#using-services)
you can specify the `command` to configure the registry mirror for the
Docker daemon.
If you are a GitLab Runner administrator, you can specify the `command` to configure the registry mirror
for the Docker daemon. The `dind` service must be defined for the
[Docker](https://docs.gitlab.com/runner/configuration/advanced-configuration.html#the-runnersdockerservices-section)
or [Kubernetes executor](https://docs.gitlab.com/runner/executors/kubernetes.html#using-services).

Docker:

@@ -516,11 +497,10 @@ Kubernetes:

##### Docker executor inside GitLab Runner configuration

If you are an administrator of GitLab Runner and you want to use
the mirror for every `dind` service, update the
If you are a GitLab Runner administrator, you can use
the mirror for every `dind` service. Update the
[configuration](https://docs.gitlab.com/runner/configuration/advanced-configuration.html)
to specify a [volume
mount](https://docs.gitlab.com/runner/configuration/advanced-configuration.html#volumes-in-the-runnersdocker-section).
to specify a [volume mount](https://docs.gitlab.com/runner/configuration/advanced-configuration.html#volumes-in-the-runnersdocker-section).

For example, if you have a `/opt/docker/daemon.json` file with the following
content:

@@ -552,11 +532,10 @@ picked up by the `dind` service.

> [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/3223) in GitLab Runner 13.6.

If you are an administrator of GitLab Runner and you want to use
the mirror for every `dind` service, update the
If you are a GitLab Runner administrator, you can use
the mirror for every `dind` service. Update the
[configuration](https://docs.gitlab.com/runner/configuration/advanced-configuration.html)
to specify a [ConfigMap volume
mount](https://docs.gitlab.com/runner/executors/kubernetes.html#using-volumes).
to specify a [ConfigMap volume mount](https://docs.gitlab.com/runner/executors/kubernetes.html#using-volumes).

For example, if you have a `/tmp/daemon.json` file with the following
content:

@@ -602,7 +581,7 @@ The configuration is picked up by the `dind` service.

When you use Docker-in-Docker, the [normal authentication
methods](using_docker_images.html#define-an-image-from-a-private-container-registry)
won't work because a fresh Docker daemon is started with the service.
don't work because a fresh Docker daemon is started with the service.

### Option 1: Run `docker login`

@@ -634,14 +613,14 @@ empty or remove it.

If you are an administrator for GitLab Runner, you can mount a file
with the authentication configuration to `~/.docker/config.json`.
Then every job that the runner picks up will be authenticated already. If you
Then every job that the runner picks up is authenticated already. If you
are using the official `docker:19.03.13` image, the home directory is
under `/root`.

If you mount the configuration file, any `docker` command
that modifies the `~/.docker/config.json` (for example, `docker login`)
fails, because the file is mounted as read-only. Do not change it from
read-only, because other problems will occur.
read-only, because problems occur.

Here is an example of `/opt/.docker/config.json` that follows the
[`DOCKER_AUTH_CONFIG`](using_docker_images.md#determining-your-docker_auth_config-data)

@@ -743,8 +722,8 @@ build:

When using Docker-in-Docker, Docker downloads all layers of your image every
time you create a build. Recent versions of Docker (Docker 1.13 and above) can
use a pre-existing image as a cache during the `docker build` step, considerably
speeding up the build process.
use a pre-existing image as a cache during the `docker build` step. This considerably
speeds up the build process.

### How Docker caching works

@@ -754,8 +733,8 @@ any changes. Change in one layer causes all subsequent layers to be recreated.

You can specify a tagged image to be used as a cache source for the `docker build`
command by using the `--cache-from` argument. Multiple images can be specified
as a cache source by using multiple `--cache-from` arguments. Keep in mind that
any image that's used with the `--cache-from` argument must first be pulled
as a cache source by using multiple `--cache-from` arguments. Any image that's used
with the `--cache-from` argument must first be pulled
(using `docker pull`) before it can be used as a cache source.

### Using Docker caching
@@ -899,7 +899,7 @@ in Rails, scheduled to run whenever an SSH key is modified by a user.
instead of keys. In this case, `AuthorizedKeysCommand` is replaced with an
`AuthorizedPrincipalsCommand`. This extracts a username from the certificate
without using the Rails internal API, which is used instead of `key_id` in the
`/api/internal/allowed` call later.
[`/api/internal/allowed`](internal_api.md) call later.

GitLab Shell also has a few operations that do not involve Gitaly, such as
resetting two-factor authentication codes. These are handled in the same way,
@@ -35,12 +35,12 @@ This is called by [Gitaly](https://gitlab.com/gitlab-org/gitaly) and
[GitLab Shell](https://gitlab.com/gitlab-org/gitlab-shell) to check access to a
repository.

When called from GitLab Shell no changes are passed and the internal
API replies with the information needed to pass the request on to
Gitaly.
- **When called from GitLab Shell**: No changes are passed, and the internal
API replies with the information needed to pass the request on to Gitaly.
- **When called from Gitaly in a `pre-receive` hook**: The changes are passed
and validated to determine if the push is allowed.

When called from Gitaly in a `pre-receive` hook the changes are passed
and those are validated to determine if the push is allowed.
Calls are limited to 50 seconds each.

```plaintext
POST /internal/allowed
@@ -13,7 +13,7 @@ GitLab Maintenance Mode **only** blocks writes from HTTP and SSH requests at the

- [the read-only database method](https://gitlab.com/gitlab-org/gitlab/blob/2425e9de50c678413ceaad6ee3bf66f42b7e228c/ee/lib/ee/gitlab/database.rb#L13), which toggles special behavior when we are not allowed to write to the database. [Search the codebase for `Gitlab::Database.read_only?`.](https://gitlab.com/search?utf8=%E2%9C%93&search=Gitlab%3A%3ADatabase.read_only%3F&group_id=9970&project_id=278964&scope=blobs&search_code=false&snippets=false&repository_ref=)
- [the read-only middleware](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/ee/gitlab/middleware/read_only/controller.rb), where HTTP requests that cause database writes are blocked, unless explicitly allowed.
- [Git push access via SSH is denied](https://gitlab.com/gitlab-org/gitlab/-/blob/2425e9de50c678413ceaad6ee3bf66f42b7e228c/ee/lib/ee/gitlab/git_access.rb#L13) by returning 401 when `gitlab-shell` POSTs to `/internal/allowed` to [check if access is allowed](internal_api.md#git-authentication).
- [Git push access via SSH is denied](https://gitlab.com/gitlab-org/gitlab/-/blob/2425e9de50c678413ceaad6ee3bf66f42b7e228c/ee/lib/ee/gitlab/git_access.rb#L13) by returning 401 when `gitlab-shell` POSTs to [`/internal/allowed`](internal_api.md) to [check if access is allowed](internal_api.md#git-authentication).
- [Container registry authentication service](https://gitlab.com/gitlab-org/gitlab/-/blob/2425e9de50c678413ceaad6ee3bf66f42b7e228c/ee/app/services/ee/auth/container_registry_authentication_service.rb#L12), where updates to the container registry are blocked.

The database itself is not in read-only mode (except in a Geo secondary site) and can be written by sources other than the ones blocked.
@@ -5,7 +5,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
type: reference
---

# Upgrading deployments for newer Auto Deploy dependencies (Auto Deploy template, auto-deploy-image and auto-deploy-app chart)
# Upgrading deployments for newer Auto Deploy dependencies

[Auto Deploy](stages.md#auto-deploy) is a feature that deploys your application to a Kubernetes cluster.
It consists of several dependencies:
@@ -281,7 +281,7 @@ example [`resources.yml` file](#example-resourcesyml-file) in the following ways
after you install the `gitlab-kas` sub-chart, or enable `gitlab-kas` for Omnibus GitLab.
When using the sub-chart, you must set `wss://kas.host.tld:443` as
`kas-address`, where `host.tld` is the domain you've setup for your GitLab installation.
When using Omnibus GitLab, you must set `wss://GitLab.host.tld:443/-/kubernetes-agent` as
When using Omnibus GitLab, you must set `wss://GitLab.host.tld:443/-/kubernetes-agent/` as
`kas-address`, where `GitLab.host.tld` is your GitLab hostname.
- When using the sub-chart, specify the `ws` scheme (such as `ws://kas.host.tld:80`)
to use an unencrypted WebSockets connection.

@@ -346,7 +346,7 @@ spec:
- --token-file=/config/token
- --kas-address
- wss://kas.host.tld:443 # change this line for the one below if using Omnibus GitLab
# - wss://gitlab.host.tld:443/-/kubernetes-agent
# - wss://gitlab.host.tld:443/-/kubernetes-agent/
volumeMounts:
- name: token-volume
mountPath: /config

@@ -569,7 +569,7 @@ This error is shown if there are some connectivity issues between the address
specified as `kas-address`, and your Agent pod. To fix it, make sure that you
specified the `kas-address` correctly.

### Agent logs - ValidationError(Deployment.metadata
### Agent logs - ValidationError(Deployment.metadata)

```plaintext
{"level":"info","time":"2020-10-30T08:56:54.329Z","msg":"Synced","project_id":"root/kas-manifest001","resource_key":"apps/Deployment/kas-test001/nginx-deployment","sync_result":"error validating data: [ValidationError(Deployment.metadata): unknown field \"replicas\" in io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta, ValidationError(Deployment.metadata): unknown field \"selector\" in io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta, ValidationError(Deployment.metadata): unknown field \"template\" in io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta]"}

@@ -603,3 +603,72 @@ issue is in progress, directly edit the deployment with the

This error is shown if the version of the agent is newer than the version of KAS.
To fix it, make sure that both `agentk` and KAS use the same versions.

### Agent logs - Certificate signed by unknown authority

```plaintext
{"level":"error","time":"2021-02-25T07:22:37.158Z","msg":"Reverse tunnel","mod_name":"reverse_tunnel","error":"Connect(): rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing failed to WebSocket dial: failed to send handshake request: Get \\\"https://GitLabhost.tld:443/-/kubernetes-agent/\\\": x509: certificate signed by unknown authority\""}
```

This error is shown if your GitLab instance is using a certificate signed by an internal CA that
is unknown to the agent. One approach to fixing it is to present the CA certificate file to the agent
via a Kubernetes `configmap` and mount the file in the agent `/etc/ssl/certs` directory from where it
will be picked up automatically.

For example, if your internal CA certificate is `myCA.pem`:

```plaintext
kubectl -n gitlab-agent create configmap ca-pemstore --from-file=myCA.pem
```

Then in `resources.yml`:

```plaintext
spec:
serviceAccountName: gitlab-agent
containers:
- name: agent
image: "registry.gitlab.com/gitlab-org/cluster-integration/gitlab-agent/agentk:latest"
args:
- --token-file=/config/token
- --kas-address
- wss://kas.host.tld:443 # change this line for the one below if using Omnibus GitLab
# - wss://gitlab.host.tld:443/-/kubernetes-agent
volumeMounts:
- name: token-volume
mountPath: /config
- name: ca-pemstore-volume
mountPath: /etc/ssl/certs/myCA.pem
subPath: myCA.pem
volumes:
- name: token-volume
secret:
secretName: gitlab-agent-token
- name: ca-pemstore-volume
configMap:
name: ca-pemstore
items:
- key: myCA.pem
path: myCA.pem
```

Alternatively, you can mount the certificate file at a different location and include it using the
`--ca-cert-file` agent parameter:

```plaintext
containers:
- name: agent
image: "registry.gitlab.com/gitlab-org/cluster-integration/gitlab-agent/agentk:latest"
args:
- --ca-cert-file=/tmp/myCA.pem
- --token-file=/config/token
- --kas-address
- wss://kas.host.tld:443 # change this line for the one below if using Omnibus GitLab
# - wss://gitlab.host.tld:443/-/kubernetes-agent
volumeMounts:
- name: token-volume
mountPath: /config
- name: ca-pemstore-volume
mountPath: /tmp/myCA.pem
subPath: myCA.pem
```
@@ -1,12 +1,12 @@
# frozen_string_literal: true

module Sentry
class Client
include Sentry::Client::Event
include Sentry::Client::Projects
include Sentry::Client::Issue
include Sentry::Client::Repo
include Sentry::Client::IssueLink
module ErrorTracking
class SentryClient
include SentryClient::Event
include SentryClient::Projects
include SentryClient::Issue
include SentryClient::Repo
include SentryClient::IssueLink

Error = Class.new(StandardError)
MissingKeysError = Class.new(StandardError)

@@ -21,7 +21,7 @@ module Sentry
private

def api_urls
@api_urls ||= Sentry::ApiUrls.new(@url)
@api_urls ||= SentryClient::ApiUrls.new(@url)
end

def handle_mapping_exceptions(&block)

@@ -94,7 +94,7 @@ module Sentry
end

def raise_error(message)
raise Client::Error, message
raise SentryClient::Error, message
end
end
end

lib/error_tracking/sentry_client/api_urls.rb (new file, 41 lines)
@@ -0,0 +1,41 @@
# frozen_string_literal: true

module ErrorTracking
class SentryClient
class ApiUrls
def initialize(url_base)
@uri = URI(url_base).freeze
end

def issues_url
with_path(File.join(@uri.path, '/issues/'))
end

def issue_url(issue_id)
with_path("/api/0/issues/#{escape(issue_id)}/")
end

def projects_url
with_path('/api/0/projects/')
end

def issue_latest_event_url(issue_id)
with_path("/api/0/issues/#{escape(issue_id)}/events/latest/")
end

private

def with_path(new_path)
new_uri = @uri.dup
# Sentry API returns 404 if there are extra slashes in the URL
new_uri.path = new_path.squeeze('/')

new_uri
end

def escape(param)
CGI.escape(param.to_s)
end
end
end
end
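Note: the Sentry API client moves from `Sentry::Client` to `ErrorTracking::SentryClient`; only the namespace changes, so call sites swap the constant and the rescued error classes. A hedged sketch of a call site after the rename (the URL and token are placeholders, and the method shown is the `issue_latest_event` defined in the client's Event module below):

```ruby
# Placeholder URL and token; in the application these come from the project's
# error-tracking settings shown earlier in this commit.
client = ErrorTracking::SentryClient.new('https://sentry.example.com/api/0/projects/group/project/', 'access-token')

begin
  event = client.issue_latest_event(issue_id: 42)
rescue ErrorTracking::SentryClient::Error => e
  # Callers now rescue errors under the new namespace, as the worker and
  # ProjectErrorTrackingSetting changes elsewhere in this commit do.
  Rails.logger.info("Sentry request failed: #{e.message}")
end
```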
@@ -1,7 +1,7 @@
# frozen_string_literal: true

module Sentry
class Client
module ErrorTracking
class SentryClient
module Event
def issue_latest_event(issue_id:)
latest_event = http_get(api_urls.issue_latest_event_url(issue_id))[:body]
@@ -1,7 +1,7 @@
# frozen_string_literal: true

module Sentry
class Client
module ErrorTracking
class SentryClient
module Issue
BadRequestError = Class.new(StandardError)
ResponseInvalidSizeError = Class.new(StandardError)

@@ -49,7 +49,7 @@ module Sentry

{
issues: response[:body],
pagination: Sentry::PaginationParser.parse(response[:headers])
pagination: SentryClient::PaginationParser.parse(response[:headers])
}
end

@@ -113,7 +113,7 @@ module Sentry
uri = URI(url)
uri.path.squeeze!('/')
# Remove trailing slash
uri = uri.to_s.gsub(/\/\z/, '')
uri = uri.to_s.delete_suffix('/')

uri
end
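Note: `String#delete_suffix` removes a literal trailing suffix without a regex, which is why `lib/sentry/client/issue.rb` could be dropped from the `Performance/DeleteSuffix` exclusions at the top of this commit. A quick illustration of the equivalence:

```ruby
# Both forms strip a single trailing slash; delete_suffix avoids the regex.
'https://sentry.example.com/api/0/issues/'.gsub(/\/\z/, '')
# => "https://sentry.example.com/api/0/issues"
'https://sentry.example.com/api/0/issues/'.delete_suffix('/')
# => "https://sentry.example.com/api/0/issues"
```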
@@ -1,7 +1,7 @@
# frozen_string_literal: true

module Sentry
class Client
module ErrorTracking
class SentryClient
module IssueLink
# Creates a link in Sentry corresponding to the provided
# Sentry issue and GitLab issue

lib/error_tracking/sentry_client/pagination_parser.rb (new file, 25 lines)
@@ -0,0 +1,25 @@
# frozen_string_literal: true

module ErrorTracking
class SentryClient
module PaginationParser
PATTERN = /rel=\"(?<direction>\w+)\";\sresults=\"(?<results>\w+)\";\scursor=\"(?<cursor>.+)\"/.freeze

def self.parse(headers)
links = headers['link'].to_s.split(',')

links.map { |link| parse_link(link) }.compact.to_h
end

def self.parse_link(link)
match = link.match(PATTERN)

return unless match
return if match['results'] != "true"

[match['direction'], { 'cursor' => match['cursor'] }]
end
private_class_method :parse_link
end
end
end
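Note: `PaginationParser` reads Sentry's `Link` response header and keeps only the directions whose `results` flag is `"true"`. A hedged example with a made-up header value in Sentry's documented format:

```ruby
# The header value is illustrative; real values come from Sentry API responses.
headers = {
  'link' => '<https://sentry.example.com/api/0/issues/?cursor=100:-1:1>; rel="previous"; results="false"; cursor="100:-1:1",' \
            '<https://sentry.example.com/api/0/issues/?cursor=100:1:0>; rel="next"; results="true"; cursor="100:1:0"'
}

ErrorTracking::SentryClient::PaginationParser.parse(headers)
# => { "next" => { "cursor" => "100:1:0" } }
```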
@@ -1,7 +1,7 @@
# frozen_string_literal: true

module Sentry
class Client
module ErrorTracking
class SentryClient
module Projects
def projects
projects = get_projects
@@ -1,7 +1,7 @@
# frozen_string_literal: true

module Sentry
class Client
module ErrorTracking
class SentryClient
module Repo
def repos(organization_slug)
repos_url = repos_api_url(organization_slug)
@ -1,5 +1,7 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
# This should be in the ErrorTracking namespace. For more details, see:
|
||||
# https://gitlab.com/gitlab-org/gitlab/-/issues/323342
|
||||
module Gitlab
|
||||
module ErrorTracking
|
||||
class DetailedError
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
# This should be in the ErrorTracking namespace. For more details, see:
|
||||
# https://gitlab.com/gitlab-org/gitlab/-/issues/323342
|
||||
module Gitlab
|
||||
module ErrorTracking
|
||||
class Error
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
# This should be in the ErrorTracking namespace. For more details, see:
|
||||
# https://gitlab.com/gitlab-org/gitlab/-/issues/323342
|
||||
module Gitlab
|
||||
module ErrorTracking
|
||||
class ErrorCollection
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
# This should be in the ErrorTracking namespace. For more details, see:
|
||||
# https://gitlab.com/gitlab-org/gitlab/-/issues/323342
|
||||
module Gitlab
|
||||
module ErrorTracking
|
||||
class ErrorEvent
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
# This should be in the ErrorTracking namespace. For more details, see:
|
||||
# https://gitlab.com/gitlab-org/gitlab/-/issues/323342
|
||||
module Gitlab
|
||||
module ErrorTracking
|
||||
class Project
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
# This should be in the ErrorTracking namespace. For more details, see:
|
||||
# https://gitlab.com/gitlab-org/gitlab/-/issues/323342
|
||||
module Gitlab
|
||||
module ErrorTracking
|
||||
class Repo
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
# This should be in the ErrorTracking namespace. For more details, see:
|
||||
# https://gitlab.com/gitlab-org/gitlab/-/issues/323342
|
||||
module Gitlab
|
||||
module ErrorTracking
|
||||
module StackTraceHighlightDecorator
|
||||
|
|
13
lib/gitlab/relative_positioning/closed_range.rb
Normal file
|
@ -0,0 +1,13 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module RelativePositioning
|
||||
class ClosedRange < RelativePositioning::Range
|
||||
def initialize(lhs, rhs)
|
||||
@lhs, @rhs = lhs, rhs
|
||||
raise IllegalRange, 'Either lhs or rhs is missing' unless lhs && rhs
|
||||
raise IllegalRange, 'lhs and rhs cannot be the same object' if lhs == rhs
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
18
lib/gitlab/relative_positioning/ending_at.rb
Normal file
|
@ -0,0 +1,18 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module RelativePositioning
|
||||
class EndingAt < RelativePositioning::Range
|
||||
include Gitlab::Utils::StrongMemoize
|
||||
|
||||
def initialize(rhs)
|
||||
@rhs = rhs
|
||||
raise IllegalRange, 'rhs is required' unless rhs
|
||||
end
|
||||
|
||||
def lhs
|
||||
strong_memoize(:lhs) { rhs.lhs_neighbour }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -31,39 +31,5 @@ module Gitlab
|
|||
other.is_a?(RelativePositioning::Range) && lhs == other.lhs && rhs == other.rhs
|
||||
end
|
||||
end
|
||||
|
||||
class ClosedRange < RelativePositioning::Range
|
||||
def initialize(lhs, rhs)
|
||||
@lhs, @rhs = lhs, rhs
|
||||
raise IllegalRange, 'Either lhs or rhs is missing' unless lhs && rhs
|
||||
raise IllegalRange, 'lhs and rhs cannot be the same object' if lhs == rhs
|
||||
end
|
||||
end
|
||||
|
||||
class StartingFrom < RelativePositioning::Range
|
||||
include Gitlab::Utils::StrongMemoize
|
||||
|
||||
def initialize(lhs)
|
||||
@lhs = lhs
|
||||
raise IllegalRange, 'lhs is required' unless lhs
|
||||
end
|
||||
|
||||
def rhs
|
||||
strong_memoize(:rhs) { lhs.rhs_neighbour }
|
||||
end
|
||||
end
|
||||
|
||||
class EndingAt < RelativePositioning::Range
|
||||
include Gitlab::Utils::StrongMemoize
|
||||
|
||||
def initialize(rhs)
|
||||
@rhs = rhs
|
||||
raise IllegalRange, 'rhs is required' unless rhs
|
||||
end
|
||||
|
||||
def lhs
|
||||
strong_memoize(:lhs) { rhs.lhs_neighbour }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
18
lib/gitlab/relative_positioning/starting_from.rb
Normal file
|
@ -0,0 +1,18 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module RelativePositioning
|
||||
class StartingFrom < RelativePositioning::Range
|
||||
include Gitlab::Utils::StrongMemoize
|
||||
|
||||
def initialize(lhs)
|
||||
@lhs = lhs
|
||||
raise IllegalRange, 'lhs is required' unless lhs
|
||||
end
|
||||
|
||||
def rhs
|
||||
strong_memoize(:rhs) { lhs.rhs_neighbour }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
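For reference, a minimal sketch of how the extracted range classes above behave. It assumes a GitLab Rails console where the constants are loaded; FakeItem is a hypothetical stand-in for a real positioned record, and the lhs/rhs readers are assumed to come from the parent RelativePositioning::Range class whose equality check appears above.

# Hedged sketch: exercise ClosedRange / StartingFrom / EndingAt from a GitLab
# Rails console. FakeItem is a hypothetical stub; only the neighbour lookups
# the classes above call are provided.
FakeItem = Struct.new(:lhs_neighbour, :rhs_neighbour)

a = FakeItem.new(nil, nil)
b = FakeItem.new(a, nil)
a.rhs_neighbour = b

Gitlab::RelativePositioning::ClosedRange.new(a, b)    # builds a valid range
Gitlab::RelativePositioning::StartingFrom.new(a).rhs  # => b (memoized a.rhs_neighbour)
Gitlab::RelativePositioning::EndingAt.new(b).lhs      # => a (memoized b.lhs_neighbour)

# Gitlab::RelativePositioning::ClosedRange.new(a, a)
# => raises IllegalRange, 'lhs and rhs cannot be the same object'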
|
@ -1,39 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Sentry
|
||||
class ApiUrls
|
||||
def initialize(url_base)
|
||||
@uri = URI(url_base).freeze
|
||||
end
|
||||
|
||||
def issues_url
|
||||
with_path(File.join(@uri.path, '/issues/'))
|
||||
end
|
||||
|
||||
def issue_url(issue_id)
|
||||
with_path("/api/0/issues/#{escape(issue_id)}/")
|
||||
end
|
||||
|
||||
def projects_url
|
||||
with_path('/api/0/projects/')
|
||||
end
|
||||
|
||||
def issue_latest_event_url(issue_id)
|
||||
with_path("/api/0/issues/#{escape(issue_id)}/events/latest/")
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def with_path(new_path)
|
||||
new_uri = @uri.dup
|
||||
# Sentry API returns 404 if there are extra slashes in the URL
|
||||
new_uri.path = new_path.squeeze('/')
|
||||
|
||||
new_uri
|
||||
end
|
||||
|
||||
def escape(param)
|
||||
CGI.escape(param.to_s)
|
||||
end
|
||||
end
|
||||
end
|
|
@ -1,23 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Sentry
|
||||
module PaginationParser
|
||||
PATTERN = /rel=\"(?<direction>\w+)\";\sresults=\"(?<results>\w+)\";\scursor=\"(?<cursor>.+)\"/.freeze
|
||||
|
||||
def self.parse(headers)
|
||||
links = headers['link'].to_s.split(',')
|
||||
|
||||
links.map { |link| parse_link(link) }.compact.to_h
|
||||
end
|
||||
|
||||
def self.parse_link(link)
|
||||
match = link.match(PATTERN)
|
||||
|
||||
return unless match
|
||||
return if match['results'] != "true"
|
||||
|
||||
[match['direction'], { 'cursor' => match['cursor'] }]
|
||||
end
|
||||
private_class_method :parse_link
|
||||
end
|
||||
end
|
|
@ -1965,9 +1965,21 @@ msgstr ""
|
|||
msgid "AddContextCommits|Add/remove"
|
||||
msgstr ""
|
||||
|
||||
msgid "AddMember|Already a member of %{source_name}"
|
||||
msgstr ""
|
||||
|
||||
msgid "AddMember|Email cannot be blank"
|
||||
msgstr ""
|
||||
|
||||
msgid "AddMember|Invite limit of %{daily_invites} per day exceeded"
|
||||
msgstr ""
|
||||
|
||||
msgid "AddMember|Member already invited to %{source_name}"
|
||||
msgstr ""
|
||||
|
||||
msgid "AddMember|Member cannot be invited because they already requested to join %{source_name}"
|
||||
msgstr ""
|
||||
|
||||
msgid "AddMember|No users specified."
|
||||
msgstr ""
|
||||
|
||||
|
@ -8673,6 +8685,9 @@ msgstr ""
|
|||
msgid "Create new %{name} by email"
|
||||
msgstr ""
|
||||
|
||||
msgid "Create new CI/CD pipeline"
|
||||
msgstr ""
|
||||
|
||||
msgid "Create new Value Stream"
|
||||
msgstr ""
|
||||
|
||||
|
@ -11111,9 +11126,6 @@ msgstr ""
|
|||
msgid "Email address to use for Support Desk"
|
||||
msgstr ""
|
||||
|
||||
msgid "Email cannot be blank"
|
||||
msgstr ""
|
||||
|
||||
msgid "Email could not be sent"
|
||||
msgstr ""
|
||||
|
||||
|
@ -17368,6 +17380,9 @@ msgstr ""
|
|||
msgid "Keep editing"
|
||||
msgstr ""
|
||||
|
||||
msgid "Keeping all SAST analyzers enabled future-proofs the project in case new languages are added later on. Determining which analyzers apply is a process that consumes minimal resources and adds minimal time to the pipeline. Leaving all SAST analyzers enabled ensures maximum coverage."
|
||||
msgstr ""
|
||||
|
||||
msgid "Kerberos access denied"
|
||||
msgstr ""
|
||||
|
||||
|
@ -31412,9 +31427,6 @@ msgstr ""
|
|||
msgid "Too many projects enabled. You will need to manage them via the console or the API."
|
||||
msgstr ""
|
||||
|
||||
msgid "Too many users specified (limit is %{user_limit})"
|
||||
msgstr ""
|
||||
|
||||
msgid "Too much data"
|
||||
msgstr ""
|
||||
|
||||
|
@ -33422,6 +33434,9 @@ msgstr ""
|
|||
msgid "We recommend cloud-based mobile authenticator apps such as Authy, Duo Mobile, and LastPass. They can restore access if you lose your hardware device."
|
||||
msgstr ""
|
||||
|
||||
msgid "We recommend leaving all SAST analyzers enabled"
|
||||
msgstr ""
|
||||
|
||||
msgid "We recommend that you buy more Pipeline minutes to avoid any interruption of service."
|
||||
msgstr ""
|
||||
|
||||
|
@ -33443,6 +33458,9 @@ msgstr ""
|
|||
msgid "We would like to inform you that your subscription GitLab Enterprise Edition %{plan_name} is nearing its user limit. You have %{active_user_count} active users, which is almost at the user limit of %{maximum_user_count}."
|
||||
msgstr ""
|
||||
|
||||
msgid "We'll continuously validate your pipeline configuration. The validation results will appear here."
|
||||
msgstr ""
|
||||
|
||||
msgid "We've found no vulnerabilities"
|
||||
msgstr ""
|
||||
|
||||
|
@ -34816,6 +34834,9 @@ msgstr ""
|
|||
msgid "[No reason]"
|
||||
msgstr ""
|
||||
|
||||
msgid "[Unchanged]"
|
||||
msgstr ""
|
||||
|
||||
msgid "`end_time` should not exceed one month after `start_time`"
|
||||
msgstr ""
|
||||
|
||||
|
@ -35189,6 +35210,9 @@ msgstr ""
|
|||
msgid "ciReport|is loading, errors when loading results"
|
||||
msgstr ""
|
||||
|
||||
msgid "closed"
|
||||
msgstr ""
|
||||
|
||||
msgid "closed issue"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
@ -6,12 +6,20 @@ RSpec.describe MergeRequests::OldestPerCommitFinder do
|
|||
describe '#execute' do
|
||||
it 'returns a Hash mapping commit SHAs to their oldest merge requests' do
|
||||
project = create(:project)
|
||||
mr1 = create(:merge_request, :merged, target_project: project)
|
||||
mr2 = create(:merge_request, :merged, target_project: project)
|
||||
mr1_diff = create(:merge_request_diff, merge_request: mr1)
|
||||
mr2_diff = create(:merge_request_diff, merge_request: mr2)
|
||||
sha1 = Digest::SHA1.hexdigest('foo')
|
||||
sha2 = Digest::SHA1.hexdigest('bar')
|
||||
sha3 = Digest::SHA1.hexdigest('baz')
|
||||
mr1 = create(:merge_request, :merged, target_project: project)
|
||||
mr2 = create(:merge_request, :merged, target_project: project)
|
||||
mr3 = create(
|
||||
:merge_request,
|
||||
:merged,
|
||||
target_project: project,
|
||||
merge_commit_sha: sha3
|
||||
)
|
||||
|
||||
mr1_diff = create(:merge_request_diff, merge_request: mr1)
|
||||
mr2_diff = create(:merge_request_diff, merge_request: mr2)
|
||||
|
||||
create(:merge_request_diff_commit, merge_request_diff: mr1_diff, sha: sha1)
|
||||
create(:merge_request_diff_commit, merge_request_diff: mr2_diff, sha: sha1)
|
||||
|
@ -22,11 +30,16 @@ RSpec.describe MergeRequests::OldestPerCommitFinder do
|
|||
relative_order: 1
|
||||
)
|
||||
|
||||
commits = [double(:commit, id: sha1), double(:commit, id: sha2)]
|
||||
commits = [
|
||||
double(:commit, id: sha1),
|
||||
double(:commit, id: sha2),
|
||||
double(:commit, id: sha3)
|
||||
]
|
||||
|
||||
expect(described_class.new(project).execute(commits)).to eq(
|
||||
sha1 => mr1,
|
||||
sha2 => mr2
|
||||
sha2 => mr2,
|
||||
sha3 => mr3
|
||||
)
|
||||
end
|
||||
|
||||
|
@ -42,5 +55,45 @@ RSpec.describe MergeRequests::OldestPerCommitFinder do
|
|||
expect(described_class.new(mr.target_project).execute(commits))
|
||||
.to be_empty
|
||||
end
|
||||
|
||||
it 'includes the merge request for a merge commit' do
|
||||
project = create(:project)
|
||||
sha = Digest::SHA1.hexdigest('foo')
|
||||
mr = create(
|
||||
:merge_request,
|
||||
:merged,
|
||||
target_project: project,
|
||||
merge_commit_sha: sha
|
||||
)
|
||||
|
||||
commits = [double(:commit, id: sha)]
|
||||
|
||||
# This expectation is set so we're certain that the merge commit SHAs (if
|
||||
# a matching merge request is found) aren't also used for finding MRs
|
||||
# according to diffs.
|
||||
expect(MergeRequestDiffCommit)
|
||||
.not_to receive(:oldest_merge_request_id_per_commit)
|
||||
|
||||
expect(described_class.new(project).execute(commits)).to eq(sha => mr)
|
||||
end
|
||||
|
||||
it 'includes the oldest merge request when a merge commit is present in a newer merge request' do
|
||||
project = create(:project)
|
||||
sha = Digest::SHA1.hexdigest('foo')
|
||||
mr1 = create(
|
||||
:merge_request,
|
||||
:merged,
|
||||
target_project: project, merge_commit_sha: sha
|
||||
)
|
||||
|
||||
mr2 = create(:merge_request, :merged, target_project: project)
|
||||
mr_diff = create(:merge_request_diff, merge_request: mr2)
|
||||
|
||||
create(:merge_request_diff_commit, merge_request_diff: mr_diff, sha: sha)
|
||||
|
||||
commits = [double(:commit, id: sha)]
|
||||
|
||||
expect(described_class.new(project).execute(commits)).to eq(sha => mr1)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -14,7 +14,6 @@ settings:
|
|||
globals:
|
||||
getJSONFixture: false
|
||||
loadFixtures: false
|
||||
preloadFixtures: false
|
||||
setFixtures: false
|
||||
rules:
|
||||
jest/expect-expect:
|
||||
|
|
4
spec/frontend/__helpers__/fake_date/fixtures.js
Normal file
|
@ -0,0 +1,4 @@
|
|||
import { useFakeDate } from './jest';
|
||||
|
||||
// Also see spec/support/helpers/javascript_fixtures_helpers.rb
|
||||
export const useFixturesFakeDate = () => useFakeDate(2015, 6, 3, 10);
|
|
@ -1,2 +1,3 @@
|
|||
export * from './fake_date';
|
||||
export * from './jest';
|
||||
export * from './fixtures';
|
||||
|
|
|
@ -2,6 +2,8 @@ import { GlTable, GlAlert, GlLoadingIcon, GlDropdown, GlIcon, GlAvatar } from '@
|
|||
import { mount } from '@vue/test-utils';
|
||||
import axios from 'axios';
|
||||
import MockAdapter from 'axios-mock-adapter';
|
||||
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
|
||||
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
|
||||
import mockAlerts from 'jest/vue_shared/alert_details/mocks/alerts.json';
|
||||
import AlertManagementTable from '~/alert_management/components/alert_management_table.vue';
|
||||
import { visitUrl } from '~/lib/utils/url_utility';
|
||||
|
@ -18,19 +20,18 @@ describe('AlertManagementTable', () => {
|
|||
let wrapper;
|
||||
let mock;
|
||||
|
||||
const findAlertsTable = () => wrapper.find(GlTable);
|
||||
const findAlertsTable = () => wrapper.findComponent(GlTable);
|
||||
const findAlerts = () => wrapper.findAll('table tbody tr');
|
||||
const findAlert = () => wrapper.find(GlAlert);
|
||||
const findLoader = () => wrapper.find(GlLoadingIcon);
|
||||
const findStatusDropdown = () => wrapper.find(GlDropdown);
|
||||
const findDateFields = () => wrapper.findAll(TimeAgo);
|
||||
const findSearch = () => wrapper.find(FilteredSearchBar);
|
||||
const findSeverityColumnHeader = () =>
|
||||
wrapper.find('[data-testid="alert-management-severity-sort"]');
|
||||
const findFirstIDField = () => wrapper.findAll('[data-testid="idField"]').at(0);
|
||||
const findAssignees = () => wrapper.findAll('[data-testid="assigneesField"]');
|
||||
const findSeverityFields = () => wrapper.findAll('[data-testid="severityField"]');
|
||||
const findIssueFields = () => wrapper.findAll('[data-testid="issueField"]');
|
||||
const findAlert = () => wrapper.findComponent(GlAlert);
|
||||
const findLoader = () => wrapper.findComponent(GlLoadingIcon);
|
||||
const findStatusDropdown = () => wrapper.findComponent(GlDropdown);
|
||||
const findDateFields = () => wrapper.findAllComponents(TimeAgo);
|
||||
const findSearch = () => wrapper.findComponent(FilteredSearchBar);
|
||||
const findSeverityColumnHeader = () => wrapper.findByTestId('alert-management-severity-sort');
|
||||
const findFirstIDField = () => wrapper.findAllByTestId('idField').at(0);
|
||||
const findAssignees = () => wrapper.findAllByTestId('assigneesField');
|
||||
const findSeverityFields = () => wrapper.findAllByTestId('severityField');
|
||||
const findIssueFields = () => wrapper.findAllByTestId('issueField');
|
||||
const alertsCount = {
|
||||
open: 24,
|
||||
triggered: 20,
|
||||
|
@ -40,29 +41,34 @@ describe('AlertManagementTable', () => {
|
|||
};
|
||||
|
||||
function mountComponent({ provide = {}, data = {}, loading = false, stubs = {} } = {}) {
|
||||
wrapper = mount(AlertManagementTable, {
|
||||
provide: {
|
||||
...defaultProvideValues,
|
||||
alertManagementEnabled: true,
|
||||
userCanEnableAlertManagement: true,
|
||||
...provide,
|
||||
},
|
||||
data() {
|
||||
return data;
|
||||
},
|
||||
mocks: {
|
||||
$apollo: {
|
||||
mutate: jest.fn(),
|
||||
query: jest.fn(),
|
||||
queries: {
|
||||
alerts: {
|
||||
loading,
|
||||
wrapper = extendedWrapper(
|
||||
mount(AlertManagementTable, {
|
||||
provide: {
|
||||
...defaultProvideValues,
|
||||
alertManagementEnabled: true,
|
||||
userCanEnableAlertManagement: true,
|
||||
...provide,
|
||||
},
|
||||
data() {
|
||||
return data;
|
||||
},
|
||||
mocks: {
|
||||
$apollo: {
|
||||
mutate: jest.fn(),
|
||||
query: jest.fn(),
|
||||
queries: {
|
||||
alerts: {
|
||||
loading,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
stubs,
|
||||
});
|
||||
stubs,
|
||||
directives: {
|
||||
GlTooltip: createMockDirective(),
|
||||
},
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
|
@ -72,7 +78,6 @@ describe('AlertManagementTable', () => {
|
|||
afterEach(() => {
|
||||
if (wrapper) {
|
||||
wrapper.destroy();
|
||||
wrapper = null;
|
||||
}
|
||||
mock.restore();
|
||||
});
|
||||
|
@ -241,9 +246,14 @@ describe('AlertManagementTable', () => {
|
|||
expect(findIssueFields().at(0).text()).toBe('None');
|
||||
});
|
||||
|
||||
it('renders a link when one exists', () => {
|
||||
expect(findIssueFields().at(1).text()).toBe('#1');
|
||||
expect(findIssueFields().at(1).attributes('href')).toBe('/gitlab-org/gitlab/-/issues/1');
|
||||
it('renders a link when one exists with the issue state and title tooltip', () => {
|
||||
const issueField = findIssueFields().at(1);
|
||||
const tooltip = getBinding(issueField.element, 'gl-tooltip');
|
||||
|
||||
expect(issueField.text()).toBe(`#1 (closed)`);
|
||||
expect(issueField.attributes('href')).toBe('/gitlab-org/gitlab/-/issues/1');
|
||||
expect(issueField.attributes('title')).toBe('My test issue');
|
||||
expect(tooltip).not.toBe(undefined);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
@ -8,8 +8,6 @@ describe('U2FAuthenticate', () => {
|
|||
let container;
|
||||
let component;
|
||||
|
||||
preloadFixtures('u2f/authenticate.html');
|
||||
|
||||
beforeEach(() => {
|
||||
loadFixtures('u2f/authenticate.html');
|
||||
u2fDevice = new MockU2FDevice();
|
||||
|
|
|
@ -8,8 +8,6 @@ describe('U2FRegister', () => {
|
|||
let container;
|
||||
let component;
|
||||
|
||||
preloadFixtures('u2f/register.html');
|
||||
|
||||
beforeEach((done) => {
|
||||
loadFixtures('u2f/register.html');
|
||||
u2fDevice = new MockU2FDevice();
|
||||
|
|
|
@ -13,7 +13,6 @@ const mockResponse = {
|
|||
};
|
||||
|
||||
describe('WebAuthnAuthenticate', () => {
|
||||
preloadFixtures('webauthn/authenticate.html');
|
||||
useMockNavigatorCredentials();
|
||||
|
||||
let fallbackElement;
|
||||
|
|
|
@ -5,7 +5,6 @@ import MockWebAuthnDevice from './mock_webauthn_device';
|
|||
import { useMockNavigatorCredentials } from './util';
|
||||
|
||||
describe('WebAuthnRegister', () => {
|
||||
preloadFixtures('webauthn/register.html');
|
||||
useMockNavigatorCredentials();
|
||||
|
||||
const mockResponse = {
|
||||
|
|
|
@ -60,7 +60,6 @@ describe('AwardsHandler', () => {
|
|||
u: '6.0',
|
||||
},
|
||||
};
|
||||
preloadFixtures('snippets/show.html');
|
||||
|
||||
const openAndWaitForEmojiMenu = (sel = '.js-add-award') => {
|
||||
$(sel).eq(0).click();
|
||||
|
|
|
@ -6,8 +6,6 @@ describe('Quick Submit behavior', () => {
|
|||
|
||||
const keydownEvent = (options = { keyCode: 13, metaKey: true }) => $.Event('keydown', options);
|
||||
|
||||
preloadFixtures('snippets/show.html');
|
||||
|
||||
beforeEach(() => {
|
||||
loadFixtures('snippets/show.html');
|
||||
|
||||
|
|
|
@ -3,7 +3,6 @@ import '~/behaviors/requires_input';
|
|||
|
||||
describe('requiresInput', () => {
|
||||
let submitButton;
|
||||
preloadFixtures('branches/new_branch.html');
|
||||
|
||||
beforeEach(() => {
|
||||
loadFixtures('branches/new_branch.html');
|
||||
|
|
|
@ -13,8 +13,6 @@ describe('ShortcutsIssuable', () => {
|
|||
const snippetShowFixtureName = 'snippets/show.html';
|
||||
const mrShowFixtureName = 'merge_requests/merge_request_of_current_user.html';
|
||||
|
||||
preloadFixtures(snippetShowFixtureName, mrShowFixtureName);
|
||||
|
||||
beforeAll((done) => {
|
||||
initCopyAsGFM();
|
||||
|
||||
|
|
|
@ -7,7 +7,6 @@ jest.mock('~/projects/upload_file_experiment', () => ({
|
|||
}));
|
||||
|
||||
describe('BlobFileDropzone', () => {
|
||||
preloadFixtures('blob/show.html');
|
||||
let dropzone;
|
||||
let replaceFileButton;
|
||||
|
||||
|
|
|
@ -4,8 +4,6 @@ import SketchLoader from '~/blob/sketch';
|
|||
jest.mock('jszip');
|
||||
|
||||
describe('Sketch viewer', () => {
|
||||
preloadFixtures('static/sketch_viewer.html');
|
||||
|
||||
beforeEach(() => {
|
||||
loadFixtures('static/sketch_viewer.html');
|
||||
});
|
||||
|
|
|
@ -16,8 +16,6 @@ describe('Blob viewer', () => {
|
|||
|
||||
setTestTimeout(2000);
|
||||
|
||||
preloadFixtures('blob/show_readme.html');
|
||||
|
||||
beforeEach(() => {
|
||||
$.fn.extend(jQueryMock);
|
||||
mock = new MockAdapter(axios);
|
||||
|
|
2
spec/frontend/bootstrap_linked_tabs_spec.js
vendored
|
@ -1,8 +1,6 @@
|
|||
import LinkedTabs from '~/lib/utils/bootstrap_linked_tabs';
|
||||
|
||||
describe('Linked Tabs', () => {
|
||||
preloadFixtures('static/linked_tabs.html');
|
||||
|
||||
beforeEach(() => {
|
||||
loadFixtures('static/linked_tabs.html');
|
||||
});
|
||||
|
|
|
@ -4,9 +4,6 @@ import VariableList from '~/ci_variable_list/ci_variable_list';
|
|||
const HIDE_CLASS = 'hide';
|
||||
|
||||
describe('VariableList', () => {
|
||||
preloadFixtures('pipeline_schedules/edit.html');
|
||||
preloadFixtures('pipeline_schedules/edit_with_variables.html');
|
||||
|
||||
let $wrapper;
|
||||
let variableList;
|
||||
|
||||
|
|
|
@ -2,8 +2,6 @@ import $ from 'jquery';
|
|||
import setupNativeFormVariableList from '~/ci_variable_list/native_form_variable_list';
|
||||
|
||||
describe('NativeFormVariableList', () => {
|
||||
preloadFixtures('pipeline_schedules/edit.html');
|
||||
|
||||
let $wrapper;
|
||||
|
||||
beforeEach(() => {
|
||||
|
|
|
@ -14,9 +14,6 @@ describe('Issuable right sidebar collapsed todo toggle', () => {
|
|||
const jsonFixtureName = 'todos/todos.json';
|
||||
let mock;
|
||||
|
||||
preloadFixtures(fixtureName);
|
||||
preloadFixtures(jsonFixtureName);
|
||||
|
||||
beforeEach(() => {
|
||||
const todoData = getJSONFixture(jsonFixtureName);
|
||||
new Sidebar();
|
||||
|
|
|
@ -17,8 +17,6 @@ describe('Pipelines table in Commits and Merge requests', () => {
|
|||
errorStateSvgPath: 'foo',
|
||||
};
|
||||
|
||||
preloadFixtures(jsonFixtureName);
|
||||
|
||||
const findRunPipelineBtn = () => vm.$el.querySelector('[data-testid="run_pipeline_button"]');
|
||||
const findRunPipelineBtnMobile = () =>
|
||||
vm.$el.querySelector('[data-testid="run_pipeline_button_mobile"]');
|
||||
|
|
|
@ -20,8 +20,6 @@ const DROPDOWN_ITEM_DATA = [
|
|||
];
|
||||
|
||||
describe('CreateItemDropdown', () => {
|
||||
preloadFixtures('static/create_item_dropdown.html');
|
||||
|
||||
let $wrapperEl;
|
||||
let createItemDropdown;
|
||||
|
||||
|
|
|
@ -10,8 +10,6 @@ jest.mock('~/lib/utils/url_utility', () => ({
|
|||
}));
|
||||
|
||||
describe('deprecatedJQueryDropdown', () => {
|
||||
preloadFixtures('static/deprecated_jquery_dropdown.html');
|
||||
|
||||
const NON_SELECTABLE_CLASSES =
|
||||
'.divider, .separator, .dropdown-header, .dropdown-menu-empty-item';
|
||||
const SEARCH_INPUT_SELECTOR = '.dropdown-input-field';
|
||||
|
|
|
@ -1,7 +1,5 @@
|
|||
const FIXTURE = 'merge_request_diffs/with_commit.json';
|
||||
|
||||
preloadFixtures(FIXTURE);
|
||||
|
||||
export default function getDiffWithCommit() {
|
||||
return getJSONFixture(FIXTURE);
|
||||
}
|
||||
|
|
|
@ -78,7 +78,6 @@ describe('Dropdown User', () => {
|
|||
|
||||
describe('hideCurrentUser', () => {
|
||||
const fixtureTemplate = 'issues/issue_list.html';
|
||||
preloadFixtures(fixtureTemplate);
|
||||
|
||||
let dropdown;
|
||||
let authorFilterDropdownElement;
|
||||
|
|
|
@ -5,7 +5,6 @@ import IssuableFilteredSearchTokenKeys from '~/filtered_search/issuable_filtered
|
|||
|
||||
describe('Dropdown Utils', () => {
|
||||
const issueListFixture = 'issues/issue_list.html';
|
||||
preloadFixtures(issueListFixture);
|
||||
|
||||
describe('getEscapedText', () => {
|
||||
it('should return same word when it has no space', () => {
|
||||
|
|
|
@ -133,8 +133,6 @@ describe('Filtered Search Visual Tokens', () => {
|
|||
const jsonFixtureName = 'labels/project_labels.json';
|
||||
const dummyEndpoint = '/dummy/endpoint';
|
||||
|
||||
preloadFixtures(jsonFixtureName);
|
||||
|
||||
let labelData;
|
||||
|
||||
beforeAll(() => {
|
||||
|
|
|
@ -8,8 +8,6 @@ describe('GL Style Field Errors', () => {
|
|||
testContext = {};
|
||||
});
|
||||
|
||||
preloadFixtures('static/gl_field_errors.html');
|
||||
|
||||
beforeEach(() => {
|
||||
loadFixtures('static/gl_field_errors.html');
|
||||
const $form = $('form.gl-show-field-errors');
|
||||
|
|
|
@ -15,7 +15,6 @@ describe('Header', () => {
|
|||
$(document).trigger('todo:toggle', newCount);
|
||||
}
|
||||
|
||||
preloadFixtures(fixtureTemplate);
|
||||
beforeEach(() => {
|
||||
initTodoToggle();
|
||||
loadFixtures(fixtureTemplate);
|
||||
|
|
|
@ -7,7 +7,6 @@ jest.mock('~/vue_shared/plugins/global_toast');
|
|||
|
||||
describe('IntegrationSettingsForm', () => {
|
||||
const FIXTURE = 'services/edit_service.html';
|
||||
preloadFixtures(FIXTURE);
|
||||
|
||||
beforeEach(() => {
|
||||
loadFixtures(FIXTURE);
|
||||
|
|
|
@ -8,11 +8,6 @@ describe('Issue', () => {
|
|||
let testContext;
|
||||
let mock;
|
||||
|
||||
beforeAll(() => {
|
||||
preloadFixtures('issues/closed-issue.html');
|
||||
preloadFixtures('issues/open-issue.html');
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
mock = new MockAdapter(axios);
|
||||
mock.onGet(/(.*)\/related_branches$/).reply(200, {});
|
||||
|
|
|
@ -7,7 +7,6 @@ import LineHighlighter from '~/line_highlighter';
|
|||
describe('LineHighlighter', () => {
|
||||
const testContext = {};
|
||||
|
||||
preloadFixtures('static/line_highlighter.html');
|
||||
const clickLine = (number, eventData = {}) => {
|
||||
if ($.isEmptyObject(eventData)) {
|
||||
return $(`#L${number}`).click();
|
||||
|
|
|
@ -9,7 +9,6 @@ describe('MergeRequest', () => {
|
|||
describe('task lists', () => {
|
||||
let mock;
|
||||
|
||||
preloadFixtures('merge_requests/merge_request_with_task_list.html');
|
||||
beforeEach(() => {
|
||||
loadFixtures('merge_requests/merge_request_with_task_list.html');
|
||||
|
||||
|
|
|
@ -21,11 +21,6 @@ describe('MergeRequestTabs', () => {
|
|||
$.extend(stubLocation, defaults, stubs || {});
|
||||
};
|
||||
|
||||
preloadFixtures(
|
||||
'merge_requests/merge_request_with_task_list.html',
|
||||
'merge_requests/diff_comment.html',
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
initMrPage();
|
||||
|
||||
|
|
|
@ -5,8 +5,6 @@ import axios from '~/lib/utils/axios_utils';
|
|||
import MiniPipelineGraph from '~/mini_pipeline_graph_dropdown';
|
||||
|
||||
describe('Mini Pipeline Graph Dropdown', () => {
|
||||
preloadFixtures('static/mini_dropdown_graph.html');
|
||||
|
||||
beforeEach(() => {
|
||||
loadFixtures('static/mini_dropdown_graph.html');
|
||||
});
|
||||
|
|
|
@ -9,8 +9,6 @@ describe('Branch', () => {
|
|||
});
|
||||
|
||||
describe('create a new branch', () => {
|
||||
preloadFixtures('branches/new_branch.html');
|
||||
|
||||
function fillNameWith(value) {
|
||||
$('.js-branch-name').val(value).trigger('blur');
|
||||
}
|
||||
|
|
Some files were not shown because too many files have changed in this diff