Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-06-17 15:08:36 +00:00
parent a89912871c
commit c61d90dbfa
67 changed files with 1348 additions and 289 deletions

View File

@ -62,7 +62,7 @@ export default {
},
},
mounted() {
this.$refs.textarea.focus();
this.focusInput();
},
methods: {
submitForm() {
@ -75,6 +75,9 @@ export default {
this.$emit('cancelForm');
}
},
focusInput() {
this.$refs.textarea.focus();
},
},
};
</script>

View File

@ -254,6 +254,9 @@ export default {
},
openCommentForm(annotationCoordinates) {
this.annotationCoordinates = annotationCoordinates;
if (this.$refs.newDiscussionForm) {
this.$refs.newDiscussionForm.focusInput();
}
},
closeCommentForm() {
this.comment = '';
@ -361,6 +364,7 @@ export default {
@error="onCreateImageDiffNoteError"
>
<design-reply-form
ref="newDiscussionForm"
v-model="comment"
:is-saving="loading"
:markdown-preview-path="markdownPreviewPath"

View File

@ -218,7 +218,7 @@ export default {
<gl-chart-series-label :color="content.color">
{{ content.name }}
</gl-chart-series-label>
<div class="prepend-left-32">
<div class="gl-ml-7">
{{ yValueFormatted(seriesIndex, content.dataIndex) }}
</div>
</div>

View File

@ -415,7 +415,7 @@ export default {
<gl-chart-series-label :color="isMultiSeries ? content.color : ''">
{{ content.name }}
</gl-chart-series-label>
<div class="prepend-left-32">
<div class="gl-ml-7">
{{ content.value }}
</div>
</div>

View File

@ -56,6 +56,7 @@ export default {
blob: {},
fileName: '',
content: '',
originalContent: '',
isContentLoading: true,
isUpdating: false,
newSnippet: false,
@ -97,7 +98,24 @@ export default {
return `${this.isProjectSnippet ? 'project' : 'personal'}_snippet_description`;
},
},
created() {
window.addEventListener('beforeunload', this.onBeforeUnload);
},
destroyed() {
window.removeEventListener('beforeunload', this.onBeforeUnload);
},
methods: {
onBeforeUnload(e = {}) {
const returnValue = __('Are you sure you want to lose unsaved changes?');
if (!this.hasChanges()) return undefined;
Object.assign(e, { returnValue });
return returnValue;
},
hasChanges() {
return this.content !== this.originalContent;
},
updateFileName(newName) {
this.fileName = newName;
},
@ -125,7 +143,9 @@ export default {
axios
.get(url)
.then(res => {
this.originalContent = res.data;
this.content = res.data;
this.isContentLoading = false;
})
.catch(e => this.flashAPIFailure(e));
@ -172,6 +192,7 @@ export default {
if (errors.length) {
this.flashAPIFailure(errors[0]);
} else {
this.originalContent = this.content;
redirectTo(baseObj.snippet.webUrl);
}
})

View File

@ -58,15 +58,16 @@ export default {
methods: {
syncSource() {
if (this.isWysiwygMode) {
this.parsedSource.syncBody();
this.parsedSource.syncBodyToRaw();
return;
}
this.parsedSource.syncRaw();
this.parsedSource.syncRawToBody();
},
onModeChange(mode) {
this.editorMode = mode;
// Sync the source first, then switch modes, so rich-content-editor's v-model picks up the updated source content
this.syncSource();
this.editorMode = mode;
},
onSubmit() {
this.syncSource();

View File

@ -24,7 +24,7 @@ const parseSourceFile = raw => {
const computedRaw = () => `${editable.header}${editable.spacing}${editable.body}`;
const syncBody = () => {
const syncRawToBody = () => {
/*
We re-parse as markdown editing could have added non-body changes (preFrontMatter, frontMatter, or spacing).
Re-parsing additionally gets us the desired body that was extracted from the mutated editable.raw
@ -33,7 +33,7 @@ const parseSourceFile = raw => {
Object.assign(editable, parse(editable.raw));
};
const syncRaw = () => {
const syncBodyToRaw = () => {
editable.raw = computedRaw();
};
@ -47,8 +47,8 @@ const parseSourceFile = raw => {
editable,
isModifiedRaw,
isModifiedBody,
syncRaw,
syncBody,
syncBodyToRaw,
syncRawToBody,
};
};

View File

@ -407,7 +407,6 @@ img.emoji {
.prepend-left-15 { margin-left: 15px; }
.prepend-left-default { margin-left: $gl-padding; }
.prepend-left-20 { margin-left: 20px; }
.prepend-left-32 { margin-left: 32px; }
.prepend-left-64 { margin-left: 64px; }
.append-right-2 { margin-right: 2px; }
.append-right-4 { margin-right: 4px; }

View File

@ -571,8 +571,8 @@
.header-user-notification-dot {
background-color: $orange-500;
height: 10px;
width: 10px;
height: 12px;
width: 12px;
right: 8px;
top: -8px;
}

View File

@ -113,29 +113,29 @@ $gl-gray-600: #666 !default;
$gl-gray-700: #555 !default;
$gl-gray-800: #333 !default;
$green-50: #f1fdf6 !default;
$green-100: #dcf5e7 !default;
$green-200: #263a2e !default;
$green-300: #75d09b !default;
$green-400: #37b96d !default;
$green-500: #1aaa55 !default;
$green-600: #168f48 !default;
$green-700: #12753a !default;
$green-800: #0e5a2d !default;
$green-50: #ecf4ee !default;
$green-100: #c3e6cd !default;
$green-200: #91d4a8 !default;
$green-300: #52b87a !default;
$green-400: #2da160 !default;
$green-500: #108548 !default;
$green-600: #217645 !default;
$green-700: #24663b !default;
$green-800: #0d532a !default;
$green-900: #0a4020 !default;
$green-950: #072b15 !default;
$blue-50: #f6fafe !default;
$blue-100: #e4f0fb !default;
$blue-200: #b8d6f4 !default;
$blue-300: #73afea !default;
$blue-400: #418cd8 !default;
$blue-500: #1f78d1 !default;
$blue-600: #1b69b6 !default;
$blue-700: #17599c !default;
$blue-800: #134a81 !default;
$blue-900: #0f3b66 !default;
$blue-950: #0a2744 !default;
$blue-50: #e9f3fc !default;
$blue-100: #cbe2f9 !default;
$blue-200: #9dc7f1 !default;
$blue-300: #63a6e9 !default;
$blue-400: #428fdc !default;
$blue-500: #1f75cb !default;
$blue-600: #1068bf !default;
$blue-700: #0b5cad !default;
$blue-800: #064787 !default;
$blue-900: #033464 !default;
$blue-950: #002850 !default;
$orange-50: #fffaf4 !default;
$orange-100: #fff1de !default;

View File

@ -1,19 +0,0 @@
# frozen_string_literal: true
# Shared scope between Route and RedirectRoute
module RouteModelQuery
extend ActiveSupport::Concern
class_methods do
def find_source_of_path(path, case_sensitive: true)
scope =
if case_sensitive
where(path: path)
else
where('LOWER(path) = LOWER(?)', path)
end
scope.first&.source
end
end
end

View File

@ -1,8 +1,6 @@
# frozen_string_literal: true
class RedirectRoute < ApplicationRecord
include RouteModelQuery
belongs_to :source, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
validates :source, presence: true

View File

@ -3,7 +3,6 @@
class Route < ApplicationRecord
include CaseSensitivity
include Gitlab::SQL::Pattern
include RouteModelQuery
belongs_to :source, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
validates :source, presence: true

View File

@ -167,11 +167,7 @@ class Snippet < ApplicationRecord
end
def self.find_by_id_and_project(id:, project:)
if project.is_a?(Project)
ProjectSnippet.find_by(id: id, project: project)
elsif project.nil?
PersonalSnippet.find_by(id: id)
end
Snippet.find_by(id: id, project: project)
end
def self.max_file_limit(user)
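
Both former branches collapse into one `find_by`: a `Project` argument matches its snippets through `project_id`, and `nil` matches personal snippets through `project_id IS NULL`. Callers now guarantee the argument is a `Project` or `nil` (see the `RepoPath` change later in this commit). A sketch:

```ruby
Snippet.find_by_id_and_project(id: snippet_id, project: project) # => ProjectSnippet or nil
Snippet.find_by_id_and_project(id: snippet_id, project: nil)     # => PersonalSnippet or nil
```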

View File

@ -2,6 +2,10 @@
module AlertManagement
class CreateAlertIssueService
include Gitlab::Utils::StrongMemoize
INCIDENT_LABEL = ::IncidentManagement::CreateIssueService::INCIDENT_LABEL
# @param alert [AlertManagement::Alert]
# @param user [User]
def initialize(alert, user)
@ -13,10 +17,10 @@ module AlertManagement
return error_no_permissions unless allowed?
return error_issue_already_exists if alert.issue
result = create_issue(alert, user, alert_payload)
@issue = result[:issue]
result = create_issue(user, alert_payload)
@issue = result.payload[:issue]
return error(result[:message]) if result[:status] == :error
return error(result.message) if result.error?
return error(alert.errors.full_messages.to_sentence) unless update_alert_issue_id
success
@ -32,10 +36,21 @@ module AlertManagement
user.can?(:create_issue, project)
end
def create_issue(alert, user, alert_payload)
::IncidentManagement::CreateIssueService
.new(project, alert_payload, user)
.execute(skip_settings_check: true)
def create_issue(user, alert_payload)
issue = do_create_issue(label_ids: issue_label_ids)
# Create an unlabelled issue if we couldn't create the issue
# due to label errors.
# See https://gitlab.com/gitlab-org/gitlab-foss/issues/65042
if issue.errors.include?(:labels)
log_label_error(issue)
issue = do_create_issue
end
return error(issue_errors(issue)) unless issue.valid?
@issue = issue
success
end
def alert_payload
@ -65,5 +80,81 @@ module AlertManagement
def error_no_permissions
error(_('You have no permissions'))
end
def do_create_issue(**params)
Issues::CreateService.new(
project,
user,
title: issue_title,
description: issue_description,
**params
).execute
end
def issue_title
alert_presenter.full_title
end
def issue_description
horizontal_line = "\n\n---\n\n"
[
alert_summary,
alert_markdown,
issue_template_content
].compact.join(horizontal_line)
end
def issue_label_ids
[
find_or_create_label(**INCIDENT_LABEL)
].compact.map(&:id)
end
def find_or_create_label(**params)
Labels::FindOrCreateService
.new(user, project, **params)
.execute
end
def alert_summary
alert_presenter.issue_summary_markdown
end
def alert_markdown
alert_presenter.alert_markdown
end
def alert_presenter
strong_memoize(:alert_presenter) do
Gitlab::Alerting::Alert.new(project: project, payload: alert_payload).present
end
end
def issue_template_content
incident_management_setting.issue_template_content
end
def incident_management_setting
strong_memoize(:incident_management_setting) do
project.incident_management_setting ||
project.build_incident_management_setting
end
end
def issue_errors(issue)
issue.errors.full_messages.to_sentence
end
def log_label_error(issue)
Gitlab::AppLogger.info(
<<~TEXT.chomp
Cannot create incident issue with labels \
#{issue.labels.map(&:title).inspect} \
for "#{project.full_name}": #{issue.errors.full_messages.to_sentence}.
Retrying without labels.
TEXT
)
end
end
end
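
The `issue_description` helper above joins the summary, the optional alert markdown, and the optional template with a horizontal rule, dropping absent parts via `compact`. A minimal sketch of that assembly (section strings hypothetical):

```ruby
horizontal_line = "\n\n---\n\n"

sections = [
  'Alert summary',    # alert_presenter.issue_summary_markdown
  nil,                # alert_presenter.alert_markdown (absent for generic alerts)
  'Template content'  # incident_management_setting.issue_template_content
]

sections.compact.join(horizontal_line)
# => "Alert summary\n\n---\n\nTemplate content"
```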

View File

@ -21,7 +21,7 @@ module AuthorizedProjectUpdate
{ user_id: member.user_id, project_id: project.id, access_level: member.access_level }
end
ProjectAuthorization.insert_all(attributes)
ProjectAuthorization.insert_all(attributes) unless attributes.empty?
end
ServiceResponse.success
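
The new guard matters because, in Rails 6, `insert_all` raises `ArgumentError` when handed an empty list; skipping the call keeps the batch loop safe when a batch yields no attributes. A sketch, assuming the surrounding loop:

```ruby
attributes = members.map do |member|
  { user_id: member.user_id, project_id: project.id, access_level: member.access_level }
end

# ProjectAuthorization.insert_all([]) would raise
# ArgumentError: Empty list of attributes passed
ProjectAuthorization.insert_all(attributes) unless attributes.empty?
```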

View File

@ -0,0 +1,70 @@
# frozen_string_literal: true
module AuthorizedProjectUpdate
class ProjectGroupLinkCreateService < BaseService
include Gitlab::Utils::StrongMemoize
BATCH_SIZE = 1000
def initialize(project, group)
@project = project
@group = group
end
def execute
group.members_from_self_and_ancestors_with_effective_access_level
.each_batch(of: BATCH_SIZE, column: :user_id) do |members|
existing_authorizations = existing_project_authorizations(members)
authorizations_to_create = []
user_ids_to_delete = []
members.each do |member|
existing_access_level = existing_authorizations[member.user_id]
if existing_access_level
# User might already have access to the project unrelated to the
# current project share
next if existing_access_level >= member.access_level
user_ids_to_delete << member.user_id
end
authorizations_to_create << { user_id: member.user_id,
project_id: project.id,
access_level: member.access_level }
end
update_authorizations(user_ids_to_delete, authorizations_to_create)
end
ServiceResponse.success
end
private
attr_reader :project, :group
def existing_project_authorizations(members)
user_ids = members.map(&:user_id)
ProjectAuthorization.where(project_id: project.id, user_id: user_ids) # rubocop: disable CodeReuse/ActiveRecord
.select(:user_id, :access_level)
.each_with_object({}) do |authorization, hash|
hash[authorization.user_id] = authorization.access_level
end
end
def update_authorizations(user_ids_to_delete, authorizations_to_create)
ProjectAuthorization.transaction do
if user_ids_to_delete.any?
ProjectAuthorization.where(project_id: project.id, user_id: user_ids_to_delete) # rubocop: disable CodeReuse/ActiveRecord
.delete_all
end
if authorizations_to_create.any?
ProjectAuthorization.insert_all(authorizations_to_create)
end
end
end
end
end
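
The per-member rule in the loop above: keep an existing authorization only when it already grants equal or higher access, otherwise delete it and insert the higher level. A condensed sketch using the integer access levels from `Gitlab::Access`:

```ruby
existing_access_level = Gitlab::Access::DEVELOPER  # 30, already on ProjectAuthorization
member_access_level   = Gitlab::Access::MAINTAINER # 40, granted via the new group link

if existing_access_level >= member_access_level
  # skip the member: the current authorization already covers the new link
else
  # queue the old row for deletion and insert the higher level,
  # both inside one ProjectAuthorization.transaction
end
```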

View File

@ -13,12 +13,12 @@ module IncidentManagement
DESCRIPTION
}.freeze
def initialize(project, params, user = User.alert_bot)
super(project, user, params)
def initialize(project, params)
super(project, User.alert_bot, params)
end
def execute(skip_settings_check: false)
return error_with('setting disabled') unless skip_settings_check || incident_management_setting.create_issue?
def execute
return error_with('setting disabled') unless incident_management_setting.create_issue?
return error_with('invalid alert') unless alert.valid?
issue = create_issue
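
After this change the service always runs as `User.alert_bot` and always enforces the incident-management setting; callers that previously passed a user and `skip_settings_check: true` (like the alert service above) now build issues themselves. The call shape becomes:

```ruby
# Before: .new(project, alert_payload, user).execute(skip_settings_check: true)
IncidentManagement::CreateIssueService.new(project, alert_payload).execute
```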

View File

@ -13,12 +13,32 @@ module Projects
)
if link.save
group.refresh_members_authorized_projects
setup_authorizations(group)
success(link: link)
else
error(link.errors.full_messages.to_sentence, 409)
end
end
private
def setup_authorizations(group)
if Feature.enabled?(:specialized_project_authorization_project_share_worker)
AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker.perform_async(project.id, group.id)
# AuthorizedProjectsWorker uses an exclusive lease per user but
# specialized workers might have synchronization issues. Until we
# compare the inconsistency rates of both approaches, we still run
# AuthorizedProjectsWorker but with some delay and lower urgency as a
# safety net.
group.refresh_members_authorized_projects(
blocking: false,
priority: UserProjectAccessChangedService::LOW_PRIORITY
)
else
group.refresh_members_authorized_projects(blocking: false)
end
end
end
end
end
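
Since the specialized worker ships behind a feature flag with the lease-based refresh kept as a safety net, the two approaches can be compared in production. A sketch of toggling the flag from a Rails console (standard `Feature` API):

```ruby
Feature.enable(:specialized_project_authorization_project_share_worker)
Feature.enabled?(:specialized_project_authorization_project_share_worker) # => true

# Fall back to the previous behavior if inconsistency rates regress
Feature.disable(:specialized_project_authorization_project_share_worker)
```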

View File

@ -11,6 +11,14 @@
:weight: 1
:idempotent: true
:tags: []
- :name: authorized_project_update:authorized_project_update_project_group_link_create
:feature_category: :authentication_and_authorization
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: authorized_project_update:authorized_project_update_user_refresh_with_low_urgency
:feature_category: :authentication_and_authorization
:has_external_dependencies:

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
module AuthorizedProjectUpdate
class ProjectGroupLinkCreateWorker
include ApplicationWorker
feature_category :authentication_and_authorization
urgency :low
queue_namespace :authorized_project_update
idempotent!
def perform(project_id, group_id)
project = Project.find(project_id)
group = Group.find(group_id)
AuthorizedProjectUpdate::ProjectGroupLinkCreateService.new(project, group)
.execute
end
end
end

View File

@ -0,0 +1,5 @@
---
title: Trigger unsaved changes warning in snippets on navigating away
merge_request: 34640
author:
type: added

View File

@ -0,0 +1,6 @@
---
title: When clicking multiple times to leave a single comment, the input field should
remain focused
merge_request: 33742
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Fix static site editor raw (has front matter) <-> body (lacks front matter) content changes sync
merge_request: 34523
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Record audit event when a user creates a new SSH Key for themselves via the API
merge_request: 34645
author: Rajendra Kadam
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Consolidate object storage config in one place
merge_request: 34460
author:
type: changed

View File

@ -0,0 +1,5 @@
---
title: Update blue hex values to match GitLab UI
merge_request: 34530
author:
type: other

View File

@ -0,0 +1,5 @@
---
title: Update green hex values to match GitLab UI
merge_request: 34547
author:
type: other

View File

@ -205,6 +205,34 @@ production: &base
# Whether to expunge (permanently remove) messages from the mailbox when they are deleted after delivery
expunge_deleted: false
## Consolidated object store config
## This will only take effect if the object_store sections are not defined
## within the types (e.g. artifacts, lfs, etc.).
# object_store:
# enabled: false
# remote_directory: artifacts # The bucket name
# proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
# connection:
# provider: AWS # Only AWS supported at the moment
# aws_access_key_id: AWS_ACCESS_KEY_ID
# aws_secret_access_key: AWS_SECRET_ACCESS_KEY
# region: us-east-1
# aws_signature_version: 4 # For creation of signed URLs. Set to 2 if provider does not support v4.
# endpoint: 'https://s3.amazonaws.com' # default: nil - Useful for S3 compliant services such as DigitalOcean Spaces
# objects:
# artifacts:
# bucket: artifacts
# external_diffs:
# bucket: external-diffs
# lfs:
# bucket: lfs-objects
# uploads:
# bucket: uploads
# packages:
# bucket: packages
# dependency_proxy:
# bucket: dependency_proxy
## Build Artifacts
artifacts:
enabled: true

View File

@ -254,7 +254,7 @@ Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values
# Settings.artifact['path'] is deprecated, use `storage_path` instead
Settings.artifacts['path'] = Settings.artifacts['storage_path']
Settings.artifacts['max_size'] ||= 100 # in megabytes
Settings.artifacts['object_store'] = ObjectStoreSettings.parse(Settings.artifacts['object_store'])
Settings.artifacts['object_store'] = ObjectStoreSettings.legacy_parse(Settings.artifacts['object_store'])
#
# Registry
@ -325,7 +325,7 @@ Settings['external_diffs'] ||= Settingslogic.new({})
Settings.external_diffs['enabled'] = false if Settings.external_diffs['enabled'].nil?
Settings.external_diffs['when'] = 'always' if Settings.external_diffs['when'].nil?
Settings.external_diffs['storage_path'] = Settings.absolute(Settings.external_diffs['storage_path'] || File.join(Settings.shared['path'], 'external-diffs'))
Settings.external_diffs['object_store'] = ObjectStoreSettings.parse(Settings.external_diffs['object_store'])
Settings.external_diffs['object_store'] = ObjectStoreSettings.legacy_parse(Settings.external_diffs['object_store'])
#
# Git LFS
@ -333,7 +333,7 @@ Settings.external_diffs['object_store'] = ObjectStoreSettings.parse(Settings.ext
Settings['lfs'] ||= Settingslogic.new({})
Settings.lfs['enabled'] = true if Settings.lfs['enabled'].nil?
Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
Settings.lfs['object_store'] = ObjectStoreSettings.parse(Settings.lfs['object_store'])
Settings.lfs['object_store'] = ObjectStoreSettings.legacy_parse(Settings.lfs['object_store'])
#
# Uploads
@ -341,7 +341,7 @@ Settings.lfs['object_store'] = ObjectStoreSettings.parse(Settings.lfs['object_st
Settings['uploads'] ||= Settingslogic.new({})
Settings.uploads['storage_path'] = Settings.absolute(Settings.uploads['storage_path'] || 'public')
Settings.uploads['base_dir'] = Settings.uploads['base_dir'] || 'uploads/-/system'
Settings.uploads['object_store'] = ObjectStoreSettings.parse(Settings.uploads['object_store'])
Settings.uploads['object_store'] = ObjectStoreSettings.legacy_parse(Settings.uploads['object_store'])
Settings.uploads['object_store']['remote_directory'] ||= 'uploads'
#
@ -351,7 +351,7 @@ Gitlab.ee do
Settings['packages'] ||= Settingslogic.new({})
Settings.packages['enabled'] = true if Settings.packages['enabled'].nil?
Settings.packages['storage_path'] = Settings.absolute(Settings.packages['storage_path'] || File.join(Settings.shared['path'], "packages"))
Settings.packages['object_store'] = ObjectStoreSettings.parse(Settings.packages['object_store'])
Settings.packages['object_store'] = ObjectStoreSettings.legacy_parse(Settings.packages['object_store'])
end
#
@ -361,7 +361,7 @@ Gitlab.ee do
Settings['dependency_proxy'] ||= Settingslogic.new({})
Settings.dependency_proxy['enabled'] = true if Settings.dependency_proxy['enabled'].nil?
Settings.dependency_proxy['storage_path'] = Settings.absolute(Settings.dependency_proxy['storage_path'] || File.join(Settings.shared['path'], "dependency_proxy"))
Settings.dependency_proxy['object_store'] = ObjectStoreSettings.parse(Settings.dependency_proxy['object_store'])
Settings.dependency_proxy['object_store'] = ObjectStoreSettings.legacy_parse(Settings.dependency_proxy['object_store'])
# For first iteration dependency proxy uses Rails server to download blobs.
# To ensure acceptable performance we only allow feature to be used with
@ -376,7 +376,7 @@ end
Settings['terraform_state'] ||= Settingslogic.new({})
Settings.terraform_state['enabled'] = true if Settings.terraform_state['enabled'].nil?
Settings.terraform_state['storage_path'] = Settings.absolute(Settings.terraform_state['storage_path'] || File.join(Settings.shared['path'], "terraform_state"))
Settings.terraform_state['object_store'] = ObjectStoreSettings.parse(Settings.terraform_state['object_store'])
Settings.terraform_state['object_store'] = ObjectStoreSettings.legacy_parse(Settings.terraform_state['object_store'])
#
# Mattermost
@ -595,6 +595,9 @@ Settings.gitlab_shell['owner_group'] ||= Settings.gitlab.user
Settings.gitlab_shell['ssh_path_prefix'] ||= Settings.__send__(:build_gitlab_shell_ssh_path_prefix)
Settings.gitlab_shell['git_timeout'] ||= 10800
# Object storage
ObjectStoreSettings.new(Settings).parse!
#
# Workhorse
#

View File

@ -1,6 +1,12 @@
# Set default values for object_store settings
class ObjectStoreSettings
def self.parse(object_store)
SUPPORTED_TYPES = %w(artifacts external_diffs lfs uploads packages dependency_proxy terraform_state).freeze
ALLOWED_OBJECT_STORE_OVERRIDES = %w(bucket enabled proxy_download).freeze
attr_accessor :settings
# Legacy parser
def self.legacy_parse(object_store)
object_store ||= Settingslogic.new({})
object_store['enabled'] = false if object_store['enabled'].nil?
object_store['remote_directory'] ||= nil
@ -12,4 +18,126 @@ class ObjectStoreSettings
object_store['connection']&.deep_stringify_keys!
object_store
end
def initialize(settings)
@settings = settings
end
# This method converts the common object storage settings to
# the legacy, internal representation.
#
# For example, with the following YAML:
#
# object_store:
# enabled: true
# connection:
# provider: AWS
# aws_access_key_id: minio
# aws_secret_access_key: gdk-minio
# region: gdk
# endpoint: 'http://127.0.0.1:9000'
# path_style: true
# proxy_download: true
# objects:
# artifacts:
# bucket: artifacts
# proxy_download: false
# lfs:
# bucket: lfs-objects
#
# This method then will essentially call:
#
# Settings.artifacts['object_store'] = {
# "enabled" => true,
# "connection"=> {
# "provider" => "AWS",
# "aws_access_key_id" => "minio",
# "aws_secret_access_key" => "gdk-minio",
# "region" => "gdk",
# "endpoint" => "http://127.0.0.1:9000",
# "path_style" => true
# },
# "direct_upload" => true,
# "background_upload" => false,
# "proxy_download" => false,
# "remote_directory" => "artifacts"
# }
#
# Settings.lfs['object_store'] = {
# "enabled" => true,
# "connection" => {
# "provider" => "AWS",
# "aws_access_key_id" => "minio",
# "aws_secret_access_key" => "gdk-minio",
# "region" => "gdk",
# "endpoint" => "http://127.0.0.1:9000",
# "path_style" => true
# },
# "direct_upload" => true,
# "background_upload" => false,
# "proxy_download" => true,
# "remote_directory" => "lfs-objects"
# }
#
# Note that with the common config:
# 1. Only one set of object store credentials can now be used. This is
# necessary to limit configuration overhead when an object storage
# client (e.g. AWS S3) is used inside GitLab Workhorse.
# 2. However, a bucket has to be specified for each object
# type. Reusing buckets is not really supported, but we don't
# enforce that yet.
# 3. direct_upload and background_upload cannot be configured anymore.
def parse!
return unless use_consolidated_settings?
main_config = settings['object_store']
common_config = main_config.slice('enabled', 'connection', 'proxy_download')
# Convert connection settings to use string keys, to make Fog happy
common_config['connection']&.deep_stringify_keys!
# These are no longer configurable if common config is used
common_config['direct_upload'] = true
common_config['background_upload'] = false
SUPPORTED_TYPES.each do |store_type|
overrides = main_config.dig('objects', store_type) || {}
target_config = common_config.merge(overrides.slice(*ALLOWED_OBJECT_STORE_OVERRIDES))
section = settings.try(store_type)
next unless section
raise "Object storage for #{store_type} must have a bucket specified" if section['enabled'] && target_config['bucket'].blank?
# Map bucket (external name) -> remote_directory (internal representation)
target_config['remote_directory'] = target_config.delete('bucket')
section['object_store'] = target_config
end
end
private
# We can only use the common object storage settings if:
# 1. The common settings are defined
# 2. The legacy settings are not defined
def use_consolidated_settings?
return false unless settings.dig('object_store', 'enabled')
return false unless settings.dig('object_store', 'connection')
SUPPORTED_TYPES.each do |store|
# to_h is needed because something strange happens to
# Settingslogic#dig when stub_storage_settings is run in tests:
#
# (byebug) section.dig
# *** ArgumentError Exception: wrong number of arguments (given 0, expected 1+)
# (byebug) section.dig('object_store')
# *** ArgumentError Exception: wrong number of arguments (given 1, expected 0)
section = settings.try(store)&.to_h
next unless section
return false if section.dig('object_store', 'enabled')
return false if section.dig('object_store', 'connection')
end
true
end
end
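
A minimal sketch of the precedence rule in `use_consolidated_settings?` (hypothetical values): as soon as any per-type legacy `object_store` section is enabled or defines a connection, the consolidated block is ignored and `parse!` returns early.

```ruby
settings = Settingslogic.new(
  'object_store' => {
    'enabled' => true,
    'connection' => { 'provider' => 'AWS' }
  },
  # Legacy per-type settings are still present, so the consolidated
  # block above is left alone and nothing is rewritten.
  'lfs' => { 'object_store' => { 'enabled' => true } }
)

ObjectStoreSettings.new(settings).parse! # => nil; settings untouched
```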

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class AddComplianceFrameworksToApplicationSettings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column :application_settings, :compliance_frameworks, :integer, limit: 2, array: true, default: [], null: false
end
end
def down
with_lock_retries do
remove_column :application_settings, :compliance_frameworks
end
end
end

View File

@ -478,6 +478,7 @@ CREATE TABLE public.application_settings (
repository_storages_weighted jsonb DEFAULT '{}'::jsonb NOT NULL,
max_import_size integer DEFAULT 50 NOT NULL,
enforce_pat_expiration boolean DEFAULT true NOT NULL,
compliance_frameworks smallint[] DEFAULT '{}'::smallint[] NOT NULL,
CONSTRAINT check_d03919528d CHECK ((char_length(container_registry_vendor) <= 255)),
CONSTRAINT check_d820146492 CHECK ((char_length(spam_check_endpoint_url) <= 255)),
CONSTRAINT check_e5aba18f02 CHECK ((char_length(container_registry_version) <= 255))
@ -13994,6 +13995,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200609142507
20200609142508
20200609212701
20200613104045
20200615083635
20200615121217
20200615123055

View File

@ -83,6 +83,7 @@ compilable
composable
Conda
Consul
Contentful
Corosync
cron
crons

Binary files not shown. Seven images were changed: one removed (16 KiB) and six replaced with smaller versions (110 → 34 KiB, 81 → 79 KiB, 19 → 10 KiB, 69 → 17 KiB, 17 → 9.9 KiB, 103 → 28 KiB).

View File

@ -6,71 +6,82 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Performance Bar
A Performance Bar can be displayed, to dig into the performance of a page. When
activated, it looks as follows:
You can display the GitLab Performance Bar to see statistics for the performance
of a page. When activated, it looks as follows:
![Performance Bar](img/performance_bar.png)
It allows you to see (from left to right):
From left to right, it displays:
- the current host serving the page
- time taken and number of DB queries; click through for details of these queries
- **Current Host**: the current host serving the page.
- **Database queries**: the time taken (in milliseconds) and the total number
of database queries, displayed in the format `00ms / 00pg`. Click to display
a modal window with more details:
![SQL profiling using the Performance Bar](img/performance_bar_sql_queries.png)
- time taken and number of [Gitaly](../../gitaly/index.md) calls; click through for details of these calls
- **Gitaly calls**: the time taken (in milliseconds) and the total number of
[Gitaly](../../gitaly/index.md) calls. Click to display a modal window with more
details:
![Gitaly profiling using the Performance Bar](img/performance_bar_gitaly_calls.png)
- time taken and number of [Rugged](../../high_availability/nfs.md#improving-nfs-performance-with-gitlab) calls; click through for details of these calls
- **Rugged calls**: the time taken (in milliseconds) and the total number of
[Rugged](../../high_availability/nfs.md#improving-nfs-performance-with-gitlab) calls.
Click to display a modal window with more details:
![Rugged profiling using the Performance Bar](img/performance_bar_rugged_calls.png)
- time taken and number of Redis calls; click through for details of these calls
- **Redis calls**: the time taken (in milliseconds) and the total number of
Redis calls. Click to display a modal window with more details:
![Redis profiling using the Performance Bar](img/performance_bar_redis_calls.png)
- total load timings of the page; click through for details of these calls. Values in the following order:
- Backend - Time that the actual base page took to load
- [First Contentful Paint](https://web.dev/first-contentful-paint/) - Time until something was visible to the user
- [DomContentLoaded](https://developers.google.com/web/fundamentals/performance/critical-rendering-path/measure-crp) Event
- Number of Requests that the page loaded
![Frontend requests using the Performance Bar](img/performance_bar_frontend.png)
- a link to add a request's details to the performance bar; the request can be
added by its full URL (authenticated as the current user), or by the value of
its `X-Request-Id` header
- a link to download the raw JSON used to generate the Performance Bar reports
On the far right is a request selector that allows you to view the same metrics
(excluding the page timing and line profiler) for any requests made while the
page was open. Only the first two requests per unique URL are captured.
- **Elasticsearch calls**: the time taken (in milliseconds) and the total number of
Elasticsearch calls. Click to display a modal window with more details.
- **Load timings** of the page: several values in milliseconds, separated by slashes.
Click to display a modal window with more details. The values, from left to right:
- **Backend**: time needed for the base page to load.
- [**First Contentful Paint**](https://web.dev/first-contentful-paint/):
Time until something was visible to the user.
- [**DomContentLoaded**](https://developers.google.com/web/fundamentals/performance/critical-rendering-path/measure-crp) Event.
- **Total number of requests** the page loaded:
![Frontend requests using the Performance Bar](img/performance_bar_frontend.png)
- **Trace**: If Jaeger is integrated, **Trace** links to a Jaeger tracing page
with the current request's `correlation_id` included.
- **+**: A link to add a request's details to the performance bar. The request
can be added by its full URL (authenticated as the current user), or by the value of
its `X-Request-Id` header.
- **Download**: a link to download the raw JSON used to generate the Performance Bar reports.
- **Request Selector**: a select box displayed on the right-hand side of the
Performance Bar which enables you to view these metrics for any requests made while
the current page was open. Only the first two requests per unique URL are captured.
## Request warnings
For requests exceeding predefined limits, a warning icon will be shown
next to the failing metric, along with an explanation. In this example,
the Gitaly call duration exceeded the threshold:
Requests exceeding predefined limits display a warning **{warning}** icon and
explanation next to the failing metric. In this example, the Gitaly call duration
exceeded the threshold:
![Gitaly call duration exceeded threshold](img/performance_bar_gitaly_threshold.png)
If any requests on the current page generated warnings, the icon will
appear next to the request selector:
If any requests on the current page generated warnings, the warning icon displays
next to the **Request selector**:
![Request selector showing two requests with warnings](img/performance_bar_request_selector_warning.png)
And requests with warnings are indicated in the request selector with a
`(!)` after their path:
Requests with warnings display `(!)` after their path in the **Request selector**:
![Request selector showing dropdown](img/performance_bar_request_selector_warning_expanded.png)
## Enable the Performance Bar via the Admin panel
GitLab Performance Bar is disabled by default. To enable it for a given group,
navigate to **Admin Area > Settings > Metrics and profiling**
(`admin/application_settings/metrics_and_profiling`), and expand the section
**Profiling - Performance bar**.
The GitLab Performance Bar is disabled by default. To enable it for a given group:
The only required setting you need to set is the full path of the group that
will be allowed to display the Performance Bar.
Make sure _Enable the Performance Bar_ is checked and hit
**Save** to save the changes.
1. Sign in as a user with Administrator [permissions](../../../user/permissions.md).
1. In the menu bar, click the **{admin}** **Admin Area** icon.
1. Navigate to **{settings}** **Settings > Metrics and profiling**
(`admin/application_settings/metrics_and_profiling`), and expand the section
**Profiling - Performance bar**.
1. Click **Enable access to the Performance Bar**.
1. In the **Allowed group** field, provide the full path of the group allowed
to access the GitLab Performance Bar.
1. Click **Save changes**.
Once the Performance Bar is enabled, you will need to press the [<kbd>p</kbd> +
<kbd>b</kbd> keyboard shortcut](../../../user/shortcuts.md) to actually
display it.
## Keyboard shortcut for the Performance Bar
You can toggle the Bar using the same shortcut.
![GitLab Performance Bar Admin Settings](img/performance_bar_configuration_settings.png)
After enabling the GitLab Performance Bar, press the [<kbd>p</kbd> +
<kbd>b</kbd> keyboard shortcut](../../../user/shortcuts.md) to display it, and
again to hide it.

View File

@ -99,6 +99,7 @@ Complementary reads:
- [Code comments](code_comments.md)
- [Renaming features](renaming_features.md)
- [Windows Development on GCP](windows.md)
- [Code Intelligence](code_intelligence/index.md)
## Performance guides

View File

@ -0,0 +1,110 @@
# Code Intelligence
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/1576) in GitLab 13.1.
This document describes the design behind [Code Intelligence](../../user/project/code_intelligence.md).
GitLab's built-in Code Intelligence is powered by
[LSIF](https://lsif.dev) and comes down to generating an LSIF document for a
project in a CI job, processing the data, uploading it as a CI artifact and
displaying this information for the files in the project.
Here is a sequence diagram for uploading an LSIF artifact:
```mermaid
sequenceDiagram
participant Runner
participant Workhorse
participant Rails
participant Object Storage
Runner->>+Workhorse: POST /v4/jobs/:id/artifacts
Workhorse->>+Rails: POST /:id/artifacts/authorize
Rails-->>-Workhorse: Respond with ProcessLsif header
Note right of Workhorse: Process LSIF file
Workhorse->>+Object Storage: Put file
Object Storage-->>-Workhorse: request results
Workhorse->>+Rails: POST /:id/artifacts
Rails-->>-Workhorse: request results
Workhorse-->>-Runner: request results
```
1. The CI/CD job generates a document in an LSIF format (usually `dump.lsif`) using [an
indexer](https://lsif.dev) for the language of a project. The format
[describes](https://github.com/sourcegraph/sourcegraph/blob/master/doc/user/code_intelligence/writing_an_indexer.md)
interactions between a method or function and its definition(s) or references. The
document is marked to be stored as an LSIF report artifact.
1. After receiving a request for storing the artifact, Workhorse asks
GitLab Rails to authorize the upload.
1. GitLab Rails validates whether the artifact can be uploaded and sends the
`ProcessLsif: true` header if the LSIF artifact can be processed.
1. Workhorse reads the LSIF document line by line and generates code intelligence
data for each file in the project. The output is a zipped directory of JSON
files which imitates the structure of the project:
Project:
```code
app
controllers
application_controller.rb
models
application.rb
```
Generated data:
```code
app
controllers
application_controller.rb.json
models
application.rb.json
```
1. The zipped directory is stored as a ZIP artifact. Workhorse replaces the
original LSIF document with a set of JSON files in the ZIP artifact and
generates metadata for it. The metadata makes it possible to view a single
file in a ZIP file without unpacking or loading the whole file. That allows us
to access code intelligence data for a single file.
1. When a file is viewed in the GitLab application, frontend fetches code
intelligence data for the file directly from the object storage. The file
contains information about code units in the file. For example:
```json
[
{
"definition_path": "cmd/check/main.go#L4",
"hover": [
{
"language": "go",
"tokens": [
[
{
"class": "kn",
"value": "package"
},
{
"value": " "
},
{
"class": "s",
"value": "\"fmt\""
}
]
]
},
{
"value": "Package fmt implements formatted I/O with functions analogous to C's printf and scanf. The format 'verbs' are derived from C's but are simpler. \n\n### hdr-PrintingPrinting\nThe verbs: \n\nGeneral: \n\n```\n%v\tthe value in a default format\n\twhen printing st..."
}
],
"start_char": 2,
"start_line": 33
}
...
]
```

View File

@ -495,9 +495,11 @@ by calling the method `disable_ddl_transaction!` in the body of your migration
class like so:
```ruby
class MyMigration < ActiveRecord::Migration[4.2]
class MyMigration < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up

View File

@ -317,30 +317,11 @@ migrations](background_migrations.md#cleaning-up).
## Adding Indexes
Adding indexes is an expensive process that blocks INSERT and UPDATE queries for
the duration. You can work around this by using the `CONCURRENTLY` option:
Adding indexes does not require downtime when `add_concurrent_index`
is used.
```sql
CREATE INDEX CONCURRENTLY index_name ON projects (column_name);
```
Migrations can take advantage of this by using the method
`add_concurrent_index`. For example:
```ruby
class MyMigration < ActiveRecord::Migration[4.2]
def up
add_concurrent_index :projects, :column_name
end
def down
remove_index(:projects, :column_name) if index_exists?(:projects, :column_name)
end
end
```
Note that `add_concurrent_index` can not be reversed automatically, thus you
need to manually define `up` and `down`.
See also [Migration Style Guide](migration_style_guide.md#adding-indexes)
for more information.
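
For reference, the shape of such a migration (reconstructed from the example removed above, using the `6.0` superclass shown in the previous file):

```ruby
class MyMigration < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  def up
    add_concurrent_index :projects, :column_name
  end

  def down
    # add_concurrent_index cannot be reversed automatically
    remove_index(:projects, :column_name) if index_exists?(:projects, :column_name)
  end
end
```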
## Dropping Indexes

View File

@ -730,9 +730,9 @@ module API
optional :expires_at, type: DateTime, desc: 'The expiration date of the SSH key in ISO 8601 format (YYYY-MM-DDTHH:MM:SSZ)'
end
post "keys" do
key = current_user.keys.new(declared_params)
key = ::Keys::CreateService.new(current_user, declared_params(include_missing: false)).execute
if key.save
if key.persisted?
present key, with: Entities::SSHKey
else
render_validation_error!(key)

View File

@ -7,7 +7,7 @@ module Gitlab
include Gitlab::ImportExport::CommandLineUtil
BATCH_SIZE = 100
SMALLER_BATCH_SIZE = 20
SMALLER_BATCH_SIZE = 2
def self.batch_size(exportable)
if Feature.enabled?(:export_reduce_relation_batch_size, exportable)

View File

@ -4,11 +4,6 @@ module Gitlab
module RepoPath
NotFoundError = Class.new(StandardError)
# @return [Array]
# 1. container (ActiveRecord which holds repository)
# 2. project (Project)
# 3. repo_type
# 4. redirected_path
def self.parse(path)
repo_path = path.sub(/\.git\z/, '').sub(%r{\A/}, '')
redirected_path = nil
@ -22,7 +17,7 @@ module Gitlab
# `Gitlab::GlRepository::PROJECT` type.
next unless type.valid?(repo_path)
# Removing the suffix (.wiki, .design, ...) from path
# Removing the suffix (.wiki, .design, ...) from the project path
full_path = repo_path.chomp(type.path_suffix)
container, project, redirected_path = find_container(type, full_path)
@ -41,31 +36,23 @@ module Gitlab
[snippet, snippet&.project, redirected_path]
else
container, redirected_path = find_routes_source(full_path)
project, redirected_path = find_project(full_path)
if container.is_a?(Project)
[container, container, redirected_path]
else
[container, nil, redirected_path]
end
[project, project, redirected_path]
end
end
def self.find_routes_source(path)
return [nil, nil] if path.blank?
def self.find_project(project_path)
return [nil, nil] if project_path.blank?
source =
Route.find_source_of_path(path) ||
Route.find_source_of_path(path, case_sensitive: false) ||
RedirectRoute.find_source_of_path(path, case_sensitive: false)
project = Project.find_by_full_path(project_path, follow_redirects: true)
redirected_path = redirected?(project, project_path) ? project_path : nil
redirected_path = redirected?(source, path) ? path : nil
[source, redirected_path]
[project, redirected_path]
end
def self.redirected?(container, container_path)
container && container.full_path.casecmp(container_path) != 0
def self.redirected?(project, project_path)
project && project.full_path.casecmp(project_path) != 0
end
# Snippet_path can be either:
@ -75,7 +62,7 @@ module Gitlab
return [nil, nil] if snippet_path.blank?
snippet_id, project_path = extract_snippet_info(snippet_path)
project, redirected_path = find_routes_source(project_path)
project, redirected_path = find_project(project_path)
[Snippet.find_by_id_and_project(id: snippet_id, project: project), redirected_path]
end
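
A usage sketch of the lookup after this change; the return shape comes from the comment removed at the top of this file, and the path is hypothetical:

```ruby
container, project, repo_type, redirected_path =
  Gitlab::RepoPath.parse('gitlab-org/gitlab-test.wiki.git')

# container:       the record that holds the repository
# project:         the Project, or nil (e.g. for personal snippets)
# repo_type:       the matched Gitlab::GlRepository type (project, wiki, ...)
# redirected_path: the requested path when resolved via a redirect, else nil
```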

View File

@ -5844,6 +5844,9 @@ msgstr ""
msgid "Compliance framework (optional)"
msgstr ""
msgid "Compliance frameworks"
msgstr ""
msgid "ComplianceFramework|GDPR"
msgstr ""
@ -18474,6 +18477,9 @@ msgstr ""
msgid "Registration|Your profile"
msgstr ""
msgid "Regulate approvals by authors/committers, based on compliance frameworks. Can be changed only at the instance level."
msgstr ""
msgid "Rejected (closed)"
msgstr ""
@ -22220,6 +22226,9 @@ msgstr ""
msgid "The X509 Certificate to use when mutual TLS is required to communicate with the external authorization service. If left blank, the server certificate is still validated when accessing over HTTPS."
msgstr ""
msgid "The above settings apply to all projects with the selected compliance framework(s)."
msgstr ""
msgid "The amount of seconds after which a request to get a secondary node status will time out."
msgstr ""
@ -27386,6 +27395,9 @@ msgstr ""
msgid "must be greater than start date"
msgstr ""
msgid "must contain only valid frameworks"
msgstr ""
msgid "my-awesome-group"
msgstr ""

View File

@ -40,8 +40,8 @@
"@babel/plugin-syntax-import-meta": "^7.10.1",
"@babel/preset-env": "^7.10.1",
"@gitlab/at.js": "1.5.5",
"@gitlab/svgs": "1.139.0",
"@gitlab/ui": "16.12.1",
"@gitlab/svgs": "1.140.0",
"@gitlab/ui": "16.12.2",
"@gitlab/visual-review-tools": "1.6.1",
"@rails/actioncable": "^6.0.3-1",
"@sentry/browser": "^5.10.2",

View File

@ -4,9 +4,105 @@ require 'spec_helper'
require Rails.root.join('config', 'object_store_settings.rb')
RSpec.describe ObjectStoreSettings do
describe '.parse' do
describe '#parse!' do
let(:settings) { Settingslogic.new(config) }
subject { described_class.new(settings).parse! }
context 'with valid config' do
let(:connection) do
{
'provider' => 'AWS',
'aws_access_key_id' => 'AWS_ACCESS_KEY_ID',
'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY',
'region' => 'us-east-1'
}
end
let(:config) do
{
'lfs' => { 'enabled' => true },
'artifacts' => { 'enabled' => true },
'external_diffs' => { 'enabled' => false },
'object_store' => {
'enabled' => true,
'connection' => connection,
'proxy_download' => true,
'objects' => {
'artifacts' => {
'bucket' => 'artifacts',
'proxy_download' => false
},
'lfs' => {
'bucket' => 'lfs-objects'
},
'external_diffs' => {
'bucket' => 'external_diffs',
'enabled' => false
}
}
}
}
end
it 'sets correct default values' do
subject
expect(settings.artifacts['enabled']).to be true
expect(settings.artifacts['object_store']['enabled']).to be true
expect(settings.artifacts['object_store']['connection']).to eq(connection)
expect(settings.artifacts['object_store']['direct_upload']).to be true
expect(settings.artifacts['object_store']['background_upload']).to be false
expect(settings.artifacts['object_store']['proxy_download']).to be false
expect(settings.artifacts['object_store']['remote_directory']).to eq('artifacts')
expect(settings.lfs['enabled']).to be true
expect(settings.lfs['object_store']['enabled']).to be true
expect(settings.lfs['object_store']['connection']).to eq(connection)
expect(settings.lfs['object_store']['direct_upload']).to be true
expect(settings.lfs['object_store']['background_upload']).to be false
expect(settings.lfs['object_store']['proxy_download']).to be true
expect(settings.lfs['object_store']['remote_directory']).to eq('lfs-objects')
expect(settings.external_diffs['enabled']).to be false
expect(settings.external_diffs['object_store']['enabled']).to be false
expect(settings.external_diffs['object_store']['remote_directory']).to eq('external_diffs')
end
it 'raises an error when a bucket is missing' do
config['object_store']['objects']['lfs'].delete('bucket')
expect { subject }.to raise_error(/Object storage for lfs must have a bucket specified/)
end
context 'with legacy config' do
let(:legacy_settings) do
{
'enabled' => true,
'remote_directory' => 'some-bucket',
'direct_upload' => true,
'background_upload' => false,
'proxy_download' => false
}
end
before do
settings.lfs['object_store'] = described_class.legacy_parse(legacy_settings)
end
it 'does not alter config if legacy settings are specified' do
subject
expect(settings.artifacts['object_store']).to be_nil
expect(settings.lfs['object_store']['remote_directory']).to eq('some-bucket')
expect(settings.external_diffs['object_store']).to be_nil
end
end
end
end
describe '.legacy_parse' do
it 'sets correct default values' do
settings = described_class.parse(nil)
settings = described_class.legacy_parse(nil)
expect(settings['enabled']).to be false
expect(settings['direct_upload']).to be false
@ -20,7 +116,7 @@ RSpec.describe ObjectStoreSettings do
'remote_directory' => 'artifacts'
})
settings = described_class.parse(original_settings)
settings = described_class.legacy_parse(original_settings)
expect(settings['enabled']).to be true
expect(settings['direct_upload']).to be false

View File

@ -5,7 +5,7 @@ import { ApolloMutation } from 'vue-apollo';
import createFlash from '~/flash';
import DesignIndex from '~/design_management/pages/design/index.vue';
import DesignSidebar from '~/design_management/components/design_sidebar.vue';
import DesignReplyForm from '~/design_management/components/design_notes/design_reply_form.vue';
import DesignPresentation from '~/design_management/components/design_presentation.vue';
import createImageDiffNoteMutation from '~/design_management/graphql/mutations/createImageDiffNote.mutation.graphql';
import design from '../../mock_data/design';
import mockResponseWithDesigns from '../../mock_data/designs';
@ -26,6 +26,15 @@ jest.mock('mousetrap', () => ({
unbind: jest.fn(),
}));
const focusInput = jest.fn();
const DesignReplyForm = {
template: '<div><textarea ref="textarea"></textarea></div>',
methods: {
focusInput,
},
};
const localVue = createLocalVue();
localVue.use(VueRouter);
@ -64,6 +73,7 @@ describe('Design management design index page', () => {
const findDiscussionForm = () => wrapper.find(DesignReplyForm);
const findSidebar = () => wrapper.find(DesignSidebar);
const findDesignPresentation = () => wrapper.find(DesignPresentation);
function createComponent(loading = false, data = {}) {
const $apollo = {
@ -83,6 +93,7 @@ describe('Design management design index page', () => {
stubs: {
ApolloMutation,
DesignSidebar,
DesignReplyForm,
},
data() {
return {
@ -153,13 +164,29 @@ describe('Design management design index page', () => {
},
});
wrapper.vm.openCommentForm({ x: 0, y: 0 });
findDesignPresentation().vm.$emit('openCommentForm', { x: 0, y: 0 });
return wrapper.vm.$nextTick().then(() => {
expect(findDiscussionForm().exists()).toBe(true);
});
});
it('keeps new discussion form focused', () => {
createComponent(false, {
design: {
...design,
discussions: {
nodes: [],
},
},
annotationCoordinates,
});
findDesignPresentation().vm.$emit('openCommentForm', { x: 10, y: 10 });
expect(focusInput).toHaveBeenCalled();
});
it('sends a mutation on submitting form and closes form', () => {
createComponent(false, {
design: {

View File

@ -307,6 +307,16 @@ describe('Snippet Edit app', () => {
});
});
it('makes sure there are no unsaved changes in the snippet', () => {
createComponent();
clickSubmitBtn();
return waitForPromises().then(() => {
expect(wrapper.vm.originalContent).toBe(wrapper.vm.content);
expect(wrapper.vm.hasChanges()).toBe(false);
});
});
it.each`
newSnippet | projectPath | mutationName
${true} | ${rawProjectPathMock} | ${'CreateSnippetMutation with projectPath'}
@ -419,5 +429,33 @@ describe('Snippet Edit app', () => {
expect(resolveMutate).toHaveBeenCalledWith(updateMutationPayload());
});
});
describe('on before unload', () => {
let event;
let returnValueSetter;
beforeEach(() => {
createComponent();
event = new Event('beforeunload');
returnValueSetter = jest.spyOn(event, 'returnValue', 'set');
});
it('does not prevent page navigation if there are no changes to the snippet content', () => {
window.dispatchEvent(event);
expect(returnValueSetter).not.toHaveBeenCalled();
});
it('prevents page navigation if there are some changes in the snippet content', () => {
wrapper.setData({ content: 'new content' });
window.dispatchEvent(event);
expect(returnValueSetter).toHaveBeenCalledWith(
'Are you sure you want to lose unsaved changes?',
);
});
});
});
});

View File

@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import RichContentEditor from '~/vue_shared/components/rich_content_editor/rich_content_editor.vue';
import { EDITOR_TYPES } from '~/vue_shared/components/rich_content_editor/constants';
import EditArea from '~/static_site_editor/components/edit_area.vue';
import PublishToolbar from '~/static_site_editor/components/publish_toolbar.vue';
@ -91,4 +92,47 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
});
});
});
describe('when the mode changes', () => {
const setInitialMode = mode => {
wrapper.setData({ editorMode: mode });
};
afterEach(() => {
setInitialMode(EDITOR_TYPES.wysiwyg);
});
it.each`
initialMode | targetMode
${EDITOR_TYPES.wysiwyg} | ${EDITOR_TYPES.markdown}
${EDITOR_TYPES.markdown} | ${EDITOR_TYPES.wysiwyg}
`('sets editorMode from $initialMode to $targetMode', ({ initialMode, targetMode }) => {
setInitialMode(initialMode);
findRichContentEditor().vm.$emit('modeChange', targetMode);
expect(wrapper.vm.editorMode).toBe(targetMode);
});
it.each`
syncFnName | initialMode | targetMode
${'syncBodyToRaw'} | ${EDITOR_TYPES.wysiwyg} | ${EDITOR_TYPES.markdown}
${'syncRawToBody'} | ${EDITOR_TYPES.markdown} | ${EDITOR_TYPES.wysiwyg}
`(
'calls $syncFnName source before switching from $initialMode to $targetMode',
({ syncFnName, initialMode, targetMode }) => {
setInitialMode(initialMode);
const spySyncSource = jest.spyOn(wrapper.vm, 'syncSource');
const spySyncParsedSource = jest.spyOn(wrapper.vm.parsedSource, syncFnName);
findRichContentEditor().vm.$emit('modeChange', targetMode);
expect(spySyncSource).toHaveBeenCalled();
expect(spySyncParsedSource).toHaveBeenCalled();
spySyncSource.mockReset();
spySyncParsedSource.mockReset();
},
);
});
});

View File

@ -48,11 +48,11 @@ describe('parseSourceFile', () => {
});
it.each`
sourceContent | editableKey | syncKey | isModifiedKey | desc
${contentSimple} | ${'body'} | ${'syncRaw'} | ${'isModifiedRaw'} | ${'returns true after modification and sync'}
${contentSimple} | ${'raw'} | ${'syncBody'} | ${'isModifiedBody'} | ${'returns true after modification and sync'}
${contentComplex} | ${'body'} | ${'syncRaw'} | ${'isModifiedRaw'} | ${'returns true after modification and sync'}
${contentComplex} | ${'raw'} | ${'syncBody'} | ${'isModifiedBody'} | ${'returns true after modification and sync'}
sourceContent | editableKey | syncKey | isModifiedKey | desc
${contentSimple} | ${'body'} | ${'syncBodyToRaw'} | ${'isModifiedRaw'} | ${'returns true after modification and sync'}
${contentSimple} | ${'raw'} | ${'syncRawToBody'} | ${'isModifiedBody'} | ${'returns true after modification and sync'}
${contentComplex} | ${'body'} | ${'syncBodyToRaw'} | ${'isModifiedRaw'} | ${'returns true after modification and sync'}
${contentComplex} | ${'raw'} | ${'syncRawToBody'} | ${'isModifiedBody'} | ${'returns true after modification and sync'}
`('$desc', ({ sourceContent, editableKey, syncKey, isModifiedKey }) => {
const parsedSource = parseSourceFile(sourceContent);
parsedSource.editable[editableKey] += 'Added content';

View File

@ -67,11 +67,11 @@ describe ::Gitlab::RepoPath do
end
end
describe '.find_routes_source' do
describe '.find_project' do
context 'when finding a project by its canonical path' do
context 'when the cases match' do
it 'returns the project and nil' do
expect(described_class.find_routes_source(project.full_path)).to eq([project, nil])
expect(described_class.find_project(project.full_path)).to eq([project, nil])
end
end
@ -81,14 +81,14 @@ describe ::Gitlab::RepoPath do
# requests, we should accept wrongly-cased URLs because it is a pain to
# block people's git operations and force them to update remote URLs.
it 'returns the project and nil' do
expect(described_class.find_routes_source(project.full_path.upcase)).to eq([project, nil])
expect(described_class.find_project(project.full_path.upcase)).to eq([project, nil])
end
end
end
context 'when finding a project via a redirect' do
it 'returns the project and nil' do
expect(described_class.find_routes_source(redirect.path)).to eq([project, redirect.path])
expect(described_class.find_project(redirect.path)).to eq([project, redirect.path])
end
end
end
@ -110,16 +110,6 @@ describe ::Gitlab::RepoPath do
end
end
context 'when path is namespace path, but has same id as project' do
let(:namespace) { build_stubbed(:namespace, id: project.id) }
it 'returns nil if path is referring to namespace' do
allow(described_class).to receive(:find_route_source).and_return(namespace)
expect(described_class.find_snippet("#{namespace.full_path}/snippets/#{project_snippet.id}")).to eq([nil, nil])
end
end
it 'returns nil for snippets not associated with the project' do
snippet = create(:project_snippet)

View File

@ -1,28 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Route, 'RouteModelQuery', :aggregate_failures do
let_it_be(:group1) { create(:group, path: 'Group1') }
let_it_be(:group2) { create(:group, path: 'Group2') }
let_it_be(:project1) { create(:project, path: 'Project1', group: group1) }
let_it_be(:project2) { create(:project, path: 'Project2', group: group2) }
describe '.find_source_of_path' do
it 'finds exact match' do
expect(described_class.find_source_of_path('Group1')).to eq(group1)
expect(described_class.find_source_of_path('Group2/Project2')).to eq(project2)
expect(described_class.find_source_of_path('GROUP1')).to be_nil
expect(described_class.find_source_of_path('GROUP2/PROJECT2')).to be_nil
end
it 'finds case insensitive match' do
expect(described_class.find_source_of_path('Group1', case_sensitive: false)).to eq(group1)
expect(described_class.find_source_of_path('Group2/Project2', case_sensitive: false)).to eq(project2)
expect(described_class.find_source_of_path('GROUP1', case_sensitive: false)).to eq(group1)
expect(described_class.find_source_of_path('GROUP2/PROJECT2', case_sensitive: false)).to eq(project2)
end
end
end

View File

@ -206,32 +206,6 @@ describe Snippet do
end
end
describe '.find_by_id_and_project' do
let_it_be(:project) { create(:project) }
let_it_be(:project_snippet) { create(:project_snippet, project: project) }
let_it_be(:personal_snippet) { create(:personal_snippet) }
context 'when project is provided' do
it 'returns ProjectSnippet' do
expect(described_class.find_by_id_and_project(id: project_snippet.id, project: project)).to eq(project_snippet)
end
end
context 'when project is nil' do
it 'returns PersonalSnippet' do
expect(described_class.find_by_id_and_project(id: personal_snippet.id, project: nil)).to eq(personal_snippet)
end
end
context 'when project variable is not a Project' do
let(:namespace) { build_stubbed(:namespace, id: project.id) }
it 'returns nil' do
expect(described_class.find_by_id_and_project(id: project_snippet.id, project: namespace)).to be_nil
end
end
end
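The removed examples fully determine the helper's dispatch: a Project yields a ProjectSnippet lookup, nil yields a PersonalSnippet lookup, and anything else (such as a Namespace with a colliding id) returns nil. A sketch consistent with those expectations, not the removed source:
# Sketch matching the deleted expectations above.
def self.find_by_id_and_project(id:, project:)
  case project
  when Project
    ProjectSnippet.find_by(id: id, project: project)
  when nil
    PersonalSnippet.find_by(id: id)
  end
  # Any other `project` value falls through the case and returns nil.
end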
describe '.with_optional_visibility' do
context 'when a visibility level is provided' do
it 'returns snippets with the given visibility' do

View File

@ -2133,7 +2133,7 @@ describe API::Projects do
expect(json_response['expires_at']).to eq(expires_at.to_s)
end
it 'updates project authorization' do
it 'updates project authorization', :sidekiq_inline do
expect do
post api("/projects/#{project.id}/share", user), params: { group_id: group.id, group_access: Gitlab::Access::DEVELOPER }
end.to(

View File

@ -4,9 +4,11 @@ require 'spec_helper'
RSpec.describe AlertManagement::CreateAlertIssueService do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:payload) do
{
'title' => 'Alert title',
'annotations' => {
'title' => 'Alert title'
},
@ -15,7 +17,7 @@ RSpec.describe AlertManagement::CreateAlertIssueService do
}
end
let_it_be(:generic_alert, reload: true) { create(:alert_management_alert, :triggered, project: project, payload: payload) }
let_it_be(:prometheus_alert) { create(:alert_management_alert, :triggered, :prometheus, project: project, payload: payload) }
let_it_be(:prometheus_alert, reload: true) { create(:alert_management_alert, :triggered, :prometheus, project: project, payload: payload) }
let(:alert) { generic_alert }
let(:created_issue) { Issue.last! }
@ -29,7 +31,7 @@ RSpec.describe AlertManagement::CreateAlertIssueService do
.and_return(can_create)
end
shared_examples 'creating an alert' do
shared_examples 'creating an alert issue' do
it 'creates an issue' do
expect { execute }.to change { project.issues.count }.by(1)
end
@ -47,12 +49,106 @@ RSpec.describe AlertManagement::CreateAlertIssueService do
expect(alert.reload.issue_id).to eq(created_issue.id)
end
end
shared_examples 'setting issue attributes' do
before do
execute
end
it 'sets issue author to the current user' do
expect(created_issue.author).to eq(user)
end
it 'sets the issue title' do
expect(created_issue.title).to eq(alert_presenter.title)
end
it 'sets the issue description' do
expect(created_issue.description).to include(alert_presenter.issue_summary_markdown.strip)
end
end
shared_examples 'sets issue labels' do
let(:title) { 'incident' }
let(:color) { '#CC0033' }
let(:description) do
<<~DESCRIPTION.chomp
Denotes a disruption to IT services and \
the associated issues require immediate attention
DESCRIPTION
end
shared_examples 'existing label' do
it 'does not create new label' do
expect { execute }.not_to change(Label, :count)
end
it 'adds the existing label' do
execute
expect(created_issue.labels).to eq([label])
end
end
shared_examples 'new label' do
it 'adds newly created label' do
expect { execute }.to change(Label, :count).by(1)
end
it 'sets label attributes' do
execute
created_label = project.reload.labels.last!
expect(created_issue.labels).to eq([created_label])
expect(created_label.title).to eq(title)
expect(created_label.color).to eq(color)
expect(created_label.description).to eq(description)
end
end
context 'with predefined project label' do
it_behaves_like 'existing label' do
let!(:label) { create(:label, project: project, title: title) }
end
end
context 'with predefined group label' do
it_behaves_like 'existing label' do
let!(:label) { create(:group_label, group: group, title: title) }
end
end
context 'without label' do
it_behaves_like 'new label'
end
context 'with duplicate labels', issue: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/65042' do
before do
# Replicate race condition to create duplicates
build(:label, project: project, title: title).save!(validate: false)
build(:label, project: project, title: title).save!(validate: false)
end
it 'creates an issue without labels' do
# Verify we have duplicates
expect(project.labels.size).to eq(2)
expect(project.labels.map(&:title)).to all(eq(title))
message = <<~MESSAGE.chomp
Cannot create incident issue with labels ["#{title}"] for \
"#{project.full_name}": Labels is invalid.
Retrying without labels.
MESSAGE
expect(Gitlab::AppLogger)
.to receive(:info)
.with(message)
expect(execute).to be_success
expect(created_issue.labels).to be_empty
end
end
end
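The duplicate-label context exists because label uniqueness is enforced by validation rather than a database constraint, so two racing requests can both persist an 'incident' label; attaching either copy later fails validation with "Labels is invalid". A hedged sketch of the retry these examples describe (service and helper names are assumptions):
# Hypothetical retry path; Issues::CreateService usage is an assumption.
def create_incident_issue(issue_params, labels)
  issue = Issues::CreateService
    .new(project, user, issue_params.merge(label_ids: labels.map(&:id)))
    .execute

  return issue if issue.valid?

  Gitlab::AppLogger.info(labels_error_message(labels))
  # Retry without the invalid label association rather than failing outright.
  Issues::CreateService.new(project, user, issue_params).execute
end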
context 'when a user is allowed to create an issue' do
@ -69,14 +165,25 @@ RSpec.describe AlertManagement::CreateAlertIssueService do
context 'when the alert is prometheus alert' do
let(:alert) { prometheus_alert }
let(:alert_presenter) do
Gitlab::Alerting::Alert.new(project: project, payload: alert.payload).present
end
it_behaves_like 'creating an alert'
it_behaves_like 'creating an alert issue'
it_behaves_like 'setting issue attributes'
it_behaves_like 'sets issue labels'
end
context 'when the alert is generic' do
let(:alert) { generic_alert }
let(:alert_presenter) do
alert_payload = Gitlab::Alerting::NotificationPayloadParser.call(alert.payload.to_h)
Gitlab::Alerting::Alert.new(project: project, payload: alert_payload).present
end
it_behaves_like 'creating an alert'
it_behaves_like 'creating an alert issue'
it_behaves_like 'setting issue attributes'
it_behaves_like 'sets issue labels'
end
context 'when issue cannot be created' do
@ -89,7 +196,7 @@ RSpec.describe AlertManagement::CreateAlertIssueService do
it 'has an unsuccessful status' do
expect(execute).to be_error
expect(execute.message).to eq('invalid alert')
expect(execute.message).to eq("Title can't be blank")
end
end
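Read together, the presenter lets and the new failure message describe a service that renders title and description through an alert presenter, delegates creation to the issues service, and now propagates the underlying validation error ("Title can't be blank") instead of a generic 'invalid alert'. A sketch under those assumptions; helper names are illustrative:
# Sketch of the flow the examples verify.
def execute
  issue = create_issue(
    title: alert_presenter.title,
    description: alert_presenter.issue_summary_markdown
  )

  return error(issue.errors.full_messages.to_sentence) unless issue.valid?

  alert.update(issue_id: issue.id)
  success(issue: issue)
end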

View File

@ -0,0 +1,190 @@
# frozen_string_literal: true
require 'spec_helper'
describe AuthorizedProjectUpdate::ProjectGroupLinkCreateService do
let_it_be(:group_parent) { create(:group, :private) }
let_it_be(:group) { create(:group, :private, parent: group_parent) }
let_it_be(:group_child) { create(:group, :private, parent: group) }
let_it_be(:parent_group_user) { create(:user) }
let_it_be(:group_user) { create(:user) }
let_it_be(:project) { create(:project, :private, group: create(:group, :private)) }
let(:access_level) { Gitlab::Access::MAINTAINER }
subject(:service) { described_class.new(project, group) }
describe '#execute' do
context 'direct group members' do
before do
create(:group_member, access_level: access_level, group: group, user: group_user)
ProjectAuthorization.delete_all
end
it 'creates project authorization' do
expect { service.execute }.to(
change { ProjectAuthorization.count }.from(0).to(1))
project_authorization = ProjectAuthorization.where(
project_id: project.id,
user_id: group_user.id,
access_level: access_level)
expect(project_authorization).to exist
end
end
context 'inherited group members' do
before do
create(:group_member, access_level: access_level, group: group_parent, user: parent_group_user)
ProjectAuthorization.delete_all
end
it 'creates project authorization' do
expect { service.execute }.to(
change { ProjectAuthorization.count }.from(0).to(1))
project_authorization = ProjectAuthorization.where(
project_id: project.id,
user_id: parent_group_user.id,
access_level: access_level)
expect(project_authorization).to exist
end
end
context 'membership overrides' do
before do
create(:group_member, access_level: Gitlab::Access::REPORTER, group: group_parent, user: group_user)
create(:group_member, access_level: Gitlab::Access::DEVELOPER, group: group, user: group_user)
ProjectAuthorization.delete_all
end
it 'creates project authorization' do
expect { service.execute }.to(
change { ProjectAuthorization.count }.from(0).to(1))
project_authorization = ProjectAuthorization.where(
project_id: project.id,
user_id: group_user.id,
access_level: Gitlab::Access::DEVELOPER)
expect(project_authorization).to exist
end
end
context 'no group member' do
it 'does not create project authorization' do
expect { service.execute }.not_to(
change { ProjectAuthorization.count }.from(0))
end
end
context 'unapproved access requests' do
before do
create(:group_member, :guest, :access_request, user: group_user, group: group)
end
it 'does not create project authorization' do
expect { service.execute }.not_to(
change { ProjectAuthorization.count }.from(0))
end
end
context 'project has more users than BATCH_SIZE' do
let(:batch_size) { 2 }
let(:users) { create_list(:user, batch_size + 1) }
before do
stub_const("#{described_class.name}::BATCH_SIZE", batch_size)
users.each do |user|
create(:group_member, access_level: access_level, group: group_parent, user: user)
end
ProjectAuthorization.delete_all
end
it 'bulk creates project authorizations in batches' do
users.each_slice(batch_size) do |batch|
attributes = batch.map do |user|
{ user_id: user.id, project_id: project.id, access_level: access_level }
end
expect(ProjectAuthorization).to(
receive(:insert_all).with(array_including(attributes)).and_call_original)
end
expect { service.execute }.to(
change { ProjectAuthorization.count }.from(0).to(batch_size + 1))
end
end
context 'users have existing project authorizations' do
before do
create(:group_member, access_level: access_level, group: group, user: group_user)
ProjectAuthorization.delete_all
create(:project_authorization, user_id: group_user.id,
project_id: project.id,
access_level: existing_access_level)
end
context 'when access level is the same' do
let(:existing_access_level) { access_level }
it 'does not create project authorization' do
project_authorization = ProjectAuthorization.where(
project_id: project.id,
user_id: group_user.id,
access_level: existing_access_level)
expect(ProjectAuthorization).not_to receive(:insert_all)
expect { service.execute }.not_to(
change { project_authorization.reload.exists? }.from(true))
end
end
context 'when existing access level is lower' do
let(:existing_access_level) { Gitlab::Access::DEVELOPER }
it 'creates new project authorization' do
project_authorization = ProjectAuthorization.where(
project_id: project.id,
user_id: group_user.id,
access_level: access_level)
expect { service.execute }.to(
change { project_authorization.reload.exists? }.from(false).to(true))
end
it 'deletes previous project authorization' do
project_authorization = ProjectAuthorization.where(
project_id: project.id,
user_id: group_user.id,
access_level: existing_access_level)
expect { service.execute }.to(
change { project_authorization.reload.exists? }.from(true).to(false))
end
end
context 'when existing access level is higher' do
let(:existing_access_level) { Gitlab::Access::OWNER }
it 'does not create project authorization' do
project_authorization = ProjectAuthorization.where(
project_id: project.id,
user_id: group_user.id,
access_level: existing_access_level)
expect(ProjectAuthorization).not_to receive(:insert_all)
expect { service.execute }.not_to(
change { project_authorization.reload.exists? }.from(true))
end
end
end
end
end
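Taken together, the contexts above pin down the service's contract: resolve each member's effective (inherited and overridden) access level, skip users whose existing authorization is equal or higher, replace lower ones, and bulk-insert in BATCH_SIZE slices via insert_all. A condensed sketch of that loop, with relation and helper names assumed:
# Condensed sketch of the batching contract; names are assumptions, and
# deduplication to the highest access level per user is omitted for brevity.
def execute
  group.members_with_parents.each_batch(of: BATCH_SIZE) do |members|
    attributes = members.map do |member|
      existing = existing_access_levels[member.user_id]
      next if existing && existing >= member.access_level

      { user_id: member.user_id, project_id: project.id, access_level: member.access_level }
    end.compact

    # Lower-level rows are deleted elsewhere before the replacement insert.
    ProjectAuthorization.insert_all(attributes) if attributes.any?
  end

  ServiceResponse.success
end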

View File

@ -281,22 +281,12 @@ describe IncidentManagement::CreateIssueService do
setting.update!(create_issue: false)
end
context 'when skip_settings_check is false (default)' do
it 'returns an error' do
expect(service)
.to receive(:log_error)
.with(error_message('setting disabled'))
it 'returns an error' do
expect(service)
.to receive(:log_error)
.with(error_message('setting disabled'))
expect(subject).to eq(status: :error, message: 'setting disabled')
end
end
context 'when skip_settings_check is true' do
subject { service.execute(skip_settings_check: true) }
it 'creates an issue' do
expect { subject }.to change(Issue, :count).by(1)
end
expect(subject).to eq(status: :error, message: 'setting disabled')
end
end

View File

@ -23,7 +23,7 @@ describe Projects::GroupLinks::CreateService, '#execute' do
expect { subject.execute(group) }.to change { project.project_group_links.count }.from(0).to(1)
end
it 'updates authorization' do
it 'updates authorization', :sidekiq_inline do
expect { subject.execute(group) }.to(
change { Ability.allowed?(user, :read_project, project) }
.from(false).to(true))
@ -36,4 +36,50 @@ describe Projects::GroupLinks::CreateService, '#execute' do
it 'returns error if user is not allowed to share with a group' do
expect { subject.execute(create(:group)) }.not_to change { project.project_group_links.count }
end
context 'with specialized_project_authorization_workers' do
let_it_be(:other_user) { create(:user) }
before do
group.add_developer(other_user)
end
it 'schedules authorization update for users with access to group' do
expect(AuthorizedProjectsWorker).not_to(
receive(:bulk_perform_async)
)
expect(AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker).to(
receive(:perform_async).and_call_original
)
expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).to(
receive(:bulk_perform_in)
.with(1.hour,
array_including([user.id], [other_user.id]),
batch_delay: 30.seconds, batch_size: 100)
.and_call_original
)
subject.execute(group)
end
context 'when feature is disabled' do
before do
stub_feature_flags(specialized_project_authorization_project_share_worker: false)
end
it 'uses AuthorizedProjectsWorker' do
expect(AuthorizedProjectsWorker).to(
receive(:bulk_perform_async).with(array_including([user.id], [other_user.id])).and_call_original
)
expect(AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker).not_to(
receive(:perform_async)
)
expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).not_to(
receive(:bulk_perform_in)
)
subject.execute(group)
end
end
end
end
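The flag-gated expectations imply the create service chooses its refresh strategy at dispatch time: the specialized link worker plus delayed low-urgency refreshes when the flag is on, the legacy AuthorizedProjectsWorker otherwise. A sketch of that branch, with argument shapes inferred from the expectations above:
# Hypothetical dispatch branch; method and relation names are assumptions.
def setup_authorizations(group)
  if Feature.enabled?(:specialized_project_authorization_project_share_worker)
    AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker.perform_async(project.id, group.id)

    # Low-urgency safety net: re-check each affected user within the hour.
    AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker.bulk_perform_in(
      1.hour, group.user_ids.map { |id| [id] },
      batch_delay: 30.seconds, batch_size: 100)
  else
    AuthorizedProjectsWorker.bulk_perform_async(group.user_ids.map { |id| [id] })
  end
end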

View File

@ -27,7 +27,7 @@ describe AuthorizedProjectUpdate::ProjectCreateWorker do
context 'idempotence' do
before do
create(:group_member, access_level: Gitlab::Access::MAINTAINER, group: group, user: group_user)
create(:group_member, access_level: access_level, group: group, user: group_user)
ProjectAuthorization.delete_all
end

View File

@ -0,0 +1,52 @@
# frozen_string_literal: true
require 'spec_helper'
describe AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker do
let_it_be(:group) { create(:group, :private) }
let_it_be(:group_project) { create(:project, group: group) }
let_it_be(:shared_with_group) { create(:group, :private) }
let_it_be(:user) { create(:user) }
let(:access_level) { Gitlab::Access::MAINTAINER }
subject(:worker) { described_class.new }
it 'calls AuthorizedProjectUpdate::ProjectGroupLinkCreateService' do
expect_next_instance_of(AuthorizedProjectUpdate::ProjectGroupLinkCreateService) do |service|
expect(service).to(receive(:execute))
end
worker.perform(group_project.id, shared_with_group.id)
end
it 'returns ServiceResponse.success' do
result = worker.perform(group_project.id, shared_with_group.id)
expect(result.success?).to be_truthy
end
context 'idempotence' do
before do
create(:group_member, group: shared_with_group, user: user, access_level: access_level)
create(:project_group_link, project: group_project, group: shared_with_group)
ProjectAuthorization.delete_all
end
include_examples 'an idempotent worker' do
let(:job_args) { [group_project.id, shared_with_group.id] }
it 'creates project authorization' do
subject
project_authorization = ProjectAuthorization.where(
project_id: group_project.id,
user_id: user.id,
access_level: access_level)
expect(project_authorization).to exist
expect(ProjectAuthorization.count).to eq(1)
end
end
end
end
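The worker spec implies a thin, idempotent perform that rehydrates both records and returns the service's response. An assumed shape; the real class may differ:
# Assumed worker shape, consistent with the expectations above.
module AuthorizedProjectUpdate
  class ProjectGroupLinkCreateWorker
    include ApplicationWorker

    idempotent!

    def perform(project_id, group_id)
      project = Project.find(project_id)
      group = Group.find(group_id)

      # Delegating returns the service's ServiceResponse to the caller.
      ProjectGroupLinkCreateService.new(project, group).execute
    end
  end
end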

View File

@ -835,15 +835,15 @@
eslint-plugin-vue "^6.2.1"
vue-eslint-parser "^7.0.0"
"@gitlab/svgs@1.139.0":
version "1.139.0"
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.139.0.tgz#8a4874e76000e2dd7d3ed3a8967d62bed47d7ea7"
integrity sha512-o1KAmQLYL727HodlPHkmj+d+Kdw8OIgHzlKmmPYMzeE+As2l1oz6CTilca56KqXGklOgrouC9P2puMwyX8e/6g==
"@gitlab/svgs@1.140.0":
version "1.140.0"
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.140.0.tgz#593f1f65b0df57c3399fcfb9f472f59aa64da074"
integrity sha512-6gANJGi2QkpvOgFTMcY3SIwEqhO69i6R3jU4BSskkVziwDdAWxGonln22a4Iu//Iv0NrsFDpAA0jIVfnJzw0iA==
"@gitlab/ui@16.12.1":
version "16.12.1"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-16.12.1.tgz#4d6865308596b09e36961210df7a8a489aaadb6d"
integrity sha512-jF6/I71Q0mjHetIRDO8O4VO2KIGWKL/yH2Mdb/CqQKaEasgnc/YpuyHGCsBXqDPxCjRbXPeKp0EywICQx4agZA==
"@gitlab/ui@16.12.2":
version "16.12.2"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-16.12.2.tgz#dc37bc6f827b55b86e29b10e42500913446818a3"
integrity sha512-pCl0dzVsQ94MLk0T0jCwgv9Dbf+FX+6vpR+E0FQH6SFAIaNEEpkBkSDiVp0Q1RJoRi1Q6nK1rVPoMWTwW6/7uA==
dependencies:
"@babel/standalone" "^7.0.0"
"@gitlab/vue-toasted" "^1.3.0"