Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-08-10 15:10:03 +00:00
parent 63c306d960
commit c74c13e2e1
71 changed files with 1255 additions and 698 deletions

View file

@ -1342,6 +1342,17 @@ entry.
- [Add missing metrics information](gitlab-org/gitlab@89cd7fe3b95323e635b2d73e08549b2e6153dc4d) ([merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/61772/edit))
- [Track usage of the resolve UI](gitlab-org/gitlab@35c8e30fce288cecefcf2f7c0077d4608e696519) ([merge request](gitlab-org/gitlab!61654))
## 13.12.10 (2021-08-10)
### Fixed (2 changes)
- [Fix validation method regarding MIME type keys](gitlab-org/gitlab@4782194408063f61da4e1e69d7d8813cfec84a78) ([merge request](gitlab-org/gitlab!67748))
- [Do not create audit event for failed logins on read-only DB](gitlab-org/gitlab@53237efd7b677ccaa7db05f51d5594f594db41ce) ([merge request](gitlab-org/gitlab!67748)) **GitLab Enterprise Edition**
### Changed (1 change)
- [Resolve "operator does not exist: integer[] || bigint in...](gitlab-org/gitlab@fcaf589950878529019d9d9d6b047b4802c9c374) ([merge request](gitlab-org/gitlab!67748))
## 13.12.9 (2021-08-03)
### Security (15 changes)

View file

@ -8,8 +8,8 @@ import {
GlDropdownItem,
GlTooltipDirective as GlTooltip,
} from '@gitlab/ui';
import { acceptedMimes } from '../extensions/image';
import { getImageAlt } from '../services/utils';
import { acceptedMimes } from '../services/upload_helpers';
import { extractFilename } from '../services/utils';
export default {
components: {
@ -41,7 +41,7 @@ export default {
.setImage({
src: this.imgSrc,
canonicalSrc: this.imgSrc,
alt: getImageAlt(this.imgSrc),
alt: extractFilename(this.imgSrc),
})
.run();
@ -58,7 +58,7 @@ export default {
this.tiptapEditor
.chain()
.focus()
.uploadImage({
.uploadAttachment({
file: e.target.files[0],
})
.run();
@ -67,7 +67,7 @@ export default {
this.emitExecute('upload');
},
},
acceptedMimes,
acceptedMimes: acceptedMimes.image,
};
</script>
<template>

View file

@ -33,6 +33,13 @@ export default {
};
},
methods: {
resetFields() {
this.imgSrc = '';
this.$refs.fileSelector.value = '';
},
openFileUpload() {
this.$refs.fileSelector.click();
},
updateLinkState({ editor }) {
const { canonicalSrc, href } = editor.getAttributes(Link.name);
@ -65,6 +72,18 @@ export default {
this.$emit('execute', { contentType: Link.name });
},
onFileSelect(e) {
this.tiptapEditor
.chain()
.focus()
.uploadAttachment({
file: e.target.files[0],
})
.run();
this.resetFields();
this.$emit('execute', { contentType: Link.name });
},
},
};
</script>
@ -83,14 +102,25 @@ export default {
<gl-dropdown-form class="gl-px-3!">
<gl-form-input-group v-model="linkHref" :placeholder="__('Link URL')">
<template #append>
<gl-button variant="confirm" @click="updateLink()">{{ __('Apply') }}</gl-button>
<gl-button variant="confirm" @click="updateLink">{{ __('Apply') }}</gl-button>
</template>
</gl-form-input-group>
</gl-dropdown-form>
<gl-dropdown-divider v-if="isActive" />
<gl-dropdown-item v-if="isActive" @click="removeLink()">
<gl-dropdown-divider />
<gl-dropdown-item v-if="isActive" @click="removeLink">
{{ __('Remove link') }}
</gl-dropdown-item>
<gl-dropdown-item v-else @click="openFileUpload">
{{ __('Upload file') }}
</gl-dropdown-item>
<input
ref="fileSelector"
type="file"
name="content_editor_attachment"
class="gl-display-none"
@change="onFileSelect"
/>
</gl-dropdown>
</editor-state-observer>
</template>

View file

@@ -0,0 +1,53 @@
import { Extension } from '@tiptap/core';
import { Plugin, PluginKey } from 'prosemirror-state';
import { handleFileEvent } from '../services/upload_helpers';
/**
 * Content Editor extension that adds file-attachment support.
 *
 * Exposes an `uploadAttachment` command and registers a ProseMirror
 * plugin that intercepts paste and drop events, forwarding any file in
 * the event payload to `handleFileEvent` for uploading.
 */
export default Extension.create({
  name: 'attachment',

  defaultOptions: {
    uploadsPath: null,
    renderMarkdown: null,
  },

  addCommands() {
    const uploadAttachment = ({ file }) => () => {
      const { uploadsPath, renderMarkdown } = this.options;

      return handleFileEvent({ editor: this.editor, file, uploadsPath, renderMarkdown });
    };

    return { uploadAttachment };
  },

  addProseMirrorPlugins() {
    const { editor } = this;

    // Shared handler for both paste and drop: take the first file (if
    // any) from the event payload and hand it off to the upload service.
    const forwardFile = (file) => {
      const { uploadsPath, renderMarkdown } = this.options;

      return handleFileEvent({ editor, file, uploadsPath, renderMarkdown });
    };

    return [
      new Plugin({
        key: new PluginKey('attachment'),
        props: {
          handlePaste: (_, event) => forwardFile(event.clipboardData.files[0]),
          handleDrop: (_, event) => forwardFile(event.dataTransfer.files[0]),
        },
      }),
    ];
  },
});

View file

@ -1,58 +1,13 @@
import { Image } from '@tiptap/extension-image';
import { VueNodeViewRenderer } from '@tiptap/vue-2';
import { Plugin, PluginKey } from 'prosemirror-state';
import { __ } from '~/locale';
import ImageWrapper from '../components/wrappers/image.vue';
import { uploadFile } from '../services/upload_file';
import { getImageAlt, readFileAsDataURL } from '../services/utils';
export const acceptedMimes = ['image/jpeg', 'image/png', 'image/gif', 'image/jpg'];
const resolveImageEl = (element) =>
element.nodeName === 'IMG' ? element : element.querySelector('img');
const startFileUpload = async ({ editor, file, uploadsPath, renderMarkdown }) => {
const encodedSrc = await readFileAsDataURL(file);
const { view } = editor;
editor.commands.setImage({ uploading: true, src: encodedSrc });
const { state } = view;
const position = state.selection.from - 1;
const { tr } = state;
try {
const { src, canonicalSrc } = await uploadFile({ file, uploadsPath, renderMarkdown });
view.dispatch(
tr.setNodeMarkup(position, undefined, {
uploading: false,
src: encodedSrc,
alt: getImageAlt(src),
canonicalSrc,
}),
);
} catch (e) {
editor.commands.deleteRange({ from: position, to: position + 1 });
editor.emit('error', __('An error occurred while uploading the image. Please try again.'));
}
};
const handleFileEvent = ({ editor, file, uploadsPath, renderMarkdown }) => {
if (acceptedMimes.includes(file?.type)) {
startFileUpload({ editor, file, uploadsPath, renderMarkdown });
return true;
}
return false;
};
export default Image.extend({
defaultOptions: {
...Image.options,
uploadsPath: null,
renderMarkdown: null,
inline: true,
},
addAttributes() {
@ -108,47 +63,6 @@ export default Image.extend({
},
];
},
addCommands() {
return {
...this.parent(),
uploadImage: ({ file }) => () => {
const { uploadsPath, renderMarkdown } = this.options;
handleFileEvent({ file, uploadsPath, renderMarkdown, editor: this.editor });
},
};
},
addProseMirrorPlugins() {
const { editor } = this;
return [
new Plugin({
key: new PluginKey('handleDropAndPasteImages'),
props: {
handlePaste: (_, event) => {
const { uploadsPath, renderMarkdown } = this.options;
return handleFileEvent({
editor,
file: event.clipboardData.files[0],
uploadsPath,
renderMarkdown,
});
},
handleDrop: (_, event) => {
const { uploadsPath, renderMarkdown } = this.options;
return handleFileEvent({
editor,
file: event.dataTransfer.files[0],
uploadsPath,
renderMarkdown,
});
},
},
}),
];
},
addNodeView() {
return VueNodeViewRenderer(ImageWrapper);
},

View file

@ -21,6 +21,10 @@ export const extractHrefFromMarkdownLink = (match) => {
};
export default Link.extend({
defaultOptions: {
...Link.options,
openOnClick: false,
},
addInputRules() {
return [
markInputRule(markdownLinkSyntaxInputRuleRegExp, this.type, extractHrefFromMarkdownLink),
@ -48,6 +52,4 @@ export default Link.extend({
},
};
},
}).configure({
openOnClick: false,
});

View file

@@ -0,0 +1,24 @@
import { Node } from '@tiptap/core';
/**
 * Inline placeholder node displayed while a file upload is in flight.
 *
 * Rendered as a spinner followed by link-styled label text; the label
 * is provided through the node's `label` attribute.
 */
export default Node.create({
  name: 'loading',
  inline: true,
  group: 'inline',

  addAttributes() {
    return {
      label: { default: null },
    };
  },

  renderHTML({ node }) {
    const spinner = ['span', { class: 'gl-spinner gl-mx-2' }];
    const label = ['span', { class: 'gl-link' }, node.attrs.label];

    return ['span', { class: 'gl-display-inline-flex gl-align-items-center' }, spinner, label];
  },
});

View file

@ -1,6 +1,7 @@
import { Editor } from '@tiptap/vue-2';
import { isFunction } from 'lodash';
import { PROVIDE_SERIALIZER_OR_RENDERER_ERROR } from '../constants';
import Attachment from '../extensions/attachment';
import Blockquote from '../extensions/blockquote';
import Bold from '../extensions/bold';
import BulletList from '../extensions/bullet_list';
@ -17,6 +18,7 @@ import Image from '../extensions/image';
import Italic from '../extensions/italic';
import Link from '../extensions/link';
import ListItem from '../extensions/list_item';
import Loading from '../extensions/loading';
import OrderedList from '../extensions/ordered_list';
import Paragraph from '../extensions/paragraph';
import Strike from '../extensions/strike';
@ -52,6 +54,7 @@ export const createContentEditor = ({
}
const builtInContentEditorExtensions = [
Attachment.configure({ uploadsPath, renderMarkdown }),
Blockquote,
Bold,
BulletList,
@ -64,10 +67,11 @@ export const createContentEditor = ({
Heading,
History,
HorizontalRule,
Image.configure({ uploadsPath, renderMarkdown }),
Image,
Italic,
Link,
ListItem,
Loading,
OrderedList,
Paragraph,
Strike,

View file

@ -1,44 +0,0 @@
import axios from '~/lib/utils/axios_utils';
const extractAttachmentLinkUrl = (html) => {
const parser = new DOMParser();
const { body } = parser.parseFromString(html, 'text/html');
const link = body.querySelector('a');
const src = link.getAttribute('href');
const { canonicalSrc } = link.dataset;
return { src, canonicalSrc };
};
/**
* Uploads a file with a post request to the URL indicated
* in the uploadsPath parameter. The expected response of the
* uploads service is a JSON object that contains, at least, a
* link property. The link property should contain markdown link
* definition (i.e. [GitLab](https://gitlab.com)).
*
* This Markdown will be rendered to extract its canonical and full
* URLs using GitLab Flavored Markdown renderer in the backend.
*
* @param {Object} params
* @param {String} params.uploadsPath An absolute URL that points to a service
* that allows sending a file for uploading via POST request.
* @param {String} params.renderMarkdown A function that accepts a markdown string
* and returns a rendered version in HTML format.
* @param {File} params.file The file to upload
*
* @returns Returns an object with two properties:
*
* canonicalSrc: The URL as defined in the Markdown
* src: The absolute URL that points to the resource in the server
*/
export const uploadFile = async ({ uploadsPath, renderMarkdown, file }) => {
const formData = new FormData();
formData.append('file', file, file.name);
const { data } = await axios.post(uploadsPath, formData);
const { markdown } = data.link;
const rendered = await renderMarkdown(markdown);
return extractAttachmentLinkUrl(rendered);
};

View file

@@ -0,0 +1,119 @@
import axios from '~/lib/utils/axios_utils';
import { __ } from '~/locale';
import { extractFilename, readFileAsDataURL } from './utils';
// MIME types that are uploaded and rendered inline as images; any
// other file type is uploaded as a generic attachment link instead
// (see `handleFileEvent` below).
export const acceptedMimes = {
  image: ['image/jpeg', 'image/png', 'image/gif', 'image/jpg'],
};
/**
 * Parses rendered-Markdown HTML and extracts the upload's URLs from
 * the first anchor element in it.
 *
 * NOTE(review): assumes the HTML contains an anchor element; throws a
 * TypeError otherwise — confirm callers guarantee this.
 *
 * @param {String} html HTML produced by the Markdown renderer
 * @returns {{ src: String, canonicalSrc: String }} the anchor's href
 *   attribute and its `data-canonical-src` value
 */
const extractAttachmentLinkUrl = (html) => {
  const doc = new DOMParser().parseFromString(html, 'text/html');
  const link = doc.body.querySelector('a');

  return {
    src: link.getAttribute('href'),
    canonicalSrc: link.dataset.canonicalSrc,
  };
};
/**
 * Uploads a file with a POST request to the URL given in the
 * uploadsPath parameter.
 *
 * The upload service is expected to respond with a JSON object that
 * contains, at least, a `link` property holding a Markdown link
 * definition (i.e. [GitLab](https://gitlab.com)). That Markdown is
 * rendered with the GitLab Flavored Markdown renderer in the backend
 * so its canonical and absolute URLs can be extracted.
 *
 * @param {Object} params
 * @param {String} params.uploadsPath An absolute URL that points to a service
 * that allows sending a file for uploading via POST request.
 * @param {String} params.renderMarkdown A function that accepts a markdown string
 * and returns a rendered version in HTML format.
 * @param {File} params.file The file to upload
 *
 * @returns Returns an object with two properties:
 *
 * canonicalSrc: The URL as defined in the Markdown
 * src: The absolute URL that points to the resource in the server
 */
export const uploadFile = async ({ uploadsPath, renderMarkdown, file }) => {
  const formData = new FormData();
  formData.append('file', file, file.name);

  const response = await axios.post(uploadsPath, formData);
  const renderedLink = await renderMarkdown(response.data.link.markdown);

  return extractAttachmentLinkUrl(renderedLink);
};
// Uploads an image file, showing an immediate local preview while the
// upload is in flight, then rewrites the placeholder node with the
// final attributes once the upload service responds.
const uploadImage = async ({ editor, file, uploadsPath, renderMarkdown }) => {
  // Embed the file as a data URL so the user sees the image right away.
  const encodedSrc = await readFileAsDataURL(file);
  const { view } = editor;
  editor.commands.setImage({ uploading: true, src: encodedSrc });
  // NOTE(review): assumes the selection sits immediately after the
  // image node inserted above, so the node itself starts at from - 1 —
  // confirm against setImage's selection behavior.
  const { state } = view;
  const position = state.selection.from - 1;
  const { tr } = state;
  try {
    const { src, canonicalSrc } = await uploadFile({ file, uploadsPath, renderMarkdown });
    // Keep the data-URL preview as the displayed src; store the
    // uploaded URL as canonicalSrc and derive alt text from the
    // uploaded file's name.
    view.dispatch(
      tr.setNodeMarkup(position, undefined, {
        uploading: false,
        src: encodedSrc,
        alt: extractFilename(src),
        canonicalSrc,
      }),
    );
  } catch (e) {
    // Upload failed: remove the placeholder image and surface the error.
    editor.commands.deleteRange({ from: position, to: position + 1 });
    editor.emit('error', __('An error occurred while uploading the image. Please try again.'));
  }
};
// Uploads a non-image file and inserts a link to it, showing an inline
// "loading" placeholder node while the upload is in flight.
const uploadAttachment = async ({ editor, file, uploadsPath, renderMarkdown }) => {
  // Yield one microtask before touching editor state.
  // NOTE(review): presumably defers the insertion below until after the
  // current event handler / transaction completes — confirm.
  await Promise.resolve();
  const { view } = editor;
  // Placeholder label and eventual link text (extension stripped).
  const text = extractFilename(file.name);
  // Remember where the placeholder goes so it can be replaced or
  // removed once the upload settles.
  const { state } = view;
  const { from } = state.selection;
  editor.commands.insertContent({
    type: 'loading',
    attrs: { label: text },
  });
  try {
    const { src, canonicalSrc } = await uploadFile({ file, uploadsPath, renderMarkdown });
    // Replace the single-position loading node with link-marked text.
    editor.commands.insertContentAt(
      { from, to: from + 1 },
      { type: 'text', text, marks: [{ type: 'link', attrs: { href: src, canonicalSrc } }] },
    );
  } catch (e) {
    // Upload failed: drop the placeholder and surface the error.
    editor.commands.deleteRange({ from, to: from + 1 });
    editor.emit('error', __('An error occurred while uploading the file. Please try again.'));
  }
};
/**
 * Routes a pasted/dropped/selected file to the appropriate uploader.
 *
 * Files whose MIME type is listed in `acceptedMimes.image` are uploaded
 * and rendered inline as images; every other file is uploaded as a
 * generic attachment link. The upload itself runs fire-and-forget: this
 * function returns as soon as the upload has been kicked off.
 *
 * @param {Object} params
 * @param {Object} params.editor The tiptap editor instance
 * @param {File} params.file The file to upload; may be undefined/null
 *   when the triggering event carried no files
 * @param {String} params.uploadsPath Upload service URL
 * @param {Function} params.renderMarkdown Markdown-to-HTML renderer
 * @returns {Boolean} true when an upload was started, false when there
 *   was no file to upload
 */
export const handleFileEvent = ({ editor, file, uploadsPath, renderMarkdown }) => {
  if (!file) return false;

  // `file` is guaranteed non-null past the guard above, so the original
  // `file?.type` optional chain was redundant.
  if (acceptedMimes.image.includes(file.type)) {
    // Deliberately not awaited: progress is reflected in the editor by
    // the uploader itself (placeholder node + error event).
    uploadImage({ editor, file, uploadsPath, renderMarkdown });
  } else {
    uploadAttachment({ editor, file, uploadsPath, renderMarkdown });
  }

  return true;
};

View file

@ -4,8 +4,18 @@ export const hasSelection = (tiptapEditor) => {
return from < to;
};
export const getImageAlt = (src) => {
return src.replace(/^.*\/|\..*$/g, '').replace(/\W+/g, ' ');
/**
 * Extracts the filename (without its extension) from a URL.
 *
 * @example
 * > extractFilename('https://gitlab.com/images/logo-full.png')
 * < 'logo-full'
 *
 * @param {string} src The URL to extract the filename from
 * @returns {string}
 */
export const extractFilename = (src) => {
  // Everything after the last slash (the whole string when no slash),
  // then drop the extension: from the first dot through the end.
  const basename = src.slice(src.lastIndexOf('/') + 1);
  return basename.replace(/\..+?$/, '');
};
export const readFileAsDataURL = (file) => {

View file

@ -115,11 +115,11 @@ export default {
renderGFM() {
$(this.$refs['note-body']).renderGFM();
},
handleFormUpdate(note, parentElement, callback, resolveDiscussion) {
this.$emit('handleFormUpdate', note, parentElement, callback, resolveDiscussion);
handleFormUpdate(noteText, parentElement, callback, resolveDiscussion) {
this.$emit('handleFormUpdate', { noteText, parentElement, callback, resolveDiscussion });
},
formCancelHandler(shouldConfirm, isDirty) {
this.$emit('cancelForm', shouldConfirm, isDirty);
this.$emit('cancelForm', { shouldConfirm, isDirty });
},
applySuggestion({ suggestionId, flashContainer, callback = () => {}, message }) {
const { discussion_id: discussionId, id: noteId } = this.note;

View file

@ -263,7 +263,7 @@ export default {
this.$refs.noteBody.resetAutoSave();
this.$emit('updateSuccess');
},
formUpdateHandler(noteText, parentElement, callback, resolveDiscussion) {
formUpdateHandler({ noteText, callback, resolveDiscussion }) {
const position = {
...this.note.position,
};
@ -329,7 +329,7 @@ export default {
}
});
},
formCancelHandler(shouldConfirm, isDirty) {
formCancelHandler({ shouldConfirm, isDirty }) {
if (shouldConfirm && isDirty) {
// eslint-disable-next-line no-alert
if (!window.confirm(__('Are you sure you want to cancel editing this comment?'))) return;

View file

@ -12,19 +12,10 @@ export default {
ReportSection,
},
props: {
headPath: {
type: String,
required: true,
},
headBlobPath: {
type: String,
required: true,
},
basePath: {
type: String,
required: false,
default: null,
},
baseBlobPath: {
type: String,
required: false,
@ -52,8 +43,6 @@ export default {
},
created() {
this.setPaths({
basePath: this.basePath,
headPath: this.headPath,
baseBlobPath: this.baseBlobPath,
headBlobPath: this.headBlobPath,
reportsPath: this.codequalityReportsPath,

View file

@ -1,4 +1,5 @@
import pollUntilComplete from '~/lib/utils/poll_until_complete';
import { STATUS_NOT_FOUND } from '../../constants';
import * as types from './mutation_types';
import { parseCodeclimateMetrics } from './utils/codequality_parser';
@ -7,11 +8,11 @@ export const setPaths = ({ commit }, paths) => commit(types.SET_PATHS, paths);
export const fetchReports = ({ state, dispatch, commit }) => {
commit(types.REQUEST_REPORTS);
if (!state.basePath) {
return dispatch('receiveReportsError');
}
return pollUntilComplete(state.reportsPath)
.then(({ data }) => {
if (data.status === STATUS_NOT_FOUND) {
return dispatch('receiveReportsError', data);
}
return dispatch('receiveReportsSuccess', {
newIssues: parseCodeclimateMetrics(data.new_errors, state.headBlobPath),
resolvedIssues: parseCodeclimateMetrics(data.resolved_errors, state.baseBlobPath),

View file

@ -1,6 +1,6 @@
import { spriteIcon } from '~/lib/utils/common_utils';
import { sprintf, __, s__, n__ } from '~/locale';
import { LOADING, ERROR, SUCCESS } from '../../constants';
import { LOADING, ERROR, SUCCESS, STATUS_NOT_FOUND } from '../../constants';
export const hasCodequalityIssues = (state) =>
Boolean(state.newIssues?.length || state.resolvedIssues?.length);
@ -42,7 +42,7 @@ export const codequalityText = (state) => {
};
export const codequalityPopover = (state) => {
if (state.headPath && !state.basePath) {
if (state.status === STATUS_NOT_FOUND) {
return {
title: s__('ciReport|Base pipeline codequality artifact not found'),
content: sprintf(

View file

@ -2,8 +2,6 @@ import * as types from './mutation_types';
export default {
[types.SET_PATHS](state, paths) {
state.basePath = paths.basePath;
state.headPath = paths.headPath;
state.baseBlobPath = paths.baseBlobPath;
state.headBlobPath = paths.headBlobPath;
state.reportsPath = paths.reportsPath;
@ -14,6 +12,7 @@ export default {
},
[types.RECEIVE_REPORTS_SUCCESS](state, data) {
state.hasError = false;
state.status = '';
state.statusReason = '';
state.isLoading = false;
state.newIssues = data.newIssues;
@ -22,6 +21,7 @@ export default {
[types.RECEIVE_REPORTS_ERROR](state, error) {
state.isLoading = false;
state.hasError = true;
state.status = error?.status || '';
state.statusReason = error?.response?.data?.status_reason;
},
};

View file

@ -1,6 +1,4 @@
export default () => ({
basePath: null,
headPath: null,
reportsPath: null,
baseBlobPath: null,
@ -8,6 +6,7 @@ export default () => ({
isLoading: false,
hasError: false,
status: '',
statusReason: '',
newIssues: [],

View file

@ -12,6 +12,7 @@ export const SUCCESS = 'SUCCESS';
export const STATUS_FAILED = 'failed';
export const STATUS_SUCCESS = 'success';
export const STATUS_NEUTRAL = 'neutral';
export const STATUS_NOT_FOUND = 'not_found';
export const ICON_WARNING = 'warning';
export const ICON_SUCCESS = 'success';

View file

@ -150,7 +150,7 @@ export default {
);
},
shouldRenderCodeQuality() {
return this.mr?.codeclimate?.head_path;
return this.mr?.codequalityReportsPath;
},
shouldRenderRelatedLinks() {
return Boolean(this.mr.relatedLinks) && !this.mr.isNothingToMergeState;
@ -496,8 +496,6 @@ export default {
<!-- <extensions-container :mr="mr" /> -->
<grouped-codequality-reports-app
v-if="shouldRenderCodeQuality"
:base-path="mr.codeclimate.base_path"
:head-path="mr.codeclimate.head_path"
:head-blob-path="mr.headBlobPath"
:base-blob-path="mr.baseBlobPath"
:codequality-reports-path="mr.codequalityReportsPath"

View file

@ -261,7 +261,6 @@ export default class MergeRequestStore {
this.baseBlobPath = blobPath.base_path || '';
this.codequalityReportsPath = data.codequality_reports_path;
this.codequalityHelpPath = data.codequality_help_path;
this.codeclimate = data.codeclimate;
// Security reports
this.sastComparisonPath = data.sast_comparison_path;

View file

@ -27,10 +27,10 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic
diff_options_hash[:paths] = params[:paths] if params[:paths]
diffs = @compare.diffs_in_batch(params[:page], params[:per_page], diff_options: diff_options_hash)
positions = @merge_request.note_positions_for_paths(diffs.diff_file_paths, current_user)
unfoldable_positions = @merge_request.note_positions_for_paths(diffs.diff_file_paths, current_user).unfoldable
environment = @merge_request.environments_for(current_user, latest: true).last
diffs.unfold_diff_files(positions.unfoldable)
diffs.unfold_diff_files(unfoldable_positions)
diffs.write_cache
options = {
@ -43,10 +43,24 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic
}
if diff_options_hash[:paths].blank? && Feature.enabled?(:diffs_batch_render_cached, project, default_enabled: :yaml)
# NOTE: Any variables that would affect the resulting json needs to be added to the cache_context to avoid stale cache issues.
cache_context = [
current_user&.cache_key,
environment&.cache_key,
unfoldable_positions.map(&:to_h),
diff_view,
params[:w],
params[:expanded],
params[:page],
params[:per_page],
options[:merge_ref_head_diff],
options[:allow_tree_conflicts]
]
render_cached(
diffs,
with: PaginatedDiffSerializer.new(current_user: current_user),
cache_context: -> (_) { [diff_view, params[:w], params[:expanded], params[:per_page], params[:page]] },
cache_context: -> (_) { [Digest::SHA256.hexdigest(cache_context.to_s)] },
**options
)
else

View file

@ -5,6 +5,7 @@ module Types
class RunnerType < BaseObject
graphql_name 'CiRunner'
authorize :read_runner
present_using ::Ci::RunnerPresenter
JOB_COUNT_LIMIT = 1000

View file

@ -22,18 +22,6 @@ module GroupsHelper
]
end
def group_packages_nav_link_paths
%w[
groups/packages#index
groups/container_registries#index
]
end
def group_container_registry_nav?
Gitlab.config.registry.enabled &&
can?(current_user, :read_container_image, @group)
end
def group_sidebar_links
@group_sidebar_links ||= get_group_sidebar_links
end
@ -179,19 +167,6 @@ module GroupsHelper
groups.to_json
end
def group_packages_nav?
group_packages_list_nav? ||
group_container_registry_nav?
end
def group_dependency_proxy_nav?
@group.dependency_proxy_feature_available?
end
def group_packages_list_nav?
@group.packages_feature_enabled?
end
def show_invite_banner?(group)
can?(current_user, :admin_group, group) &&
!just_created? &&

View file

@ -11,6 +11,7 @@ module Ci
include FeatureGate
include Gitlab::Utils::StrongMemoize
include TaggableQueries
include Presentable
add_authentication_token_field :token, encrypted: :optional

View file

@@ -0,0 +1,12 @@
# frozen_string_literal: true
module Ci
  # Presenter for Ci::Runner that adjusts attribute values for display
  # (used e.g. when rendering runners through views/GraphQL).
  class RunnerPresenter < Gitlab::View::Presenter::Delegated
    presents :runner

    # Reports the runner as locked only when the `locked` column is set
    # AND the runner is project-type.
    # NOTE(review): presumably locking is only meaningful for project
    # runners — confirm against Ci::Runner's runner_type semantics.
    def locked?
      read_attribute(:locked) && project_type?
    end
    # Keep the non-predicate name (`locked`) returning the same
    # adjusted value as `locked?`.
    alias_method :locked, :locked?
  end
end

View file

@ -96,6 +96,7 @@
.table-section.section-10{ role: 'rowheader' }
- @group_runners.each do |runner|
- runner = runner.present(current_user: current_user)
= render 'groups/runners/runner', runner: runner
= paginate @group_runners, theme: 'gitlab', :params => { :anchor => 'runners-settings' }
- else

View file

@ -1,27 +0,0 @@
- packages_link = group_packages_list_nav? ? group_packages_path(@group) : group_container_registries_path(@group)
- if group_packages_nav?
= nav_link(controller: ['groups/packages', 'groups/registry/repositories', 'groups/dependency_proxies']) do
= link_to packages_link, title: _('Packages'), class: 'has-sub-items' do
.nav-icon-container
= sprite_icon('package')
%span.nav-item-name
= _('Packages & Registries')
%ul.sidebar-sub-level-items
= nav_link(controller: [:packages, :repositories], html_options: { class: "fly-out-top-item" } ) do
= link_to packages_link, title: _('Packages & Registries') do
%strong.fly-out-top-item-name
= _('Packages & Registries')
%li.divider.fly-out-top-item
- if group_packages_list_nav?
= nav_link(controller: 'groups/packages') do
= link_to group_packages_path(@group), title: _('Packages') do
%span= _('Package Registry')
- if group_container_registry_nav?
= nav_link(controller: 'groups/registry/repositories') do
= link_to group_container_registries_path(@group), title: _('Container Registry') do
%span= _('Container Registry')
- if group_dependency_proxy_nav?
= nav_link(controller: 'groups/dependency_proxies') do
= link_to group_dependency_proxy_path(@group), title: _('Dependency Proxy') do
%span= _('Dependency Proxy')

View file

@ -1,4 +1,4 @@
- if group_packages_list_nav?
- if @group.packages_feature_enabled?
= nav_link(controller: :packages_and_registries) do
= link_to group_settings_packages_and_registries_path(@group), title: _('Packages & Registries'), data: { qa_selector: 'group_package_settings_link' } do
%span

View file

@ -1,5 +1,3 @@
= render 'groups/sidebar/packages'
= render 'layouts/nav/sidebar/analytics_links', links: group_analytics_navbar_links(@group, current_user)
- if group_sidebar_link?(:wiki)

View file

@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/332333
milestone: '14.0'
type: development
group: group::pipeline authoring
default_enabled: false
default_enabled: true

View file

@ -152,20 +152,6 @@ module Gitlab
end
end
# pool_size - The size of the DB pool.
# host - An optional host name to use instead of the default one.
# port - An optional port to connect to.
def create_connection_pool(pool_size, host = nil, port = nil)
original_config = config
env_config = original_config.merge(pool: pool_size)
env_config[:host] = host if host
env_config[:port] = port if port
ActiveRecord::ConnectionAdapters::ConnectionHandler
.new.establish_connection(env_config)
end
def cached_column_exists?(table_name, column_name)
connection
.schema_cache.columns_hash(table_name)

View file

@ -29,7 +29,7 @@ module Gitlab
@host = host
@port = port
@load_balancer = load_balancer
@pool = Database.main.create_connection_pool(LoadBalancing.pool_size, host, port)
@pool = load_balancer.create_replica_connection_pool(LoadBalancing.pool_size, host, port)
@online = true
@last_checked_at = Time.zone.now
@ -41,7 +41,7 @@ module Gitlab
#
# timeout - The time after which the pool should be forcefully
# disconnected.
def disconnect!(timeout = 120)
def disconnect!(timeout: 120)
start_time = ::Gitlab::Metrics::System.monotonic_time
while (::Gitlab::Metrics::System.monotonic_time - start_time) <= timeout

View file

@ -14,10 +14,14 @@ module Gitlab
# hosts - The hostnames/addresses of the additional databases.
def initialize(hosts = [], model = ActiveRecord::Base)
@model = model
@host_list = HostList.new(hosts.map { |addr| Host.new(addr, self) })
@connection_db_roles = {}.compare_by_identity
@connection_db_roles_count = {}.compare_by_identity
@model = model
end
def disconnect!(timeout: 120)
host_list.hosts.each { |host| host.disconnect!(timeout: timeout) }
end
# Yields a connection that can be used for reads.
@ -205,6 +209,30 @@ module Gitlab
end
end
# pool_size - The size of the DB pool.
# host - An optional host name to use instead of the default one.
# port - An optional port to connect to.
def create_replica_connection_pool(pool_size, host = nil, port = nil)
db_config = pool.db_config
env_config = db_config.configuration_hash.dup
env_config[:pool] = pool_size
env_config[:host] = host if host
env_config[:port] = port if port
replica_db_config = ActiveRecord::DatabaseConfigurations::HashConfig.new(
db_config.env_name,
db_config.name + "_replica",
env_config
)
# We cannot use ActiveRecord::Base.connection_handler.establish_connection
# as it will rewrite ActiveRecord::Base.connection
ActiveRecord::ConnectionAdapters::ConnectionHandler
.new
.establish_connection(replica_db_config)
end
private
# ActiveRecord::ConnectionAdapters::ConnectionHandler handles fetching,

View file

@ -120,7 +120,7 @@ module Gitlab
# host/connection. While this connection will be checked in and out,
# it won't be explicitly disconnected.
old_hosts.each do |host|
host.disconnect!(disconnect_timeout)
host.disconnect!(timeout: disconnect_timeout)
end
end

View file

@ -3,40 +3,43 @@
module Gitlab
module Usage
class Metric
include ActiveModel::Model
attr_reader :definition
InvalidMetricError = Class.new(RuntimeError)
attr_accessor :key_path, :value
validates :key_path, presence: true
def definition
self.class.definitions[key_path]
end
def unflatten_key_path
unflatten(key_path.split('.'), value)
def initialize(definition)
@definition = definition
end
class << self
def definitions
@definitions ||= Gitlab::Usage::MetricDefinition.definitions
def all
@all ||= Gitlab::Usage::MetricDefinition.with_instrumentation_class.map do |definition|
self.new(definition)
end
end
end
def dictionary
definitions.map { |key, definition| definition.to_dictionary }
end
def with_value
unflatten_key_path(intrumentation_object.value)
end
def with_instrumentation
unflatten_key_path(intrumentation_object.instrumentation)
end
private
def unflatten(keys, value)
loop do
value = { keys.pop.to_sym => value }
break if keys.blank?
end
value
def unflatten_key_path(value)
::Gitlab::Usage::Metrics::KeyPathProcessor.process(definition.key_path, value)
end
def instrumentation_class
"Gitlab::Usage::Metrics::Instrumentations::#{definition.instrumentation_class}"
end
def intrumentation_object
instrumentation_class.constantize.new(
time_frame: definition.time_frame,
options: definition.attributes[:options]
)
end
end
end

View file

@ -7,6 +7,8 @@ module Gitlab
BASE_REPO_PATH = 'https://gitlab.com/gitlab-org/gitlab/-/blob/master'
SKIP_VALIDATION_STATUSES = %w[deprecated removed].to_set.freeze
InvalidError = Class.new(RuntimeError)
attr_reader :path
attr_reader :attributes
@ -48,7 +50,7 @@ module Gitlab
Metric file: #{path}
ERROR_MSG
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(Gitlab::Usage::Metric::InvalidMetricError.new(error_message))
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(InvalidError.new(error_message))
end
end
end
@ -69,6 +71,10 @@ module Gitlab
@all ||= definitions.map { |_key_path, definition| definition }
end
def with_instrumentation_class
all.select { |definition| definition.attributes[:instrumentation_class].present? }
end
def schemer
@schemer ||= ::JSONSchemer.schema(Pathname.new(METRIC_SCHEMA_PATH))
end
@ -92,7 +98,7 @@ module Gitlab
self.new(path, definition).tap(&:validate!)
rescue StandardError => e
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(Gitlab::Usage::Metric::InvalidMetricError.new(e.message))
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(InvalidError.new(e.message))
end
def load_all_from_path!(definitions, glob_path)
@ -100,7 +106,7 @@ module Gitlab
definition = load_from_file(path)
if previous = definitions[definition.key]
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(Gitlab::Usage::Metric::InvalidMetricError.new("Metric '#{definition.key}' is already defined in '#{previous.path}'"))
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(InvalidError.new("Metric '#{definition.key}' is already defined in '#{previous.path}'"))
end
definitions[definition.key] = definition

View file

@ -5,26 +5,7 @@ module Gitlab
class << self
# Build the Usage Ping JSON payload from metrics YAML definitions which have instrumentation class set
def uncached_data
::Gitlab::Usage::MetricDefinition.all.map do |definition|
instrumentation_class = definition.attributes[:instrumentation_class]
options = definition.attributes[:options]
if instrumentation_class.present?
metric_value = "Gitlab::Usage::Metrics::Instrumentations::#{instrumentation_class}".constantize.new(
time_frame: definition.attributes[:time_frame],
options: options).value
metric_payload(definition.key_path, metric_value)
else
{}
end
end.reduce({}, :deep_merge)
end
private
def metric_payload(key_path, value)
::Gitlab::Usage::Metrics::KeyPathProcessor.process(key_path, value)
::Gitlab::Usage::Metric.all.map(&:with_value).reduce({}, :deep_merge)
end
end
end

View file

@ -5,6 +5,10 @@ module Gitlab
SQL_METRIC_DEFAULT = -3
class << self
def uncached_data
super.with_indifferent_access.deep_merge(instrumentation_metrics_queries.with_indifferent_access)
end
def add_metric(metric, time_frame: 'none')
metric_class = "Gitlab::Usage::Metrics::Instrumentations::#{metric}".constantize
@ -43,6 +47,12 @@ module Gitlab
projects_jira_cloud_active: 0
}
end
private
def instrumentation_metrics_queries
::Gitlab::Usage::Metric.all.map(&:with_instrumentation).reduce({}, :deep_merge)
end
end
end
end

View file

@ -5,6 +5,10 @@ module Gitlab
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/41091
class UsageDataQueries < UsageData
class << self
def uncached_data
super.with_indifferent_access.deep_merge(instrumentation_metrics_queries.with_indifferent_access)
end
def add_metric(metric, time_frame: 'none')
metric_class = "Gitlab::Usage::Metrics::Instrumentations::#{metric}".constantize
@ -64,6 +68,12 @@ module Gitlab
def epics_deepest_relationship_level
{ epics_deepest_relationship_level: 0 }
end
private
def instrumentation_metrics_queries
::Gitlab::Usage::Metric.all.map(&:with_instrumentation).reduce({}, :deep_merge)
end
end
end
end

View file

@ -0,0 +1,74 @@
# frozen_string_literal: true

module Sidebars
  module Groups
    module Menus
      # Group sidebar menu that groups the Package Registry, Container
      # Registry and Dependency Proxy entries under a single
      # "Packages & Registries" item.
      class PackagesRegistriesMenu < ::Sidebars::Menu
        override :configure_menu_items
        def configure_menu_items
          add_item(packages_registry_menu_item)
          add_item(container_registry_menu_item)
          add_item(dependency_proxy_menu_item)
          true
        end

        override :link
        def link
          # The menu has no landing page of its own; it links to whichever
          # item is first renderable in the current context.
          renderable_items.first.link
        end

        override :title
        def title
          _('Packages & Registries')
        end

        override :sprite_icon
        def sprite_icon
          'package'
        end

        private

        # Package Registry entry; replaced by a nil item when the packages
        # feature is not enabled for the group.
        def packages_registry_menu_item
          return nil_item(:packages_registry) unless context.group.packages_feature_enabled?

          ::Sidebars::MenuItem.new(
            title: _('Package Registry'),
            link: group_packages_path(context.group),
            active_routes: { controller: 'groups/packages' },
            item_id: :packages_registry
          )
        end

        # Container Registry entry; requires the registry to be enabled
        # instance-wide and the current user to be able to read images.
        def container_registry_menu_item
          registry_visible = ::Gitlab.config.registry.enabled &&
            can?(context.current_user, :read_container_image, context.group)

          return nil_item(:container_registry) unless registry_visible

          ::Sidebars::MenuItem.new(
            title: _('Container Registry'),
            link: group_container_registries_path(context.group),
            active_routes: { controller: 'groups/registry/repositories' },
            item_id: :container_registry
          )
        end

        # Dependency Proxy entry; shown only when the feature is available
        # for the group.
        def dependency_proxy_menu_item
          return nil_item(:dependency_proxy) unless context.group.dependency_proxy_feature_available?

          ::Sidebars::MenuItem.new(
            title: _('Dependency Proxy'),
            link: group_dependency_proxy_path(context.group),
            active_routes: { controller: 'groups/dependency_proxies' },
            item_id: :dependency_proxy
          )
        end

        # Placeholder item that renders nothing for the given item id.
        def nil_item(item_id)
          ::Sidebars::NilMenuItem.new(item_id: item_id)
        end
      end
    end
  end
end

View file

@ -12,6 +12,7 @@ module Sidebars
add_menu(Sidebars::Groups::Menus::MergeRequestsMenu.new(context))
add_menu(Sidebars::Groups::Menus::CiCdMenu.new(context))
add_menu(Sidebars::Groups::Menus::KubernetesMenu.new(context))
add_menu(Sidebars::Groups::Menus::PackagesRegistriesMenu.new(context))
end
override :render_raw_menus_partial

View file

@ -3810,6 +3810,9 @@ msgstr ""
msgid "An error occurred while updating the notification settings. Please try again."
msgstr ""
msgid "An error occurred while uploading the file. Please try again."
msgstr ""
msgid "An error occurred while uploading the image. Please try again."
msgstr ""
@ -23889,9 +23892,6 @@ msgstr ""
msgid "PackageRegistry|published by %{author}"
msgstr ""
msgid "Packages"
msgstr ""
msgid "Packages & Registries"
msgstr ""

View file

@ -75,6 +75,14 @@ module QA
end
end
# Navigates to the group's Package Registry page: opens the
# "Packages & Registries" sidebar submenu and clicks its
# "Package Registry" entry.
def go_to_group_packages
  hover_group_packages do
    within_submenu do
      click_element(:sidebar_menu_item_link, menu_item: 'Package Registry')
    end
  end
end
private
def hover_issues
@ -101,6 +109,15 @@ module QA
yield
end
end
# Scrolls the "Packages & Registries" sidebar entry into view and hovers
# it so its submenu opens, then yields so the caller can interact with
# the revealed submenu.
def hover_group_packages
  within_sidebar do
    scroll_to_element(:sidebar_menu_link, menu_item: 'Packages & Registries')
    find_element(:sidebar_menu_link, menu_item: 'Packages & Registries').hover
    yield
  end
end
end
end
end

View file

@ -24,6 +24,7 @@ module QA
end
let(:github_repo) { ENV['QA_LARGE_GH_IMPORT_REPO'] || 'rspec/rspec-core' }
let(:import_max_duration) { ENV['QA_LARGE_GH_IMPORT_DURATION'] ? ENV['QA_LARGE_GH_IMPORT_DURATION'].to_i : 7200 }
let(:github_client) do
Octokit.middleware = Faraday::RackBuilder.new do |builder|
builder.response(:logger, logger, headers: false, bodies: false)
@ -137,7 +138,7 @@ module QA
raise "Import of '#{imported_project.name}' failed!" if status == 'failed'
end
end
expect(import_status).to eventually_eq('finished').within(duration: 3600, interval: 30)
expect(import_status).to eventually_eq('finished').within(duration: import_max_duration, interval: 30)
@import_time = Time.now - start
aggregate_failures do

View file

@ -26,8 +26,21 @@ exports[`content_editor/components/toolbar_link_button renders dropdown componen
</div>
</form>
</li>
<!---->
<!---->
<li role=\\"presentation\\" class=\\"gl-new-dropdown-divider\\">
<hr role=\\"separator\\" aria-orientation=\\"horizontal\\" class=\\"dropdown-divider\\">
</li>
<li role=\\"presentation\\" class=\\"gl-new-dropdown-item\\"><button role=\\"menuitem\\" type=\\"button\\" class=\\"dropdown-item\\">
<!---->
<!---->
<!---->
<div class=\\"gl-new-dropdown-item-text-wrapper\\">
<p class=\\"gl-new-dropdown-item-text-primary\\">
Upload file
</p>
<!---->
</div>
<!---->
</button></li> <input type=\\"file\\" name=\\"content_editor_attachment\\" class=\\"gl-display-none\\">
</div>
<!---->
</div>

View file

@ -1,6 +1,7 @@
import { GlButton, GlFormInputGroup } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import ToolbarImageButton from '~/content_editor/components/toolbar_image_button.vue';
import Attachment from '~/content_editor/extensions/attachment';
import Image from '~/content_editor/extensions/image';
import { createTestEditor, mockChainedCommands } from '../test_utils';
@ -31,7 +32,8 @@ describe('content_editor/components/toolbar_image_button', () => {
beforeEach(() => {
editor = createTestEditor({
extensions: [
Image.configure({
Image,
Attachment.configure({
renderMarkdown: jest.fn(),
uploadsPath: '/uploads/',
}),
@ -64,13 +66,13 @@ describe('content_editor/components/toolbar_image_button', () => {
});
it('uploads the selected image when file input changes', async () => {
const commands = mockChainedCommands(editor, ['focus', 'uploadImage', 'run']);
const commands = mockChainedCommands(editor, ['focus', 'uploadAttachment', 'run']);
const file = new File(['foo'], 'foo.png', { type: 'image/png' });
await selectFile(file);
expect(commands.focus).toHaveBeenCalled();
expect(commands.uploadImage).toHaveBeenCalledWith({ file });
expect(commands.uploadAttachment).toHaveBeenCalledWith({ file });
expect(commands.run).toHaveBeenCalled();
expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'image', value: 'upload' }]);

View file

@ -1,4 +1,4 @@
import { GlDropdown, GlDropdownDivider, GlButton, GlFormInputGroup } from '@gitlab/ui';
import { GlDropdown, GlButton, GlFormInputGroup } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import ToolbarLinkButton from '~/content_editor/components/toolbar_link_button.vue';
import Link from '~/content_editor/extensions/link';
@ -19,11 +19,18 @@ describe('content_editor/components/toolbar_link_button', () => {
});
};
const findDropdown = () => wrapper.findComponent(GlDropdown);
const findDropdownDivider = () => wrapper.findComponent(GlDropdownDivider);
const findLinkURLInput = () => wrapper.findComponent(GlFormInputGroup).find('input[type="text"]');
const findApplyLinkButton = () => wrapper.findComponent(GlButton);
const findRemoveLinkButton = () => wrapper.findByText('Remove link');
const selectFile = async (file) => {
const input = wrapper.find({ ref: 'fileSelector' });
// override the property definition because `input.files` isn't directly modifyable
Object.defineProperty(input.element, 'files', { value: [file], writable: true });
await input.trigger('change');
};
beforeEach(() => {
editor = createTestEditor();
});
@ -51,8 +58,11 @@ describe('content_editor/components/toolbar_link_button', () => {
expect(findDropdown().props('toggleClass')).toEqual({ active: true });
});
it('does not display the upload file option', () => {
expect(wrapper.findByText('Upload file').exists()).toBe(false);
});
it('displays a remove link dropdown option', () => {
expect(findDropdownDivider().exists()).toBe(true);
expect(wrapper.findByText('Remove link').exists()).toBe(true);
});
@ -107,7 +117,7 @@ describe('content_editor/components/toolbar_link_button', () => {
});
});
describe('when there is not an active link', () => {
describe('when there is no active link', () => {
beforeEach(() => {
jest.spyOn(editor, 'isActive');
editor.isActive.mockReturnValueOnce(false);
@ -118,8 +128,11 @@ describe('content_editor/components/toolbar_link_button', () => {
expect(findDropdown().props('toggleClass')).toEqual({ active: false });
});
it('displays the upload file option', () => {
expect(wrapper.findByText('Upload file').exists()).toBe(true);
});
it('does not display a remove link dropdown option', () => {
expect(findDropdownDivider().exists()).toBe(false);
expect(wrapper.findByText('Remove link').exists()).toBe(false);
});
@ -138,6 +151,19 @@ describe('content_editor/components/toolbar_link_button', () => {
expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'link' }]);
});
it('uploads the selected image when file input changes', async () => {
const commands = mockChainedCommands(editor, ['focus', 'uploadAttachment', 'run']);
const file = new File(['foo'], 'foo.png', { type: 'image/png' });
await selectFile(file);
expect(commands.focus).toHaveBeenCalled();
expect(commands.uploadAttachment).toHaveBeenCalledWith({ file });
expect(commands.run).toHaveBeenCalled();
expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'link' }]);
});
});
describe('when the user displays the dropdown', () => {

View file

@ -0,0 +1,235 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { once } from 'lodash';
import waitForPromises from 'helpers/wait_for_promises';
import Attachment from '~/content_editor/extensions/attachment';
import Image from '~/content_editor/extensions/image';
import Link from '~/content_editor/extensions/link';
import Loading from '~/content_editor/extensions/loading';
import httpStatus from '~/lib/utils/http_status';
import { loadMarkdownApiResult } from '../markdown_processing_examples';
import { createTestEditor, createDocBuilder } from '../test_utils';

// Spec for the Attachment extension: paste/drop handling and the
// `uploadAttachment` command for both image and generic (zip) files.
// The describe label previously said `extensions/image` — a copy-paste
// leftover from the old image spec — which mislabels failures in CI output.
describe('content_editor/extensions/attachment', () => {
  let tiptapEditor;
  let eq;
  let doc;
  let p;
  let image;
  let loading;
  let link;
  let renderMarkdown;
  let mock;

  const uploadsPath = '/uploads/';
  const imageFile = new File(['foo'], 'test-file.png', { type: 'image/png' });
  const attachmentFile = new File(['foo'], 'test-file.zip', { type: 'application/zip' });

  beforeEach(() => {
    renderMarkdown = jest.fn();

    tiptapEditor = createTestEditor({
      extensions: [Loading, Link, Image, Attachment.configure({ renderMarkdown, uploadsPath })],
    });

    ({
      builders: { doc, p, image, loading, link },
      eq,
    } = createDocBuilder({
      tiptapEditor,
      names: {
        loading: { markType: Loading.name },
        image: { nodeType: Image.name },
        link: { nodeType: Link.name },
      },
    }));

    mock = new MockAdapter(axios);
  });

  afterEach(() => {
    mock.reset();
  });

  it.each`
    eventType  | propName         | eventData                                         | output
    ${'paste'} | ${'handlePaste'} | ${{ clipboardData: { files: [attachmentFile] } }} | ${true}
    ${'paste'} | ${'handlePaste'} | ${{ clipboardData: { files: [] } }}               | ${undefined}
    ${'drop'}  | ${'handleDrop'}  | ${{ dataTransfer: { files: [attachmentFile] } }}  | ${true}
  `('handles $eventType properly', ({ eventType, propName, eventData, output }) => {
    const event = Object.assign(new Event(eventType), eventData);
    const handled = tiptapEditor.view.someProp(propName, (eventHandler) => {
      return eventHandler(tiptapEditor.view, event);
    });

    expect(handled).toBe(output);
  });

  describe('uploadAttachment command', () => {
    let initialDoc;

    beforeEach(() => {
      initialDoc = doc(p(''));
      tiptapEditor.commands.setContent(initialDoc.toJSON());
    });

    describe('when the file has image mime type', () => {
      const base64EncodedFile = 'data:image/png;base64,Zm9v';

      beforeEach(() => {
        renderMarkdown.mockResolvedValue(
          loadMarkdownApiResult('project_wiki_attachment_image').body,
        );
      });

      describe('when uploading succeeds', () => {
        const successResponse = {
          link: {
            markdown: '![test-file](test-file.png)',
          },
        };

        beforeEach(() => {
          mock.onPost().reply(httpStatus.OK, successResponse);
        });

        it('inserts an image with src set to the encoded image file and uploading true', (done) => {
          const expectedDoc = doc(p(image({ uploading: true, src: base64EncodedFile })));

          tiptapEditor.on(
            'update',
            // `once` guards against the later success update re-invoking `done`
            once(() => {
              expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
              done();
            }),
          );

          tiptapEditor.commands.uploadAttachment({ file: imageFile });
        });

        it('updates the inserted image with canonicalSrc when upload is successful', async () => {
          const expectedDoc = doc(
            p(
              image({
                canonicalSrc: 'test-file.png',
                src: base64EncodedFile,
                alt: 'test-file',
                uploading: false,
              }),
            ),
          );

          tiptapEditor.commands.uploadAttachment({ file: imageFile });
          await waitForPromises();

          expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
        });
      });

      describe('when uploading request fails', () => {
        beforeEach(() => {
          mock.onPost().reply(httpStatus.INTERNAL_SERVER_ERROR);
        });

        it('resets the doc to original state', async () => {
          const expectedDoc = doc(p(''));

          tiptapEditor.commands.uploadAttachment({ file: imageFile });
          await waitForPromises();

          expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
        });

        it('emits an error event that includes an error message', (done) => {
          tiptapEditor.commands.uploadAttachment({ file: imageFile });

          tiptapEditor.on('error', (message) => {
            expect(message).toBe('An error occurred while uploading the image. Please try again.');
            done();
          });
        });
      });
    });

    describe('when the file has a zip (or any other attachment) mime type', () => {
      const markdownApiResult = loadMarkdownApiResult('project_wiki_attachment_link').body;

      beforeEach(() => {
        renderMarkdown.mockResolvedValue(markdownApiResult);
      });

      describe('when uploading succeeds', () => {
        const successResponse = {
          link: {
            markdown: '[test-file](test-file.zip)',
          },
        };

        beforeEach(() => {
          mock.onPost().reply(httpStatus.OK, successResponse);
        });

        it('inserts a loading mark', (done) => {
          const expectedDoc = doc(p(loading({ label: 'test-file' })));

          tiptapEditor.on(
            'update',
            once(() => {
              expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
              done();
            }),
          );

          tiptapEditor.commands.uploadAttachment({ file: attachmentFile });
        });

        it('updates the loading mark with a link with canonicalSrc and href attrs', async () => {
          // The fixture embeds a generated group/project path; extract it so
          // the expected href matches whatever the fixture produced.
          const [, group, project] = markdownApiResult.match(/\/(group[0-9]+)\/(project[0-9]+)\//);
          const expectedDoc = doc(
            p(
              link(
                {
                  canonicalSrc: 'test-file.zip',
                  href: `/${group}/${project}/-/wikis/test-file.zip`,
                },
                'test-file',
              ),
            ),
          );

          tiptapEditor.commands.uploadAttachment({ file: attachmentFile });
          await waitForPromises();

          expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
        });
      });

      describe('when uploading request fails', () => {
        beforeEach(() => {
          mock.onPost().reply(httpStatus.INTERNAL_SERVER_ERROR);
        });

        it('resets the doc to original state', async () => {
          const expectedDoc = doc(p(''));

          tiptapEditor.commands.uploadAttachment({ file: attachmentFile });
          await waitForPromises();

          expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
        });

        it('emits an error event that includes an error message', (done) => {
          tiptapEditor.commands.uploadAttachment({ file: attachmentFile });

          tiptapEditor.on('error', (message) => {
            expect(message).toBe('An error occurred while uploading the file. Please try again.');
            done();
          });
        });
      });
    });
  });
});

View file

@ -1,193 +0,0 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { once } from 'lodash';
import waitForPromises from 'helpers/wait_for_promises';
import Image from '~/content_editor/extensions/image';
import httpStatus from '~/lib/utils/http_status';
import { loadMarkdownApiResult } from '../markdown_processing_examples';
import { createTestEditor, createDocBuilder } from '../test_utils';

// Spec for the Image extension: mime-type filtering on paste/drop and the
// `uploadImage` command. The standalone "handles paste event when mime type
// is correct" test duplicated the validFile/paste row of the `it.each` table
// below, so it has been removed.
describe('content_editor/extensions/image', () => {
  let tiptapEditor;
  let eq;
  let doc;
  let p;
  let image;
  let renderMarkdown;
  let mock;

  const uploadsPath = '/uploads/';
  const validFile = new File(['foo'], 'foo.png', { type: 'image/png' });
  const invalidFile = new File(['foo'], 'bar.html', { type: 'text/html' });

  beforeEach(() => {
    renderMarkdown = jest
      .fn()
      .mockResolvedValue(loadMarkdownApiResult('project_wiki_attachment_image').body);

    tiptapEditor = createTestEditor({
      extensions: [Image.configure({ renderMarkdown, uploadsPath })],
    });

    ({
      builders: { doc, p, image },
      eq,
    } = createDocBuilder({
      tiptapEditor,
      names: { image: { nodeType: Image.name } },
    }));

    mock = new MockAdapter(axios);
  });

  afterEach(() => {
    mock.reset();
  });

  it.each`
    file           | valid    | description
    ${validFile}   | ${true}  | ${'handles paste event when mime type is valid'}
    ${invalidFile} | ${false} | ${'does not handle paste event when mime type is invalid'}
  `('$description', ({ file, valid }) => {
    const pasteEvent = Object.assign(new Event('paste'), {
      clipboardData: {
        files: [file],
      },
    });
    let handled;

    tiptapEditor.view.someProp('handlePaste', (eventHandler) => {
      handled = eventHandler(tiptapEditor.view, pasteEvent);
    });

    expect(handled).toBe(valid);
  });

  it.each`
    file           | valid    | description
    ${validFile}   | ${true}  | ${'handles drop event when mime type is valid'}
    ${invalidFile} | ${false} | ${'does not handle drop event when mime type is invalid'}
  `('$description', ({ file, valid }) => {
    const dropEvent = Object.assign(new Event('drop'), {
      dataTransfer: {
        files: [file],
      },
    });
    let handled;

    tiptapEditor.view.someProp('handleDrop', (eventHandler) => {
      handled = eventHandler(tiptapEditor.view, dropEvent);
    });

    expect(handled).toBe(valid);
  });

  describe('uploadImage command', () => {
    describe('when file has correct mime type', () => {
      let initialDoc;
      const base64EncodedFile = 'data:image/png;base64,Zm9v';

      beforeEach(() => {
        initialDoc = doc(p(''));
        tiptapEditor.commands.setContent(initialDoc.toJSON());
      });

      describe('when uploading image succeeds', () => {
        const successResponse = {
          link: {
            markdown: '[image](/uploads/25265/image.png)',
          },
        };

        beforeEach(() => {
          mock.onPost().reply(httpStatus.OK, successResponse);
        });

        it('inserts an image with src set to the encoded image file and uploading true', (done) => {
          const expectedDoc = doc(p(image({ uploading: true, src: base64EncodedFile })));

          tiptapEditor.on(
            'update',
            // `once` guards against the later success update re-invoking `done`
            once(() => {
              expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
              done();
            }),
          );

          tiptapEditor.commands.uploadImage({ file: validFile });
        });

        it('updates the inserted image with canonicalSrc when upload is successful', async () => {
          const expectedDoc = doc(
            p(
              image({
                canonicalSrc: 'test-file.png',
                src: base64EncodedFile,
                alt: 'test file',
                uploading: false,
              }),
            ),
          );

          tiptapEditor.commands.uploadImage({ file: validFile });
          await waitForPromises();

          expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
        });
      });

      describe('when uploading image request fails', () => {
        beforeEach(() => {
          mock.onPost().reply(httpStatus.INTERNAL_SERVER_ERROR);
        });

        it('resets the doc to original state', async () => {
          const expectedDoc = doc(p(''));

          tiptapEditor.commands.uploadImage({ file: validFile });
          await waitForPromises();

          expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
        });

        it('emits an error event that includes an error message', (done) => {
          tiptapEditor.commands.uploadImage({ file: validFile });

          tiptapEditor.on('error', (message) => {
            expect(message).toBe('An error occurred while uploading the image. Please try again.');
            done();
          });
        });
      });
    });

    describe('when file does not have correct mime type', () => {
      let initialDoc;

      beforeEach(() => {
        initialDoc = doc(p(''));
        tiptapEditor.commands.setContent(initialDoc.toJSON());
      });

      it('does not start the upload image process', () => {
        tiptapEditor.commands.uploadImage({ file: invalidFile });

        expect(eq(tiptapEditor.state.doc, initialDoc)).toBe(true);
      });
    });
  });
});

View file

@ -52,9 +52,9 @@ describe('content_editor/services/create_editor', () => {
expect(() => createContentEditor()).toThrow(PROVIDE_SERIALIZER_OR_RENDERER_ERROR);
});
it('provides uploadsPath and renderMarkdown function to Image extension', () => {
it('provides uploadsPath and renderMarkdown function to Attachment extension', () => {
expect(
editor.tiptapEditor.extensionManager.extensions.find((e) => e.name === 'image').options,
editor.tiptapEditor.extensionManager.extensions.find((e) => e.name === 'attachment').options,
).toMatchObject({
uploadsPath,
renderMarkdown,

View file

@ -1,9 +1,9 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { uploadFile } from '~/content_editor/services/upload_file';
import { uploadFile } from '~/content_editor/services/upload_helpers';
import httpStatus from '~/lib/utils/http_status';
describe('content_editor/services/upload_file', () => {
describe('content_editor/services/upload_helpers', () => {
const uploadsPath = '/uploads';
const file = new File(['content'], 'file.txt');
// TODO: Replace with automated fixture

View file

@ -258,7 +258,11 @@ describe('issue_note', () => {
},
});
noteBodyComponent.vm.$emit('handleFormUpdate', noteBody, null, () => {});
noteBodyComponent.vm.$emit('handleFormUpdate', {
noteText: noteBody,
parentElement: null,
callback: () => {},
});
await waitForPromises();
expect(alertSpy).not.toHaveBeenCalled();
@ -287,14 +291,18 @@ describe('issue_note', () => {
const noteBody = wrapper.findComponent(NoteBody);
noteBody.vm.resetAutoSave = () => {};
noteBody.vm.$emit('handleFormUpdate', updatedText, null, () => {});
noteBody.vm.$emit('handleFormUpdate', {
noteText: updatedText,
parentElement: null,
callback: () => {},
});
await wrapper.vm.$nextTick();
let noteBodyProps = noteBody.props();
expect(noteBodyProps.note.note_html).toBe(`<p>${updatedText}</p>\n`);
noteBody.vm.$emit('cancelForm');
noteBody.vm.$emit('cancelForm', {});
await wrapper.vm.$nextTick();
noteBodyProps = noteBody.props();
@ -305,7 +313,12 @@ describe('issue_note', () => {
describe('formUpdateHandler', () => {
const updateNote = jest.fn();
const params = ['', null, jest.fn(), ''];
const params = {
noteText: '',
parentElement: null,
callback: jest.fn(),
resolveDiscussion: false,
};
const updateActions = () => {
store.hotUpdate({
@ -325,14 +338,14 @@ describe('issue_note', () => {
it('responds to handleFormUpdate', () => {
createWrapper();
updateActions();
wrapper.findComponent(NoteBody).vm.$emit('handleFormUpdate', ...params);
wrapper.findComponent(NoteBody).vm.$emit('handleFormUpdate', params);
expect(wrapper.emitted('handleUpdateNote')).toBeTruthy();
});
it('does not stringify empty position', () => {
createWrapper();
updateActions();
wrapper.findComponent(NoteBody).vm.$emit('handleFormUpdate', ...params);
wrapper.findComponent(NoteBody).vm.$emit('handleFormUpdate', params);
expect(updateNote.mock.calls[0][1].note.note.position).toBeUndefined();
});
@ -341,7 +354,7 @@ describe('issue_note', () => {
const expectation = JSON.stringify(position);
createWrapper({ note: { ...note, position } });
updateActions();
wrapper.findComponent(NoteBody).vm.$emit('handleFormUpdate', ...params);
wrapper.findComponent(NoteBody).vm.$emit('handleFormUpdate', params);
expect(updateNote.mock.calls[0][1].note.note.position).toBe(expectation);
});
});

View file

@ -3,6 +3,7 @@ import Vuex from 'vuex';
import CodequalityIssueBody from '~/reports/codequality_report/components/codequality_issue_body.vue';
import GroupedCodequalityReportsApp from '~/reports/codequality_report/grouped_codequality_reports_app.vue';
import { getStoreConfig } from '~/reports/codequality_report/store';
import { STATUS_NOT_FOUND } from '~/reports/constants';
import { parsedReportIssues } from './mock_data';
const localVue = createLocalVue();
@ -14,8 +15,6 @@ describe('Grouped code quality reports app', () => {
const PATHS = {
codequalityHelpPath: 'codequality_help.html',
basePath: 'base.json',
headPath: 'head.json',
baseBlobPath: 'base/blob/path/',
headBlobPath: 'head/blob/path/',
};
@ -127,21 +126,6 @@ describe('Grouped code quality reports app', () => {
});
});
describe('when there is a head report but no base report', () => {
beforeEach(() => {
mockStore.state.basePath = null;
mockStore.state.hasError = true;
});
it('renders error text', () => {
expect(findWidget().text()).toContain('Failed to load codeclimate report');
});
it('renders a help icon with more information', () => {
expect(findWidget().find('[data-testid="question-icon"]').exists()).toBe(true);
});
});
describe('on error', () => {
beforeEach(() => {
mockStore.state.hasError = true;
@ -154,5 +138,15 @@ describe('Grouped code quality reports app', () => {
it('does not render a help icon', () => {
expect(findWidget().find('[data-testid="question-icon"]').exists()).toBe(false);
});
describe('when base report was not found', () => {
beforeEach(() => {
mockStore.state.status = STATUS_NOT_FOUND;
});
it('renders a help icon with more information', () => {
expect(findWidget().find('[data-testid="question-icon"]').exists()).toBe(true);
});
});
});
});

View file

@ -5,6 +5,7 @@ import axios from '~/lib/utils/axios_utils';
import createStore from '~/reports/codequality_report/store';
import * as actions from '~/reports/codequality_report/store/actions';
import * as types from '~/reports/codequality_report/store/mutation_types';
import { STATUS_NOT_FOUND } from '~/reports/constants';
import { reportIssues, parsedReportIssues } from '../mock_data';
const pollInterval = 123;
@ -24,8 +25,6 @@ describe('Codequality Reports actions', () => {
describe('setPaths', () => {
it('should commit SET_PATHS mutation', (done) => {
const paths = {
basePath: 'basePath',
headPath: 'headPath',
baseBlobPath: 'baseBlobPath',
headBlobPath: 'headBlobPath',
reportsPath: 'reportsPath',
@ -49,7 +48,6 @@ describe('Codequality Reports actions', () => {
beforeEach(() => {
localState.reportsPath = endpoint;
localState.basePath = '/base/path';
mock = new MockAdapter(axios);
});
@ -92,16 +90,17 @@ describe('Codequality Reports actions', () => {
});
});
describe('with no base path', () => {
describe('when base report is not found', () => {
it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
localState.basePath = null;
const data = { status: STATUS_NOT_FOUND };
mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(200, data);
testAction(
actions.fetchReports,
null,
localState,
[{ type: types.REQUEST_REPORTS }],
[{ type: 'receiveReportsError' }],
[{ type: 'receiveReportsError', payload: data }],
done,
);
});

View file

@ -1,6 +1,6 @@
import createStore from '~/reports/codequality_report/store';
import * as getters from '~/reports/codequality_report/store/getters';
import { LOADING, ERROR, SUCCESS } from '~/reports/constants';
import { LOADING, ERROR, SUCCESS, STATUS_NOT_FOUND } from '~/reports/constants';
describe('Codequality reports store getters', () => {
let localState;
@ -76,10 +76,9 @@ describe('Codequality reports store getters', () => {
});
describe('codequalityPopover', () => {
describe('when head report is available but base report is not', () => {
describe('when base report is not available', () => {
it('returns a popover with a documentation link', () => {
localState.headPath = 'head.json';
localState.basePath = undefined;
localState.status = STATUS_NOT_FOUND;
localState.helpPath = 'codequality_help.html';
expect(getters.codequalityPopover(localState).title).toEqual(

View file

@ -1,5 +1,6 @@
import createStore from '~/reports/codequality_report/store';
import mutations from '~/reports/codequality_report/store/mutations';
import { STATUS_NOT_FOUND } from '~/reports/constants';
describe('Codequality Reports mutations', () => {
let localState;
@ -12,24 +13,18 @@ describe('Codequality Reports mutations', () => {
describe('SET_PATHS', () => {
it('sets paths to given values', () => {
const basePath = 'base.json';
const headPath = 'head.json';
const baseBlobPath = 'base/blob/path/';
const headBlobPath = 'head/blob/path/';
const reportsPath = 'reports.json';
const helpPath = 'help.html';
mutations.SET_PATHS(localState, {
basePath,
headPath,
baseBlobPath,
headBlobPath,
reportsPath,
helpPath,
});
expect(localState.basePath).toEqual(basePath);
expect(localState.headPath).toEqual(headPath);
expect(localState.baseBlobPath).toEqual(baseBlobPath);
expect(localState.headBlobPath).toEqual(headBlobPath);
expect(localState.reportsPath).toEqual(reportsPath);
@ -58,9 +53,10 @@ describe('Codequality Reports mutations', () => {
expect(localState.hasError).toEqual(false);
});
it('clears statusReason', () => {
it('clears status and statusReason', () => {
mutations.RECEIVE_REPORTS_SUCCESS(localState, {});
expect(localState.status).toEqual('');
expect(localState.statusReason).toEqual('');
});
@ -86,6 +82,13 @@ describe('Codequality Reports mutations', () => {
expect(localState.hasError).toEqual(true);
});
it('sets status based on error object', () => {
const error = { status: STATUS_NOT_FOUND };
mutations.RECEIVE_REPORTS_ERROR(localState, error);
expect(localState.status).toEqual(error.status);
});
it('sets statusReason to string from error response data', () => {
const data = { status_reason: 'This merge request does not have codequality reports' };
const error = { response: { data } };

View file

@ -234,14 +234,11 @@ export default {
can_revert_on_current_merge_request: true,
can_cherry_pick_on_current_merge_request: true,
},
codeclimate: {
head_path: 'head.json',
base_path: 'base.json',
},
blob_path: {
base_path: 'blob_path',
head_path: 'blob_path',
},
codequality_reports_path: 'codequality_reports.json',
codequality_help_path: 'code_quality.html',
target_branch_path: '/root/acets-app/branches/main',
source_branch_path: '/root/acets-app/branches/daaaa',

View file

@ -263,42 +263,6 @@ RSpec.describe GroupsHelper do
end
end
describe '#group_container_registry_nav' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:user) { create(:user) }
before do
stub_container_registry_config(enabled: true)
allow(helper).to receive(:current_user) { user }
allow(helper).to receive(:can?).with(user, :read_container_image, group) { true }
helper.instance_variable_set(:@group, group)
end
subject { helper.group_container_registry_nav? }
context 'when container registry is enabled' do
it { is_expected.to be_truthy }
it 'is disabled for guest' do
allow(helper).to receive(:can?).with(user, :read_container_image, group) { false }
expect(subject).to be false
end
end
context 'when container registry is not enabled' do
before do
stub_container_registry_config(enabled: false)
end
it { is_expected.to be_falsy }
it 'is disabled for guests' do
allow(helper).to receive(:can?).with(user, :read_container_image, group) { false }
expect(subject).to be false
end
end
end
describe '#group_sidebar_links' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:user) { create(:user) }

View file

@ -378,42 +378,6 @@ RSpec.describe Gitlab::Database::Connection do
end
end
describe '#create_connection_pool' do
it 'creates a new connection pool with specific pool size' do
pool = connection.create_connection_pool(5)
begin
expect(pool)
.to be_kind_of(ActiveRecord::ConnectionAdapters::ConnectionPool)
expect(pool.db_config.pool).to eq(5)
ensure
pool.disconnect!
end
end
it 'allows setting of a custom hostname' do
pool = connection.create_connection_pool(5, '127.0.0.1')
begin
expect(pool.db_config.host).to eq('127.0.0.1')
ensure
pool.disconnect!
end
end
it 'allows setting of a custom hostname and port' do
pool = connection.create_connection_pool(5, '127.0.0.1', 5432)
begin
expect(pool.db_config.host).to eq('127.0.0.1')
expect(pool.db_config.configuration_hash[:port]).to eq(5432)
ensure
pool.disconnect!
end
end
end
describe '#cached_column_exists?' do
it 'only retrieves data once' do
expect(connection.scope.connection)

View file

@ -3,25 +3,17 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::HostList do
def expect_metrics(hosts)
expect(Gitlab::Metrics.registry.get(:db_load_balancing_hosts).get({})).to eq(hosts)
end
before do
allow(Gitlab::Database.main)
.to receive(:create_connection_pool)
.and_return(ActiveRecord::Base.connection_pool)
end
let(:db_host) { ActiveRecord::Base.connection_pool.db_config.host }
let(:load_balancer) { double(:load_balancer) }
let(:host_count) { 2 }
let(:hosts) { Array.new(host_count) { Gitlab::Database::LoadBalancing::Host.new(db_host, load_balancer, port: 5432) } }
let(:host_list) { described_class.new(hosts) }
let(:host_list) do
hosts = Array.new(host_count) do
Gitlab::Database::LoadBalancing::Host.new('localhost', load_balancer, port: 5432)
before do
# each call generate a new replica pool
allow(load_balancer).to receive(:create_replica_connection_pool) do
double(:replica_connection_pool)
end
described_class.new(hosts)
end
describe '#initialize' do
@ -42,8 +34,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::HostList do
context 'with ports' do
it 'returns the host names of all hosts' do
hosts = [
['localhost', 5432],
['localhost', 5432]
[db_host, 5432],
[db_host, 5432]
]
expect(host_list.host_names_and_ports).to eq(hosts)
@ -51,18 +43,12 @@ RSpec.describe Gitlab::Database::LoadBalancing::HostList do
end
context 'without ports' do
let(:host_list) do
hosts = Array.new(2) do
Gitlab::Database::LoadBalancing::Host.new('localhost', load_balancer)
end
described_class.new(hosts)
end
let(:hosts) { Array.new(2) { Gitlab::Database::LoadBalancing::Host.new(db_host, load_balancer) } }
it 'returns the host names of all hosts' do
hosts = [
['localhost', nil],
['localhost', nil]
[db_host, nil],
[db_host, nil]
]
expect(host_list.host_names_and_ports).to eq(hosts)
@ -71,10 +57,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::HostList do
end
describe '#manage_pool?' do
before do
allow(Gitlab::Database.main).to receive(:create_connection_pool) { double(:connection) }
end
context 'when the testing pool belongs to one host of the host list' do
it 'returns true' do
pool = host_list.hosts.first.pool
@ -185,4 +167,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::HostList do
end
end
end
def expect_metrics(hosts)
expect(Gitlab::Metrics.registry.get(:db_load_balancing_hosts).get({})).to eq(hosts)
end
end

View file

@ -3,15 +3,16 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::Host do
let(:load_balancer) do
Gitlab::Database::LoadBalancing::LoadBalancer.new(%w[localhost])
let(:load_balancer) { Gitlab::Database::LoadBalancing::LoadBalancer.new }
let(:host) do
Gitlab::Database::LoadBalancing::Host.new('localhost', load_balancer)
end
let(:host) { load_balancer.host_list.hosts.first }
before do
allow(Gitlab::Database.main).to receive(:create_connection_pool)
.and_return(ActiveRecord::Base.connection_pool)
allow(load_balancer).to receive(:create_replica_connection_pool) do
ActiveRecord::Base.connection_pool
end
end
def raise_and_wrap(wrapper, original)
@ -63,7 +64,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
expect(host.pool)
.to receive(:disconnect!)
host.disconnect!(1)
host.disconnect!(timeout: 1)
end
end

View file

@ -3,21 +3,22 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
let(:pool) { Gitlab::Database.main.create_connection_pool(2) }
let(:conflict_error) { Class.new(RuntimeError) }
let(:lb) { described_class.new(%w(localhost localhost)) }
let(:db_host) { ActiveRecord::Base.connection_pool.db_config.host }
let(:lb) { described_class.new([db_host, db_host]) }
let(:request_cache) { lb.send(:request_cache) }
before do
allow(Gitlab::Database.main).to receive(:create_connection_pool)
.and_return(pool)
stub_const(
'Gitlab::Database::LoadBalancing::LoadBalancer::PG::TRSerializationFailure',
conflict_error
)
end
after do |example|
lb.disconnect!(timeout: 0) unless example.metadata[:skip_disconnect]
end
def raise_and_wrap(wrapper, original)
raise original
rescue original.class
@ -239,7 +240,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
context 'when the connection comes from the primary pool' do
it 'returns :primary' do
connection = double(:connection)
allow(connection).to receive(:pool).and_return(ActiveRecord::Base.connection_pool)
allow(connection).to receive(:pool).and_return(lb.send(:pool))
expect(lb.db_role_for_connection(connection)).to be(:primary)
end
@ -271,8 +272,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
it 'does not create conflicts with other load balancers when caching hosts' do
lb1 = described_class.new(%w(localhost localhost), ActiveRecord::Base)
lb2 = described_class.new(%w(localhost localhost), Ci::CiDatabaseRecord)
lb1 = described_class.new([db_host, db_host], ActiveRecord::Base)
lb2 = described_class.new([db_host, db_host], Ci::CiDatabaseRecord)
host1 = lb1.host
host2 = lb2.host
@ -456,4 +457,45 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
end
end
describe '#create_replica_connection_pool' do
it 'creates a new connection pool with specific pool size and name' do
with_replica_pool(5, 'other_host') do |replica_pool|
expect(replica_pool)
.to be_kind_of(ActiveRecord::ConnectionAdapters::ConnectionPool)
expect(replica_pool.db_config.host).to eq('other_host')
expect(replica_pool.db_config.pool).to eq(5)
expect(replica_pool.db_config.name).to end_with("_replica")
end
end
it 'allows setting of a custom hostname and port' do
with_replica_pool(5, 'other_host', 5432) do |replica_pool|
expect(replica_pool.db_config.host).to eq('other_host')
expect(replica_pool.db_config.configuration_hash[:port]).to eq(5432)
end
end
it 'does not modify connection class pool' do
expect { with_replica_pool(5) { } }.not_to change { ActiveRecord::Base.connection_pool }
end
def with_replica_pool(*args)
pool = lb.create_replica_connection_pool(*args)
yield pool
ensure
pool&.disconnect!
end
end
describe '#disconnect!' do
it 'calls disconnect on all hosts with a timeout', :skip_disconnect do
expect_next_instances_of(Gitlab::Database::LoadBalancing::Host, 2) do |host|
expect(host).to receive(:disconnect!).with(timeout: 30)
end
lb.disconnect!(timeout: 30)
end
end
end

View file

@ -169,7 +169,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
expect(host)
.to receive(:disconnect!)
.with(2)
.with(timeout: 2)
service.replace_hosts([address_bar])
end

View file

@ -3,10 +3,6 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing do
before do
stub_env('ENABLE_LOAD_BALANCING_FOR_FOSS', 'true')
end
describe '.proxy' do
before do
@previous_proxy = ActiveRecord::Base.load_balancing_proxy

View file

@ -87,14 +87,14 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
end
it 'raise exception' do
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::Metric::InvalidMetricError))
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
described_class.new(path, attributes).validate!
end
context 'with skip_validation' do
it 'raise exception if skip_validation: false' do
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::Metric::InvalidMetricError))
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
described_class.new(path, attributes.merge( { skip_validation: false } )).validate!
end
@ -113,7 +113,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
attributes[:status] = 'broken'
attributes.delete(:repair_issue_url)
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::Metric::InvalidMetricError))
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
described_class.new(path, attributes).validate!
end
@ -173,7 +173,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
write_metric(metric1, path, yaml_content)
write_metric(metric2, path, yaml_content)
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(instance_of(Gitlab::Usage::Metric::InvalidMetricError))
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
subject
end

View file

@ -3,27 +3,46 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metric do
describe '#definition' do
it 'returns key_path metric definiton' do
expect(described_class.new(key_path: 'uuid').definition).to be_an(Gitlab::Usage::MetricDefinition)
let!(:issue) { create(:issue) }
let(:attributes) do
{
data_category: "Operational",
key_path: "counts.issues",
description: "Count of Issues created",
product_section: "dev",
product_stage: "plan",
product_group: "group::plan",
product_category: "issue_tracking",
value_type: "number",
status: "data_available",
time_frame: "all",
data_source: "database",
instrumentation_class: "CountIssuesMetric",
distribution: %w(ce ee),
tier: %w(free premium ultimate)
}
end
let(:issue_count_metric_definiton) do
double(:issue_count_metric_definiton,
attributes.merge({ attributes: attributes })
)
end
before do
allow(ApplicationRecord.connection).to receive(:transaction_open?).and_return(false)
end
describe '#with_value' do
it 'returns key_path metric with the corresponding value' do
expect(described_class.new(issue_count_metric_definiton).with_value).to eq({ counts: { issues: 1 } })
end
end
describe '#unflatten_default_path' do
using RSpec::Parameterized::TableSyntax
where(:key_path, :value, :expected_hash) do
'uuid' | nil | { uuid: nil }
'uuid' | '1111' | { uuid: '1111' }
'counts.issues' | nil | { counts: { issues: nil } }
'counts.issues' | 100 | { counts: { issues: 100 } }
'usage_activity_by_stage.verify.ci_builds' | 100 | { usage_activity_by_stage: { verify: { ci_builds: 100 } } }
end
with_them do
subject { described_class.new(key_path: key_path, value: value).unflatten_key_path }
it { is_expected.to eq(expected_hash) }
describe '#with_instrumentation' do
it 'returns key_path metric with the corresponding generated query' do
expect(described_class.new(issue_count_metric_definiton).with_instrumentation).to eq({ counts: { issues: "SELECT COUNT(\"issues\".\"id\") FROM \"issues\"" } })
end
end
end

View file

@ -0,0 +1,163 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
let_it_be(:owner) { create(:user) }
let_it_be(:group) do
build(:group, :private).tap do |g|
g.add_owner(owner)
end
end
let(:user) { owner }
let(:context) { Sidebars::Groups::Context.new(current_user: user, container: group) }
let(:menu) { described_class.new(context) }
describe '#render?' do
context 'when menu has menu items to show' do
it 'returns true' do
expect(menu.render?).to eq true
end
end
context 'when menu does not have any menu item to show' do
it 'returns false' do
stub_container_registry_config(enabled: false)
stub_config(packages: { enabled: false })
stub_config(dependency_proxy: { enabled: false })
expect(menu.render?).to eq false
end
end
end
describe '#link' do
let(:registry_enabled) { true }
let(:packages_enabled) { true }
before do
stub_container_registry_config(enabled: registry_enabled)
stub_config(packages: { enabled: packages_enabled })
stub_config(dependency_proxy: { enabled: true })
end
subject { menu.link }
context 'when Packages Registry is visible' do
it 'menu link points to Packages Registry page' do
expect(subject).to eq find_menu(menu, :packages_registry).link
end
end
context 'when Packages Registry is not visible' do
let(:packages_enabled) { false }
it 'menu link points to Container Registry page' do
expect(subject).to eq find_menu(menu, :container_registry).link
end
context 'when Container Registry is not visible' do
let(:registry_enabled) { false }
it 'menu link points to Dependency Proxy page' do
expect(subject).to eq find_menu(menu, :dependency_proxy).link
end
end
end
end
describe 'Menu items' do
subject { find_menu(menu, item_id) }
describe 'Packages Registry' do
let(:item_id) { :packages_registry }
context 'when user can read packages' do
before do
stub_config(packages: { enabled: packages_enabled })
end
context 'when config package setting is disabled' do
let(:packages_enabled) { false }
it 'the menu item is not added to list of menu items' do
is_expected.to be_nil
end
end
context 'when config package setting is enabled' do
let(:packages_enabled) { true }
it 'the menu item is added to list of menu items' do
is_expected.not_to be_nil
end
end
end
end
describe 'Container Registry' do
let(:item_id) { :container_registry }
context 'when user can read container images' do
before do
stub_container_registry_config(enabled: container_enabled)
end
context 'when config registry setting is disabled' do
let(:container_enabled) { false }
it 'the menu item is not added to list of menu items' do
is_expected.to be_nil
end
end
context 'when config registry setting is enabled' do
let(:container_enabled) { true }
it 'the menu item is added to list of menu items' do
is_expected.not_to be_nil
end
context 'when user cannot read container images' do
let(:user) { nil }
it 'the menu item is not added to list of menu items' do
is_expected.to be_nil
end
end
end
end
end
describe 'Dependency Proxy' do
let(:item_id) { :dependency_proxy }
before do
stub_config(dependency_proxy: { enabled: dependency_enabled })
end
context 'when config dependency_proxy is enabled' do
let(:dependency_enabled) { true }
it 'the menu item is added to list of menu items' do
is_expected.not_to be_nil
end
end
context 'when config dependency_proxy is not enabled' do
let(:dependency_enabled) { false }
it 'the menu item is not added to list of menu items' do
is_expected.to be_nil
end
end
end
end
private
def find_menu(menu, item)
menu.renderable_items.find { |i| i.item_id == item }
end
end

View file

@ -52,14 +52,14 @@ RSpec.describe 'Query.runner(id)' do
'version' => runner.version,
'shortSha' => runner.short_sha,
'revision' => runner.revision,
'locked' => runner.locked,
'locked' => false,
'active' => runner.active,
'status' => runner.status.to_s.upcase,
'maximumTimeout' => runner.maximum_timeout,
'accessLevel' => runner.access_level.to_s.upcase,
'runUntagged' => runner.run_untagged,
'ipAddress' => runner.ip_address,
'runnerType' => 'INSTANCE_TYPE',
'runnerType' => runner.instance_type? ? 'INSTANCE_TYPE' : 'PROJECT_TYPE',
'jobCount' => 0,
'projectCount' => nil
)
@ -109,6 +109,40 @@ RSpec.describe 'Query.runner(id)' do
end
end
describe 'for project runner' do
using RSpec::Parameterized::TableSyntax
where(is_locked: [true, false])
with_them do
let(:project_runner) do
create(:ci_runner, :project, description: 'Runner 3', contacted_at: 1.day.ago, active: false, locked: is_locked,
version: 'adfe157', revision: 'b', ip_address: '10.10.10.10', access_level: 1, run_untagged: true)
end
let(:query) do
wrap_fields(query_graphql_path(query_path, all_graphql_fields_for('CiRunner')))
end
let(:query_path) do
[
[:runner, { id: project_runner.to_global_id.to_s }]
]
end
it 'retrieves correct locked value' do
post_graphql(query, current_user: user)
runner_data = graphql_data_at(:runner)
expect(runner_data).to match a_hash_including(
'id' => "gid://gitlab/Ci::Runner/#{project_runner.id}",
'locked' => is_locked
)
end
end
end
describe 'for inactive runner' do
it_behaves_like 'runner details fetch', :inactive_instance_runner
end

View file

@ -76,6 +76,78 @@ RSpec.describe 'Merge Requests Diffs' do
subject
end
context 'with the different user' do
let(:another_user) { create(:user) }
before do
project.add_maintainer(another_user)
sign_in(another_user)
end
it_behaves_like 'serializes diffs with expected arguments' do
let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
let(:expected_options) { collection_arguments(total_pages: 20) }
end
end
context 'with a new unfoldable diff position' do
let(:unfoldable_position) do
create(:diff_position)
end
before do
expect_next_instance_of(Gitlab::Diff::PositionCollection) do |instance|
expect(instance)
.to receive(:unfoldable)
.and_return([unfoldable_position])
end
end
it_behaves_like 'serializes diffs with expected arguments' do
let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
let(:expected_options) { collection_arguments(total_pages: 20) }
end
end
context 'with a new environment' do
let(:environment) do
create(:environment, :available, project: project)
end
let!(:deployment) do
create(:deployment, :success, environment: environment, ref: merge_request.source_branch)
end
it_behaves_like 'serializes diffs with expected arguments' do
let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
let(:expected_options) { collection_arguments(total_pages: 20).merge(environment: environment) }
end
end
context 'with disabled display_merge_conflicts_in_diff feature' do
before do
stub_feature_flags(display_merge_conflicts_in_diff: false)
end
it_behaves_like 'serializes diffs with expected arguments' do
let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
let(:expected_options) { collection_arguments(total_pages: 20).merge(allow_tree_conflicts: false) }
end
end
context 'with diff_head option' do
subject { go(page: 0, per_page: 5, diff_head: true) }
before do
merge_request.create_merge_head_diff!
end
it_behaves_like 'serializes diffs with expected arguments' do
let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
let(:expected_options) { collection_arguments(total_pages: 20).merge(merge_ref_head_diff: true) }
end
end
context 'with the different pagination option' do
subject { go(page: 5, per_page: 5) }

View file

@ -108,4 +108,30 @@ RSpec.describe 'layouts/nav/sidebar/_group' do
expect(rendered).to have_link('Kubernetes', href: group_clusters_path(group))
end
end
describe 'Packages & Registries' do
it 'has a link to the package registry page' do
stub_config(packages: { enabled: true })
render
expect(rendered).to have_link('Package Registry', href: group_packages_path(group))
end
it 'has a link to the container registry page' do
stub_container_registry_config(enabled: true)
render
expect(rendered).to have_link('Container Registry', href: group_container_registries_path(group))
end
it 'has a link to the dependency proxy page' do
stub_config(dependency_proxy: { enabled: true })
render
expect(rendered).to have_link('Dependency Proxy', href: group_dependency_proxy_path(group))
end
end
end