Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-08-06 00:09:53 +00:00
parent ad9eb72915
commit 631ed6dcca
70 changed files with 2281 additions and 469 deletions

View File

@ -1 +1 @@
1bd1bfa6673eb784b856d580240f8e5522b86467
0f3b4b0b121f611fac2a613ec0273864d5f38c8b

View File

@ -507,5 +507,6 @@ gem 'valid_email', '~> 0.1'
# JSON
gem 'json', '~> 2.3.0'
gem 'json-schema', '~> 2.8.0'
gem 'json_schemer', '~> 0.2.12'
gem 'oj', '~> 3.10.6'
gem 'multi_json', '~> 1.14.1'

View File

@ -272,6 +272,8 @@ GEM
dry-equalizer (~> 0.3)
dry-inflector (~> 0.1, >= 0.1.2)
dry-logic (~> 1.0, >= 1.0.2)
ecma-re-validator (0.2.1)
regexp_parser (~> 1.2)
ed25519 (1.2.4)
elasticsearch (6.8.0)
elasticsearch-api (= 6.8.0)
@ -522,6 +524,7 @@ GEM
temple (>= 0.8.2)
thor
tilt
hana (1.3.6)
hangouts-chat (0.0.5)
hashdiff (0.3.8)
hashie (3.6.0)
@ -582,6 +585,11 @@ GEM
bindata
json-schema (2.8.0)
addressable (>= 2.4)
json_schemer (0.2.12)
ecma-re-validator (~> 0.2)
hana (~> 1.3)
regexp_parser (~> 1.5)
uri_template (~> 0.7)
jwt (2.1.0)
kaminari (1.2.1)
activesupport (>= 4.1.0)
@ -1135,6 +1143,7 @@ GEM
equalizer (~> 0.0.9)
parser (>= 2.6.5)
procto (~> 0.0.2)
uri_template (0.7.0)
valid_email (0.1.3)
activemodel
mail (>= 2.6.1)
@ -1305,6 +1314,7 @@ DEPENDENCIES
js_regex (~> 3.4)
json (~> 2.3.0)
json-schema (~> 2.8.0)
json_schemer (~> 0.2.12)
jwt (~> 2.1.0)
kaminari (~> 1.0)
knapsack (~> 1.17)

View File

@ -1,13 +1,78 @@
import svg4everybody from 'svg4everybody';
import { debounce } from 'lodash';
/*
Chrome and Edge 84 have a bug relating to icon sprite svgs
https://bugs.chromium.org/p/chromium/issues/detail?id=1107442
If the SVG is loaded, under certain circumstances the icons are not
shown. As a workaround we use the well-tested svg4everybody and forcefully
include the icon fragments into the DOM, thus circumventing the bug
shown. We load our sprite icons with JS and add them to the body.
Then we iterate over all the `use` elements and replace their references
to point at the SVG we added inline. To avoid id conflicts,
the ids are renamed with a unique prefix.
We do that once DOMContentLoaded has fired, and afterwards we use a
mutation observer to re-trigger this logic.
To avoid a big performance impact or flickering
of content,
1. we only do it for each svg once
2. we debounce the event handler and just do it in a requestIdleCallback
Previously we tried to do this with the svg4everybody library, but it had
a big performance impact. See:
https://gitlab.com/gitlab-org/quality/performance/-/issues/312
*/
document.addEventListener('DOMContentLoaded', () => {
svg4everybody({ polyfill: true });
document.addEventListener('DOMContentLoaded', async () => {
const GITLAB_SVG_PREFIX = 'chrome-issue-230433-gitlab-svgs';
const FILE_ICON_PREFIX = 'chrome-issue-230433-file-icons';
const SKIP_ATTRIBUTE = 'data-replaced-by-chrome-issue-230433';
const fixSVGs = () => {
requestIdleCallback(() => {
document.querySelectorAll(`use:not([${SKIP_ATTRIBUTE}])`).forEach(use => {
const href = use?.getAttribute('href') ?? use?.getAttribute('xlink:href') ?? '';
if (href.includes(window.gon.sprite_icons)) {
use.removeAttribute('xlink:href');
use.setAttribute('href', `#${GITLAB_SVG_PREFIX}-${href.split('#')[1]}`);
} else if (href.includes(window.gon.sprite_file_icons)) {
use.removeAttribute('xlink:href');
use.setAttribute('href', `#${FILE_ICON_PREFIX}-${href.split('#')[1]}`);
}
use.setAttribute(SKIP_ATTRIBUTE, 'true');
});
});
};
const watchForNewSVGs = () => {
const observer = new MutationObserver(debounce(fixSVGs, 200));
observer.observe(document.querySelector('body'), {
childList: true,
attributes: false,
subtree: true,
});
};
const retrieveIconSprites = async (url, prefix) => {
const div = document.createElement('div');
div.classList.add('hidden');
const result = await fetch(url);
div.innerHTML = await result.text();
div.querySelectorAll('[id]').forEach(node => {
node.setAttribute('id', `${prefix}-${node.getAttribute('id')}`);
});
document.body.append(div);
};
if (window.gon && window.gon.sprite_icons) {
await retrieveIconSprites(window.gon.sprite_icons, GITLAB_SVG_PREFIX);
if (window.gon.sprite_file_icons) {
await retrieveIconSprites(window.gon.sprite_file_icons, FILE_ICON_PREFIX);
}
fixSVGs();
watchForNewSVGs();
}
});

View File

@ -8,6 +8,7 @@ import { EDITOR_TYPES } from '~/vue_shared/components/rich_content_editor/consta
import { DEFAULT_IMAGE_UPLOAD_PATH } from '../constants';
import imageRepository from '../image_repository';
import formatter from '../services/formatter';
import templater from '../services/templater';
export default {
components: {
@ -44,7 +45,7 @@ export default {
data() {
return {
saveable: false,
parsedSource: parseSourceFile(this.content),
parsedSource: parseSourceFile(this.preProcess(true, this.content)),
editorMode: EDITOR_TYPES.wysiwyg,
isModified: false,
};
@ -59,22 +60,30 @@ export default {
},
},
methods: {
preProcess(isWrap, value) {
const formattedContent = formatter(value);
const templatedContent = isWrap
? templater.wrap(formattedContent)
: templater.unwrap(formattedContent);
return templatedContent;
},
onInputChange(newVal) {
this.parsedSource.sync(newVal, this.isWysiwygMode);
this.isModified = this.parsedSource.isModified();
},
onModeChange(mode) {
this.editorMode = mode;
const formattedContent = formatter(this.editableContent);
this.$refs.editor.resetInitialValue(formattedContent);
const preProcessedContent = this.preProcess(this.isWysiwygMode, this.editableContent);
this.$refs.editor.resetInitialValue(preProcessedContent);
},
onUploadImage({ file, imageUrl }) {
this.$options.imageRepository.add(file, imageUrl);
},
onSubmit() {
const formattedContent = formatter(this.parsedSource.content());
const preProcessedContent = this.preProcess(false, this.parsedSource.content());
this.$emit('submit', {
content: formattedContent,
content: preProcessedContent,
images: this.$options.imageRepository.getAll(),
});
},

View File

@ -0,0 +1,32 @@
const marker = 'sse';
const ticks = '```';
const prefix = `${ticks} ${marker}\n`; // Space intentional due to https://github.com/nhn/tui.editor/blob/6bcec75c69028570d93d973aa7533090257eaae0/libs/to-mark/src/renderer.gfm.js#L26
const postfix = `\n${ticks}`;
const code = '.| |\\t|\\n(?!\\n)';
const templatedRegex = new RegExp(`(^${prefix}(${code})+${postfix}$)`, 'gm');
const embeddedRubyRegex = new RegExp(`(^<%(${code})+%>$)`, 'gm');
const unwrap = source => {
let text = source;
const matches = text.match(templatedRegex);
if (matches) {
matches.forEach(match => {
const initial = match.replace(prefix, '').replace(postfix, '');
text = text.replace(match, initial);
});
}
return text;
};
const wrap = source => {
let text = unwrap(source);
const matches = text.match(embeddedRubyRegex);
if (matches) {
matches.forEach(match => {
text = text.replace(match, `${prefix}${match}${postfix}`);
});
}
return text;
};
export default { wrap, unwrap };

View File

@ -87,7 +87,7 @@ export default {
<span>
<gl-loading-icon v-if="loading" :inline="true" />
<gl-icon v-else-if="isSymlink" name="symlink" :size="size" />
<svg v-else-if="!folder" :class="[iconSizeClass, cssClasses]">
<svg v-else-if="!folder" :key="spriteHref" :class="[iconSizeClass, cssClasses]">
<use v-bind="{ 'xlink:href': spriteHref }" />
</svg>
<gl-icon v-else :name="folderIconName" :size="size" class="folder-icon" />

View File

@ -61,7 +61,12 @@ export default {
</script>
<template>
<svg :class="[iconSizeClass, iconTestClass]" aria-hidden="true" v-on="$listeners">
<svg
:key="spriteHref"
:class="[iconSizeClass, iconTestClass]"
aria-hidden="true"
v-on="$listeners"
>
<use v-bind="{ 'xlink:href': spriteHref }" />
</svg>
</template>

View File

@ -3,7 +3,6 @@ import renderKramdownList from './renderers/render_kramdown_list';
import renderKramdownText from './renderers/render_kramdown_text';
import renderIdentifierInstanceText from './renderers/render_identifier_instance_text';
import renderIdentifierParagraph from './renderers/render_identifier_paragraph';
import renderEmbeddedRubyText from './renderers/render_embedded_ruby_text';
import renderFontAwesomeHtmlInline from './renderers/render_font_awesome_html_inline';
import renderSoftbreak from './renderers/render_softbreak';
@ -11,7 +10,7 @@ const htmlInlineRenderers = [renderFontAwesomeHtmlInline];
const htmlBlockRenderers = [renderBlockHtml];
const listRenderers = [renderKramdownList];
const paragraphRenderers = [renderIdentifierParagraph];
const textRenderers = [renderKramdownText, renderEmbeddedRubyText, renderIdentifierInstanceText];
const textRenderers = [renderKramdownText, renderIdentifierInstanceText];
const softbreakRenderers = [renderSoftbreak];
const executeRenderer = (renderers, node, context) => {

View File

@ -45,6 +45,17 @@ module Emails
end
end
def access_token_expired_email(user)
return unless user && user.active?
@user = user
@target_url = profile_personal_access_tokens_url
Gitlab::I18n.with_locale(@user.preferred_language) do
mail(to: @user.notification_email, subject: subject(_("Your personal access token has expired")))
end
end
def unknown_sign_in_email(user, ip, time)
@user = user
@ip = ip

View File

@ -32,17 +32,32 @@ module RelativePositioning
class_methods do
def move_nulls_to_end(objects)
objects = objects.reject(&:relative_position)
return if objects.empty?
max_relative_position = objects.first.max_relative_position
self.transaction do
max_relative_position = objects.first.max_relative_position
objects.each do |object|
relative_position = position_between(max_relative_position || START_POSITION, MAX_POSITION)
object.relative_position = relative_position
object.update_column(:relative_position, relative_position)
max_relative_position = relative_position
object.save(touch: false)
end
end
end
def move_nulls_to_start(objects)
objects = objects.reject(&:relative_position)
return if objects.empty?
self.transaction do
min_relative_position = objects.first.min_relative_position
objects.reverse_each do |object|
relative_position = position_between(MIN_POSITION, min_relative_position || START_POSITION)
object.update_column(:relative_position, relative_position)
min_relative_position = relative_position
end
end
end
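
For context, `position_between` is not shown in this hunk. A minimal sketch of the midpoint strategy such helpers commonly use (hypothetical and simplified; the real concern also handles exhausted gaps by rebalancing):

```ruby
# Hypothetical, simplified position_between: pick the midpoint of the open
# interval (lhs, rhs) so later inserts keep room on both sides.
def position_between(lhs, rhs)
  raise ArgumentError, 'no gap left between positions' if rhs - lhs < 2

  lhs + (rhs - lhs) / 2
end

position_between(0, 1000) # => 500
```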

View File

@ -58,7 +58,7 @@ class MergeRequestDiff < ApplicationRecord
end
scope :recent, -> { order(id: :desc).limit(100) }
scope :files_in_database, -> { has_diff_files.where(stored_externally: [false, nil]) }
scope :files_in_database, -> { where(stored_externally: [false, nil]) }
scope :not_latest_diffs, -> do
merge_requests = MergeRequest.arel_table
@ -100,37 +100,55 @@ class MergeRequestDiff < ApplicationRecord
joins(merge_request: :metrics).where(condition)
end
def self.ids_for_external_storage_migration(limit:)
# No point doing any work unless the feature is enabled
return [] unless Gitlab.config.external_diffs.enabled
class << self
def ids_for_external_storage_migration(limit:)
return [] unless Gitlab.config.external_diffs.enabled
case Gitlab.config.external_diffs.when
when 'always'
files_in_database.limit(limit).pluck(:id)
when 'outdated'
case Gitlab.config.external_diffs.when
when 'always'
ids_for_external_storage_migration_strategy_always(limit: limit)
when 'outdated'
ids_for_external_storage_migration_strategy_outdated(limit: limit)
else
[]
end
end
def ids_for_external_storage_migration_strategy_always(limit:)
ids = []
files_in_database.each_batch(column: :merge_request_id, order_hint: :id) do |scope|
ids.concat(scope.has_diff_files.pluck(:id))
break if ids.count >= limit
end
ids.first(limit)
end
def ids_for_external_storage_migration_strategy_outdated(limit:)
# Outdated is too complex to be a single SQL query, so split into three
before = EXTERNAL_DIFF_CUTOFF.ago
potentials = has_diff_files.files_in_database
ids = files_in_database
ids = potentials
.old_merged_diffs(before)
.limit(limit)
.pluck(:id)
return ids if ids.size >= limit
ids += files_in_database
ids += potentials
.old_closed_diffs(before)
.limit(limit - ids.size)
.pluck(:id)
return ids if ids.size >= limit
ids + files_in_database
ids + potentials
.not_latest_diffs
.limit(limit - ids.size)
.pluck(:id)
else
[]
end
end

View File

@ -19,6 +19,7 @@ class PersonalAccessToken < ApplicationRecord
scope :active, -> { where("revoked = false AND (expires_at >= CURRENT_DATE OR expires_at IS NULL)") }
scope :expiring_and_not_notified, ->(date) { where(["revoked = false AND expire_notification_delivered = false AND expires_at >= CURRENT_DATE AND expires_at <= ?", date]) }
scope :expired_today_and_not_notified, -> { where(["revoked = false AND expires_at = CURRENT_DATE AND after_expiry_notification_delivered = false"]) }
scope :inactive, -> { where("revoked = true OR expires_at < CURRENT_DATE") }
scope :with_impersonation, -> { where(impersonation: true) }
scope :without_impersonation, -> { where(impersonation: false) }

View File

@ -350,6 +350,14 @@ class User < ApplicationRecord
.without_impersonation
.expiring_and_not_notified(at).select(1))
end
scope :with_personal_access_tokens_expired_today, -> do
where('EXISTS (?)',
::PersonalAccessToken
.select(1)
.where('personal_access_tokens.user_id = users.id')
.without_impersonation
.expired_today_and_not_notified)
end
scope :order_recent_sign_in, -> { reorder(Gitlab::Database.nulls_last_order('current_sign_in_at', 'DESC')) }
scope :order_oldest_sign_in, -> { reorder(Gitlab::Database.nulls_last_order('current_sign_in_at', 'ASC')) }
scope :order_recent_last_activity, -> { reorder(Gitlab::Database.nulls_last_order('last_activity_on', 'DESC')) }

View File

@ -66,6 +66,13 @@ class NotificationService
mailer.access_token_about_to_expire_email(user).deliver_later
end
# Notify the user when at least one of their personal access tokens has expired today
def access_token_expired(user)
return unless user.can?(:receive_notifications)
mailer.access_token_expired_email(user).deliver_later
end
# Notify a user when a previously unknown IP or device is used to
# sign in to their account
def unknown_sign_in(user, ip, time)

View File

@ -4,4 +4,4 @@
= _('One or more of your personal access tokens will expire in %{days_to_expire} days or less.') % { days_to_expire: @days_to_expire }
%p
- pat_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: @target_url }
= _('You can create a new one or check them in your %{pat_link_start}Personal Access Tokens%{pat_link_end} settings').html_safe % { pat_link_start: pat_link_start, pat_link_end: '</a>'.html_safe }
= html_escape(_('You can create a new one or check them in your %{pat_link_start}personal access tokens%{pat_link_end} settings')) % { pat_link_start: pat_link_start, pat_link_end: '</a>'.html_safe }

View File

@ -2,4 +2,4 @@
<%= _('One or more of your personal access tokens will expire in %{days_to_expire} days or less.') % { days_to_expire: @days_to_expire} %>
<%= _('You can create a new one or check them in your Personal Access Tokens settings %{pat_link}') % { pat_link: @target_url } %>
<%= _('You can create a new one or check them in your personal access tokens settings %{pat_link}') % { pat_link: @target_url } %>

View File

@ -0,0 +1,7 @@
%p
= _('Hi %{username}!') % { username: sanitize_name(@user.name) }
%p
= _('One or more of your personal access tokens has expired.')
%p
- pat_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: @target_url }
= html_escape(_('You can create a new one or check them in your %{pat_link_start}personal access tokens%{pat_link_end} settings')) % { pat_link_start: pat_link_start, pat_link_end: '</a>'.html_safe }

View File

@ -0,0 +1,5 @@
<%= _('Hi %{username}!') % { username: sanitize_name(@user.name) } %>
<%= _('One or more of your personal access tokens has expired.') %>
<%= _('You can create a new one or check them in your personal access tokens settings %{pat_link}') % { pat_link: @target_url } %>

View File

@ -243,6 +243,14 @@
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:personal_access_tokens_expired_notification
:feature_category: :authentication_and_authorization
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent:
:tags: []
- :name: cronjob:personal_access_tokens_expiring
:feature_category: :authentication_and_authorization
:has_external_dependencies:

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
module PersonalAccessTokens
class ExpiredNotificationWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include CronjobQueue
feature_category :authentication_and_authorization
def perform(*args)
return unless Feature.enabled?(:expired_pat_email_notification)
notification_service = NotificationService.new
User.with_personal_access_tokens_expired_today.find_each do |user|
with_context(user: user) do
Gitlab::AppLogger.info "#{self.class}: Notifying User #{user.id} about an expired token"
notification_service.access_token_expired(user)
user.personal_access_tokens.without_impersonation.expired_today_and_not_notified.update_all(after_expiry_notification_delivered: true)
end
end
end
end
end
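
The worker is gated behind the `expired_pat_email_notification` feature flag. A sketch of exercising it manually from a Rails console (assuming the standard `Feature` flag API):

```ruby
# Enable the flag checked in #perform, then run the worker inline
# instead of waiting for its cron schedule.
Feature.enable(:expired_pat_email_notification)
PersonalAccessTokens::ExpiredNotificationWorker.new.perform
```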

View File

@ -0,0 +1,5 @@
---
title: Backfill relative positions on designs
merge_request: 37837
author:
type: changed

View File

@ -0,0 +1,5 @@
---
title: Optimise the external diff storage migration query
merge_request: 38579
author:
type: performance

View File

@ -0,0 +1,5 @@
---
title: Added pre-processing step to the Static Site Editor so code templates (ERB) are interpreted as code, not content
merge_request: 38694
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Email notification for expired personal access token
merge_request: 38086
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: 'Geo: Drop tables related to vulnerability export replication'
merge_request: 38299
author:
type: removed

View File

@ -423,6 +423,9 @@ Settings.cron_jobs['admin_email_worker']['job_class'] = 'AdminEmailWorker'
Settings.cron_jobs['personal_access_tokens_expiring_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['personal_access_tokens_expiring_worker']['cron'] ||= '0 1 * * *'
Settings.cron_jobs['personal_access_tokens_expiring_worker']['job_class'] = 'PersonalAccessTokens::ExpiringWorker'
Settings.cron_jobs['personal_access_tokens_expired_notification_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['personal_access_tokens_expired_notification_worker']['cron'] ||= '0 2 * * *'
Settings.cron_jobs['personal_access_tokens_expired_notification_worker']['job_class'] = 'PersonalAccessTokens::ExpiredNotificationWorker'
Settings.cron_jobs['repository_archive_cache_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['repository_archive_cache_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['repository_archive_cache_worker']['job_class'] = 'RepositoryArchiveCacheWorker'

View File

@ -33,6 +33,8 @@ class CreateVulnerabilitiesExportVerificationStatus < ActiveRecord::Migration[6.
end
def down
return unless table_exists?(:vulnerability_export_verification_status)
with_lock_retries do
drop_table :vulnerability_export_verification_status
end

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
class AddAfterExpiryNotificationDeliveredToPersonalAccessTokens < ActiveRecord::Migration[6.0]
DOWNTIME = false
disable_ddl_transaction!
def change
add_column :personal_access_tokens, :after_expiry_notification_delivered, :boolean, null: false, default: false
end
end

View File

@ -0,0 +1,38 @@
# frozen_string_literal: true
class BackfillDesignsRelativePosition < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INTERVAL = 2.minutes
BATCH_SIZE = 1000
MIGRATION = 'BackfillDesignsRelativePosition'
disable_ddl_transaction!
class Issue < ActiveRecord::Base
include EachBatch
self.table_name = 'issues'
has_many :designs
end
class Design < ActiveRecord::Base
self.table_name = 'design_management_designs'
end
def up
issues_with_designs = Issue.where(id: Design.select(:issue_id))
issues_with_designs.each_batch(of: BATCH_SIZE) do |relation, index|
issue_ids = relation.pluck(:id)
delay = INTERVAL * index
migrate_in(delay, MIGRATION, [issue_ids])
end
end
def down
end
end
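
For illustration, each batch is scheduled `INTERVAL` apart rather than enqueued all at once. A sketch of the resulting timeline (assuming `each_batch` yields 1-based batch indexes):

```ruby
# With INTERVAL = 2.minutes and BATCH_SIZE = 1000:
#   batch 1 -> migrate_in(2.minutes, 'BackfillDesignsRelativePosition', [ids])
#   batch 2 -> migrate_in(4.minutes, 'BackfillDesignsRelativePosition', [ids])
#   batch 3 -> migrate_in(6.minutes, 'BackfillDesignsRelativePosition', [ids])
```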

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class RemoveTableVulnerabilityExportVerificationStatus < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
return unless table_exists?(:vulnerability_export_verification_status)
drop_table :vulnerability_export_verification_status
end
def down
# no-op because `vulnerability_export_verification_status` table should not
# be created, see https://gitlab.com/gitlab-org/gitlab/-/issues/232977
end
end

View File

@ -0,0 +1 @@
9d711a0c4f785660c0a2317e598e427d5e2f91b177e4c0b96cef2958f787aa6e

View File

@ -0,0 +1 @@
0a080250afe61007852cb65e8fd6cdccbdad1666abf12b59d46fb55ec0d455cc

View File

@ -0,0 +1 @@
bd1c0fd04b1a77492a125417a530ea0482c66330da3d2563c963c097e63b6b39

View File

@ -13979,7 +13979,8 @@ CREATE TABLE public.personal_access_tokens (
impersonation boolean DEFAULT false NOT NULL,
token_digest character varying,
expire_notification_delivered boolean DEFAULT false NOT NULL,
last_used_at timestamp with time zone
last_used_at timestamp with time zone,
after_expiry_notification_delivered boolean DEFAULT false NOT NULL
);
CREATE SEQUENCE public.personal_access_tokens_id_seq
@ -16173,25 +16174,6 @@ CREATE SEQUENCE public.vulnerabilities_id_seq
ALTER SEQUENCE public.vulnerabilities_id_seq OWNED BY public.vulnerabilities.id;
CREATE TABLE public.vulnerability_export_verification_status (
vulnerability_export_id bigint NOT NULL,
verification_retry_at timestamp with time zone,
verified_at timestamp with time zone,
verification_retry_count smallint,
verification_checksum bytea,
verification_failure text,
CONSTRAINT check_48fdf48546 CHECK ((char_length(verification_failure) <= 255))
);
CREATE SEQUENCE public.vulnerability_export_verification_s_vulnerability_export_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE public.vulnerability_export_verification_s_vulnerability_export_id_seq OWNED BY public.vulnerability_export_verification_status.vulnerability_export_id;
CREATE TABLE public.vulnerability_exports (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@ -17261,8 +17243,6 @@ ALTER TABLE ONLY public.users_statistics ALTER COLUMN id SET DEFAULT nextval('pu
ALTER TABLE ONLY public.vulnerabilities ALTER COLUMN id SET DEFAULT nextval('public.vulnerabilities_id_seq'::regclass);
ALTER TABLE ONLY public.vulnerability_export_verification_status ALTER COLUMN vulnerability_export_id SET DEFAULT nextval('public.vulnerability_export_verification_s_vulnerability_export_id_seq'::regclass);
ALTER TABLE ONLY public.vulnerability_exports ALTER COLUMN id SET DEFAULT nextval('public.vulnerability_exports_id_seq'::regclass);
ALTER TABLE ONLY public.vulnerability_feedback ALTER COLUMN id SET DEFAULT nextval('public.vulnerability_feedback_id_seq'::regclass);
@ -18548,9 +18528,6 @@ ALTER TABLE ONLY public.users_statistics
ALTER TABLE ONLY public.vulnerabilities
ADD CONSTRAINT vulnerabilities_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.vulnerability_export_verification_status
ADD CONSTRAINT vulnerability_export_verification_status_pkey PRIMARY KEY (vulnerability_export_id);
ALTER TABLE ONLY public.vulnerability_exports
ADD CONSTRAINT vulnerability_exports_pkey PRIMARY KEY (id);
@ -20803,8 +20780,6 @@ CREATE INDEX index_vulnerabilities_on_start_date_sourcing_milestone_id ON public
CREATE INDEX index_vulnerabilities_on_updated_by_id ON public.vulnerabilities USING btree (updated_by_id);
CREATE INDEX index_vulnerability_export_verification_status_on_export_id ON public.vulnerability_export_verification_status USING btree (vulnerability_export_id);
CREATE INDEX index_vulnerability_exports_on_author_id ON public.vulnerability_exports USING btree (author_id);
CREATE INDEX index_vulnerability_exports_on_file_store ON public.vulnerability_exports USING btree (file_store);
@ -20955,10 +20930,6 @@ CREATE UNIQUE INDEX unique_merge_request_metrics_by_merge_request_id ON public.m
CREATE UNIQUE INDEX users_security_dashboard_projects_unique_index ON public.users_security_dashboard_projects USING btree (project_id, user_id);
CREATE INDEX vulnerability_exports_verification_checksum_partial ON public.vulnerability_export_verification_status USING btree (verification_checksum) WHERE (verification_checksum IS NOT NULL);
CREATE INDEX vulnerability_exports_verification_failure_partial ON public.vulnerability_export_verification_status USING btree (verification_failure) WHERE (verification_failure IS NOT NULL);
CREATE UNIQUE INDEX vulnerability_feedback_unique_idx ON public.vulnerability_feedback USING btree (project_id, category, feedback_type, project_fingerprint);
CREATE UNIQUE INDEX vulnerability_occurrence_pipelines_on_unique_keys ON public.vulnerability_occurrence_pipelines USING btree (occurrence_id, pipeline_id);
@ -21839,9 +21810,6 @@ ALTER TABLE ONLY public.approval_merge_request_rules
ALTER TABLE ONLY public.namespace_statistics
ADD CONSTRAINT fk_rails_0062050394 FOREIGN KEY (namespace_id) REFERENCES public.namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.vulnerability_export_verification_status
ADD CONSTRAINT fk_rails_00a22ee64f FOREIGN KEY (vulnerability_export_id) REFERENCES public.vulnerability_exports(id) ON DELETE CASCADE;
ALTER TABLE ONLY public.clusters_applications_elastic_stacks
ADD CONSTRAINT fk_rails_026f219f46 FOREIGN KEY (cluster_id) REFERENCES public.clusters(id) ON DELETE CASCADE;

View File

@ -5926,7 +5926,43 @@ type Group {
): VulnerabilityConnection
"""
Number of vulnerabilities per severity level, per day, for the projects in the group and its subgroups
Number of vulnerabilities per day for the projects in the group and its subgroups
"""
vulnerabilitiesCountByDay(
"""
Returns the elements in the list that come after the specified cursor.
"""
after: String
"""
Returns the elements in the list that come before the specified cursor.
"""
before: String
"""
Last day for which to fetch vulnerability history
"""
endDate: ISO8601Date!
"""
Returns the first _n_ elements from the list.
"""
first: Int
"""
Returns the last _n_ elements from the list.
"""
last: Int
"""
First day for which to fetch vulnerability history
"""
startDate: ISO8601Date!
): VulnerabilitiesCountByDayConnection
"""
Number of vulnerabilities per severity level, per day, for the projects in the
group and its subgroups. Deprecated in 13.3: Use `vulnerabilitiesCountByDay`
"""
vulnerabilitiesCountByDayAndSeverity(
"""
@ -5958,7 +5994,7 @@ type Group {
First day for which to fetch vulnerability history
"""
startDate: ISO8601Date!
): VulnerabilitiesCountByDayAndSeverityConnection
): VulnerabilitiesCountByDayAndSeverityConnection @deprecated(reason: "Use `vulnerabilitiesCountByDay`. Deprecated in 13.3")
"""
Represents vulnerable project counts for each grade
@ -11552,7 +11588,44 @@ type Query {
): VulnerabilityConnection
"""
Number of vulnerabilities per severity level, per day, for the projects on the current user's instance security dashboard
Number of vulnerabilities per day for the projects on the current user's instance security dashboard
"""
vulnerabilitiesCountByDay(
"""
Returns the elements in the list that come after the specified cursor.
"""
after: String
"""
Returns the elements in the list that come before the specified cursor.
"""
before: String
"""
Last day for which to fetch vulnerability history
"""
endDate: ISO8601Date!
"""
Returns the first _n_ elements from the list.
"""
first: Int
"""
Returns the last _n_ elements from the list.
"""
last: Int
"""
First day for which to fetch vulnerability history
"""
startDate: ISO8601Date!
): VulnerabilitiesCountByDayConnection
"""
Number of vulnerabilities per severity level, per day, for the projects on the
current user's instance security dashboard. Deprecated in 13.3: Use
`vulnerabilitiesCountByDay`
"""
vulnerabilitiesCountByDayAndSeverity(
"""
@ -11584,7 +11657,7 @@ type Query {
First day for which to fetch vulnerability history
"""
startDate: ISO8601Date!
): VulnerabilitiesCountByDayAndSeverityConnection
): VulnerabilitiesCountByDayAndSeverityConnection @deprecated(reason: "Use `vulnerabilitiesCountByDay`. Deprecated in 13.3")
}
"""
@ -15460,6 +15533,51 @@ enum VisibilityScopesEnum {
public
}
"""
Represents the count of vulnerabilities by severity on a particular day
"""
type VulnerabilitiesCountByDay {
"""
Total number of vulnerabilities on a particular day with critical severity
"""
critical: Int!
"""
Date for the count
"""
date: ISO8601Date!
"""
Total number of vulnerabilities on a particular day with high severity
"""
high: Int!
"""
Total number of vulnerabilities on a particular day with info severity
"""
info: Int!
"""
Total number of vulnerabilities on a particular day with low severity
"""
low: Int!
"""
Total number of vulnerabilities on a particular day with medium severity
"""
medium: Int!
"""
Total number of vulnerabilities on a particular day
"""
total: Int!
"""
Total number of vulnerabilities on a particular day with unknown severity
"""
unknown: Int!
}
"""
Represents the number of vulnerabilities for a particular severity on a particular day
"""
@ -15515,6 +15633,41 @@ type VulnerabilitiesCountByDayAndSeverityEdge {
node: VulnerabilitiesCountByDayAndSeverity
}
"""
The connection type for VulnerabilitiesCountByDay.
"""
type VulnerabilitiesCountByDayConnection {
"""
A list of edges.
"""
edges: [VulnerabilitiesCountByDayEdge]
"""
A list of nodes.
"""
nodes: [VulnerabilitiesCountByDay]
"""
Information to aid in pagination.
"""
pageInfo: PageInfo!
}
"""
An edge in a connection.
"""
type VulnerabilitiesCountByDayEdge {
"""
A cursor for use in pagination.
"""
cursor: String!
"""
The item at the end of the edge.
"""
node: VulnerabilitiesCountByDay
}
"""
Represents a vulnerability.
"""

View File

@ -16324,9 +16324,90 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "vulnerabilitiesCountByDay",
"description": "Number of vulnerabilities per day for the projects in the group and its subgroups",
"args": [
{
"name": "startDate",
"description": "First day for which to fetch vulnerability history",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ISO8601Date",
"ofType": null
}
},
"defaultValue": null
},
{
"name": "endDate",
"description": "Last day for which to fetch vulnerability history",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ISO8601Date",
"ofType": null
}
},
"defaultValue": null
},
{
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "before",
"description": "Returns the elements in the list that come before the specified cursor.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "first",
"description": "Returns the first _n_ elements from the list.",
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"defaultValue": null
},
{
"name": "last",
"description": "Returns the last _n_ elements from the list.",
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"defaultValue": null
}
],
"type": {
"kind": "OBJECT",
"name": "VulnerabilitiesCountByDayConnection",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "vulnerabilitiesCountByDayAndSeverity",
"description": "Number of vulnerabilities per severity level, per day, for the projects in the group and its subgroups",
"description": "Number of vulnerabilities per severity level, per day, for the projects in the group and its subgroups. Deprecated in 13.3: Use `vulnerabilitiesCountByDay`",
"args": [
{
"name": "startDate",
@ -16402,8 +16483,8 @@
"name": "VulnerabilitiesCountByDayAndSeverityConnection",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
"isDeprecated": true,
"deprecationReason": "Use `vulnerabilitiesCountByDay`. Deprecated in 13.3"
},
{
"name": "vulnerabilityGrades",
@ -33982,9 +34063,90 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "vulnerabilitiesCountByDay",
"description": "Number of vulnerabilities per day for the projects on the current user's instance security dashboard",
"args": [
{
"name": "startDate",
"description": "First day for which to fetch vulnerability history",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ISO8601Date",
"ofType": null
}
},
"defaultValue": null
},
{
"name": "endDate",
"description": "Last day for which to fetch vulnerability history",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ISO8601Date",
"ofType": null
}
},
"defaultValue": null
},
{
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "before",
"description": "Returns the elements in the list that come before the specified cursor.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "first",
"description": "Returns the first _n_ elements from the list.",
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"defaultValue": null
},
{
"name": "last",
"description": "Returns the last _n_ elements from the list.",
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"defaultValue": null
}
],
"type": {
"kind": "OBJECT",
"name": "VulnerabilitiesCountByDayConnection",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "vulnerabilitiesCountByDayAndSeverity",
"description": "Number of vulnerabilities per severity level, per day, for the projects on the current user's instance security dashboard",
"description": "Number of vulnerabilities per severity level, per day, for the projects on the current user's instance security dashboard. Deprecated in 13.3: Use `vulnerabilitiesCountByDay`",
"args": [
{
"name": "startDate",
@ -34060,8 +34222,8 @@
"name": "VulnerabilitiesCountByDayAndSeverityConnection",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
"isDeprecated": true,
"deprecationReason": "Use `vulnerabilitiesCountByDay`. Deprecated in 13.3"
}
],
"inputFields": null,
@ -45482,6 +45644,163 @@
],
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "VulnerabilitiesCountByDay",
"description": "Represents the count of vulnerabilities by severity on a particular day",
"fields": [
{
"name": "critical",
"description": "Total number of vulnerabilities on a particular day with critical severity",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "date",
"description": "Date for the count",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ISO8601Date",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "high",
"description": "Total number of vulnerabilities on a particular day with high severity",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "info",
"description": "Total number of vulnerabilities on a particular day with info severity",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "low",
"description": "Total number of vulnerabilities on a particular day with low severity",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "medium",
"description": "Total number of vulnerabilities on a particular day with medium severity",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "total",
"description": "Total number of vulnerabilities on a particular day",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "unknown",
"description": "Total number of vulnerabilities on a particular day with unknown severity",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "VulnerabilitiesCountByDayAndSeverity",
@ -45649,6 +45968,118 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "VulnerabilitiesCountByDayConnection",
"description": "The connection type for VulnerabilitiesCountByDay.",
"fields": [
{
"name": "edges",
"description": "A list of edges.",
"args": [
],
"type": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "VulnerabilitiesCountByDayEdge",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "nodes",
"description": "A list of nodes.",
"args": [
],
"type": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "VulnerabilitiesCountByDay",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "pageInfo",
"description": "Information to aid in pagination.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "PageInfo",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "VulnerabilitiesCountByDayEdge",
"description": "An edge in a connection.",
"fields": [
{
"name": "cursor",
"description": "A cursor for use in pagination.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "node",
"description": "The item at the end of the edge.",
"args": [
],
"type": {
"kind": "OBJECT",
"name": "VulnerabilitiesCountByDay",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "Vulnerability",

View File

@ -2294,6 +2294,21 @@ Autogenerated return type of UpdateSnippet
| --- | ---- | ---------- |
| `createSnippet` | Boolean! | Indicates the user can perform `create_snippet` on this resource |
## VulnerabilitiesCountByDay
Represents the count of vulnerabilities by severity on a particular day
| Name | Type | Description |
| --- | ---- | ---------- |
| `critical` | Int! | Total number of vulnerabilities on a particular day with critical severity |
| `date` | ISO8601Date! | Date for the count |
| `high` | Int! | Total number of vulnerabilities on a particular day with high severity |
| `info` | Int! | Total number of vulnerabilities on a particular day with info severity |
| `low` | Int! | Total number of vulnerabilities on a particular day with low severity |
| `medium` | Int! | Total number of vulnerabilities on a particular day with medium severity |
| `total` | Int! | Total number of vulnerabilities on a particular day |
| `unknown` | Int! | Total number of vulnerabilities on a particular day with unknown severity |
## VulnerabilitiesCountByDayAndSeverity
Represents the number of vulnerabilities for a particular severity on a particular day

View File

@ -138,44 +138,12 @@ import { mapActions } from 'vuex';
### `mutations.js`
The mutations specify how the application state changes in response to actions sent to the store.
The only way to change state in a Vuex store should be by committing a mutation.
The only way to change state in a Vuex store is by committing a mutation.
**It's a good idea to think of the state before writing any code.**
Most mutations are committed from an action using `commit`. If you don't have any
asynchronous operations, you can call mutations from a component using the `mapMutations` helper.
Remember that actions only describe that something happened; they don't describe how the application state changes.
**Never commit a mutation directly from a component.**
Instead, you should create an action that will commit a mutation.
```javascript
import * as types from './mutation_types';
export default {
[types.REQUEST_USERS](state) {
state.isLoading = true;
},
[types.RECEIVE_USERS_SUCCESS](state, data) {
// Do any needed data transformation to the received payload here
state.users = data;
state.isLoading = false;
},
[types.RECEIVE_USERS_ERROR](state, error) {
state.isLoading = false;
},
[types.REQUEST_ADD_USER](state, user) {
state.isAddingUser = true;
},
[types.RECEIVE_ADD_USER_SUCCESS](state, user) {
state.isAddingUser = false;
state.users.push(user);
},
[types.REQUEST_ADD_USER_ERROR](state, error) {
state.isAddingUser = false;
state.errorAddingUser = error;
},
};
```
See the Vuex docs for examples of [committing mutations from components](https://vuex.vuejs.org/guide/mutations.html#committing-mutations-in-components).
#### Naming Pattern: `REQUEST` and `RECEIVE` namespaces
@ -448,29 +416,6 @@ export default {
</template>
```
### Vuex Gotchas
1. Do not call a mutation directly. Always use an action to commit a mutation. Doing so will keep consistency throughout the application. From Vuex docs:
> Why don't we just call store.commit('action') directly? Well, remember that mutations must be synchronous? Actions aren't. We can perform asynchronous operations inside an action.
```javascript
// component.vue
// bad
created() {
this.$store.commit('mutation');
}
// good
created() {
this.$store.dispatch('action');
}
```
1. Use mutation types instead of hardcoding strings. It is less error-prone.
1. The state will be accessible in all components descending from the one where the store is instantiated.
### Testing Vuex
#### Testing Vuex concerns

View File

@ -0,0 +1,54 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Backfill `relative_position` column in `design_management_designs` table
class BackfillDesignsRelativePosition
# Define the issue model
class Issue < ActiveRecord::Base
self.table_name = 'issues'
end
# Define the design model
class Design < ActiveRecord::Base
include RelativePositioning if defined?(RelativePositioning)
self.table_name = 'design_management_designs'
def self.relative_positioning_query_base(design)
where(issue_id: design.issue_id)
end
def self.relative_positioning_parent_column
:issue_id
end
def self.move_nulls_to_start(designs)
if defined?(super)
super(designs)
else
logger.error "BackfillDesignsRelativePosition failed because move_nulls_to_start is no longer included in the RelativePositioning concern"
end
end
end
def perform(issue_ids)
issue_ids.each do |issue_id|
migrate_issue(issue_id)
end
end
private
def migrate_issue(issue_id)
issue = Issue.find_by(id: issue_id)
return unless issue
designs = Design.where(issue_id: issue.id).order(:id)
return unless designs.any?
Design.move_nulls_to_start(designs)
end
end
end
end

View File

@ -16,6 +16,7 @@
# batch_count(::Clusters::Cluster.aws_installed.enabled, :cluster_id)
# batch_distinct_count(::Project, :creator_id)
# batch_distinct_count(::Project.with_active_services.service_desk_enabled.where(time_period), start: ::User.minimum(:id), finish: ::User.maximum(:id))
# batch_sum(User, :sign_in_count)
module Gitlab
module Database
module BatchCount
@ -27,6 +28,10 @@ module Gitlab
BatchCounter.new(relation, column: column).count(mode: :distinct, batch_size: batch_size, start: start, finish: finish)
end
def batch_sum(relation, column, batch_size: nil, start: nil, finish: nil)
BatchCounter.new(relation, column: nil, operation: :sum, operation_args: [column]).count(batch_size: batch_size, start: start, finish: finish)
end
class << self
include BatchCount
end
@ -35,6 +40,7 @@ module Gitlab
class BatchCounter
FALLBACK = -1
MIN_REQUIRED_BATCH_SIZE = 1_250
DEFAULT_SUM_BATCH_SIZE = 1_000
MAX_ALLOWED_LOOPS = 10_000
SLEEP_TIME_IN_SECONDS = 0.01 # 10 msec sleep
ALLOWED_MODES = [:itself, :distinct].freeze
@ -43,13 +49,16 @@ module Gitlab
DEFAULT_DISTINCT_BATCH_SIZE = 10_000
DEFAULT_BATCH_SIZE = 100_000
def initialize(relation, column: nil)
def initialize(relation, column: nil, operation: :count, operation_args: nil)
@relation = relation
@column = column || relation.primary_key
@operation = operation
@operation_args = operation_args
end
def unwanted_configuration?(finish, batch_size, start)
batch_size <= MIN_REQUIRED_BATCH_SIZE ||
(@operation == :count && batch_size <= MIN_REQUIRED_BATCH_SIZE) ||
(@operation == :sum && batch_size < DEFAULT_SUM_BATCH_SIZE) ||
(finish - start) / batch_size >= MAX_ALLOWED_LOOPS ||
start > finish
end
@ -60,7 +69,7 @@ module Gitlab
check_mode!(mode)
# non-distinct have better performance
batch_size ||= mode == :distinct ? DEFAULT_DISTINCT_BATCH_SIZE : DEFAULT_BATCH_SIZE
batch_size ||= batch_size_for_mode_and_operation(mode, @operation)
start = actual_start(start)
finish = actual_finish(finish)
@ -91,11 +100,17 @@ module Gitlab
def batch_fetch(start, finish, mode)
# rubocop:disable GitlabSecurity/PublicSend
@relation.select(@column).public_send(mode).where(between_condition(start, finish)).count
@relation.select(@column).public_send(mode).where(between_condition(start, finish)).send(@operation, *@operation_args)
end
private
def batch_size_for_mode_and_operation(mode, operation)
return DEFAULT_SUM_BATCH_SIZE if operation == :sum
mode == :distinct ? DEFAULT_DISTINCT_BATCH_SIZE : DEFAULT_BATCH_SIZE
end
def between_condition(start, finish)
return @column.between(start..(finish - 1)) if @column.is_a?(Arel::Attributes::Attribute)

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true
module Gitlab
module Metrics
module Dashboard
module Validator
DASHBOARD_SCHEMA_PATH = 'lib/gitlab/metrics/dashboard/validator/schemas/dashboard.json'.freeze
class << self
def validate(content, schema_path = DASHBOARD_SCHEMA_PATH, project: nil)
errors = _validate(content, schema_path, project: project)
errors.empty?
end
def validate!(content, schema_path = DASHBOARD_SCHEMA_PATH, project: nil)
errors = _validate(content, schema_path, project: project)
errors.empty? || raise(errors.first)
end
private
def _validate(content, schema_path, project)
client = Client.new(content, schema_path, project: project)
client.execute
end
end
end
end
end
end
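
A sketch of calling the validator with an already-parsed dashboard hash (the `dashboard.yml` path is hypothetical):

```ruby
content = YAML.safe_load(File.read('dashboard.yml')) # hypothetical input file

Gitlab::Metrics::Dashboard::Validator.validate(content)  # => true or false
Gitlab::Metrics::Dashboard::Validator.validate!(content) # raises the first error, e.g. Errors::SchemaValidationError
```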

View File

@ -0,0 +1,49 @@
# frozen_string_literal: true
module Gitlab
module Metrics
module Dashboard
module Validator
class Client
# @param content [Hash] Representing a raw, unprocessed
# dashboard object
# @param schema_path [String] Representing path to dashboard schema file
def initialize(content, schema_path, project: nil)
@content = content
@schema_path = schema_path
@project = project
end
def execute
errors = validate_against_schema
errors += post_schema_validator.validate
errors.compact
end
private
attr_reader :content, :schema_path, :project
def custom_formats
@custom_formats ||= CustomFormats.new
end
def post_schema_validator
@post_schema_validator ||= PostSchemaValidator.new(project: project, metric_ids: custom_formats.metric_ids_cache)
end
def schemer
@schemer ||= JSONSchemer.schema(Pathname.new(schema_path), formats: custom_formats.format_handlers)
end
def validate_against_schema
schemer.validate(content).map do |error|
Errors::SchemaValidationError.new(error)
end
end
end
end
end
end
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
module Gitlab
module Metrics
module Dashboard
module Validator
class CustomFormats
def format_handlers
# Key is custom JSON Schema format name. Value is a proc that takes data and schema and handles
# validations.
@format_handlers ||= {
"add_to_metric_id_cache" => ->(data, schema) { metric_ids_cache << data }
}
end
def metric_ids_cache
@metric_ids_cache ||= []
end
end
end
end
end
end

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
module Gitlab
module Metrics
module Dashboard
module Validator
module Errors
InvalidDashboardError = Class.new(StandardError)
class SchemaValidationError < InvalidDashboardError
def initialize(error = {})
if error.is_a?(Hash) && error.present?
data = error["data"]
data_pointer = error["data_pointer"]
schema = error["schema"]
schema_pointer = error["schema_pointer"]
msg = _("'%{data}' is invalid at '%{data_pointer}'. Should be '%{schema}' due to schema definition at '%{schema_pointer}'") %
{ data: data, data_pointer: data_pointer, schema: schema, schema_pointer: schema_pointer }
else
msg = "Dashboard failed schema validation"
end
super(msg)
end
end
class DuplicateMetricIds < InvalidDashboardError
def initialize
super(_("metric_id must be unique across a project"))
end
end
end
end
end
end
end

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
module Gitlab
module Metrics
module Dashboard
module Validator
class PostSchemaValidator
def initialize(project: nil, metric_ids: [])
@project = project
@metric_ids = metric_ids
end
def validate
[uniq_metric_ids_across_project].compact
end
private
attr_reader :project, :metric_ids
def uniq_metric_ids_across_project
# TODO: modify this method to check metric identifier uniqueness across project once we start
# recording dashboard_path https://gitlab.com/gitlab-org/gitlab/-/merge_requests/38237
Validator::Errors::DuplicateMetricIds.new if metric_ids.uniq!
end
end
end
end
end
end
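
The duplicate check relies on `Array#uniq!` returning `nil` when no elements were removed. A small sketch:

```ruby
Gitlab::Metrics::Dashboard::Validator::PostSchemaValidator
  .new(metric_ids: %w[metric_a1 metric_a1])
  .validate # => [#<...Errors::DuplicateMetricIds: metric_id must be unique across a project>]

Gitlab::Metrics::Dashboard::Validator::PostSchemaValidator
  .new(metric_ids: %w[metric_a1 metric_b1])
  .validate # => []
```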

View File

@ -800,6 +800,9 @@ msgstr ""
msgid "&lt;project name&gt;"
msgstr ""
msgid "'%{data}' is invalid at '%{data_pointer}'. Should be '%{schema}' due to schema definition at '%{schema_pointer}'"
msgstr ""
msgid "'%{level}' is not a valid visibility level"
msgstr ""
@ -16689,6 +16692,9 @@ msgstr ""
msgid "One or more of your dependency files are not supported, and the dependency list may be incomplete. Below is a list of supported file types."
msgstr ""
msgid "One or more of your personal access tokens has expired."
msgstr ""
msgid "One or more of your personal access tokens will expire in %{days_to_expire} days or less."
msgstr ""
@ -27563,10 +27569,10 @@ msgstr ""
msgid "You can apply your Trial to your Personal account or create a New Group."
msgstr ""
msgid "You can create a new one or check them in your %{pat_link_start}Personal Access Tokens%{pat_link_end} settings"
msgid "You can create a new one or check them in your %{pat_link_start}personal access tokens%{pat_link_end} settings"
msgstr ""
msgid "You can create a new one or check them in your Personal Access Tokens settings %{pat_link}"
msgid "You can create a new one or check them in your personal access tokens settings %{pat_link}"
msgstr ""
msgid "You can create new ones at your %{pat_link_start}Personal Access Tokens%{pat_link_end} settings"
@ -28127,6 +28133,9 @@ msgstr ""
msgid "Your password reset token has expired."
msgstr ""
msgid "Your personal access token has expired"
msgstr ""
msgid "Your profile"
msgstr ""
@ -28870,6 +28879,9 @@ msgstr[1] ""
msgid "merged %{timeAgo}"
msgstr ""
msgid "metric_id must be unique across a project"
msgstr ""
msgid "missing"
msgstr ""

View File

@ -129,7 +129,6 @@
"stickyfilljs": "^2.1.0",
"string-hash": "1.1.3",
"style-loader": "^1.1.3",
"svg4everybody": "^2.1.9",
"swagger-ui-dist": "^3.26.2",
"three": "^0.84.0",
"three-orbit-controls": "^82.1.0",

View File

@ -16,10 +16,10 @@ gem 'faker', '~> 1.6', '>= 1.6.6'
gem 'knapsack', '~> 1.17'
gem 'parallel_tests', '~> 2.29'
gem 'rotp', '~> 3.1.0'
gem 'timecop', '~> 0.9.1'
group :test do
group :development do
gem 'pry-byebug', '~> 3.5.1', platform: :mri
gem "ruby-debug-ide", "~> 0.7.0"
gem "debase", "~> 0.2.4.1"
gem 'timecop', '~> 0.9.1'
end

View File

@ -0,0 +1,67 @@
dashboard: 'Test Dashboard'
priority: 1
links:
- title: Link 1
url: https://gitlab.com
- title: Link 2
url: https://docs.gitlab.com
templating:
variables:
text_variable_full_syntax:
label: 'Variable 1'
type: text
options:
default_value: 'default'
text_variable_simple_syntax: 'default value'
custom_variable_simple_syntax: ['value1', 'value2', 'value3']
custom_variable_full_syntax:
label: 'Variable 2'
type: custom
options:
values:
- value: 'value option 1'
text: 'Option 1'
- value: 'value_option_2'
text: 'Option 2'
default: true
metric_label_values_variable:
label: 'Variable 3'
type: metric_label_values
options:
series_selector: 'backend:haproxy_backend_availability:ratio{env="{{env}}"}'
label: 'backend'
panel_groups:
- group: Group A
priority: 1
panels:
- title: "Super Chart A1"
type: "area-chart"
y_label: "y_label"
weight: 1
max_value: 1
metrics:
- id: metric_a1
query_range: 'query'
unit: unit
label: Legend Label
- title: "Super Chart A2"
type: "area-chart"
y_label: "y_label"
weight: 2
metrics:
- id: metric_a1
query_range: 'query'
label: Legend Label
unit: unit
- group: Group B
priority: 10
panels:
- title: "Super Chart B"
type: "area-chart"
y_label: "y_label"
weight: 1
metrics:
- id: metric_a1
query_range: 'query'
unit: unit
label: Legend Label

View File

@ -0,0 +1,67 @@
dashboard: 'Test Dashboard'
priority: 1
links:
- title: Link 1
url: https://gitlab.com
- title: Link 2
url: https://docs.gitlab.com
templating:
variables:
text_variable_full_syntax:
label: 'Variable 1'
type: text
options:
default_value: 'default'
text_variable_simple_syntax: 'default value'
custom_variable_simple_syntax: ['value1', 'value2', 'value3']
custom_variable_full_syntax:
label: 'Variable 2'
type: custom
options:
values:
- value: 'value option 1'
text: 'Option 1'
- value: 'value_option_2'
text: 'Option 2'
default: true
metric_label_values_variable:
label: 'Variable 3'
type: metric_label_values
options:
series_selector: 'backend:haproxy_backend_availability:ratio{env="{{env}}"}'
label: 'backend'
panel_groups:
- group: Group A
priority: 1
panels:
- title: "Super Chart A1"
type: "area-chart"
y_label: "y_label"
weight: this_should_be_a_int
max_value: 1
metrics:
- id: metric_a1
query_range: 'query'
unit: unit
label: Legend Label
- title: "Super Chart A2"
type: "area-chart"
y_label: "y_label"
weight: 2
metrics:
- id: metric_a2
query_range: 'query'
label: Legend Label
unit: unit
- group: Group B
priority: 10
panels:
- title: "Super Chart B"
type: "area-chart"
y_label: "y_label"
weight: 1
metrics:
- id: metric_b
query_range: 'query'
unit: unit
label: Legend Label

View File

@ -15,11 +15,11 @@ import {
returnUrl,
} from '../mock_data';
jest.mock('~/static_site_editor/services/formatter', () => jest.fn(str => `${str} formatted`));
jest.mock('~/static_site_editor/services/formatter', () => jest.fn(str => `${str} format-pass`));
describe('~/static_site_editor/components/edit_area.vue', () => {
let wrapper;
const formattedContent = `${content} formatted`;
const formattedBody = `${body} format-pass`;
const savingChanges = true;
const newBody = `new ${body}`;
@ -53,9 +53,9 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
expect(findEditHeader().props('title')).toBe(title);
});
it('renders rich content editor', () => {
it('renders rich content editor with a format pass', () => {
expect(findRichContentEditor().exists()).toBe(true);
expect(findRichContentEditor().props('content')).toBe(body);
expect(findRichContentEditor().props('content')).toBe(formattedBody);
});
it('renders publish toolbar', () => {
@ -97,7 +97,7 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
});
it('sets publish toolbar as not saveable when content changes are rollback', () => {
findRichContentEditor().vm.$emit('input', body);
findRichContentEditor().vm.$emit('input', formattedBody);
return wrapper.vm.$nextTick().then(() => {
expect(findPublishToolbar().props('saveable')).toBe(false);
@ -124,8 +124,8 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
it.each`
initialMode | targetMode | resetValue
${EDITOR_TYPES.wysiwyg} | ${EDITOR_TYPES.markdown} | ${formattedContent}
${EDITOR_TYPES.markdown} | ${EDITOR_TYPES.wysiwyg} | ${`${body} formatted`}
${EDITOR_TYPES.wysiwyg} | ${EDITOR_TYPES.markdown} | ${`${content} format-pass format-pass`}
${EDITOR_TYPES.markdown} | ${EDITOR_TYPES.wysiwyg} | ${`${body} format-pass format-pass`}
`(
'sets editorMode from $initialMode to $targetMode',
({ initialMode, targetMode, resetValue }) => {
@ -144,7 +144,7 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
findRichContentEditor().vm.$emit('modeChange', EDITOR_TYPES.markdown);
expect(resetInitialValue).toHaveBeenCalledWith(formattedContent);
expect(resetInitialValue).toHaveBeenCalledWith(`${content} format-pass format-pass`);
});
});
@ -152,7 +152,7 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
it('should format the content', () => {
findPublishToolbar().vm.$emit('submit', content);
expect(wrapper.emitted('submit')[0][0].content).toBe(formattedContent);
expect(wrapper.emitted('submit')[0][0].content).toBe(`${content} format-pass format-pass`);
});
});
});

View File

@ -0,0 +1,46 @@
/* eslint-disable no-useless-escape */
import templater from '~/static_site_editor/services/templater';
describe('templater', () => {
const source = `Some text
<% some erb code %>
Some more text
<% if apptype.maturity && (apptype.maturity != "planned") %>
<% maturity = "This application type is at the \"#{apptype.maturity}\" level of maturity." %>
<% end %>
With even text with indented code above.
`;
const sourceTemplated = `Some text
\`\`\` sse
<% some erb code %>
\`\`\`
Some more text
\`\`\` sse
<% if apptype.maturity && (apptype.maturity != "planned") %>
<% maturity = "This application type is at the \"#{apptype.maturity}\" level of maturity." %>
<% end %>
\`\`\`
With even text with indented code above.
`;
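// `wrap` is expected to fence ERB (`<% ... %>`) regions in ``` sse code blocks
// and `unwrap` to strip those fences again; the second and fourth rows of the
// table below check that each operation is idempotent on its own output.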
it.each`
fn | initial | target
${'wrap'} | ${source} | ${sourceTemplated}
${'wrap'} | ${sourceTemplated} | ${sourceTemplated}
${'unwrap'} | ${sourceTemplated} | ${source}
${'unwrap'} | ${source} | ${source}
`(
'wraps $initial in a templated sse codeblock if $fn is wrap, unwraps otherwise',
({ fn, initial, target }) => {
expect(templater[fn](initial)).toMatch(target);
},
);
});

View File

@ -0,0 +1,51 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillDesignsRelativePosition do
let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab') }
let(:project) { table(:projects).create!(namespace_id: namespace.id) }
let(:issues) { table(:issues) }
let(:designs) { table(:design_management_designs) }
before do
issues.create!(id: 1, project_id: project.id)
issues.create!(id: 2, project_id: project.id)
issues.create!(id: 3, project_id: project.id)
issues.create!(id: 4, project_id: project.id)
designs.create!(id: 1, issue_id: 1, project_id: project.id, filename: 'design1.jpg')
designs.create!(id: 2, issue_id: 1, project_id: project.id, filename: 'design2.jpg')
designs.create!(id: 3, issue_id: 2, project_id: project.id, filename: 'design3.jpg')
designs.create!(id: 4, issue_id: 2, project_id: project.id, filename: 'design4.jpg')
designs.create!(id: 5, issue_id: 3, project_id: project.id, filename: 'design5.jpg')
end
describe '#perform' do
it 'backfills the position for the designs in each issue' do
expect(described_class::Design).to receive(:move_nulls_to_start).with(
a_collection_containing_exactly(
an_object_having_attributes(id: 1, issue_id: 1),
an_object_having_attributes(id: 2, issue_id: 1)
)
).ordered.and_call_original
expect(described_class::Design).to receive(:move_nulls_to_start).with(
a_collection_containing_exactly(
an_object_having_attributes(id: 3, issue_id: 2),
an_object_having_attributes(id: 4, issue_id: 2)
)
).ordered.and_call_original
# We only expect calls to `move_nulls_to_start` with issues 1 and 2:
# - Issue 3 should be skipped because we're not passing its ID
# - Issue 4 should be skipped because it doesn't have any designs
# - Issue 0 should be skipped because it doesn't exist
subject.perform([1, 2, 4, 0])
expect(designs.find(1).relative_position).to be < designs.find(2).relative_position
expect(designs.find(3).relative_position).to be < designs.find(4).relative_position
expect(designs.find(5).relative_position).to be_nil
end
end
end

View File

@ -13,11 +13,34 @@ RSpec.describe Gitlab::Database::BatchCount do
let(:another_user) { create(:user) }
before do
create_list(:issue, 3, author: user )
create_list(:issue, 2, author: another_user )
create_list(:issue, 3, author: user)
create_list(:issue, 2, author: another_user)
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(in_transaction)
end
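# The shared examples below assume the including context supplies `args` and
# `default_batch_size` via `let`; `fallback` and (unless overridden)
# `small_batch_size` are presumably defined earlier in this file.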
shared_examples 'disallowed configurations' do |method|
it 'returns fallback if start is bigger than finish' do
expect(described_class.public_send(method, *args, start: 1, finish: 0)).to eq(fallback)
end
it 'returns fallback if loops more than allowed' do
large_finish = Gitlab::Database::BatchCounter::MAX_ALLOWED_LOOPS * default_batch_size + 1
expect(described_class.public_send(method, *args, start: 1, finish: large_finish)).to eq(fallback)
end
it 'returns fallback if batch size is less than min required' do
expect(described_class.public_send(method, *args, batch_size: small_batch_size)).to eq(fallback)
end
end
shared_examples 'when a transaction is open' do
let(:in_transaction) { true }
it 'raises an error' do
expect { subject }.to raise_error('BatchCount can not be run inside a transaction')
end
end
describe '#batch_count' do
it 'counts table' do
expect(described_class.batch_count(model)).to eq(5)
@ -53,38 +76,32 @@ RSpec.describe Gitlab::Database::BatchCount do
[1, 2, 4, 5, 6].each { |i| expect(described_class.batch_count(model, batch_size: i)).to eq(5) }
end
it 'will raise an error if distinct count is requested' do
expect do
described_class.batch_count(model.distinct(column))
end.to raise_error 'Use distinct count for optimized distinct counting'
end
context 'in a transaction' do
let(:in_transaction) { true }
it 'cannot count' do
expect do
described_class.batch_count(model)
end.to raise_error 'BatchCount can not be run inside a transaction'
end
end
it 'counts with a start and finish' do
expect(described_class.batch_count(model, start: model.minimum(:id), finish: model.maximum(:id))).to eq(5)
end
context 'disallowed configurations' do
it 'returns fallback if start is bigger than finish' do
expect(described_class.batch_count(model, start: 1, finish: 0)).to eq(fallback)
it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE}" do
min_id = model.minimum(:id)
expect_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter|
expect(batch_counter).to receive(:batch_fetch).with(min_id, Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE + min_id, :itself).once.and_call_original
end
it 'returns fallback if loops more than allowed' do
large_finish = Gitlab::Database::BatchCounter::MAX_ALLOWED_LOOPS * Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE + 1
expect(described_class.batch_count(model, start: 1, finish: large_finish)).to eq(fallback)
described_class.batch_count(model)
end
it_behaves_like 'when a transaction is open' do
subject { described_class.batch_count(model) }
end
context 'disallowed_configurations' do
include_examples 'disallowed configurations', :batch_count do
let(:args) { [Issue] }
let(:default_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE }
end
it 'returns fallback if batch size is less than min required' do
expect(described_class.batch_count(model, batch_size: small_batch_size)).to eq(fallback)
it 'raises an error if distinct count is requested' do
expect { described_class.batch_count(model.distinct(column)) }.to raise_error 'Use distinct count for optimized distinct counting'
end
end
end
@ -128,18 +145,24 @@ RSpec.describe Gitlab::Database::BatchCount do
expect(described_class.batch_distinct_count(model, column, start: User.minimum(:id), finish: User.maximum(:id))).to eq(2)
end
it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE}" do
min_id = model.minimum(:id)
expect_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter|
expect(batch_counter).to receive(:batch_fetch).with(min_id, Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE + min_id, :distinct).once.and_call_original
end
described_class.batch_distinct_count(model)
end
it_behaves_like 'when a transaction is open' do
subject { described_class.batch_distinct_count(model, column) }
end
context 'disallowed configurations' do
it 'returns fallback if start is bigger than finish' do
expect(described_class.batch_distinct_count(model, column, start: 1, finish: 0)).to eq(fallback)
end
it 'returns fallback if loops more than allowed' do
large_finish = Gitlab::Database::BatchCounter::MAX_ALLOWED_LOOPS * Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE + 1
expect(described_class.batch_distinct_count(model, column, start: 1, finish: large_finish)).to eq(fallback)
end
it 'returns fallback if batch size is less than min required' do
expect(described_class.batch_distinct_count(model, column, batch_size: small_batch_size)).to eq(fallback)
include_examples 'disallowed configurations', :batch_distinct_count do
let(:args) { [model, column] }
let(:default_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE }
end
it 'will raise an error if distinct count with the :id column is requested' do
@ -149,4 +172,55 @@ RSpec.describe Gitlab::Database::BatchCount do
end
end
end
describe '#batch_sum' do
let(:column) { :weight }
before do
Issue.first.update_attribute(column, 3)
Issue.last.update_attribute(column, 4)
end
it 'returns the sum of values in the given column' do
expect(described_class.batch_sum(model, column)).to eq(7)
end
it 'works when given an Arel column' do
expect(described_class.batch_sum(model, model.arel_table[column])).to eq(7)
end
it 'works with a batch size of 50K' do
expect(described_class.batch_sum(model, column, batch_size: 50_000)).to eq(7)
end
it 'works with start and finish provided' do
expect(described_class.batch_sum(model, column, start: model.minimum(:id), finish: model.maximum(:id))).to eq(7)
end
it 'returns the same result regardless of batch size' do
stub_const('Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE', 0)
(1..(model.count + 1)).each { |i| expect(described_class.batch_sum(model, column, batch_size: i)).to eq(7) }
end
it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE}" do
min_id = model.minimum(:id)
expect_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter|
expect(batch_counter).to receive(:batch_fetch).with(min_id, Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE + min_id, :itself).once.and_call_original
end
described_class.batch_sum(model, column)
end
it_behaves_like 'when a transaction is open' do
subject { described_class.batch_sum(model, column) }
end
it_behaves_like 'disallowed configurations', :batch_sum do
let(:args) { [model, column] }
let(:default_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE }
let(:small_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE - 1 }
end
end
end

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Metrics::Dashboard::Validator::Client do
include MetricsDashboardHelpers
let_it_be(:schema_path) { 'lib/gitlab/metrics/dashboard/validator/schemas/dashboard.json' }
subject { described_class.new(dashboard, schema_path) }
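# The client is expected to validate `dashboard` against the JSON schema at
# `schema_path` (via json_schemer) and wrap each raw error hash in a
# SchemaValidationError; see the Errors spec below for the message format.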
describe '#execute' do
context 'with no validation errors' do
let(:dashboard) { load_sample_dashboard }
it 'returns empty array' do
expect(subject.execute).to eq([])
end
end
context 'with validation errors' do
let(:dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/invalid_dashboard.yml')) }
it 'returns array of error objects' do
expect(subject.execute).to all(be_a(Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError))
end
end
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Metrics::Dashboard::Validator::CustomFormats do
describe '#format_handlers' do
describe 'add_to_metric_id_cache' do
it 'adds data to metric id cache' do
subject.format_handlers['add_to_metric_id_cache'].call('metric_id', '_schema')
expect(subject.metric_ids_cache).to eq(["metric_id"])
end
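# `add_to_metric_id_cache` is presumably registered as a json_schemer custom
# format handler, so every metric_id encountered during schema validation is
# collected for the uniqueness check done by PostSchemaValidator.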
end
end
end

View File

@ -0,0 +1,38 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Metrics::Dashboard::Validator::Errors do
describe Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError do
context 'valid error hash from jsonschemer' do
let(:error_hash) do
{
'data' => 'data',
'data_pointer' => 'data_pointer',
'schema' => 'schema',
'schema_pointer' => 'schema_pointer'
}
end
it 'formats message' do
expect(described_class.new(error_hash).message).to eq(
"'data' is invalid at 'data_pointer'. Should be 'schema' due to schema definition at 'schema_pointer'"
)
end
end
context 'empty error hash' do
let(:error_hash) { {} }
it 'uses default error message' do
expect(described_class.new(error_hash).message).to eq('Dashboard failed schema validation')
end
end
end
describe Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds do
it 'has custom error message' do
expect(described_class.new.message).to eq('metric_id must be unique across a project')
end
end
end

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Metrics::Dashboard::Validator::PostSchemaValidator do
describe '#validate' do
context 'unique metric ids' do
it 'returns blank array' do
expect(described_class.new(metric_ids: [1, 2, 3]).validate).to eq([])
end
end
context 'duplicate metric ids' do
it 'returns duplicate metric ids error' do
expect(described_class.new(metric_ids: [1, 1]).validate)
.to eq([Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds])
end
end
end
end

View File

@ -0,0 +1,64 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Metrics::Dashboard::Validator do
include MetricsDashboardHelpers
let_it_be(:valid_dashboard) { load_sample_dashboard }
let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/invalid_dashboard.yml')) }
let_it_be(:duplicate_id_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/duplicate_id_dashboard.yml')) }
describe '#validate' do
context 'valid dashboard' do
it 'returns true' do
expect(described_class.validate(valid_dashboard)).to be true
end
end
context 'invalid dashboard' do
context 'invalid schema' do
it 'returns false' do
expect(described_class.validate(invalid_dashboard)).to be false
end
end
context 'duplicate metric ids' do
context 'with no project given' do
it 'checks against given dashboard and returns false' do
expect(described_class.validate(duplicate_id_dashboard)).to be false
end
end
end
end
end
describe '#validate!' do
context 'valid dashboard' do
it 'returns true' do
expect(described_class.validate!(valid_dashboard)).to be true
end
end
context 'invalid dashboard' do
context 'invalid schema' do
it 'raises error' do
expect { described_class.validate!(invalid_dashboard) }
.to raise_error(Gitlab::Metrics::Dashboard::Validator::Errors::InvalidDashboardError,
"'this_should_be_a_int' is invalid at '/panel_groups/0/panels/0/weight'."\
" Should be '{\"type\"=>\"number\"}' due to schema definition at '/properties/weight'")
end
end
context 'duplicate metric ids' do
context 'with no project given' do
it 'checks against given dashboard and raises error' do
expect { described_class.validate!(duplicate_id_dashboard) }
.to raise_error(Gitlab::Metrics::Dashboard::Validator::Errors::InvalidDashboardError,
"metric_id must be unique across a project")
end
end
end
end
end
end

View File

@ -13,8 +13,14 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
describe '.uncached_data' do
describe '.usage_activity_by_stage' do
it 'includes usage_activity_by_stage data' do
expect(described_class.uncached_data).to include(:usage_activity_by_stage)
expect(described_class.uncached_data).to include(:usage_activity_by_stage_monthly)
uncached_data = described_class.uncached_data
expect(uncached_data).to include(:usage_activity_by_stage)
expect(uncached_data).to include(:usage_activity_by_stage_monthly)
expect(uncached_data[:usage_activity_by_stage])
.to include(:configure, :create, :manage, :monitor, :plan, :release, :verify)
expect(uncached_data[:usage_activity_by_stage_monthly])
.to include(:configure, :create, :manage, :monitor, :plan, :release, :verify)
end
it 'clears memoized values' do
@ -30,264 +36,13 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
described_class.uncached_data
end
context 'for configure' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
cluster = create(:cluster, user: user)
create(:clusters_applications_cert_manager, :installed, cluster: cluster)
create(:clusters_applications_helm, :installed, cluster: cluster)
create(:clusters_applications_ingress, :installed, cluster: cluster)
create(:clusters_applications_knative, :installed, cluster: cluster)
create(:cluster, :disabled, user: user)
create(:cluster_provider_gcp, :created)
create(:cluster_provider_aws, :created)
create(:cluster_platform_kubernetes)
create(:cluster, :group, :disabled, user: user)
create(:cluster, :group, user: user)
create(:cluster, :instance, :disabled, :production_environment)
create(:cluster, :instance, :production_environment)
create(:cluster, :management_project)
end
it 'merge_requests_users is included only in monthly counters' do
uncached_data = described_class.uncached_data
expect(described_class.uncached_data[:usage_activity_by_stage][:configure]).to include(
clusters_applications_cert_managers: 2,
clusters_applications_helm: 2,
clusters_applications_ingress: 2,
clusters_applications_knative: 2,
clusters_management_project: 2,
clusters_disabled: 4,
clusters_enabled: 12,
clusters_platforms_gke: 2,
clusters_platforms_eks: 2,
clusters_platforms_user: 2,
instance_clusters_disabled: 2,
instance_clusters_enabled: 2,
group_clusters_disabled: 2,
group_clusters_enabled: 2,
project_clusters_disabled: 2,
project_clusters_enabled: 10
)
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:configure]).to include(
clusters_applications_cert_managers: 1,
clusters_applications_helm: 1,
clusters_applications_ingress: 1,
clusters_applications_knative: 1,
clusters_management_project: 1,
clusters_disabled: 2,
clusters_enabled: 6,
clusters_platforms_gke: 1,
clusters_platforms_eks: 1,
clusters_platforms_user: 1,
instance_clusters_disabled: 1,
instance_clusters_enabled: 1,
group_clusters_disabled: 1,
group_clusters_enabled: 1,
project_clusters_disabled: 1,
project_clusters_enabled: 5
)
end
end
context 'for create' do
it 'include usage_activity_by_stage data' do
expect(described_class.uncached_data[:usage_activity_by_stage][:create])
.not_to include(
:merge_requests_users
)
end
it 'includes monthly usage_activity_by_stage data' do
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:create])
.to include(
:merge_requests_users
)
end
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
project = create(:project, :repository_private,
:test_repo, :remote_mirror, creator: user)
create(:merge_request, source_project: project)
create(:deploy_key, user: user)
create(:key, user: user)
create(:project, creator: user, disable_overriding_approvers_per_merge_request: true)
create(:project, creator: user, disable_overriding_approvers_per_merge_request: false)
create(:remote_mirror, project: project)
create(:snippet, author: user)
end
expect(described_class.uncached_data[:usage_activity_by_stage][:create]).to include(
deploy_keys: 2,
keys: 2,
merge_requests: 2,
projects_with_disable_overriding_approvers_per_merge_request: 2,
projects_without_disable_overriding_approvers_per_merge_request: 4,
remote_mirrors: 2,
snippets: 2
)
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:create]).to include(
deploy_keys: 1,
keys: 1,
merge_requests: 1,
projects_with_disable_overriding_approvers_per_merge_request: 1,
projects_without_disable_overriding_approvers_per_merge_request: 2,
remote_mirrors: 1,
snippets: 1
)
end
end
context 'for manage' do
it 'includes accurate usage_activity_by_stage data' do
stub_config(
omniauth:
{ providers: omniauth_providers }
)
for_defined_days_back do
user = create(:user)
create(:event, author: user)
create(:group_member, user: user)
end
expect(described_class.uncached_data[:usage_activity_by_stage][:manage]).to include(
events: 2,
groups: 2,
users_created: 6,
omniauth_providers: ['google_oauth2']
)
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:manage]).to include(
events: 1,
groups: 1,
users_created: 4,
omniauth_providers: ['google_oauth2']
)
end
def omniauth_providers
[
OpenStruct.new(name: 'google_oauth2'),
OpenStruct.new(name: 'ldapmain'),
OpenStruct.new(name: 'group_saml')
]
end
end
context 'for monitor' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user, dashboard: 'operations')
cluster = create(:cluster, user: user)
create(:project, creator: user)
create(:clusters_applications_prometheus, :installed, cluster: cluster)
end
expect(described_class.uncached_data[:usage_activity_by_stage][:monitor]).to include(
clusters: 2,
clusters_applications_prometheus: 2,
operations_dashboard_default_dashboard: 2
)
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:monitor]).to include(
clusters: 1,
clusters_applications_prometheus: 1,
operations_dashboard_default_dashboard: 1
)
end
end
context 'for plan' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
project = create(:project, creator: user)
issue = create(:issue, project: project, author: user)
create(:issue, project: project, author: User.support_bot)
create(:note, project: project, noteable: issue, author: user)
create(:todo, project: project, target: issue, author: user)
end
expect(described_class.uncached_data[:usage_activity_by_stage][:plan]).to include(
issues: 3,
notes: 2,
projects: 2,
todos: 2,
service_desk_enabled_projects: 2,
service_desk_issues: 2
)
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:plan]).to include(
issues: 2,
notes: 1,
projects: 1,
todos: 1,
service_desk_enabled_projects: 1,
service_desk_issues: 1
)
end
end
context 'for release' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
create(:deployment, :failed, user: user)
create(:release, author: user)
create(:deployment, :success, user: user)
end
expect(described_class.uncached_data[:usage_activity_by_stage][:release]).to include(
deployments: 2,
failed_deployments: 2,
releases: 2,
successful_deployments: 2
)
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:release]).to include(
deployments: 1,
failed_deployments: 1,
releases: 1,
successful_deployments: 1
)
end
end
context 'for verify' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
create(:ci_build, user: user)
create(:ci_empty_pipeline, source: :external, user: user)
create(:ci_empty_pipeline, user: user)
create(:ci_pipeline, :auto_devops_source, user: user)
create(:ci_pipeline, :repository_source, user: user)
create(:ci_pipeline_schedule, owner: user)
create(:ci_trigger, owner: user)
create(:clusters_applications_runner, :installed)
end
expect(described_class.uncached_data[:usage_activity_by_stage][:verify]).to include(
ci_builds: 2,
ci_external_pipelines: 2,
ci_internal_pipelines: 2,
ci_pipeline_config_auto_devops: 2,
ci_pipeline_config_repository: 2,
ci_pipeline_schedules: 2,
ci_pipelines: 2,
ci_triggers: 2,
clusters_applications_runner: 2
)
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:verify]).to include(
ci_builds: 1,
ci_external_pipelines: 1,
ci_internal_pipelines: 1,
ci_pipeline_config_auto_devops: 1,
ci_pipeline_config_repository: 1,
ci_pipeline_schedules: 1,
ci_pipelines: 1,
ci_triggers: 1,
clusters_applications_runner: 1
)
end
expect(uncached_data[:usage_activity_by_stage][:create])
.not_to include(:merge_requests_users)
expect(uncached_data[:usage_activity_by_stage_monthly][:create])
.to include(:merge_requests_users)
end
end
@ -301,6 +56,252 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
describe '.usage_activity_by_stage_configure' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
cluster = create(:cluster, user: user)
create(:clusters_applications_cert_manager, :installed, cluster: cluster)
create(:clusters_applications_helm, :installed, cluster: cluster)
create(:clusters_applications_ingress, :installed, cluster: cluster)
create(:clusters_applications_knative, :installed, cluster: cluster)
create(:cluster, :disabled, user: user)
create(:cluster_provider_gcp, :created)
create(:cluster_provider_aws, :created)
create(:cluster_platform_kubernetes)
create(:cluster, :group, :disabled, user: user)
create(:cluster, :group, user: user)
create(:cluster, :instance, :disabled, :production_environment)
create(:cluster, :instance, :production_environment)
create(:cluster, :management_project)
end
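# `for_defined_days_back` appears to create these records twice, once outside
# and once inside the 28-day window, so the all-time counts below are exactly
# double the monthly ones.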
expect(described_class.usage_activity_by_stage_configure({})).to include(
clusters_applications_cert_managers: 2,
clusters_applications_helm: 2,
clusters_applications_ingress: 2,
clusters_applications_knative: 2,
clusters_management_project: 2,
clusters_disabled: 4,
clusters_enabled: 12,
clusters_platforms_gke: 2,
clusters_platforms_eks: 2,
clusters_platforms_user: 2,
instance_clusters_disabled: 2,
instance_clusters_enabled: 2,
group_clusters_disabled: 2,
group_clusters_enabled: 2,
project_clusters_disabled: 2,
project_clusters_enabled: 10
)
expect(described_class.usage_activity_by_stage_configure(described_class.last_28_days_time_period)).to include(
clusters_applications_cert_managers: 1,
clusters_applications_helm: 1,
clusters_applications_ingress: 1,
clusters_applications_knative: 1,
clusters_management_project: 1,
clusters_disabled: 2,
clusters_enabled: 6,
clusters_platforms_gke: 1,
clusters_platforms_eks: 1,
clusters_platforms_user: 1,
instance_clusters_disabled: 1,
instance_clusters_enabled: 1,
group_clusters_disabled: 1,
group_clusters_enabled: 1,
project_clusters_disabled: 1,
project_clusters_enabled: 5
)
end
end
describe 'usage_activity_by_stage_create' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
project = create(:project, :repository_private,
:test_repo, :remote_mirror, creator: user)
create(:merge_request, source_project: project)
create(:deploy_key, user: user)
create(:key, user: user)
create(:project, creator: user, disable_overriding_approvers_per_merge_request: true)
create(:project, creator: user, disable_overriding_approvers_per_merge_request: false)
create(:remote_mirror, project: project)
create(:snippet, author: user)
end
expect(described_class.usage_activity_by_stage_create({})).to include(
deploy_keys: 2,
keys: 2,
merge_requests: 2,
projects_with_disable_overriding_approvers_per_merge_request: 2,
projects_without_disable_overriding_approvers_per_merge_request: 4,
remote_mirrors: 2,
snippets: 2
)
expect(described_class.usage_activity_by_stage_create(described_class.last_28_days_time_period)).to include(
deploy_keys: 1,
keys: 1,
merge_requests: 1,
projects_with_disable_overriding_approvers_per_merge_request: 1,
projects_without_disable_overriding_approvers_per_merge_request: 2,
remote_mirrors: 1,
snippets: 1
)
end
end
describe 'usage_activity_by_stage_manage' do
it 'includes accurate usage_activity_by_stage data' do
stub_config(
omniauth:
{ providers: omniauth_providers }
)
for_defined_days_back do
user = create(:user)
create(:event, author: user)
create(:group_member, user: user)
end
expect(described_class.usage_activity_by_stage_manage({})).to include(
events: 2,
groups: 2,
users_created: 4,
omniauth_providers: ['google_oauth2']
)
expect(described_class.usage_activity_by_stage_manage(described_class.last_28_days_time_period)).to include(
events: 1,
groups: 1,
users_created: 2,
omniauth_providers: ['google_oauth2']
)
end
def omniauth_providers
[
OpenStruct.new(name: 'google_oauth2'),
OpenStruct.new(name: 'ldapmain'),
OpenStruct.new(name: 'group_saml')
]
end
end
describe 'usage_activity_by_stage_monitor' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user, dashboard: 'operations')
cluster = create(:cluster, user: user)
create(:project, creator: user)
create(:clusters_applications_prometheus, :installed, cluster: cluster)
end
expect(described_class.usage_activity_by_stage_monitor({})).to include(
clusters: 2,
clusters_applications_prometheus: 2,
operations_dashboard_default_dashboard: 2
)
expect(described_class.usage_activity_by_stage_monitor(described_class.last_28_days_time_period)).to include(
clusters: 1,
clusters_applications_prometheus: 1,
operations_dashboard_default_dashboard: 1
)
end
end
describe 'usage_activity_by_stage_plan' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
project = create(:project, creator: user)
issue = create(:issue, project: project, author: user)
create(:issue, project: project, author: User.support_bot)
create(:note, project: project, noteable: issue, author: user)
create(:todo, project: project, target: issue, author: user)
end
expect(described_class.usage_activity_by_stage_plan({})).to include(
issues: 3,
notes: 2,
projects: 2,
todos: 2,
service_desk_enabled_projects: 2,
service_desk_issues: 2
)
expect(described_class.usage_activity_by_stage_plan(described_class.last_28_days_time_period)).to include(
issues: 2,
notes: 1,
projects: 1,
todos: 1,
service_desk_enabled_projects: 1,
service_desk_issues: 1
)
end
end
describe 'usage_activity_by_stage_release' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
create(:deployment, :failed, user: user)
create(:release, author: user)
create(:deployment, :success, user: user)
end
expect(described_class.usage_activity_by_stage_release({})).to include(
deployments: 2,
failed_deployments: 2,
releases: 2,
successful_deployments: 2
)
expect(described_class.usage_activity_by_stage_release(described_class.last_28_days_time_period)).to include(
deployments: 1,
failed_deployments: 1,
releases: 1,
successful_deployments: 1
)
end
end
describe 'usage_activity_by_stage_verify' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
create(:ci_build, user: user)
create(:ci_empty_pipeline, source: :external, user: user)
create(:ci_empty_pipeline, user: user)
create(:ci_pipeline, :auto_devops_source, user: user)
create(:ci_pipeline, :repository_source, user: user)
create(:ci_pipeline_schedule, owner: user)
create(:ci_trigger, owner: user)
create(:clusters_applications_runner, :installed)
end
expect(described_class.usage_activity_by_stage_verify({})).to include(
ci_builds: 2,
ci_external_pipelines: 2,
ci_internal_pipelines: 2,
ci_pipeline_config_auto_devops: 2,
ci_pipeline_config_repository: 2,
ci_pipeline_schedules: 2,
ci_pipelines: 2,
ci_triggers: 2,
clusters_applications_runner: 2
)
expect(described_class.usage_activity_by_stage_verify(described_class.last_28_days_time_period)).to include(
ci_builds: 1,
ci_external_pipelines: 1,
ci_internal_pipelines: 1,
ci_pipeline_config_auto_devops: 1,
ci_pipeline_config_repository: 1,
ci_pipeline_schedules: 1,
ci_pipelines: 1,
ci_triggers: 1,
clusters_applications_runner: 1
)
end
end
describe '.data' do
let!(:ud) { build(:usage_data) }

View File

@ -157,6 +157,56 @@ RSpec.describe Emails::Profile do
end
end
describe 'user personal access token has expired' do
let_it_be(:user) { create(:user) }
context 'when valid' do
subject { Notify.access_token_expired_email(user) }
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like 'a user cannot unsubscribe through footer link'
it 'is sent to the user' do
is_expected.to deliver_to user.email
end
it 'has the correct subject' do
is_expected.to have_subject /Your personal access token has expired/
end
it 'mentions the access token has expired' do
is_expected.to have_body_text /One or more of your personal access tokens has expired/
end
it 'includes a link to personal access tokens page' do
is_expected.to have_body_text /#{profile_personal_access_tokens_path}/
end
it 'includes the email reason' do
is_expected.to have_body_text /You're receiving this email because of your account on localhost/
end
end
context 'when invalid' do
context 'when user does not exist' do
it do
expect { Notify.access_token_expired_email(nil) }.not_to change { ActionMailer::Base.deliveries.count }
end
end
context 'when user is not active' do
before do
user.block!
end
it do
expect { Notify.access_token_expired_email(user) }.not_to change { ActionMailer::Base.deliveries.count }
end
end
end
end
describe 'user unknown sign in email' do
let_it_be(:user) { create(:user) }
let_it_be(:ip) { '169.0.0.1' }

View File

@ -0,0 +1,41 @@
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200724130639_backfill_designs_relative_position.rb')
RSpec.describe BackfillDesignsRelativePosition do
let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab') }
let(:project) { table(:projects).create!(namespace_id: namespace.id) }
let(:issues) { table(:issues) }
let(:designs) { table(:design_management_designs) }
before do
issues.create!(id: 1, project_id: project.id)
issues.create!(id: 2, project_id: project.id)
issues.create!(id: 3, project_id: project.id)
issues.create!(id: 4, project_id: project.id)
designs.create!(issue_id: 1, project_id: project.id, filename: 'design1.jpg')
designs.create!(issue_id: 2, project_id: project.id, filename: 'design2.jpg')
designs.create!(issue_id: 4, project_id: project.id, filename: 'design3.jpg')
stub_const("#{described_class.name}::BATCH_SIZE", 2)
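# With BATCH_SIZE stubbed to 2, the three issues that have designs (1, 2
# and 4) should be split across two delayed jobs, as asserted below.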
end
it 'correctly schedules background migrations' do
Sidekiq::Testing.fake! do
Timecop.freeze do
migrate!
expect(described_class::MIGRATION)
.to be_scheduled_delayed_migration(2.minutes, [1, 2])
expect(described_class::MIGRATION)
.to be_scheduled_delayed_migration(4.minutes, [4])
# Issue 3 should be skipped because it doesn't have any designs
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
end
end
end
end

View File

@ -180,6 +180,18 @@ RSpec.describe PersonalAccessToken do
end
end
describe '.expired_today_and_not_notified' do
let_it_be(:active) { create(:personal_access_token) }
let_it_be(:expired_yesterday) { create(:personal_access_token, expires_at: Date.yesterday) }
let_it_be(:revoked_token) { create(:personal_access_token, expires_at: Date.current, revoked: true) }
let_it_be(:expired_today) { create(:personal_access_token, expires_at: Date.current) }
let_it_be(:expired_today_and_notified) { create(:personal_access_token, expires_at: Date.current, after_expiry_notification_delivered: true) }
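# Only `expired_today` should match: the scope presumably excludes tokens that
# are still active, revoked, expired before today, or already notified.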
it 'returns tokens that have expired today' do
expect(described_class.expired_today_and_not_notified).to contain_exactly(expired_today)
end
end
describe '.without_impersonation' do
let_it_be(:impersonation_token) { create(:personal_access_token, :impersonation) }
let_it_be(:personal_access_token) { create(:personal_access_token) }

View File

@ -855,6 +855,24 @@ RSpec.describe User do
end
end
describe '.with_personal_access_tokens_expired_today' do
let_it_be(:user1) { create(:user) }
let_it_be(:expired_today) { create(:personal_access_token, user: user1, expires_at: Date.current) }
let_it_be(:user2) { create(:user) }
let_it_be(:revoked_token) { create(:personal_access_token, user: user2, expires_at: Date.current, revoked: true) }
let_it_be(:user3) { create(:user) }
let_it_be(:impersonated_token) { create(:personal_access_token, user: user3, expires_at: Date.current, impersonation: true) }
let_it_be(:user4) { create(:user) }
let_it_be(:already_notified) { create(:personal_access_token, user: user4, expires_at: Date.current, after_expiry_notification_delivered: true) }
it 'returns users whose token has expired today' do
expect(described_class.with_personal_access_tokens_expired_today).to contain_exactly(user1)
end
end
describe '.active_without_ghosts' do
let_it_be(:user1) { create(:user, :external) }
let_it_be(:user2) { create(:user, state: 'blocked') }

View File

@ -238,6 +238,26 @@ RSpec.describe NotificationService, :mailer do
expect { subject }.to have_enqueued_email(user, mail: "access_token_about_to_expire_email")
end
end
describe '#access_token_expired' do
let_it_be(:user) { create(:user) }
subject { notification.access_token_expired(user) }
it 'sends email to the token owner' do
expect { subject }.to have_enqueued_email(user, mail: "access_token_expired_email")
end
context 'when user is not allowed to receive notifications' do
before do
user.block!
end
it 'does not send email to the token owner' do
expect { subject }.not_to have_enqueued_email(user, mail: "access_token_expired_email")
end
end
end
end
describe '#unknown_sign_in' do

View File

@ -1,11 +1,11 @@
# frozen_string_literal: true
RSpec.shared_examples 'a class that supports relative positioning' do
let(:item1) { create(factory, default_params) }
let(:item2) { create(factory, default_params) }
let(:new_item) { create(factory, default_params) }
let(:item1) { create_item }
let(:item2) { create_item }
let(:new_item) { create_item }
def create_item(params)
def create_item(params = {})
create(factory, params.merge(default_params))
end
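# Defaulting `params` to {} keeps the bare `create_item` calls above working
# while still letting individual examples pass overrides.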
@ -16,21 +16,30 @@ RSpec.shared_examples 'a class that supports relative positioning' do
end
describe '.move_nulls_to_end' do
let(:item3) { create_item }
it 'moves items with null relative_position to the end' do
item1.update!(relative_position: nil)
item1.update!(relative_position: 1000)
item2.update!(relative_position: nil)
item3.update!(relative_position: nil)
described_class.move_nulls_to_end([item1, item2])
items = [item1, item2, item3]
described_class.move_nulls_to_end(items)
items.map(&:reload)
expect(item2.prev_relative_position).to eq item1.relative_position
expect(item1.prev_relative_position).to eq nil
expect(item2.next_relative_position).to eq nil
expect(items.sort_by(&:relative_position)).to eq(items)
expect(item1.relative_position).to be(1000)
expect(item1.prev_relative_position).to be_nil
expect(item1.next_relative_position).to eq(item2.relative_position)
expect(item2.next_relative_position).to eq(item3.relative_position)
expect(item3.next_relative_position).to be_nil
end
it 'moves the item near the start position when there are no existing positions' do
item1.update!(relative_position: nil)
described_class.move_nulls_to_end([item1])
item1.reload
expect(item1.relative_position).to eq(described_class::START_POSITION + described_class::IDEAL_DISTANCE)
end
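# START_POSITION is presumably the midpoint of the allowed range, so a lone
# null item lands one IDEAL_DISTANCE gap past it, leaving room on both sides.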
@ -38,9 +47,49 @@ RSpec.shared_examples 'a class that supports relative positioning' do
it 'does not perform any moves if all items have their relative_position set' do
item1.update!(relative_position: 1)
expect(item1).not_to receive(:save)
described_class.move_nulls_to_end([item1])
item1.reload
expect(item1.relative_position).to be(1)
end
end
describe '.move_nulls_to_start' do
let(:item3) { create_item }
it 'moves items with null relative_position to the start' do
item1.update!(relative_position: nil)
item2.update!(relative_position: nil)
item3.update!(relative_position: 1000)
items = [item1, item2, item3]
described_class.move_nulls_to_start(items)
items.map(&:reload)
expect(items.sort_by(&:relative_position)).to eq(items)
expect(item1.prev_relative_position).to eq nil
expect(item1.next_relative_position).to eq item2.relative_position
expect(item2.next_relative_position).to eq item3.relative_position
expect(item3.next_relative_position).to eq nil
expect(item3.relative_position).to be(1000)
end
it 'moves the item near the start position when there are no existing positions' do
item1.update!(relative_position: nil)
described_class.move_nulls_to_start([item1])
item1.reload
expect(item1.relative_position).to eq(described_class::START_POSITION - described_class::IDEAL_DISTANCE)
end
it 'does not perform any moves if all items have their relative_position set' do
item1.update!(relative_position: 1)
described_class.move_nulls_to_start([item1])
item1.reload
expect(item1.relative_position).to be(1)
end
end

View File

@ -0,0 +1,69 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe PersonalAccessTokens::ExpiredNotificationWorker, type: :worker do
subject(:worker) { described_class.new }
describe '#perform' do
context 'when a token has expired' do
let(:expired_today) { create(:personal_access_token, expires_at: Date.current) }
context 'when feature is enabled' do
it 'uses notification service to send email to the user' do
expect_next_instance_of(NotificationService) do |notification_service|
expect(notification_service).to receive(:access_token_expired).with(expired_today.user)
end
worker.perform
end
it 'updates notified column' do
expect { worker.perform }.to change { expired_today.reload.after_expiry_notification_delivered }.from(false).to(true)
end
end
context 'when feature is disabled' do
before do
stub_feature_flags(expired_pat_email_notification: false)
end
it 'does not update notified column' do
expect { worker.perform }.not_to change { expired_today.reload.after_expiry_notification_delivered }
end
it 'does not trigger email' do
expect { worker.perform }.not_to change { ActionMailer::Base.deliveries.count }
end
end
end
shared_examples 'expiry notification is not required to be sent for the token' do
it do
expect_next_instance_of(NotificationService) do |notification_service|
expect(notification_service).not_to receive(:access_token_expired).with(token.user)
end
worker.perform
end
end
context 'when token has expired in the past' do
let(:token) { create(:personal_access_token, expires_at: Date.yesterday) }
it_behaves_like 'expiry notification is not required to be sent for the token'
end
context 'when token is impersonated' do
let(:token) { create(:personal_access_token, expires_at: Date.current, impersonation: true) }
it_behaves_like 'expiry notification is not required to be sent for the token'
end
context 'when token is revoked' do
let(:token) { create(:personal_access_token, expires_at: Date.current, revoked: true) }
it_behaves_like 'expiry notification is not required to be sent for the token'
end
end
end

vendor/licenses.csv
View File

@ -1044,7 +1044,6 @@ strip-json-comments,2.0.1,MIT
style-loader,0.23.0,MIT
supports-color,2.0.0,MIT
supports-color,5.5.0,MIT
svg4everybody,2.1.9,CC0-1.0
symbol-observable,1.2.0,MIT
sys-filesystem,1.1.6,Artistic 2.0
tapable,1.1.0,MIT

View File

@ -11208,11 +11208,6 @@ svg-tags@^1.0.0:
resolved "https://registry.yarnpkg.com/svg-tags/-/svg-tags-1.0.0.tgz#58f71cee3bd519b59d4b2a843b6c7de64ac04764"
integrity sha1-WPcc7jvVGbWdSyqEO2x95krAR2Q=
svg4everybody@^2.1.9:
version "2.1.9"
resolved "https://registry.yarnpkg.com/svg4everybody/-/svg4everybody-2.1.9.tgz#5bd9f6defc133859a044646d4743fabc28db7e2d"
integrity sha1-W9n23vwTOFmgRGRtR0P6vCjbfi0=
swagger-ui-dist@^3.26.2:
version "3.26.2"
resolved "https://registry.yarnpkg.com/swagger-ui-dist/-/swagger-ui-dist-3.26.2.tgz#22c700906c8911b1c9956da6c3fca371dba6219f"