Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-07-08 09:09:33 +00:00
parent cf13820251
commit 7752bfa10f
63 changed files with 1215 additions and 350 deletions

View File

@ -217,6 +217,15 @@
variables:
POSTGRES_HOST_AUTH_METHOD: trust
.use-pg13:
image: "registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.7.2.patched-golang-1.16-git-2.31-lfs-2.9-chrome-89-node-14.15-yarn-1.22-postgresql-13-graphicsmagick-1.3.36"
services:
- name: postgres:13
command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
- name: redis:5.0-alpine
variables:
POSTGRES_HOST_AUTH_METHOD: trust
.use-pg11-ee:
image: "registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.7.2.patched-golang-1.16-git-2.31-lfs-2.9-chrome-89-node-14.15-yarn-1.22-postgresql-11-graphicsmagick-1.3.36"
services:
@ -239,6 +248,17 @@
variables:
POSTGRES_HOST_AUTH_METHOD: trust
.use-pg13-ee:
image: "registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.7.2.patched-golang-1.16-git-2.31-lfs-2.9-chrome-89-node-14.15-yarn-1.22-postgresql-13-graphicsmagick-1.3.36"
services:
- name: postgres:13
command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
- name: redis:5.0-alpine
- name: elasticsearch:7.11.1
command: ["elasticsearch", "-E", "discovery.type=single-node"]
variables:
POSTGRES_HOST_AUTH_METHOD: trust
.use-kaniko:
image:
name: registry.gitlab.com/gitlab-org/gitlab-build-images:kaniko

View File

@ -74,6 +74,11 @@
- .use-pg12
needs: ["setup-test-env", "retrieve-tests-metadata", "compile-test-assets as-if-foss", "detect-tests"]
.rspec-base-pg13:
extends:
- .rspec-base
- .use-pg13
.rspec-ee-base-pg11:
extends:
- .rspec-base
@ -84,6 +89,11 @@
- .rspec-base
- .use-pg12-ee
.rspec-ee-base-pg13:
extends:
- .rspec-base
- .use-pg13-ee
.rspec-ee-base-geo:
extends: .rspec-base
script:
@ -100,6 +110,11 @@
- .rspec-ee-base-geo
- .use-pg12-ee
.rspec-ee-base-geo-pg13:
extends:
- .rspec-ee-base-geo
- .use-pg13-ee
.db-job-base:
extends:
- .rails-job-base
@ -671,6 +686,8 @@ db:rollback geo:
##########################################
# EE/FOSS: default branch nightly scheduled jobs #
# PG11
rspec migration pg11:
extends:
- .rspec-base-pg11
@ -695,11 +712,40 @@ rspec system pg11:
- .rspec-base-pg11
- .rails:rules:default-branch-schedule-nightly--code-backstage
- .rspec-system-parallel
# PG13
rspec migration pg13:
extends:
- .rspec-base-pg13
- .rspec-base-migration
- .rails:rules:default-branch-schedule-nightly--code-backstage
- .rspec-migration-parallel
rspec unit pg13:
extends:
- .rspec-base-pg13
- .rails:rules:default-branch-schedule-nightly--code-backstage
- .rspec-unit-parallel
rspec integration pg13:
extends:
- .rspec-base-pg13
- .rails:rules:default-branch-schedule-nightly--code-backstage
- .rspec-integration-parallel
rspec system pg13:
extends:
- .rspec-base-pg13
- .rails:rules:default-branch-schedule-nightly--code-backstage
- .rspec-system-parallel
# EE/FOSS: default branch nightly scheduled jobs #
##########################################
#####################################
# EE: default branch nightly scheduled jobs #
# PG11
rspec-ee migration pg11:
extends:
- .rspec-ee-base-pg11
@ -740,6 +786,48 @@ rspec-ee system pg11 geo:
extends:
- .rspec-ee-base-geo-pg11
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
# PG13
rspec-ee migration pg13:
extends:
- .rspec-ee-base-pg13
- .rspec-base-migration
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
- .rspec-ee-migration-parallel
rspec-ee unit pg13:
extends:
- .rspec-ee-base-pg13
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
- .rspec-ee-unit-parallel
rspec-ee integration pg13:
extends:
- .rspec-ee-base-pg13
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
- .rspec-ee-integration-parallel
rspec-ee system pg13:
extends:
- .rspec-ee-base-pg13
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
- .rspec-ee-system-parallel
rspec-ee unit pg13 geo:
extends:
- .rspec-ee-base-geo-pg13
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
- .rspec-ee-unit-geo-parallel
rspec-ee integration pg13 geo:
extends:
- .rspec-ee-base-geo-pg13
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
rspec-ee system pg13 geo:
extends:
- .rspec-ee-base-geo-pg13
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
# EE: default branch nightly scheduled jobs #
#####################################

View File

@ -213,9 +213,9 @@ export default {
const el = this.$el.closest('.vue-recycle-scroller__item-view');
if (this.glFeatures.diffsVirtualScrolling && el) {
// We can't add a class with Vue because of the way the virtual
// We can't add a style with Vue because of the way the virtual
// scroller library renders the diff files
el.classList.toggle('gl-z-index-1', val);
el.style.zIndex = val ? '1' : null;
}
},
},

View File

@ -1,5 +1,5 @@
<script>
import ProjectAvatarDefault from '~/vue_shared/components/project_avatar/default.vue';
import ProjectAvatarDefault from '~/vue_shared/components/deprecated_project_avatar/default.vue';
export default {
components: {

View File

@ -1,10 +1,8 @@
import $ from 'jquery';
import * as timeago from 'timeago.js';
import { languageCode, s__ } from '../../../locale';
import { languageCode, s__, createDateTimeFormat } from '../../../locale';
import { formatDate } from './date_format_utility';
window.timeago = timeago;
/**
* Timeago uses underscores instead of dashes to separate language from country code.
*
@ -76,7 +74,26 @@ const memoizedLocale = () => {
timeago.register(timeagoLanguageCode, memoizedLocale());
timeago.register(`${timeagoLanguageCode}-remaining`, memoizedLocaleRemaining());
export const getTimeago = () => timeago;
let memoizedFormatter = null;
function setupAbsoluteFormatter() {
if (memoizedFormatter === null) {
const formatter = createDateTimeFormat({
dateStyle: 'medium',
timeStyle: 'short',
});
memoizedFormatter = {
format(date) {
return formatter.format(date instanceof Date ? date : new Date(date));
},
};
}
return memoizedFormatter;
}
export const getTimeago = () =>
window.gon?.time_display_relative === false ? setupAbsoluteFormatter() : timeago;
/**
* For the given elements, sets a tooltip with a formatted date.
@ -84,8 +101,9 @@ export const getTimeago = () => timeago;
* @param {Boolean} setTimeago
*/
export const localTimeAgo = ($timeagoEls, setTimeago = true) => {
const { format } = getTimeago();
$timeagoEls.each((i, el) => {
$(el).text(timeago.format($(el).attr('datetime'), timeagoLanguageCode));
$(el).text(format($(el).attr('datetime'), timeagoLanguageCode));
});
if (!setTimeago) {
@ -117,6 +135,7 @@ export const timeFor = (time, expiredLabel) => {
return timeago.format(time, `${timeagoLanguageCode}-remaining`).trim();
};
window.timeago = getTimeago();
window.gl = window.gl || {};
window.gl.utils = {
...(window.gl.utils || {}),

View File

@ -117,8 +117,8 @@ LineHighlighter.prototype.clearHighlight = function () {
//
// Returns an Array
LineHighlighter.prototype.hashToRange = function (hash) {
// ?L(\d+)(?:-(\d+))?$/)
const matches = hash.match(/^#?L(\d+)(?:-(\d+))?$/);
// ?L(\d+)(?:-L?(\d+))?$/)
const matches = hash.match(/^#?L(\d+)(?:-L?(\d+))?$/);
if (matches && matches.length) {
const first = parseInt(matches[1], 10);
const last = matches[2] ? parseInt(matches[2], 10) : null;

View File

@ -9,8 +9,7 @@ import {
} from '@gitlab/ui';
import { debounce } from 'lodash';
import { mapState, mapActions } from 'vuex';
// eslint-disable-next-line import/no-deprecated
import { redirectTo, urlParamsToObject } from '~/lib/utils/url_utility';
import { redirectTo, queryToObject } from '~/lib/utils/url_utility';
import { __ } from '~/locale';
const tooltipMessage = __('Searching by both author and message is currently not supported.');
@ -52,8 +51,7 @@ export default {
},
mounted() {
this.fetchAuthors();
// eslint-disable-next-line import/no-deprecated
const params = urlParamsToObject(window.location.search);
const params = queryToObject(window.location.search);
const { search: searchParam, author: authorParam } = params;
const commitsSearchInput = this.projectCommitsEl.querySelector('#commits-search');

View File

@ -11,7 +11,7 @@ import { __ } from '~/locale';
import blobInfoQuery from '../queries/blob_info.query.graphql';
import BlobButtonGroup from './blob_button_group.vue';
import BlobEdit from './blob_edit.vue';
import { loadViewer } from './blob_viewers';
import { loadViewer, viewerProps } from './blob_viewers';
export default {
components: {
@ -31,12 +31,12 @@ export default {
};
},
result() {
if (this.hasRichViewer && !this.blobViewer) {
this.loadLegacyViewer();
}
this.switchViewer(
this.hasRichViewer && !window.location.hash ? RICH_BLOB_VIEWER : SIMPLE_BLOB_VIEWER,
);
if (this.hasRichViewer && !this.blobViewer) {
this.loadLegacyViewer();
}
},
error() {
this.displayError();
@ -125,6 +125,10 @@ export default {
const { fileType } = this.viewer;
return loadViewer(fileType);
},
viewerProps() {
const { fileType } = this.viewer;
return viewerProps(fileType, this.blobInfo);
},
},
methods: {
loadLegacyViewer() {
@ -183,7 +187,7 @@ export default {
:active-viewer="viewer"
:loading="false"
/>
<component :is="blobViewer" v-else class="blob-viewer" />
<component :is="blobViewer" v-else v-bind="viewerProps" class="blob-viewer" />
</div>
</div>
</template>

View File

@ -3,8 +3,7 @@ export const loadViewer = (type) => {
case 'empty':
return () => import(/* webpackChunkName: 'blob_empty_viewer' */ './empty_viewer.vue');
case 'text':
// TODO (follow-up): import the text viewer
return null; // () => import(/* webpackChunkName: 'blob_text_viewer' */ './text_viewer.vue');
return () => import(/* webpackChunkName: 'blob_text_viewer' */ './text_viewer.vue');
case 'download':
// TODO (follow-up): import the download viewer
return null; // () => import(/* webpackChunkName: 'blob_download_viewer' */ './download_viewer.vue');
@ -12,3 +11,13 @@ export const loadViewer = (type) => {
return null;
}
};
export const viewerProps = (type, blob) => {
return {
text: {
content: blob.rawTextBlob,
fileName: blob.name,
readOnly: true,
},
}[type];
};

View File

@ -0,0 +1,25 @@
<script>
export default {
components: {
SourceEditor: () =>
import(/* webpackChunkName: 'SourceEditor' */ '~/vue_shared/components/source_editor.vue'),
},
props: {
content: {
type: String,
required: true,
},
fileName: {
type: String,
required: true,
},
readOnly: {
type: Boolean,
required: true,
},
},
};
</script>
<template>
<source-editor :value="content" :file-name="fileName" :editor-options="{ readOnly }" />
</template>

View File

@ -3,6 +3,7 @@ import Identicon from '../identicon.vue';
import ProjectAvatarImage from './image.vue';
export default {
name: 'DeprecatedProjectAvatar',
components: {
Identicon,
ProjectAvatarImage,

View File

@ -0,0 +1,30 @@
import ProjectAvatar from './project_avatar.vue';
export default {
component: ProjectAvatar,
title: 'vue_shared/components/project_avatar',
};
const Template = (args, { argTypes }) => ({
components: { ProjectAvatar },
props: Object.keys(argTypes),
template: '<project-avatar v-bind="$props" />',
});
export const Default = Template.bind({});
Default.args = {
projectAvatarUrl:
'https://gitlab.com/uploads/-/system/project/avatar/278964/logo-extra-whitespace.png?width=64',
projectName: 'GitLab',
};
export const FallbackAvatar = Template.bind({});
FallbackAvatar.args = {
projectName: 'GitLab',
};
export const EmptyAltTag = Template.bind({});
EmptyAltTag.args = {
...Default.args,
alt: '',
};

View File

@ -0,0 +1,45 @@
<script>
import { GlAvatar } from '@gitlab/ui';
export default {
components: {
GlAvatar,
},
props: {
projectName: {
type: String,
required: true,
},
projectAvatarUrl: {
type: String,
required: false,
default: '',
},
size: {
type: Number,
default: 32,
required: false,
},
alt: {
type: String,
required: false,
default: undefined,
},
},
computed: {
avatarAlt() {
return this.alt ?? this.projectName;
},
},
};
</script>
<template>
<gl-avatar
shape="rect"
:entity-name="projectName"
:src="projectAvatarUrl"
:alt="avatarAlt"
:size="size"
/>
</template>

View File

@ -4,7 +4,7 @@ import { GlButton, GlIcon } from '@gitlab/ui';
import { isString } from 'lodash';
import highlight from '~/lib/utils/highlight';
import { truncateNamespace } from '~/lib/utils/text_utility';
import ProjectAvatar from '~/vue_shared/components/project_avatar/default.vue';
import ProjectAvatar from '~/vue_shared/components/deprecated_project_avatar/default.vue';
export default {
name: 'ProjectListItem',

View File

@ -48,6 +48,12 @@ export default {
}
return this.labels;
},
showDropdownFooter() {
return (
(this.isDropdownVariantSidebar || this.isDropdownVariantEmbedded) &&
(this.allowLabelCreate || this.labelsManagePath)
);
},
showNoMatchingResultsMessage() {
return Boolean(this.searchKey) && this.visibleLabels.length === 0;
},
@ -192,11 +198,7 @@ export default {
</li>
</ul>
</div>
<div
v-if="isDropdownVariantSidebar || isDropdownVariantEmbedded"
class="dropdown-footer"
data-testid="dropdown-footer"
>
<div v-if="showDropdownFooter" class="dropdown-footer" data-testid="dropdown-footer">
<ul class="list-unstyled">
<li v-if="allowLabelCreate">
<gl-link
@ -206,7 +208,7 @@ export default {
{{ footerCreateLabelTitle }}
</gl-link>
</li>
<li>
<li v-if="labelsManagePath">
<gl-link
:href="labelsManagePath"
class="gl-display-flex flex-row text-break-word label-item"

View File

@ -3,6 +3,7 @@
module IssuableActions
extend ActiveSupport::Concern
include Gitlab::Utils::StrongMemoize
include Gitlab::Cache::Helpers
included do
before_action :authorize_destroy_issuable!, only: :destroy
@ -129,7 +130,11 @@ module IssuableActions
discussions = Discussion.build_collection(notes, issuable)
render json: discussion_serializer.represent(discussions, context: self)
if issuable.is_a?(MergeRequest) && Feature.enabled?(:merge_request_discussion_cache, issuable.target_project, default_enabled: :yaml)
render_cached(discussions, with: discussion_serializer, context: self)
else
render json: discussion_serializer.represent(discussions, context: self)
end
end
# rubocop:enable CodeReuse/ActiveRecord

View File

@ -24,8 +24,8 @@ class AwardEmoji < ApplicationRecord
scope :named, -> (names) { where(name: names) }
scope :awarded_by, -> (users) { where(user: users) }
after_save :expire_etag_cache
after_destroy :expire_etag_cache
after_save :expire_cache
after_destroy :expire_cache
class << self
def votes_for_collection(ids, type)
@ -60,7 +60,8 @@ class AwardEmoji < ApplicationRecord
self.name == UPVOTE_NAME
end
def expire_etag_cache
def expire_cache
awardable.try(:bump_updated_at)
awardable.try(:expire_etag_cache)
end
end

View File

@ -7,6 +7,9 @@ class Discussion
include GlobalID::Identification
include ResolvableDiscussion
# Bump this if we need to refresh the cached versions of discussions
CACHE_VERSION = 1
attr_reader :notes, :context_noteable
delegate :created_at,
@ -158,4 +161,19 @@ class Discussion
def reply_attributes
first_note.slice(:type, :noteable_type, :noteable_id, :commit_id, :discussion_id)
end
def cache_key
# Need this so cache will be invalidated when note within a discussion
# has been deleted.
notes_sha = Digest::SHA1.hexdigest(notes.map(&:id).join(':'))
[
CACHE_VERSION,
notes.last.latest_cached_markdown_version,
id,
notes_sha,
notes.max_by(&:updated_at).updated_at,
resolved_at
].join(':')
end
end

View File

@ -500,6 +500,13 @@ class Note < ApplicationRecord
refs
end
def bump_updated_at
# Instead of calling touch which is throttled via ThrottledTouch concern,
# we bump the updated_at column directly. This also prevents executing
# after_commit callbacks that we don't need.
update_column(:updated_at, Time.current)
end
def expire_etag_cache
noteable&.expire_note_etag_cache
end

View File

@ -13,7 +13,7 @@ module Projects
)
if link.save
setup_authorizations(group, link.group_access)
setup_authorizations(group)
success(link: link)
else
error(link.errors.full_messages.to_sentence, 409)
@ -22,9 +22,8 @@ module Projects
private
def setup_authorizations(group, group_access = nil)
AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker.perform_async(
project.id, group.id, group_access)
def setup_authorizations(group)
AuthorizedProjectUpdate::ProjectRecalculateWorker.perform_async(project.id)
# AuthorizedProjectsWorker uses an exclusive lease per user but
# specialized workers might have synchronization issues. Until we

View File

@ -20,8 +20,13 @@ class UserProjectAccessChangedService
if priority == HIGH_PRIORITY
AuthorizedProjectsWorker.bulk_perform_async(bulk_args) # rubocop:disable Scalability/BulkPerformWithContext
else
AuthorizedProjectUpdate::UserRefreshFromReplicaWorker.bulk_perform_in( # rubocop:disable Scalability/BulkPerformWithContext
DELAY, bulk_args, batch_size: 100, batch_delay: 30.seconds)
with_related_class_context do
# We wrap the execution in `with_related_class_context` so as to obtain
# the location of the original caller
# in jobs enqueued from within `AuthorizedProjectUpdate::UserRefreshFromReplicaWorker`
AuthorizedProjectUpdate::UserRefreshFromReplicaWorker.bulk_perform_in( # rubocop:disable Scalability/BulkPerformWithContext
DELAY, bulk_args, batch_size: 100, batch_delay: 30.seconds)
end
end
end
@ -29,4 +34,11 @@ class UserProjectAccessChangedService
result
end
private
def with_related_class_context(&block)
current_caller_id = Gitlab::ApplicationContext.current_context_attribute('meta.caller_id').presence
Gitlab::ApplicationContext.with_context(related_class: current_caller_id, &block)
end
end

View File

@ -3,20 +3,24 @@
%fieldset
.sub-section
%h4= _("Hashed repository storage paths")
%h4= _('Hashed repository storage paths')
.form-group
.form-check
= f.check_box :hashed_storage_enabled, class: 'form-check-input qa-hashed-storage-checkbox', disabled: @application_setting.hashed_storage_enabled?
= f.label :hashed_storage_enabled, _("Use hashed storage"), class: 'label-bold form-check-label'
= f.label :hashed_storage_enabled, _('Use hashed storage'), class: 'label-bold form-check-label'
.form-text.text-muted
= _("Use hashed storage paths for newly created and renamed repositories. Enable immutable, hash-based paths and repository names to store repositories on disk. This prevents repositories from having to be moved or renamed when the Repository URL changes and may improve disk I/O performance. (Always enabled since 13.0)")
= _('Use hashed storage paths for newly created and renamed repositories. Always enabled since 13.0.')
= link_to s_('Learn more.'), help_page_path('administration/repository_storage_types.md', anchor: 'hashed-storage'), target: '_blank', rel: 'noopener noreferrer'
.sub-section
%h4= _("Storage nodes for new repositories")
.form-group
.form-text
%p.text-secondary
= _('Enter weights for storages for new repositories.')
= link_to sprite_icon('question-o'), help_page_path('administration/repository_storage_paths')
- weights_link_url = help_page_path('administration/repository_storage_paths.md', anchor: 'configure-where-new-repositories-are-stored')
- weights_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: weights_link_url }
= html_escape(s_('Enter %{weights_link_start}weights%{weights_link_end} for storages for new repositories. Configured storages appear below.')) % { weights_link_start: weights_link_start, weights_link_end: '</a>'.html_safe }
= link_to s_('Learn more.'), help_page_path('administration/repository_storage_paths.md'), target: '_blank', rel: 'noopener noreferrer'
.form-check
= f.fields_for :repository_storages_weighted, storage_weights do |storage_form|
- Gitlab.config.repositories.storages.keys.each do |storage|

View File

@ -31,7 +31,8 @@
%button.btn.gl-button.btn-default.js-settings-toggle{ type: 'button' }
= expanded_by_default? ? _('Collapse') : _('Expand')
%p
= _('Configure storage path settings.')
= _('Configure repository storage.')
= link_to s_('Learn more.'), help_page_path('administration/repository_storage_paths.md'), target: '_blank', rel: 'noopener noreferrer'
.settings-content
= render 'repository_storage'

View File

@ -128,29 +128,27 @@
= f.label :first_day_of_week, class: 'label-bold' do
= _('First day of the week')
= f.select :first_day_of_week, first_day_of_week_choices_with_default, {}, class: 'select2'
- if Feature.enabled?(:user_time_settings)
.col-sm-12
%hr
.col-lg-4.profile-settings-sidebar
%h4.gl-mt-0= s_('Preferences|Time preferences')
%p= s_('Preferences|These settings will update how dates and times are displayed for you.')
.col-lg-8
.form-group
%h5= s_('Preferences|Time format')
.checkbox-icon-inline-wrapper
- time_format_label = capture do
= s_('Preferences|Display time in 24-hour format')
= f.check_box :time_format_in_24h
= f.label :time_format_in_24h do
= time_format_label
%h5= s_('Preferences|Time display')
.checkbox-icon-inline-wrapper
- time_display_label = capture do
= s_('Preferences|Use relative times')
= f.check_box :time_display_relative
= f.label :time_display_relative do
= time_display_label
.form-text.text-muted
= s_('Preferences|For example: 30 mins ago.')
.col-sm-12
%hr
.row.js-preferences-form.js-search-settings-section
.col-lg-4.profile-settings-sidebar#time-preferences
%h4.gl-mt-0
= s_('Preferences|Time preferences')
%p
= s_('Preferences|Configure how dates and times display for you.')
= succeed '.' do
= link_to _('Learn more'), help_page_path('user/profile/preferences', anchor: 'time-preferences'), target: '_blank'
.col-lg-8
.form-group.form-check
= f.check_box :time_display_relative, class: 'form-check-input'
= f.label :time_display_relative, class: 'form-check-label' do
= s_('Preferences|Use relative times')
.form-text.text-muted
= s_('Preferences|For example: 30 minutes ago.')
- if Feature.enabled?(:user_time_settings)
.form-group.form-check
= f.check_box :time_format_in_24h, class: 'form-check-input'
= f.label :time_format_in_24h, class: 'form-check-label' do
= s_('Preferences|Display time in 24-hour format')
#js-profile-preferences-app{ data: data_attributes }

View File

@ -41,15 +41,9 @@ module AuthorizedProjectUpdate
end
def enqueue_project_authorizations_refresh(user)
with_context(user: user, related_class: current_caller_id) do
with_context(user: user) do
AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker.perform_async(user.id)
end
end
# We use this so that we can obtain the details of the original caller
# in the enqueued `AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker` job.
def current_caller_id
Gitlab::ApplicationContext.current_context_attribute('meta.caller_id').presence
end
end
end

View File

@ -0,0 +1,8 @@
---
name: jira_issue_details_edit_labels
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/65298
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/335069
milestone: '14.1'
type: development
group: group::ecosystem
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: merge_request_discussion_cache
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/64688
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/332967
milestone: '14.1'
type: development
group: group::code review
default_enabled: false

Binary file not shown. (Before: 26 KiB)

View File

@ -268,8 +268,8 @@ control over how the Pages daemon runs and serves content in your environment.
| `sentry_enabled` | Enable reporting and logging with Sentry, true/false. |
| `sentry_environment` | The environment for Sentry crash reporting. |
| `status_uri` | The URL path for a status page, for example, `/@status`. |
| `tls_max_version` | Specifies the maximum SSL/TLS version ("ssl3", "tls1.0", "tls1.1" or "tls1.2"). |
| `tls_min_version` | Specifies the minimum SSL/TLS version ("ssl3", "tls1.0", "tls1.1" or "tls1.2"). |
| `tls_max_version` | Specifies the maximum TLS version ("tls1.2" or "tls1.3"). |
| `tls_min_version` | Specifies the minimum TLS version ("tls1.2" or "tls1.3"). |
| `use_http2` | Enable HTTP2 support. |
| **`gitlab_pages['env'][]`** | |
| `http_proxy` | Configure GitLab Pages to use an HTTP Proxy to mediate traffic between Pages and GitLab. Sets an environment variable `http_proxy` when starting Pages daemon. |
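To illustrate how the TLS keys above map to an Omnibus installation, the following is a minimal, hypothetical `/etc/gitlab/gitlab.rb` sketch rather than a complete configuration; only `"tls1.2"` and `"tls1.3"` are accepted values:

```ruby
# Illustrative sketch only: restrict GitLab Pages to modern TLS versions.
# Accepted values, per the table above, are "tls1.2" and "tls1.3".
gitlab_pages['tls_min_version'] = "tls1.2"
gitlab_pages['tls_max_version'] = "tls1.3"
```

After editing `/etc/gitlab/gitlab.rb`, run `gitlab-ctl reconfigure` for the change to take effect.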

View File

@ -19,7 +19,7 @@ full list of reference architectures, see
|------------------------------------------|-------------|-------------------------|------------------|--------------|-----------|
| External load balancing node(3) | 1 | 4 vCPU, 3.6 GB memory | `n1-highcpu-4` | `c5.xlarge` | `F4s v2` |
| Consul(1) | 3 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` | `F2s v2` |
| PostgreSQL(1) | 3 | 16 vCPU, 60 GB memory | `n1-standard-1` | `m5.4xlarge` | `D16s v3` |
| PostgreSQL(1) | 3 | 16 vCPU, 60 GB memory | `n1-standard-16` | `m5.4xlarge` | `D16s v3` |
| PgBouncer(1) | 3 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` | `F2s v2` |
| Internal load balancing node(3) | 1 | 4 vCPU, 3.6GB memory | `n1-highcpu-4` | `c5.large` | `F2s v2` |
| Redis - Cache(2) | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` | `D4s v3` |
@ -2370,6 +2370,188 @@ Read:
- The [Gitaly and NFS deprecation notice](../gitaly/index.md#nfs-deprecation-notice).
- About the [correct mount options to use](../nfs.md#upgrade-to-gitaly-cluster-or-disable-caching-if-experiencing-data-loss).
## Cloud Native Hybrid reference architecture with Helm Charts (alternative)
As an alternative approach, you can also run select components of GitLab as Cloud Native
in Kubernetes via our official [Helm Charts](https://docs.gitlab.com/charts/).
In this setup, we support running the equivalent of GitLab Rails and Sidekiq nodes
in a Kubernetes cluster, named Webservice and Sidekiq respectively. In addition,
the following supporting services are supported: NGINX, Task Runner, Migrations,
Prometheus, and Grafana.
Hybrid installations leverage the benefits of both cloud native and traditional
compute deployments: you can reap certain cloud native workload management benefits
in Kubernetes while the other components are deployed in compute VMs with Omnibus,
as described above on this page.
NOTE:
This is an **advanced** setup. Running services in Kubernetes is well known
to be complex. **This setup is only recommended** if you have strong working
knowledge and experience in Kubernetes. The rest of this
section assumes this familiarity.
### Cluster topology
The following tables and diagram detail the hybrid environment using the same formats
as the normal environment above.
First are the components that run in Kubernetes. The recommendations at this
time use Google Cloud's Kubernetes Engine (GKE) and associated machine types, but the memory
and CPU requirements should translate to most other providers. We hope to update this in the
future with further specific cloud provider details.
| Service | Nodes(1) | Configuration | GCP | Allocatable CPUs and Memory |
|-------------------------------------------------------|----------|-------------------------|------------------|-----------------------------|
| Webservice | 7 | 32 vCPU, 28.8 GB memory | `n1-highcpu-32` | 223 vCPU, 206.5 GB memory |
| Sidekiq | 4 | 4 vCPU, 15 GB memory | `n1-standard-4` | 15.5 vCPU, 50 GB memory |
| Supporting services such as NGINX, Prometheus, etc. | 2 | 4 vCPU, 15 GB memory | `n1-standard-4` | 7.75 vCPU, 25 GB memory |
<!-- Disable ordered list rule https://github.com/DavidAnson/markdownlint/blob/main/doc/Rules.md#md029---ordered-list-item-prefix -->
<!-- markdownlint-disable MD029 -->
1. Node configuration is shown as it is forced to ensure pod vCPU/memory ratios and avoid scaling during **performance testing**.
In production deployments, there is no need to assign pods to nodes. A minimum of three nodes in three different availability zones is strongly recommended to align with resilient cloud architecture practices.
<!-- markdownlint-enable MD029 -->
Next are the backend components that run on static compute VMs via Omnibus (or External PaaS
services where applicable):
| Service | Nodes | Configuration | GCP |
|--------------------------------------------|-------|-------------------------|------------------|
| Consul(1) | 3 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` |
| PostgreSQL(1) | 3 | 16 vCPU, 60 GB memory | `n1-standard-16` |
| PgBouncer(1) | 3 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` |
| Internal load balancing node(3) | 1 | 4 vCPU, 3.6GB memory | `n1-highcpu-4` |
| Redis - Cache(2) | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` |
| Redis - Queues / Shared State(2) | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` |
| Redis Sentinel - Cache(2) | 3 | 1 vCPU, 3.75 GB memory | `n1-standard-1` |
| Redis Sentinel - Queues / Shared State(2) | 3 | 1 vCPU, 3.75 GB memory | `n1-standard-1` |
| Gitaly | 3 | 32 vCPU, 120 GB memory | `n1-standard-32` |
| Praefect | 3 | 4 vCPU, 3.6 GB memory | `n1-highcpu-4` |
| Praefect PostgreSQL(1) | 1+ | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` |
| Object storage(4) | n/a | n/a | n/a |
<!-- Disable ordered list rule https://github.com/DavidAnson/markdownlint/blob/main/doc/Rules.md#md029---ordered-list-item-prefix -->
<!-- markdownlint-disable MD029 -->
1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. Google Cloud SQL and AWS RDS are known to work; however, Azure Database for PostgreSQL is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to performance issues. Consul is primarily used for PostgreSQL high availability, so it can be ignored when using a PostgreSQL PaaS setup. However, it is also used optionally by Prometheus for Omnibus auto host discovery.
2. Can be optionally run on reputable third-party external PaaS Redis solutions. Google Memorystore and AWS Elasticache are known to work.
3. Can be optionally run on reputable third-party load balancing services (LB PaaS). AWS ELB is known to work.
4. Should be run on reputable third-party object storage (storage PaaS) for cloud implementations. Google Cloud Storage and AWS S3 are known to work.
<!-- markdownlint-enable MD029 -->
NOTE:
For all PaaS solutions that involve configuring instances, it is strongly recommended to implement a minimum of three nodes in three different availability zones to align with resilient cloud architecture practices.
```plantuml
@startuml 25k
card "Kubernetes via Helm Charts" as kubernetes {
card "**External Load Balancer**" as elb #6a9be7
together {
collections "**Webservice** x7" as gitlab #32CD32
collections "**Sidekiq** x4" as sidekiq #ff8dd1
}
card "**Prometheus + Grafana**" as monitor #7FFFD4
card "**Supporting Services**" as support
}
card "**Internal Load Balancer**" as ilb #9370DB
collections "**Consul** x3" as consul #e76a9b
card "Gitaly Cluster" as gitaly_cluster {
collections "**Praefect** x3" as praefect #FF8C00
collections "**Gitaly** x3" as gitaly #FF8C00
card "**Praefect PostgreSQL***\n//Non fault-tolerant//" as praefect_postgres #FF8C00
praefect -[#FF8C00]-> gitaly
praefect -[#FF8C00]> praefect_postgres
}
card "Database" as database {
collections "**PGBouncer** x3" as pgbouncer #4EA7FF
card "**PostgreSQL** (Primary)" as postgres_primary #4EA7FF
collections "**PostgreSQL** (Secondary) x2" as postgres_secondary #4EA7FF
pgbouncer -[#4EA7FF]-> postgres_primary
postgres_primary .[#4EA7FF]> postgres_secondary
}
card "redis" as redis {
collections "**Redis Persistent** x3" as redis_persistent #FF6347
collections "**Redis Cache** x3" as redis_cache #FF6347
collections "**Redis Persistent Sentinel** x3" as redis_persistent_sentinel #FF6347
collections "**Redis Cache Sentinel** x3"as redis_cache_sentinel #FF6347
redis_persistent <.[#FF6347]- redis_persistent_sentinel
redis_cache <.[#FF6347]- redis_cache_sentinel
}
cloud "**Object Storage**" as object_storage #white
elb -[#6a9be7]-> gitlab
elb -[#6a9be7]-> monitor
elb -[hidden]-> support
gitlab -[#32CD32]> sidekiq
gitlab -[#32CD32]--> ilb
gitlab -[#32CD32]-> object_storage
gitlab -[#32CD32]---> redis
gitlab -[hidden]--> consul
sidekiq -[#ff8dd1]--> ilb
sidekiq -[#ff8dd1]-> object_storage
sidekiq -[#ff8dd1]---> redis
sidekiq -[hidden]--> consul
ilb -[#9370DB]-> gitaly_cluster
ilb -[#9370DB]-> database
consul .[#e76a9b]-> database
consul .[#e76a9b]-> gitaly_cluster
consul .[#e76a9b,norank]--> redis
monitor .[#7FFFD4]> consul
monitor .[#7FFFD4]-> database
monitor .[#7FFFD4]-> gitaly_cluster
monitor .[#7FFFD4,norank]--> redis
monitor .[#7FFFD4]> ilb
monitor .[#7FFFD4,norank]u--> elb
@enduml
```
### Resource usage settings
The following formulas help when calculating how many pods may be deployed within resource constraints.
The [25k reference architecture example values file](https://gitlab.com/gitlab-org/charts/gitlab/-/blob/master/examples/ref/25k.yaml)
documents how to apply the calculated configuration to the Helm Chart.
#### Webservice
Webservice pods typically need about 1 vCPU and 1.25 GB of memory _per worker_.
Each Webservice pod will consume roughly 4 vCPUs and 5 GB of memory using
the [recommended topology](#cluster-topology) because four worker processes
are created by default and each pod has other small processes running.
For 25k users we recommend a total Puma worker count of around 140.
With the [provided recommendations](#cluster-topology) this allows the deployment of up to 35
Webservice pods with 4 workers per pod and 5 pods per node. Expand available resources using
the ratio of 1 vCPU to 1.25 GB of memory _per each worker process_ for each additional
Webservice pod.
For further information on resource usage, see the [Webservice resources](https://docs.gitlab.com/charts/charts/gitlab/webservice/#resources).
#### Sidekiq
Sidekiq pods should generally have 1 vCPU and 2 GB of memory.
[The provided starting point](#cluster-topology) allows the deployment of up to
14 Sidekiq pods. Expand available resources using the 1 vCPU to 2 GB memory
ratio for each additional pod.
For further information on resource usage, see the [Sidekiq resources](https://docs.gitlab.com/charts/charts/gitlab/sidekiq/#resources).
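As a rough sanity check of the two ratios above against the allocatable capacity in the [cluster topology](#cluster-topology) table, consider the following hypothetical Ruby sketch; it is not part of GitLab, and the constant names and figures only illustrate the recommendations above:

```ruby
# Illustrative capacity check: apply the recommended per-worker and per-pod
# ratios to the allocatable CPU/memory figures from the cluster topology table.
WEBSERVICE = { cpu_per_worker: 1.0, memory_gb_per_worker: 1.25, workers_per_pod: 4 }.freeze
SIDEKIQ    = { cpu_per_pod: 1.0, memory_gb_per_pod: 2.0 }.freeze

# Maximum pods that fit, limited by whichever resource runs out first.
def max_pods(alloc_cpu, alloc_memory_gb, cpu_per_pod, memory_gb_per_pod)
  [alloc_cpu / cpu_per_pod, alloc_memory_gb / memory_gb_per_pod].min.floor
end

webservice_pods = max_pods(
  223, 206.5,
  WEBSERVICE[:cpu_per_worker] * WEBSERVICE[:workers_per_pod],
  WEBSERVICE[:memory_gb_per_worker] * WEBSERVICE[:workers_per_pod]
)
sidekiq_pods = max_pods(15.5, 50, SIDEKIQ[:cpu_per_pod], SIDEKIQ[:memory_gb_per_pod])

puts "Webservice pods by raw ratio: #{webservice_pods}" # ~41; 5 pods x 7 nodes caps the recommendation at 35
puts "Sidekiq pods by raw ratio: #{sidekiq_pods}"       # ~15; the recommendation above allows up to 14
```

The raw ratios land slightly above the documented pod counts because the recommendations keep headroom for node packing and for the other small processes running in each pod.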
<div align="right">
<a type="button" class="btn btn-default" href="#setup-components">
Back to setup components <i class="fa fa-angle-double-up" aria-hidden="true"></i>

View File

@ -72,6 +72,7 @@ The following reference architectures are available:
The following Cloud Native Hybrid reference architectures, where select recommended components can be run in Kubernetes, are available:
- [Up to 10,000 users](10k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
- [Up to 25,000 users](25k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
- [Up to 50,000 users](50k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
A GitLab [Premium or Ultimate](https://about.gitlab.com/pricing/#self-managed) license is required

View File

@ -147,11 +147,11 @@ can choose where new repositories are stored:
1. Select **Save changes**.
Each repository storage path can be assigned a weight from 0-100. When a new project is created,
these weights are used to determine the storage location the repository is created on. The higher
the weight of a given repository storage path relative to other repository storages paths, the more
often it is chosen. That is, `(storage weight) / (sum of all weights) * 100 = chance %`.
these weights are used to determine the storage location the repository is created on.
![Choose repository storage path in Admin Area](img/repository_storages_admin_ui_v13_1.png)
The higher the weight of a given repository storage path relative to other repository storage
paths, the more often it is chosen. That is,
`(storage weight) / (sum of all weights) * 100 = chance %`.
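As a worked example of that formula, using made-up storage names and weights (illustrative Ruby, not part of GitLab):

```ruby
# Illustrative only: selection chance for each repository storage,
# using (storage weight) / (sum of all weights) * 100.
weights = { 'default' => 50, 'storage-a' => 30, 'storage-b' => 20 }
total = weights.values.sum.to_f

weights.each do |storage, weight|
  puts format('%-9s gets ~%.0f%% of new repositories', storage, weight / total * 100)
end
# default   gets ~50% of new repositories
# storage-a gets ~30% of new repositories
# storage-b gets ~20% of new repositories
```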
## Move repositories

View File

@ -1120,7 +1120,7 @@ To set up Service Ping locally, you must:
1. Using the `gitlab` Rails console, manually trigger Service Ping:
```ruby
SubmitUsagePingService.new.execute
ServicePing::SubmitService.new.execute
```
1. Use the `versions` Rails console to check the Service Ping was successfully received,

View File

@ -137,6 +137,8 @@ Set up the Jenkins project you intend to run your build on.
}
```
For more Jenkins Pipeline script examples, go to the [Jenkins GitLab plugin repository on GitHub](https://github.com/jenkinsci/gitlab-plugin#scripted-pipeline-jobs).
## Configure the GitLab project
Configure the GitLab integration with Jenkins in one of the following ways.
@ -217,3 +219,16 @@ If you don't find the errors above, but do find *duplicate* entries like below (
2019-10-25_04:22:41.25630 2019-10-25T04:22:41.256Z 1584 TID-ovowh4tek WebHookWorker JID-941fb7f40b69dff3d833c99b INFO: start
2019-10-25_04:22:41.25630 2019-10-25T04:22:41.256Z 1584 TID-ovowh4tek WebHookWorker JID-941fb7f40b69dff3d833c99b INFO: start
```
### Enable job logs in Jenkins
When troubleshooting an integration issue, it is useful to enable job logs in Jenkins to see more details about what is happening under the hood.
To enable job logs in Jenkins:
1. Go to **Dashboard > Manage Jenkins > System Log**.
1. Select **Add new log recorder**.
1. Enter a name for the log recorder.
1. On the next screen, select **Add** and enter `org.jenkinsci.plugins.workflow.job` in the text field.
1. Make sure that the Log Level is **All** and select **Save**.
Now, after you run a build, you can go to the loggers page (**Dashboard > Manage Jenkins > System Log**), select your logger, and check the logs.

View File

@ -165,6 +165,30 @@ You can choose one of the following options as the first day of the week:
If you select **System Default**, the [instance default](../admin_area/settings/index.md#default-first-day-of-the-week) setting is used.
## Time preferences
### Use relative times
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/65570) in GitLab 14.1.
You can select your preferred time format for the GitLab user interface:
- Relative times, for example, `30 minutes ago`.
- Absolute times, for example, `May 18, 2021, 3:57 PM`.
The times are formatted depending on your chosen language.
To set your time preference:
1. On the **Preferences** page, go to **Time preferences**.
1. Select the **Use relative times** checkbox to use relative times,
or clear the checkbox to use absolute times.
1. Select **Save changes**.
NOTE:
This feature is experimental, and choosing absolute times might break certain layouts.
Please open an issue if you notice that using absolute times breaks a layout.
## Integrations
Configure your preferences with third-party services which provide enhancements to your GitLab experience.

View File

@ -1,5 +1,7 @@
# frozen_string_literal: true
require 'webauthn/u2f_migrator'
module Gitlab
module Auth
class U2fWebauthnConverter

View File

@ -1,6 +1,5 @@
# frozen_string_literal: true
# rubocop:disable Style/Documentation
require "webauthn/u2f_migrator"
module Gitlab
module BackgroundMigration

View File

@ -41,159 +41,5 @@ module Gitlab
include BatchCount
end
end
class BatchCounter
FALLBACK = -1
MIN_REQUIRED_BATCH_SIZE = 1_250
DEFAULT_SUM_BATCH_SIZE = 1_000
MAX_ALLOWED_LOOPS = 10_000
SLEEP_TIME_IN_SECONDS = 0.01 # 10 msec sleep
ALLOWED_MODES = [:itself, :distinct].freeze
FALLBACK_FINISH = 0
OFFSET_BY_ONE = 1
# Each query should take < 500ms https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22705
DEFAULT_DISTINCT_BATCH_SIZE = 10_000
DEFAULT_BATCH_SIZE = 100_000
def initialize(relation, column: nil, operation: :count, operation_args: nil)
@relation = relation
@column = column || relation.primary_key
@operation = operation
@operation_args = operation_args
end
def unwanted_configuration?(finish, batch_size, start)
(@operation == :count && batch_size <= MIN_REQUIRED_BATCH_SIZE) ||
(@operation == :sum && batch_size < DEFAULT_SUM_BATCH_SIZE) ||
(finish - start) / batch_size >= MAX_ALLOWED_LOOPS ||
start >= finish
end
def count(batch_size: nil, mode: :itself, start: nil, finish: nil)
raise 'BatchCount can not be run inside a transaction' if ActiveRecord::Base.connection.transaction_open?
check_mode!(mode)
# non-distinct have better performance
batch_size ||= batch_size_for_mode_and_operation(mode, @operation)
start = actual_start(start)
finish = actual_finish(finish)
raise "Batch counting expects positive values only for #{@column}" if start < 0 || finish < 0
return FALLBACK if unwanted_configuration?(finish, batch_size, start)
results = nil
batch_start = start
while batch_start < finish
begin
batch_end = [batch_start + batch_size, finish].min
batch_relation = build_relation_batch(batch_start, batch_end, mode)
op_args = @operation_args
if @operation == :count && @operation_args.blank? && use_loose_index_scan_for_distinct_values?(mode)
op_args = [Gitlab::Database::LooseIndexScanDistinctCount::COLUMN_ALIAS]
end
results = merge_results(results, batch_relation.send(@operation, *op_args)) # rubocop:disable GitlabSecurity/PublicSend
batch_start = batch_end
rescue ActiveRecord::QueryCanceled => error
# retry with a safe batch size & warmer cache
if batch_size >= 2 * MIN_REQUIRED_BATCH_SIZE
batch_size /= 2
else
log_canceled_batch_fetch(batch_start, mode, batch_relation.to_sql, error)
return FALLBACK
end
rescue Gitlab::Database::LooseIndexScanDistinctCount::ColumnConfigurationError => error
Gitlab::AppJsonLogger
.error(
event: 'batch_count',
relation: @relation.table_name,
operation: @operation,
operation_args: @operation_args,
mode: mode,
message: "LooseIndexScanDistinctCount column error: #{error.message}"
)
return FALLBACK
end
sleep(SLEEP_TIME_IN_SECONDS)
end
results
end
def merge_results(results, object)
return object unless results
if object.is_a?(Hash)
results.merge!(object) { |_, a, b| a + b }
else
results + object
end
end
private
def build_relation_batch(start, finish, mode)
if use_loose_index_scan_for_distinct_values?(mode)
Gitlab::Database::LooseIndexScanDistinctCount.new(@relation, @column).build_query(from: start, to: finish)
else
@relation.select(@column).public_send(mode).where(between_condition(start, finish)) # rubocop:disable GitlabSecurity/PublicSend
end
end
def batch_size_for_mode_and_operation(mode, operation)
return DEFAULT_SUM_BATCH_SIZE if operation == :sum
mode == :distinct ? DEFAULT_DISTINCT_BATCH_SIZE : DEFAULT_BATCH_SIZE
end
def between_condition(start, finish)
return @column.between(start...finish) if @column.is_a?(Arel::Attributes::Attribute)
{ @column => start...finish }
end
def actual_start(start)
start || @relation.unscope(:group, :having).minimum(@column) || 0
end
def actual_finish(finish)
(finish || @relation.unscope(:group, :having).maximum(@column) || FALLBACK_FINISH) + OFFSET_BY_ONE
end
def check_mode!(mode)
raise "The mode #{mode.inspect} is not supported" unless ALLOWED_MODES.include?(mode)
raise 'Use distinct count for optimized distinct counting' if @relation.limit(1).distinct_value.present? && mode != :distinct
raise 'Use distinct count only with non id fields' if @column == :id && mode == :distinct
end
def log_canceled_batch_fetch(batch_start, mode, query, error)
Gitlab::AppJsonLogger
.error(
event: 'batch_count',
relation: @relation.table_name,
operation: @operation,
operation_args: @operation_args,
start: batch_start,
mode: mode,
query: query,
message: "Query has been canceled with message: #{error.message}"
)
end
def use_loose_index_scan_for_distinct_values?(mode)
Feature.enabled?(:loose_index_scan_for_distinct_values) && not_group_by_query? && mode == :distinct
end
def not_group_by_query?
!@relation.is_a?(ActiveRecord::Relation) || @relation.group_values.blank?
end
end
end
end

View File

@ -0,0 +1,159 @@
# frozen_string_literal: true
module Gitlab
module Database
class BatchCounter
FALLBACK = -1
MIN_REQUIRED_BATCH_SIZE = 1_250
DEFAULT_SUM_BATCH_SIZE = 1_000
MAX_ALLOWED_LOOPS = 10_000
SLEEP_TIME_IN_SECONDS = 0.01 # 10 msec sleep
ALLOWED_MODES = [:itself, :distinct].freeze
FALLBACK_FINISH = 0
OFFSET_BY_ONE = 1
# Each query should take < 500ms https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22705
DEFAULT_DISTINCT_BATCH_SIZE = 10_000
DEFAULT_BATCH_SIZE = 100_000
def initialize(relation, column: nil, operation: :count, operation_args: nil)
@relation = relation
@column = column || relation.primary_key
@operation = operation
@operation_args = operation_args
end
def unwanted_configuration?(finish, batch_size, start)
(@operation == :count && batch_size <= MIN_REQUIRED_BATCH_SIZE) ||
(@operation == :sum && batch_size < DEFAULT_SUM_BATCH_SIZE) ||
(finish - start) / batch_size >= MAX_ALLOWED_LOOPS ||
start >= finish
end
def count(batch_size: nil, mode: :itself, start: nil, finish: nil)
raise 'BatchCount can not be run inside a transaction' if ActiveRecord::Base.connection.transaction_open?
check_mode!(mode)
# non-distinct have better performance
batch_size ||= batch_size_for_mode_and_operation(mode, @operation)
start = actual_start(start)
finish = actual_finish(finish)
raise "Batch counting expects positive values only for #{@column}" if start < 0 || finish < 0
return FALLBACK if unwanted_configuration?(finish, batch_size, start)
results = nil
batch_start = start
while batch_start < finish
begin
batch_end = [batch_start + batch_size, finish].min
batch_relation = build_relation_batch(batch_start, batch_end, mode)
op_args = @operation_args
if @operation == :count && @operation_args.blank? && use_loose_index_scan_for_distinct_values?(mode)
op_args = [Gitlab::Database::LooseIndexScanDistinctCount::COLUMN_ALIAS]
end
results = merge_results(results, batch_relation.send(@operation, *op_args)) # rubocop:disable GitlabSecurity/PublicSend
batch_start = batch_end
rescue ActiveRecord::QueryCanceled => error
# retry with a safe batch size & warmer cache
if batch_size >= 2 * MIN_REQUIRED_BATCH_SIZE
batch_size /= 2
else
log_canceled_batch_fetch(batch_start, mode, batch_relation.to_sql, error)
return FALLBACK
end
rescue Gitlab::Database::LooseIndexScanDistinctCount::ColumnConfigurationError => error
Gitlab::AppJsonLogger
.error(
event: 'batch_count',
relation: @relation.table_name,
operation: @operation,
operation_args: @operation_args,
mode: mode,
message: "LooseIndexScanDistinctCount column error: #{error.message}"
)
return FALLBACK
end
sleep(SLEEP_TIME_IN_SECONDS)
end
results
end
def merge_results(results, object)
return object unless results
if object.is_a?(Hash)
results.merge!(object) { |_, a, b| a + b }
else
results + object
end
end
private
def build_relation_batch(start, finish, mode)
if use_loose_index_scan_for_distinct_values?(mode)
Gitlab::Database::LooseIndexScanDistinctCount.new(@relation, @column).build_query(from: start, to: finish)
else
@relation.select(@column).public_send(mode).where(between_condition(start, finish)) # rubocop:disable GitlabSecurity/PublicSend
end
end
def batch_size_for_mode_and_operation(mode, operation)
return DEFAULT_SUM_BATCH_SIZE if operation == :sum
mode == :distinct ? DEFAULT_DISTINCT_BATCH_SIZE : DEFAULT_BATCH_SIZE
end
def between_condition(start, finish)
return @column.between(start...finish) if @column.is_a?(Arel::Attributes::Attribute)
{ @column => start...finish }
end
def actual_start(start)
start || @relation.unscope(:group, :having).minimum(@column) || 0
end
def actual_finish(finish)
(finish || @relation.unscope(:group, :having).maximum(@column) || FALLBACK_FINISH) + OFFSET_BY_ONE
end
def check_mode!(mode)
raise "The mode #{mode.inspect} is not supported" unless ALLOWED_MODES.include?(mode)
raise 'Use distinct count for optimized distinct counting' if @relation.limit(1).distinct_value.present? && mode != :distinct
raise 'Use distinct count only with non id fields' if @column == :id && mode == :distinct
end
def log_canceled_batch_fetch(batch_start, mode, query, error)
Gitlab::AppJsonLogger
.error(
event: 'batch_count',
relation: @relation.table_name,
operation: @operation,
operation_args: @operation_args,
start: batch_start,
mode: mode,
query: query,
message: "Query has been canceled with message: #{error.message}"
)
end
def use_loose_index_scan_for_distinct_values?(mode)
Feature.enabled?(:loose_index_scan_for_distinct_values) && not_group_by_query? && mode == :distinct
end
def not_group_by_query?
!@relation.is_a?(ActiveRecord::Relation) || @relation.group_values.blank?
end
end
end
end

View File

@ -1,5 +1,7 @@
# frozen_string_literal: true
require 'net/dns'
module Gitlab
module Database
module LoadBalancing

View File

@ -22,7 +22,7 @@ module Gitlab
end
def force_disconnect_timer
@force_disconnect_timer ||= ConnectionTimer.starting_now
@force_disconnect_timer ||= ::Gitlab::Database::ConnectionTimer.starting_now
end
end
end

View File

@ -33,6 +33,7 @@ module Gitlab
gon.disable_animations = Gitlab.config.gitlab['disable_animations']
gon.suggested_label_colors = LabelsHelper.suggested_colors
gon.first_day_of_week = current_user&.first_day_of_week || Gitlab::CurrentSettings.first_day_of_week
gon.time_display_relative = true
gon.ee = Gitlab.ee?
gon.dot_com = Gitlab.com?
@ -41,6 +42,7 @@ module Gitlab
gon.current_username = current_user.username
gon.current_user_fullname = current_user.name
gon.current_user_avatar_url = current_user.avatar_url
gon.time_display_relative = current_user.time_display_relative
end
# Initialize gon.features with any flags that should be

View File

@ -21,11 +21,7 @@ module Sidebars
override :link
def link
if can?(context.current_user, :read_environment, context.project)
metrics_project_environments_path(context.project)
else
project_feature_flags_path(context.project)
end
renderable_items.first&.link
end
override :extra_container_html_options

View File

@ -8291,15 +8291,15 @@ msgstr ""
msgid "Configure repository mirroring."
msgstr ""
msgid "Configure repository storage."
msgstr ""
msgid "Configure settings for Advanced Search with Elasticsearch."
msgstr ""
msgid "Configure specific limits for Packages API requests that supersede the general user and IP rate limits."
msgstr ""
msgid "Configure storage path settings."
msgstr ""
msgid "Configure the %{link} integration."
msgstr ""
@ -12276,6 +12276,9 @@ msgstr ""
msgid "Ensure your %{linkStart}environment is part of the deploy stage%{linkEnd} of your CI pipeline to track deployments to your cluster."
msgstr ""
msgid "Enter %{weights_link_start}weights%{weights_link_end} for storages for new repositories. Configured storages appear below."
msgstr ""
msgid "Enter 2FA for Admin Mode"
msgstr ""
@ -12333,9 +12336,6 @@ msgstr ""
msgid "Enter the number of seconds, or other human-readable input, like \"1 hour\". This timeout takes precedence over lower timeouts set for the project."
msgstr ""
msgid "Enter weights for storages for new repositories."
msgstr ""
msgid "Enter your password to approve"
msgstr ""
@ -18513,6 +18513,9 @@ msgstr ""
msgid "JiraService|Failed to load Jira issue. View the issue in Jira, or reload the page."
msgstr ""
msgid "JiraService|Failed to update Jira issue labels. View the issue in Jira, or reload the page."
msgstr ""
msgid "JiraService|Failed to update Jira issue status. View the issue in Jira, or reload the page."
msgstr ""
@ -24664,6 +24667,9 @@ msgstr ""
msgid "Preferences|Choose what content you want to see on your homepage."
msgstr ""
msgid "Preferences|Configure how dates and times display for you."
msgstr ""
msgid "Preferences|Customize integrations with third party services."
msgstr ""
@ -24682,7 +24688,7 @@ msgstr ""
msgid "Preferences|Failed to save preferences."
msgstr ""
msgid "Preferences|For example: 30 mins ago."
msgid "Preferences|For example: 30 minutes ago."
msgstr ""
msgid "Preferences|Gitpod"
@ -24730,9 +24736,6 @@ msgstr ""
msgid "Preferences|Tab width"
msgstr ""
msgid "Preferences|These settings will update how dates and times are displayed for you."
msgstr ""
msgid "Preferences|This feature is experimental and translations are not complete yet"
msgstr ""
@ -24742,12 +24745,6 @@ msgstr ""
msgid "Preferences|This setting allows you to customize the behavior of the system layout and default views."
msgstr ""
msgid "Preferences|Time display"
msgstr ""
msgid "Preferences|Time format"
msgstr ""
msgid "Preferences|Time preferences"
msgstr ""
@ -35444,7 +35441,7 @@ msgstr ""
msgid "Use hashed storage"
msgstr ""
msgid "Use hashed storage paths for newly created and renamed repositories. Enable immutable, hash-based paths and repository names to store repositories on disk. This prevents repositories from having to be moved or renamed when the Repository URL changes and may improve disk I/O performance. (Always enabled since 13.0)"
msgid "Use hashed storage paths for newly created and renamed repositories. Always enabled since 13.0."
msgstr ""
msgid "Use one line per URI"

View File

@ -0,0 +1,104 @@
import $ from 'jquery';
import { getTimeago, localTimeAgo, timeFor } from '~/lib/utils/datetime/timeago_utility';
import { s__ } from '~/locale';
import '~/commons/bootstrap';
describe('TimeAgo utils', () => {
let oldGon;
afterEach(() => {
window.gon = oldGon;
});
beforeEach(() => {
oldGon = window.gon;
});
describe('getTimeago', () => {
describe('with User Setting timeDisplayRelative: true', () => {
beforeEach(() => {
window.gon = { time_display_relative: true };
});
it.each([
[new Date().toISOString(), 'just now'],
[new Date().getTime(), 'just now'],
[new Date(), 'just now'],
[null, 'just now'],
])('formats date `%p` as `%p`', (date, result) => {
expect(getTimeago().format(date)).toEqual(result);
});
});
describe('with User Setting timeDisplayRelative: false', () => {
beforeEach(() => {
window.gon = { time_display_relative: false };
});
it.each([
[new Date().toISOString(), 'Jul 6, 2020, 12:00 AM'],
[new Date(), 'Jul 6, 2020, 12:00 AM'],
[new Date().getTime(), 'Jul 6, 2020, 12:00 AM'],
// Slightly different behaviour when `null` is passed :see_no_evil:
[null, 'Jan 1, 1970, 12:00 AM'],
])('formats date `%p` as `%p`', (date, result) => {
expect(getTimeago().format(date)).toEqual(result);
});
});
});
describe('timeFor', () => {
it('returns localize `past due` when in past', () => {
const date = new Date();
date.setFullYear(date.getFullYear() - 1);
expect(timeFor(date)).toBe(s__('Timeago|Past due'));
});
it('returns localized remaining time when in the future', () => {
const date = new Date();
date.setFullYear(date.getFullYear() + 1);
// Add a day to prevent a transient error. If date is even 1 second
// short of a full year, timeFor will return '11 months remaining'
date.setDate(date.getDate() + 1);
expect(timeFor(date)).toBe(s__('Timeago|1 year remaining'));
});
});
describe('localTimeAgo', () => {
beforeEach(() => {
document.body.innerHTML =
'<time title="some time" datetime="2020-02-18T22:22:32Z">1 hour ago</time>';
});
describe.each`
timeDisplayRelative | text
${true} | ${'4 months ago'}
${false} | ${'Feb 18, 2020, 10:22 PM'}
`(
`With User Setting timeDisplayRelative: $timeDisplayRelative`,
({ timeDisplayRelative, text }) => {
it.each`
timeagoArg | title
${false} | ${'some time'}
${true} | ${'Feb 18, 2020 10:22pm UTC'}
`(
`has content: '${text}' and tooltip: '$title' with timeagoArg = $timeagoArg`,
({ timeagoArg, title }) => {
window.gon = { time_display_relative: timeDisplayRelative };
const element = document.querySelector('time');
localTimeAgo($(element), timeagoArg);
jest.runAllTimers();
expect(element.getAttribute('title')).toBe(title);
expect(element.innerText).toBe(text);
},
);
},
);
});
});

View File

@ -1,30 +1,9 @@
import $ from 'jquery';
import timezoneMock from 'timezone-mock';
import * as datetimeUtility from '~/lib/utils/datetime_utility';
import { __, s__ } from '~/locale';
import '~/commons/bootstrap';
describe('Date time utils', () => {
describe('timeFor', () => {
it('returns localize `past due` when in past', () => {
const date = new Date();
date.setFullYear(date.getFullYear() - 1);
expect(datetimeUtility.timeFor(date)).toBe(s__('Timeago|Past due'));
});
it('returns localized remaining time when in the future', () => {
const date = new Date();
date.setFullYear(date.getFullYear() + 1);
// Add a day to prevent a transient error. If date is even 1 second
// short of a full year, timeFor will return '11 months remaining'
date.setDate(date.getDate() + 1);
expect(datetimeUtility.timeFor(date)).toBe(s__('Timeago|1 year remaining'));
});
});
describe('get localized day name', () => {
it('should return Sunday', () => {
const day = datetimeUtility.getDayName(new Date('07/17/2016'));
@ -870,25 +849,6 @@ describe('approximateDuration', () => {
});
});
describe('localTimeAgo', () => {
beforeEach(() => {
document.body.innerHTML = `<time title="some time" datetime="2020-02-18T22:22:32Z">1 hour ago</time>`;
});
it.each`
timeagoArg | title
${false} | ${'some time'}
${true} | ${'Feb 18, 2020 10:22pm UTC'}
`('converts $seconds seconds to $approximation', ({ timeagoArg, title }) => {
const element = document.querySelector('time');
datetimeUtility.localTimeAgo($(element), timeagoArg);
jest.runAllTimers();
expect(element.getAttribute('title')).toBe(title);
});
});
describe('differenceInSeconds', () => {
const startDateTime = new Date('2019-07-17T00:00:00.000Z');

View File

@ -49,6 +49,15 @@ describe('LineHighlighter', () => {
}
});
it('highlights a range of lines given in the URL hash using GitHub format', () => {
new LineHighlighter({ hash: '#L5-L25' });
expect($(`.${testContext.css}`).length).toBe(21);
for (let line = 5; line <= 25; line += 1) {
expect($(`#LC${line}`)).toHaveClass(testContext.css);
}
});
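The added case exercises the GitHub-style range hash ('#L5-L25') alongside the existing '#L5-25' form used by the surrounding tests. Purely as an illustration of the accepted formats (not the component's actual parsing code), a parser handling both could be:

// Accepts '#L5', '#L5-25' and '#L5-L25'; returns [start, end] or null.
function parseLineRange(hash) {
  const match = /^#?L(\d+)(?:-L?(\d+))?$/.exec(hash);
  if (!match) return null;
  const start = Number(match[1]);
  const end = match[2] ? Number(match[2]) : start;
  return [start, end];
}

console.log(parseLineRange('#L5-L25')); // [5, 25]
console.log(parseLineRange('#L5-25'));  // [5, 25]
console.log(parseLineRange('#L5'));     // [5, 5]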
it('scrolls to the first highlighted line on initial load', () => {
jest.spyOn(utils, 'scrollToElement');
new LineHighlighter({ hash: '#L5-25' });

View File

@ -11,7 +11,8 @@ import BlobHeader from '~/blob/components/blob_header.vue';
import BlobButtonGroup from '~/repository/components/blob_button_group.vue';
import BlobContentViewer from '~/repository/components/blob_content_viewer.vue';
import BlobEdit from '~/repository/components/blob_edit.vue';
import { loadViewer } from '~/repository/components/blob_viewers';
import { loadViewer, viewerProps } from '~/repository/components/blob_viewers';
import TextViewer from '~/repository/components/blob_viewers/text_viewer.vue';
import blobInfoQuery from '~/repository/queries/blob_info.query.graphql';
jest.mock('~/repository/components/blob_viewers');
@ -122,6 +123,7 @@ describe('Blob content viewer component', () => {
const findBlobEdit = () => wrapper.findComponent(BlobEdit);
const findBlobContent = () => wrapper.findComponent(BlobContent);
const findBlobButtonGroup = () => wrapper.findComponent(BlobButtonGroup);
const findTextViewer = () => wrapper.findComponent(TextViewer);
afterEach(() => {
wrapper.destroy();
@ -231,6 +233,15 @@ describe('Blob content viewer component', () => {
expect(findBlobContent().exists()).toBe(false);
});
it('renders a TextViewer for text files', () => {
loadViewer.mockReturnValueOnce(TextViewer);
viewerProps.mockReturnValueOnce({ content: 'test', fileName: 'test.js', readOnly: true });
factory({ mockData: { blobInfo: simpleMockData } });
expect(findTextViewer().exists()).toBe(true);
});
});
describe('BlobHeader action slot', () => {

View File

@ -0,0 +1,30 @@
import { shallowMount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
import TextViewer from '~/repository/components/blob_viewers/text_viewer.vue';
import SourceEditor from '~/vue_shared/components/source_editor.vue';
describe('Text Viewer', () => {
let wrapper;
const propsData = {
content: 'Some content',
fileName: 'file_name.js',
readOnly: true,
};
const createComponent = () => {
wrapper = shallowMount(TextViewer, { propsData });
};
const findEditor = () => wrapper.findComponent(SourceEditor);
it('renders a Source Editor component', async () => {
createComponent();
await waitForPromises();
expect(findEditor().exists()).toBe(true);
expect(findEditor().props('value')).toBe(propsData.content);
expect(findEditor().props('fileName')).toBe(propsData.fileName);
expect(findEditor().props('editorOptions')).toEqual({ readOnly: propsData.readOnly });
});
});
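For orientation, the spec above pins down the new viewer's contract: it takes `content`, `fileName` and `readOnly` and forwards them to the shared source editor as `value`, `fileName` and `editorOptions.readOnly`. A hedged options-API sketch that satisfies those assertions (the shipped file is a single-file component; `SourceEditorStub` is a placeholder for `~/vue_shared/components/source_editor.vue`):

// Approximation of the component's props and bindings as asserted in the spec above.
const SourceEditorStub = {
  name: 'SourceEditor',
  props: ['value', 'fileName', 'editorOptions'],
  template: '<pre>{{ value }}</pre>',
};

const TextViewerSketch = {
  name: 'TextViewer',
  components: { SourceEditor: SourceEditorStub },
  props: {
    content: { type: String, required: true },
    fileName: { type: String, required: true },
    readOnly: { type: Boolean, required: false, default: true },
  },
  template: `
    <source-editor :value="content" :file-name="fileName" :editor-options="{ readOnly }" />
  `,
};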

View File

@ -3,7 +3,7 @@ import mountComponent from 'helpers/vue_mount_component_helper';
import { projectData } from 'jest/ide/mock_data';
import { TEST_HOST } from 'spec/test_constants';
import { getFirstCharacterCapitalized } from '~/lib/utils/text_utility';
import ProjectAvatarDefault from '~/vue_shared/components/project_avatar/default.vue';
import ProjectAvatarDefault from '~/vue_shared/components/deprecated_project_avatar/default.vue';
describe('ProjectAvatarDefault component', () => {
const Component = Vue.extend(ProjectAvatarDefault);

View File

@ -0,0 +1,67 @@
import { GlAvatar } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import ProjectAvatar from '~/vue_shared/components/project_avatar.vue';
const defaultProps = {
projectName: 'GitLab',
};
describe('ProjectAvatar', () => {
let wrapper;
const findGlAvatar = () => wrapper.findComponent(GlAvatar);
const createComponent = ({ props, attrs } = {}) => {
wrapper = shallowMount(ProjectAvatar, { propsData: { ...defaultProps, ...props }, attrs });
};
afterEach(() => {
wrapper.destroy();
});
it('renders GlAvatar with correct props', () => {
createComponent();
const avatar = findGlAvatar();
expect(avatar.exists()).toBe(true);
expect(avatar.props()).toMatchObject({
alt: defaultProps.projectName,
entityName: defaultProps.projectName,
size: 32,
src: '',
});
});
describe('with `size` prop', () => {
it('renders GlAvatar with specified `size` prop', () => {
const mockSize = 48;
createComponent({ props: { size: mockSize } });
const avatar = findGlAvatar();
expect(avatar.props('size')).toBe(mockSize);
});
});
describe('with `projectAvatarUrl` prop', () => {
it('renders GlAvatar with specified `src` prop', () => {
const mockProjectAvatarUrl = 'https://gitlab.com';
createComponent({ props: { projectAvatarUrl: mockProjectAvatarUrl } });
const avatar = findGlAvatar();
expect(avatar.props('src')).toBe(mockProjectAvatarUrl);
});
});
describe.each`
alt
${''}
${'custom-alt'}
`('when `alt` prop is "$alt"', ({ alt }) => {
it('renders GlAvatar with specified `alt` attribute', () => {
createComponent({ props: { alt } });
const avatar = findGlAvatar();
expect(avatar.props('alt')).toBe(alt);
});
});
});
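The assertions above describe the new component fairly completely: GlAvatar receives `entityName` and an `alt` that defaults to `projectName`, `size` defaults to 32, `src` comes from `projectAvatarUrl`, and an explicitly passed `alt` (even an empty string) wins over the default. A sketch consistent with those assertions, not the shipped component (`GlAvatarStub` stands in for `GlAvatar` from `@gitlab/ui`):

// Sketch matching the spec's expectations about props and defaults.
const GlAvatarStub = {
  name: 'GlAvatar',
  props: ['alt', 'entityName', 'size', 'src'],
  template: '<img :src="src" :alt="alt" />',
};

const ProjectAvatarSketch = {
  name: 'ProjectAvatar',
  components: { GlAvatar: GlAvatarStub },
  props: {
    projectName: { type: String, required: true },
    projectAvatarUrl: { type: String, required: false, default: '' },
    size: { type: Number, required: false, default: 32 },
    alt: { type: String, required: false, default: undefined },
  },
  computed: {
    // Fall back to the project name only when no explicit alt text is given.
    avatarAlt() {
      return this.alt !== undefined ? this.alt : this.projectName;
    },
  },
  template: `
    <gl-avatar :alt="avatarAlt" :entity-name="projectName" :size="size" :src="projectAvatarUrl" />
  `,
};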

View File

@ -1,5 +1,6 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { trimText } from 'helpers/text_helper';
import ProjectAvatar from '~/vue_shared/components/deprecated_project_avatar/default.vue';
import ProjectListItem from '~/vue_shared/components/project_selector/project_list_item.vue';
const localVue = createLocalVue();
@ -53,7 +54,7 @@ describe('ProjectListItem component', () => {
it(`renders the project avatar`, () => {
wrapper = shallowMount(Component, options);
expect(wrapper.find('.js-project-avatar').exists()).toBe(true);
expect(wrapper.findComponent(ProjectAvatar).exists()).toBe(true);
});
it(`renders a simple namespace name with a trailing slash`, () => {

View File

@ -54,7 +54,6 @@ describe('DropdownContentsLabelsView', () => {
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
const findDropdownContent = () => wrapper.find('[data-testid="dropdown-content"]');
@ -381,6 +380,15 @@ describe('DropdownContentsLabelsView', () => {
expect(findDropdownFooter().exists()).toBe(false);
});
it('does not render footer list items when `allowLabelCreate` is false and `labelsManagePath` is null', () => {
createComponent({
...mockConfig,
allowLabelCreate: false,
labelsManagePath: null,
});
expect(findDropdownFooter().exists()).toBe(false);
});
it('renders footer list items when `state.variant` is "embedded"', () => {
expect(findDropdownFooter().exists()).toBe(true);
});

View File

@ -2,6 +2,8 @@
require 'spec_helper'
require 'webauthn/u2f_migrator'
RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20200925125321 do
let(:users) { table(:users) }

View File

@ -2230,16 +2230,14 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
describe '#backfill_iids' do
include MigrationsHelpers
before do
stub_const('Issue', Class.new(ActiveRecord::Base))
Issue.class_eval do
let(:issue_class) do
Class.new(ActiveRecord::Base) do
include AtomicInternalId
self.table_name = 'issues'
self.inheritance_column = :_type_disabled
belongs_to :project, class_name: "::Project"
belongs_to :project, class_name: "::Project", inverse_of: nil
has_internal_id :iid,
scope: :project,
@ -2262,7 +2260,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
issue = Issue.create!(project_id: project.id)
issue = issue_class.create!(project_id: project.id)
expect(issue.iid).to eq(1)
end
@ -2273,7 +2271,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
issue_b = Issue.create!(project_id: project.id)
issue_b = issue_class.create!(project_id: project.id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.iid).to eq(2)
@ -2288,8 +2286,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
issue_a = Issue.create!(project_id: project_a.id)
issue_b = Issue.create!(project_id: project_b.id)
issue_a = issue_class.create!(project_id: project_a.id)
issue_b = issue_class.create!(project_id: project_b.id)
expect(issue_a.iid).to eq(2)
expect(issue_b.iid).to eq(3)
@ -2303,7 +2301,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
issue_b = Issue.create!(project_id: project_b.id)
issue_b = issue_class.create!(project_id: project_b.id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.reload.iid).to eq(1)

View File

@ -50,18 +50,21 @@ RSpec.describe Sidebars::Projects::Menus::MonitorMenu do
end
describe '#link' do
context 'when metrics dashboard is visible' do
it 'returns link to the metrics dashboard page' do
expect(subject.link).to include('/-/environments/metrics')
end
let(:foo_path) { '/foo_path' }
let(:foo_menu) do
::Sidebars::MenuItem.new(
title: 'foo',
link: foo_path,
active_routes: {},
item_id: :foo
)
end
context 'when metrics dashboard is not visible' do
it 'returns link to the feature flags page' do
project.project_feature.update!(operations_access_level: Featurable::DISABLED)
it 'returns first visible item link' do
subject.insert_element_before(subject.renderable_items, subject.renderable_items.first.item_id, foo_menu)
expect(subject.link).to include('/-/feature_flags')
end
expect(subject.link).to eq foo_path
end
end

View File

@ -119,6 +119,36 @@ RSpec.describe AwardEmoji do
end
end
describe 'bumping updated at' do
let(:note) { create(:note_on_issue) }
let(:award_emoji) { build(:award_emoji, user: build(:user), awardable: note) }
it 'calls bump_updated_at on the note when saved' do
expect(note).to receive(:bump_updated_at)
award_emoji.save!
end
it 'calls bump_updated_at on the note when destroyed' do
expect(note).to receive(:bump_updated_at)
award_emoji.destroy!
end
context 'on another awardable' do
let(:issue) { create(:issue) }
let(:award_emoji) { build(:award_emoji, user: build(:user), awardable: issue) }
it 'does not error out when saved' do
expect { award_emoji.save! }.not_to raise_error
end
it 'does not error out when destroyed' do
expect { award_emoji.destroy! }.not_to raise_error
end
end
end
describe '.award_counts_for_user' do
let(:user) { create(:user) }

View File

@ -51,4 +51,22 @@ RSpec.describe Discussion do
expect(policy).to be_a(NotePolicy)
end
end
describe '#cache_key' do
let(:notes_sha) { Digest::SHA1.hexdigest("#{first_note.id}:#{second_note.id}:#{third_note.id}") }
it 'returns the cache key with ID and latest updated note updated at' do
expect(subject.cache_key).to eq("#{described_class::CACHE_VERSION}:#{third_note.latest_cached_markdown_version}:#{subject.id}:#{notes_sha}:#{third_note.updated_at}:")
end
context 'when discussion is resolved' do
before do
subject.resolve!(first_note.author)
end
it 'returns the cache key with resolved at' do
expect(subject.cache_key).to eq("#{described_class::CACHE_VERSION}:#{third_note.latest_cached_markdown_version}:#{subject.id}:#{notes_sha}:#{third_note.updated_at}:#{subject.resolved_at}")
end
end
end
end

View File

@ -52,5 +52,121 @@ RSpec.describe 'merge requests discussions' do
expect { send_request }
.to change { Gitlab::GitalyClient.get_request_count }.by_at_most(4)
end
context 'caching', :use_clean_rails_memory_store_caching do
let!(:first_note) { create(:diff_note_on_merge_request, noteable: merge_request, project: project) }
let!(:second_note) { create(:diff_note_on_merge_request, in_reply_to: first_note, noteable: merge_request, project: project) }
let!(:award_emoji) { create(:award_emoji, awardable: first_note) }
before do
# Make a request to cache the discussions
send_request
end
shared_examples 'cache miss' do
it 'does not hit a warm cache' do
expect_next_instance_of(DiscussionSerializer) do |serializer|
expect(serializer).to receive(:represent) do |arg|
expect(arg.notes).to contain_exactly(*changed_notes)
end.and_call_original
end
send_request
end
end
it 'gets cached on subsequent requests' do
expect_next_instance_of(DiscussionSerializer) do |serializer|
expect(serializer).not_to receive(:represent)
end
send_request
end
context 'when a note in a discussion got updated' do
before do
first_note.update!(updated_at: 1.minute.from_now)
end
it_behaves_like 'cache miss' do
let(:changed_notes) { [first_note, second_note] }
end
end
context 'when a note in a discussion got resolved' do
before do
travel_to(1.minute.from_now) do
first_note.resolve!(user)
end
end
it_behaves_like 'cache miss' do
let(:changed_notes) { [first_note, second_note] }
end
end
context 'when a note is added to a discussion' do
let!(:third_note) { create(:diff_note_on_merge_request, in_reply_to: first_note, noteable: merge_request, project: project) }
it_behaves_like 'cache miss' do
let(:changed_notes) { [first_note, second_note, third_note] }
end
end
context 'when a note is removed from a discussion' do
before do
second_note.destroy!
end
it_behaves_like 'cache miss' do
let(:changed_notes) { [first_note] }
end
end
context 'when an emoji is awarded to a note in discussion' do
before do
travel_to(1.minute.from_now) do
create(:award_emoji, awardable: first_note)
end
end
it_behaves_like 'cache miss' do
let(:changed_notes) { [first_note, second_note] }
end
end
context 'when an award emoji is removed from a note in discussion' do
before do
travel_to(1.minute.from_now) do
award_emoji.destroy!
end
end
it_behaves_like 'cache miss' do
let(:changed_notes) { [first_note, second_note] }
end
end
context 'when cached markdown version gets bumped' do
before do
settings = Gitlab::CurrentSettings.current_application_settings
settings.update!(local_markdown_version: settings.local_markdown_version + 1)
end
it_behaves_like 'cache miss' do
let(:changed_notes) { [first_note, second_note] }
end
end
context 'when merge_request_discussion_cache is disabled' do
before do
stub_feature_flags(merge_request_discussion_cache: false)
end
it_behaves_like 'cache miss' do
let(:changed_notes) { [first_note, second_note] }
end
end
end
end
end

View File

@ -50,9 +50,9 @@ RSpec.describe Projects::GroupLinks::CreateService, '#execute' do
expect(AuthorizedProjectsWorker).not_to(
receive(:bulk_perform_async)
)
expect(AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker).to(
expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to(
receive(:perform_async)
.with(project.id, group.id, group_access)
.with(project.id)
.and_call_original
)
expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(

View File

@ -30,6 +30,17 @@ RSpec.describe UserProjectAccessChangedService do
described_class.new([1, 2]).execute(blocking: false,
priority: described_class::LOW_PRIORITY)
end
it 'sets the current caller_id as related_class in the context of all the enqueued jobs' do
Gitlab::ApplicationContext.with_context(caller_id: 'Foo') do
described_class.new([1, 2]).execute(blocking: false,
priority: described_class::LOW_PRIORITY)
end
expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker.jobs).to all(
include(Labkit::Context.log_key(:related_class) => 'Foo')
)
end
end
context 'with load balancing enabled' do

View File

@ -34,30 +34,6 @@ RSpec.describe AuthorizedProjectUpdate::UserRefreshFromReplicaWorker do
execute_worker
end
context 'setting `meta.caller_id` as `meta.related_class` in the context of the newly enqueued `UserRefreshWithLowUrgencyWorker` job' do
context 'when the `UserRefreshFromReplicaWorker` job has a `caller_id` set' do
it 'sets the same `caller_id` as `related_class`' do
expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).to receive(:perform_async).with(user.id) do
expect(Gitlab::ApplicationContext.current).to include('meta.related_class' => 'Foo')
end
Gitlab::ApplicationContext.with_context(caller_id: 'Foo') do
execute_worker
end
end
end
context 'when the `UserRefreshFromReplicaWorker` job does not have a `caller_id` set' do
it 'does not set the value of `related_class`' do
expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).to receive(:perform_async).with(user.id) do
expect(Gitlab::ApplicationContext.current).not_to include('meta.related_class')
end
execute_worker
end
end
end
end
context 'when there are no additions or removals to be made to project authorizations for a specific user' do