Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-02-12 18:08:59 +00:00
parent f44248b613
commit 6d533fe8b4
93 changed files with 1552 additions and 308 deletions

View File

@ -2,6 +2,14 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 42.1.1 (2021-02-12)
### Security (2 changes)
- Testing main branch.
- Testing main branch.
## 13.8.4 (2021-02-11)
### Security (9 changes)

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

View File

@ -1,9 +1,7 @@
import { initRemoveTag } from '../remove_tag';
document.addEventListener('DOMContentLoaded', () => {
initRemoveTag({
onDelete: (path) => {
document.querySelector(`[data-path="${path}"]`).closest('.js-tag-list').remove();
},
});
initRemoveTag({
onDelete: (path) => {
document.querySelector(`[data-path="${path}"]`).closest('.js-tag-list').remove();
},
});

View File

@ -0,0 +1,100 @@
<script>
import { uniqueId } from 'lodash';
import { GlAlert, GlIcon } from '@gitlab/ui';
import { __, s__ } from '~/locale';
import { DEFAULT, INVALID_CI_CONFIG } from '~/pipelines/constants';
import { CI_CONFIG_STATUS_INVALID } from '~/pipeline_editor/constants';
import EditorLite from '~/vue_shared/components/editor_lite.vue';

/**
 * Read-only viewer for the merged CI configuration YAML.
 *
 * Renders the merged YAML in a read-only EditorLite instance, or a danger
 * alert when the CI configuration is invalid (or an unknown error occurred).
 */
export default {
  i18n: {
    viewOnlyMessage: s__('Pipelines|Merged YAML is view only'),
  },
  errorTexts: {
    [INVALID_CI_CONFIG]: __('Your CI configuration file is invalid.'),
    [DEFAULT]: __('An unknown error occurred.'),
  },
  components: {
    EditorLite,
    GlAlert,
    GlIcon,
  },
  inject: ['ciConfigPath'],
  props: {
    // CI config payload; this component reads `status` and `mergedYaml` from it.
    ciConfigData: {
      type: Object,
      required: true,
    },
  },
  data() {
    return {
      // One of the errorTexts keys above, or null when there is no failure.
      failureType: null,
    };
  },
  computed: {
    // Human-readable message for the current failure type.
    failure() {
      switch (this.failureType) {
        case INVALID_CI_CONFIG:
          return this.$options.errorTexts[INVALID_CI_CONFIG];
        default:
          return this.$options.errorTexts[DEFAULT];
      }
    },
    fileGlobalId() {
      // Unique per component instance so multiple editors do not collide.
      return `${this.ciConfigPath}-${uniqueId()}`;
    },
    hasError() {
      // Return a real boolean instead of leaking the raw failureType value.
      return this.failureType !== null;
    },
    isInvalidConfiguration() {
      return this.ciConfigData.status === CI_CONFIG_STATUS_INVALID;
    },
    mergedYaml() {
      return this.ciConfigData.mergedYaml;
    },
  },
  watch: {
    ciConfigData: {
      immediate: true,
      handler() {
        // Re-evaluate the failure state whenever new config data arrives.
        if (this.isInvalidConfiguration) {
          this.reportFailure(INVALID_CI_CONFIG);
        } else if (this.hasError) {
          this.resetFailure();
        }
      },
    },
  },
  methods: {
    reportFailure(errorType) {
      this.failureType = errorType;
    },
    resetFailure() {
      this.failureType = null;
    },
  },
};
</script>

<template>
  <div>
    <gl-alert v-if="hasError" variant="danger" :dismissible="false">
      {{ failure }}
    </gl-alert>
    <div v-else>
      <div class="gl-display-flex gl-align-items-center">
        <gl-icon :size="18" name="lock" class="gl-text-gray-500 gl-mr-3" />
        {{ $options.i18n.viewOnlyMessage }}
      </div>
      <div class="gl-mt-3 gl-border-solid gl-border-gray-100 gl-border-1">
        <editor-lite
          ref="editor"
          :value="mergedYaml"
          :file-name="ciConfigPath"
          :file-global-id="fileGlobalId"
          :editor-options="{ readOnly: true }"
          v-on="$listeners"
        />
      </div>
    </div>
  </div>
</template>

View File

@ -2,7 +2,7 @@
import EditorLite from '~/vue_shared/components/editor_lite.vue';
import { CiSchemaExtension } from '~/editor/extensions/editor_ci_schema_ext';
import { EDITOR_READY_EVENT } from '~/editor/constants';
import getCommitSha from '../graphql/queries/client/commit_sha.graphql';
import getCommitSha from '../../graphql/queries/client/commit_sha.graphql';
export default {
components: {

View File

@ -1,21 +1,41 @@
<script>
import { GlLoadingIcon, GlTabs, GlTab } from '@gitlab/ui';
import { GlAlert, GlLoadingIcon, GlTabs, GlTab } from '@gitlab/ui';
import { s__ } from '~/locale';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import {
CI_CONFIG_STATUS_INVALID,
CREATE_TAB,
LINT_TAB,
MERGED_TAB,
VISUALIZE_TAB,
} from '../constants';
import CiConfigMergedPreview from './editor/ci_config_merged_preview.vue';
import CiLint from './lint/ci_lint.vue';
import EditorTab from './ui/editor_tab.vue';
import TextEditor from './text_editor.vue';
import TextEditor from './editor/text_editor.vue';
export default {
i18n: {
tabEdit: s__('Pipelines|Write pipeline configuration'),
tabGraph: s__('Pipelines|Visualize'),
tabLint: s__('Pipelines|Lint'),
tabMergedYaml: s__('Pipelines|View merged YAML'),
},
errorTexts: {
loadMergedYaml: s__('Pipelines|Could not load merged YAML content'),
},
tabConstants: {
CREATE_TAB,
LINT_TAB,
MERGED_TAB,
VISUALIZE_TAB,
},
components: {
CiConfigMergedPreview,
CiLint,
EditorTab,
GlAlert,
GlLoadingIcon,
GlTab,
GlTabs,
@ -38,25 +58,64 @@ export default {
default: false,
},
},
computed: {
hasMergedYamlLoadError() {
return (
!this.ciConfigData?.mergedYaml && this.ciConfigData.status !== CI_CONFIG_STATUS_INVALID
);
},
},
methods: {
setCurrentTab(tabName) {
this.$emit('set-current-tab', tabName);
},
},
};
</script>
<template>
<gl-tabs class="file-editor gl-mb-3">
<editor-tab :title="$options.i18n.tabEdit" lazy data-testid="editor-tab">
<editor-tab
class="gl-mb-3"
:title="$options.i18n.tabEdit"
lazy
data-testid="editor-tab"
@click="setCurrentTab($options.tabConstants.CREATE_TAB)"
>
<text-editor :value="ciFileContent" v-on="$listeners" />
</editor-tab>
<gl-tab
v-if="glFeatures.ciConfigVisualizationTab"
class="gl-mb-3"
:title="$options.i18n.tabGraph"
lazy
data-testid="visualization-tab"
@click="setCurrentTab($options.tabConstants.VISUALIZE_TAB)"
>
<gl-loading-icon v-if="isCiConfigDataLoading" size="lg" class="gl-m-3" />
<pipeline-graph v-else :pipeline-data="ciConfigData" />
</gl-tab>
<editor-tab :title="$options.i18n.tabLint" data-testid="lint-tab">
<editor-tab
class="gl-mb-3"
:title="$options.i18n.tabLint"
data-testid="lint-tab"
@click="setCurrentTab($options.tabConstants.LINT_TAB)"
>
<gl-loading-icon v-if="isCiConfigDataLoading" size="lg" class="gl-m-3" />
<ci-lint v-else :ci-config="ciConfigData" />
</editor-tab>
<gl-tab
v-if="glFeatures.ciConfigMergedTab"
class="gl-mb-3"
:title="$options.i18n.tabMergedYaml"
lazy
data-testid="merged-tab"
@click="setCurrentTab($options.tabConstants.MERGED_TAB)"
>
<gl-loading-icon v-if="isCiConfigDataLoading" size="lg" class="gl-m-3" />
<gl-alert v-else-if="hasMergedYamlLoadError" variant="danger" :dismissible="false">
{{ $options.errorTexts.loadMergedYaml }}
</gl-alert>
<ci-config-merged-preview v-else :ci-config-data="ciConfigData" v-on="$listeners" />
</gl-tab>
</gl-tabs>
</template>

View File

@ -7,3 +7,10 @@ export const COMMIT_SUCCESS = 'COMMIT_SUCCESS';
export const DEFAULT_FAILURE = 'DEFAULT_FAILURE';
export const LOAD_FAILURE_NO_FILE = 'LOAD_FAILURE_NO_FILE';
export const LOAD_FAILURE_UNKNOWN = 'LOAD_FAILURE_UNKNOWN';
export const CREATE_TAB = 'CREATE_TAB';
export const LINT_TAB = 'LINT_TAB';
export const MERGED_TAB = 'MERGED_TAB';
export const VISUALIZE_TAB = 'VISUALIZE_TAB';
export const TABS_WITH_COMMIT_FORM = [CREATE_TAB, LINT_TAB, VISUALIZE_TAB];

View File

@ -3,6 +3,7 @@
query getCiConfigData($projectPath: ID!, $content: String!) {
ciConfig(projectPath: $projectPath, content: $content) {
errors
mergedYaml
status
stages {
...PipelineStagesConnection

View File

@ -2,6 +2,7 @@
import CommitSection from './components/commit/commit_section.vue';
import PipelineEditorTabs from './components/pipeline_editor_tabs.vue';
import PipelineEditorHeader from './components/header/pipeline_editor_header.vue';
import { TABS_WITH_COMMIT_FORM, CREATE_TAB } from './constants';
export default {
components: {
@ -23,6 +24,21 @@ export default {
required: true,
},
},
data() {
return {
currentTab: CREATE_TAB,
};
},
computed: {
showCommitForm() {
return TABS_WITH_COMMIT_FORM.includes(this.currentTab);
},
},
methods: {
setCurrentTab(tabName) {
this.currentTab = tabName;
},
},
};
</script>
@ -37,7 +53,8 @@ export default {
:ci-file-content="ciFileContent"
:is-ci-config-data-loading="isCiConfigDataLoading"
v-on="$listeners"
@set-current-tab="setCurrentTab"
/>
<commit-section :ci-file-content="ciFileContent" v-on="$listeners" />
<commit-section v-if="showCommitForm" :ci-file-content="ciFileContent" v-on="$listeners" />
</div>
</template>

View File

@ -123,7 +123,7 @@ Sidebar.prototype.todoUpdateDone = function (data) {
.data('deletePath', deletePath);
if ($el.hasClass('has-tooltip')) {
fixTitle($el);
fixTitle(el);
}
if (typeof $el.data('isCollapsed') !== 'undefined') {

View File

@ -1,6 +1,5 @@
import Vue from 'vue';
import jQuery from 'jquery';
import { toArray, isFunction, isElement } from 'lodash';
import { toArray, isElement } from 'lodash';
import Tooltips from './components/tooltips.vue';
let app;
@ -60,72 +59,39 @@ const applyToElements = (elements, handler) => {
toArray(iterable).forEach(handler);
};
const invokeBootstrapApi = (elements, method) => {
if (isFunction(elements.tooltip)) {
elements.tooltip(method);
} else {
jQuery(elements).tooltip(method);
}
};
const isGlTooltipsEnabled = () => Boolean(window.gon.features?.glTooltips);
const tooltipApiInvoker = ({ glHandler, bsHandler }) => (elements, ...params) => {
if (isGlTooltipsEnabled()) {
applyToElements(elements, glHandler);
} else {
bsHandler(elements, ...params);
}
const createTooltipApiInvoker = (glHandler) => (elements) => {
applyToElements(elements, glHandler);
};
export const initTooltips = (config = {}) => {
if (isGlTooltipsEnabled()) {
const triggers = config?.triggers || DEFAULT_TRIGGER;
const events = triggers.split(' ').map((trigger) => EVENTS_MAP[trigger]);
const triggers = config?.triggers || DEFAULT_TRIGGER;
const events = triggers.split(' ').map((trigger) => EVENTS_MAP[trigger]);
events.forEach((event) => {
document.addEventListener(
event,
(e) => handleTooltipEvent(document, e, config.selector, config),
true,
);
});
events.forEach((event) => {
document.addEventListener(
event,
(e) => handleTooltipEvent(document, e, config.selector, config),
true,
);
});
return tooltipsApp();
}
return invokeBootstrapApi(document.body, config);
return tooltipsApp();
};
export const add = (elements, config = {}) => {
if (isGlTooltipsEnabled()) {
return addTooltips(elements, config);
}
return invokeBootstrapApi(elements, config);
};
export const dispose = tooltipApiInvoker({
glHandler: (element) => tooltipsApp().dispose(element),
bsHandler: (elements) => invokeBootstrapApi(elements, 'dispose'),
});
export const fixTitle = tooltipApiInvoker({
glHandler: (element) => tooltipsApp().fixTitle(element),
bsHandler: (elements) => invokeBootstrapApi(elements, '_fixTitle'),
});
export const enable = tooltipApiInvoker({
glHandler: (element) => tooltipsApp().triggerEvent(element, 'enable'),
bsHandler: (elements) => invokeBootstrapApi(elements, 'enable'),
});
export const disable = tooltipApiInvoker({
glHandler: (element) => tooltipsApp().triggerEvent(element, 'disable'),
bsHandler: (elements) => invokeBootstrapApi(elements, 'disable'),
});
export const hide = tooltipApiInvoker({
glHandler: (element) => tooltipsApp().triggerEvent(element, 'close'),
bsHandler: (elements) => invokeBootstrapApi(elements, 'hide'),
});
export const show = tooltipApiInvoker({
glHandler: (element) => tooltipsApp().triggerEvent(element, 'open'),
bsHandler: (elements) => invokeBootstrapApi(elements, 'show'),
});
export const add = (elements, config = {}) => addTooltips(elements, config);
export const dispose = createTooltipApiInvoker((element) => tooltipsApp().dispose(element));
export const fixTitle = createTooltipApiInvoker((element) => tooltipsApp().fixTitle(element));
export const enable = createTooltipApiInvoker((element) =>
tooltipsApp().triggerEvent(element, 'enable'),
);
export const disable = createTooltipApiInvoker((element) =>
tooltipsApp().triggerEvent(element, 'disable'),
);
export const hide = createTooltipApiInvoker((element) =>
tooltipsApp().triggerEvent(element, 'close'),
);
export const show = createTooltipApiInvoker((element) =>
tooltipsApp().triggerEvent(element, 'open'),
);
export const destroy = () => {
tooltipsApp().$destroy();
app = null;

View File

@ -40,7 +40,25 @@ class Projects::Ci::DailyBuildGroupReportResultsController < Projects::Applicati
end
def report_results
Ci::DailyBuildGroupReportResultsFinder.new(**finder_params).execute
if ::Gitlab::Ci::Features.use_coverage_data_new_finder?(project)
::Ci::Testing::DailyBuildGroupReportResultsFinder.new(
params: new_finder_params,
current_user: current_user
).execute
else
Ci::DailyBuildGroupReportResultsFinder.new(**finder_params).execute
end
end
def new_finder_params
{
project: project,
coverage: true,
start_date: start_date,
end_date: end_date,
ref_path: params[:ref_path],
sort: true
}
end
def finder_params

View File

@ -4,6 +4,7 @@ class Projects::Ci::PipelineEditorController < Projects::ApplicationController
before_action :check_can_collaborate!
before_action do
push_frontend_feature_flag(:ci_config_visualization_tab, @project, default_enabled: :yaml)
push_frontend_feature_flag(:ci_config_merged_tab, @project, default_enabled: :yaml)
end
feature_category :pipeline_authoring

View File

@ -0,0 +1,86 @@
# frozen_string_literal: true

# Finds Ci::DailyBuildGroupReportResult records matching a set of filters.
#
# Arguments:
#   current_user
#   params:
#     project: integer
#     group: integer
#     coverage: boolean
#     ref_path: string
#     start_date: date
#     end_date: date
#     sort: boolean
#     limit: integer
module Ci
  module Testing
    class DailyBuildGroupReportResultsFinder
      include Gitlab::Allowable

      # Hard cap on returned rows, regardless of params[:limit].
      MAX_ITEMS = 1_000

      attr_reader :params, :current_user

      def initialize(params: {}, current_user: nil)
        @params = params
        @current_user = current_user
      end

      # Returns the filtered (and optionally sorted/limited) relation, or an
      # empty relation when the user cannot read build report results.
      def execute
        return Ci::DailyBuildGroupReportResult.none unless query_allowed?

        scope = Ci::DailyBuildGroupReportResult.by_projects(params[:project])
        apply_filters(scope)
      end

      private

      def query_allowed?
        can?(current_user, :read_build_report_results, params[:project])
      end

      # Runs each filter step in order; every step receives the relation
      # produced by the previous one.
      def apply_filters(scope)
        steps = [
          method(:with_coverage_filter),
          method(:with_ref_path_filter),
          method(:with_date_filter),
          method(:with_ordering),
          method(:with_limit)
        ]

        steps.reduce(scope) { |relation, step| step.call(relation) }
      end

      def with_coverage_filter(relation)
        return relation unless params[:coverage].present?

        relation.with_coverage
      end

      def with_ref_path_filter(relation)
        if params[:ref_path].present?
          relation.by_ref_path(params[:ref_path])
        else
          relation.with_default_branch
        end
      end

      # Only filters when both endpoints of the date range are given.
      def with_date_filter(relation)
        return relation unless params[:start_date].present? && params[:end_date].present?

        relation.by_dates(params[:start_date], params[:end_date])
      end

      def with_ordering(relation)
        return relation unless params[:sort].present?

        relation.ordered_by_date_and_group_name
      end

      # rubocop: disable CodeReuse/ActiveRecord
      def with_limit(relation)
        relation.limit(row_limit)
      end
      # rubocop: enable CodeReuse/ActiveRecord

      # Caller-provided limit clamped to MAX_ITEMS; defaults to MAX_ITEMS.
      def row_limit
        return MAX_ITEMS unless params[:limit].present?

        [params[:limit].to_i, MAX_ITEMS].min
      end
    end
  end
end

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
module AuthHelper
PROVIDERS_WITH_ICONS = %w(twitter github gitlab bitbucket google_oauth2 facebook azure_oauth2 authentiq salesforce atlassian_oauth2).freeze
PROVIDERS_WITH_ICONS = %w(twitter github gitlab bitbucket google_oauth2 facebook azure_oauth2 authentiq salesforce atlassian_oauth2 openid_connect).freeze
LDAP_PROVIDER = /\Aldap/.freeze
def ldap_enabled?

View File

@ -13,10 +13,13 @@ module Ci
validates :data, json_schema: { filename: "daily_build_group_report_result_data" }
scope :with_included_projects, -> { includes(:project) }
scope :by_ref_path, -> (ref_path) { where(ref_path: ref_path) }
scope :by_projects, -> (ids) { where(project_id: ids) }
scope :with_coverage, -> { where("(data->'coverage') IS NOT NULL") }
scope :with_default_branch, -> { where(default_branch: true) }
scope :by_date, -> (start_date) { where(date: report_window(start_date)..Date.current) }
scope :by_dates, -> (start_date, end_date) { where(date: start_date..end_date) }
scope :ordered_by_date_and_group_name, -> { order(date: :desc, group_name: :asc) }
store_accessor :data, :coverage

View File

@ -7,6 +7,16 @@ class UserStatus < ApplicationRecord
DEFAULT_EMOJI = 'speech_balloon'
CLEAR_STATUS_QUICK_OPTIONS = {
'30_minutes' => 30.minutes,
'3_hours' => 3.hours,
'8_hours' => 8.hours,
'1_day' => 1.day,
'3_days' => 3.days,
'7_days' => 7.days,
'30_days' => 30.days
}.freeze
belongs_to :user
enum availability: { not_set: 0, busy: 1 }
@ -15,5 +25,11 @@ class UserStatus < ApplicationRecord
validates :emoji, inclusion: { in: Gitlab::Emoji.emojis_names }
validates :message, length: { maximum: 100 }, allow_blank: true
scope :scheduled_for_cleanup, -> { where(arel_table[:clear_status_at].lteq(Time.current)) }
cache_markdown_field :message, pipeline: :emoji
def clear_status_after=(value)
self.clear_status_at = CLEAR_STATUS_QUICK_OPTIONS[value]&.from_now
end
end

View File

@ -104,7 +104,7 @@ class Wiki
end
def empty?
list_pages(limit: 1).empty?
!repository_exists? || list_pages(limit: 1).empty?
end
def exists?

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true

module Users
  # Deletes expired user_statuses rows (clear_status_at in the past) in
  # fixed-size batches, skipping rows locked by concurrent runs.
  class BatchStatusCleanerService
    # Integer literals are already frozen in Ruby, so the original
    # `100.freeze` was redundant (RuboCop Style/RedundantFreeze).
    BATCH_SIZE = 100

    # Cleans up at most `batch_size` user_statuses records.
    #
    # Returns a hash with the number of deleted rows, e.g. { deleted_rows: 42 }.
    # rubocop: disable CodeReuse/ActiveRecord
    def self.execute(batch_size: BATCH_SIZE)
      # SKIP LOCKED lets parallel invocations work on disjoint rows
      # instead of blocking on each other's row locks.
      scope = UserStatus
        .select(:user_id)
        .scheduled_for_cleanup
        .lock('FOR UPDATE SKIP LOCKED')
        .limit(batch_size)

      deleted_rows = UserStatus.where(user_id: scope).delete_all

      { deleted_rows: deleted_rows }
    end
    # rubocop: enable CodeReuse/ActiveRecord
  end
end

View File

@ -52,8 +52,8 @@
%span.gl-font-base.gl-text-gray-500.gl-vertical-align-middle= s_("UserProfile|(Busy)")
- if show_status_emoji?(@user.status)
.cover-status
= emoji_icon(@user.status.emoji)
.cover-status.gl-display-inline-flex.gl-align-items-center
= emoji_icon(@user.status.emoji, class: 'gl-mr-2')
= markdown_field(@user.status, :message)
= render "users/profile_basic_info"
.cover-desc.cgray.mb-1.mb-sm-2

View File

@ -467,6 +467,14 @@
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:user_status_cleanup_batch
:feature_category: :users
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:users_create_statistics
:feature_category: :users
:has_external_dependencies:

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true

module UserStatusCleanup
  # This worker runs every minute and removes user status records whose
  # scheduled clear time has passed.
  class BatchWorker
    include ApplicationWorker
    # rubocop:disable Scalability/CronWorkerContext
    include CronjobQueue
    # rubocop:enable Scalability/CronWorkerContext

    feature_category :users
    idempotent!

    # Upper bound on how long a single run keeps issuing delete batches.
    MAX_RUNTIME = 30.seconds

    def perform
      return unless UserStatus.scheduled_for_cleanup.exists?

      deadline = Process.clock_gettime(Process::CLOCK_MONOTONIC) + MAX_RUNTIME

      loop do
        outcome = Users::BatchStatusCleanerService.execute

        # A short batch means no more expired rows are left right now.
        break if outcome[:deleted_rows] < Users::BatchStatusCleanerService::BATCH_SIZE
        # Stop once the time budget is spent; the next cron tick continues.
        break if Process.clock_gettime(Process::CLOCK_MONOTONIC) > deadline
      end
    end
  end
end

View File

@ -0,0 +1,5 @@
---
title: Add clear_status_at column to user_status table
merge_request: 53620
author:
type: other

View File

@ -0,0 +1,5 @@
---
title: Improve profile status emoji alignment
merge_request: 54078
author:
type: other

View File

@ -0,0 +1,5 @@
---
title: Support Markdown for Feature Flags
merge_request: 53816
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Add OpenID SSO icon
merge_request: 54026
author:
type: changed

View File

@ -0,0 +1,8 @@
---
name: ci_config_merged_tab
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/53299
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/301103
milestone: '13.9'
type: development
group: group::pipeline authoring
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: clear_status_with_quick_options
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/53620
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/320777
milestone: '13.9'
type: development
group: group::optimize
default_enabled: false

View File

@ -1,8 +1,8 @@
---
name: feature_flag_contextual_issue
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/53021
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/320741
name: coverage_data_new_finder
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/53670
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/301093
milestone: '13.9'
type: development
group: group::release
group: group::testing
default_enabled: false

View File

@ -1,8 +1,8 @@
---
name: export_reduce_relation_batch_size
introduced_by_url:
rollout_issue_url:
milestone:
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/34057
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/282245
milestone: '13.1'
type: development
group: group::import
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: gl_tooltips
introduced_by_url:
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/292972
milestone: '13.8'
type: development
group: group::editor
default_enabled: false

View File

@ -1,8 +1,8 @@
---
name: group_import_export
introduced_by_url:
rollout_issue_url:
milestone:
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22423
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/282245
milestone: '12.8'
type: development
group: group::import
default_enabled: true

View File

@ -1,8 +1,8 @@
---
name: log_import_export_relation_creation
introduced_by_url:
rollout_issue_url:
milestone:
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/27605
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/282245
milestone: '12.10'
type: development
group: group::import
default_enabled: false

View File

@ -1,8 +1,8 @@
---
name: project_list_filter_bar
introduced_by_url:
rollout_issue_url:
milestone:
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/11209
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/321404
milestone: '11.11'
type: development
group:
group: group::access
default_enabled: false

View File

@ -1,8 +1,8 @@
---
name: user_time_settings
introduced_by_url:
rollout_issue_url:
milestone:
introduced_by_url: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/25381
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/321407
milestone: '11.11'
type: development
group:
group: group::access
default_enabled: false

View File

@ -1,8 +1,8 @@
---
name: validate_import_decompressed_archive_size
introduced_by_url:
rollout_issue_url:
milestone:
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39686
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/282245
milestone: '13.4'
type: development
group: group::import
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: vue_epics_list
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/46769
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/276189
milestone: '13.9'
type: development
group: group::product planning
default_enabled: false

View File

@ -553,6 +553,9 @@ Settings.cron_jobs['schedule_merge_request_cleanup_refs_worker']['job_class'] =
Settings.cron_jobs['manage_evidence_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['manage_evidence_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['manage_evidence_worker']['job_class'] = 'Releases::ManageEvidenceWorker'
Settings.cron_jobs['user_status_cleanup_batch_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['user_status_cleanup_batch_worker']['cron'] ||= '* * * * *'
Settings.cron_jobs['user_status_cleanup_batch_worker']['job_class'] = 'UserStatusCleanup::BatchWorker'
Gitlab.com do
Settings.cron_jobs['namespaces_in_product_marketing_emails_worker'] ||= Settingslogic.new({})

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true

# Adds the nullable `clear_status_at` timestamp column to user_statuses,
# used to schedule automatic clearing of a user's status.
class AddStatusExpiresAtToUserStatuses < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def up
    # with_lock_retries runs the DDL under a short lock timeout with retries,
    # avoiding long blocking of concurrent writes to user_statuses.
    with_lock_retries do
      add_column(:user_statuses, :clear_status_at, :datetime_with_timezone, null: true)
    end
  end

  def down
    with_lock_retries do
      remove_column(:user_statuses, :clear_status_at)
    end
  end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true

# Adds a partial index on user_statuses.clear_status_at so cleanup queries
# can cheaply find only the rows that have a scheduled clear time.
class AddIndexOnUserStatusesStatusExpiresAt < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false
  INDEX_NAME = 'index_user_statuses_on_clear_status_at_not_null'

  # Concurrent index creation cannot run inside a transaction.
  disable_ddl_transaction!

  def up
    # Partial index: rows with a NULL clear_status_at are excluded entirely.
    add_concurrent_index(:user_statuses, :clear_status_at, name: INDEX_NAME, where: 'clear_status_at IS NOT NULL')
  end

  def down
    remove_concurrent_index_by_name(:user_statuses, INDEX_NAME)
  end
end

View File

@ -0,0 +1 @@
b9200d6c754f7c450ba0c718171806e8f4f9720d870e532f4800640ca707f24f

View File

@ -0,0 +1 @@
3a7fb1b7959f09b9ba464253a72d52bcb744e7f78aac4f44e1d9201fa3c8387d

View File

@ -17842,7 +17842,8 @@ CREATE TABLE user_statuses (
emoji character varying DEFAULT 'speech_balloon'::character varying NOT NULL,
message character varying(100),
message_html character varying,
availability smallint DEFAULT 0 NOT NULL
availability smallint DEFAULT 0 NOT NULL,
clear_status_at timestamp with time zone
);
CREATE SEQUENCE user_statuses_user_id_seq
@ -23521,6 +23522,8 @@ CREATE INDEX index_user_preferences_on_gitpod_enabled ON user_preferences USING
CREATE UNIQUE INDEX index_user_preferences_on_user_id ON user_preferences USING btree (user_id);
CREATE INDEX index_user_statuses_on_clear_status_at_not_null ON user_statuses USING btree (clear_status_at) WHERE (clear_status_at IS NOT NULL);
CREATE INDEX index_user_statuses_on_user_id ON user_statuses USING btree (user_id);
CREATE UNIQUE INDEX index_user_synced_attributes_metadata_on_user_id ON user_synced_attributes_metadata USING btree (user_id);

View File

@ -396,6 +396,69 @@ In GitLab 13.4, a seed project is added when GitLab is first installed. This mak
on a new Geo secondary node. There is an [issue to account for seed projects](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5618)
when checking the database.
### Message: `Synchronization failed - Error syncing repository`
WARNING:
If large repositories are affected by this problem,
their resync may take a long time and cause significant load on your Geo nodes,
storage and network systems.
If you get the error `Synchronization failed - Error syncing repository` along with the following log messages, this indicates that the expected `geo` remote is not present in the `.git/config` file
of a repository on the secondary Geo node's filesystem:
```json
{
"created": "@1603481145.084348757",
"description": "Error received from peer unix:/var/opt/gitlab/gitaly/gitaly.socket",
"grpc_message": "exit status 128",
"grpc_status": 13
}
{ …
"grpc.request.fullMethod": "/gitaly.RemoteService/FindRemoteRootRef",
"grpc.request.glProjectPath": "<namespace>/<project>",
"level": "error",
"msg": "fatal: 'geo' does not appear to be a git repository
fatal: Could not read from remote repository. …",
}
```
To solve this:
1. Log into the secondary Geo node.
1. Back up [the `.git` folder](../../repository_storage_types.md#translating-hashed-storage-paths).
1. Optional: [Spot-check](../../troubleshooting/log_parsing.md#find-all-projects-affected-by-a-fatal-git-problem)
a few of those IDs whether they indeed correspond
to a project with known Geo replication failures.
Use `fatal: 'geo'` as the `grep` term and the following API call:
```shell
curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/<first_failed_geo_sync_ID>"
```
1. Enter the [Rails console](../../troubleshooting/navigating_gitlab_via_rails_console.md) and run:
```ruby
failed_geo_syncs = Geo::ProjectRegistry.failed.pluck(:id)
failed_geo_syncs.each do |fgs|
puts Geo::ProjectRegistry.failed.find(fgs).project_id
end
```
1. Run the following commands to reset each project's
Geo-related attributes and execute a new sync:
```ruby
failed_geo_syncs.each do |fgs|
registry = Geo::ProjectRegistry.failed.find(fgs)
registry.update(resync_repository: true, force_to_redownload_repository: false, repository_retry_count: 0)
Geo::RepositorySyncService.new(registry.project).execute
end
```
### Very large repositories never successfully synchronize on the **secondary** node
GitLab places a timeout on all repository clones, including project imports

View File

@ -75,8 +75,7 @@ types. If you want to use local storage for specific object types, you can
Most types of objects, such as CI artifacts, LFS files, upload
attachments, and so on can be saved in object storage by specifying a single
credential for object storage with multiple buckets. A [different bucket
for each type must be used](#use-separate-buckets).
credential for object storage with multiple buckets.
When the consolidated form is:
@ -571,22 +570,13 @@ See the following additional guides:
## Warnings, limitations, and known issues
### Use separate buckets
### Separate buckets required when using Helm
Using separate buckets for each data type is the recommended approach for GitLab.
Generally, using the same bucket for your Object Storage is fine to do
for convenience.
A limitation of our configuration is that each use of object storage is separately configured.
[We have an issue for improving this](https://gitlab.com/gitlab-org/gitlab/-/issues/23345)
and easily using one bucket with separate folders is one improvement that this might bring.
There is at least one specific issue with using the same bucket:
when GitLab is deployed with the Helm chart restore from backup
[will not properly function](https://docs.gitlab.com/charts/advanced/external-object-storage/#lfs-artifacts-uploads-packages-external-diffs-pseudonymizer)
unless separate buckets are used.
One risk of using a single bucket would be that if your organisation decided to
migrate GitLab to the Helm deployment in the future. GitLab would run, but the situation with
backups might not be realised until the organisation had a critical requirement for the backups to work.
However, if you're using or planning to use Helm, separate buckets will
be required as there is a [known limitation with restorations of Helm chart backups](https://docs.gitlab.com/charts/advanced/external-object-storage/#lfs-artifacts-uploads-packages-external-diffs-pseudonymizer).
### S3 API compatibility issues

View File

@ -1812,8 +1812,9 @@ To configure Praefect with TLS:
## Configure Sidekiq
Sidekiq requires connections to the Redis, PostgreSQL and Gitaly instances.
The following IPs will be used as an example:
Sidekiq requires connections to the [Redis](#configure-redis),
[PostgreSQL](#configure-postgresql), and [Gitaly](#configure-gitaly) instances.
[Object storage](#configure-the-object-storage) must also be configured.
- `10.6.0.101`: Sidekiq 1
- `10.6.0.102`: Sidekiq 2
@ -1927,6 +1928,25 @@ To configure the Sidekiq nodes, on each one:
# Rails Status for prometheus
gitlab_rails['monitoring_whitelist'] = ['10.6.0.121/32', '127.0.0.0/8']
#############################
### Object storage ###
#############################
# This is an example for configuring Object Storage on GCP
# Replace this config with your chosen Object Storage provider as desired
gitlab_rails['object_store']['connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['object_store']['objects']['artifacts']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['external_diffs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['lfs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['uploads']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['packages']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['dependency_proxy']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['terraform_state']['bucket'] = "<gcp-bucket-name>"
```
1. Copy the `/etc/gitlab/gitlab-secrets.json` file from your Consul server, and replace
@ -1947,6 +1967,7 @@ You can also run [multiple Sidekiq processes](../operations/extra_sidekiq_proces
## Configure GitLab Rails
This section describes how to configure the GitLab application (Rails) component.
You must also configure [Object storage](#configure-the-object-storage).
The following IPs will be used as an example:
@ -2036,6 +2057,25 @@ On each node perform the following:
# scrape the NGINX metrics
gitlab_rails['monitoring_whitelist'] = ['10.6.0.121/32', '127.0.0.0/8']
nginx['status']['options']['allow'] = ['10.6.0.121/32', '127.0.0.0/8']
#############################
### Object storage ###
#############################
# This is an example for configuring Object Storage on GCP
# Replace this config with your chosen Object Storage provider as desired
gitlab_rails['object_store']['connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['object_store']['objects']['artifacts']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['external_diffs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['lfs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['uploads']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['packages']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['dependency_proxy']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['terraform_state']['bucket'] = "<gcp-bucket-name>"
```
1. Save the file and [reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure).

View File

@ -1512,8 +1512,9 @@ To configure Gitaly with TLS:
## Configure Sidekiq
Sidekiq requires connections to the Redis, PostgreSQL and Gitaly instances.
The following IPs will be used as an example:
Sidekiq requires connection to the [Redis](#configure-redis),
[PostgreSQL](#configure-postgresql) and [Gitaly](#configure-gitaly) instances.
You must also configure [Object storage](#configure-the-object-storage).
- `10.6.0.101`: Sidekiq 1
- `10.6.0.102`: Sidekiq 2
@ -1624,6 +1625,25 @@ To configure the Sidekiq nodes, on each one:
# Rails Status for prometheus
gitlab_rails['monitoring_whitelist'] = ['10.6.0.121/32', '127.0.0.0/8']
#############################
### Object storage ###
#############################
# This is an example for configuring Object Storage on GCP
# Replace this config with your chosen Object Storage provider as desired
gitlab_rails['object_store']['connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['object_store']['objects']['artifacts']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['external_diffs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['lfs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['uploads']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['packages']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['dependency_proxy']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['terraform_state']['bucket'] = "<gcp-bucket-name>"
```
1. Copy the `/etc/gitlab/gitlab-secrets.json` file from your Consul server, and replace
@ -1644,6 +1664,7 @@ You can also run [multiple Sidekiq processes](../operations/extra_sidekiq_proces
## Configure GitLab Rails
This section describes how to configure the GitLab application (Rails) component.
You must also configure [Object storage](#configure-the-object-storage).
The following IPs will be used as an example:
@ -1736,6 +1757,25 @@ On each node perform the following:
# scrape the NGINX metrics
gitlab_rails['monitoring_whitelist'] = ['10.6.0.121/32', '127.0.0.0/8']
nginx['status']['options']['allow'] = ['10.6.0.121/32', '127.0.0.0/8']
#############################
### Object storage ###
#############################
# This is an example for configuring Object Storage on GCP
# Replace this config with your chosen Object Storage provider as desired
gitlab_rails['object_store']['connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['object_store']['objects']['artifacts']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['external_diffs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['lfs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['uploads']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['packages']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['dependency_proxy']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['terraform_state']['bucket'] = "<gcp-bucket-name>"
```
1. Save the file and [reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure).

View File

@ -657,6 +657,25 @@ On each node perform the following:
gitlab_rails['monitoring_whitelist'] = ['<MONITOR NODE IP>/32', '127.0.0.0/8']
nginx['status']['options']['allow'] = ['<MONITOR NODE IP>/32', '127.0.0.0/8']
#############################
### Object storage ###
#############################
# This is an example for configuring Object Storage on GCP
# Replace this config with your chosen Object Storage provider as desired
gitlab_rails['object_store']['connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['object_store']['objects']['artifacts']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['external_diffs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['lfs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['uploads']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['packages']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['dependency_proxy']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['terraform_state']['bucket'] = "<gcp-bucket-name>"
## Uncomment and edit the following options if you have set up NFS
##
## Prevent GitLab from starting if NFS data mounts are not available

View File

@ -1212,7 +1212,10 @@ To configure Gitaly with TLS:
## Configure Sidekiq
Sidekiq requires connection to the Redis, PostgreSQL and Gitaly instance.
Sidekiq requires connection to the [Redis](#configure-redis),
[PostgreSQL](#configure-postgresql) and [Gitaly](#configure-gitaly) instances.
You must also configure [Object storage](#configure-the-object-storage).
The following IPs will be used as an example:
- `10.6.0.71`: Sidekiq 1
@ -1307,6 +1310,25 @@ To configure the Sidekiq nodes, one each one:
# Rails Status for prometheus
gitlab_rails['monitoring_whitelist'] = ['10.6.0.81/32', '127.0.0.0/8']
gitlab_rails['prometheus_address'] = '10.6.0.81:9090'
#############################
### Object storage ###
#############################
# This is an example for configuring Object Storage on GCP
# Replace this config with your chosen Object Storage provider as desired
gitlab_rails['object_store']['connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['object_store']['objects']['artifacts']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['external_diffs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['lfs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['uploads']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['packages']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['dependency_proxy']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['terraform_state']['bucket'] = "<gcp-bucket-name>"
```
1. Save the file and [reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure).
@ -1337,6 +1359,7 @@ You can also run [multiple Sidekiq processes](../operations/extra_sidekiq_proces
## Configure GitLab Rails
This section describes how to configure the GitLab application (Rails) component.
You must also configure [Object storage](#configure-the-object-storage).
On each node perform the following:
@ -1454,6 +1477,25 @@ On each node perform the following:
#web_server['gid'] = 9001
#registry['uid'] = 9002
#registry['gid'] = 9002
#############################
### Object storage ###
#############################
# This is an example for configuring Object Storage on GCP
# Replace this config with your chosen Object Storage provider as desired
gitlab_rails['object_store']['connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['object_store']['objects']['artifacts']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['external_diffs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['lfs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['uploads']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['packages']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['dependency_proxy']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['terraform_state']['bucket'] = "<gcp-bucket-name>"
```
1. If you're using [Gitaly with TLS support](#gitaly-tls-support), make sure the

View File

@ -1512,8 +1512,9 @@ To configure Gitaly with TLS:
## Configure Sidekiq
Sidekiq requires connections to the Redis, PostgreSQL and Gitaly instances.
The following IPs will be used as an example:
Sidekiq requires connection to the [Redis](#configure-redis),
[PostgreSQL](#configure-postgresql) and [Gitaly](#configure-gitaly) instances.
You must also configure [Object storage](#configure-the-object-storage).
- `10.6.0.101`: Sidekiq 1
- `10.6.0.102`: Sidekiq 2
@ -1624,6 +1625,25 @@ To configure the Sidekiq nodes, on each one:
# Rails Status for prometheus
gitlab_rails['monitoring_whitelist'] = ['10.6.0.121/32', '127.0.0.0/8']
#############################
### Object storage ###
#############################
# This is an example for configuring Object Storage on GCP
# Replace this config with your chosen Object Storage provider as desired
gitlab_rails['object_store']['connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['object_store']['objects']['artifacts']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['external_diffs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['lfs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['uploads']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['packages']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['dependency_proxy']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['terraform_state']['bucket'] = "<gcp-bucket-name>"
```
1. Copy the `/etc/gitlab/gitlab-secrets.json` file from your Consul server, and replace
@ -1644,6 +1664,7 @@ You can also run [multiple Sidekiq processes](../operations/extra_sidekiq_proces
## Configure GitLab Rails
This section describes how to configure the GitLab application (Rails) component.
You must also configure [Object storage](#configure-the-object-storage).
The following IPs will be used as an example:
@ -1736,6 +1757,25 @@ On each node perform the following:
# scrape the NGINX metrics
gitlab_rails['monitoring_whitelist'] = ['10.6.0.121/32', '127.0.0.0/8']
nginx['status']['options']['allow'] = ['10.6.0.121/32', '127.0.0.0/8']
#############################
### Object storage ###
#############################
# This is an example for configuring Object Storage on GCP
# Replace this config with your chosen Object Storage provider as desired
gitlab_rails['object_store']['connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['object_store']['objects']['artifacts']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['external_diffs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['lfs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['uploads']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['packages']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['dependency_proxy']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['terraform_state']['bucket'] = "<gcp-bucket-name>"
```
1. Save the file and [reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure).

View File

@ -1211,8 +1211,9 @@ To configure Gitaly with TLS:
## Configure Sidekiq
Sidekiq requires connection to the Redis, PostgreSQL and Gitaly instance.
The following IPs will be used as an example:
Sidekiq requires connection to the [Redis](#configure-redis),
[PostgreSQL](#configure-postgresql) and [Gitaly](#configure-gitaly) instances.
You must also configure [Object storage](#configure-the-object-storage).
- `10.6.0.71`: Sidekiq 1
- `10.6.0.72`: Sidekiq 2
@ -1306,6 +1307,25 @@ To configure the Sidekiq nodes, one each one:
# Rails Status for prometheus
gitlab_rails['monitoring_whitelist'] = ['10.6.0.81/32', '127.0.0.0/8']
gitlab_rails['prometheus_address'] = '10.6.0.81:9090'
#############################
### Object storage ###
#############################
# This is an example for configuring Object Storage on GCP
# Replace this config with your chosen Object Storage provider as desired
gitlab_rails['object_store']['connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['object_store']['objects']['artifacts']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['external_diffs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['lfs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['uploads']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['packages']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['dependency_proxy']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['terraform_state']['bucket'] = "<gcp-bucket-name>"
```
1. Save the file and [reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure).
@ -1336,6 +1356,7 @@ You can also run [multiple Sidekiq processes](../operations/extra_sidekiq_proces
## Configure GitLab Rails
This section describes how to configure the GitLab application (Rails) component.
You must also configure [Object storage](#configure-the-object-storage).
On each node perform the following:
@ -1439,6 +1460,25 @@ On each node perform the following:
nginx['status']['options']['allow'] = ['10.6.0.81/32', '127.0.0.0/8']
gitlab_rails['prometheus_address'] = '10.6.0.81:9090'
#############################
### Object storage ###
#############################
# This is an example for configuring Object Storage on GCP
# Replace this config with your chosen Object Storage provider as desired
gitlab_rails['object_store']['connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['object_store']['objects']['artifacts']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['external_diffs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['lfs']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['uploads']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['packages']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['dependency_proxy']['bucket'] = "<gcp-bucket-name>"
gitlab_rails['object_store']['objects']['terraform_state']['bucket'] = "<gcp-bucket-name>"
## Uncomment and edit the following options if you have set up NFS
##
## Prevent GitLab from starting if NFS data mounts are not available

View File

@ -179,3 +179,11 @@ jq --raw-output --slurp '
663 106 ms, 96 ms, 94 ms 'groupABC/project123'
...
```
#### Find all projects affected by a fatal Git problem
```shell
grep "fatal: " /var/log/gitlab/gitaly/current | \
jq '."grpc.request.glProjectPath"' | \
sort | uniq
```

View File

@ -25,6 +25,7 @@ From the pipeline editor page you can:
- Do a deeper [lint](#lint-ci-configuration) of your configuration, that verifies it with any configuration
added with the [`include`](../yaml/README.md#include) keyword.
- See a [visualization](#visualize-ci-configuration) of the current configuration.
- View an [expanded](#view-expanded-configuration) version of your configuration.
- [Commit](#commit-changes-to-ci-configuration) the changes to a specific branch.
NOTE:
@ -101,6 +102,40 @@ To enable it:
Feature.enable(:ci_config_visualization_tab)
```
## View expanded configuration
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/246801) in GitLab 13.9.
> - It is [deployed behind a feature flag](../../user/feature_flags.md), disabled by default.
> - To use it in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enable-or-disable-expanded-configuration). **(FREE SELF)**
To view the fully expanded CI/CD configuration as one combined file, go to the
pipeline editor's **View merged YAML** tab. This tab displays an expanded configuration
where:
- Configuration imported with [`include`](../yaml/README.md#include) is copied into the view.
- Jobs that use [`extends`](../yaml/README.md#extends) display with the
[extended configuration merged into the job](../yaml/README.md#merge-details).
- YAML anchors are [replaced with the linked configuration](../yaml/README.md#anchors).
### Enable or disable expanded configuration **(FREE SELF)**
Expanded CI/CD configuration is under development and not ready for production use.
It is deployed behind a feature flag that is **disabled by default**.
[GitLab administrators with access to the GitLab Rails console](../../administration/feature_flags.md)
can opt to enable it.
To enable it:
```ruby
Feature.enable(:ci_config_visualization_tab)
```
To disable it:
```ruby
Feature.disable(:ci_config_visualization_tab)
```
## Commit changes to CI configuration
The commit form appears at the bottom of each tab in the editor so you can commit

View File

@ -65,9 +65,9 @@ request is as follows:
template already provided in the "Description" field.
1. If you are contributing documentation, choose `Documentation` from the
"Choose a template" menu and fill in the description according to the template.
1. Mention the issue(s) your merge request solves, using the `Solves #XXX` or
`Closes #XXX` syntax to [auto-close](../../user/project/issues/managing_issues.md#closing-issues-automatically)
the issue(s) once the merge request is merged.
1. Use the syntax `Solves #XXX`, `Closes #XXX`, or `Refs #XXX` to mention the issue(s) your merge
request addresses. Referenced issues do not [close automatically](../../user/project/issues/managing_issues.md#closing-issues-automatically).
You must close them manually once the merge request is merged.
1. If you're allowed to, set a relevant milestone and [labels](issue_workflow.md).
1. UI changes should use available components from the GitLab Design System,
[Pajamas](https://design.gitlab.com/). The MR must include *Before* and

View File

@ -13,5 +13,11 @@ each security partner:
<!-- vale gitlab.Spelling = NO -->
- [Anchore](https://docs.anchore.com/current/docs/using/integration/ci_cd/gitlab/)
- [Bridgecrew](https://docs.bridgecrew.io/docs/integrate-with-gitlab-self-managed)
- [Checkmarx](https://checkmarx.atlassian.net/wiki/spaces/SD/pages/1929937052/GitLab+Integration)
- [Indeni](https://indeni.com/doc-indeni-cloudrail/integrate-with-ci-cd/gitlab-instructions/)
- [JScrambler](https://docs.jscrambler.com/code-integrity/documentation/gitlab-ci-integration)
- [StackHawk](https://docs.stackhawk.com/continuous-integration/gitlab.html)
- [WhiteSource](https://www.whitesourcesoftware.com/gitlab/)
<!-- vale gitlab.Spelling = YES -->

View File

@ -90,7 +90,7 @@ parameters. All fields are optional. If the incoming alert does not contain a va
| `service` | String | The affected service. |
| `monitoring_tool` | String | The name of the associated monitoring tool. |
| `hosts` | String or Array | One or more hosts, as to where this incident occurred. |
| `severity` | String | The severity of the alert. Must be one of `critical`, `high`, `medium`, `low`, `info`, `unknown`. Default is `critical`. |
| `severity` | String | The severity of the alert. Case-insensitive. Can be one of: `critical`, `high`, `medium`, `low`, `info`, `unknown`. Defaults to `critical` if missing or value is not in this list. |
| `fingerprint` | String or Array | The unique identifier of the alert. This can be used to group occurrences of the same alert. |
| `gitlab_environment_name` | String | The name of the associated GitLab [environment](../../ci/environments/index.md). Required to [display alerts on a dashboard](../../user/operations_dashboard/index.md#adding-a-project-to-the-dashboard). |

View File

@ -7,9 +7,9 @@ type: reference, howto
# Threads **(FREE)**
You can use words to communicate with other users all over GitLab.
GitLab encourages communication through comments, threads, and suggestions.
For example, you can leave a comment in the following places:
For example, you can create a comment in the following places:
- Issues
- Epics

View File

@ -444,6 +444,7 @@ GFM recognizes the following:
| snippet | `$123` | `namespace/project$123` | `project$123` |
| epic **(ULTIMATE)** | `&123` | `group1/subgroup&123` | |
| vulnerability **(ULTIMATE)** (1)| `[vulnerability:123]` | `[vulnerability:namespace/project/123]` | `[vulnerability:project/123]` |
| feature flag | `[feature_flag:123]` | `[feature_flag:namespace/project/123]` | `[feature_flag:project/123]` |
| label by ID | `~123` | `namespace/project~123` | `project~123` |
| one-word label by name | `~bug` | `namespace/project~bug` | `project~bug` |
| multi-word label by name | `~"feature request"` | `namespace/project~"feature request"` | `project~"feature request"` |

View File

@ -9,6 +9,7 @@ module API
expose :message_html do |entity|
MarkupHelper.markdown_field(entity, :message)
end
expose :clear_status_at
end
end
end

View File

@ -1004,11 +1004,15 @@ module API
optional :emoji, type: String, desc: "The emoji to set on the status"
optional :message, type: String, desc: "The status message to set"
optional :availability, type: String, desc: "The availability of user to set"
optional :clear_status_after, type: String, desc: "Automatically clear emoji, message and availability fields after a certain time", values: UserStatus::CLEAR_STATUS_QUICK_OPTIONS.keys
end
put "status", feature_category: :users do
forbidden! unless can?(current_user, :update_user_status, current_user)
if ::Users::SetStatusService.new(current_user, declared_params).execute
update_params = declared_params
update_params.delete(:clear_status_after) if Feature.disabled?(:clear_status_with_quick_options, current_user)
if ::Users::SetStatusService.new(current_user, update_params).execute
present current_user.status, with: Entities::UserStatus
else
render_validation_error!(current_user.status)

View File

@ -14,8 +14,6 @@ module Banzai
end
def parent_records(parent, ids)
return self.class.object_class.none unless Feature.enabled?(:feature_flag_contextual_issue, parent)
parent.operations_feature_flags.where(iid: ids.to_a)
end

View File

@ -13,7 +13,7 @@ module BulkImports
loader BulkImports::Groups::Loaders::LabelsLoader
def after_run(context, extracted_data)
def after_run(extracted_data)
context.entity.update_tracker_for(
relation: :labels,
has_next_page: extracted_data.has_next_page?,
@ -21,7 +21,7 @@ module BulkImports
)
if extracted_data.has_next_page?
run(context)
run
end
end
end

View File

@ -14,7 +14,7 @@ module BulkImports
loader BulkImports::Groups::Loaders::MembersLoader
def after_run(context, extracted_data)
def after_run(extracted_data)
context.entity.update_tracker_for(
relation: :group_members,
has_next_page: extracted_data.has_next_page?,
@ -22,7 +22,7 @@ module BulkImports
)
if extracted_data.has_next_page?
run(context)
run
end
end
end

View File

@ -10,7 +10,7 @@ module BulkImports
def execute
context = BulkImports::Pipeline::Context.new(entity)
pipelines.each { |pipeline| pipeline.new.run(context) }
pipelines.each { |pipeline| pipeline.new(context).run }
entity.finish!
end

View File

@ -4,12 +4,17 @@ module BulkImports
module Pipeline
extend ActiveSupport::Concern
include Gitlab::ClassAttributes
include Runner
def initialize(context)
@context = context
end
included do
include Runner
private
attr_reader :context
def extractor
@extractor ||= instantiate(self.class.get_extractor)
end

View File

@ -7,78 +7,78 @@ module BulkImports
MarkedAsFailedError = Class.new(StandardError)
def run(context)
raise MarkedAsFailedError if marked_as_failed?(context)
def run
raise MarkedAsFailedError if marked_as_failed?
info(context, message: 'Pipeline started')
info(message: 'Pipeline started')
extracted_data = extracted_data_from(context)
extracted_data = extracted_data_from
extracted_data&.each do |entry|
transformers.each do |transformer|
entry = run_pipeline_step(:transformer, transformer.class.name, context) do
entry = run_pipeline_step(:transformer, transformer.class.name) do
transformer.transform(context, entry)
end
end
run_pipeline_step(:loader, loader.class.name, context) do
run_pipeline_step(:loader, loader.class.name) do
loader.load(context, entry)
end
end
after_run(context, extracted_data) if respond_to?(:after_run)
after_run(extracted_data) if respond_to?(:after_run)
info(context, message: 'Pipeline finished')
info(message: 'Pipeline finished')
rescue MarkedAsFailedError
log_skip(context)
log_skip
end
private # rubocop:disable Lint/UselessAccessModifier
def run_pipeline_step(step, class_name, context)
raise MarkedAsFailedError if marked_as_failed?(context)
def run_pipeline_step(step, class_name)
raise MarkedAsFailedError if marked_as_failed?
info(context, pipeline_step: step, step_class: class_name)
info(pipeline_step: step, step_class: class_name)
yield
rescue MarkedAsFailedError
log_skip(context, step => class_name)
log_skip(step => class_name)
rescue => e
log_import_failure(e, step, context)
log_import_failure(e, step)
mark_as_failed(context) if abort_on_failure?
mark_as_failed if abort_on_failure?
nil
end
def extracted_data_from(context)
run_pipeline_step(:extractor, extractor.class.name, context) do
def extracted_data_from
run_pipeline_step(:extractor, extractor.class.name) do
extractor.extract(context)
end
end
def mark_as_failed(context)
warn(context, message: 'Pipeline failed', pipeline_class: pipeline)
def mark_as_failed
warn(message: 'Pipeline failed', pipeline_class: pipeline)
context.entity.fail_op!
end
def marked_as_failed?(context)
def marked_as_failed?
return true if context.entity.failed?
false
end
def log_skip(context, extra = {})
def log_skip(extra = {})
log = {
message: 'Skipping due to failed pipeline status',
pipeline_class: pipeline
}.merge(extra)
info(context, log)
info(log)
end
def log_import_failure(exception, step, context)
def log_import_failure(exception, step)
attributes = {
bulk_import_entity_id: context.entity.id,
pipeline_class: pipeline,
@ -91,15 +91,15 @@ module BulkImports
BulkImports::Failure.create(attributes)
end
def warn(context, extra = {})
logger.warn(log_base_params(context).merge(extra))
def warn(extra = {})
logger.warn(log_base_params.merge(extra))
end
def info(context, extra = {})
logger.info(log_base_params(context).merge(extra))
def info(extra = {})
logger.info(log_base_params.merge(extra))
end
def log_base_params(context)
def log_base_params
{
bulk_import_entity_id: context.entity.id,
bulk_import_entity_type: context.entity.source_type,

View File

@ -75,6 +75,10 @@ module Gitlab
def self.display_codequality_backend_comparison?(project)
::Feature.enabled?(:codequality_backend_comparison, project, default_enabled: :yaml)
end
def self.use_coverage_data_new_finder?(record)
::Feature.enabled?(:coverage_data_new_finder, record, default_enabled: :yaml)
end
end
end
end

View File

@ -46,7 +46,6 @@ module Gitlab
push_frontend_feature_flag(:snippets_binary_blob, default_enabled: false)
push_frontend_feature_flag(:usage_data_api, default_enabled: true)
push_frontend_feature_flag(:security_auto_fix, default_enabled: false)
push_frontend_feature_flag(:gl_tooltips, default_enabled: :yaml)
end
# Exposes the state of a feature flag to the frontend code.

View File

@ -11544,6 +11544,12 @@ msgstr ""
msgid "Epics, Issues, and Merge Requests"
msgstr ""
msgid "Epics|%{startDate} %{dueDate}"
msgstr ""
msgid "Epics|%{startDate} No due date"
msgstr ""
msgid "Epics|Add a new epic"
msgstr ""
@ -11571,6 +11577,9 @@ msgstr ""
msgid "Epics|Leave empty to inherit from milestone dates"
msgstr ""
msgid "Epics|No start date %{dueDate}"
msgstr ""
msgid "Epics|Remove epic"
msgstr ""
@ -11598,6 +11607,9 @@ msgstr ""
msgid "Epics|Something went wrong while fetching child epics."
msgstr ""
msgid "Epics|Something went wrong while fetching epics list."
msgstr ""
msgid "Epics|Something went wrong while fetching group epics."
msgstr ""
@ -21672,6 +21684,9 @@ msgstr ""
msgid "Pipelines|Copy trigger token"
msgstr ""
msgid "Pipelines|Could not load merged YAML content"
msgstr ""
msgid "Pipelines|Description"
msgstr ""
@ -21708,6 +21723,9 @@ msgstr ""
msgid "Pipelines|Loading Pipelines"
msgstr ""
msgid "Pipelines|Merged YAML is view only"
msgstr ""
msgid "Pipelines|More Information"
msgstr ""
@ -21780,6 +21798,9 @@ msgstr ""
msgid "Pipelines|Validating GitLab CI configuration…"
msgstr ""
msgid "Pipelines|View merged YAML"
msgstr ""
msgid "Pipelines|Visualize"
msgstr ""
@ -29404,6 +29425,9 @@ msgstr ""
msgid "There are no charts configured for this page"
msgstr ""
msgid "There are no closed epics"
msgstr ""
msgid "There are no closed issues"
msgstr ""
@ -29428,6 +29452,9 @@ msgstr ""
msgid "There are no matching files"
msgstr ""
msgid "There are no open epics"
msgstr ""
msgid "There are no open issues"
msgstr ""

View File

@ -11,6 +11,7 @@ RSpec.describe Projects::Ci::DailyBuildGroupReportResultsController do
let(:end_date) { '2020-03-09' }
let(:allowed_to_read) { true }
let(:user) { create(:user) }
let(:feature_enabled?) { true }
before do
create_daily_coverage('rspec', 79.0, '2020-03-09')
@ -24,6 +25,8 @@ RSpec.describe Projects::Ci::DailyBuildGroupReportResultsController do
allow(Ability).to receive(:allowed?).and_call_original
allow(Ability).to receive(:allowed?).with(user, :read_build_report_results, project).and_return(allowed_to_read)
stub_feature_flags(coverage_data_new_finder: feature_enabled?)
get :index, params: {
namespace_id: project.namespace,
project_id: project,
@ -55,9 +58,7 @@ RSpec.describe Projects::Ci::DailyBuildGroupReportResultsController do
end
end
context 'when format is CSV' do
let(:format) { :csv }
shared_examples 'CSV results' do
it 'serves the results in CSV' do
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers['Content-Type']).to eq('text/csv; charset=utf-8')
@ -88,9 +89,7 @@ RSpec.describe Projects::Ci::DailyBuildGroupReportResultsController do
it_behaves_like 'ensuring policy'
end
context 'when format is JSON' do
let(:format) { :json }
shared_examples 'JSON results' do
it 'serves the results in JSON' do
expect(response).to have_gitlab_http_status(:ok)
@ -137,6 +136,38 @@ RSpec.describe Projects::Ci::DailyBuildGroupReportResultsController do
it_behaves_like 'validating param_type'
it_behaves_like 'ensuring policy'
end
context 'when format is JSON' do
let(:format) { :json }
context 'when coverage_data_new_finder flag is enabled' do
let(:feature_enabled?) { true }
it_behaves_like 'JSON results'
end
context 'when coverage_data_new_finder flag is disabled' do
let(:feature_enabled?) { false }
it_behaves_like 'JSON results'
end
end
context 'when format is CSV' do
let(:format) { :csv }
context 'when coverage_data_new_finder flag is enabled' do
let(:feature_enabled?) { true }
it_behaves_like 'CSV results'
end
context 'when coverage_data_new_finder flag is disabled' do
let(:feature_enabled?) { false }
it_behaves_like 'CSV results'
end
end
end
def create_daily_coverage(group_name, coverage, date)

View File

@ -0,0 +1,99 @@
# frozen_string_literal: true

require 'spec_helper'

# Spec for the finder that returns daily build-group coverage report rows
# for a project, filtered by ref path, date range, optional limit and sort.
RSpec.describe Ci::Testing::DailyBuildGroupReportResultsFinder do
  describe '#execute' do
    let_it_be(:project) { create(:project, :private) }
    # A user with no membership in the private project — must see nothing.
    let(:user_without_permission) { create(:user) }
    let_it_be(:user_with_permission) { project.owner }
    let_it_be(:ref_path) { 'refs/heads/master' }
    let(:limit) { nil }
    let_it_be(:default_branch) { false }
    let(:start_date) { '2020-03-09' }
    let(:end_date) { '2020-03-10' }
    let(:sort) { true }

    # Coverage fixtures: two frameworks (rspec/karma) across three days.
    # The *_3 rows fall on 2020-03-11, outside the default date range above,
    # so they should never appear in the base expectations.
    let_it_be(:rspec_coverage_1) { create_daily_coverage('rspec', 79.0, '2020-03-09') }
    let_it_be(:karma_coverage_1) { create_daily_coverage('karma', 89.0, '2020-03-09') }
    let_it_be(:rspec_coverage_2) { create_daily_coverage('rspec', 95.0, '2020-03-10') }
    let_it_be(:karma_coverage_2) { create_daily_coverage('karma', 92.0, '2020-03-10') }
    let_it_be(:rspec_coverage_3) { create_daily_coverage('rspec', 97.0, '2020-03-11') }
    let_it_be(:karma_coverage_3) { create_daily_coverage('karma', 99.0, '2020-03-11') }

    let(:finder) { described_class.new(params: params, current_user: current_user) }

    let(:params) do
      {
        project: project,
        coverage: true,
        ref_path: ref_path,
        start_date: start_date,
        end_date: end_date,
        limit: limit,
        sort: sort
      }
    end

    subject(:coverages) { finder.execute }

    context 'when params are provided' do
      context 'when current user is not allowed to read data' do
        let(:current_user) { user_without_permission }

        it 'returns an empty collection' do
          expect(coverages).to be_empty
        end
      end

      context 'when current user is allowed to read data' do
        let(:current_user) { user_with_permission }

        it 'returns matching coverages within the given date range' do
          expect(coverages).to match_array([
            karma_coverage_2,
            rspec_coverage_2,
            karma_coverage_1,
            rspec_coverage_1
          ])
        end

        context 'when ref_path is nil' do
          let(:default_branch) { true }
          let(:ref_path) { nil }

          it 'returns coverages for the default branch' do
            # Created inside the example (not let_it_be) so only this row has
            # default_branch: true; the shared fixtures were built with false.
            rspec_coverage_4 = create_daily_coverage('rspec', 66.0, '2020-03-10')

            expect(coverages).to contain_exactly(rspec_coverage_4)
          end
        end

        context 'when limit is specified' do
          let(:limit) { 2 }

          it 'returns limited number of matching coverages within the given date range' do
            # With sort enabled the most recent date's rows come first,
            # so limiting to 2 keeps only the 2020-03-10 coverages.
            expect(coverages).to match_array([
              karma_coverage_2,
              rspec_coverage_2
            ])
          end
        end
      end
    end
  end

  private

  # Builds a daily coverage row for this spec's project.
  # Falls back to 'feature-branch' when ref_path is nil (the nil-ref_path
  # context) so the record still has a concrete ref while the finder is
  # queried with default_branch semantics instead.
  def create_daily_coverage(group_name, coverage, date)
    create(
      :ci_daily_build_group_report_result,
      project: project,
      ref_path: ref_path || 'feature-branch',
      group_name: group_name,
      data: { 'coverage' => coverage },
      date: date,
      default_branch: default_branch
    )
  end
end

View File

@ -5,6 +5,9 @@ import { TEST_HOST } from 'spec/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import Sidebar from '~/right_sidebar';
import { fixTitle } from '~/tooltips';
jest.mock('~/tooltips');
describe('Issuable right sidebar collapsed todo toggle', () => {
const fixtureName = 'issues/open-issue.html';
@ -96,11 +99,10 @@ describe('Issuable right sidebar collapsed todo toggle', () => {
document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click();
setImmediate(() => {
expect(
document
.querySelector('.js-issuable-todo.sidebar-collapsed-icon')
.getAttribute('data-original-title'),
).toBe('Mark as done');
const el = document.querySelector('.js-issuable-todo.sidebar-collapsed-icon');
expect(el.getAttribute('title')).toBe('Mark as done');
expect(fixTitle).toHaveBeenCalledWith(el);
done();
});

View File

@ -0,0 +1,88 @@
import { shallowMount } from '@vue/test-utils';
import { GlAlert, GlIcon } from '@gitlab/ui';
import { EDITOR_READY_EVENT } from '~/editor/constants';
import { INVALID_CI_CONFIG } from '~/pipelines/constants';
import CiConfigMergedPreview from '~/pipeline_editor/components/editor/ci_config_merged_preview.vue';
import { CI_CONFIG_STATUS_INVALID } from '~/pipeline_editor/constants';
import { mockLintResponse, mockCiConfigPath } from '../../mock_data';

// Fixed: the describe label was 'Text editor component', copy-pasted from
// the text_editor spec — it misidentified the component under test.
describe('CI config merged preview component', () => {
  let wrapper;

  // Lightweight stand-in for EditorLite: exposes the props the component
  // sets and emits the ready event on mount, like the real editor does.
  const MockEditorLite = {
    template: '<div/>',
    props: ['value', 'fileName', 'editorOptions'],
    mounted() {
      this.$emit(EDITOR_READY_EVENT);
    },
  };

  const createComponent = ({ props = {} } = {}) => {
    wrapper = shallowMount(CiConfigMergedPreview, {
      propsData: {
        ciConfigData: mockLintResponse,
        ...props,
      },
      provide: {
        ciConfigPath: mockCiConfigPath,
      },
      stubs: {
        EditorLite: MockEditorLite,
      },
    });
  };

  const findAlert = () => wrapper.findComponent(GlAlert);
  const findIcon = () => wrapper.findComponent(GlIcon);
  const findEditor = () => wrapper.findComponent(MockEditorLite);

  afterEach(() => {
    wrapper.destroy();
    wrapper = null;
  });

  describe('when status is invalid', () => {
    beforeEach(() => {
      createComponent({ props: { ciConfigData: { status: CI_CONFIG_STATUS_INVALID } } });
    });

    it('shows an error message', () => {
      expect(findAlert().exists()).toBe(true);
      expect(findAlert().text()).toBe(wrapper.vm.$options.errorTexts[INVALID_CI_CONFIG]);
    });

    it('hides the editor', () => {
      expect(findEditor().exists()).toBe(false);
    });
  });

  describe('when status is valid', () => {
    beforeEach(() => {
      createComponent();
    });

    it('shows an information message that the section is not editable', () => {
      expect(findIcon().exists()).toBe(true);
      expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.viewOnlyMessage);
    });

    it('contains an editor', () => {
      expect(findEditor().exists()).toBe(true);
    });

    it('editor contains the value provided', () => {
      expect(findEditor().props('value')).toBe(mockLintResponse.mergedYaml);
    });

    it('editor is configured for the CI config path', () => {
      expect(findEditor().props('fileName')).toBe(mockCiConfigPath);
    });

    it('editor is readonly', () => {
      expect(findEditor().props('editorOptions')).toMatchObject({
        readOnly: true,
      });
    });
  });
});

View File

@ -1,14 +1,14 @@
import { shallowMount } from '@vue/test-utils';
import { EDITOR_READY_EVENT } from '~/editor/constants';
import TextEditor from '~/pipeline_editor/components/text_editor.vue';
import TextEditor from '~/pipeline_editor/components/editor/text_editor.vue';
import {
mockCiConfigPath,
mockCiYml,
mockCommitSha,
mockProjectPath,
mockProjectNamespace,
} from '../mock_data';
} from '../../mock_data';
describe('Pipeline Editor | Text editor component', () => {
let wrapper;

View File

@ -1,9 +1,10 @@
import { nextTick } from 'vue';
import { shallowMount, mount } from '@vue/test-utils';
import { GlLoadingIcon } from '@gitlab/ui';
import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
import CiLint from '~/pipeline_editor/components/lint/ci_lint.vue';
import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
import PipelineEditorTabs from '~/pipeline_editor/components/pipeline_editor_tabs.vue';
import CiLint from '~/pipeline_editor/components/lint/ci_lint.vue';
import CiConfigMergedPreview from '~/pipeline_editor/components/editor/ci_config_merged_preview.vue';
import { mockLintResponse, mockCiYml } from '../mock_data';
@ -15,6 +16,7 @@ describe('Pipeline editor tabs component', () => {
const mockProvide = {
glFeatures: {
ciConfigVisualizationTab: true,
ciConfigMergedTab: true,
},
};
@ -35,72 +37,102 @@ describe('Pipeline editor tabs component', () => {
const findEditorTab = () => wrapper.find('[data-testid="editor-tab"]');
const findLintTab = () => wrapper.find('[data-testid="lint-tab"]');
const findMergedTab = () => wrapper.find('[data-testid="merged-tab"]');
const findVisualizationTab = () => wrapper.find('[data-testid="visualization-tab"]');
const findAlert = () => wrapper.findComponent(GlAlert);
const findCiLint = () => wrapper.findComponent(CiLint);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findPipelineGraph = () => wrapper.findComponent(PipelineGraph);
const findTextEditor = () => wrapper.findComponent(MockTextEditor);
const findMergedPreview = () => wrapper.findComponent(CiConfigMergedPreview);
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
describe('tabs', () => {
describe('editor tab', () => {
it('displays editor only after the tab is mounted', async () => {
createComponent({ mountFn: mount });
describe('editor tab', () => {
it('displays editor only after the tab is mounted', async () => {
createComponent({ mountFn: mount });
expect(findTextEditor().exists()).toBe(false);
expect(findTextEditor().exists()).toBe(false);
await nextTick();
await nextTick();
expect(findTextEditor().exists()).toBe(true);
expect(findEditorTab().exists()).toBe(true);
});
expect(findTextEditor().exists()).toBe(true);
expect(findEditorTab().exists()).toBe(true);
});
});
describe('visualization tab', () => {
describe('with feature flag on', () => {
describe('while loading', () => {
beforeEach(() => {
createComponent({ props: { isCiConfigDataLoading: true } });
});
it('displays a loading icon if the lint query is loading', () => {
expect(findLoadingIcon().exists()).toBe(true);
expect(findPipelineGraph().exists()).toBe(false);
});
});
describe('after loading', () => {
beforeEach(() => {
createComponent();
});
it('display the tab and visualization', () => {
expect(findVisualizationTab().exists()).toBe(true);
expect(findPipelineGraph().exists()).toBe(true);
});
});
});
describe('with feature flag off', () => {
describe('visualization tab', () => {
describe('with feature flag on', () => {
describe('while loading', () => {
beforeEach(() => {
createComponent({
provide: {
glFeatures: { ciConfigVisualizationTab: false },
},
});
createComponent({ props: { isCiConfigDataLoading: true } });
});
it('does not display the tab or component', () => {
expect(findVisualizationTab().exists()).toBe(false);
it('displays a loading icon if the lint query is loading', () => {
expect(findLoadingIcon().exists()).toBe(true);
expect(findPipelineGraph().exists()).toBe(false);
});
});
describe('after loading', () => {
beforeEach(() => {
createComponent();
});
it('display the tab and visualization', () => {
expect(findVisualizationTab().exists()).toBe(true);
expect(findPipelineGraph().exists()).toBe(true);
});
});
});
describe('lint tab', () => {
describe('with feature flag off', () => {
beforeEach(() => {
createComponent({
provide: {
glFeatures: { ciConfigVisualizationTab: false },
},
});
});
it('does not display the tab or component', () => {
expect(findVisualizationTab().exists()).toBe(false);
expect(findPipelineGraph().exists()).toBe(false);
});
});
});
describe('lint tab', () => {
describe('while loading', () => {
beforeEach(() => {
createComponent({ props: { isCiConfigDataLoading: true } });
});
it('displays a loading icon if the lint query is loading', () => {
expect(findLoadingIcon().exists()).toBe(true);
});
it('does not display the lint component', () => {
expect(findCiLint().exists()).toBe(false);
});
});
describe('after loading', () => {
beforeEach(() => {
createComponent();
});
it('display the tab and the lint component', () => {
expect(findLintTab().exists()).toBe(true);
expect(findCiLint().exists()).toBe(true);
});
});
});
describe('merged tab', () => {
describe('with feature flag on', () => {
describe('while loading', () => {
beforeEach(() => {
createComponent({ props: { isCiConfigDataLoading: true } });
@ -109,21 +141,43 @@ describe('Pipeline editor tabs component', () => {
it('displays a loading icon if the lint query is loading', () => {
expect(findLoadingIcon().exists()).toBe(true);
});
});
it('does not display the lint component', () => {
expect(findCiLint().exists()).toBe(false);
describe('when `mergedYaml` is undefined', () => {
beforeEach(() => {
createComponent({ props: { ciConfigData: {} } });
});
it('show an error message', () => {
expect(findAlert().exists()).toBe(true);
expect(findAlert().text()).toBe(wrapper.vm.$options.errorTexts.loadMergedYaml);
});
it('does not render the `meged_preview` component', () => {
expect(findMergedPreview().exists()).toBe(false);
});
});
describe('after loading', () => {
beforeEach(() => {
createComponent();
});
it('display the tab and the lint component', () => {
expect(findLintTab().exists()).toBe(true);
expect(findCiLint().exists()).toBe(true);
it('display the tab and the merged preview component', () => {
expect(findMergedTab().exists()).toBe(true);
expect(findMergedPreview().exists()).toBe(true);
});
});
});
describe('with feature flag off', () => {
beforeEach(() => {
createComponent({ provide: { glFeatures: { ciConfigMergedTab: false } } });
});
it('does not display the merged tab', () => {
expect(findMergedTab().exists()).toBe(false);
expect(findMergedPreview().exists()).toBe(false);
});
});
});
});

View File

@ -54,6 +54,7 @@ export const mockCiConfigQueryResponse = {
data: {
ciConfig: {
errors: [],
mergedYaml: mockCiYml,
status: CI_CONFIG_STATUS_VALID,
stages: {
__typename: 'CiConfigStageConnection',
@ -139,6 +140,8 @@ export const mergeUnwrappedCiConfig = (mergedConfig) => {
export const mockLintResponse = {
valid: true,
mergedYaml: mockCiYml,
status: CI_CONFIG_STATUS_VALID,
errors: [],
warnings: [],
jobs: [

View File

@ -3,7 +3,7 @@ import { GlAlert, GlButton, GlLoadingIcon, GlTabs } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import TextEditor from '~/pipeline_editor/components/text_editor.vue';
import TextEditor from '~/pipeline_editor/components/editor/text_editor.vue';
import httpStatusCodes from '~/lib/utils/http_status';

View File

@ -1,9 +1,11 @@
import { nextTick } from 'vue';
import { shallowMount } from '@vue/test-utils';
import PipelineEditorHome from '~/pipeline_editor/pipeline_editor_home.vue';
import PipelineEditorTabs from '~/pipeline_editor/components/pipeline_editor_tabs.vue';
import CommitSection from '~/pipeline_editor/components/commit/commit_section.vue';
import PipelineEditorHeader from '~/pipeline_editor/components/header/pipeline_editor_header.vue';
import { MERGED_TAB, VISUALIZE_TAB } from '~/pipeline_editor/constants';
import { mockLintResponse, mockCiYml } from './mock_data';
@ -21,9 +23,9 @@ describe('Pipeline editor home wrapper', () => {
});
};
const findPipelineEditorHeader = () => wrapper.findComponent(PipelineEditorTabs);
const findPipelineEditorTabs = () => wrapper.findComponent(CommitSection);
const findCommitSection = () => wrapper.findComponent(PipelineEditorHeader);
const findPipelineEditorHeader = () => wrapper.findComponent(PipelineEditorHeader);
const findPipelineEditorTabs = () => wrapper.findComponent(PipelineEditorTabs);
const findCommitSection = () => wrapper.findComponent(CommitSection);
afterEach(() => {
wrapper.destroy();
@ -43,7 +45,33 @@ describe('Pipeline editor home wrapper', () => {
expect(findPipelineEditorTabs().exists()).toBe(true);
});
it('shows the commit section', () => {
it('shows the commit section by default', () => {
expect(findCommitSection().exists()).toBe(true);
});
});
describe('commit form toggle', () => {
beforeEach(() => {
createComponent();
});
it('hides the commit form when in the merged tab', async () => {
expect(findCommitSection().exists()).toBe(true);
findPipelineEditorTabs().vm.$emit('set-current-tab', MERGED_TAB);
await nextTick();
expect(findCommitSection().exists()).toBe(false);
});
it('shows the form again when leaving the merged tab', async () => {
expect(findCommitSection().exists()).toBe(true);
findPipelineEditorTabs().vm.$emit('set-current-tab', MERGED_TAB);
await nextTick();
expect(findCommitSection().exists()).toBe(false);
findPipelineEditorTabs().vm.$emit('set-current-tab', VISUALIZE_TAB);
await nextTick();
expect(findCommitSection().exists()).toBe(true);
});
});

View File

@ -1,4 +1,3 @@
import jQuery from 'jquery';
import {
add,
initTooltips,
@ -146,29 +145,4 @@ describe('tooltips/index.js', () => {
expect(tooltipsApp.fixTitle).toHaveBeenCalledWith(target);
});
describe('when glTooltipsEnabled feature flag is disabled', () => {
beforeEach(() => {
window.gon.features.glTooltips = false;
});
it.each`
method | methodName | bootstrapParams
${dispose} | ${'dispose'} | ${'dispose'}
${fixTitle} | ${'fixTitle'} | ${'_fixTitle'}
${enable} | ${'enable'} | ${'enable'}
${disable} | ${'disable'} | ${'disable'}
${hide} | ${'hide'} | ${'hide'}
${show} | ${'show'} | ${'show'}
${add} | ${'init'} | ${{ title: 'the title' }}
`('delegates $methodName to bootstrap tooltip API', ({ method, bootstrapParams }) => {
const elements = jQuery(createTooltipTarget());
jest.spyOn(jQuery.fn, 'tooltip');
method(elements, bootstrapParams);
expect(elements.tooltip).toHaveBeenCalledWith(bootstrapParams);
});
});
});

View File

@ -73,12 +73,12 @@ RSpec.describe AuthHelper do
describe 'enabled_button_based_providers' do
before do
allow(helper).to receive(:auth_providers) { [:twitter, :github, :google_oauth2] }
allow(helper).to receive(:auth_providers) { [:twitter, :github, :google_oauth2, :openid_connect] }
end
context 'all providers are enabled to sign in' do
it 'returns all the enabled providers from settings' do
expect(helper.enabled_button_based_providers).to include('twitter', 'github', 'google_oauth2')
expect(helper.enabled_button_based_providers).to include('twitter', 'github', 'google_oauth2', 'openid_connect')
end
it 'puts google and github in the beginning' do

View File

@ -82,18 +82,6 @@ RSpec.describe Banzai::Filter::FeatureFlagReferenceFilter do
expect(link).not_to match %r(https?://)
expect(link).to eq urls.edit_project_feature_flag_url(project, feature_flag.iid, only_path: true)
end
context 'when feature_flag_contextual_issue feture flag is disabled' do
before do
stub_feature_flags(feature_flag_contextual_issue: false)
end
it 'does not link the reference' do
doc = reference_filter("See #{reference}")
expect(doc.css('a').first).to be_nil
end
end
end
context 'with cross-project / cross-namespace complete reference' do

View File

@ -33,6 +33,8 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
}
end
subject { described_class.new(context) }
before do
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return([group_data])
@ -44,7 +46,7 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
it 'imports new group into destination group' do
group_path = 'my-destination-group'
subject.run(context)
subject.run
imported_group = Group.find_by_path(group_path)

View File

@ -18,6 +18,8 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
let(:context) { BulkImports::Pipeline::Context.new(entity) }
subject { described_class.new(context) }
def extractor_data(title:, has_next_page:, cursor: nil)
data = [
{
@ -46,7 +48,7 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
.and_return(first_page, last_page)
end
expect { subject.run(context) }.to change(Label, :count).by(2)
expect { subject.run }.to change(Label, :count).by(2)
label = group.labels.order(:created_at).last
@ -61,9 +63,9 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
it 'updates tracker information and runs pipeline again' do
data = extractor_data(title: 'label', has_next_page: true, cursor: cursor)
expect(subject).to receive(:run).with(context)
expect(subject).to receive(:run)
subject.after_run(context, data)
subject.after_run(data)
tracker = entity.trackers.find_by(relation: :labels)
@ -76,9 +78,9 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
it 'updates tracker information and does not run pipeline' do
data = extractor_data(title: 'label', has_next_page: false)
expect(subject).not_to receive(:run).with(context)
expect(subject).not_to receive(:run)
subject.after_run(context, data)
subject.after_run(data)
tracker = entity.trackers.find_by(relation: :labels)

View File

@ -13,6 +13,8 @@ RSpec.describe BulkImports::Groups::Pipelines::MembersPipeline do
let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import, group: group) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(entity) }
subject { described_class.new(context) }
describe '#run' do
it 'maps existing users to the imported group' do
first_page = member_data(email: member_user1.email, has_next_page: true, cursor: cursor)
@ -24,7 +26,7 @@ RSpec.describe BulkImports::Groups::Pipelines::MembersPipeline do
.and_return(first_page, last_page)
end
expect { subject.run(context) }.to change(GroupMember, :count).by(2)
expect { subject.run }.to change(GroupMember, :count).by(2)
members = group.members.map { |m| m.slice(:user_id, :access_level) }

View File

@ -25,7 +25,7 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
]
end
subject { described_class.new }
subject { described_class.new(context) }
before do
allow_next_instance_of(BulkImports::Groups::Extractors::SubgroupsExtractor) do |extractor|
@ -36,7 +36,7 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
end
it 'creates entities for the subgroups' do
expect { subject.run(context) }.to change(BulkImports::Entity, :count).by(1)
expect { subject.run }.to change(BulkImports::Entity, :count).by(1)
subgroup_entity = BulkImports::Entity.last

View File

@ -42,8 +42,8 @@ RSpec.describe BulkImports::Importers::GroupImporter do
end
def expect_to_run_pipeline(klass, context:)
expect_next_instance_of(klass) do |pipeline|
expect(pipeline).to receive(:run).with(context)
expect_next_instance_of(klass, context) do |pipeline|
expect(pipeline).to receive(:run)
end
end
end

View File

@ -112,7 +112,7 @@ RSpec.describe BulkImports::Pipeline::Runner do
)
end
BulkImports::MyPipeline.new.run(context)
BulkImports::MyPipeline.new(context).run
end
context 'when exception is raised' do
@ -126,7 +126,7 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
it 'logs import failure' do
BulkImports::MyPipeline.new.run(context)
BulkImports::MyPipeline.new(context).run
failure = entity.failures.first
@ -143,7 +143,7 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
it 'marks entity as failed' do
BulkImports::MyPipeline.new.run(context)
BulkImports::MyPipeline.new(context).run
expect(entity.failed?).to eq(true)
end
@ -159,13 +159,13 @@ RSpec.describe BulkImports::Pipeline::Runner do
)
end
BulkImports::MyPipeline.new.run(context)
BulkImports::MyPipeline.new(context).run
end
end
context 'when pipeline is not marked to abort on failure' do
it 'marks entity as failed' do
BulkImports::MyPipeline.new.run(context)
BulkImports::MyPipeline.new(context).run
expect(entity.failed?).to eq(false)
end
@ -190,7 +190,7 @@ RSpec.describe BulkImports::Pipeline::Runner do
)
end
BulkImports::MyPipeline.new.run(context)
BulkImports::MyPipeline.new(context).run
end
end
end

View File

@ -97,6 +97,35 @@ RSpec.describe Ci::DailyBuildGroupReportResult do
end
end
describe '.by_ref_path' do
subject(:coverages) { described_class.by_ref_path(recent_build_group_report_result.ref_path) }
it 'returns coverages by ref_path' do
expect(coverages).to contain_exactly(recent_build_group_report_result, old_build_group_report_result)
end
end
describe '.ordered_by_date_and_group_name' do
subject(:coverages) { described_class.ordered_by_date_and_group_name }
it 'returns coverages ordered by data and group name' do
expect(subject).to contain_exactly(recent_build_group_report_result, old_build_group_report_result)
end
end
describe '.by_dates' do
subject(:coverages) { described_class.by_dates(start_date, end_date) }
context 'when daily coverages exist during those dates' do
let(:start_date) { 1.day.ago.to_date.to_s }
let(:end_date) { Date.current.to_s }
it 'returns coverages' do
expect(coverages).to contain_exactly(recent_build_group_report_result)
end
end
end
describe '.with_coverage' do
subject { described_class.with_coverage }

View File

@ -17,4 +17,34 @@ RSpec.describe UserStatus do
expect { status.user.destroy }.to change { described_class.count }.from(1).to(0)
end
describe '#clear_status_after=' do
it 'sets clear_status_at' do
status = build(:user_status)
freeze_time do
status.clear_status_after = '8_hours'
expect(status.clear_status_at).to be_like_time(8.hours.from_now)
end
end
it 'unsets clear_status_at' do
status = build(:user_status, clear_status_at: 8.hours.from_now)
status.clear_status_after = nil
expect(status.clear_status_at).to be_nil
end
context 'when unknown clear status is given' do
it 'unsets clear_status_at' do
status = build(:user_status, clear_status_at: 8.hours.from_now)
status.clear_status_after = 'unknown'
expect(status.clear_status_at).to be_nil
end
end
end
end

View File

@ -2866,6 +2866,47 @@ RSpec.describe API::Users do
expect(response).to have_gitlab_http_status(:success)
expect(user.reload.status).to be_nil
end
context 'when clear_status_after is given' do
it 'sets the clear_status_at column' do
freeze_time do
expected_clear_status_at = 3.hours.from_now
put api('/user/status', user), params: { emoji: 'smirk', message: 'hello world', clear_status_after: '3_hours' }
expect(response).to have_gitlab_http_status(:success)
expect(user.status.reload.clear_status_at).to be_within(1.minute).of(expected_clear_status_at)
expect(Time.parse(json_response["clear_status_at"])).to be_within(1.minute).of(expected_clear_status_at)
end
end
it 'unsets the clear_status_at column' do
user.create_status!(clear_status_at: 5.hours.ago)
put api('/user/status', user), params: { emoji: 'smirk', message: 'hello world', clear_status_after: nil }
expect(response).to have_gitlab_http_status(:success)
expect(user.status.reload.clear_status_at).to be_nil
end
it 'raises error when unknown status value is given' do
put api('/user/status', user), params: { emoji: 'smirk', message: 'hello world', clear_status_after: 'wrong' }
expect(response).to have_gitlab_http_status(:bad_request)
end
context 'when the clear_status_with_quick_options feature flag is disabled' do
before do
stub_feature_flags(clear_status_with_quick_options: false)
end
it 'does not persist clear_status_at' do
put api('/user/status', user), params: { emoji: 'smirk', message: 'hello world', clear_status_after: '3_hours' }
expect(user.status.reload.clear_status_at).to be_nil
end
end
end
end
describe 'POST /users/:user_id/personal_access_tokens' do

View File

@ -0,0 +1,43 @@
# frozen_string_literal: true

require 'spec_helper'

# Spec for the batch cleaner that deletes user statuses whose
# clear_status_at timestamp has already passed.
RSpec.describe Users::BatchStatusCleanerService do
  # Two expired statuses (past clear_status_at), one scheduled in the
  # future, and one with no expiry at all — only the first two qualify.
  let_it_be(:user_status_1) { create(:user_status, emoji: 'coffee', message: 'msg1', clear_status_at: 1.year.ago) }
  let_it_be(:user_status_2) { create(:user_status, emoji: 'coffee', message: 'msg1', clear_status_at: 1.year.from_now) }
  let_it_be(:user_status_3) { create(:user_status, emoji: 'coffee', message: 'msg1', clear_status_at: 2.years.ago) }
  let_it_be(:user_status_4) { create(:user_status, emoji: 'coffee', message: 'msg1') }

  subject(:result) { described_class.execute }

  it 'cleans up scheduled user statuses' do
    # Exactly the two expired rows (statuses 1 and 3) are removed.
    expect(result[:deleted_rows]).to eq(2)

    deleted_statuses = UserStatus.where(user_id: [user_status_1.user_id, user_status_3.user_id])

    expect(deleted_statuses).to be_empty
  end

  it 'does not affect rows with future clear_status_at' do
    expect { result }.not_to change { user_status_2.reload }
  end

  it 'does not affect rows without clear_status_at' do
    expect { result }.not_to change { user_status_4.reload }
  end

  describe 'batch_size' do
    it 'clears status in batches' do
      # With batch_size: 1 each call deletes at most one expired row,
      # so the two expired fixtures take two calls; a third finds none.
      result = described_class.execute(batch_size: 1)

      expect(result[:deleted_rows]).to eq(1)

      result = described_class.execute(batch_size: 1)

      expect(result[:deleted_rows]).to eq(1)

      result = described_class.execute(batch_size: 1)

      expect(result[:deleted_rows]).to eq(0)
    end
  end
end

View File

@ -154,6 +154,15 @@ RSpec.shared_examples 'wiki model' do
it 'returns true' do
expect(subject.empty?).to be(true)
end
context 'when the repository does not exist' do
let(:wiki_container) { wiki_container_without_repo }
it 'returns true and does not create the repo' do
expect(subject.empty?).to be(true)
expect(wiki.repository_exists?).to be false
end
end
end
context 'when the wiki has pages' do

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true

require 'spec_helper'

# Spec for the worker that removes user statuses whose clear_status_at
# has elapsed. Deleting already-deleted rows is a no-op, hence idempotent.
RSpec.describe UserStatusCleanup::BatchWorker do
  include_examples 'an idempotent worker' do
    subject do
      perform_multiple([], worker: described_class.new)
    end
  end

  describe '#perform' do
    subject(:run_worker) { described_class.new.perform }

    context 'when no records are scheduled for cleanup' do
      # Status without clear_status_at — must be left untouched.
      let(:user_status) { create(:user_status) }

      it 'does nothing' do
        expect { run_worker }.not_to change { user_status.reload }
      end
    end

    it 'cleans up the records' do
      # Both statuses have clear_status_at in the past, so a single run
      # should delete them all.
      user_status_1 = create(:user_status, clear_status_at: 1.year.ago)
      user_status_2 = create(:user_status, clear_status_at: 2.years.ago)

      run_worker

      deleted_statuses = UserStatus.where(user_id: [user_status_1.user_id, user_status_2.user_id])

      expect(deleted_statuses).to be_empty
    end
  end
end