Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-06-10 12:09:36 +00:00
parent f42c4be1c0
commit 948023c9c9
113 changed files with 1345 additions and 956 deletions

View File

@ -614,10 +614,6 @@ Gitlab/NamespacedClass:
- 'app/serializers/route_entity.rb'
- 'app/serializers/route_serializer.rb'
- 'app/serializers/runner_entity.rb'
- 'app/serializers/service_event_entity.rb'
- 'app/serializers/service_event_serializer.rb'
- 'app/serializers/service_field_entity.rb'
- 'app/serializers/service_field_serializer.rb'
- 'app/serializers/stage_entity.rb'
- 'app/serializers/stage_serializer.rb'
- 'app/serializers/suggestion_entity.rb'

View File

@ -769,7 +769,6 @@ Layout/LineLength:
- 'app/views/projects/merge_requests/index.atom.builder'
- 'app/workers/analytics/usage_trends/counter_job_worker.rb'
- 'app/workers/background_migration/single_database_worker.rb'
- 'app/workers/clusters/applications/deactivate_service_worker.rb'
- 'app/workers/concerns/application_worker.rb'
- 'app/workers/concerns/each_shard_worker.rb'
- 'app/workers/concerns/limited_capacity/worker.rb'
@ -5895,7 +5894,6 @@ Layout/LineLength:
- 'spec/serializers/paginated_diff_entity_spec.rb'
- 'spec/serializers/pipeline_serializer_spec.rb'
- 'spec/serializers/review_app_setup_entity_spec.rb'
- 'spec/serializers/service_field_entity_spec.rb'
- 'spec/services/alert_management/alerts/update_service_spec.rb'
- 'spec/services/alert_management/create_alert_issue_service_spec.rb'
- 'spec/services/alert_management/http_integrations/create_service_spec.rb'
@ -6503,8 +6501,6 @@ Layout/LineLength:
- 'spec/workers/ci/ref_delete_unlock_artifacts_worker_spec.rb'
- 'spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb'
- 'spec/workers/cluster_wait_for_app_update_worker_spec.rb'
- 'spec/workers/clusters/applications/activate_service_worker_spec.rb'
- 'spec/workers/clusters/applications/deactivate_service_worker_spec.rb'
- 'spec/workers/clusters/integrations/check_prometheus_health_worker_spec.rb'
- 'spec/workers/concerns/application_worker_spec.rb'
- 'spec/workers/concerns/project_import_options_spec.rb'

View File

@ -168,7 +168,6 @@ Layout/SpaceInLambdaLiteral:
- 'app/serializers/review_app_setup_entity.rb'
- 'app/serializers/rollout_status_entity.rb'
- 'app/serializers/runner_entity.rb'
- 'app/serializers/service_event_entity.rb'
- 'app/serializers/stage_entity.rb'
- 'app/serializers/test_case_entity.rb'
- 'app/serializers/test_suite_entity.rb'

View File

@ -824,7 +824,6 @@ RSpec/ContextWording:
- 'ee/spec/serializers/member_user_entity_spec.rb'
- 'ee/spec/serializers/merge_request_widget_entity_spec.rb'
- 'ee/spec/serializers/project_mirror_entity_spec.rb'
- 'ee/spec/serializers/service_field_entity_spec.rb'
- 'ee/spec/serializers/vulnerabilities/finding_entity_spec.rb'
- 'ee/spec/services/alert_management/process_prometheus_alert_service_spec.rb'
- 'ee/spec/services/analytics/cycle_analytics/consistency_check_service_spec.rb'
@ -3195,8 +3194,6 @@ RSpec/ContextWording:
- 'spec/serializers/merge_request_widget_entity_spec.rb'
- 'spec/serializers/paginated_diff_entity_spec.rb'
- 'spec/serializers/pipeline_details_entity_spec.rb'
- 'spec/serializers/service_event_entity_spec.rb'
- 'spec/serializers/service_field_entity_spec.rb'
- 'spec/serializers/stage_entity_spec.rb'
- 'spec/serializers/user_serializer_spec.rb'
- 'spec/services/access_token_validation_service_spec.rb'
@ -3874,9 +3871,6 @@ RSpec/ContextWording:
- 'spec/workers/cleanup_container_repository_worker_spec.rb'
- 'spec/workers/cluster_update_app_worker_spec.rb'
- 'spec/workers/clusters/agents/delete_expired_events_worker_spec.rb'
- 'spec/workers/clusters/applications/activate_service_worker_spec.rb'
- 'spec/workers/clusters/applications/deactivate_service_worker_spec.rb'
- 'spec/workers/clusters/applications/wait_for_uninstall_app_worker_spec.rb'
- 'spec/workers/concerns/application_worker_spec.rb'
- 'spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb'
- 'spec/workers/container_expiration_policy_worker_spec.rb'

View File

@ -146,7 +146,6 @@ RSpec/VerifiedDoubles:
- ee/spec/serializers/merge_request_poll_widget_entity_spec.rb
- ee/spec/serializers/merge_request_sidebar_basic_entity_spec.rb
- ee/spec/serializers/merge_request_widget_entity_spec.rb
- ee/spec/serializers/service_field_entity_spec.rb
- ee/spec/serializers/test_reports_comparer_serializer_spec.rb
- ee/spec/serializers/user_analytics_entity_spec.rb
- ee/spec/serializers/vulnerabilities/feedback_entity_spec.rb
@ -959,8 +958,6 @@ RSpec/VerifiedDoubles:
- spec/serializers/prometheus_alert_entity_spec.rb
- spec/serializers/review_app_setup_entity_spec.rb
- spec/serializers/runner_entity_spec.rb
- spec/serializers/service_event_entity_spec.rb
- spec/serializers/service_field_entity_spec.rb
- spec/serializers/stage_entity_spec.rb
- spec/serializers/suggestion_entity_spec.rb
- spec/serializers/test_reports_comparer_serializer_spec.rb

View File

@ -5,7 +5,6 @@ Style/OpenStructUse:
- ee/spec/finders/template_finder_spec.rb
- ee/spec/helpers/ee/blob_helper_spec.rb
- ee/spec/lib/gitlab/auth/group_saml/failure_handler_spec.rb
- ee/spec/lib/gitlab/legacy_github_import/project_creator_spec.rb
- lib/gitlab/testing/request_inspector_middleware.rb
- spec/factories/wiki_pages.rb
- spec/helpers/application_settings_helper_spec.rb

View File

@ -88,17 +88,11 @@ export const publishReview = ({ commit, dispatch, getters }) => {
};
export const updateDiscussionsAfterPublish = async ({ dispatch, getters, rootGetters }) => {
if (window.gon?.features?.paginatedNotes) {
await dispatch('stopPolling', null, { root: true });
await dispatch('fetchData', null, { root: true });
await dispatch('restartPolling', null, { root: true });
} else {
await dispatch(
'fetchDiscussions',
{ path: getters.getNotesData.discussionsPath },
{ root: true },
);
}
await dispatch(
'fetchDiscussions',
{ path: getters.getNotesData.discussionsPath },
{ root: true },
);
dispatch('diffs/assignDiscussionsToDiff', rootGetters.discussionsStructuredByLineCode, {
root: true,

View File

@ -9,6 +9,7 @@ import { IssueType } from '~/issues/constants';
import Issue from '~/issues/issue';
import { initTitleSuggestions, initTypePopover } from '~/issues/new';
import { initRelatedMergeRequests } from '~/issues/related_merge_requests';
import initRelatedIssues from '~/related_issues';
import {
initHeaderActions,
initIncidentApp,
@ -58,6 +59,7 @@ export function initShow() {
if (issueType === IssueType.Incident) {
initIncidentApp(issuableData);
initHeaderActions(store, IssueType.Incident);
initRelatedIssues(IssueType.Incident);
} else {
initIssueApp(issuableData, store);
initHeaderActions(store);

View File

@ -210,10 +210,6 @@ export default {
this.setFetchingState(true);
if (this.glFeatures.paginatedNotes) {
return this.initPolling();
}
return this.fetchDiscussions(this.getFetchDiscussionsConfig())
.then(this.initPolling)
.then(() => {

View File

@ -19,7 +19,6 @@ import TaskList from '~/task_list';
import mrWidgetEventHub from '~/vue_merge_request_widget/event_hub';
import SidebarStore from '~/sidebar/stores/sidebar_store';
import * as constants from '../constants';
import eventHub from '../event_hub';
import * as types from './mutation_types';
import * as utils from './utils';
@ -499,13 +498,6 @@ const pollSuccessCallBack = async (resp, commit, state, getters, dispatch) => {
return null;
}
if (window.gon?.features?.paginatedNotes && !resp.more && state.isFetching) {
eventHub.$emit('fetchedNotesData');
dispatch('setFetchingState', false);
dispatch('setNotesFetchedState', true);
dispatch('setLoadingState', false);
}
if (resp.notes?.length) {
await dispatch('updateOrCreateNotes', resp.notes);
dispatch('startTaskList');

View File

@ -32,20 +32,6 @@ export default {
}
}
if (window.gon?.features?.paginatedNotes && note.base_discussion) {
if (discussion.diff_file) {
discussion.file_hash = discussion.diff_file.file_hash;
discussion.truncated_diff_lines = utils.prepareDiffLines(
discussion.truncated_diff_lines || [],
);
}
discussion.resolvable = note.resolvable;
discussion.expanded = note.base_discussion.expanded;
discussion.resolved = note.resolved;
}
// note.base_discussion = undefined; // No point keeping a reference to this
delete note.base_discussion;
discussion.notes = [note];

View File

@ -1,9 +1,7 @@
import { initShow } from '~/issues';
import initRelatedIssues from '~/related_issues';
import initSidebarBundle from '~/sidebar/sidebar_bundle';
import initWorkItemLinks from '~/work_items/components/work_item_links';
initShow();
initSidebarBundle();
initRelatedIssues();
initWorkItemLinks();

View File

@ -2,6 +2,7 @@ import { __, sprintf } from '~/locale';
export const issuableTypesMap = {
ISSUE: 'issue',
INCIDENT: 'incident',
EPIC: 'epic',
MERGE_REQUEST: 'merge_request',
};
@ -25,6 +26,11 @@ export const autoCompleteTextMap = {
{ emphasisStart: '<', emphasisEnd: '>' },
false,
),
[issuableTypesMap.INCIDENT]: sprintf(
__(' or %{emphasisStart}#id%{emphasisEnd}'),
{ emphasisStart: '<', emphasisEnd: '>' },
false,
),
[issuableTypesMap.EPIC]: sprintf(
__(' or %{emphasisStart}&epic id%{emphasisEnd}'),
{ emphasisStart: '<', emphasisEnd: '>' },
@ -45,6 +51,7 @@ export const autoCompleteTextMap = {
export const inputPlaceholderTextMap = {
[issuableTypesMap.ISSUE]: __('Paste issue link'),
[issuableTypesMap.INCIDENT]: __('Paste link'),
[issuableTypesMap.EPIC]: __('Paste epic link'),
[issuableTypesMap.MERGE_REQUEST]: __('Enter merge request URLs'),
};
@ -88,6 +95,7 @@ export const addRelatedItemErrorMap = {
*/
export const issuableIconMap = {
[issuableTypesMap.ISSUE]: 'issues',
[issuableTypesMap.INCIDENT]: 'issues',
[issuableTypesMap.EPIC]: 'epic',
};
@ -107,6 +115,7 @@ export const PathIdSeparator = {
export const issuablesBlockHeaderTextMap = {
[issuableTypesMap.ISSUE]: __('Linked issues'),
[issuableTypesMap.INCIDENT]: __('Related incidents or issues'),
[issuableTypesMap.EPIC]: __('Linked epics'),
};
@ -122,10 +131,12 @@ export const issuablesBlockAddButtonTextMap = {
export const issuablesFormCategoryHeaderTextMap = {
[issuableTypesMap.ISSUE]: __('The current issue'),
[issuableTypesMap.INCIDENT]: __('The current incident'),
[issuableTypesMap.EPIC]: __('The current epic'),
};
export const issuablesFormInputTextMap = {
[issuableTypesMap.ISSUE]: __('the following issue(s)'),
[issuableTypesMap.INCIDENT]: __('the following incident(s) or issue(s)'),
[issuableTypesMap.EPIC]: __('the following epic(s)'),
};

View File

@ -2,7 +2,7 @@ import Vue from 'vue';
import { parseBoolean } from '~/lib/utils/common_utils';
import RelatedIssuesRoot from './components/related_issues_root.vue';
export default function initRelatedIssues() {
export default function initRelatedIssues(issueType = 'issue') {
const relatedIssuesRootElement = document.querySelector('.js-related-issues-root');
if (relatedIssuesRootElement) {
// eslint-disable-next-line no-new
@ -21,6 +21,7 @@ export default function initRelatedIssues() {
showCategorizedIssues: parseBoolean(
relatedIssuesRootElement.dataset.showCategorizedIssues,
),
issuableType: issueType,
autoCompleteEpics: false,
},
}),

View File

@ -0,0 +1,23 @@
import updateWorkItemMutation from '../graphql/update_work_item.mutation.graphql';
import updateWorkItemTaskMutation from '../graphql/update_work_item_task.mutation.graphql';
export function getUpdateWorkItemMutation({ input, workItemParentId }) {
let mutation = updateWorkItemMutation;
const variables = {
input,
};
if (workItemParentId) {
mutation = updateWorkItemTaskMutation;
variables.input = {
id: workItemParentId,
taskData: input,
};
}
return {
mutation,
variables,
};
}

View File

@ -37,6 +37,11 @@ export default {
required: false,
default: null,
},
workItemParentId: {
type: String,
required: false,
default: null,
},
},
data() {
return {
@ -115,9 +120,9 @@ export default {
:work-item-id="workItem.id"
:work-item-title="workItem.title"
:work-item-type="workItemType"
:work-item-parent-id="workItemParentId"
class="gl-mr-5"
@error="error = $event"
@updated="$emit('workItemUpdated')"
/>
<work-item-actions
:work-item-id="workItem.id"
@ -133,8 +138,8 @@ export default {
</template>
<work-item-state
:work-item="workItem"
:work-item-parent-id="workItemParentId"
@error="error = $event"
@updated="$emit('workItemUpdated')"
/>
<work-item-description
v-if="workItemDescription"

View File

@ -37,7 +37,7 @@ export default {
default: null,
},
},
emits: ['workItemDeleted', 'workItemUpdated', 'close'],
emits: ['workItemDeleted', 'close'],
data() {
return {
error: undefined,
@ -104,9 +104,9 @@ export default {
</gl-alert>
<work-item-detail
:work-item-parent-id="issueGid"
:work-item-id="workItemId"
@deleteWorkItem="deleteWorkItem"
@workItemUpdated="$emit('workItemUpdated')"
/>
</gl-modal>
</template>

View File

@ -9,7 +9,7 @@ import {
STATE_EVENT_REOPEN,
TRACKING_CATEGORY_SHOW,
} from '../constants';
import updateWorkItemMutation from '../graphql/update_work_item.mutation.graphql';
import { getUpdateWorkItemMutation } from './update_work_item';
import ItemState from './item_state.vue';
export default {
@ -22,6 +22,11 @@ export default {
type: Object,
required: true,
},
workItemParentId: {
type: String,
required: false,
default: null,
},
},
data() {
return {
@ -41,7 +46,7 @@ export default {
},
},
methods: {
async updateWorkItemState(newState) {
updateWorkItemState(newState) {
const stateEventMap = {
[STATE_OPEN]: STATE_EVENT_REOPEN,
[STATE_CLOSED]: STATE_EVENT_CLOSE,
@ -49,35 +54,39 @@ export default {
const stateEvent = stateEventMap[newState];
await this.updateWorkItem(stateEvent);
this.updateWorkItem(stateEvent);
},
async updateWorkItem(updatedState) {
if (!updatedState) {
return;
}
const input = {
id: this.workItem.id,
stateEvent: updatedState,
};
this.updateInProgress = true;
try {
this.track('updated_state');
const {
data: { workItemUpdate },
} = await this.$apollo.mutate({
mutation: updateWorkItemMutation,
variables: {
input: {
id: this.workItem.id,
stateEvent: updatedState,
},
},
const { mutation, variables } = getUpdateWorkItemMutation({
workItemParentId: this.workItemParentId,
input,
});
if (workItemUpdate?.errors?.length) {
throw new Error(workItemUpdate.errors[0]);
}
const { data } = await this.$apollo.mutate({
mutation,
variables,
});
this.$emit('updated');
const errors = data.workItemUpdate?.errors;
if (errors?.length) {
throw new Error(errors[0]);
}
} catch (error) {
this.$emit('error', i18n.updateError);
Sentry.captureException(error);

View File

@ -1,7 +1,8 @@
<script>
import * as Sentry from '@sentry/browser';
import Tracking from '~/tracking';
import { i18n, TRACKING_CATEGORY_SHOW } from '../constants';
import updateWorkItemMutation from '../graphql/update_work_item.mutation.graphql';
import { getUpdateWorkItemMutation } from './update_work_item';
import ItemTitle from './item_title.vue';
export default {
@ -25,6 +26,11 @@ export default {
required: false,
default: '',
},
workItemParentId: {
type: String,
required: false,
default: null,
},
},
computed: {
tracking() {
@ -41,21 +47,37 @@ export default {
return;
}
const input = {
id: this.workItemId,
title: updatedTitle,
};
this.updateInProgress = true;
try {
await this.$apollo.mutate({
mutation: updateWorkItemMutation,
variables: {
input: {
id: this.workItemId,
title: updatedTitle,
},
},
});
this.track('updated_title');
this.$emit('updated');
} catch {
const { mutation, variables } = getUpdateWorkItemMutation({
workItemParentId: this.workItemParentId,
input,
});
const { data } = await this.$apollo.mutate({
mutation,
variables,
});
const errors = data.workItemUpdate?.errors;
if (errors?.length) {
throw new Error(errors[0]);
}
} catch (error) {
this.$emit('error', i18n.updateError);
Sentry.captureException(error);
}
this.updateInProgress = false;
},
},
};

View File

@ -0,0 +1,8 @@
mutation workItemUpdateTask($input: WorkItemUpdateTaskInput!) {
workItemUpdate: workItemUpdateTask(input: $input) {
workItem {
id
descriptionHtml
}
}
}

View File

@ -1,4 +1,4 @@
.gl-alert{ role: 'alert', class: [base_class, @alert_class], data: @alert_data }
.gl-alert{ @alert_options, role: 'alert', class: [base_class, @alert_class], data: @alert_data }
- if @show_icon
= sprite_icon(icon, css_class: icon_classes)
- if @dismissible

View File

@ -13,13 +13,14 @@ module Pajamas
# @param [Hash] close_button_data
def initialize(
title: nil, variant: :info, dismissible: true, show_icon: true,
alert_class: nil, alert_data: {}, close_button_class: nil, close_button_data: {})
alert_class: nil, alert_data: {}, alert_options: {}, close_button_class: nil, close_button_data: {})
@title = title
@variant = variant
@dismissible = dismissible
@show_icon = show_icon
@alert_class = alert_class
@alert_data = alert_data
@alert_options = alert_options
@close_button_class = close_button_class
@close_button_data = close_button_data
end
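As a usage sketch only (mirroring how the FormHelper change later in this commit passes the new parameter), anything placed in `alert_options` is merged into the attributes of the `.gl-alert` wrapper element; the `id` value here is illustrative:

```ruby
# Minimal sketch, not part of the diff: render the component with the new
# alert_options hash so the wrapper element carries an id attribute.
render Pajamas::AlertComponent.new(
  variant: :danger,
  dismissible: false,
  alert_class: 'gl-mb-5',
  alert_options: { id: 'error_explanation' }
) do |c|
  c.body { 'Something went wrong.' }
end
```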

View File

@ -6,8 +6,7 @@ module NotesActions
extend ActiveSupport::Concern
# last_fetched_at is an integer number of microseconds, which is the same
# precision as PostgreSQL "timestamp" fields. It's important for them to have
# identical precision for accurate pagination
# precision as PostgreSQL "timestamp" fields.
MICROSECOND = 1_000_000
included do
@ -23,7 +22,7 @@ module NotesActions
end
def index
notes, meta = gather_notes
notes, meta = gather_all_notes
notes = prepare_notes_for_rendering(notes)
notes = notes.select { |n| n.readable_by?(current_user) }
notes =
@ -33,11 +32,7 @@ module NotesActions
notes.map { |note| note_json(note) }
end
# We know there's more data, so tell the frontend to poll again after 1ms
set_polling_interval_header(interval: 1) if meta[:more]
# Only present an ETag for the empty response to ensure pagination works
# as expected
# Only present an ETag for the empty response
::Gitlab::EtagCaching::Middleware.skip!(response) if notes.present?
render json: meta.merge(notes: notes)
@ -105,17 +100,6 @@ module NotesActions
private
# Lower bound (last_fetched_at as specified in the request) is already set in
# the finder. Here, we select between returning all notes since then, or a
# page's worth of notes.
def gather_notes
if Feature.enabled?(:paginated_notes, noteable.try(:resource_parent))
gather_some_notes
else
gather_all_notes
end
end
def gather_all_notes
now = Time.current
notes = merge_resource_events(notes_finder.execute.inc_relations_for_view)
@ -123,27 +107,11 @@ module NotesActions
[notes, { last_fetched_at: (now.to_i * MICROSECOND) + now.usec }]
end
def gather_some_notes
paginator = ::Gitlab::UpdatedNotesPaginator.new(
notes_finder.execute.inc_relations_for_view,
last_fetched_at: last_fetched_at
)
notes = paginator.notes
# Fetch all the synthetic notes in the same time range as the real notes.
# Although we don't limit the number, their text is under our control so
# should be fairly cheap to process.
notes = merge_resource_events(notes, fetch_until: paginator.next_fetched_at)
[notes, paginator.metadata]
end
def merge_resource_events(notes, fetch_until: nil)
def merge_resource_events(notes)
return notes if notes_filter == UserPreference::NOTES_FILTERS[:only_comments]
ResourceEvents::MergeIntoNotesService
.new(noteable, current_user, last_fetched_at: last_fetched_at, fetch_until: fetch_until)
.new(noteable, current_user, last_fetched_at: last_fetched_at)
.execute(notes)
end

View File

@ -35,7 +35,6 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
push_frontend_feature_flag(:file_identifier_hash)
push_frontend_feature_flag(:merge_request_widget_graphql, project)
push_frontend_feature_flag(:core_security_mr_widget_counts, project)
push_frontend_feature_flag(:paginated_notes, project)
push_frontend_feature_flag(:confidential_notes, project)
push_frontend_feature_flag(:restructured_mr_widget, project)
push_frontend_feature_flag(:refactor_mr_widgets_extensions, project)

View File

@ -7,6 +7,8 @@ module Types
authorize :read_release
present_using Releases::LinkPresenter
field :external, GraphQL::Types::Boolean, null: true, method: :external?,
description: 'Indicates the link points to an external resource.'
field :id, GraphQL::Types::ID, null: false,
@ -22,12 +24,5 @@ module Types
description: 'Relative path for the direct asset link.'
field :direct_asset_url, GraphQL::Types::String, null: true,
description: 'Direct asset URL of the link.'
def direct_asset_url
return object.url unless object.filepath
release = object.release.present
release.download_url(object.filepath)
end
end
end

View File

@ -62,7 +62,7 @@ module EmailsHelper
end
def header_logo
if current_appearance&.header_logo?
if current_appearance&.header_logo? && !current_appearance.header_logo.filename.ends_with?('.svg')
image_tag(
current_appearance.header_logo_path,
style: 'height: 50px'

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
module FormHelper
def form_errors(model, type: 'form', truncate: [])
def form_errors(model, type: 'form', truncate: [], pajamas_alert: false)
errors = model.errors
return unless errors.any?
@ -14,22 +14,38 @@ module FormHelper
truncate = Array.wrap(truncate)
tag.div(class: 'alert alert-danger', id: 'error_explanation') do
tag.h4(headline) <<
tag.ul do
messages = errors.map do |error|
attribute = error.attribute
message = error.message
messages = errors.map do |error|
attribute = error.attribute
message = error.message
message = html_escape_once(errors.full_message(attribute, message)).html_safe
message = tag.span(message, class: 'str-truncated-100') if truncate.include?(attribute)
message = append_help_page_link(message, error.options) if error.options[:help_page_url].present?
message = html_escape_once(errors.full_message(attribute, message)).html_safe
message = tag.span(message, class: 'str-truncated-100') if truncate.include?(attribute)
message = append_help_page_link(message, error.options) if error.options[:help_page_url].present?
tag.li(message)
tag.li(message)
end.join.html_safe
if pajamas_alert
render Pajamas::AlertComponent.new(
variant: :danger,
title: headline,
dismissible: false,
alert_class: 'gl-mb-5',
alert_options: { id: 'error_explanation' }
) do |c|
c.body do
tag.ul(class: 'gl-pl-5 gl-mb-0') do
messages
end
messages.join.html_safe
end
end
else
tag.div(class: 'alert alert-danger', id: 'error_explanation') do
tag.h4(headline) <<
tag.ul do
messages
end
end
end
end

View File

@ -233,11 +233,11 @@ module IntegrationsHelper
end
def trigger_events_for_integration(integration)
ServiceEventSerializer.new(service: integration).represent(integration.configurable_events).to_json
Integrations::EventSerializer.new(integration: integration).represent(integration.configurable_events).to_json
end
def fields_for_integration(integration)
ServiceFieldSerializer.new(service: integration).represent(integration.global_fields).to_json
Integrations::FieldSerializer.new(integration: integration).represent(integration.global_fields).to_json
end
def integration_level(integration)

View File

@ -175,9 +175,7 @@ module NotesHelper
end
end
def notes_data(issuable, start_at_zero = false)
initial_last_fetched_at = start_at_zero ? 0 : Time.current.to_i * ::Gitlab::UpdatedNotesPaginator::MICROSECOND
def notes_data(issuable)
data = {
discussionsPath: discussions_path(issuable),
registerPath: new_session_path(:user, redirect_to_referer: 'yes', anchor: 'register-pane'),
@ -188,7 +186,7 @@ module NotesHelper
reopenPath: reopen_issuable_path(issuable),
notesPath: notes_url,
prerenderedNotesCount: issuable.capped_notes_count(MAX_PRERENDERED_NOTES),
lastFetchedAt: initial_last_fetched_at,
lastFetchedAt: Time.current.to_i * NotesActions::MICROSECOND,
notesFilter: current_user&.notes_filter_for(issuable)
}

View File

@ -55,13 +55,23 @@ module Clusters
private
def activate_project_integrations
::Clusters::Applications::ActivateServiceWorker
.perform_async(cluster_id, ::Integrations::Prometheus.to_param)
if Feature.enabled?(:rename_integrations_workers)
::Clusters::Applications::ActivateIntegrationWorker
.perform_async(cluster_id, ::Integrations::Prometheus.to_param)
else
::Clusters::Applications::ActivateServiceWorker
.perform_async(cluster_id, ::Integrations::Prometheus.to_param)
end
end
def deactivate_project_integrations
::Clusters::Applications::DeactivateServiceWorker
.perform_async(cluster_id, ::Integrations::Prometheus.to_param)
if Feature.enabled?(:rename_integrations_workers)
::Clusters::Applications::DeactivateIntegrationWorker
.perform_async(cluster_id, ::Integrations::Prometheus.to_param)
else
::Clusters::Applications::DeactivateServiceWorker
.perform_async(cluster_id, ::Integrations::Prometheus.to_param)
end
end
end
end

View File

@ -288,7 +288,7 @@ class Deployment < ApplicationRecord
end
def manual_actions
Feature.enabled?(:deployment_environment_manual_actions) ? environment_manual_actions : other_manual_actions
environment_manual_actions
end
def other_manual_actions
@ -300,7 +300,7 @@ class Deployment < ApplicationRecord
end
def scheduled_actions
Feature.enabled?(:deployment_environment_manual_actions) ? environment_scheduled_actions : other_scheduled_actions
environment_scheduled_actions
end
def environment_scheduled_actions

View File

@ -295,7 +295,7 @@ class Issue < ApplicationRecord
end
def self.link_reference_pattern
@link_reference_pattern ||= super("issues", Gitlab::Regex.issue)
@link_reference_pattern ||= super(%r{issues(?:\/incident)?}, Gitlab::Regex.issue)
end
def self.reference_valid?(reference)
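A minimal sketch (illustrative only; the real pattern is assembled by `super` together with `Gitlab::Regex.issue`) of what the widened path fragment now accepts:

```ruby
# Sketch: the optional /incident segment lets incident URLs be recognised
# as issue link references as well as plain issue URLs.
fragment = %r{issues(?:/incident)?}

fragment.match?('issues')          # => true
fragment.match?('issues/incident') # => true
```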

View File

@ -124,7 +124,6 @@ class Note < ApplicationRecord
scope :common, -> { where(noteable_type: ["", nil]) }
scope :fresh, -> { order_created_asc.with_order_id_asc }
scope :updated_after, ->(time) { where('updated_at > ?', time) }
scope :with_updated_at, ->(time) { where(updated_at: time) }
scope :with_discussion_ids, ->(discussion_ids) { where(discussion_id: discussion_ids) }
scope :with_suggestions, -> { joins(:suggestions) }
scope :inc_author, -> { includes(:author) }

View File

@ -11,7 +11,6 @@ class ResourceEvent < ApplicationRecord
belongs_to :user
scope :created_after, ->(time) { where('created_at > ?', time) }
scope :created_on_or_before, ->(time) { where('created_at <= ?', time) }
def discussion_id
strong_memoize(:discussion_id) do

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true
module Releases
class LinkPresenter < Gitlab::View::Presenter::Delegated
def direct_asset_url
return @subject.url unless @subject.filepath
release = @subject.release.present
release.download_url(@subject.filepath)
end
end
end

View File

@ -0,0 +1,46 @@
# frozen_string_literal: true
module Integrations
class EventEntity < Grape::Entity
include RequestAwareEntity
expose :title do |event|
IntegrationsHelper.integration_event_title(event)
end
expose :event_field_name, as: :name
expose :value do |event|
integration[event_field_name]
end
expose :description do |event|
IntegrationsHelper.integration_event_description(integration, event)
end
expose :field, if: ->(_, _) { event_field } do
expose :name do |event|
event_field[:name]
end
expose :value do |event|
integration.public_send(event_field[:name]) # rubocop:disable GitlabSecurity/PublicSend
end
end
private
alias_method :event, :object
def event_field_name
IntegrationsHelper.integration_event_field_name(event)
end
def event_field
@event_field ||= integration.event_field(event)
end
def integration
request.integration
end
end
end

View File

@ -0,0 +1,7 @@
# frozen_string_literal: true
module Integrations
class EventSerializer < BaseSerializer
entity Integrations::EventEntity
end
end

View File

@ -0,0 +1,49 @@
# frozen_string_literal: true
module Integrations
class FieldEntity < Grape::Entity
include RequestAwareEntity
include Gitlab::Utils::StrongMemoize
expose :section, :type, :name, :placeholder, :required, :choices, :checkbox_label
expose :title do |field|
non_empty_password?(field) ? field[:non_empty_password_title] : field[:title]
end
expose :help do |field|
non_empty_password?(field) ? field[:non_empty_password_help] : field[:help]
end
expose :value do |field|
value = value_for(field)
if non_empty_password?(field)
'true'
elsif field[:type] == 'checkbox'
ActiveRecord::Type::Boolean.new.deserialize(value).to_s
else
value
end
end
private
def integration
request.integration
end
def value_for(field)
strong_memoize(:value_for) do
# field[:name] is not user input and so can be assumed to be safe
integration.public_send(field[:name]) # rubocop:disable GitlabSecurity/PublicSend
end
end
def non_empty_password?(field)
strong_memoize(:non_empty_password) do
field[:type] == 'password' && value_for(field).present?
end
end
end
end

View File

@ -0,0 +1,7 @@
# frozen_string_literal: true
module Integrations
class FieldSerializer < BaseSerializer
entity Integrations::FieldEntity
end
end

View File

@ -1,44 +0,0 @@
# frozen_string_literal: true
class ServiceEventEntity < Grape::Entity
include RequestAwareEntity
expose :title do |event|
IntegrationsHelper.integration_event_title(event)
end
expose :event_field_name, as: :name
expose :value do |event|
integration[event_field_name]
end
expose :description do |event|
IntegrationsHelper.integration_event_description(integration, event)
end
expose :field, if: -> (_, _) { event_field } do
expose :name do |event|
event_field[:name]
end
expose :value do |event|
integration.public_send(event_field[:name]) # rubocop:disable GitlabSecurity/PublicSend
end
end
private
alias_method :event, :object
def event_field_name
IntegrationsHelper.integration_event_field_name(event)
end
def event_field
@event_field ||= integration.event_field(event)
end
def integration
request.service
end
end

View File

@ -1,5 +0,0 @@
# frozen_string_literal: true
class ServiceEventSerializer < BaseSerializer
entity ServiceEventEntity
end

View File

@ -1,47 +0,0 @@
# frozen_string_literal: true
class ServiceFieldEntity < Grape::Entity
include RequestAwareEntity
include Gitlab::Utils::StrongMemoize
expose :section, :type, :name, :placeholder, :required, :choices, :checkbox_label
expose :title do |field|
non_empty_password?(field) ? field[:non_empty_password_title] : field[:title]
end
expose :help do |field|
non_empty_password?(field) ? field[:non_empty_password_help] : field[:help]
end
expose :value do |field|
value = value_for(field)
if non_empty_password?(field)
'true'
elsif field[:type] == 'checkbox'
ActiveRecord::Type::Boolean.new.deserialize(value).to_s
else
value
end
end
private
def service
request.service
end
def value_for(field)
strong_memoize(:value_for) do
# field[:name] is not user input and so can assume is safe
service.public_send(field[:name]) # rubocop:disable GitlabSecurity/PublicSend
end
end
def non_empty_password?(field)
strong_memoize(:non_empty_password) do
field[:type] == 'password' && value_for(field).present?
end
end
end

View File

@ -1,5 +0,0 @@
# frozen_string_literal: true
class ServiceFieldSerializer < BaseSerializer
entity ServiceFieldEntity
end

View File

@ -25,8 +25,7 @@ module ResourceEvents
def apply_common_filters(events)
events = apply_pagination(events)
events = apply_last_fetched_at(events)
apply_fetch_until(events)
apply_last_fetched_at(events)
end
def apply_pagination(events)
@ -44,12 +43,6 @@ module ResourceEvents
events.created_after(last_fetched_at)
end
def apply_fetch_until(events)
return events unless params[:fetch_until].present?
events.created_on_or_before(params[:fetch_until])
end
def resource_parent
strong_memoize(:resource_parent) do
resource.project || resource.group

View File

@ -62,13 +62,10 @@
= render "projects/merge_requests/widget"
= render "projects/merge_requests/awards_block"
- if mr_action === "show"
- if Feature.enabled?(:paginated_notes, @project)
- add_page_startup_api_call notes_url
- else
- add_page_startup_api_call Feature.enabled?(:paginated_mr_discussions, @project) ? discussions_path(@merge_request, per_page: 20) : discussions_path(@merge_request)
- add_page_startup_api_call Feature.enabled?(:paginated_mr_discussions, @project) ? discussions_path(@merge_request, per_page: 20) : discussions_path(@merge_request)
- add_page_startup_api_call widget_project_json_merge_request_path(@project, @merge_request, format: :json)
- add_page_startup_api_call cached_widget_project_json_merge_request_path(@project, @merge_request, format: :json)
#js-vue-mr-discussions{ data: { notes_data: notes_data(@merge_request, Feature.enabled?(:paginated_notes, @project)).to_json,
#js-vue-mr-discussions{ data: { notes_data: notes_data(@merge_request).to_json,
endpoint_metadata: @endpoint_metadata_url,
noteable_data: serialize_issuable(@merge_request, serializer: 'noteable'),
noteable_type: 'MergeRequest',

View File

@ -2,7 +2,7 @@
- deploy_key = local_assigns.fetch(:deploy_key)
- deploy_keys_project = deploy_key.deploy_keys_project_for(@project)
= form_errors(deploy_key)
= form_errors(deploy_key, pajamas_alert: true)
.form-group
= form.label :title, class: 'col-form-label col-sm-2'

View File

@ -1,6 +1,6 @@
- page_info = { last_commit_sha: @page.last_commit_sha, persisted: @page.persisted?, title: @page.title, content: @page.content || '', format: @page.format.to_s, uploads_path: uploads_path, path: wiki_page_path(@wiki, @page), wiki_path: wiki_path(@wiki), help_path: help_page_path('user/project/wiki/index'), markdown_help_path: help_page_path('user/markdown'), markdown_preview_path: wiki_page_path(@wiki, @page, action: :preview_markdown), create_path: wiki_path(@wiki, action: :create) }
.gl-mt-3
= form_errors(@page, truncate: :title)
= form_errors(@page, truncate: :title, pajamas_alert: true)
#js-wiki-form{ data: { page_info: page_info.to_json, format_options: wiki_markup_hash_by_name_id.to_json } }

View File

@ -939,6 +939,15 @@
:weight: 1
:idempotent: false
:tags: []
- :name: gcp_cluster:clusters_applications_activate_integration
:worker_name: Clusters::Applications::ActivateIntegrationWorker
:feature_category: :kubernetes_management
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: false
:tags: []
- :name: gcp_cluster:clusters_applications_activate_service
:worker_name: Clusters::Applications::ActivateServiceWorker
:feature_category: :kubernetes_management
@ -948,6 +957,15 @@
:weight: 1
:idempotent: false
:tags: []
- :name: gcp_cluster:clusters_applications_deactivate_integration
:worker_name: Clusters::Applications::DeactivateIntegrationWorker
:feature_category: :kubernetes_management
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: false
:tags: []
- :name: gcp_cluster:clusters_applications_deactivate_service
:worker_name: Clusters::Applications::DeactivateServiceWorker
:feature_category: :kubernetes_management

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
module Clusters
module Applications
class ActivateIntegrationWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
data_consistency :always
sidekiq_options retry: 3
include ClusterQueue
loggable_arguments 1
def perform(cluster_id, integration_name)
cluster = Clusters::Cluster.find_by_id(cluster_id)
return unless cluster
cluster.all_projects.find_each do |project|
project.find_or_initialize_integration(integration_name).update!(active: true)
end
end
end
end
end

View File

@ -1,25 +1,12 @@
# frozen_string_literal: true
# This worker was renamed in 15.1; we can delete it in 15.2.
# See: https://gitlab.com/gitlab-org/gitlab/-/issues/364112
#
# rubocop:disable Scalability/IdempotentWorker
module Clusters
module Applications
class ActivateServiceWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
data_consistency :always
sidekiq_options retry: 3
include ClusterQueue
loggable_arguments 1
def perform(cluster_id, service_name)
cluster = Clusters::Cluster.find_by_id(cluster_id)
return unless cluster
cluster.all_projects.find_each do |project|
project.find_or_initialize_integration(service_name).update!(active: true)
end
end
class ActivateServiceWorker < ActivateIntegrationWorker
end
end
end

View File

@ -0,0 +1,39 @@
# frozen_string_literal: true
module Clusters
module Applications
class DeactivateIntegrationWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
data_consistency :always
sidekiq_options retry: 3
include ClusterQueue
loggable_arguments 1
def perform(cluster_id, integration_name)
cluster = Clusters::Cluster.find_by_id(cluster_id)
raise cluster_missing_error(integration_name) unless cluster
integration_class = Integration.integration_name_to_model(integration_name)
integration_association_name = ::Project.integration_association_name(integration_name).to_sym
projects = cluster.all_projects
.with_integration(integration_class)
.include_integration(integration_association_name)
projects.find_each do |project|
project.public_send(integration_association_name).update!(active: false) # rubocop:disable GitlabSecurity/PublicSend
end
end
def cluster_missing_error(integration_name)
ActiveRecord::RecordNotFound.new(
"Can't deactivate #{integration_name} integrations, host cluster not found! " \
"Some inconsistent records may be left in database."
)
end
end
end
end

View File

@ -1,32 +1,12 @@
# frozen_string_literal: true
# This worker was renamed in 15.1; we can delete it in 15.2.
# See: https://gitlab.com/gitlab-org/gitlab/-/issues/364112
#
# rubocop:disable Scalability/IdempotentWorker
module Clusters
module Applications
class DeactivateServiceWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
data_consistency :always
sidekiq_options retry: 3
include ClusterQueue
loggable_arguments 1
def perform(cluster_id, integration_name)
cluster = Clusters::Cluster.find_by_id(cluster_id)
raise cluster_missing_error(integration_name) unless cluster
integration_class = Integration.integration_name_to_model(integration_name)
integration_association_name = ::Project.integration_association_name(integration_name).to_sym
cluster.all_projects.with_integration(integration_class).include_integration(integration_association_name).find_each do |project|
project.public_send(integration_association_name).update!(active: false) # rubocop:disable GitlabSecurity/PublicSend
end
end
def cluster_missing_error(integration_name)
ActiveRecord::RecordNotFound.new("Can't deactivate #{integration_name} integrations, host cluster not found! Some inconsistent records may be left in database.")
end
class DeactivateServiceWorker < DeactivateIntegrationWorker
end
end
end

View File

@ -62,7 +62,7 @@ module LimitedCapacity
end
def with_redis(&block)
Gitlab::Redis::Queues.with(&block) # rubocop: disable CodeReuse/ActiveRecord
Gitlab::Redis::SharedState.with(&block) # rubocop: disable CodeReuse/ActiveRecord
end
end
end

View File

@ -1,8 +0,0 @@
---
name: deployment_environment_manual_actions
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87138
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/362824
milestone: '15.1'
type: development
group: group::release
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: paginated_notes
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/34628
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/254987
milestone: '13.2'
type: development
group: group::code review
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: use_primary_and_secondary_stores_for_sidekiq_status
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/
rollout_issue_url:
milestone:
type: development
group: group::scalability
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: use_primary_store_as_default_for_sidekiq_status
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/
rollout_issue_url:
milestone:
type: development
group: group::scalability
default_enabled: false

View File

@ -20,3 +20,4 @@ Gitlab::Redis::TraceChunks.with { nil }
Gitlab::Redis::RateLimiting.with { nil }
Gitlab::Redis::Sessions.with { nil }
Gitlab::Redis::DuplicateJobs.with { nil }
Gitlab::Redis::SidekiqStatus.with { nil }

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class MigrateClusterIntegrationWorkerQueues < Gitlab::Database::Migration[2.0]
def up
sidekiq_queue_migrate 'gcp_cluster:clusters_applications_activate_service',
to: 'gcp_cluster:clusters_applications_activate_integration'
sidekiq_queue_migrate 'gcp_cluster:clusters_applications_deactivate_service',
to: 'gcp_cluster:clusters_applications_deactivate_integration'
end
def down
sidekiq_queue_migrate 'gcp_cluster:clusters_applications_activate_integration',
to: 'gcp_cluster:clusters_applications_activate_service'
sidekiq_queue_migrate 'gcp_cluster:clusters_applications_deactivate_integration',
to: 'gcp_cluster:clusters_applications_deactivate_service'
end
end

View File

@ -0,0 +1 @@
37f90c51322b961933b0aaa5b4d2b8d1f82bd3ee42209b241b9e1198f16adb59

View File

@ -15,21 +15,23 @@ page](https://about.gitlab.com/install/).
The following lists the currently supported OSs and their possible EOL dates.
| OS Version | First supported GitLab version | Arch | OS EOL | Details |
| ---------------- | ------------------------------ | --------------- | ------------- | ------------------------------------------------------------ |
| AlmaLinux 8 | GitLab CE / GitLab EE 14.5.0 | x86_64, aarch64 | 2029 | <https://almalinux.org/> |
| CentOS 7 | GitLab CE / GitLab EE 7.10.0 | x86_64 | June 2024 | <https://wiki.centos.org/About/Product> |
| CentOS 8 | GitLab CE / GitLab EE 12.8.1 | x86_64, aarch64 | Dec 2021 | <https://wiki.centos.org/About/Product> |
| Debian 9 | GitLab CE / GitLab EE 9.3.0 | amd64 | 2022 | <https://wiki.debian.org/LTS> |
| Debian 10 | GitLab CE / GitLab EE 12.2.0 | amd64, arm64 | 2024 | <https://wiki.debian.org/LTS> |
| Debian 11 | GitLab CE / GitLab EE 14.6.0 | amd64, arm64 | 2026 | <https://wiki.debian.org/LTS> |
| OpenSUSE 15.3 | GitLab CE / GitLab EE 14.5.0 | x86_64, aarch64 | Nov 2022 | <https://en.opensuse.org/Lifetime> |
| RHEL 8 | GitLab CE / GitLab EE 12.8.1 | x86_64, arm64 | May 2024 | <https://access.redhat.com/support/policy/updates/errata/#Life_Cycle_Dates> |
| SLES 12 | GitLab EE 9.0.0 | x86_64 | Oct 2027 | <https://www.suse.com/lifecycle/> |
| Ubuntu 18.04 | GitLab CE / GitLab EE 10.7.0 | amd64 | April 2023 | <https://wiki.ubuntu.com/Releases> |
| Ubuntu 20.04 | GitLab CE / GitLab EE 13.2.0 | amd64, arm64 | April 2025 | <https://wiki.ubuntu.com/Releases> |
| Amazon Linux 2 | GitLab CE / GitLab EE 14.9.0 | amd64, arm64 | June 2023 | <https://aws.amazon.com/amazon-linux-2/faqs/> |
| Raspberry Pi OS (Buster) (formerly known as Raspbian Buster) | GitLab CE 12.2.0 | armhf | 2024 | <https://www.raspberrypi.com/news/new-old-functionality-with-raspberry-pi-os-legacy/> |
| OS Version | First supported GitLab version | Arch | Install Docs | OS EOL | Details |
| ------------------------------------------------------------ | ------------------------------ | --------------- | :----------------------------------------------------------: | ---------- | ------------------------------------------------------------ |
| AlmaLinux 8 | GitLab CE / GitLab EE 14.5.0 | x86_64, aarch64 | [AlmaLinux Install Docs](https://about.gitlab.com/install/#almalinux-8) | 2029 | <https://almalinux.org/> |
| CentOS 7 | GitLab CE / GitLab EE 7.10.0 | x86_64 | [CentOS Install Docs](https://about.gitlab.com/install/#centos-7) | June 2024 | <https://wiki.centos.org/About/Product> |
| Debian 9 | GitLab CE / GitLab EE 9.3.0 | amd64 | [Debian Install Docs](https://about.gitlab.com/install/#debian) | 2022 | <https://wiki.debian.org/LTS> |
| Debian 10 | GitLab CE / GitLab EE 12.2.0 | amd64, arm64 | [Debian Install Docs](https://about.gitlab.com/install/#debian) | 2024 | <https://wiki.debian.org/LTS> |
| Debian 11 | GitLab CE / GitLab EE 14.6.0 | amd64, arm64 | [Debian Install Docs](https://about.gitlab.com/install/#debian) | 2026 | <https://wiki.debian.org/LTS> |
| OpenSUSE 15.3 | GitLab CE / GitLab EE 14.5.0 | x86_64, aarch64 | [OpenSUSE Install Docs](https://about.gitlab.com/install/#opensuse-leap-15-3) | Nov 2022 | <https://en.opensuse.org/Lifetime> |
| RHEL 8 | GitLab CE / GitLab EE 12.8.1 | x86_64, arm64 | [Use CentOS Install Docs](https://about.gitlab.com/install/#centos-7) | May 2024 | [RHEL Details](https://access.redhat.com/support/policy/updates/errata/#Life_Cycle_Dates) |
| SLES 12 | GitLab EE 9.0.0 | x86_64 | [Use OpenSUSE Install Docs](https://about.gitlab.com/install/#opensuse-leap-15-3) | Oct 2027 | <https://www.suse.com/lifecycle/> |
| Oracle Linux | GitLab CE / GitLab EE 8.14.0 | x86_64 | [Use CentOS Install Docs](https://about.gitlab.com/install/#centos-7) | Jul 2024 | <https://www.oracle.com/a/ocom/docs/elsp-lifetime-069338.pdf> |
| Scientific Linux | GitLab CE / GitLab EE 8.14.0 | x86_64 | [Use CentOS Install Docs](https://about.gitlab.com/install/#centos-7) | June 2024 | <https://scientificlinux.org/downloads/sl-versions/sl7/> |
| Ubuntu 18.04 | GitLab CE / GitLab EE 10.7.0 | amd64 | [Ubuntu Install Docs](https://about.gitlab.com/install/#ubuntu) | April 2023 | <https://wiki.ubuntu.com/Releases> |
| Ubuntu 20.04 | GitLab CE / GitLab EE 13.2.0 | amd64, arm64 | [Ubuntu Install Docs](https://about.gitlab.com/install/#ubuntu) | April 2025 | <https://wiki.ubuntu.com/Releases> |
| Amazon Linux 2                                                | GitLab CE / GitLab EE 14.9.0   | amd64, arm64    | [Amazon Linux 2 Install Docs](https://about.gitlab.com/install/#amazonlinux-2) | June 2023  | <https://aws.amazon.com/amazon-linux-2/faqs/> |
| Raspberry Pi OS (Buster) (formerly known as Raspbian Buster) | GitLab CE 12.2.0 | armhf | [Raspberry Pi Install Docs](https://about.gitlab.com/install/#raspberry-pi-os) | 2024 | [Raspberry Pi Details](https://www.raspberrypi.com/news/new-old-functionality-with-raspberry-pi-os-legacy/) |
NOTE:
CentOS 8 was EOL on December 31, 2021. In GitLab 14.5 and later,
@ -82,6 +84,7 @@ release for them can be found below:
| Raspbian Stretch | [June 2020](https://downloads.raspberrypi.org/raspbian/images/raspbian-2019-04-09/) | [GitLab CE](https://packages.gitlab.com/app/gitlab/raspberry-pi2/search?q=gitlab-ce_13.3&dist=raspbian%2Fstretch) 13.3 |
| Debian Jessie | [June 2020](https://www.debian.org/News/2020/20200709) | [GitLab CE](https://packages.gitlab.com/app/gitlab/gitlab-ce/search?q=gitlab-ce_13.2&dist=debian%2Fjessie) / [GitLab EE](https://packages.gitlab.com/app/gitlab/gitlab-ee/search?q=gitlab-ee_13.2&dist=debian%2Fjessie) 13.3 |
| CentOS 6 | [November 2020](https://wiki.centos.org/About/Product) | [GitLab CE](https://packages.gitlab.com/app/gitlab/gitlab-ce/search?q=13.6&filter=all&filter=all&dist=el%2F6) / [GitLab EE](https://packages.gitlab.com/app/gitlab/gitlab-ee/search?q=13.6&filter=all&filter=all&dist=el%2F6) 13.6 |
| CentOS 8 | [December 2021](https://wiki.centos.org/About/Product) | [GitLab CE](https://packages.gitlab.com/app/gitlab/gitlab-ce/search?q=14.6&filter=all&filter=all&dist=el%2F8) / [GitLab EE](https://packages.gitlab.com/app/gitlab/gitlab-ee/search?q=14.6&filter=all&filter=all&dist=el%2F8) 14.6 |
| OpenSUSE 15.1 | [November 2020](https://en.opensuse.org/Lifetime#Discontinued_distributions) | [GitLab CE](https://packages.gitlab.com/app/gitlab/gitlab-ce/search?q=gitlab-ce-13.12&dist=opensuse%2F15.1) / [GitLab EE](https://packages.gitlab.com/app/gitlab/gitlab-ee/search?q=gitlab-ee-13.12&dist=opensuse%2F15.1) 13.12 |
| Ubuntu 16.04 | [April 2021](https://ubuntu.com/info/release-end-of-life) | [GitLab CE](https://packages.gitlab.com/app/gitlab/gitlab-ce/search?q=gitlab-ce_13.12&dist=ubuntu%2Fxenial) / [GitLab EE](https://packages.gitlab.com/app/gitlab/gitlab-ee/search?q=gitlab-ee_13.12&dist=ubuntu%2Fxenial) 13.12 |
| OpenSUSE 15.2 | [December 2021](https://en.opensuse.org/Lifetime#Discontinued_distributions) | [GitLab CE](https://packages.gitlab.com/app/gitlab/gitlab-ce/search?q=gitlab-ce-14.7&dist=opensuse%2F15.2) / [GitLab EE](https://packages.gitlab.com/app/gitlab/gitlab-ee/search?q=gitlab-ee-14.7&dist=opensuse%2F15.2) 14.7 |

View File

@ -39,8 +39,8 @@ for clarity, they define different metric names:
1. `gitlab_sli:foo_apdex:success_total` for the number of successful
measurements.
1. `Gitlab::Metrics::Sli::ErrorRate.new('foo')` defines:
1. `gitlab_sli:foo_error_rate:total` for the total number of measurements.
1. `gitlab_sli:foo_error_rate:error_total` for the number of error
1. `gitlab_sli:foo:total` for the total number of measurements.
1. `gitlab_sli:foo:error_total` for the number of error
measurements - as this is an error rate, it's more natural to talk about
errors divided by the total.
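A minimal sketch of the resulting naming scheme (the interpolation mirrors the `counter_name` methods changed later in this commit; the counters themselves are registered through Prometheus helpers not shown here):

```ruby
name = 'foo'

# Apdex keeps the _apdex suffix in its metric names:
apdex_total   = :"gitlab_sli:#{name}_apdex:total"
apdex_success = :"gitlab_sli:#{name}_apdex:success_total"

# ErrorRate now uses the bare SLI name (no _error_rate suffix):
error_total   = :"gitlab_sli:#{name}:total"
error_errors  = :"gitlab_sli:#{name}:error_total"
```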

View File

@ -108,7 +108,7 @@ sudo gitlab-rails runner -e production 'puts Gitlab::Database::BackgroundMigrati
```shell
cd /home/git/gitlab
sudo -u git -H bundle exec rails runner -e production 'puts Gitlab::BackgroundMigration.remaining'
sudo -u git -H bundle exec rails runner -e production 'puts Gitlab::Database::BackgroundMigrationJob.pending.count'
sudo -u git -H bundle exec rails runner -e production 'puts Gitlab::Database::BackgroundMigration::BatchedMigration.queued.count'
```
#### Failed migrations

View File

@ -23,7 +23,9 @@ After you select and upload an image, select **Update appearance settings** at t
of the page to activate it in the GitLab instance.
NOTE:
GitLab pipeline emails also display the custom logo.
GitLab pipeline emails also display the custom logo, unless the logo is in SVG format. If the
custom logo is in SVG format, the default logo is used instead because the SVG format is not
supported by many email clients.
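The behaviour described above corresponds to the guard added to `EmailsHelper#header_logo` earlier in this commit; a sketch of that condition, using the helper's own `current_appearance`:

```ruby
# Sketch only: use the custom logo in emails unless it is an SVG file.
use_custom_logo =
  current_appearance&.header_logo? &&
  !current_appearance.header_logo.filename.ends_with?('.svg')
```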
## Favicon

View File

@ -7,16 +7,11 @@ module API
expose :id
expose :name
expose :url
expose :direct_asset_url
expose :direct_asset_url do |link|
::Releases::LinkPresenter.new(link).direct_asset_url
end
expose :external?, as: :external
expose :link_type
def direct_asset_url
return object.url unless object.filepath
release = object.release.present
release.download_url(object.filepath)
end
end
end
end

View File

@ -9,11 +9,13 @@ module Gitlab
include ::Gitlab::Config::Entry::Configurable
include ::Gitlab::Config::Entry::Attributable
CLAUSES = %i[if changes exists].freeze
ALLOWED_KEYS = %i[if changes exists when start_in allow_failure variables].freeze
ALLOWABLE_WHEN = %w[on_success on_failure always never manual delayed].freeze
ALLOWED_KEYS = %i[if changes exists when start_in allow_failure variables].freeze
ALLOWED_WHEN = %w[on_success on_failure always never manual delayed].freeze
attributes :if, :changes, :exists, :when, :start_in, :allow_failure
attributes :if, :exists, :when, :start_in, :allow_failure
entry :changes, Entry::Rules::Rule::Changes,
description: 'File change condition rule.'
entry :variables, Entry::Variables,
description: 'Environment variables to define for rule conditions.'
@ -28,8 +30,8 @@ module Gitlab
with_options allow_nil: true do
validates :if, expression: true
validates :changes, :exists, array_of_strings: true, length: { maximum: 50 }
validates :when, allowed_values: { in: ALLOWABLE_WHEN }
validates :exists, array_of_strings: true, length: { maximum: 50 }
validates :when, allowed_values: { in: ALLOWED_WHEN }
validates :allow_failure, boolean: true
end
@ -41,6 +43,13 @@ module Gitlab
end
end
def value
config.merge(
changes: (changes_value if changes_defined?),
variables: (variables_value if variables_defined?)
).compact
end
def specifies_delay?
self.when == 'delayed'
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module Entry
class Rules
class Rule
class Changes < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Validatable
validations do
validates :config,
array_of_strings: true,
length: { maximum: 50, too_long: "has too many entries (maximum %{count})" }
end
end
end
end
end
end
end
end

View File

@ -20,7 +20,7 @@ variables:
SECURE_BINARIES_ANALYZERS: >-
bandit, brakeman, gosec, spotbugs, flawfinder, phpcs-security-audit, security-code-scan, nodejs-scan, eslint, secrets, sobelow, pmd-apex, kics, kubesec, semgrep, gemnasium, gemnasium-maven, gemnasium-python,
license-finder,
dast, dast-runner-validation, api-fuzzing
dast, dast-runner-validation, api-security
SECURE_BINARIES_DOWNLOAD_IMAGES: "true"
SECURE_BINARIES_PUSH_IMAGES: "true"
@ -252,11 +252,11 @@ dast-runner-validation:
- $SECURE_BINARIES_DOWNLOAD_IMAGES == "true" &&
$SECURE_BINARIES_ANALYZERS =~ /\bdast-runner-validation\b/
api-fuzzing:
api-security:
extends: .download_images
variables:
SECURE_BINARIES_ANALYZER_VERSION: "1"
SECURE_BINARIES_ANALYZER_VERSION: "2"
only:
variables:
- $SECURE_BINARIES_DOWNLOAD_IMAGES == "true" &&
$SECURE_BINARIES_ANALYZERS =~ /\bapi-fuzzing\b/
$SECURE_BINARIES_ANALYZERS =~ /\bapi-security\b/

View File

@ -15,7 +15,7 @@ module Gitlab
def execute!(stream)
clone_file!(stream, JobArtifactUploader.workhorse_upload_path) do |clone_path|
md5_checksum = self.class.md5_hexdigest(clone_path)
md5_checksum = self.class.md5_hexdigest(clone_path) unless Gitlab::FIPS.enabled?
sha256_checksum = self.class.sha256_hexdigest(clone_path)
job.transaction do
@ -24,7 +24,7 @@ module Gitlab
end
end
validate_archived_trace
validate_archived_trace unless Gitlab::FIPS.enabled?
end
private

View File

@ -68,10 +68,6 @@ module Gitlab
prometheus.counter(counter_name('total'), "Total number of measurements for #{name}")
end
def counter_name(suffix)
:"#{COUNTER_PREFIX}:#{name}_#{self.class.name.demodulize.underscore}:#{suffix}"
end
def prometheus
Gitlab::Metrics
end
@ -85,6 +81,10 @@ module Gitlab
private
def counter_name(suffix)
:"#{COUNTER_PREFIX}:#{name}_apdex:#{suffix}"
end
def numerator_counter
prometheus.counter(counter_name('success_total'), "Number of successful measurements for #{name}")
end
@ -99,6 +99,10 @@ module Gitlab
private
def counter_name(suffix)
:"#{COUNTER_PREFIX}:#{name}:#{suffix}"
end
def numerator_counter
prometheus.counter(counter_name('error_total'), "Number of error measurements for #{name}")
end

View File

@ -14,7 +14,13 @@ module Gitlab
def redis
primary_store = ::Redis.new(Gitlab::Redis::SharedState.params)
secondary_store = ::Redis.new(Gitlab::Redis::Queues.params)
# `Sidekiq.redis` is a namespaced redis connection. This means keys are actually being stored under
# "resque:gitlab:resque:gitlab:duplicate:". For backwards compatibility, we make the secondary store
# namespaced in the same way, but omit it from the primary so keys have proper format there.
secondary_store = ::Redis::Namespace.new(
Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE, redis: ::Redis.new(Gitlab::Redis::Queues.params)
)
MultiStore.new(primary_store, secondary_store, name.demodulize)
end

View File

@ -157,8 +157,7 @@ module Gitlab
def log_error(exception, command_name, extra = {})
Gitlab::ErrorTracking.log_exception(
exception,
command_name: command_name,
extra: extra.merge(instance_name: instance_name))
extra.merge(command_name: command_name, instance_name: instance_name))
end
private
@ -285,12 +284,16 @@ module Gitlab
@instance = nil
end
def redis_store?(store)
store.is_a?(::Redis) || store.is_a?(::Redis::Namespace)
end
def validate_stores!
raise ArgumentError, 'primary_store is required' unless primary_store
raise ArgumentError, 'secondary_store is required' unless secondary_store
raise ArgumentError, 'instance_name is required' unless instance_name
raise ArgumentError, 'invalid primary_store' unless primary_store.is_a?(::Redis)
raise ArgumentError, 'invalid secondary_store' unless secondary_store.is_a?(::Redis)
raise ArgumentError, 'invalid primary_store' unless redis_store?(primary_store)
raise ArgumentError, 'invalid secondary_store' unless redis_store?(secondary_store)
end
end
end

View File

@ -0,0 +1,24 @@
# frozen_string_literal: true
module Gitlab
module Redis
# Pseudo-store to transition `Gitlab::SidekiqStatus` from
# using `Sidekiq.redis` to using the `SharedState` redis store.
class SidekiqStatus < ::Gitlab::Redis::Wrapper
class << self
def store_name
'SharedState'
end
private
def redis
primary_store = ::Redis.new(Gitlab::Redis::SharedState.params)
secondary_store = ::Redis.new(Gitlab::Redis::Queues.params)
MultiStore.new(primary_store, secondary_store, name.demodulize)
end
end
end
end
end

View File

@ -36,7 +36,7 @@ module Gitlab
def self.set(jid, expire = DEFAULT_EXPIRATION)
return unless expire
Sidekiq.redis do |redis|
with_redis do |redis|
redis.set(key_for(jid), 1, ex: expire)
end
end
@ -45,7 +45,7 @@ module Gitlab
#
# jid - The Sidekiq job ID to remove.
def self.unset(jid)
Sidekiq.redis do |redis|
with_redis do |redis|
redis.del(key_for(jid))
end
end
@ -94,8 +94,7 @@ module Gitlab
keys = job_ids.map { |jid| key_for(jid) }
Sidekiq
.redis { |redis| redis.mget(*keys) }
with_redis { |redis| redis.mget(*keys) }
.map { |result| !result.nil? }
end
@ -118,5 +117,18 @@ module Gitlab
def self.key_for(jid)
STATUS_KEY % jid
end
def self.with_redis
if Feature.enabled?(:use_primary_and_secondary_stores_for_sidekiq_status) ||
Feature.enabled?(:use_primary_store_as_default_for_sidekiq_status)
# TODO: Swap for Gitlab::Redis::SharedState after store transition
# https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/923
Gitlab::Redis::SidekiqStatus.with { |redis| yield redis }
else
# Keep the old behavior intact if neither feature flag is turned on
Sidekiq.redis { |redis| yield redis }
end
end
private_class_method :with_redis
end
end

View File

@ -1,74 +0,0 @@
# frozen_string_literal: true
module Gitlab
# UpdatedNotesPaginator implements a rudimentary form of keyset pagination on
# top of a notes relation that has been initialized with a `last_fetched_at`
# value. This class will attempt to limit the number of notes returned, and
# specify a new value for `last_fetched_at` that will pick up where the last
# page of notes left off.
class UpdatedNotesPaginator
LIMIT = 50
MICROSECOND = 1_000_000
attr_reader :next_fetched_at, :notes
def initialize(relation, last_fetched_at:)
@last_fetched_at = last_fetched_at
@now = Time.current
notes, more = fetch_page(relation)
if more
init_middle_page(notes)
else
init_final_page(notes)
end
end
def metadata
{ last_fetched_at: next_fetched_at_microseconds, more: more }
end
private
attr_reader :last_fetched_at, :more, :now
def next_fetched_at_microseconds
(next_fetched_at.to_i * MICROSECOND) + next_fetched_at.usec
end
def fetch_page(relation)
relation = relation.order_updated_asc.with_order_id_asc
notes = relation.limit(LIMIT + 1).to_a
return [notes, false] unless notes.size > LIMIT
marker = notes.pop # Remove the marker note
# Although very unlikely, it is possible that more notes with the same
# updated_at may exist, e.g., if created in bulk. Add them all to the page
# if this is detected, so pagination won't get stuck indefinitely
if notes.last.updated_at == marker.updated_at
notes += relation
.with_updated_at(marker.updated_at)
.id_not_in(notes.map(&:id))
.to_a
end
[notes, true]
end
def init_middle_page(notes)
@more = true
# The fetch overlap can be ignored if we're in an intermediate page.
@next_fetched_at = notes.last.updated_at + NotesFinder::FETCH_OVERLAP
@notes = notes
end
def init_final_page(notes)
@more = false
@next_fetched_at = now
@notes = notes
end
end
end
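For reference, the boundary value the removed paginator handed back was the updated_at timestamp encoded as integer microseconds; a minimal illustration (not part of the diff):
# Illustrative only: how next_fetched_at_microseconds encoded a Time value.
t = Time.current
boundary = (t.to_i * 1_000_000) + t.usec
Time.zone.at(boundary / 1_000_000.0) # recovers t to microsecond precision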

View File

@ -46,6 +46,9 @@ msgstr ""
msgid " or %{emphasisStart}!merge request id%{emphasisEnd}"
msgstr ""
msgid " or %{emphasisStart}#id%{emphasisEnd}"
msgstr ""
msgid " or %{emphasisStart}#issue id%{emphasisEnd}"
msgstr ""
@ -27488,6 +27491,9 @@ msgstr ""
msgid "Paste issue link"
msgstr ""
msgid "Paste link"
msgstr ""
msgid "Paste project path (i.e. gitlab-org/gitlab)"
msgstr ""
@ -31462,6 +31468,9 @@ msgstr ""
msgid "Related feature flags"
msgstr ""
msgid "Related incidents or issues"
msgstr ""
msgid "Related issues"
msgstr ""
@ -37934,6 +37943,9 @@ msgstr ""
msgid "The current epic"
msgstr ""
msgid "The current incident"
msgstr ""
msgid "The current issue"
msgstr ""
@ -46085,6 +46097,9 @@ msgstr ""
msgid "the following epic(s)"
msgstr ""
msgid "the following incident(s) or issue(s)"
msgstr ""
msgid "the following issue(s)"
msgstr ""

View File

@ -21,7 +21,7 @@ module QA
file.project = project
file.commit_message = 'Add new file'
file.name = "test-folder/#{file_name}"
file.content = "### Heading\n\n[Gitlab link](https://gitlab.com/)"
file.content = "### Heading\n\n[Example link](https://example.com/)"
end
project.visit!
@ -35,9 +35,9 @@ module QA
aggregate_failures 'markdown file contents' do
expect(show).to have_content('Heading')
expect(show).to have_content('Gitlab link')
expect(show).to have_content('Example link')
expect(show).not_to have_content('###')
expect(show).not_to have_content('https://gitlab.com/')
expect(show).not_to have_content('https://example.com/')
end
end
end

View File

@ -9,7 +9,7 @@ module QA
snippet.description = ' '
snippet.visibility = 'Private'
snippet.file_name = 'markdown_file.md'
snippet.file_content = "### Snippet heading\n\n[Gitlab link](https://gitlab.com/)"
snippet.file_content = "### Snippet heading\n\n[Example link](https://example.com/)"
end
end
@ -30,9 +30,9 @@ module QA
expect(snippet).to have_visibility_type(/private/i)
expect(snippet).to have_file_name('markdown_file.md')
expect(snippet).to have_file_content('Snippet heading')
expect(snippet).to have_file_content('Gitlab link')
expect(snippet).to have_file_content('Example link')
expect(snippet).not_to have_file_content('###')
expect(snippet).not_to have_file_content('https://gitlab.com/')
expect(snippet).not_to have_file_content('https://example.com/')
end
end
end

View File

@ -138,5 +138,35 @@ RSpec.describe Pajamas::AlertComponent, :aggregate_failures, type: :component do
end
end
end
context 'with alert_options' do
let(:options) { { alert_options: { id: 'test_id', class: 'baz', data: { foo: 'bar' } } } }
before do
render_inline described_class.new(**options)
end
it 'renders the extra options' do
expect(rendered_component).to have_css "#test_id.gl-alert.baz[data-foo='bar']"
end
context 'with custom classes or data' do
let(:options) do
{
variant: :danger,
alert_class: 'custom',
alert_data: { foo: 'bar' },
alert_options: {
class: 'extra special',
data: { foo: 'conflict' }
}
}
end
it 'doesn\'t conflict with internal alert_class or alert_data' do
expect(rendered_component).to have_css ".extra.special.custom.gl-alert.gl-alert-danger[data-foo='bar']"
end
end
end
end
end

View File

@ -84,100 +84,6 @@ RSpec.describe Projects::NotesController do
end
end
context 'for multiple pages of notes', :aggregate_failures do
# 3 pages worth: 1 normal page, 1 oversized due to clashing updated_at,
# and a final, short page
let!(:page_1) { create_list(:note, 2, noteable: issue, project: project, updated_at: 3.days.ago) }
let!(:page_2) { create_list(:note, 3, noteable: issue, project: project, updated_at: 2.days.ago) }
let!(:page_3) { create_list(:note, 2, noteable: issue, project: project, updated_at: 1.day.ago) }
# Include a resource event in the middle page as well
let!(:resource_event) { create(:resource_state_event, issue: issue, user: user, created_at: 2.days.ago) }
let(:page_1_boundary) { microseconds(page_1.last.updated_at + NotesFinder::FETCH_OVERLAP) }
let(:page_2_boundary) { microseconds(page_2.last.updated_at + NotesFinder::FETCH_OVERLAP) }
around do |example|
freeze_time do
example.run
end
end
before do
stub_const('Gitlab::UpdatedNotesPaginator::LIMIT', 2)
end
context 'feature flag enabled' do
before do
stub_feature_flags(paginated_notes: true)
end
it 'returns the first page of notes' do
expect(Gitlab::EtagCaching::Middleware).to receive(:skip!)
get :index, params: request_params
expect(json_response['notes'].count).to eq(page_1.count)
expect(json_response['more']).to be_truthy
expect(json_response['last_fetched_at']).to eq(page_1_boundary)
expect(response.headers['Poll-Interval'].to_i).to eq(1)
end
it 'returns the second page of notes' do
expect(Gitlab::EtagCaching::Middleware).to receive(:skip!)
request.headers['X-Last-Fetched-At'] = page_1_boundary
get :index, params: request_params
expect(json_response['notes'].count).to eq(page_2.count + 1) # resource event
expect(json_response['more']).to be_truthy
expect(json_response['last_fetched_at']).to eq(page_2_boundary)
expect(response.headers['Poll-Interval'].to_i).to eq(1)
end
it 'returns the final page of notes' do
expect(Gitlab::EtagCaching::Middleware).to receive(:skip!)
request.headers['X-Last-Fetched-At'] = page_2_boundary
get :index, params: request_params
expect(json_response['notes'].count).to eq(page_3.count)
expect(json_response['more']).to be_falsy
expect(json_response['last_fetched_at']).to eq(microseconds(Time.zone.now))
expect(response.headers['Poll-Interval'].to_i).to be > 1
end
it 'returns an empty page of notes' do
expect(Gitlab::EtagCaching::Middleware).not_to receive(:skip!)
request.headers['X-Last-Fetched-At'] = microseconds(Time.zone.now)
get :index, params: request_params
expect(json_response['notes']).to be_empty
expect(json_response['more']).to be_falsy
expect(json_response['last_fetched_at']).to eq(microseconds(Time.zone.now))
expect(response.headers['Poll-Interval'].to_i).to be > 1
end
end
context 'feature flag disabled' do
before do
stub_feature_flags(paginated_notes: false)
end
it 'returns all notes' do
get :index, params: request_params
expect(json_response['notes'].count).to eq((page_1 + page_2 + page_3).size + 1)
expect(json_response['more']).to be_falsy
expect(json_response['last_fetched_at']).to eq(microseconds(Time.zone.now))
end
end
end
context 'for a discussion note' do
let(:project) { create(:project, :repository) }
let!(:note) { create(:discussion_note_on_merge_request, project: project) }

View File

@ -6,8 +6,10 @@ FactoryBot.define do
dast_profile_schedules { 50 }
trait :default_plan do
plan factory: :default_plan
Plan.all_plans.each do |plan|
trait :"#{plan}_plan" do
plan factory: :"#{plan}_plan"
end
end
trait :with_package_file_sizes do

View File

@ -13,8 +13,6 @@ RSpec.describe 'Merge request > Batch comments', :js do
end
before do
stub_feature_flags(paginated_notes: false)
project.add_maintainer(user)
sign_in(user)

View File

@ -26,8 +26,6 @@ RSpec.describe 'User comments on a diff', :js do
let(:user) { create(:user) }
before do
stub_feature_flags(paginated_notes: false)
project.add_maintainer(user)
sign_in(user)

View File

@ -29,7 +29,7 @@ describe('WorkItemDetailModal component', () => {
const findAlert = () => wrapper.findComponent(GlAlert);
const findWorkItemDetail = () => wrapper.findComponent(WorkItemDetail);
const createComponent = ({ workItemId = '1', error = false } = {}) => {
const createComponent = ({ workItemId = '1', issueGid = '2', error = false } = {}) => {
const apolloProvider = createMockApollo([
[
deleteWorkItemFromTaskMutation,
@ -46,7 +46,7 @@ describe('WorkItemDetailModal component', () => {
wrapper = shallowMount(WorkItemDetailModal, {
apolloProvider,
propsData: { workItemId },
propsData: { workItemId, issueGid },
data() {
return {
error,
@ -67,6 +67,7 @@ describe('WorkItemDetailModal component', () => {
expect(findWorkItemDetail().props()).toEqual({
workItemId: '1',
workItemParentId: '2',
});
});
@ -97,13 +98,6 @@ describe('WorkItemDetailModal component', () => {
expect(wrapper.emitted('close')).toBeTruthy();
});
it('emits `workItemUpdated` event on updating work item', () => {
createComponent();
findWorkItemDetail().vm.$emit('workItemUpdated');
expect(wrapper.emitted('workItemUpdated')).toBeTruthy();
});
describe('delete work item', () => {
it('emits workItemDeleted and closes modal', async () => {
createComponent();

View File

@ -82,15 +82,6 @@ describe('WorkItemState component', () => {
});
});
it('emits updated event', async () => {
createComponent();
findItemState().vm.$emit('changed', STATE_CLOSED);
await waitForPromises();
expect(wrapper.emitted('updated')).toEqual([[]]);
});
it('emits an error message when the mutation was unsuccessful', async () => {
createComponent({ mutationHandler: jest.fn().mockRejectedValue('Error!') });

View File

@ -8,6 +8,7 @@ import ItemTitle from '~/work_items/components/item_title.vue';
import WorkItemTitle from '~/work_items/components/work_item_title.vue';
import { i18n, TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import updateWorkItemTaskMutation from '~/work_items/graphql/update_work_item_task.mutation.graphql';
import { updateWorkItemMutationResponse, workItemQueryResponse } from '../mock_data';
describe('WorkItemTitle component', () => {
@ -19,14 +20,18 @@ describe('WorkItemTitle component', () => {
const findItemTitle = () => wrapper.findComponent(ItemTitle);
const createComponent = ({ mutationHandler = mutationSuccessHandler } = {}) => {
const createComponent = ({ workItemParentId, mutationHandler = mutationSuccessHandler } = {}) => {
const { id, title, workItemType } = workItemQueryResponse.data.workItem;
wrapper = shallowMount(WorkItemTitle, {
apolloProvider: createMockApollo([[updateWorkItemMutation, mutationHandler]]),
apolloProvider: createMockApollo([
[updateWorkItemMutation, mutationHandler],
[updateWorkItemTaskMutation, mutationHandler],
]),
propsData: {
workItemId: id,
workItemTitle: title,
workItemType: workItemType.name,
workItemParentId,
},
});
};
@ -57,13 +62,25 @@ describe('WorkItemTitle component', () => {
});
});
it('emits updated event', async () => {
createComponent();
it('calls WorkItemTaskUpdate if passed workItemParentId prop', () => {
const title = 'new title!';
const workItemParentId = '1234';
findItemTitle().vm.$emit('title-changed', 'new title');
await waitForPromises();
createComponent({
workItemParentId,
});
expect(wrapper.emitted('updated')).toEqual([[]]);
findItemTitle().vm.$emit('title-changed', title);
expect(mutationSuccessHandler).toHaveBeenCalledWith({
input: {
id: workItemParentId,
taskData: {
id: workItemQueryResponse.data.workItem.id,
title,
},
},
});
});
it('does not call a mutation when the title has not changed', () => {

View File

@ -141,20 +141,6 @@ describe('WorkItemDetail component', () => {
});
});
it('emits workItemUpdated event when fields updated', async () => {
createComponent();
await waitForPromises();
findWorkItemState().vm.$emit('updated');
expect(wrapper.emitted('workItemUpdated')).toEqual([[]]);
findWorkItemTitle().vm.$emit('updated');
expect(wrapper.emitted('workItemUpdated')).toEqual([[], []]);
});
describe('when work_items_mvc_2 feature flag is enabled', () => {
it('renders assignees component when assignees widget is returned from the API', async () => {
createComponent({

View File

@ -52,6 +52,7 @@ describe('Work items root component', () => {
expect(findWorkItemDetail().props()).toEqual({
workItemId: 'gid://gitlab/WorkItem/1',
workItemParentId: null,
});
});

View File

@ -227,13 +227,29 @@ RSpec.describe EmailsHelper do
describe '#header_logo' do
context 'there is a brand item with a logo' do
it 'returns the brand header logo' do
appearance = create :appearance, header_logo: fixture_file_upload('spec/fixtures/dk.png')
let_it_be(:appearance) { create(:appearance) }
let(:logo_path) { 'spec/fixtures/dk.png' }
before do
appearance.update!(header_logo: fixture_file_upload(logo_path))
end
it 'returns the brand header logo' do
expect(header_logo).to eq(
%{<img style="height: 50px" src="/uploads/-/system/appearance/header_logo/#{appearance.id}/dk.png" />}
)
end
context 'that is a SVG file' do
let(:logo_path) { 'spec/fixtures/logo_sample.svg' }
it 'returns the default header logo' do
expect(header_logo).to match(
%r{<img alt="GitLab" src="/images/mailers/gitlab_logo\.(?:gif|png)" width="\d+" height="\d+" />}
)
end
end
end
context 'there is a brand item without a logo' do

View File

@ -10,11 +10,16 @@ RSpec.describe FormHelper do
expect(helper.form_errors(model)).to be_nil
end
it 'renders an alert div' do
it 'renders an appropriately styled alert div' do
model = double(errors: errors_stub('Error 1'))
expect(helper.form_errors(model))
expect(helper.form_errors(model, pajamas_alert: false))
.to include('<div class="alert alert-danger" id="error_explanation">')
expect(helper.form_errors(model, pajamas_alert: true))
.to include(
'<div class="gl-alert gl-alert-danger gl-alert-not-dismissible gl-mb-5" id="error_explanation" role="alert">'
)
end
it 'contains a summary message' do
@ -22,9 +27,9 @@ RSpec.describe FormHelper do
multi_errors = double(errors: errors_stub('A', 'B', 'C'))
expect(helper.form_errors(single_error))
.to include('<h4>The form contains the following error:')
.to include('The form contains the following error:')
expect(helper.form_errors(multi_errors))
.to include('<h4>The form contains the following errors:')
.to include('The form contains the following errors:')
end
it 'renders each message' do

View File

@ -329,10 +329,6 @@ RSpec.describe NotesHelper do
allow(helper).to receive(:current_user).and_return(guest)
end
it 'sets last_fetched_at to 0 when start_at_zero is true' do
expect(helper.notes_data(issue, true)[:lastFetchedAt]).to eq(0)
end
it 'includes the current notes filter for the user' do
guest.set_notes_filter(UserPreference::NOTES_FILTERS[:only_comments], issue)

View File

@ -0,0 +1,81 @@
# frozen_string_literal: true
require 'fast_spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule::Changes do
let(:factory) do
Gitlab::Config::Entry::Factory.new(described_class)
.value(config)
end
subject(:entry) { factory.create! }
before do
entry.compose!
end
describe '.new' do
context 'when using a string array' do
let(:config) { %w[app/ lib/ spec/ other/* paths/**/*.rb] }
it { is_expected.to be_valid }
end
context 'when using an integer array' do
let(:config) { [1, 2] }
it { is_expected.not_to be_valid }
it 'returns errors' do
expect(entry.errors).to include(/changes config should be an array of strings/)
end
end
context 'when using a string' do
let(:config) { 'a regular string' }
it { is_expected.not_to be_valid }
it 'reports an error about invalid policy' do
expect(entry.errors).to include(/should be an array of strings/)
end
end
context 'when using a long array' do
let(:config) { ['app/'] * 51 }
it { is_expected.not_to be_valid }
it 'returns errors' do
expect(entry.errors).to include(/has too many entries \(maximum 50\)/)
end
end
context 'when clause is empty' do
let(:config) {}
it { is_expected.to be_valid }
end
context 'when policy strategy does not match' do
let(:config) { 'string strategy' }
it { is_expected.not_to be_valid }
it 'returns information about errors' do
expect(entry.errors)
.to include(/should be an array of strings/)
end
end
end
describe '#value' do
subject(:value) { entry.value }
context 'when using a string array' do
let(:config) { %w[app/ lib/ spec/ other/* paths/**/*.rb] }
it { is_expected.to eq(config) }
end
end
end
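An illustrative sketch (not part of the diff) of the same factory pattern outside the spec: an array of up to 50 path glob strings validates, while anything longer or of another type is rejected.
# Illustrative only; mirrors the factory usage in the spec above.
changes_entry = Gitlab::Config::Entry::Factory
  .new(Gitlab::Ci::Config::Entry::Rules::Rule::Changes)
  .value(%w[app/ lib/**/*.rb])
  .create!
changes_entry.compose!
changes_entry.valid? # => true
too_long = Gitlab::Config::Entry::Factory
  .new(Gitlab::Ci::Config::Entry::Rules::Rule::Changes)
  .value(['app/'] * 51)
  .create!
too_long.compose!
too_long.errors # => includes "changes config has too many entries (maximum 50)"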

View File

@ -18,6 +18,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
let(:entry) { factory.create! }
before do
entry.compose!
end
describe '.new' do
subject { entry }
@ -121,7 +125,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.not_to be_valid }
it 'returns errors' do
expect(subject.errors).to include(/changes should be an array of strings/)
expect(subject.errors).to include(/changes config should be an array of strings/)
end
end
@ -131,7 +135,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.not_to be_valid }
it 'returns errors' do
expect(subject.errors).to include(/changes is too long \(maximum is 50 characters\)/)
expect(subject.errors).to include(/changes config has too many entries \(maximum 50\)/)
end
end
@ -434,6 +438,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
end
describe '.default' do
let(:config) {}
it 'does not have default value' do
expect(described_class.default).to be_nil
end

View File

@ -4,9 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Quota::Deployments do
let_it_be_with_refind(:namespace) { create(:namespace) }
let_it_be_with_reload(:default_plan) { create(:default_plan) }
let_it_be_with_reload(:project) { create(:project, :repository, namespace: namespace) }
let_it_be(:plan_limits) { create(:plan_limits, plan: default_plan) }
let_it_be(:plan_limits) { create(:plan_limits, :default_plan) }
let(:pipeline) { build_stubbed(:ci_pipeline, project: project) }

View File

@ -29,19 +29,50 @@ RSpec.describe Gitlab::Ci::Trace::Archive do
let(:stream) { StringIO.new(trace, 'rb') }
let(:src_checksum) { Digest::MD5.hexdigest(trace) }
shared_examples 'valid' do
it 'does not count as invalid' do
subject.execute!(stream)
expect(metrics)
.not_to have_received(:increment_error_counter)
.with(error_reason: :archive_invalid_checksum)
end
end
shared_examples 'local checksum only' do
it 'generates only local checksum' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.remote_checksum).to be_nil
end
end
shared_examples 'skips validations' do
it_behaves_like 'valid'
it_behaves_like 'local checksum only'
end
shared_context 'with FIPS' do
context 'with FIPS enabled', :fips_mode do
it_behaves_like 'valid'
it 'does not generate md5 checksums' do
subject.execute!(stream)
expect(trace_metadata.checksum).to be_nil
expect(trace_metadata.remote_checksum).to be_nil
end
end
end
context 'when the object store is disabled' do
before do
stub_artifacts_object_storage(enabled: false)
end
it 'skips validation' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.remote_checksum).to be_nil
expect(metrics)
.not_to have_received(:increment_error_counter)
.with(error_reason: :archive_invalid_checksum)
end
it_behaves_like 'skips validations'
include_context 'with FIPS'
end
context 'with background_upload enabled' do
@ -49,15 +80,8 @@ RSpec.describe Gitlab::Ci::Trace::Archive do
stub_artifacts_object_storage(background_upload: true)
end
it 'skips validation' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.remote_checksum).to be_nil
expect(metrics)
.not_to have_received(:increment_error_counter)
.with(error_reason: :archive_invalid_checksum)
end
it_behaves_like 'skips validations'
include_context 'with FIPS'
end
context 'with direct_upload enabled' do
@ -65,27 +89,26 @@ RSpec.describe Gitlab::Ci::Trace::Archive do
stub_artifacts_object_storage(direct_upload: true)
end
it 'validates the archived trace' do
it_behaves_like 'valid'
it 'checksums match' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.remote_checksum).to eq(src_checksum)
expect(metrics)
.not_to have_received(:increment_error_counter)
.with(error_reason: :archive_invalid_checksum)
end
context 'when the checksum does not match' do
let(:invalid_remote_checksum) { SecureRandom.hex }
before do
expect(::Gitlab::Ci::Trace::RemoteChecksum)
allow(::Gitlab::Ci::Trace::RemoteChecksum)
.to receive(:new)
.with(an_instance_of(Ci::JobArtifact))
.and_return(double(md5_checksum: invalid_remote_checksum))
end
it 'validates the archived trace' do
it 'counts as invalid' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
@ -94,7 +117,11 @@ RSpec.describe Gitlab::Ci::Trace::Archive do
.to have_received(:increment_error_counter)
.with(error_reason: :archive_invalid_checksum)
end
include_context 'with FIPS'
end
include_context 'with FIPS'
end
end
end

View File

@ -17,13 +17,13 @@ RSpec.describe Gitlab::Metrics::Sli do
it 'allows different SLIs to be defined on each subclass' do
apdex_counters = [
fake_total_counter('foo', 'apdex'),
fake_numerator_counter('foo', 'apdex', 'success')
fake_total_counter('foo_apdex'),
fake_numerator_counter('foo_apdex', 'success')
]
error_rate_counters = [
fake_total_counter('foo', 'error_rate'),
fake_numerator_counter('foo', 'error_rate', 'error')
fake_total_counter('foo'),
fake_numerator_counter('foo', 'error')
]
apdex = described_class::Apdex.initialize_sli(:foo, [{ hello: :world }])
@ -40,13 +40,17 @@ RSpec.describe Gitlab::Metrics::Sli do
end
subclasses = {
Gitlab::Metrics::Sli::Apdex => :success,
Gitlab::Metrics::Sli::ErrorRate => :error
Gitlab::Metrics::Sli::Apdex => {
suffix: '_apdex',
numerator: :success
},
Gitlab::Metrics::Sli::ErrorRate => {
suffix: '',
numerator: :error
}
}
subclasses.each do |subclass, numerator_type|
subclass_type = subclass.to_s.demodulize.underscore
subclasses.each do |subclass, subclass_info|
describe subclass do
describe 'Class methods' do
before do
@ -73,8 +77,8 @@ RSpec.describe Gitlab::Metrics::Sli do
describe '.initialize_sli' do
it 'returns and stores a new initialized SLI' do
counters = [
fake_total_counter(:bar, subclass_type),
fake_numerator_counter(:bar, subclass_type, numerator_type)
fake_total_counter("bar#{subclass_info[:suffix]}"),
fake_numerator_counter("bar#{subclass_info[:suffix]}", subclass_info[:numerator])
]
sli = described_class.initialize_sli(:bar, [{ hello: :world }])
@ -86,8 +90,8 @@ RSpec.describe Gitlab::Metrics::Sli do
it 'does not change labels for an already-initialized SLI' do
counters = [
fake_total_counter(:bar, subclass_type),
fake_numerator_counter(:bar, subclass_type, numerator_type)
fake_total_counter("bar#{subclass_info[:suffix]}"),
fake_numerator_counter("bar#{subclass_info[:suffix]}", subclass_info[:numerator])
]
sli = described_class.initialize_sli(:bar, [{ hello: :world }])
@ -106,8 +110,8 @@ RSpec.describe Gitlab::Metrics::Sli do
describe '.initialized?' do
before do
fake_total_counter(:boom, subclass_type)
fake_numerator_counter(:boom, subclass_type, numerator_type)
fake_total_counter("boom#{subclass_info[:suffix]}")
fake_numerator_counter("boom#{subclass_info[:suffix]}", subclass_info[:numerator])
end
it 'is true when an SLI was initialized with labels' do
@ -125,8 +129,8 @@ RSpec.describe Gitlab::Metrics::Sli do
describe '#initialize_counters' do
it 'initializes counters for the passed label combinations' do
counters = [
fake_total_counter(:hey, subclass_type),
fake_numerator_counter(:hey, subclass_type, numerator_type)
fake_total_counter("hey#{subclass_info[:suffix]}"),
fake_numerator_counter("hey#{subclass_info[:suffix]}", subclass_info[:numerator])
]
described_class.new(:hey).initialize_counters([{ foo: 'bar' }, { foo: 'baz' }])
@ -138,18 +142,18 @@ RSpec.describe Gitlab::Metrics::Sli do
describe "#increment" do
let!(:sli) { described_class.new(:heyo) }
let!(:total_counter) { fake_total_counter(:heyo, subclass_type) }
let!(:numerator_counter) { fake_numerator_counter(:heyo, subclass_type, numerator_type) }
let!(:total_counter) { fake_total_counter("heyo#{subclass_info[:suffix]}") }
let!(:numerator_counter) { fake_numerator_counter("heyo#{subclass_info[:suffix]}", subclass_info[:numerator]) }
it "increments both counters for labels when #{numerator_type} is true" do
sli.increment(labels: { hello: "world" }, numerator_type => true)
it "increments both counters for labels when #{subclass_info[:numerator]} is true" do
sli.increment(labels: { hello: "world" }, subclass_info[:numerator] => true)
expect(total_counter).to have_received(:increment).with({ hello: 'world' })
expect(numerator_counter).to have_received(:increment).with({ hello: 'world' })
end
it "only increments the total counters for labels when #{numerator_type} is false" do
sli.increment(labels: { hello: "world" }, numerator_type => false)
it "only increments the total counters for labels when #{subclass_info[:numerator]} is false" do
sli.increment(labels: { hello: "world" }, subclass_info[:numerator] => false)
expect(total_counter).to have_received(:increment).with({ hello: 'world' })
expect(numerator_counter).not_to have_received(:increment).with({ hello: 'world' })
@ -168,11 +172,11 @@ RSpec.describe Gitlab::Metrics::Sli do
fake_counter
end
def fake_total_counter(name, type)
fake_prometheus_counter("gitlab_sli:#{name}_#{type}:total")
def fake_total_counter(name)
fake_prometheus_counter("gitlab_sli:#{name}:total")
end
def fake_numerator_counter(name, type, numerator_name)
fake_prometheus_counter("gitlab_sli:#{name}_#{type}:#{numerator_name}_total")
def fake_numerator_counter(name, numerator_name)
fake_prometheus_counter("gitlab_sli:#{name}:#{numerator_name}_total")
end
end

View File

@ -12,17 +12,11 @@ RSpec.describe Gitlab::Redis::DuplicateJobs do
include_examples "redis_shared_examples"
describe '#pool' do
let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
subject { described_class.pool }
before do
redis_clear_raw_config!(Gitlab::Redis::SharedState)
redis_clear_raw_config!(Gitlab::Redis::Queues)
allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host)
allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket)
end
after do
@ -37,14 +31,46 @@ RSpec.describe Gitlab::Redis::DuplicateJobs do
clear_pool
end
it 'instantiates an instance of MultiStore' do
subject.with do |redis_instance|
expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
context 'store connection settings' do
let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
expect(redis_instance.secondary_store.connection[:id]).to eq("redis:///path/to/redis.sock/0")
before do
allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host)
allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket)
end
expect(redis_instance.instance_name).to eq('DuplicateJobs')
it 'instantiates an instance of MultiStore' do
subject.with do |redis_instance|
expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
expect(redis_instance.primary_store.connection[:namespace]).to be_nil
expect(redis_instance.secondary_store.connection[:id]).to eq("redis:///path/to/redis.sock/0")
expect(redis_instance.secondary_store.connection[:namespace]).to eq("resque:gitlab")
expect(redis_instance.instance_name).to eq('DuplicateJobs')
end
end
end
# Make sure the current namespace is respected for the secondary store but omitted from the primary
context 'key namespaces' do
let(:key) { 'key' }
let(:value) { '123' }
it 'writes keys to SharedState with no prefix, and to Queues with the "resque:gitlab:" prefix' do
subject.with do |redis_instance|
redis_instance.set(key, value)
end
Gitlab::Redis::SharedState.with do |redis_instance|
expect(redis_instance.get(key)).to eq(value)
end
Gitlab::Redis::Queues.with do |redis_instance|
expect(redis_instance.get("resque:gitlab:#{key}")).to eq(value)
end
end
end

View File

@ -65,6 +65,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
context 'when primary_store is not a ::Redis instance' do
before do
allow(primary_store).to receive(:is_a?).with(::Redis).and_return(false)
allow(primary_store).to receive(:is_a?).with(::Redis::Namespace).and_return(false)
end
it 'fails with exception' do
@ -73,9 +74,21 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
end
context 'when primary_store is a ::Redis::Namespace instance' do
before do
allow(primary_store).to receive(:is_a?).with(::Redis).and_return(false)
allow(primary_store).to receive(:is_a?).with(::Redis::Namespace).and_return(true)
end
it 'fails with exception' do
expect { described_class.new(primary_store, secondary_store, instance_name) }.not_to raise_error
end
end
context 'when secondary_store is not a ::Redis instance' do
before do
allow(secondary_store).to receive(:is_a?).with(::Redis).and_return(false)
allow(secondary_store).to receive(:is_a?).with(::Redis::Namespace).and_return(false)
end
it 'fails with exception' do
@ -84,6 +97,17 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
end
context 'when secondary_store is a ::Redis::Namespace instance' do
before do
allow(secondary_store).to receive(:is_a?).with(::Redis).and_return(false)
allow(secondary_store).to receive(:is_a?).with(::Redis::Namespace).and_return(true)
end
it 'fails with exception' do
expect { described_class.new(primary_store, secondary_store, instance_name) }.not_to raise_error
end
end
context 'with READ redis commands' do
let_it_be(:key1) { "redis:{1}:key_a" }
let_it_be(:key2) { "redis:{1}:key_b" }
@ -145,7 +169,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
it 'logs the ReadFromPrimaryError' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
an_instance_of(Gitlab::Redis::MultiStore::ReadFromPrimaryError),
hash_including(command_name: name, extra: hash_including(instance_name: instance_name))
hash_including(command_name: name, instance_name: instance_name)
)
subject
@ -222,8 +246,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
it 'logs the exception' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
hash_including(extra: hash_including(:multi_store_error_message, instance_name: instance_name),
command_name: name))
hash_including(:multi_store_error_message, instance_name: instance_name, command_name: name))
subject
end
@ -404,7 +427,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
it 'logs the exception and execute on secondary instance', :aggregate_errors do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
hash_including(extra: hash_including(:multi_store_error_message), command_name: name))
hash_including(:multi_store_error_message, command_name: name, instance_name: instance_name))
expect(secondary_store).to receive(name).with(*expected_args).and_call_original
subject
@ -525,7 +548,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
it 'logs the exception and execute on secondary instance', :aggregate_errors do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
hash_including(extra: hash_including(:multi_store_error_message), command_name: name))
hash_including(:multi_store_error_message, command_name: name))
expect(secondary_store).to receive(name).and_call_original
subject
@ -563,7 +586,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
it 'returns the value from the secondary store, logging an error' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
an_instance_of(Gitlab::Redis::MultiStore::PipelinedDiffError),
hash_including(command_name: name, extra: hash_including(instance_name: instance_name))
hash_including(command_name: name, instance_name: instance_name)
).and_call_original
expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
@ -579,7 +602,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
it 'returns the value from the secondary store, logging an error' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
an_instance_of(Gitlab::Redis::MultiStore::PipelinedDiffError),
hash_including(command_name: name, extra: hash_including(instance_name: instance_name))
hash_including(command_name: name, instance_name: instance_name)
)
expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
@ -673,7 +696,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
it 'logs MethodMissingError' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
an_instance_of(Gitlab::Redis::MultiStore::MethodMissingError),
hash_including(command_name: :incr, extra: hash_including(instance_name: instance_name))
hash_including(command_name: :incr, instance_name: instance_name)
)
subject

View File

@ -0,0 +1,68 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Redis::SidekiqStatus do
# Note: this is a pseudo-store in front of `SharedState`, meant only as a tool
# to move away from `Sidekiq.redis` for sidekiq status data. Thus, we use the
# same store configuration as the former.
let(:instance_specific_config_file) { "config/redis.shared_state.yml" }
let(:environment_config_file_name) { "GITLAB_REDIS_SHARED_STATE_CONFIG_FILE" }
include_examples "redis_shared_examples"
describe '#pool' do
let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
subject { described_class.pool }
before do
redis_clear_raw_config!(Gitlab::Redis::SharedState)
redis_clear_raw_config!(Gitlab::Redis::Queues)
allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host)
allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket)
end
after do
redis_clear_raw_config!(Gitlab::Redis::SharedState)
redis_clear_raw_config!(Gitlab::Redis::Queues)
end
around do |example|
clear_pool
example.run
ensure
clear_pool
end
it 'instantiates an instance of MultiStore' do
subject.with do |redis_instance|
expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
expect(redis_instance.secondary_store.connection[:id]).to eq("redis:///path/to/redis.sock/0")
expect(redis_instance.instance_name).to eq('SidekiqStatus')
end
end
it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_sidekiq_status,
:use_primary_store_as_default_for_sidekiq_status
end
describe '#raw_config_hash' do
it 'has a legacy default URL' do
expect(subject).to receive(:fetch_config) { false }
expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6382')
end
end
describe '#store_name' do
it 'returns the name of the SharedState store' do
expect(described_class.store_name).to eq('SharedState')
end
end
end

View File

@ -3,103 +3,167 @@
require 'spec_helper'
RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do
describe '.set' do
it 'stores the job ID' do
described_class.set('123')
shared_examples 'tracking status in redis' do
describe '.set' do
it 'stores the job ID' do
described_class.set('123')
key = described_class.key_for('123')
key = described_class.key_for('123')
Sidekiq.redis do |redis|
expect(redis.exists(key)).to eq(true)
expect(redis.ttl(key) > 0).to eq(true)
expect(redis.get(key)).to eq('1')
with_redis do |redis|
expect(redis.exists(key)).to eq(true)
expect(redis.ttl(key) > 0).to eq(true)
expect(redis.get(key)).to eq('1')
end
end
it 'allows overriding the expiration time' do
described_class.set('123', described_class::DEFAULT_EXPIRATION * 2)
key = described_class.key_for('123')
with_redis do |redis|
expect(redis.exists(key)).to eq(true)
expect(redis.ttl(key) > described_class::DEFAULT_EXPIRATION).to eq(true)
expect(redis.get(key)).to eq('1')
end
end
it 'does not store anything with a nil expiry' do
described_class.set('123', nil)
key = described_class.key_for('123')
with_redis do |redis|
expect(redis.exists(key)).to eq(false)
end
end
end
it 'allows overriding the expiration time' do
described_class.set('123', described_class::DEFAULT_EXPIRATION * 2)
describe '.unset' do
it 'removes the job ID' do
described_class.set('123')
described_class.unset('123')
key = described_class.key_for('123')
key = described_class.key_for('123')
Sidekiq.redis do |redis|
expect(redis.exists(key)).to eq(true)
expect(redis.ttl(key) > described_class::DEFAULT_EXPIRATION).to eq(true)
expect(redis.get(key)).to eq('1')
with_redis do |redis|
expect(redis.exists(key)).to eq(false)
end
end
end
it 'does not store anything with a nil expiry' do
described_class.set('123', nil)
describe '.all_completed?' do
it 'returns true if all jobs have been completed' do
expect(described_class.all_completed?(%w(123))).to eq(true)
end
key = described_class.key_for('123')
it 'returns false if a job has not yet been completed' do
described_class.set('123')
Sidekiq.redis do |redis|
expect(redis.exists(key)).to eq(false)
expect(described_class.all_completed?(%w(123 456))).to eq(false)
end
end
describe '.running?' do
it 'returns true if job is running' do
described_class.set('123')
expect(described_class.running?('123')).to be(true)
end
it 'returns false if job is not found' do
expect(described_class.running?('123')).to be(false)
end
end
describe '.num_running' do
it 'returns 0 if all jobs have been completed' do
expect(described_class.num_running(%w(123))).to eq(0)
end
it 'returns 2 if two jobs are still running' do
described_class.set('123')
described_class.set('456')
expect(described_class.num_running(%w(123 456 789))).to eq(2)
end
end
describe '.num_completed' do
it 'returns 1 if all jobs have been completed' do
expect(described_class.num_completed(%w(123))).to eq(1)
end
it 'returns 1 if a job has not yet been completed' do
described_class.set('123')
described_class.set('456')
expect(described_class.num_completed(%w(123 456 789))).to eq(1)
end
end
describe '.completed_jids' do
it 'returns the completed job' do
expect(described_class.completed_jids(%w(123))).to eq(['123'])
end
it 'returns only the jobs completed' do
described_class.set('123')
described_class.set('456')
expect(described_class.completed_jids(%w(123 456 789))).to eq(['789'])
end
end
describe '.job_status' do
it 'returns an array of boolean values' do
described_class.set('123')
described_class.set('456')
described_class.unset('123')
expect(described_class.job_status(%w(123 456 789))).to eq([false, true, false])
end
it 'handles an empty array' do
expect(described_class.job_status([])).to eq([])
end
end
end
describe '.unset' do
it 'removes the job ID' do
described_class.set('123')
described_class.unset('123')
key = described_class.key_for('123')
Sidekiq.redis do |redis|
expect(redis.exists(key)).to eq(false)
end
context 'with multi-store feature flags turned on' do
def with_redis(&block)
Gitlab::Redis::SidekiqStatus.with(&block)
end
it 'uses Gitlab::Redis::SidekiqStatus.with' do
expect(Gitlab::Redis::SidekiqStatus).to receive(:with).and_call_original
expect(Sidekiq).not_to receive(:redis)
described_class.job_status(%w(123 456 789))
end
it_behaves_like 'tracking status in redis'
end
describe '.all_completed?' do
it 'returns true if all jobs have been completed' do
expect(described_class.all_completed?(%w(123))).to eq(true)
context 'when both multi-store feature flags are off' do
def with_redis(&block)
Sidekiq.redis(&block)
end
it 'returns false if a job has not yet been completed' do
described_class.set('123')
expect(described_class.all_completed?(%w(123 456))).to eq(false)
end
end
describe '.running?' do
it 'returns true if job is running' do
described_class.set('123')
expect(described_class.running?('123')).to be(true)
before do
stub_feature_flags(use_primary_and_secondary_stores_for_sidekiq_status: false)
stub_feature_flags(use_primary_store_as_default_for_sidekiq_status: false)
end
it 'returns false if job is not found' do
expect(described_class.running?('123')).to be(false)
end
end
it 'uses Sidekiq.redis' do
expect(Sidekiq).to receive(:redis).and_call_original
expect(Gitlab::Redis::SidekiqStatus).not_to receive(:with)
describe '.num_running' do
it 'returns 0 if all jobs have been completed' do
expect(described_class.num_running(%w(123))).to eq(0)
described_class.job_status(%w(123 456 789))
end
it 'returns 2 if two jobs are still running' do
described_class.set('123')
described_class.set('456')
expect(described_class.num_running(%w(123 456 789))).to eq(2)
end
end
describe '.num_completed' do
it 'returns 1 if all jobs have been completed' do
expect(described_class.num_completed(%w(123))).to eq(1)
end
it 'returns 1 if a job has not yet been completed' do
described_class.set('123')
described_class.set('456')
expect(described_class.num_completed(%w(123 456 789))).to eq(1)
end
it_behaves_like 'tracking status in redis'
end
describe '.key_for' do
@ -110,31 +174,4 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
expect(key).to include('123')
end
end
describe '.completed_jids' do
it 'returns the completed job' do
expect(described_class.completed_jids(%w(123))).to eq(['123'])
end
it 'returns only the jobs completed' do
described_class.set('123')
described_class.set('456')
expect(described_class.completed_jids(%w(123 456 789))).to eq(['789'])
end
end
describe '.job_status' do
it 'returns an array of boolean values' do
described_class.set('123')
described_class.set('456')
described_class.unset('123')
expect(described_class.job_status(%w(123 456 789))).to eq([false, true, false])
end
it 'handles an empty array' do
expect(described_class.job_status([])).to eq([])
end
end
end

Some files were not shown because too many files have changed in this diff