Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-12-03 12:10:23 +00:00
parent cc8ea69201
commit 5f0d27d131
87 changed files with 1547 additions and 674 deletions

View File

@ -1,22 +1,28 @@
<script>
import { GlButton, GlLoadingIcon, GlTable, GlTooltipDirective } from '@gitlab/ui';
import createFlash from '~/flash';
import { GlAlert, GlButton, GlLoadingIcon, GlTable, GlTooltipDirective } from '@gitlab/ui';
import { s__, __ } from '~/locale';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import getGroupContactsQuery from './queries/get_group_contacts.query.graphql';
import NewContactForm from './new_contact_form.vue';
export default {
components: {
GlAlert,
GlButton,
GlLoadingIcon,
GlTable,
NewContactForm,
},
directives: {
GlTooltip: GlTooltipDirective,
},
inject: ['groupFullPath', 'groupIssuesPath'],
inject: ['groupFullPath', 'groupIssuesPath', 'canAdminCrmContact'],
data() {
return { contacts: [] };
return {
contacts: [],
error: false,
errorMessages: [],
};
},
apollo: {
contacts: {
@ -31,12 +37,8 @@ export default {
update(data) {
return this.extractContacts(data);
},
error(error) {
createFlash({
message: __('Something went wrong. Please try again.'),
error,
captureError: true,
});
error() {
this.error = true;
},
},
},
@ -44,12 +46,31 @@ export default {
isLoading() {
return this.$apollo.queries.contacts.loading;
},
showNewForm() {
return this.$route.path.startsWith('/new');
},
},
methods: {
extractContacts(data) {
const contacts = data?.group?.contacts?.nodes || [];
return contacts.slice().sort((a, b) => a.firstName.localeCompare(b.firstName));
},
displayNewForm() {
if (this.showNewForm) return;
this.$router.push({ path: '/new' });
},
hideNewForm() {
this.$router.replace({ path: '/' });
},
handleError(errors) {
this.error = true;
if (errors) this.errorMessages = errors;
},
dismissError() {
this.error = false;
this.errorMessages = [];
},
},
fields: [
{ key: 'firstName', sortable: true },
@ -75,15 +96,41 @@ export default {
i18n: {
emptyText: s__('Crm|No contacts found'),
issuesButtonLabel: __('View issues'),
title: s__('Crm|Customer Relations Contacts'),
newContact: s__('Crm|New contact'),
errorText: __('Something went wrong. Please try again.'),
},
};
</script>
<template>
<div>
<gl-alert v-if="error" variant="danger" class="gl-mt-6" @dismiss="dismissError">
<div v-if="errorMessages.length == 0">{{ $options.i18n.errorText }}</div>
<div v-for="(message, index) in errorMessages" :key="index">{{ message }}</div>
</gl-alert>
<div
class="gl-display-flex gl-align-items-baseline gl-flex-direction-row gl-justify-content-space-between gl-mt-6"
>
<h2 class="gl-font-size-h2 gl-my-0">
{{ $options.i18n.title }}
</h2>
<div class="gl-display-none gl-md-display-flex gl-align-items-center gl-justify-content-end">
<gl-button
v-if="canAdminCrmContact"
variant="confirm"
data-testid="new-contact-button"
@click="displayNewForm"
>
{{ $options.i18n.newContact }}
</gl-button>
</div>
</div>
<new-contact-form v-if="showNewForm" @close="hideNewForm" @error="handleError" />
<gl-loading-icon v-if="isLoading" class="gl-mt-5" size="lg" />
<gl-table
v-else
class="gl-mt-5"
:items="contacts"
:fields="$options.fields"
:empty-text="$options.i18n.emptyText"

View File

@ -0,0 +1,140 @@
<script>
// Form component for creating a new CRM contact in the current group.
// Emits `close` when the form should be dismissed, and `error` (with an
// optional array of server-side validation messages) when creation fails.
import { GlButton, GlFormGroup, GlFormInput } from '@gitlab/ui';
import { produce } from 'immer';
import { __, s__ } from '~/locale';
import { convertToGraphQLId } from '~/graphql_shared/utils';
import { TYPE_GROUP } from '~/graphql_shared/constants';
import createContact from './queries/create_contact.mutation.graphql';
import getGroupContactsQuery from './queries/get_group_contacts.query.graphql';
export default {
components: {
GlButton,
GlFormGroup,
GlFormInput,
},
// Provided by the app entry point that mounts the CRM contacts app.
inject: ['groupFullPath', 'groupId'],
data() {
return {
firstName: '',
lastName: '',
phone: '',
email: '',
description: '',
// True while the create mutation is in flight; drives the button spinner.
submitting: false,
};
},
computed: {
// First name, last name and email are required; phone and description
// are optional (see the i18n labels below).
invalid() {
return this.firstName === '' || this.lastName === '' || this.email === '';
},
},
methods: {
// Submits the createContact mutation; closes the form only when the
// server reports no validation errors.
save() {
this.submitting = true;
return this.$apollo
.mutate({
mutation: createContact,
variables: {
input: {
groupId: convertToGraphQLId(TYPE_GROUP, this.groupId),
firstName: this.firstName,
lastName: this.lastName,
phone: this.phone,
email: this.email,
description: this.description,
},
},
update: this.updateCache,
})
.then(({ data }) => {
if (data.customerRelationsContactCreate.errors.length === 0) this.close();
this.submitting = false;
})
.catch(() => {
// Network/unexpected failure: surface a generic error to the parent.
this.error();
this.submitting = false;
});
},
close() {
this.$emit('close');
},
// `errors` is an optional array of messages; null means "generic error".
error(errors = null) {
this.$emit('error', errors);
},
// Apollo cache update: appends the newly created contact to the cached
// group-contacts query so the list refreshes without a refetch.
updateCache(store, { data: { customerRelationsContactCreate } }) {
if (customerRelationsContactCreate.errors.length > 0) {
this.error(customerRelationsContactCreate.errors);
return;
}
const variables = {
groupFullPath: this.groupFullPath,
};
const sourceData = store.readQuery({
query: getGroupContactsQuery,
variables,
});
// immer's `produce` yields an immutably-updated copy of the cached data.
const data = produce(sourceData, (draftState) => {
draftState.group.contacts.nodes = [
...sourceData.group.contacts.nodes,
customerRelationsContactCreate.contact,
];
});
store.writeQuery({
query: getGroupContactsQuery,
variables,
data,
});
},
},
i18n: {
buttonLabel: s__('Crm|Create new contact'),
cancel: __('Cancel'),
firstName: s__('Crm|First name'),
lastName: s__('Crm|Last name'),
email: s__('Crm|Email'),
phone: s__('Crm|Phone number (optional)'),
description: s__('Crm|Description (optional)'),
},
};
</script>
<template>
<div class="col-md-4">
<form @submit.prevent="save">
<gl-form-group :label="$options.i18n.firstName" label-for="contact-first-name">
<gl-form-input id="contact-first-name" v-model="firstName" />
</gl-form-group>
<gl-form-group :label="$options.i18n.lastName" label-for="contact-last-name">
<gl-form-input id="contact-last-name" v-model="lastName" />
</gl-form-group>
<gl-form-group :label="$options.i18n.email" label-for="contact-email">
<gl-form-input id="contact-email" v-model="email" />
</gl-form-group>
<gl-form-group :label="$options.i18n.phone" label-for="contact-phone">
<gl-form-input id="contact-phone" v-model="phone" />
</gl-form-group>
<gl-form-group :label="$options.i18n.description" label-for="contact-description">
<gl-form-input id="contact-description" v-model="description" />
</gl-form-group>
<div class="form-actions">
<!-- Disabled until the required fields are filled; shows a spinner while saving. -->
<gl-button
variant="confirm"
:disabled="invalid"
:loading="submitting"
data-testid="create-new-contact-button"
type="submit"
>{{ $options.i18n.buttonLabel }}</gl-button
>
<gl-button data-testid="cancel-button" @click="close">
{{ $options.i18n.cancel }}
</gl-button>
</div>
</form>
<div class="gl-pb-5"></div>
</div>
</template>

View File

@ -0,0 +1,10 @@
#import "./crm_contact_fields.fragment.graphql"
# Creates a customer relations contact from the given input.
# Returns the created contact's fields (via ContactFragment) plus any
# server-side validation errors.
mutation createContact($input: CustomerRelationsContactCreateInput!) {
customerRelationsContactCreate(input: $input) {
contact {
...ContactFragment
}
errors
}
}

View File

@ -0,0 +1,14 @@
# Shared contact field selection used by both the group-contacts query and
# the create-contact mutation, so cached results stay shape-compatible.
fragment ContactFragment on CustomerRelationsContact {
__typename
id
firstName
lastName
email
phone
description
organization {
__typename
id
name
}
}

View File

@ -1,21 +1,12 @@
#import "./crm_contact_fields.fragment.graphql"
query contacts($groupFullPath: ID!) {
group(fullPath: $groupFullPath) {
__typename
id
contacts {
nodes {
__typename
id
firstName
lastName
email
phone
description
organization {
__typename
id
name
}
...ContactFragment
}
}
}

View File

@ -1,9 +1,11 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import VueRouter from 'vue-router';
import createDefaultClient from '~/lib/graphql';
import CrmContactsRoot from './components/contacts_root.vue';
Vue.use(VueApollo);
Vue.use(VueRouter);
export default () => {
const el = document.getElementById('js-crm-contacts-app');
@ -16,12 +18,26 @@ export default () => {
return false;
}
const { groupFullPath, groupIssuesPath } = el.dataset;
const { basePath, groupFullPath, groupIssuesPath, canAdminCrmContact, groupId } = el.dataset;
const router = new VueRouter({
base: basePath,
mode: 'history',
routes: [
{
// eslint-disable-next-line @gitlab/require-i18n-strings
name: 'Contacts List',
path: '/',
component: CrmContactsRoot,
},
],
});
return new Vue({
el,
router,
apolloProvider,
provide: { groupFullPath, groupIssuesPath },
provide: { groupFullPath, groupIssuesPath, canAdminCrmContact, groupId },
render(createElement) {
return createElement(CrmContactsRoot);
},

View File

@ -16,11 +16,11 @@ export class MyFancyExtension {
* actions, keystrokes, update options, etc.
* Is called only once before the extension gets registered
*
* @param { Object } [setupOptions] The setupOptions object
* @param { Object } [instance] The Source Editor instance
* @param { Object } [setupOptions] The setupOptions object
*/
// eslint-disable-next-line class-methods-use-this,no-unused-vars
onSetup(setupOptions, instance) {}
onSetup(instance, setupOptions) {}
/**
* The first thing called after the extension is

View File

@ -153,7 +153,7 @@ export default class EditorInstance {
const extensionInstance = new EditorExtension(extension);
const { setupOptions, obj: extensionObj } = extensionInstance;
if (extensionObj.onSetup) {
extensionObj.onSetup(setupOptions, this);
extensionObj.onSetup(this, setupOptions);
}
if (extensionsStore) {
this.registerExtension(extensionInstance, extensionsStore);

View File

@ -1,5 +1,5 @@
<script>
import { GlAlert, GlLink, GlSprintf, GlTable } from '@gitlab/ui';
import { GlAlert, GlLink, GlSprintf, GlTableLite } from '@gitlab/ui';
import { __ } from '~/locale';
import CiLintResultsParam from './ci_lint_results_param.vue';
import CiLintResultsValue from './ci_lint_results_value.vue';
@ -36,7 +36,7 @@ export default {
GlAlert,
GlLink,
GlSprintf,
GlTable,
GlTableLite,
CiLintWarnings,
CiLintResultsValue,
CiLintResultsParam,
@ -129,7 +129,7 @@ export default {
@dismiss="isWarningDismissed = true"
/>
<gl-table
<gl-table-lite
v-if="shouldShowTable"
:items="jobs"
:fields="$options.fields"
@ -142,6 +142,6 @@ export default {
<template #cell(value)="{ item }">
<ci-lint-results-value :item="item" :dry-run="dryRun" />
</template>
</gl-table>
</gl-table-lite>
</div>
</template>

View File

@ -122,7 +122,6 @@ export default function simulateDrag(options) {
const firstRect = getRect(firstEl);
const lastRect = getRect(lastEl);
const startTime = new Date().getTime();
const duration = options.duration || 1000;
simulateEvent(fromEl, 'pointerdown', {
@ -140,8 +139,28 @@ export default function simulateDrag(options) {
toRect.cy = lastRect.y + lastRect.h + 50;
}
const dragInterval = setInterval(() => {
const progress = (new Date().getTime() - startTime) / duration;
let startTime;
// Called within dragFn when the drag should finish
const finishFn = () => {
if (options.ondragend) options.ondragend();
if (options.performDrop) {
simulateEvent(toEl, 'mouseup');
}
window.SIMULATE_DRAG_ACTIVE = 0;
};
const dragFn = (timestamp) => {
if (!startTime) {
startTime = timestamp;
}
const elapsed = timestamp - startTime;
// Make sure that progress maxes at 1
const progress = Math.min(elapsed / duration, 1);
const x = fromRect.cx + (toRect.cx - fromRect.cx) * progress;
const y = fromRect.cy + (toRect.cy - fromRect.cy + options.extraHeight) * progress;
const overEl = fromEl.ownerDocument.elementFromPoint(x, y);
@ -152,16 +171,15 @@ export default function simulateDrag(options) {
});
if (progress >= 1) {
if (options.ondragend) options.ondragend();
if (options.performDrop) {
simulateEvent(toEl, 'mouseup');
}
clearInterval(dragInterval);
window.SIMULATE_DRAG_ACTIVE = 0;
// finish on next frame, so we can pause in the correct position for a frame
requestAnimationFrame(finishFn);
} else {
requestAnimationFrame(dragFn);
}
}, 100);
};
// Start the drag animation
requestAnimationFrame(dragFn);
return {
target: fromEl,

View File

@ -141,6 +141,7 @@ export default {
variant="link"
:icon="descriptionVersionToggleIcon"
data-testid="compare-btn"
class="gl-vertical-align-text-bottom"
@click="toggleDescriptionVersion"
>{{ __('Compare with previous version') }}</gl-button
>
@ -149,6 +150,7 @@ export default {
:icon="showLines ? 'chevron-up' : 'chevron-down'"
variant="link"
data-testid="outdated-lines-change-btn"
class="gl-vertical-align-text-bottom"
@click="toggleDiff"
>
{{ __('Compare changes') }}

View File

@ -23,6 +23,7 @@ module CycleAnalyticsParams
opts[:from] = params[:from] || start_date(params)
opts[:to] = params[:to] if params[:to]
opts[:end_event_filter] = params[:end_event_filter] if params[:end_event_filter]
opts[:use_aggregated_data_collector] = params[:use_aggregated_data_collector] if params[:use_aggregated_data_collector]
opts.merge!(params.slice(*::Gitlab::Analytics::CycleAnalytics::RequestParams::FINDER_PARAM_NAMES))
opts.merge!(date_range(params))
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
# Serves the group-level CRM contacts pages. Routing between the list and
# the "new contact" form is handled client-side by the Vue app, so both
# actions render the same template.
class Groups::Crm::ContactsController < Groups::ApplicationController
feature_category :team_planning
before_action :authorize_read_crm_contact!
# GET /groups/:group_id/-/crm/contacts/new — same page as the index;
# the Vue router picks up the /new path and shows the form.
def new
render action: "index"
end
private
# Responds with 404 when the user lacks :read_crm_contact on the group.
def authorize_read_crm_contact!
render_404 unless can?(current_user, :read_crm_contact, group)
end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
# Serves the group-level CRM organizations page (index only).
class Groups::Crm::OrganizationsController < Groups::ApplicationController
feature_category :team_planning
before_action :authorize_read_crm_organization!
private
# Responds with 404 when the user lacks :read_crm_organization on the group.
def authorize_read_crm_organization!
render_404 unless can?(current_user, :read_crm_organization, group)
end
end

View File

@ -1,30 +0,0 @@
# frozen_string_literal: true
class Groups::CrmController < Groups::ApplicationController
feature_category :team_planning
before_action :authorize_read_crm_contact!, only: [:contacts]
before_action :authorize_read_crm_organization!, only: [:organizations]
def contacts
respond_to do |format|
format.html
end
end
def organizations
respond_to do |format|
format.html
end
end
private
def authorize_read_crm_contact!
render_404 unless can?(current_user, :read_crm_contact, group)
end
def authorize_read_crm_organization!
render_404 unless can?(current_user, :read_crm_organization, group)
end
end

View File

@ -8,6 +8,8 @@ module Groups
feature_category :pipeline_authoring
urgency :low, [:show]
def show
respond_to do |format|
format.json do

View File

@ -6,6 +6,7 @@ class Projects::Ci::LintsController < Projects::ApplicationController
feature_category :pipeline_authoring
respond_to :json, only: [:create]
urgency :low, [:create]
def show
end

View File

@ -9,6 +9,8 @@ class Projects::Ci::PipelineEditorController < Projects::ApplicationController
feature_category :pipeline_authoring
urgency :low, [:show]
def show
end

View File

@ -5,6 +5,8 @@ class Projects::VariablesController < Projects::ApplicationController
feature_category :pipeline_authoring
urgency :low, [:show, :update]
def show
respond_to do |format|
format.json do

View File

@ -87,9 +87,13 @@ class GroupDescendantsFinder
visible_to_user = visible_to_user.or(authorized_to_user)
end
hierarchy_for_parent
.descendants
.where(visible_to_user)
group_to_query = if Feature.enabled?(:linear_group_descendants_finder, current_user, default_enabled: :yaml)
parent_group
else
hierarchy_for_parent
end
group_to_query.descendants.where(visible_to_user)
# rubocop: enable CodeReuse/Finder
end
# rubocop: enable CodeReuse/ActiveRecord
@ -155,7 +159,13 @@ class GroupDescendantsFinder
# rubocop: disable CodeReuse/ActiveRecord
def projects_matching_filter
# rubocop: disable CodeReuse/Finder
projects_nested_in_group = Project.where(namespace_id: hierarchy_for_parent.base_and_descendants.select(:id))
objects_in_hierarchy = if Feature.enabled?(:linear_group_descendants_finder, current_user, default_enabled: :yaml)
parent_group.self_and_descendants.as_ids
else
hierarchy_for_parent.base_and_descendants.select(:id)
end
projects_nested_in_group = Project.where(namespace_id: objects_in_hierarchy)
params_with_search = params.merge(search: params[:filter])
ProjectsFinder.new(params: params_with_search,

View File

@ -134,6 +134,10 @@ class BulkImports::Entity < ApplicationRecord
source_type == 'group_entity'
end
def update_service
"::#{pluralized_name.capitalize}::UpdateService".constantize
end
private
def validate_parent_is_a_group

View File

@ -5,6 +5,8 @@ module BulkImports
class BaseConfig
include Gitlab::Utils::StrongMemoize
UPLOADS_RELATION = 'uploads'
def initialize(portable)
@portable = portable
end
@ -78,7 +80,7 @@ module BulkImports
end
def file_relations
[]
[UPLOADS_RELATION]
end
def skipped_relations

View File

@ -3,8 +3,6 @@
module BulkImports
module FileTransfer
class ProjectConfig < BaseConfig
UPLOADS_RELATION = 'uploads'
SKIPPED_RELATIONS = %w(
project_members
group_members
@ -14,10 +12,6 @@ module BulkImports
::Gitlab::ImportExport.config_file
end
def file_relations
[UPLOADS_RELATION]
end
def skipped_relations
SKIPPED_RELATIONS
end

View File

@ -236,7 +236,12 @@ module Ci
pipeline.run_after_commit do
PipelineHooksWorker.perform_async(pipeline.id)
ExpirePipelineCacheWorker.perform_async(pipeline.id)
if Feature.enabled?(:expire_job_and_pipeline_cache_synchronously, pipeline.project, default_enabled: :yaml)
Ci::ExpirePipelineCacheService.new.execute(pipeline) # rubocop: disable CodeReuse/ServiceClass
else
ExpirePipelineCacheWorker.perform_async(pipeline.id)
end
end
end

View File

@ -188,7 +188,12 @@ class CommitStatus < Ci::ApplicationRecord
commit_status.run_after_commit do
PipelineProcessWorker.perform_async(pipeline_id) unless transition_options[:skip_pipeline_processing]
ExpireJobCacheWorker.perform_async(id)
if Feature.enabled?(:expire_job_and_pipeline_cache_synchronously, project, default_enabled: :yaml)
expire_etag_cache!
else
ExpireJobCacheWorker.perform_async(id)
end
end
end
@ -301,6 +306,12 @@ class CommitStatus < Ci::ApplicationRecord
.update_all(retried: true, processed: true)
end
def expire_etag_cache!
job_path = Gitlab::Routing.url_helpers.project_build_path(project, id, format: :json)
Gitlab::EtagCaching::Store.new.touch(job_path)
end
private
def unrecoverable_failure?

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true
class AuditEventService
include AuditEventSaveType
# Instantiates a new service
#
# @param [User] author the user who authors the change
@ -10,13 +12,16 @@ class AuditEventService
# - Group: events are visible at Group and Instance level
# - User: events are visible at Instance level
# @param [Hash] details extra data of audit event
# @param [Symbol] save_type the type to save the event
# Can be selected from the following, :database, :stream, :database_and_stream .
#
# @return [AuditEventService]
def initialize(author, entity, details = {})
def initialize(author, entity, details = {}, save_type = :database_and_stream)
@author = build_author(author)
@entity = entity
@details = details
@ip_address = resolve_ip_address(@author)
@save_type = save_type
end
# Builds the @details attribute for authentication
@ -133,8 +138,8 @@ class AuditEventService
end
def save_or_track(event)
event.save!
stream_event_to_external_destinations(event)
event.save! if should_save_database?(@save_type)
stream_event_to_external_destinations(event) if should_save_stream?(@save_type)
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e, audit_event_type: event.class.to_s)
end

View File

@ -5,6 +5,7 @@ module BulkImports
include Gitlab::ImportExport::CommandLineUtil
BATCH_SIZE = 100
AVATAR_PATH = 'avatar'
def initialize(portable, export_path)
@portable = portable
@ -34,7 +35,7 @@ module BulkImports
def export_subdir_path(upload)
subdir = if upload.path == avatar_path
'avatar'
AVATAR_PATH
else
upload.try(:secret).to_s
end

View File

@ -74,20 +74,25 @@ module Ci
def update_etag_cache(pipeline, store)
project = pipeline.project
store.touch(project_pipelines_path(project))
store.touch(commit_pipelines_path(project, pipeline.commit)) unless pipeline.commit.nil?
store.touch(new_merge_request_pipelines_path(project))
etag_paths = [
project_pipelines_path(project),
new_merge_request_pipelines_path(project),
graphql_project_on_demand_scan_counts_path(project)
]
etag_paths << commit_pipelines_path(project, pipeline.commit) unless pipeline.commit.nil?
each_pipelines_merge_request_path(pipeline) do |path|
store.touch(path)
etag_paths << path
end
pipeline.self_with_upstreams_and_downstreams.each do |relative_pipeline|
store.touch(project_pipeline_path(relative_pipeline.project, relative_pipeline))
store.touch(graphql_pipeline_path(relative_pipeline))
store.touch(graphql_pipeline_sha_path(relative_pipeline.sha))
pipeline.self_with_upstreams_and_downstreams.includes(project: [:route, { namespace: :route }]).each do |relative_pipeline| # rubocop: disable CodeReuse/ActiveRecord
etag_paths << project_pipeline_path(relative_pipeline.project, relative_pipeline)
etag_paths << graphql_pipeline_path(relative_pipeline)
etag_paths << graphql_pipeline_sha_path(relative_pipeline.sha)
end
store.touch(graphql_project_on_demand_scan_counts_path(project))
store.touch(*etag_paths)
end
def url_helpers

View File

@ -36,6 +36,10 @@ module Ci
update_pipeline!
update_statuses_processed!
if Feature.enabled?(:expire_job_and_pipeline_cache_synchronously, pipeline.project, default_enabled: :yaml)
Ci::ExpirePipelineCacheService.new.execute(pipeline)
end
true
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
# Mixin that interprets an audit-event save type as a two-bit mask so a
# caller can ask whether an event should be written to the database,
# streamed to external destinations, or both.
module AuditEventSaveType
# Bit 0b01 = persist to database, bit 0b10 = stream externally.
SAVE_TYPES = {
database: 0b01,
stream: 0b10,
database_and_stream: 0b11
}.freeze
# Dynamically defines two predicates:
#   should_save_database?(type) — true when `type` includes the database bit
#   should_save_stream?(type)   — true when `type` includes the stream bit
# Unknown types return false rather than raising.
[:database, :stream].each do |type|
define_method("should_save_#{type}?") do |param_type|
return false unless save_type_valid?(param_type)
# If the current type does not support query, the result of the `&` operation is 0 .
SAVE_TYPES[param_type] & SAVE_TYPES[type] != 0
end
end
private
# True only for the known SAVE_TYPES keys.
def save_type_valid?(type)
SAVE_TYPES.key?(type)
end
end

View File

@ -2,6 +2,8 @@
module MergeRequests
class AfterCreateService < MergeRequests::BaseService
include Gitlab::Utils::StrongMemoize
def execute(merge_request)
prepare_for_mergeability(merge_request) if early_prepare_for_mergeability?(merge_request)
prepare_merge_request(merge_request)
@ -48,7 +50,9 @@ module MergeRequests
end
def early_prepare_for_mergeability?(merge_request)
Feature.enabled?(:early_prepare_for_mergeability, merge_request.target_project)
strong_memoize("early_prepare_for_mergeability_#{merge_request.target_project_id}".to_sym) do
Feature.enabled?(:early_prepare_for_mergeability, merge_request.target_project)
end
end
def mark_as_unchecked(merge_request)

View File

@ -1,4 +0,0 @@
- breadcrumb_title _('Customer Relations Contacts')
- page_title _('Customer Relations Contacts')
#js-crm-contacts-app{ data: { group_full_path: @group.full_path, group_issues_path: issues_group_path(@group) } }

View File

@ -0,0 +1,4 @@
- breadcrumb_title _('Customer Relations Contacts')
- page_title _('Customer Relations Contacts')
#js-crm-contacts-app{ data: { group_full_path: @group.full_path, group_issues_path: issues_group_path(@group), group_id: @group.id, can_admin_crm_contact: can?(current_user, :admin_crm_contact, @group).to_s, base_path: group_crm_contacts_path(@group) } }

View File

@ -14,8 +14,8 @@
- if can_modify_blob?(@blob)
= render 'projects/blob/remove'
- title = "Replace #{@blob.name}"
= render 'projects/blob/upload', title: title, placeholder: title, button_title: 'Replace file', form_path: project_update_blob_path(@project, @id), method: :put
- title = _("Replace %{blob_name}") % { blob_name: @blob.name }
= render 'projects/blob/upload', title: title, placeholder: title, button_title: _('Replace file'), form_path: project_update_blob_path(@project, @id), method: :put
= render partial: 'pipeline_tour_success' if show_suggest_pipeline_creation_celebration?
= render 'shared/web_ide_path'

View File

@ -15,19 +15,10 @@ class ExpireJobCacheWorker # rubocop:disable Scalability/IdempotentWorker
idempotent!
def perform(job_id)
job = CommitStatus.preload(:pipeline, :project).find_by_id(job_id) # rubocop: disable CodeReuse/ActiveRecord
job = CommitStatus.find_by_id(job_id)
return unless job
pipeline = job.pipeline
project = job.project
Gitlab::EtagCaching::Store.new.touch(project_job_path(project, job))
ExpirePipelineCacheWorker.perform_async(pipeline.id)
end
private
def project_job_path(project, job)
Gitlab::Routing.url_helpers.project_build_path(project, job.id, format: :json)
job.expire_etag_cache!
ExpirePipelineCacheWorker.perform_async(job.pipeline_id)
end
end

View File

@ -0,0 +1,8 @@
---
name: expire_job_and_pipeline_cache_synchronously
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75611
rollout_issue_url: https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1380
milestone: '14.6'
type: development
group: group::project management
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: linear_group_descendants_finder
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68954
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/339440
milestone: '14.6'
type: development
group: group::access
default_enabled: false

View File

@ -0,0 +1,24 @@
---
key_path: settings.snowplow_enabled
name: snowplow_enabled_gitlab_instance
description: Whether snowplow is enabled for the GitLab instance
product_section: growth
product_stage: growth
product_group: group::product intelligence
product_category: product intelligence
value_type: boolean
status: active
milestone: "14.6"
introduced_by_url: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75184'
time_frame: none
data_source: system
instrumentation_class: SnowplowEnabledMetric
data_category: optional
performance_indicator_type: []
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate

View File

@ -0,0 +1,24 @@
---
key_path: settings.snowplow_configured_to_gitlab_collector
name: snowplow_configured_to_gitlab_collector
description: Informs whether the currently configured Snowplow collector hostname points to the GitLab Snowplow collection pipeline.
product_section: growth
product_stage: growth
product_group: group::product intelligence
product_category: product intelligence
value_type: boolean
status: active
milestone: "14.6"
introduced_by_url: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75184'
time_frame: none
data_source: system
instrumentation_class: SnowplowConfiguredToGitlabCollectorMetric
data_category: optional
performance_indicator_type: []
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate

View File

@ -126,11 +126,9 @@ constraints(::Constraints::GroupUrlConstrainer.new) do
end
end
resources :crm, only: [] do
collection do
get 'contacts'
get 'organizations'
end
namespace :crm do
resources :contacts, only: [:index, :new]
resources :organizations, only: [:index]
end
end

View File

@ -60,7 +60,7 @@ What is not covered:
NOTE:
Before following any of those steps, make sure you have `root` access to the
**secondary** to promote it, since there isn't provided an automated way to
**secondary** to promote it, because no automated way is provided to
promote a Geo replica and perform a failover.
NOTE:
@ -122,7 +122,7 @@ follow these steps to avoid unnecessary data loss:
From this point, users are unable to view their data or make changes on the
**primary** site. They are also unable to log in to the **secondary** site.
However, existing sessions need to work for the remainder of the maintenance period, and
However, existing sessions must work for the remainder of the maintenance period, and
so public data is accessible throughout.
1. Verify the **primary** site is blocked to HTTP traffic by visiting it in browser via
@ -135,7 +135,7 @@ follow these steps to avoid unnecessary data loss:
1. On the **primary** site:
1. On the top bar, select **Menu > Admin**.
1. On the left sidebar, select **Monitoring > Background Jobs**.
1. On the Sidekiq dhasboard, select **Cron**.
1. On the Sidekiq dashboard, select **Cron**.
1. Select `Disable All` to disable any non-Geo periodic background jobs.
1. Select `Enable` for the `geo_sidekiq_cron_config_worker` cron job.
This job re-enables several other cron jobs that are essential for planned
@ -176,7 +176,7 @@ follow these steps to avoid unnecessary data loss:
At this point, your **secondary** site contains an up-to-date copy of everything the
**primary** site has, meaning nothing is lost when you fail over.
1. In this final step, you need to permanently disable the **primary** site.
1. In this final step, you must permanently disable the **primary** site.
WARNING:
When the **primary** site goes offline, there may be data saved on the **primary** site
@ -204,7 +204,7 @@ follow these steps to avoid unnecessary data loss:
```
NOTE:
(**CentOS only**) In CentOS 6 or older, there is no easy way to prevent GitLab from being
(**CentOS only**) In CentOS 6 or older, it is challenging to prevent GitLab from being
started if the machine reboots isn't available (see [Omnibus GitLab issue #3058](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/3058)).
It may be safest to uninstall the GitLab package completely with `sudo yum remove gitlab-ee`.
@ -216,7 +216,7 @@ follow these steps to avoid unnecessary data loss:
- If you do not have SSH access to the **primary** site, take the machine offline and
prevent it from rebooting. Since there are many ways you may prefer to accomplish
this, we avoid a single recommendation. You may need to:
this, we avoid a single recommendation. You may have to:
- Reconfigure the load balancers.
- Change DNS records (for example, point the **primary** DNS record to the

View File

@ -52,7 +52,7 @@ Before following any of those steps, make sure you have `root` access to the
promote a Geo replica and perform a failover.
NOTE:
GitLab 13.9 through GitLab 14.3 are affected by a bug in which the Geo secondary site statuses will appear to stop updating and become unhealthy. For more information, see [Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](../../replication/troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
GitLab 13.9 through GitLab 14.3 are affected by a bug in which the Geo secondary site statuses appears to stop updating and become unhealthy. For more information, see [Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](../../replication/troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
On the **secondary** site, navigate to the **Admin Area > Geo** dashboard to
review its status. Replicated objects (shown in green) should be close to 100%,
@ -73,7 +73,7 @@ A common cause of replication failures is the data being missing on the
**primary** site - you can resolve these failures by restoring the data from backup,
or removing references to the missing data.
The maintenance window won't end until Geo replication and verification is
The maintenance window does not end until Geo replication and verification is
completely finished. To keep the window as short as possible, you should
ensure these processes are close to 100% as possible during active use.
@ -123,7 +123,7 @@ follow these steps to avoid unnecessary data loss:
1. On the Sidekiq dashboard, select **Cron**.
1. Select `Disable All` to disable any non-Geo periodic background jobs.
1. Select `Enable` for the `geo_sidekiq_cron_config_worker` cron job.
This job will re-enable several other cron jobs that are essential for planned
This job re-enables several other cron jobs that are essential for planned
failover to complete successfully.
1. Finish replicating and verifying all data:

View File

@ -18,7 +18,7 @@ The released package versions are in the format `MAJOR.MINOR.PATCH-EDITION.OMNIB
|-------------------|---------|---------|
| MAJOR.MINOR.PATCH | The GitLab version this corresponds to. | 13.3.0 |
| EDITION | The edition of GitLab this corresponds to. | ee |
| OMNIBUS_RELEASE | The Omnibus GitLab release. Usually, this will be 0. This is incremented if we need to build a new package without changing the GitLab version. | 0 |
| OMNIBUS_RELEASE | The Omnibus GitLab release. Usually, this is 0. This is incremented if we need to build a new package without changing the GitLab version. | 0 |
## Licenses
@ -27,7 +27,7 @@ See [licensing](licensing.md)
## Defaults
The Omnibus GitLab package requires various configuration to get the components
in working order. If the configuration is not provided, the package will use
in working order. If the configuration is not provided, the package uses
the default values assumed in the package.
These defaults are noted in the package [defaults document](defaults.md).
@ -59,8 +59,8 @@ accidental overwrite of user configuration provided in `/etc/gitlab/gitlab.rb`.
New configuration options are noted in the
[`gitlab.rb.template` file](https://gitlab.com/gitlab-org/omnibus-gitlab/raw/master/files/gitlab-config-template/gitlab.rb.template).
The Omnibus GitLab package also provides convenience command which will
compare the existing user configuration with the latest version of the
The Omnibus GitLab package also provides a convenience command which
compares the existing user configuration with the latest version of the
template contained in the package.
To view a diff between your configuration file and the latest version, run:
@ -76,7 +76,7 @@ characters on each line.
## Init system detection
Omnibus GitLab will attempt to query the underlaying system in order to
Omnibus GitLab attempts to query the underlying system in order to
check which init system it uses.
This manifests itself as a `WARNING` during the `sudo gitlab-ctl reconfigure`
run.

View File

@ -177,7 +177,7 @@ NOTE:
You can also
[scope](../../ci/environments/index.md#scope-environments-with-specs) the
`AUTO_DEVOPS_POSTGRES_CHANNEL`, `AUTO_DEVOPS_POSTGRES_DELETE_V1` and
`POSTGRES_VERSION` variables to specific environments, e.g. `staging`.
`POSTGRES_VERSION` variables to specific environments, for example, `staging`.
1. Set `AUTO_DEVOPS_POSTGRES_CHANNEL` to `2`. This opts into using the
newer 8.2.1-based PostgreSQL, and removes the older 0.7.1-based

View File

@ -29,9 +29,9 @@ If you meet all the requirements above, follow these instructions in order. Ther
| [Geo](#geo-deployment) | GitLab EE with Geo enabled |
| [Multi-node / HA with Geo](#multi-node--ha-deployment-with-geo) | GitLab CE/EE on multiple nodes |
Each type of deployment will require that you hot reload the `puma` and `sidekiq` processes on all nodes running these
Each type of deployment requires that you hot reload the `puma` and `sidekiq` processes on all nodes running these
services after you've upgraded. The reason for this is that those processes each load the GitLab Rails application which reads and loads
the database schema into memory when starting up. Each of these processes will need to be reloaded (or restarted in the case of `sidekiq`)
the database schema into memory when starting up. Each of these processes needs to be reloaded (or restarted in the case of `sidekiq`)
to re-read any database changes that have been made by post-deployment migrations.
Most of the time you can safely upgrade from a patch release to the next minor
@ -176,14 +176,14 @@ Upgrades on web (Puma) nodes must be done in a rolling manner, one after
another, ensuring at least one node is always up to serve traffic. This is
required to ensure zero-downtime.
Puma will enter a blackout period as part of the upgrade, during which they
continue to accept connections but will mark their respective health check
Puma enters a blackout period as part of the upgrade, during which nodes
continue to accept connections but mark their respective health check
endpoints to be unhealthy. On seeing this, the load balancer should disconnect
them gracefully.
Puma will restart only after completing all the currently processing requests.
Puma restarts only after completing all the currently processing requests.
This ensures data and service integrity. Once they have restarted, the health
check end points will be marked healthy.
check endpoints are marked healthy.
The nodes must be updated in the following order to update an HA instance using
load balancer to latest GitLab version.
@ -254,7 +254,7 @@ the application.
Before you update the main application you need to update Praefect.
Out of your Praefect nodes, pick one to be your Praefect deploy node.
This is where you will install the new Omnibus package first and run
This is where you install the new Omnibus package first and run
database migrations.
**Praefect deploy node**
@ -360,7 +360,7 @@ node throughout the process.
- If you're using PgBouncer:
You'll need to bypass PgBouncer and connect directly to the database master
You need to bypass PgBouncer and connect directly to the database master
before running migrations.
Rails uses an advisory lock when attempting to run a migration to prevent
@ -457,7 +457,7 @@ following command to get address of current Redis primary
```
- If your application node is running a version older than GitLab 12.7.0, you
will have to run the underlying `redis-cli` command (which `get-redis-master`
have to run the underlying `redis-cli` command (which `get-redis-master`
command uses) to fetch information about the primary.
1. Get the address of one of the sentinel nodes specified as
@ -653,7 +653,7 @@ setting `gitlab_rails['auto_migrate'] = false` in
This section describes the steps required to upgrade a multi-node / HA
deployment with Geo. Some steps must be performed on a particular node. This
node will be known as the “deploy node” and is noted through the following
node is known as the “deploy node” and is noted through the following
instructions.
Updates must be performed in the following order:
@ -737,7 +737,7 @@ sudo touch /etc/gitlab/skip-auto-reconfigure
1. If you're using PgBouncer:
You'll need to bypass PgBouncer and connect directly to the database master
You need to bypass PgBouncer and connect directly to the database master
before running migrations.
Rails uses an advisory lock when attempting to run a migration to prevent

View File

@ -350,7 +350,7 @@ request, and you're automatically signed in.
### Sign in by using a WebAuthn device
In supported browsers you should be automatically prompted to activate your WebAuthn device
(e.g. by touching/pressing its button) after entering your credentials.
(for example, by touching/pressing its button) after entering your credentials.
A message displays, indicating that your device responded to the authentication
request and you're automatically signed in.

View File

@ -48,7 +48,7 @@ When importing issues from a CSV file, it must be formatted in a certain way:
- **double-quote character:** The double-quote (`"`) character is used to quote fields,
enabling the use of the column separator within a field (see the third line in the
sample CSV data below). To insert a double-quote (`"`) within a quoted
field, use two double-quote characters in succession, i.e. `""`.
field, use two double-quote characters in succession (`""`).
- **data rows:** After the header row, succeeding rows must follow the same column
order. The issue title is required while the description is optional.

View File

@ -161,7 +161,7 @@ such as: ``http.get(`${__ENV.ENVIRONMENT_URL}`)``.
For example:
1. In the `review` job:
1. Capture the dynamic URL and save it into a `.env` file, e.g. `echo "ENVIRONMENT_URL=$CI_ENVIRONMENT_URL" >> review.env`.
1. Capture the dynamic URL and save it into a `.env` file, for example, `echo "ENVIRONMENT_URL=$CI_ENVIRONMENT_URL" >> review.env`.
1. Set the `.env` file to be a [job artifact](../../../ci/pipelines/job_artifacts.md#job-artifacts).
1. In the `load_performance` job:
1. Set it to depend on the review job, so it inherits the environment file.

View File

@ -166,7 +166,7 @@ module API
params do
requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
end
get ':id/pipelines/:pipeline_id/variables', feature_category: :pipeline_authoring do
get ':id/pipelines/:pipeline_id/variables', feature_category: :pipeline_authoring, urgency: :low do
authorize! :read_pipeline_variable, pipeline
present pipeline.variables, with: Entities::Ci::Variable

View File

@ -33,7 +33,7 @@ module API
optional :dry_run, type: Boolean, default: false, desc: 'Run pipeline creation simulation, or only do static check.'
optional :include_jobs, type: Boolean, desc: 'Whether or not to include CI jobs in the response'
end
get ':id/ci/lint' do
get ':id/ci/lint', urgency: :low do
authorize! :download_code, user_project
content = user_project.repository.gitlab_ci_yml_for(user_project.commit.id, user_project.ci_config_path_or_default)
@ -54,7 +54,7 @@ module API
optional :dry_run, type: Boolean, default: false, desc: 'Run pipeline creation simulation, or only do static check.'
optional :include_jobs, type: Boolean, desc: 'Whether or not to include CI jobs in the response'
end
post ':id/ci/lint' do
post ':id/ci/lint', urgency: :low do
authorize! :create_pipeline, user_project
result = Gitlab::Ci::Lint

View File

@ -8,6 +8,9 @@ module BulkImports
include Gitlab::ImportExport::CommandLineUtil
FILENAME = 'uploads.tar.gz'
AVATAR_PATTERN = %r{.*\/#{BulkImports::UploadsExportService::AVATAR_PATH}\/(?<identifier>.*)}.freeze
AvatarLoadingError = Class.new(StandardError)
def extract(context)
download_service(tmp_dir, context).execute
@ -18,14 +21,18 @@ module BulkImports
end
def load(context, file_path)
dynamic_path = FileUploader.extract_dynamic_path(file_path)
avatar_path = AVATAR_PATTERN.match(file_path)
return save_avatar(file_path) if avatar_path
dynamic_path = file_uploader.extract_dynamic_path(file_path)
return unless dynamic_path
return if File.directory?(file_path)
named_captures = dynamic_path.named_captures.symbolize_keys
UploadService.new(context.portable, File.open(file_path, 'r'), FileUploader, **named_captures).execute
UploadService.new(context.portable, File.open(file_path, 'r'), file_uploader, **named_captures).execute
end
def after_run(_)
@ -46,6 +53,24 @@ module BulkImports
def tmp_dir
@tmp_dir ||= Dir.mktmpdir('bulk_imports')
end
def file_uploader
@file_uploader ||= if context.entity.group?
NamespaceFileUploader
else
FileUploader
end
end
def save_avatar(file_path)
File.open(file_path) do |avatar|
service = context.entity.update_service.new(portable, current_user, avatar: avatar)
unless service.execute
raise AvatarLoadingError, portable.errors.full_messages.to_sentence
end
end
end
end
end
end

View File

@ -1,49 +0,0 @@
# frozen_string_literal: true
module BulkImports
  module Groups
    module Pipelines
      # Bulk-import pipeline that downloads a source group's avatar over the
      # API and attaches it to the destination group.
      class GroupAvatarPipeline
        include Pipeline

        # Content types accepted for the downloaded avatar file.
        # application/octet-stream is allowed because some servers serve
        # binary downloads with that generic type.
        ALLOWED_AVATAR_DOWNLOAD_TYPES = (AvatarUploader::MIME_WHITELIST + %w(application/octet-stream)).freeze

        # Raised when the downloaded avatar cannot be attached to the group.
        GroupAvatarLoadingError = Class.new(StandardError)

        # Downloads the source group's avatar into a fresh temporary
        # directory (recorded in context.extra for later cleanup) and wraps
        # the resulting file path in ExtractedData.
        def extract(context)
          context.extra[:tmpdir] = Dir.mktmpdir

          filepath = BulkImports::FileDownloadService.new(
            configuration: context.configuration,
            relative_url: "/groups/#{context.entity.encoded_source_full_path}/avatar",
            dir: context.extra[:tmpdir],
            file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
            allowed_content_types: ALLOWED_AVATAR_DOWNLOAD_TYPES
          ).execute

          BulkImports::Pipeline::ExtractedData.new(data: { filepath: filepath })
        end

        # Attaches the downloaded file as the destination group's avatar.
        # No-op when no file was extracted; raises GroupAvatarLoadingError
        # (with the first validation message) when the update service fails.
        def load(context, data)
          return if data.blank?

          File.open(data[:filepath]) do |avatar|
            service = ::Groups::UpdateService.new(
              portable,
              current_user,
              avatar: avatar
            )

            unless service.execute
              raise GroupAvatarLoadingError, portable.errors.full_messages.first
            end
          end
        end

        # Removes the temporary download directory created in #extract,
        # regardless of whether the load succeeded.
        def after_run(_)
          FileUtils.remove_entry(context.extra[:tmpdir]) if context.extra[:tmpdir].present?
        end
      end
    end
  end
end

View File

@ -11,10 +11,6 @@ module BulkImports
pipeline: BulkImports::Groups::Pipelines::GroupPipeline,
stage: 0
},
avatar: {
pipeline: BulkImports::Groups::Pipelines::GroupAvatarPipeline,
stage: 1
},
subgroups: {
pipeline: BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline,
stage: 1
@ -39,6 +35,10 @@ module BulkImports
pipeline: BulkImports::Common::Pipelines::BoardsPipeline,
stage: 2
},
uploads: {
pipeline: BulkImports::Common::Pipelines::UploadsPipeline,
stage: 2
},
finisher: {
pipeline: BulkImports::Common::Pipelines::EntityFinisher,
stage: 3

View File

@ -12,14 +12,18 @@ module Gitlab
Gitlab::Redis::SharedState.with { |redis| redis.get(redis_shared_state_key(key)) }
end
def touch(key, only_if_missing: false)
etag = generate_etag
def touch(*keys, only_if_missing: false)
etags = keys.map { generate_etag }
Gitlab::Redis::SharedState.with do |redis|
redis.set(redis_shared_state_key(key), etag, ex: EXPIRY_TIME, nx: only_if_missing)
redis.pipelined do
keys.each_with_index do |key, i|
redis.set(redis_shared_state_key(key), etags[i], ex: EXPIRY_TIME, nx: only_if_missing)
end
end
end
etag
keys.size > 1 ? etags : etags.first
end
private

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
module Gitlab
  module Usage
    module Metrics
      module Instrumentations
        # Service Ping metric reporting whether this instance sends its
        # Snowplow events to the GitLab-operated collector endpoint.
        class SnowplowConfiguredToGitlabCollectorMetric < GenericMetric
          GITLAB_SNOWPLOW_COLLECTOR_HOSTNAME = 'snowplow.trx.gitlab.net'

          # @return [Boolean] true when the configured collector hostname
          #   equals the GitLab-operated Snowplow collector hostname.
          def value
            configured_hostname = Gitlab::CurrentSettings.snowplow_collector_hostname

            configured_hostname == GITLAB_SNOWPLOW_COLLECTOR_HOSTNAME
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
module Gitlab
  module Usage
    module Metrics
      module Instrumentations
        # Service Ping metric reporting whether Snowplow tracking is
        # enabled on this instance.
        class SnowplowEnabledMetric < GenericMetric
          # @return [Boolean] the instance-level snowplow_enabled? setting.
          def value
            current_settings = Gitlab::CurrentSettings

            current_settings.snowplow_enabled?
          end
        end
      end
    end
  end
end

View File

@ -228,7 +228,9 @@ module Gitlab
operating_system: alt_usage_data(fallback: nil) { operating_system },
gitaly_apdex: alt_usage_data { gitaly_apdex },
collected_data_categories: add_metric('CollectedDataCategoriesMetric', time_frame: 'none'),
service_ping_features_enabled: add_metric('ServicePingFeaturesMetric', time_frame: 'none')
service_ping_features_enabled: add_metric('ServicePingFeaturesMetric', time_frame: 'none'),
snowplow_enabled: add_metric('SnowplowEnabledMetric', time_frame: 'none'),
snowplow_configured_to_gitlab_collector: add_metric('SnowplowConfiguredToGitlabCollectorMetric', time_frame: 'none')
}
}
end

View File

@ -32,7 +32,7 @@ module Sidebars
def contacts_menu_item
::Sidebars::MenuItem.new(
title: _('Contacts'),
link: contacts_group_crm_index_path(context.group),
link: group_crm_contacts_path(context.group),
active_routes: { path: 'groups/crm#contacts' },
item_id: :crm_contacts
)
@ -41,7 +41,7 @@ module Sidebars
def organizations_menu_item
::Sidebars::MenuItem.new(
title: _('Organizations'),
link: organizations_group_crm_index_path(context.group),
link: group_crm_organizations_path(context.group),
active_routes: { path: 'groups/crm#organizations' },
item_id: :crm_organizations
)

View File

@ -10154,12 +10154,36 @@ msgstr ""
msgid "Critical vulnerabilities present"
msgstr ""
msgid "Crm|Create new contact"
msgstr ""
msgid "Crm|Customer Relations Contacts"
msgstr ""
msgid "Crm|Description (optional)"
msgstr ""
msgid "Crm|Email"
msgstr ""
msgid "Crm|First name"
msgstr ""
msgid "Crm|Last name"
msgstr ""
msgid "Crm|New contact"
msgstr ""
msgid "Crm|No contacts found"
msgstr ""
msgid "Crm|No organizations found"
msgstr ""
msgid "Crm|Phone number (optional)"
msgstr ""
msgid "Cron Timezone"
msgstr ""
@ -29196,6 +29220,9 @@ msgstr ""
msgid "Replace"
msgstr ""
msgid "Replace %{blob_name}"
msgstr ""
msgid "Replace %{name}"
msgstr ""

View File

@ -3,12 +3,12 @@
require 'active_support/environment_inquirer'
module Rails # rubocop:disable Gitlab/NamespacedClass
module Rails
extend self
def env
@env ||= ActiveSupport::EnvironmentInquirer.new(
ENV["RAILS_ENV"].presence || ENV["RACK_ENV"].presence || "test"
ENV["RAILS_ENV"].presence || ENV["RACK_ENV"].presence || "development"
)
end

View File

@ -7,7 +7,7 @@
# to all necessary constants. For example, we need Rails.root to
# determine the location of bin/metrics-server.
# Here we make the necessary constants available conditionally.
require_relative '../scripts/override_rails_constants' unless Object.const_defined?('Rails')
require_relative 'override_rails_constants' unless Object.const_defined?('Rails')
require_relative '../config/settings'

View File

@ -4,18 +4,11 @@ module QA
module Resource
class GroupDeployToken < Base
attr_accessor :name, :expires_at
attr_writer :scopes
attribute :username do
Page::Group::Settings::Repository.perform do |repository_page|
repository_page.expand_deploy_tokens(&:token_username)
end
end
attribute :password do
Page::Group::Settings::Repository.perform do |repository_page|
repository_page.expand_deploy_tokens(&:token_password)
end
end
attribute :id
attribute :token
attribute :username
attribute :group do
Group.fabricate! do |resource|
@ -24,11 +17,33 @@ module QA
end
end
attribute :project do
Project.fabricate! do |resource|
resource.name = 'project-to-deploy'
resource.description = 'project for adding deploy token test'
end
def fabricate_via_api!
super
end
def api_get_path
"/groups/#{group.id}/deploy_tokens"
end
def api_post_path
api_get_path
end
def api_post_body
{
name: @name,
scopes: @scopes
}
end
def api_delete_path
"/groups/#{group.id}/deploy_tokens/#{id}"
end
def resource_web_url(resource)
super
rescue ResourceURLMissingError
# this particular resource does not expose a web_url property
end
def fabricate!

View File

@ -21,9 +21,14 @@ module QA
end
let(:group_deploy_token) do
Resource::GroupDeployToken.fabricate_via_browser_ui! do |deploy_token|
Resource::GroupDeployToken.fabricate_via_api! do |deploy_token|
deploy_token.name = 'nuget-group-deploy-token'
deploy_token.group = project.group
deploy_token.scopes = %w[
read_repository
read_package_registry
write_package_registry
]
end
end
@ -70,7 +75,7 @@ module QA
when :ci_job_token
'${CI_JOB_TOKEN}'
when :group_deploy_token
"\"#{group_deploy_token.password}\""
"\"#{group_deploy_token.token}\""
end
end

View File

@ -29,7 +29,9 @@ const mergePurgeCSSOptions = (...options) =>
const getStartupCSS = async ({ htmlPaths, cssPaths, purgeOptions }) => {
const content = htmlPaths.map((htmlPath) => {
if (!fs.existsSync(htmlPath)) {
die(`Could not find fixture "${htmlPath}". Have you run the fixtures?`);
die(
`Could not find fixture "${htmlPath}". Have you run the fixtures? (bundle exec rspec spec/frontend/fixtures/startup_css.rb)`,
);
}
const rawHtml = fs.readFileSync(htmlPath);

View File

@ -13,7 +13,17 @@ require 'active_support/string_inquirer'
ENV['SKIP_RAILS_ENV_IN_RAKE'] = 'true'
require_relative 'override_rails_constants'
# Minimal stand-in for the Rails constant so standalone scripts can run
# without booting the full Rails application.
module Rails
  extend self

  # Repository root directory (the parent of this script's directory).
  def root
    Pathname.new(File.expand_path('..', __dir__))
  end

  # Memoized environment inquirer; falls back to "test" when neither
  # RAILS_ENV nor RACK_ENV is set.
  def env
    @_env ||= begin
      environment_name = ENV["RAILS_ENV"] || ENV["RACK_ENV"] || "test"

      ActiveSupport::StringInquirer.new(environment_name)
    end
  end
end
ActiveSupport::Dependencies.autoload_paths << 'lib'

View File

@ -3,101 +3,125 @@
require 'spec_helper'
RSpec.describe 'admin deploy keys' do
include Spec::Support::Helpers::ModalHelpers
let_it_be(:admin) { create(:admin) }
let!(:deploy_key) { create(:deploy_key, public: true) }
let!(:another_deploy_key) { create(:another_deploy_key, public: true) }
before do
stub_feature_flags(admin_deploy_keys_vue: false)
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
end
it 'show all public deploy keys' do
visit admin_deploy_keys_path
page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
expect(page).to have_content(deploy_key.title)
expect(page).to have_content(another_deploy_key.title)
end
end
it 'shows all the projects the deploy key has write access' do
write_key = create(:deploy_keys_project, :write_access, deploy_key: deploy_key)
visit admin_deploy_keys_path
page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
expect(page).to have_content(write_key.project.full_name)
end
end
describe 'create a new deploy key' do
let(:new_ssh_key) { attributes_for(:key)[:key] }
before do
shared_examples 'renders deploy keys correctly' do
it 'show all public deploy keys' do
visit admin_deploy_keys_path
click_link 'New deploy key'
end
it 'creates a new deploy key' do
fill_in 'deploy_key_title', with: 'laptop'
fill_in 'deploy_key_key', with: new_ssh_key
click_button 'Create'
expect(current_path).to eq admin_deploy_keys_path
page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
expect(page).to have_content('laptop')
expect(page).to have_content(deploy_key.title)
expect(page).to have_content(another_deploy_key.title)
end
end
end
describe 'update an existing deploy key' do
before do
it 'shows all the projects the deploy key has write access' do
write_key = create(:deploy_keys_project, :write_access, deploy_key: deploy_key)
visit admin_deploy_keys_path
find('tr', text: deploy_key.title).click_link('Edit')
end
it 'updates an existing deploy key' do
fill_in 'deploy_key_title', with: 'new-title'
click_button 'Save changes'
expect(current_path).to eq admin_deploy_keys_path
page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
expect(page).to have_content('new-title')
expect(page).to have_content(write_key.project.full_name)
end
end
end
describe 'remove an existing deploy key' do
before do
visit admin_deploy_keys_path
describe 'create a new deploy key' do
let(:new_ssh_key) { attributes_for(:key)[:key] }
before do
visit admin_deploy_keys_path
click_link 'New deploy key'
end
it 'creates a new deploy key' do
fill_in 'deploy_key_title', with: 'laptop'
fill_in 'deploy_key_key', with: new_ssh_key
click_button 'Create'
expect(current_path).to eq admin_deploy_keys_path
page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
expect(page).to have_content('laptop')
end
end
end
it 'removes an existing deploy key' do
find('tr', text: deploy_key.title).click_link('Remove')
describe 'update an existing deploy key' do
before do
visit admin_deploy_keys_path
page.within('tr', text: deploy_key.title) do
click_link(_('Edit deploy key'))
end
end
expect(current_path).to eq admin_deploy_keys_path
page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
expect(page).not_to have_content(deploy_key.title)
it 'updates an existing deploy key' do
fill_in 'deploy_key_title', with: 'new-title'
click_button 'Save changes'
expect(current_path).to eq admin_deploy_keys_path
page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
expect(page).to have_content('new-title')
end
end
end
end
context 'when `admin_deploy_keys_vue` feature flag is enabled', :js do
before do
stub_feature_flags(admin_deploy_keys_vue: true)
it_behaves_like 'renders deploy keys correctly'
visit admin_deploy_keys_path
describe 'remove an existing deploy key' do
before do
visit admin_deploy_keys_path
end
it 'removes an existing deploy key' do
accept_gl_confirm('Are you sure you want to delete this deploy key?', button_text: 'Delete') do
page.within('tr', text: deploy_key.title) do
click_button _('Delete deploy key')
end
end
expect(current_path).to eq admin_deploy_keys_path
page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
expect(page).not_to have_content(deploy_key.title)
end
end
end
end
context 'when `admin_deploy_keys_vue` feature flag is disabled' do
before do
stub_feature_flags(admin_deploy_keys_vue: false)
end
it 'renders the Vue app', :aggregate_failures do
expect(page).to have_content('Public deploy keys')
expect(page).to have_selector('[data-testid="deploy-keys-list"]')
expect(page).to have_link('New deploy key', href: new_admin_deploy_key_path)
it_behaves_like 'renders deploy keys correctly'
describe 'remove an existing deploy key' do
before do
visit admin_deploy_keys_path
end
it 'removes an existing deploy key' do
page.within('tr', text: deploy_key.title) do
click_link _('Remove deploy key')
end
expect(current_path).to eq admin_deploy_keys_path
page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
expect(page).not_to have_content(deploy_key.title)
end
end
end
end
end

View File

@ -4,7 +4,12 @@ require 'spec_helper'
RSpec.describe GroupDescendantsFinder do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be_with_reload(:group) do
create(:group).tap do |g|
g.add_owner(user)
end
end
let(:params) { {} }
@ -12,254 +17,262 @@ RSpec.describe GroupDescendantsFinder do
described_class.new(current_user: user, parent_group: group, params: params)
end
before do
group.add_owner(user)
end
describe '#has_children?' do
it 'is true when there are projects' do
create(:project, namespace: group)
expect(finder.has_children?).to be_truthy
end
context 'when there are subgroups' do
shared_examples 'group descentants finder examples' do
describe '#has_children?' do
it 'is true when there are projects' do
create(:group, parent: group)
create(:project, namespace: group)
expect(finder.has_children?).to be_truthy
end
end
end
describe '#execute' do
it 'includes projects' do
project = create(:project, namespace: group)
context 'when there are subgroups' do
it 'is true when there are projects' do
create(:group, parent: group)
expect(finder.execute).to contain_exactly(project)
end
context 'when archived is `true`' do
let(:params) { { archived: 'true' } }
it 'includes archived projects' do
archived_project = create(:project, namespace: group, archived: true)
project = create(:project, namespace: group)
expect(finder.execute).to contain_exactly(archived_project, project)
end
end
context 'when archived is `only`' do
let(:params) { { archived: 'only' } }
it 'includes only archived projects' do
archived_project = create(:project, namespace: group, archived: true)
_project = create(:project, namespace: group)
expect(finder.execute).to contain_exactly(archived_project)
end
end
it 'does not include archived projects' do
_archived_project = create(:project, :archived, namespace: group)
expect(finder.execute).to be_empty
end
context 'with a filter' do
let(:params) { { filter: 'test' } }
it 'includes only projects matching the filter' do
_other_project = create(:project, namespace: group)
matching_project = create(:project, namespace: group, name: 'testproject')
expect(finder.execute).to contain_exactly(matching_project)
end
end
it 'sorts elements by name as default' do
project1 = create(:project, namespace: group, name: 'z')
project2 = create(:project, namespace: group, name: 'a')
expect(subject.execute).to eq([project2, project1])
end
context 'sorting by name' do
let!(:project1) { create(:project, namespace: group, name: 'a', path: 'project-a') }
let!(:project2) { create(:project, namespace: group, name: 'z', path: 'project-z') }
let(:params) do
{
sort: 'name_asc'
}
end
it 'sorts elements by name' do
expect(subject.execute).to eq(
[
project1,
project2
]
)
end
context 'with nested groups' do
let!(:subgroup1) { create(:group, parent: group, name: 'a', path: 'sub-a') }
let!(:subgroup2) { create(:group, parent: group, name: 'z', path: 'sub-z') }
it 'sorts elements by name' do
expect(subject.execute).to eq(
[
subgroup1,
subgroup2,
project1,
project2
]
)
expect(finder.has_children?).to be_truthy
end
end
end
it 'does not include projects shared with the group' do
project = create(:project, namespace: group)
other_project = create(:project)
other_project.project_group_links.create!(group: group,
group_access: Gitlab::Access::MAINTAINER)
expect(finder.execute).to contain_exactly(project)
end
end
context 'with shared groups' do
let_it_be(:other_group) { create(:group) }
let_it_be(:shared_group_link) do
create(:group_group_link,
shared_group: group,
shared_with_group: other_group)
end
context 'without common ancestor' do
it { expect(finder.execute).to be_empty }
end
context 'with common ancestor' do
let_it_be(:common_ancestor) { create(:group) }
let_it_be(:other_group) { create(:group, parent: common_ancestor) }
let_it_be(:group) { create(:group, parent: common_ancestor) }
context 'querying under the common ancestor' do
it { expect(finder.execute).to be_empty }
end
context 'querying the common ancestor' do
subject(:finder) do
described_class.new(current_user: user, parent_group: common_ancestor, params: params)
end
it 'contains shared subgroups' do
expect(finder.execute).to contain_exactly(group, other_group)
end
end
end
end
context 'with nested groups' do
let!(:project) { create(:project, namespace: group) }
let!(:subgroup) { create(:group, :private, parent: group) }
describe '#execute' do
it 'contains projects and subgroups' do
expect(finder.execute).to contain_exactly(subgroup, project)
end
it 'includes projects' do
project = create(:project, namespace: group)
it 'does not include subgroups the user does not have access to' do
subgroup.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
public_subgroup = create(:group, :public, parent: group, path: 'public-group')
other_subgroup = create(:group, :private, parent: group, path: 'visible-private-group')
other_user = create(:user)
other_subgroup.add_developer(other_user)
finder = described_class.new(current_user: other_user, parent_group: group)
expect(finder.execute).to contain_exactly(public_subgroup, other_subgroup)
end
it 'only includes public groups when no user is given' do
public_subgroup = create(:group, :public, parent: group)
_private_subgroup = create(:group, :private, parent: group)
finder = described_class.new(current_user: nil, parent_group: group)
expect(finder.execute).to contain_exactly(public_subgroup)
expect(finder.execute).to contain_exactly(project)
end
context 'when archived is `true`' do
let(:params) { { archived: 'true' } }
it 'includes archived projects in the count of subgroups' do
create(:project, namespace: subgroup, archived: true)
it 'includes archived projects' do
archived_project = create(:project, namespace: group, archived: true)
project = create(:project, namespace: group)
expect(finder.execute.first.preloaded_project_count).to eq(1)
expect(finder.execute).to contain_exactly(archived_project, project)
end
end
context 'when archived is `only`' do
let(:params) { { archived: 'only' } }
it 'includes only archived projects' do
archived_project = create(:project, namespace: group, archived: true)
_project = create(:project, namespace: group)
expect(finder.execute).to contain_exactly(archived_project)
end
end
it 'does not include archived projects' do
_archived_project = create(:project, :archived, namespace: group)
expect(finder.execute).to be_empty
end
context 'with a filter' do
let(:params) { { filter: 'test' } }
it 'contains only matching projects and subgroups' do
matching_project = create(:project, namespace: group, name: 'Testproject')
matching_subgroup = create(:group, name: 'testgroup', parent: group)
it 'includes only projects matching the filter' do
_other_project = create(:project, namespace: group)
matching_project = create(:project, namespace: group, name: 'testproject')
expect(finder.execute).to contain_exactly(matching_subgroup, matching_project)
expect(finder.execute).to contain_exactly(matching_project)
end
end
it 'sorts elements by name as default' do
project1 = create(:project, namespace: group, name: 'z')
project2 = create(:project, namespace: group, name: 'a')
expect(subject.execute).to match_array([project2, project1])
end
context 'sorting by name' do
let!(:project1) { create(:project, namespace: group, name: 'a', path: 'project-a') }
let!(:project2) { create(:project, namespace: group, name: 'z', path: 'project-z') }
let(:params) do
{
sort: 'name_asc'
}
end
it 'sorts elements by name' do
expect(subject.execute).to eq(
[
project1,
project2
]
)
end
context 'with nested groups' do
let!(:subgroup1) { create(:group, parent: group, name: 'a', path: 'sub-a') }
let!(:subgroup2) { create(:group, parent: group, name: 'z', path: 'sub-z') }
it 'sorts elements by name' do
expect(subject.execute).to eq(
[
subgroup1,
subgroup2,
project1,
project2
]
)
end
end
end
it 'does not include projects shared with the group' do
project = create(:project, namespace: group)
other_project = create(:project)
other_project.project_group_links.create!(group: group,
group_access: Gitlab::Access::MAINTAINER)
expect(finder.execute).to contain_exactly(project)
end
end
context 'with shared groups' do
let_it_be(:other_group) { create(:group) }
let_it_be(:shared_group_link) do
create(:group_group_link,
shared_group: group,
shared_with_group: other_group)
end
context 'without common ancestor' do
it { expect(finder.execute).to be_empty }
end
context 'with common ancestor' do
let_it_be(:common_ancestor) { create(:group) }
let_it_be(:other_group) { create(:group, parent: common_ancestor) }
let_it_be(:group) { create(:group, parent: common_ancestor) }
context 'querying under the common ancestor' do
it { expect(finder.execute).to be_empty }
end
context 'querying the common ancestor' do
subject(:finder) do
described_class.new(current_user: user, parent_group: common_ancestor, params: params)
end
it 'contains shared subgroups' do
expect(finder.execute).to contain_exactly(group, other_group)
end
end
end
end
context 'with nested groups' do
let!(:project) { create(:project, namespace: group) }
let!(:subgroup) { create(:group, :private, parent: group) }
describe '#execute' do
it 'contains projects and subgroups' do
expect(finder.execute).to contain_exactly(subgroup, project)
end
it 'does not include subgroups the user does not have access to' do
_invisible_subgroup = create(:group, :private, parent: group, name: 'test1')
other_subgroup = create(:group, :private, parent: group, name: 'test2')
public_subgroup = create(:group, :public, parent: group, name: 'test3')
other_subsubgroup = create(:group, :private, parent: other_subgroup, name: 'test4')
subgroup.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
public_subgroup = create(:group, :public, parent: group, path: 'public-group')
other_subgroup = create(:group, :private, parent: group, path: 'visible-private-group')
other_user = create(:user)
other_subgroup.add_developer(other_user)
finder = described_class.new(current_user: other_user,
parent_group: group,
params: params)
finder = described_class.new(current_user: other_user, parent_group: group)
expect(finder.execute).to contain_exactly(other_subgroup, public_subgroup, other_subsubgroup)
expect(finder.execute).to contain_exactly(public_subgroup, other_subgroup)
end
context 'with matching children' do
it 'includes a group that has a subgroup matching the query and its parent' do
matching_subgroup = create(:group, :private, name: 'testgroup', parent: subgroup)
it 'only includes public groups when no user is given' do
public_subgroup = create(:group, :public, parent: group)
_private_subgroup = create(:group, :private, parent: group)
expect(finder.execute).to contain_exactly(subgroup, matching_subgroup)
finder = described_class.new(current_user: nil, parent_group: group)
expect(finder.execute).to contain_exactly(public_subgroup)
end
context 'when archived is `true`' do
let(:params) { { archived: 'true' } }
it 'includes archived projects in the count of subgroups' do
create(:project, namespace: subgroup, archived: true)
expect(finder.execute.first.preloaded_project_count).to eq(1)
end
end
context 'with a filter' do
let(:params) { { filter: 'test' } }
it 'contains only matching projects and subgroups' do
matching_project = create(:project, namespace: group, name: 'Testproject')
matching_subgroup = create(:group, name: 'testgroup', parent: group)
expect(finder.execute).to contain_exactly(matching_subgroup, matching_project)
end
it 'includes the parent of a matching project' do
matching_project = create(:project, namespace: subgroup, name: 'Testproject')
it 'does not include subgroups the user does not have access to' do
_invisible_subgroup = create(:group, :private, parent: group, name: 'test1')
other_subgroup = create(:group, :private, parent: group, name: 'test2')
public_subgroup = create(:group, :public, parent: group, name: 'test3')
other_subsubgroup = create(:group, :private, parent: other_subgroup, name: 'test4')
other_user = create(:user)
other_subgroup.add_developer(other_user)
expect(finder.execute).to contain_exactly(subgroup, matching_project)
finder = described_class.new(current_user: other_user,
parent_group: group,
params: params)
expect(finder.execute).to contain_exactly(other_subgroup, public_subgroup, other_subsubgroup)
end
context 'with a small page size' do
let(:params) { { filter: 'test', per_page: 1 } }
context 'with matching children' do
it 'includes a group that has a subgroup matching the query and its parent' do
matching_subgroup = create(:group, :private, name: 'testgroup', parent: subgroup)
it 'contains all the ancestors of a matching subgroup regardless the page size' do
subgroup = create(:group, :private, parent: group)
matching = create(:group, :private, name: 'testgroup', parent: subgroup)
expect(finder.execute).to contain_exactly(subgroup, matching)
expect(finder.execute).to contain_exactly(subgroup, matching_subgroup)
end
end
it 'does not include the parent itself' do
group.update!(name: 'test')
it 'includes the parent of a matching project' do
matching_project = create(:project, namespace: subgroup, name: 'Testproject')
expect(finder.execute).not_to include(group)
expect(finder.execute).to contain_exactly(subgroup, matching_project)
end
context 'with a small page size' do
let(:params) { { filter: 'test', per_page: 1 } }
it 'contains all the ancestors of a matching subgroup regardless the page size' do
subgroup = create(:group, :private, parent: group)
matching = create(:group, :private, name: 'testgroup', parent: subgroup)
expect(finder.execute).to contain_exactly(subgroup, matching)
end
end
it 'does not include the parent itself' do
group.update!(name: 'test')
expect(finder.execute).not_to include(group)
end
end
end
end
end
end
it_behaves_like 'group descentants finder examples'
context 'when feature flag :linear_group_descendants_finder is disabled' do
before do
stub_feature_flags(linear_group_descendants_finder: false)
end
it_behaves_like 'group descentants finder examples'
end
end

View File

@ -1,40 +1,62 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import VueRouter from 'vue-router';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
import ContactsRoot from '~/crm/components/contacts_root.vue';
import NewContactForm from '~/crm/components/new_contact_form.vue';
import getGroupContactsQuery from '~/crm/components/queries/get_group_contacts.query.graphql';
import { getGroupContactsQueryResponse } from './mock_data';
jest.mock('~/flash');
describe('Customer relations contacts root app', () => {
Vue.use(VueApollo);
Vue.use(VueRouter);
let wrapper;
let fakeApollo;
let router;
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findRowByName = (rowName) => wrapper.findAllByRole('row', { name: rowName });
const findIssuesLinks = () => wrapper.findAllByTestId('issues-link');
const findNewContactButton = () => wrapper.findByTestId('new-contact-button');
const findNewContactForm = () => wrapper.findComponent(NewContactForm);
const findError = () => wrapper.findComponent(GlAlert);
const successQueryHandler = jest.fn().mockResolvedValue(getGroupContactsQueryResponse);
const basePath = '/groups/flightjs/-/crm/contacts';
const mountComponent = ({
queryHandler = successQueryHandler,
mountFunction = shallowMountExtended,
canAdminCrmContact = true,
} = {}) => {
fakeApollo = createMockApollo([[getGroupContactsQuery, queryHandler]]);
wrapper = mountFunction(ContactsRoot, {
provide: { groupFullPath: 'flightjs', groupIssuesPath: '/issues' },
router,
provide: {
groupFullPath: 'flightjs',
groupIssuesPath: '/issues',
groupId: 26,
canAdminCrmContact,
},
apolloProvider: fakeApollo,
});
};
beforeEach(() => {
router = new VueRouter({
base: basePath,
mode: 'history',
routes: [],
});
});
afterEach(() => {
wrapper.destroy();
fakeApollo = null;
router = null;
});
it('should render loading spinner', () => {
@ -43,23 +65,94 @@ describe('Customer relations contacts root app', () => {
expect(findLoadingIcon().exists()).toBe(true);
});
it('should render error message on reject', async () => {
mountComponent({ queryHandler: jest.fn().mockRejectedValue('ERROR') });
await waitForPromises();
describe('new contact button', () => {
it('should exist when user has permission', () => {
mountComponent();
expect(createFlash).toHaveBeenCalled();
expect(findNewContactButton().exists()).toBe(true);
});
it('should not exist when user has no permission', () => {
mountComponent({ canAdminCrmContact: false });
expect(findNewContactButton().exists()).toBe(false);
});
});
it('renders correct results', async () => {
mountComponent({ mountFunction: mountExtended });
await waitForPromises();
describe('new contact form', () => {
it('should not exist by default', async () => {
mountComponent();
await waitForPromises();
expect(findRowByName(/Marty/i)).toHaveLength(1);
expect(findRowByName(/George/i)).toHaveLength(1);
expect(findRowByName(/jd@gitlab.com/i)).toHaveLength(1);
expect(findNewContactForm().exists()).toBe(false);
});
const issueLink = findIssuesLinks().at(0);
expect(issueLink.exists()).toBe(true);
expect(issueLink.attributes('href')).toBe('/issues?scope=all&state=opened&crm_contact_id=16');
it('should exist when user clicks new contact button', async () => {
mountComponent();
findNewContactButton().vm.$emit('click');
await waitForPromises();
expect(findNewContactForm().exists()).toBe(true);
});
it('should exist when user navigates directly to /new', async () => {
router.replace({ path: '/new' });
mountComponent();
await waitForPromises();
expect(findNewContactForm().exists()).toBe(true);
});
it('should not exist when form emits close', async () => {
router.replace({ path: '/new' });
mountComponent();
findNewContactForm().vm.$emit('close');
await waitForPromises();
expect(findNewContactForm().exists()).toBe(false);
});
});
describe('error', () => {
it('should exist on reject', async () => {
mountComponent({ queryHandler: jest.fn().mockRejectedValue('ERROR') });
await waitForPromises();
expect(findError().exists()).toBe(true);
});
it('should exist when new contact form emits error', async () => {
router.replace({ path: '/new' });
mountComponent();
findNewContactForm().vm.$emit('error');
await waitForPromises();
expect(findError().exists()).toBe(true);
});
});
describe('on successful load', () => {
it('should not render error', async () => {
mountComponent();
await waitForPromises();
expect(findError().exists()).toBe(false);
});
it('renders correct results', async () => {
mountComponent({ mountFunction: mountExtended });
await waitForPromises();
expect(findRowByName(/Marty/i)).toHaveLength(1);
expect(findRowByName(/George/i)).toHaveLength(1);
expect(findRowByName(/jd@gitlab.com/i)).toHaveLength(1);
const issueLink = findIssuesLinks().at(0);
expect(issueLink.exists()).toBe(true);
expect(issueLink.attributes('href')).toBe('/issues?scope=all&state=opened&crm_contact_id=16');
});
});
});

View File

@ -40,7 +40,6 @@ export const getGroupContactsQueryResponse = {
organization: null,
},
],
__typename: 'CustomerRelationsContactConnection',
},
},
},
@ -79,3 +78,31 @@ export const getGroupOrganizationsQueryResponse = {
},
},
};
export const createContactMutationResponse = {
data: {
customerRelationsContactCreate: {
__typeName: 'CustomerRelationsContactCreatePayload',
contact: {
__typename: 'CustomerRelationsContact',
id: 'gid://gitlab/CustomerRelations::Contact/1',
firstName: 'A',
lastName: 'B',
email: 'C',
phone: null,
description: null,
organization: null,
},
errors: [],
},
},
};
export const createContactMutationErrorResponse = {
data: {
customerRelationsContactCreate: {
contact: null,
errors: ['Phone is invalid.'],
},
},
};

View File

@ -0,0 +1,108 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import NewContactForm from '~/crm/components/new_contact_form.vue';
import createContactMutation from '~/crm/components/queries/create_contact.mutation.graphql';
import getGroupContactsQuery from '~/crm/components/queries/get_group_contacts.query.graphql';
import {
createContactMutationErrorResponse,
createContactMutationResponse,
getGroupContactsQueryResponse,
} from './mock_data';
describe('Customer relations contacts root app', () => {
Vue.use(VueApollo);
let wrapper;
let fakeApollo;
let queryHandler;
const findCreateNewContactButton = () => wrapper.findByTestId('create-new-contact-button');
const findCancelButton = () => wrapper.findByTestId('cancel-button');
const findForm = () => wrapper.find('form');
const mountComponent = ({ mountFunction = shallowMountExtended } = {}) => {
fakeApollo = createMockApollo([[createContactMutation, queryHandler]]);
fakeApollo.clients.defaultClient.cache.writeQuery({
query: getGroupContactsQuery,
variables: { groupFullPath: 'flightjs' },
data: getGroupContactsQueryResponse.data,
});
wrapper = mountFunction(NewContactForm, {
provide: { groupId: 26, groupFullPath: 'flightjs' },
apolloProvider: fakeApollo,
});
};
beforeEach(() => {
queryHandler = jest.fn().mockResolvedValue(createContactMutationResponse);
});
afterEach(() => {
wrapper.destroy();
fakeApollo = null;
});
describe('Create new contact button', () => {
it('should be disabled by default', () => {
mountComponent();
expect(findCreateNewContactButton().attributes('disabled')).toBeTruthy();
});
it('should not be disabled when first, last and email have values', async () => {
mountComponent();
wrapper.find('#contact-first-name').vm.$emit('input', 'A');
wrapper.find('#contact-last-name').vm.$emit('input', 'B');
wrapper.find('#contact-email').vm.$emit('input', 'C');
await waitForPromises();
expect(findCreateNewContactButton().attributes('disabled')).toBeFalsy();
});
});
it("should emit 'close' when cancel button is clicked", () => {
mountComponent();
findCancelButton().vm.$emit('click');
expect(wrapper.emitted().close).toBeTruthy();
});
describe('when query is successful', () => {
it("should emit 'close'", async () => {
mountComponent();
findForm().trigger('submit');
await waitForPromises();
expect(wrapper.emitted().close).toBeTruthy();
});
});
describe('when query fails', () => {
it('should emit error on reject', async () => {
queryHandler = jest.fn().mockRejectedValue('ERROR');
mountComponent();
findForm().trigger('submit');
await waitForPromises();
expect(wrapper.emitted().error).toBeTruthy();
});
it('should emit error on error response', async () => {
queryHandler = jest.fn().mockResolvedValue(createContactMutationErrorResponse);
mountComponent();
findForm().trigger('submit');
await waitForPromises();
expect(wrapper.emitted().error[0][0]).toEqual(
createContactMutationErrorResponse.data.customerRelationsContactCreate.errors,
);
});
});
});

View File

@ -31,7 +31,7 @@ export const SEConstExt = () => {
export function SEWithSetupExt() {
return {
onSetup: (setupOptions = {}, instance) => {
onSetup: (instance, setupOptions = {}) => {
if (setupOptions && !Array.isArray(setupOptions)) {
Object.entries(setupOptions).forEach(([key, value]) => {
Object.assign(instance, {

View File

@ -424,7 +424,7 @@ describe('Source Editor Instance', () => {
definition: MyFullExtWithCallbacks,
setupOptions: defSetupOptions,
});
expect(onSetup).toHaveBeenCalledWith(defSetupOptions, seInstance);
expect(onSetup).toHaveBeenCalledWith(seInstance, defSetupOptions);
expect(onUse).toHaveBeenCalledWith(seInstance);
});

View File

@ -1,4 +1,4 @@
import { GlTable, GlLink } from '@gitlab/ui';
import { GlTableLite, GlLink } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import { capitalizeFirstCharacter } from '~/lib/utils/text_utility';
import CiLintResults from '~/pipeline_editor/components/lint/ci_lint_results.vue';
@ -24,7 +24,7 @@ describe('CI Lint Results', () => {
});
};
const findTable = () => wrapper.find(GlTable);
const findTable = () => wrapper.find(GlTableLite);
const findByTestId = (selector) => () => wrapper.find(`[data-testid="ci-lint-${selector}"]`);
const findAllByTestId = (selector) => () =>
wrapper.findAll(`[data-testid="ci-lint-${selector}"]`);

View File

@ -5,11 +5,12 @@ require 'spec_helper'
RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:project) { create(:project) }
let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test') }
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let_it_be(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') }
let_it_be(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt')}
let_it_be(:group) { create(:group) }
let(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') }
let(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt')}
let(:tracker) { create(:bulk_import_tracker, entity: entity) }
let(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject(:pipeline) { described_class.new(context) }
@ -24,57 +25,101 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
end
describe '#run' do
it 'imports uploads into destination portable and removes tmpdir' do
allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [upload_file_path]))
shared_examples 'uploads import' do
describe '#run' do
before do
allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [upload_file_path]))
end
pipeline.run
it 'imports uploads into destination portable and removes tmpdir' do
pipeline.run
expect(project.uploads.map { |u| u.retrieve_uploader.filename }).to include('upload.txt')
expect(portable.uploads.map { |u| u.retrieve_uploader.filename }).to include('upload.txt')
expect(Dir.exist?(tmpdir)).to eq(false)
end
end
expect(Dir.exist?(tmpdir)).to eq(false)
end
describe '#extract' do
it 'downloads & extracts upload paths' do
allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
expect(pipeline).to receive(:untar_zxf)
file_download_service = instance_double("BulkImports::FileDownloadService")
context 'when importing avatar' do
let(:uploads_dir_path) { File.join(tmpdir, 'avatar') }
expect(BulkImports::FileDownloadService)
.to receive(:new)
.with(
configuration: context.configuration,
relative_url: "/projects/test/export_relations/download?relation=uploads",
dir: tmpdir,
filename: 'uploads.tar.gz')
.and_return(file_download_service)
it 'imports avatar' do
FileUtils.touch(File.join(uploads_dir_path, 'avatar.png'))
expect(file_download_service).to receive(:execute)
expect_next_instance_of(entity.update_service) do |service|
expect(service).to receive(:execute)
end
extracted_data = pipeline.extract(context)
pipeline.run
end
expect(extracted_data.data).to contain_exactly(uploads_dir_path, upload_file_path)
end
end
context 'when something goes wrong' do
it 'raises exception' do
allow_next_instance_of(entity.update_service) do |service|
allow(service).to receive(:execute).and_return(nil)
end
describe '#load' do
it 'creates a file upload' do
expect { pipeline.load(context, upload_file_path) }.to change { project.uploads.count }.by(1)
end
pipeline.run
context 'when dynamic path is nil' do
it 'returns' do
expect { pipeline.load(context, File.join(tmpdir, 'test')) }.not_to change { project.uploads.count }
expect(entity.failures.first.exception_class).to include('AvatarLoadingError')
end
end
end
end
context 'when path is a directory' do
it 'returns' do
expect { pipeline.load(context, uploads_dir_path) }.not_to change { project.uploads.count }
describe '#extract' do
it 'downloads & extracts upload paths' do
allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
expect(pipeline).to receive(:untar_zxf)
file_download_service = instance_double("BulkImports::FileDownloadService")
expect(BulkImports::FileDownloadService)
.to receive(:new)
.with(
configuration: context.configuration,
relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=uploads",
dir: tmpdir,
filename: 'uploads.tar.gz')
.and_return(file_download_service)
expect(file_download_service).to receive(:execute)
extracted_data = pipeline.extract(context)
expect(extracted_data.data).to contain_exactly(uploads_dir_path, upload_file_path)
end
end
describe '#load' do
it 'creates a file upload' do
expect { pipeline.load(context, upload_file_path) }.to change { portable.uploads.count }.by(1)
end
context 'when dynamic path is nil' do
it 'returns' do
expect { pipeline.load(context, File.join(tmpdir, 'test')) }.not_to change { portable.uploads.count }
end
end
context 'when path is a directory' do
it 'returns' do
expect { pipeline.load(context, uploads_dir_path) }.not_to change { portable.uploads.count }
end
end
end
end
context 'when importing to group' do
let(:portable) { group }
let(:entity) { create(:bulk_import_entity, :group_entity, group: group, source_full_path: 'test') }
include_examples 'uploads import'
end
context 'when importing to project' do
let(:portable) { project }
let(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test') }
include_examples 'uploads import'
end
end

View File

@ -1,77 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::GroupAvatarPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:entity) do
create(
:bulk_import_entity,
group: group,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'My Destination Group',
destination_namespace: group.full_path
)
end
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject { described_class.new(context) }
describe '#run' do
it 'updates the group avatar' do
avatar_path = 'spec/fixtures/dk.png'
stub_file_download(
avatar_path,
configuration: context.configuration,
relative_url: "/groups/source%2Ffull%2Fpath/avatar",
dir: an_instance_of(String),
file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES
)
expect { subject.run }.to change(context.group, :avatar)
expect(context.group.avatar.filename).to eq(File.basename(avatar_path))
end
it 'raises an error when the avatar upload fails' do
avatar_path = 'spec/fixtures/aosp_manifest.xml'
stub_file_download(
avatar_path,
configuration: context.configuration,
relative_url: "/groups/source%2Ffull%2Fpath/avatar",
dir: an_instance_of(String),
file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES
)
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:error)
.with(
bulk_import_id: context.bulk_import.id,
bulk_import_entity_id: context.entity.id,
bulk_import_entity_type: context.entity.source_type,
context_extra: context.extra,
exception_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline::GroupAvatarLoadingError",
exception_message: "Avatar file format is not supported. Please try one of the following supported formats: image/png, image/jpeg, image/gif, image/bmp, image/tiff, image/vnd.microsoft.icon",
pipeline_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline",
pipeline_step: :loader
)
end
expect { subject.run }.to change(BulkImports::Failure, :count)
end
end
def stub_file_download(filepath = 'file/path.png', **params)
expect_next_instance_of(BulkImports::FileDownloadService, params.presence) do |downloader|
expect(downloader).to receive(:execute).and_return(filepath)
end
end
end

View File

@ -8,13 +8,13 @@ RSpec.describe BulkImports::Groups::Stage do
let(:pipelines) do
[
[0, BulkImports::Groups::Pipelines::GroupPipeline],
[1, BulkImports::Groups::Pipelines::GroupAvatarPipeline],
[1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
[1, BulkImports::Groups::Pipelines::MembersPipeline],
[1, BulkImports::Common::Pipelines::LabelsPipeline],
[1, BulkImports::Common::Pipelines::MilestonesPipeline],
[1, BulkImports::Common::Pipelines::BadgesPipeline],
[2, BulkImports::Common::Pipelines::BoardsPipeline]
[2, BulkImports::Common::Pipelines::BoardsPipeline],
[2, BulkImports::Common::Pipelines::UploadsPipeline]
]
end
@ -24,7 +24,7 @@ RSpec.describe BulkImports::Groups::Stage do
describe '.pipelines' do
it 'list all the pipelines with their stage number, ordered by stage' do
expect(described_class.new(bulk_import).pipelines & pipelines).to eq(pipelines)
expect(described_class.new(bulk_import).pipelines & pipelines).to contain_exactly(*pipelines)
expect(described_class.new(bulk_import).pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
end

View File

@ -80,5 +80,19 @@ RSpec.describe Gitlab::EtagCaching::Store, :clean_gitlab_redis_shared_state do
expect(store.get(key)).to eq(etag)
end
end
context 'with multiple keys' do
let(:keys) { ['/my-group/my-project/builds/234.json', '/api/graphql:pipelines/id/5'] }
it 'stores and returns multiple values' do
etags = store.touch(*keys)
expect(etags.size).to eq(keys.size)
keys.each_with_index do |key, i|
expect(store.get(key)).to eq(etags[i])
end
end
end
end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::SnowplowConfiguredToGitlabCollectorMetric do
using RSpec::Parameterized::TableSyntax
context 'for collector_hostname option' do
where(:collector_hostname, :expected_value) do
'snowplow.trx.gitlab.net' | true
'foo.bar.something.net' | false
end
with_them do
before do
stub_application_setting(snowplow_collector_hostname: collector_hostname)
end
it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
end
end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::SnowplowEnabledMetric do
using RSpec::Parameterized::TableSyntax
context 'for snowplow enabled option' do
where(:snowplow_enabled, :expected_value) do
true | true
false | false
end
with_them do
before do
stub_application_setting(snowplow_enabled: snowplow_enabled)
end
it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
end
end
end

View File

@ -1045,6 +1045,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
describe ".system_usage_data_settings" do
let(:prometheus_client) { double(Gitlab::PrometheusClient) }
let(:snowplow_gitlab_host?) { Gitlab::CurrentSettings.snowplow_collector_hostname == 'snowplow.trx.gitlab.net' }
before do
allow(described_class).to receive(:operating_system).and_return('ubuntu-20.04')
@ -1089,6 +1090,17 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
it 'gathers user_cap_feature_enabled' do
expect(subject[:settings][:user_cap_feature_enabled]).to eq(Gitlab::CurrentSettings.new_user_signups_cap)
end
context 'snowplow stats' do
before do
stub_feature_flags(usage_data_instrumentation: false)
end
it 'gathers snowplow stats' do
expect(subject[:settings][:snowplow_enabled]).to eq(Gitlab::CurrentSettings.snowplow_enabled?)
expect(subject[:settings][:snowplow_configured_to_gitlab_collector]).to eq(snowplow_gitlab_host?)
end
end
end
end

View File

@ -298,4 +298,14 @@ RSpec.describe BulkImports::Entity, type: :model do
expect(entity.wikis_url_path).to eq("/groups/#{entity.encoded_source_full_path}/wikis")
end
end
describe '#update_service' do
it 'returns correct update service class' do
group_entity = build(:bulk_import_entity)
project_entity = build(:bulk_import_entity, :project_entity)
expect(group_entity.update_service).to eq(::Groups::UpdateService)
expect(project_entity.update_service).to eq(::Projects::UpdateService)
end
end
end

View File

@ -1503,10 +1503,30 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe 'pipeline caching' do
it 'performs ExpirePipelinesCacheWorker' do
expect(ExpirePipelineCacheWorker).to receive(:perform_async).with(pipeline.id)
context 'when expire_job_and_pipeline_cache_synchronously is enabled' do
before do
stub_feature_flags(expire_job_and_pipeline_cache_synchronously: true)
end
pipeline.cancel
it 'executes Ci::ExpirePipelineCacheService' do
expect_next_instance_of(Ci::ExpirePipelineCacheService) do |service|
expect(service).to receive(:execute).with(pipeline)
end
pipeline.cancel
end
end
context 'when expire_job_and_pipeline_cache_synchronously is disabled' do
before do
stub_feature_flags(expire_job_and_pipeline_cache_synchronously: false)
end
it 'performs ExpirePipelinesCacheWorker' do
expect(ExpirePipelineCacheWorker).to receive(:perform_async).with(pipeline.id)
pipeline.cancel
end
end
end

View File

@ -46,10 +46,28 @@ RSpec.describe CommitStatus do
describe 'status state machine' do
let!(:commit_status) { create(:commit_status, :running, project: project) }
it 'invalidates the cache after a transition' do
expect(ExpireJobCacheWorker).to receive(:perform_async).with(commit_status.id)
context 'when expire_job_and_pipeline_cache_synchronously is enabled' do
before do
stub_feature_flags(expire_job_and_pipeline_cache_synchronously: true)
end
commit_status.success!
it 'invalidates the cache after a transition' do
expect(commit_status).to receive(:expire_etag_cache!)
commit_status.success!
end
end
context 'when expire_job_and_pipeline_cache_synchronously is disabled' do
before do
stub_feature_flags(expire_job_and_pipeline_cache_synchronously: false)
end
it 'invalidates the cache after a transition' do
expect(ExpireJobCacheWorker).to receive(:perform_async).with(commit_status.id)
commit_status.success!
end
end
describe 'transitioning to running' do
@ -949,4 +967,15 @@ RSpec.describe CommitStatus do
described_class.bulk_insert_tags!(statuses, tag_list_by_build)
end
end
describe '#expire_etag_cache!' do
it 'expires the etag cache' do
expect_next_instance_of(Gitlab::EtagCaching::Store) do |etag_store|
job_path = Gitlab::Routing.url_helpers.project_build_path(project, commit_status.id, format: :json)
expect(etag_store).to receive(:touch).with(job_path)
end
commit_status.expire_etag_cache!
end
end
end

View File

@ -0,0 +1,24 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::UploadsExportService do
let_it_be(:project) { create(:project, avatar: fixture_file_upload('spec/fixtures/rails_sample.png', 'image/png')) }
let_it_be(:upload) { create(:upload, :with_file, :issuable_upload, uploader: FileUploader, model: project) }
let_it_be(:export_path) { Dir.mktmpdir }
subject(:service) { described_class.new(project, export_path) }
after do
FileUtils.remove_entry(export_path) if Dir.exist?(export_path)
end
describe '#execute' do
it 'exports project uploads and avatar' do
subject.execute
expect(File.exist?(File.join(export_path, 'avatar', 'rails_sample.png'))).to eq(true)
expect(File.exist?(File.join(export_path, upload.secret, upload.retrieve_uploader.filename))).to eq(true)
end
end
end

View File

@ -18,14 +18,14 @@ RSpec.describe Ci::ExpirePipelineCacheService do
graphql_pipeline_sha_path = "/api/graphql:pipelines/sha/#{pipeline.sha}"
graphql_project_on_demand_scan_counts_path = "/api/graphql:on_demand_scan/counts/#{project.full_path}"
expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
expect(store).to receive(:touch).with(pipelines_path)
expect(store).to receive(:touch).with(new_mr_pipelines_path)
expect(store).to receive(:touch).with(pipeline_path)
expect(store).to receive(:touch).with(graphql_pipeline_path)
expect(store).to receive(:touch).with(graphql_pipeline_sha_path)
expect(store).to receive(:touch).with(graphql_project_on_demand_scan_counts_path)
end
expect_touched_etag_caching_paths(
pipelines_path,
new_mr_pipelines_path,
pipeline_path,
graphql_pipeline_path,
graphql_pipeline_sha_path,
graphql_project_on_demand_scan_counts_path
)
subject.execute(pipeline)
end
@ -37,9 +37,10 @@ RSpec.describe Ci::ExpirePipelineCacheService do
merge_request_pipelines_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/pipelines.json"
merge_request_widget_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/cached_widget.json"
allow_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch)
expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_pipelines_path)
expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_widget_path)
expect_touched_etag_caching_paths(
merge_request_pipelines_path,
merge_request_widget_path
)
subject.execute(merge_request.all_pipelines.last)
end
@ -78,10 +79,7 @@ RSpec.describe Ci::ExpirePipelineCacheService do
# Expiring a pipeline's cache must also touch the ETag entry of the
# pipeline it depends on (here reached via source.source_pipeline).
it 'updates the cache of dependent pipeline' do
dependent_pipeline_path = "/#{source.source_project.full_path}/-/pipelines/#{source.source_pipeline.id}.json"
# NOTE(review): this span is rendered diff residue — the mock-based
# expectation below (deleted lines) and the expect_touched_etag_caching_paths
# call (added line) both appear; only the helper call should survive in the
# applied file. Confirm against the real post-merge source.
expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
allow(store).to receive(:touch)
expect(store).to receive(:touch).with(dependent_pipeline_path)
end
expect_touched_etag_caching_paths(dependent_pipeline_path)
subject.execute(pipeline)
end
@ -94,13 +92,31 @@ RSpec.describe Ci::ExpirePipelineCacheService do
# Mirror of the upstream case: expiring the pipeline must touch the ETag
# entry of the dependent pipeline reached via source.pipeline.
it 'updates the cache of dependent pipeline' do
dependent_pipeline_path = "/#{source.project.full_path}/-/pipelines/#{source.pipeline.id}.json"
# NOTE(review): diff residue — both the old mock-based expectation (deleted)
# and the new expect_touched_etag_caching_paths call (added) are present
# here; only the helper call belongs in the final file. Verify before use.
expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
allow(store).to receive(:touch)
expect(store).to receive(:touch).with(dependent_pipeline_path)
end
expect_touched_etag_caching_paths(dependent_pipeline_path)
subject.execute(pipeline)
end
end
it 'does not do N+1 queries' do
# Warm any memoized/lazy-loaded state so the recorder measures steady-state queries.
subject.execute(pipeline)
# Baseline query count for a single execution.
control = ActiveRecord::QueryRecorder.new { subject.execute(pipeline) }
# Add two more source-pipeline links; the query count must not scale with them.
create(:ci_sources_pipeline, pipeline: pipeline)
create(:ci_sources_pipeline, source_job: create(:ci_build, pipeline: pipeline))
expect { subject.execute(pipeline) }.not_to exceed_query_limit(control.count)
end
end
# Spec helper: asserts that the next EtagCaching::Store instance has #touch
# invoked with (at least) all of the given cache paths, while still letting
# the real #touch implementation run via and_wrap_original.
#
# NOTE(review): `expect(args).to include(*paths)` runs on EACH #touch call,
# so this assumes the service calls #touch once with every path splatted in.
# If #touch were called once per path, each call would fail the include
# check — confirm Gitlab::EtagCaching::Store#touch's arity/usage.
def expect_touched_etag_caching_paths(*paths)
expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
expect(store).to receive(:touch).and_wrap_original do |m, *args|
expect(args).to include(*paths)
m.call(*args)
end
end
end
end

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe AuditEventSaveType do
  # Mix the module under test into a bare object so its predicate helpers
  # can be called directly.
  subject(:target) { Object.new.extend(described_class) }

  describe '#should_save_database? and #should_save_stream?' do
    using RSpec::Parameterized::TableSyntax

    # Each row: predicate under test | save-type argument | expected answer.
    # Rows are grouped by predicate for readability.
    where(:query_method, :query_param, :result) do
      :should_save_stream?   | :stream              | true
      :should_save_stream?   | :database_and_stream | true
      :should_save_stream?   | :database            | false
      :should_save_stream?   | nil                  | false
      :should_save_database? | :database            | true
      :should_save_database? | :database_and_stream | true
      :should_save_database? | :stream              | false
      :should_save_database? | nil                  | false
    end

    with_them do
      it 'returns corresponding results according to the query_method and query_param' do
        expect(target.send(query_method, query_param)).to eq result
      end
    end
  end
end
end