Add latest changes from gitlab-org/gitlab@master

Parent: 932d504aaa
Commit: 59f160b0cf
49 changed files with 724 additions and 1198 deletions
|
@@ -10,38 +10,6 @@ export default {
      type: String,
      required: true,
    },
    endpoint: {
      type: String,
      required: true,
    },
    projectFullPath: {
      type: String,
      required: true,
    },
    projectId: {
      type: String,
      required: true,
    },
    projectName: {
      type: String,
      required: true,
    },
    projectPath: {
      type: String,
      required: true,
    },
    projectDescription: {
      type: String,
      required: true,
    },
    projectVisibility: {
      type: String,
      required: true,
    },
    restrictedVisibilityLevels: {
      type: Array,
      required: true,
    },
  },
};
</script>

@@ -62,16 +30,7 @@ export default {
      </p>
    </div>
    <div class="col-lg-9">
      <fork-form
        :endpoint="endpoint"
        :project-full-path="projectFullPath"
        :project-id="projectId"
        :project-name="projectName"
        :project-path="projectPath"
        :project-description="projectDescription"
        :project-visibility="projectVisibility"
        :restricted-visibility-levels="restrictedVisibilityLevels"
      />
      <fork-form />
    </div>
  </div>
</template>
||||
|
|
|
@@ -72,40 +72,29 @@ export default {
    visibilityHelpPath: {
      default: '',
    },
  },
  props: {
    endpoint: {
      type: String,
      required: true,
      default: '',
    },
    projectFullPath: {
      type: String,
      required: true,
      default: '',
    },
    projectId: {
      type: String,
      required: true,
      default: '',
    },
    projectName: {
      type: String,
      required: true,
      default: '',
    },
    projectPath: {
      type: String,
      required: true,
      default: '',
    },
    projectDescription: {
      type: String,
      required: false,
      default: '',
    },
    projectVisibility: {
      type: String,
      required: true,
      default: '',
    },
    restrictedVisibilityLevels: {
      type: Array,
      required: true,
      default: [],
    },
  },
  data() {
||||
|
|
|
@ -1,93 +0,0 @@
|
|||
<script>
|
||||
import { GlTabs, GlTab, GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
|
||||
import createFlash from '~/flash';
|
||||
import axios from '~/lib/utils/axios_utils';
|
||||
import { __ } from '~/locale';
|
||||
import ForkGroupsListItem from './fork_groups_list_item.vue';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
GlTabs,
|
||||
GlTab,
|
||||
GlLoadingIcon,
|
||||
GlSearchBoxByType,
|
||||
ForkGroupsListItem,
|
||||
},
|
||||
props: {
|
||||
endpoint: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
namespaces: null,
|
||||
filter: '',
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
filteredNamespaces() {
|
||||
return this.namespaces.filter((n) =>
|
||||
n.name.toLowerCase().includes(this.filter.toLowerCase()),
|
||||
);
|
||||
},
|
||||
},
|
||||
|
||||
mounted() {
|
||||
this.loadGroups();
|
||||
},
|
||||
|
||||
methods: {
|
||||
loadGroups() {
|
||||
axios
|
||||
.get(this.endpoint)
|
||||
.then((response) => {
|
||||
this.namespaces = response.data.namespaces;
|
||||
})
|
||||
.catch(() =>
|
||||
createFlash({
|
||||
message: __('There was a problem fetching groups.'),
|
||||
}),
|
||||
);
|
||||
},
|
||||
},
|
||||
|
||||
i18n: {
|
||||
searchPlaceholder: __('Search by name'),
|
||||
},
|
||||
};
|
||||
</script>
|
||||
<template>
|
||||
<gl-tabs class="fork-groups">
|
||||
<gl-tab :title="__('Groups and subgroups')">
|
||||
<gl-loading-icon v-if="!namespaces" size="md" class="gl-mt-3" />
|
||||
<template v-else-if="namespaces.length === 0">
|
||||
<div class="gl-text-center">
|
||||
<div class="h5">{{ __('No available groups to fork the project.') }}</div>
|
||||
<p class="gl-mt-5">
|
||||
{{ __('You must have permission to create a project in a group before forking.') }}
|
||||
</p>
|
||||
</div>
|
||||
</template>
|
||||
<div v-else-if="filteredNamespaces.length === 0" class="gl-text-center gl-mt-3">
|
||||
{{ s__('GroupsTree|No groups matched your search') }}
|
||||
</div>
|
||||
<ul v-else class="groups-list group-list-tree">
|
||||
<fork-groups-list-item
|
||||
v-for="(namespace, index) in filteredNamespaces"
|
||||
:key="index"
|
||||
:group="namespace"
|
||||
/>
|
||||
</ul>
|
||||
</gl-tab>
|
||||
<template #tabs-end>
|
||||
<gl-search-box-by-type
|
||||
v-if="namespaces && namespaces.length"
|
||||
v-model="filter"
|
||||
:placeholder="$options.i18n.searchPlaceholder"
|
||||
class="gl-align-self-center gl-ml-auto fork-filtered-search"
|
||||
data-qa-selector="fork_groups_list_search_field"
|
||||
/>
|
||||
</template>
|
||||
</gl-tabs>
|
||||
</template>
|
|
@ -1,148 +0,0 @@
|
|||
<script>
|
||||
import {
|
||||
GlLink,
|
||||
GlButton,
|
||||
GlIcon,
|
||||
GlAvatar,
|
||||
GlTooltipDirective,
|
||||
GlTooltip,
|
||||
GlBadge,
|
||||
GlSafeHtmlDirective as SafeHtml,
|
||||
} from '@gitlab/ui';
|
||||
import { VISIBILITY_TYPE_ICON, GROUP_VISIBILITY_TYPE } from '~/groups/constants';
|
||||
import csrf from '~/lib/utils/csrf';
|
||||
import UserAccessRoleBadge from '~/vue_shared/components/user_access_role_badge.vue';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
GlIcon,
|
||||
GlAvatar,
|
||||
GlBadge,
|
||||
GlButton,
|
||||
GlTooltip,
|
||||
GlLink,
|
||||
UserAccessRoleBadge,
|
||||
},
|
||||
directives: {
|
||||
GlTooltip: GlTooltipDirective,
|
||||
SafeHtml,
|
||||
},
|
||||
props: {
|
||||
group: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return { namespaces: null, isForking: false };
|
||||
},
|
||||
|
||||
computed: {
|
||||
rowClass() {
|
||||
return {
|
||||
'has-description': this.group.description,
|
||||
'being-removed': this.isGroupPendingRemoval,
|
||||
};
|
||||
},
|
||||
isGroupPendingRemoval() {
|
||||
return this.group.marked_for_deletion;
|
||||
},
|
||||
hasForkedProject() {
|
||||
return Boolean(this.group.forked_project_path);
|
||||
},
|
||||
visibilityIcon() {
|
||||
return VISIBILITY_TYPE_ICON[this.group.visibility];
|
||||
},
|
||||
visibilityTooltip() {
|
||||
return GROUP_VISIBILITY_TYPE[this.group.visibility];
|
||||
},
|
||||
isSelectButtonDisabled() {
|
||||
return !this.group.can_create_project;
|
||||
},
|
||||
},
|
||||
|
||||
methods: {
|
||||
fork() {
|
||||
this.isForking = true;
|
||||
this.$refs.form.submit();
|
||||
},
|
||||
},
|
||||
|
||||
csrf,
|
||||
};
|
||||
</script>
|
||||
<template>
|
||||
<li :class="rowClass" class="group-row">
|
||||
<div class="group-row-contents gl-display-flex gl-align-items-center gl-py-3 gl-pr-5">
|
||||
<div
|
||||
class="folder-toggle-wrap gl-mr-3 gl-display-flex gl-align-items-center gl-text-gray-500"
|
||||
>
|
||||
<gl-icon name="folder-o" />
|
||||
</div>
|
||||
<gl-link
|
||||
:href="group.relative_path"
|
||||
class="gl-display-none gl-flex-shrink-0 gl-sm-display-flex gl-mr-3"
|
||||
>
|
||||
<gl-avatar :size="32" shape="rect" :entity-name="group.name" :src="group.avatarUrl" />
|
||||
</gl-link>
|
||||
<div class="gl-min-w-0 gl-display-flex gl-flex-grow-1 gl-flex-shrink-1 gl-align-items-center">
|
||||
<div class="gl-min-w-0 gl-flex-grow-1 flex-shrink-1">
|
||||
<div class="title gl-display-flex gl-align-items-center gl-flex-wrap gl-mr-3">
|
||||
<gl-link :href="group.relative_path" class="gl-mt-3 gl-mr-3 gl-text-gray-900!">
|
||||
{{ group.full_name }}
|
||||
</gl-link>
|
||||
<gl-icon
|
||||
v-gl-tooltip.hover.bottom
|
||||
class="gl-display-inline-flex gl-mt-3 gl-mr-3 gl-text-gray-500"
|
||||
:name="visibilityIcon"
|
||||
:title="visibilityTooltip"
|
||||
/>
|
||||
<gl-badge
|
||||
v-if="isGroupPendingRemoval"
|
||||
variant="warning"
|
||||
class="gl-display-none gl-sm-display-flex gl-mt-3 gl-mr-1"
|
||||
>{{ __('pending deletion') }}</gl-badge
|
||||
>
|
||||
<user-access-role-badge v-if="group.permission" class="gl-mt-3">
|
||||
{{ group.permission }}
|
||||
</user-access-role-badge>
|
||||
</div>
|
||||
<div v-if="group.description" class="description gl-line-height-20">
|
||||
<span v-safe-html="group.markdown_description"> </span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="gl-display-flex gl-flex-shrink-0">
|
||||
<gl-button
|
||||
v-if="hasForkedProject"
|
||||
class="gl-h-7 gl-text-decoration-none!"
|
||||
:href="group.forked_project_path"
|
||||
>{{ __('Go to fork') }}</gl-button
|
||||
>
|
||||
<template v-else>
|
||||
<div ref="selectButtonWrapper">
|
||||
<form ref="form" method="POST" :action="group.fork_path">
|
||||
<input type="hidden" name="authenticity_token" :value="$options.csrf.token" />
|
||||
<gl-button
|
||||
type="submit"
|
||||
class="gl-h-7"
|
||||
:data-qa-name="group.full_name"
|
||||
category="secondary"
|
||||
variant="success"
|
||||
:disabled="isSelectButtonDisabled"
|
||||
:loading="isForking"
|
||||
@click="fork"
|
||||
>{{ __('Select') }}</gl-button
|
||||
>
|
||||
</form>
|
||||
</div>
|
||||
<gl-tooltip v-if="isSelectButtonDisabled" :target="() => $refs.selectButtonWrapper">
|
||||
{{
|
||||
__('You must have permission to create a project in a namespace before forking.')
|
||||
}}
|
||||
</gl-tooltip>
|
||||
</template>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</li>
|
||||
</template>
|
|
@@ -1,61 +1,42 @@
import Vue from 'vue';
import App from './components/app.vue';
import ForkGroupsList from './components/fork_groups_list.vue';

const mountElement = document.getElementById('fork-groups-mount-element');

if (gon.features.forkProjectForm) {
  const {
    forkIllustration,
    endpoint,
const {
  forkIllustration,
  endpoint,
  newGroupPath,
  projectFullPath,
  visibilityHelpPath,
  projectId,
  projectName,
  projectPath,
  projectDescription,
  projectVisibility,
  restrictedVisibilityLevels,
} = mountElement.dataset;

// eslint-disable-next-line no-new
new Vue({
  el: mountElement,
  provide: {
    newGroupPath,
    projectFullPath,
    visibilityHelpPath,
    endpoint,
    projectFullPath,
    projectId,
    projectName,
    projectPath,
    projectDescription,
    projectVisibility,
    restrictedVisibilityLevels,
  } = mountElement.dataset;

  // eslint-disable-next-line no-new
  new Vue({
    el: mountElement,
    provide: {
      newGroupPath,
      visibilityHelpPath,
    },
    render(h) {
      return h(App, {
        props: {
          forkIllustration,
          endpoint,
          newGroupPath,
          projectFullPath,
          visibilityHelpPath,
          projectId,
          projectName,
          projectPath,
          projectDescription,
          projectVisibility,
          restrictedVisibilityLevels: JSON.parse(restrictedVisibilityLevels),
        },
      });
    },
  });
} else {
  const { endpoint } = mountElement.dataset;

  // eslint-disable-next-line no-new
  new Vue({
    el: mountElement,
    render(h) {
      return h(ForkGroupsList, {
        props: {
          endpoint,
        },
      });
    },
  });
}
      restrictedVisibilityLevels: JSON.parse(restrictedVisibilityLevels),
    },
  render(h) {
    return h(App, {
      props: {
        forkIllustration,
      },
    });
  },
});
||||
|
|
|
@@ -1,5 +1,5 @@
export const TRACK_TOGGLE_TRAINING_PROVIDER_ACTION = 'toggle_security_training_provider';
export const TRACK_TOGGLE_TRAINING_PROVIDER_LABEL = 'update_security_training_provider';

export const TRACK_CLICK_TRAINING_LINK = 'click_security_training_link';
export const TRACK_PROVIDER_LEARN_MORE_CLICK_ACTION = 'click_link';
export const TRACK_PROVIDER_LEARN_MORE_CLICK_LABEL = 'security_training_provider';
||||
|
|
|
@@ -17,10 +17,6 @@ class Projects::ForksController < Projects::ApplicationController
  feature_category :source_code_management
  urgency :low, [:index]

  before_action do
    push_frontend_feature_flag(:fork_project_form, @project, default_enabled: :yaml)
  end

  def index
    @sort = forks_params[:sort]

@@ -54,9 +50,7 @@ class Projects::ForksController < Projects::ApplicationController
    format.json do
      namespaces = load_namespaces_with_associations - [project.namespace]

      namespaces = [current_user.namespace] + namespaces if
        Feature.enabled?(:fork_project_form, project, default_enabled: :yaml) &&
        can_fork_to?(current_user.namespace)
      namespaces = [current_user.namespace] + namespaces if can_fork_to?(current_user.namespace)

      render json: {
        namespaces: ForkNamespaceSerializer.new.represent(
||||
|
|
|
@@ -1016,8 +1016,23 @@ class MergeRequest < ApplicationRecord
    merge_request_diff.persisted? || create_merge_request_diff
  end

  def create_merge_request_diff
  def eager_fetch_ref!
    return unless valid?

    # has_internal_id normally attempts to allocate the iid in the
    # before_create hook, but we need the iid to be available before
    # that to fetch the ref into the target project.
    track_target_project_iid!
    ensure_target_project_iid!

    fetch_ref!
    # Prevent the after_create hook from fetching the source branch again
    # Drop this field after rollout in https://gitlab.com/gitlab-org/gitlab/-/issues/353044.
    @skip_fetch_ref = true
  end

  def create_merge_request_diff
    fetch_ref! unless skip_fetch_ref

    # n+1: https://gitlab.com/gitlab-org/gitlab/-/issues/19377
    Gitlab::GitalyClient.allow_n_plus_1_calls do

@@ -1950,6 +1965,8 @@ class MergeRequest < ApplicationRecord

  private

  attr_accessor :skip_fetch_ref

  def set_draft_status
    self.draft = draft?
  end
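The point of `eager_fetch_ref!` is ordering: the Gitaly fetch now happens before ActiveRecord opens a transaction to save the record, and `@skip_fetch_ref` stops `create_merge_request_diff` from fetching the source branch a second time. A minimal sketch of that call order, using a hypothetical simplified service method rather than the exact GitLab code:

    # Hypothetical caller: fetch the ref while no DB transaction is open.
    def create_merge_request(merge_request)
      merge_request.eager_fetch_ref!  # allocates the iid, runs fetch_ref!, sets @skip_fetch_ref
      merge_request.save              # the after_create path now skips the second fetch
    end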
||||
|
|
|
@@ -30,14 +30,6 @@ class ForkNamespaceEntity < Grape::Entity
    markdown_description(namespace)
  end

  expose :can_create_project do |namespace, options|
    if Feature.enabled?(:fork_project_form, options[:project], default_enabled: :yaml)
      true
    else
      options[:current_user].can?(:create_projects, namespace)
    end
  end

  private

  # rubocop: disable CodeReuse/ActiveRecord
||||
|
|
|
@@ -31,6 +31,16 @@ module MergeRequests

    private

    def before_create(merge_request)
      # If the fetching of the source branch occurs in an ActiveRecord
      # callback (e.g. after_create), a database transaction will be
      # open while the Gitaly RPC waits. To avoid an idle in transaction
      # timeout, we do this before we attempt to save the merge request.
      if Feature.enabled?(:merge_request_eager_fetch_ref, @project, default_enabled: :yaml)
        merge_request.eager_fetch_ref!
      end
    end

    def set_projects!
      # @project is used to determine whether the user can set the merge request's
      # assignee, milestone and labels. Whether they can depends on their
||||
|
|
|
@@ -41,14 +41,12 @@ module Projects

    # update the manifests of the tags with the new dummy image
    def replace_tag_manifests(dummy_manifest)
      deleted_tags = {}

      @tag_names.each do |name|
      deleted_tags = @tag_names.map do |name|
        digest = @container_repository.client.put_tag(@container_repository.path, name, dummy_manifest)
        next unless digest

        deleted_tags[name] = digest
      end
        [name, digest]
      end.compact.to_h

      # make sure the digests are the same (it should always be)
      digests = deleted_tags.values.uniq
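The rewrite swaps a mutated hash for `map` plus `compact` plus `to_h`, relying on `next` returning `nil` inside `map`. A standalone sketch of the same pattern with toy data (the registry client call is stubbed out):

    # Toy illustration of the map/compact/to_h pattern used above.
    digests = { 'latest' => 'sha256:aaa', 'broken' => nil, 'v1' => 'sha256:bbb' }
    deleted_tags = digests.keys.map do |name|
      digest = digests[name]   # stand-in for client.put_tag(...)
      next unless digest       # `next` inside map yields nil for this element
      [name, digest]
    end.compact.to_h
    # => { 'latest' => 'sha256:aaa', 'v1' => 'sha256:bbb' }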
||||
|
|
|
@@ -160,6 +160,7 @@ module SystemNotes
    body = "changed title from **#{marked_old_title}** to **#{marked_new_title}**"

    issue_activity_counter.track_issue_title_changed_action(author: author) if noteable.is_a?(Issue)
    work_item_activity_counter.track_work_item_title_changed_action(author: author) if noteable.is_a?(WorkItem)

    create_note(NoteSummary.new(noteable, project, author, body, action: 'title'))
  end

@@ -484,6 +485,10 @@ module SystemNotes
    Gitlab::UsageDataCounters::IssueActivityUniqueCounter
  end

  def work_item_activity_counter
    Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter
  end

  def track_cross_reference_action
    issue_activity_counter.track_issue_cross_referenced_action(author: author) if noteable.is_a?(Issue)
  end
||||
|
|
|
@@ -1,20 +0,0 @@
- avatar = namespace_icon(namespace, 100)
- can_create_project = current_user.can?(:create_projects, namespace)

.bordered-box.fork-thumbnail.text-center.gl-m-3.gl-pb-5{ class: ("disabled" unless can_create_project) }
  - if /no_((\w*)_)*avatar/.match(avatar)
    = group_icon(namespace, class: "avatar rect-avatar s100 identicon mx-auto")
  - else
    .avatar-container.s100.mx-auto.gl-mt-5
      = image_tag(avatar, class: "avatar s100")
  %h5.gl-mt-3
    = namespace.human_name
  - if forked_project = namespace.find_fork_of(@project)
    = link_to _("Go to project"), project_path(forked_project), class: "btn gl-button btn-default"
  - else
    %div{ class: ('has-tooltip' unless can_create_project),
      title: (_('You have reached your project limit') unless can_create_project) }
      = link_to _("Select"), project_forks_path(@project, namespace_key: namespace.id),
        data: { qa_selector: 'fork_namespace_button', qa_name: namespace.human_name },
        method: "POST",
        class: ["btn gl-button btn-confirm", ("disabled" unless can_create_project)]
|
@@ -1,30 +1,13 @@
- page_title s_("ForkProject|Fork project")

- if Feature.enabled?(:fork_project_form, @project, default_enabled: :yaml)
  #fork-groups-mount-element{ data: { fork_illustration: image_path('illustrations/project-create-new-sm.svg'),
    endpoint: new_project_fork_path(@project, format: :json),
    new_group_path: new_group_path,
    project_full_path: project_path(@project),
    visibility_help_path: help_page_path("public_access/public_access"),
    project_id: @project.id,
    project_name: @project.name,
    project_path: @project.path,
    project_description: @project.description,
    project_visibility: @project.visibility,
    restricted_visibility_levels: Gitlab::CurrentSettings.restricted_visibility_levels.to_json } }
- else
  .row.gl-mt-3
    .col-lg-3
      %h4.gl-mt-0
        = s_("ForkProject|Fork project")
      %p
        = s_("ForkProject|A fork is a copy of a project.")
        %br
        = s_('ForkProject|Forking a repository allows you to make changes without affecting the original project.')
    .col-lg-9
      - if @own_namespace.present?
        .fork-thumbnail-container.js-fork-content
          %h5.gl-mt-0.gl-mb-0.gl-ml-3.gl-mr-3
            = s_("ForkProject|Select a namespace to fork the project")
          = render 'fork_button', namespace: @own_namespace
      #fork-groups-mount-element{ data: { endpoint: new_project_fork_path(@project, format: :json) } }
#fork-groups-mount-element{ data: { fork_illustration: image_path('illustrations/project-create-new-sm.svg'),
  endpoint: new_project_fork_path(@project, format: :json),
  new_group_path: new_group_path,
  project_full_path: project_path(@project),
  visibility_help_path: help_page_path("public_access/public_access"),
  project_id: @project.id,
  project_name: @project.name,
  project_path: @project.path,
  project_description: @project.description,
  project_visibility: @project.visibility,
  restricted_visibility_levels: Gitlab::CurrentSettings.restricted_visibility_levels.to_json } }
||||
|
|
|
@@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/350884
milestone: '14.0'
type: development
group: group::pipeline execution
default_enabled: false
default_enabled: true
||||
|
|
|
@@ -1,8 +1,8 @@
---
name: fork_project_form
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/53544
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/321387
milestone: '13.10'
name: merge_request_eager_fetch_ref
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/80876
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/353044
milestone: '14.9'
type: development
group: group::source code
default_enabled: true
group: group::code review
default_enabled: false
|
@@ -0,0 +1,8 @@
---
name: track_work_items_activity
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/80532
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/352903
milestone: '14.9'
type: development
group: group::project management
default_enabled: false
|
@@ -0,0 +1,25 @@
---
key_path: redis_hll_counters.work_items.users_updating_work_item_title_monthly
description: Unique users updating a work item's title
product_category: team planning
product_section: dev
product_stage: plan
product_group: group::project management
value_type: number
status: active
milestone: '14.9'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/80532
time_frame: 28d
data_source: redis_hll
data_category: optional
instrumentation_class: RedisHLLMetric
options:
  events:
    - users_updating_work_item_title
distribution:
  - ce
  - ee
tier:
  - free
  - premium
  - ultimate
|
@@ -0,0 +1,25 @@
---
key_path: redis_hll_counters.work_items.users_updating_work_item_title_weekly
description: Unique users updating a work item's title
product_category: team planning
product_section: dev
product_stage: plan
product_group: group::project management
value_type: number
status: active
milestone: '14.9'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/80532
time_frame: 7d
data_source: redis_hll
data_category: optional
instrumentation_class: RedisHLLMetric
options:
  events:
    - users_updating_work_item_title
distribution:
  - ce
  - ee
tier:
  - free
  - premium
  - ultimate
|
@@ -4432,6 +4432,25 @@ Input type: `TimelineEventDestroyInput`
| <a id="mutationtimelineeventdestroyerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationtimelineeventdestroytimelineevent"></a>`timelineEvent` | [`TimelineEventType`](#timelineeventtype) | Timeline event. |

### `Mutation.timelineEventPromoteFromNote`

Input type: `TimelineEventPromoteFromNoteInput`

#### Arguments

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationtimelineeventpromotefromnoteclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationtimelineeventpromotefromnotenoteid"></a>`noteId` | [`NoteID!`](#noteid) | Note ID from which the timeline event promoted. |

#### Fields

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationtimelineeventpromotefromnoteclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationtimelineeventpromotefromnoteerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationtimelineeventpromotefromnotetimelineevent"></a>`timelineEvent` | [`TimelineEventType`](#timelineeventtype) | Timeline event. |

### `Mutation.timelineEventUpdate`

Input type: `TimelineEventUpdateInput`
||||
|
|
|
@ -206,7 +206,7 @@ The following Elasticsearch settings are available:
|
|||
|
||||
| Parameter | Description |
|
||||
|-------------------------------------------------------|-------------|
|
||||
| `Elasticsearch indexing` | Enables or disables Elasticsearch indexing and creates an empty index if one does not already exist. You may want to enable indexing but disable search in order to give the index time to be fully completed, for example. Also, keep in mind that this option doesn't have any impact on existing data, this only enables/disables the background indexer which tracks data changes and ensures new data is indexed. |
|
||||
| `Elasticsearch indexing` | Enables or disables Elasticsearch indexing and creates an empty index if one does not already exist. You may want to enable indexing but disable search to give the index time to be fully completed, for example. Also, keep in mind that this option doesn't have any impact on existing data, this only enables/disables the background indexer which tracks data changes and ensures new data is indexed. |
|
||||
| `Pause Elasticsearch indexing` | Enables or disables temporary indexing pause. This is useful for cluster migration/reindexing. All changes are still tracked, but they are not committed to the Elasticsearch index until resumed. |
|
||||
| `Search with Elasticsearch enabled` | Enables or disables using Elasticsearch in search. |
|
||||
| `URL` | The URL of your Elasticsearch instance. Use a comma-separated list to support clustering (for example, `http://host1, https://host2:9200`). If your Elasticsearch instance is password-protected, use the `Username` and `Password` fields described below. Alternatively, use inline credentials such as `http://<username>:<password>@<elastic_host>:9200/`. |
|
||||
|
@ -221,8 +221,8 @@ The following Elasticsearch settings are available:
|
|||
| `AWS Secret Access Key` | The AWS secret access key. |
|
||||
| `Maximum file size indexed` | See [the explanation in instance limits.](../administration/instance_limits.md#maximum-file-size-indexed). |
|
||||
| `Maximum field length` | See [the explanation in instance limits.](../administration/instance_limits.md#maximum-field-length). |
|
||||
| `Maximum bulk request size (MiB)` | The Maximum Bulk Request size is used by the GitLab Golang-based indexer processes and indicates how much data it ought to collect (and store in memory) in a given indexing process before submitting the payload to Elasticsearch's Bulk API. This setting should be used with the Bulk request concurrency setting (see below) and needs to accommodate the resource constraints of both the Elasticsearch host(s) and the host(s) running the GitLab Golang-based indexer either from the `gitlab-rake` command or the Sidekiq tasks. |
|
||||
| `Bulk request concurrency` | The Bulk request concurrency indicates how many of the GitLab Golang-based indexer processes (or threads) can run in parallel to collect data to subsequently submit to Elasticsearch's Bulk API. This increases indexing performance, but fills the Elasticsearch bulk requests queue faster. This setting should be used together with the Maximum bulk request size setting (see above) and needs to accommodate the resource constraints of both the Elasticsearch host(s) and the host(s) running the GitLab Golang-based indexer either from the `gitlab-rake` command or the Sidekiq tasks. |
|
||||
| `Maximum bulk request size (MiB)` | The Maximum Bulk Request size is used by the GitLab Golang-based indexer processes and indicates how much data it ought to collect (and store in memory) in a given indexing process before submitting the payload to Elasticsearch's Bulk API. This setting should be used with the Bulk request concurrency setting (see below) and needs to accommodate the resource constraints of both the Elasticsearch hosts and the hosts running the GitLab Golang-based indexer either from the `gitlab-rake` command or the Sidekiq tasks. |
|
||||
| `Bulk request concurrency` | The Bulk request concurrency indicates how many of the GitLab Golang-based indexer processes (or threads) can run in parallel to collect data to subsequently submit to Elasticsearch's Bulk API. This increases indexing performance, but fills the Elasticsearch bulk requests queue faster. This setting should be used together with the Maximum bulk request size setting (see above) and needs to accommodate the resource constraints of both the Elasticsearch hosts and the hosts running the GitLab Golang-based indexer either from the `gitlab-rake` command or the Sidekiq tasks. |
|
||||
| `Client request timeout` | Elasticsearch HTTP client request timeout value in seconds. `0` means using the system default timeout value, which depends on the libraries that GitLab application is built upon. |
|
||||
|
||||
WARNING:
|
||||
|
@@ -259,16 +259,16 @@ from the Elasticsearch index as expected.

You can improve the language support for Chinese and Japanese languages by utilizing [`smartcn`](https://www.elastic.co/guide/en/elasticsearch/plugins/current/analysis-smartcn.html) and/or [`kuromoji`](https://www.elastic.co/guide/en/elasticsearch/plugins/current/analysis-kuromoji.html) analysis plugins from Elastic.

To enable language(s) support:
To enable languages support:

1. Install the desired plugin(s), please refer to [Elasticsearch documentation](https://www.elastic.co/guide/en/elasticsearch/plugins/7.9/installation.html) for plugins installation instructions. The plugin(s) must be installed on every node in the cluster, and each node must be restarted after installation. For a list of plugins, see the table later in this section.
1. Install the desired plugins, please refer to [Elasticsearch documentation](https://www.elastic.co/guide/en/elasticsearch/plugins/7.9/installation.html) for plugins installation instructions. The plugins must be installed on every node in the cluster, and each node must be restarted after installation. For a list of plugins, see the table later in this section.
1. On the top bar, select **Menu > Admin**.
1. On the left sidebar, select **Settings > Advanced Search**.
1. Locate **Custom analyzers: language support**.
1. Enable plugin(s) support for **Indexing**.
1. Enable plugins support for **Indexing**.
1. Click **Save changes** for the changes to take effect.
1. Trigger [Zero downtime reindexing](#zero-downtime-reindexing) or reindex everything from scratch to create a new index with updated mappings.
1. Enable plugin(s) support for **Searching** after the previous step is completed.
1. Enable plugins support for **Searching** after the previous step is completed.

For guidance on what to install, see the following Elasticsearch language plugin options:
||||
|
||||
|
|
|
@ -18,36 +18,24 @@ submit them through a merge request to the repository you don't have access to.
|
|||
|
||||
## Creating a fork
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/15013) a new form in GitLab 13.11 [with a flag](../../../user/feature_flags.md) named `fork_project_form`. Disabled by default.
|
||||
> - [Enabled on GitLab.com and self-managed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/77181) in GitLab 14.8. Feature flag `fork_project_form` removed.
|
||||
|
||||
To fork an existing project in GitLab:
|
||||
|
||||
1. On the project's home page, in the top right, click **{fork}** **Fork**.
|
||||
1. On the project's home page, in the top right, select **{fork}** **Fork**:
|
||||
![Fork this project](img/forking_workflow_fork_button_v13_10.png)
|
||||
1. Optional. Edit the **Project name**.
|
||||
1. For **Project URL**, select the [namespace](../../group/index.md#namespaces)
|
||||
your fork should belong to.
|
||||
1. Add a **Project slug**. This value becomes part of the URL to your fork.
|
||||
It must be unique in the namespace.
|
||||
1. Optional. Add a **Project description**.
|
||||
1. Select the **Visibility level** for your fork. For more information about
|
||||
visibility levels, read [Project and group visibility](../../../public_access/public_access.md).
|
||||
1. Select **Fork project**.
|
||||
|
||||
![Fork button](img/forking_workflow_fork_button_v13_10.png)
|
||||
|
||||
1. Select the project to fork to:
|
||||
|
||||
- Recommended method. Below **Select a namespace to fork the project**, identify
|
||||
the project you want to fork to, and click **Select**. Only namespaces where you have
|
||||
at least the Developer role for are shown.
|
||||
|
||||
![Choose namespace](img/forking_workflow_choose_namespace_v13_10.png)
|
||||
|
||||
- Experimental method. If your GitLab administrator has
|
||||
enabled the experimental fork project form, read
|
||||
[Create a fork with the fork project form](#create-a-fork-with-the-fork-project-form).
|
||||
Only namespaces where you have at least the Developer role for are shown.
|
||||
|
||||
NOTE:
|
||||
The project path must be unique in the namespace.
|
||||
|
||||
GitLab creates your fork, and redirects you to the project page for your new fork.
|
||||
The permissions you have in the namespace are your permissions in the fork.
|
||||
|
||||
WARNING:
|
||||
When a public project with the repository feature set to **Members Only**
|
||||
is forked, the repository is public in the fork. The owner
|
||||
of the fork must manually change the visibility. Issue
|
||||
[#36662](https://gitlab.com/gitlab-org/gitlab/-/issues/36662) exists for this issue.
|
||||
GitLab creates your fork, and redirects you to the new fork's page.
|
||||
|
||||
## Repository mirroring
|
||||
|
||||
|
@ -81,24 +69,3 @@ changes are added to the repository and branch you're merging into.
|
|||
## Removing a fork relationship
|
||||
|
||||
You can unlink your fork from its upstream project in the [advanced settings](../settings/index.md#removing-a-fork-relationship).
|
||||
|
||||
## Create a fork with the fork project form **(FREE SELF)**
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/15013) in GitLab 13.11 [with a flag](../../../administration/feature_flags.md) named `fork_project_form`. Disabled by default.
|
||||
> - [Enabled on self-managed and GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/64967) in GitLab 13.8.
|
||||
|
||||
FLAG:
|
||||
On self-managed GitLab, by default this feature is available. To hide the feature, ask an administrator to [disable the feature flag](../../../administration/feature_flags.md) named `fork_project_form`.
|
||||
On GitLab.com, this feature is available.
|
||||
|
||||
This version of the fork project form is experimental:
|
||||
|
||||
![Choose namespace](img/fork_form_v13_10.png)
|
||||
|
||||
To use it, follow the instructions at [Creating a fork](#creating-a-fork) and provide:
|
||||
|
||||
- The project name.
|
||||
- The project URL.
|
||||
- The project slug.
|
||||
- Optional. The project description.
|
||||
- The visibility level for your fork.
|
||||
|
|
Binary file not shown (image diff; previous size 40 KiB).
|
@@ -41,7 +41,7 @@ Here's how Service Desk works for you:
1. You provide a project-specific email address to your paying customers, who can email you directly
   from the application.
1. Each email they send creates an issue in the appropriate project.
1. Your team members navigate to the Service Desk issue tracker, where they can see new support
1. Your team members go to the Service Desk issue tracker, where they can see new support
   requests and respond inside associated issues.
1. Your team communicates back and forth with the customer to understand the request.
1. Your team starts working on implementing code to solve your customer's problem.

@@ -153,7 +153,7 @@ To use a custom description template with Service Desk:
1. On the top bar, select **Menu > Projects** and find your project.
1. [Create a description template](description_templates.md#create-an-issue-template).
1. On the left sidebar, select **Settings > General > Service Desk**.
1. From the dropdown **Template to append to all Service Desk issues**, search or select your template.
1. From the dropdown list **Template to append to all Service Desk issues**, search or select your template.

### Using a custom email display name
||||
|
||||
|
@@ -190,13 +190,13 @@ you can customize the mailbox used by Service Desk. This allows you to have
a separate email address for Service Desk by also configuring a [custom suffix](#configuring-a-custom-email-address-suffix)
in project settings.

The `address` must include the `+%{key}` placeholder within the 'user'
portion of the address, before the `@`. This is used to identify the project
The `address` must include the `+%{key}` placeholder in the 'user'
portion of the address, before the `@`. The placeholder is used to identify the project
where the issue should be created.

NOTE:
When configuring a custom mailbox, the `service_desk_email` and `incoming_email`
configurations must always use separate mailboxes. This is important, because
configurations must always use separate mailboxes. It's important, because
emails picked from `service_desk_email` mailbox are processed by a different
worker and it would not recognize `incoming_email` emails.
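The `%{key}` token behaves like a normal Ruby named-format placeholder; a minimal sketch of how such an address template expands (illustrative only, not GitLab's exact substitution code):

    template = 'contact+%{key}@example.com'
    template % { key: 'mygroup-myproject-support' }
    # => "contact+mygroup-myproject-support@example.com"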
||||
|
||||
|
@@ -267,7 +267,7 @@ The Microsoft Graph API is not yet supported in source installations. See [this

#### Configuring a custom email address suffix

You can set a custom suffix in your project's Service Desk settings once you have configured a [custom mailbox](#configuring-a-custom-mailbox).
You can set a custom suffix in your project's Service Desk settings after you have configured a [custom mailbox](#configuring-a-custom-mailbox).
It can contain only lowercase letters (`a-z`), numbers (`0-9`), or underscores (`_`).

When configured, the custom suffix creates a new Service Desk email address, consisting of the

@@ -281,7 +281,7 @@ For example, suppose the `mygroup/myproject` project Service Desk settings has t
The Service Desk email address for this project is: `contact+mygroup-myproject-support@example.com`.
The [incoming email](../../administration/incoming_email.md) address still works.

If you don't configure the custom suffix, the default project identification will be used for identifying the project. You can see that email address in the project settings.
If you don't configure the custom suffix, the default project identification is used for identifying the project. You can see that email address in the project settings.
||||
|
||||
## Using Service Desk
|
||||
|
||||
|
|
|
@@ -8,7 +8,10 @@ module Atlassian
    end

    def app_key
      "gitlab-jira-connect-#{gitlab_host}"
      # App key must be <= 64 characters.
      # See: https://developer.atlassian.com/cloud/jira/platform/connect-app-descriptor/#app-descriptor-structure

      "gitlab-jira-connect-#{gitlab_host}"[..63]
    end

    private
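The `[..63]` beginless-range slice is what enforces the 64-character limit; a quick Ruby sketch of the behavior (hostnames are made up):

    'gitlab-jira-connect-gitlab.example.com'[..63]
    # => "gitlab-jira-connect-gitlab.example.com"   (short keys pass through unchanged)

    ('gitlab-jira-connect-' + 'very-long-subdomain.' * 5 + 'example.com')[..63].length
    # => 64   (anything longer is truncated to the first 64 characters)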
||||
|
|
lib/gitlab/process_supervisor.rb (new file, 110 lines)
|
@@ -0,0 +1,110 @@
# frozen_string_literal: true

module Gitlab
  # Given a set of process IDs, the supervisor can monitor processes
  # for being alive and invoke a callback if some or all should go away.
  # The receiver of the callback can then act on this event, for instance
  # by restarting those processes or performing clean-up work.
  #
  # The supervisor will also trap termination signals if provided and
  # propagate those to the supervised processes. Any supervised processes
  # that do not terminate within a specified grace period will be killed.
  class ProcessSupervisor
    DEFAULT_HEALTH_CHECK_INTERVAL_SECONDS = 5
    DEFAULT_TERMINATE_INTERVAL_SECONDS = 1
    DEFAULT_TERMINATE_TIMEOUT_SECONDS = 10

    attr_reader :alive

    def initialize(
      health_check_interval_seconds: DEFAULT_HEALTH_CHECK_INTERVAL_SECONDS,
      check_terminate_interval_seconds: DEFAULT_TERMINATE_INTERVAL_SECONDS,
      terminate_timeout_seconds: DEFAULT_TERMINATE_TIMEOUT_SECONDS,
      term_signals: %i(INT TERM),
      forwarded_signals: [])

      @term_signals = term_signals
      @forwarded_signals = forwarded_signals
      @health_check_interval_seconds = health_check_interval_seconds
      @check_terminate_interval_seconds = check_terminate_interval_seconds
      @terminate_timeout_seconds = terminate_timeout_seconds
    end

    # Starts a supervision loop for the given process ID(s).
    #
    # If any or all processes go away, the IDs of any dead processes will
    # be yielded to the given block, so callers can act on them.
    #
    # If the block returns a non-empty list of IDs, the supervisor will
    # start observing those processes instead. Otherwise it will shut down.
    def supervise(pid_or_pids, &on_process_death)
      @pids = Array(pid_or_pids)

      trap_signals!

      @alive = true
      while @alive
        sleep(@health_check_interval_seconds)

        check_process_health(&on_process_death)
      end
    end

    private

    def check_process_health(&on_process_death)
      unless all_alive?
        dead_pids = @pids - live_pids
        @pids = Array(yield(dead_pids))
        @alive = @pids.any?
      end
    end

    def trap_signals!
      ProcessManagement.trap_signals(@term_signals) do |signal|
        @alive = false
        signal_all(signal)
        wait_for_termination
      end

      ProcessManagement.trap_signals(@forwarded_signals) do |signal|
        signal_all(signal)
      end
    end

    def wait_for_termination
      deadline = monotonic_time + @terminate_timeout_seconds
      sleep(@check_terminate_interval_seconds) while continue_waiting?(deadline)

      hard_stop_stuck_pids
    end

    def monotonic_time
      Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_second)
    end

    def continue_waiting?(deadline)
      any_alive? && monotonic_time < deadline
    end

    def signal_all(signal)
      ProcessManagement.signal_processes(@pids, signal)
    end

    def hard_stop_stuck_pids
      ProcessManagement.signal_processes(live_pids, "-KILL")
    end

    def any_alive?
      ProcessManagement.any_alive?(@pids)
    end

    def all_alive?
      ProcessManagement.all_alive?(@pids)
    end

    def live_pids
      ProcessManagement.pids_alive(@pids)
    end
  end
end
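`supervise` hands the dead PIDs to the caller's block and treats the block's return value as the new set to watch (an empty array ends the loop). A minimal usage sketch under that contract (the spawned command is just a placeholder):

    supervisor = Gitlab::ProcessSupervisor.new(health_check_interval_seconds: 2)
    pids = 3.times.map { Process.spawn('sleep 60') }

    supervisor.supervise(pids) do |dead_pids|
      # Respawn one replacement per dead worker; returning [] would stop supervision.
      pids = (pids - dead_pids) + dead_pids.map { Process.spawn('sleep 60') }
    end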
|
@@ -0,0 +1,6 @@
---
- name: users_updating_work_item_title
  category: work_items
  redis_slot: users
  aggregation: weekly
  feature_flag: track_work_items_activity
|
@@ -0,0 +1,23 @@
# frozen_string_literal: true

module Gitlab
  module UsageDataCounters
    module WorkItemActivityUniqueCounter
      WORK_ITEM_TITLE_CHANGED = 'users_updating_work_item_title'

      class << self
        def track_work_item_title_changed_action(author:)
          track_unique_action(WORK_ITEM_TITLE_CHANGED, author)
        end

        private

        def track_unique_action(action, author)
          return unless author

          Gitlab::UsageDataCounters::HLLRedisCounter.track_event(action, values: author.id)
        end
      end
    end
  end
end
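A sketch of how this counter would typically be exercised and read back, assuming the usual `HLLRedisCounter` query interface (the time window below is illustrative):

    # Record the event for a user; the guard above makes this a no-op when author is nil.
    Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter
      .track_work_item_title_changed_action(author: user)

    # Approximate count of unique users over the last week.
    Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
      event_names: ['users_updating_work_item_title'],
      start_date: 7.days.ago.to_date,
      end_date: Date.today
    )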
|
@@ -15814,9 +15814,6 @@ msgstr ""
msgid "ForkProject|Select a namespace"
msgstr ""

msgid "ForkProject|Select a namespace to fork the project"
msgstr ""

msgid "ForkProject|The project can be accessed by any logged in user."
msgstr ""

@@ -16864,9 +16861,6 @@ msgstr ""
msgid "Go to find file"
msgstr ""

msgid "Go to fork"
msgstr ""

msgid "Go to issue boards"
msgstr ""

@@ -17668,9 +17662,6 @@ msgstr ""
msgid "Groups and projects"
msgstr ""

msgid "Groups and subgroups"
msgstr ""

msgid "Groups are a great way to organize projects and people."
msgstr ""

@@ -24561,9 +24552,6 @@ msgstr ""
msgid "No available branches"
msgstr ""

msgid "No available groups to fork the project."
msgstr ""

msgid "No branches found"
msgstr ""

@@ -42073,9 +42061,6 @@ msgstr ""
msgid "You have not added any approvers. Start by adding users or groups."
msgstr ""

msgid "You have reached your project limit"
msgstr ""

msgid "You have set up 2FA for your account! If you lose access to your 2FA device, you can use your recovery codes to access your account. Alternatively, if you upload an SSH key, you can %{anchorOpen}use that key to generate additional recovery codes%{anchorClose}."
msgstr ""

@@ -42106,12 +42091,6 @@ msgstr ""
msgid "You must have maintainer access to force delete a lock"
msgstr ""

msgid "You must have permission to create a project in a group before forking."
msgstr ""

msgid "You must have permission to create a project in a namespace before forking."
msgstr ""

msgid "You must provide a valid current password"
msgstr ""
||||
|
||||
|
|
|
@ -5,10 +5,6 @@ module QA
|
|||
module Project
|
||||
module Fork
|
||||
class New < Page::Base
|
||||
view 'app/views/projects/forks/_fork_button.html.haml' do
|
||||
element :fork_namespace_button
|
||||
end
|
||||
|
||||
view 'app/assets/javascripts/pages/projects/forks/new/components/fork_form.vue' do
|
||||
element :fork_namespace_dropdown
|
||||
element :fork_project_button
|
||||
|
@ -16,13 +12,9 @@ module QA
|
|||
end
|
||||
|
||||
def fork_project(namespace = Runtime::Namespace.path)
|
||||
if has_element?(:fork_namespace_button, wait: 0)
|
||||
click_element(:fork_namespace_button, name: namespace)
|
||||
else
|
||||
select_element(:fork_namespace_dropdown, namespace)
|
||||
click_element(:fork_privacy_button, privacy_level: 'public')
|
||||
click_element(:fork_project_button)
|
||||
end
|
||||
select_element(:fork_namespace_dropdown, namespace)
|
||||
click_element(:fork_privacy_button, privacy_level: 'public')
|
||||
click_element(:fork_project_button)
|
||||
end
|
||||
|
||||
def fork_namespace_dropdown_values
|
||||
|
|
|
@ -24,7 +24,7 @@ module QA
|
|||
log(:debug, "Fetched #{failures.length} flaky testcases!")
|
||||
rescue StandardError => e
|
||||
log(:error, "Failed to fetch flaky spec data for report: #{e}")
|
||||
@failures = []
|
||||
@failures = {}
|
||||
end
|
||||
|
||||
# Finished example
|
||||
|
|
|
@ -14,6 +14,7 @@ require_relative '../lib/gitlab/sidekiq_config/cli_methods'
|
|||
require_relative '../lib/gitlab/sidekiq_config/worker_matcher'
|
||||
require_relative '../lib/gitlab/sidekiq_logging/json_formatter'
|
||||
require_relative '../lib/gitlab/process_management'
|
||||
require_relative '../lib/gitlab/process_supervisor'
|
||||
require_relative '../metrics_server/metrics_server'
|
||||
require_relative 'sidekiq_cluster'
|
||||
|
||||
|
@ -38,8 +39,7 @@ module Gitlab
|
|||
@metrics_dir = ENV["prometheus_multiproc_dir"] || File.absolute_path("tmp/prometheus_multiproc_dir/sidekiq")
|
||||
@pid = nil
|
||||
@interval = 5
|
||||
@alive = true
|
||||
@processes = []
|
||||
@soft_timeout_seconds = DEFAULT_SOFT_TIMEOUT_SECONDS
|
||||
@logger = Logger.new(log_output)
|
||||
@logger.formatter = ::Gitlab::SidekiqLogging::JSONFormatter.new
|
||||
@rails_path = Dir.pwd
|
||||
|
@ -103,95 +103,63 @@ module Gitlab
|
|||
@logger.info("Starting cluster with #{queue_groups.length} processes")
|
||||
end
|
||||
|
||||
start_metrics_server(wipe_metrics_dir: true)
|
||||
start_and_supervise_workers(queue_groups)
|
||||
end
|
||||
|
||||
@processes = SidekiqCluster.start(
|
||||
def start_and_supervise_workers(queue_groups)
|
||||
worker_pids = SidekiqCluster.start(
|
||||
queue_groups,
|
||||
env: @environment,
|
||||
directory: @rails_path,
|
||||
max_concurrency: @max_concurrency,
|
||||
min_concurrency: @min_concurrency,
|
||||
dryrun: @dryrun,
|
||||
timeout: soft_timeout_seconds
|
||||
timeout: @soft_timeout_seconds
|
||||
)
|
||||
|
||||
return if @dryrun
|
||||
|
||||
write_pid
|
||||
trap_signals
|
||||
start_loop
|
||||
end
|
||||
|
||||
def write_pid
|
||||
ProcessManagement.write_pid(@pid) if @pid
|
||||
end
|
||||
|
||||
def soft_timeout_seconds
|
||||
@soft_timeout_seconds || DEFAULT_SOFT_TIMEOUT_SECONDS
|
||||
end
|
||||
supervisor = Gitlab::ProcessSupervisor.new(
|
||||
health_check_interval_seconds: @interval,
|
||||
terminate_timeout_seconds: @soft_timeout_seconds + TIMEOUT_GRACE_PERIOD_SECONDS,
|
||||
term_signals: TERMINATE_SIGNALS,
|
||||
forwarded_signals: FORWARD_SIGNALS
|
||||
)
|
||||
|
||||
# The amount of time it'll wait for killing the alive Sidekiq processes.
|
||||
def hard_timeout_seconds
|
||||
soft_timeout_seconds + DEFAULT_HARD_TIMEOUT_SECONDS
|
||||
end
|
||||
metrics_server_pid = start_metrics_server
|
||||
|
||||
def monotonic_time
|
||||
Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_second)
|
||||
end
|
||||
all_pids = worker_pids + Array(metrics_server_pid)
|
||||
|
||||
def continue_waiting?(deadline)
|
||||
ProcessManagement.any_alive?(@processes) && monotonic_time < deadline
|
||||
end
|
||||
|
||||
def hard_stop_stuck_pids
|
||||
ProcessManagement.signal_processes(ProcessManagement.pids_alive(@processes), "-KILL")
|
||||
end
|
||||
|
||||
def wait_for_termination
|
||||
deadline = monotonic_time + hard_timeout_seconds
|
||||
sleep(CHECK_TERMINATE_INTERVAL_SECONDS) while continue_waiting?(deadline)
|
||||
|
||||
hard_stop_stuck_pids
|
||||
end
|
||||
|
||||
def trap_signals
|
||||
ProcessManagement.trap_signals(TERMINATE_SIGNALS) do |signal|
|
||||
@alive = false
|
||||
ProcessManagement.signal_processes(@processes, signal)
|
||||
wait_for_termination
|
||||
end
|
||||
|
||||
ProcessManagement.trap_signals(FORWARD_SIGNALS) do |signal|
|
||||
ProcessManagement.signal_processes(@processes, signal)
|
||||
end
|
||||
end
|
||||
|
||||
def start_loop
|
||||
while @alive
|
||||
sleep(@interval)
|
||||
|
||||
if metrics_server_enabled? && ProcessManagement.process_died?(@metrics_server_pid)
|
||||
@logger.warn('Metrics server went away')
|
||||
start_metrics_server(wipe_metrics_dir: false)
|
||||
end
|
||||
|
||||
unless ProcessManagement.all_alive?(@processes)
|
||||
# If a child process died we'll just terminate the whole cluster. It's up to
|
||||
# runit and such to then restart the cluster.
|
||||
supervisor.supervise(all_pids) do |dead_pids|
|
||||
# If we're not in the process of shutting down the cluster,
|
||||
# and the metrics server died, restart it.
|
||||
if supervisor.alive && dead_pids.include?(metrics_server_pid)
|
||||
@logger.info('Metrics server terminated, restarting...')
|
||||
metrics_server_pid = restart_metrics_server(wipe_metrics_dir: false)
|
||||
all_pids = worker_pids + Array(metrics_server_pid)
|
||||
else
|
||||
# If a worker process died we'll just terminate the whole cluster.
|
||||
# We let an external system (runit, kubernetes) handle the restart.
|
||||
@logger.info('A worker terminated, shutting down the cluster')
|
||||
|
||||
stop_metrics_server
|
||||
ProcessManagement.signal_processes(@processes, :TERM)
|
||||
break
|
||||
ProcessManagement.signal_processes(all_pids - dead_pids, :TERM)
|
||||
# Signal supervisor not to respawn workers and shut down.
|
||||
[]
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def start_metrics_server(wipe_metrics_dir: false)
|
||||
def start_metrics_server
|
||||
return unless metrics_server_enabled?
|
||||
|
||||
restart_metrics_server(wipe_metrics_dir: true)
|
||||
end
|
||||
|
||||
def restart_metrics_server(wipe_metrics_dir: false)
|
||||
@logger.info("Starting metrics server on port #{sidekiq_exporter_port}")
|
||||
@metrics_server_pid = MetricsServer.fork(
|
||||
MetricsServer.fork(
|
||||
'sidekiq',
|
||||
metrics_dir: @metrics_dir,
|
||||
wipe_metrics_dir: wipe_metrics_dir,
|
||||
|
@ -225,13 +193,6 @@ module Gitlab
|
|||
!@dryrun && sidekiq_exporter_enabled? && exporter_has_a_unique_port?
|
||||
end
|
||||
|
||||
def stop_metrics_server
|
||||
return unless @metrics_server_pid
|
||||
|
||||
@logger.info("Stopping metrics server (PID #{@metrics_server_pid})")
|
||||
ProcessManagement.signal(@metrics_server_pid, :TERM)
|
||||
end
|
||||
|
||||
def option_parser
|
||||
OptionParser.new do |opt|
|
||||
opt.banner = "#{File.basename(__FILE__)} [QUEUE,QUEUE] [QUEUE] ... [OPTIONS]"
|
||||
|
|
|
@ -4,8 +4,6 @@ require_relative '../lib/gitlab/process_management'
|
|||
|
||||
module Gitlab
|
||||
module SidekiqCluster
|
||||
CHECK_TERMINATE_INTERVAL_SECONDS = 1
|
||||
|
||||
# How long to wait when asking for a clean termination.
|
||||
# It maps the Sidekiq default timeout:
|
||||
# https://github.com/mperham/sidekiq/wiki/Signals#term
|
||||
|
@ -14,8 +12,9 @@ module Gitlab
|
|||
# is given through arguments.
|
||||
DEFAULT_SOFT_TIMEOUT_SECONDS = 25
|
||||
|
||||
# After surpassing the soft timeout.
|
||||
DEFAULT_HARD_TIMEOUT_SECONDS = 5
|
||||
# Additional time granted after surpassing the soft timeout
|
||||
# before we kill the process.
|
||||
TIMEOUT_GRACE_PERIOD_SECONDS = 5
|
||||
|
||||
# Starts Sidekiq workers for the pairs of processes.
|
||||
#
|
||||
|
|
|
@@ -5,8 +5,11 @@ require 'rspec-parameterized'

require_relative '../../support/stub_settings_source'
require_relative '../../../sidekiq_cluster/cli'
require_relative '../../support/helpers/next_instance_of'

RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubocop:disable RSpec/FilePath
  include NextInstanceOf

  let(:cli) { described_class.new('/dev/null') }
  let(:timeout) { Gitlab::SidekiqCluster::DEFAULT_SOFT_TIMEOUT_SECONDS }
  let(:default_options) do
@@ -61,9 +64,8 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
  context 'with arguments' do
    before do
      allow(cli).to receive(:write_pid)
      allow(cli).to receive(:trap_signals)
      allow(cli).to receive(:start_loop)
      allow(Gitlab::ProcessManagement).to receive(:write_pid)
      allow_next_instance_of(Gitlab::ProcessSupervisor) { |it| allow(it).to receive(:supervise) }
    end

    it 'starts the Sidekiq workers' do
@@ -81,7 +83,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
        .to receive(:worker_queues).and_return(worker_queues)

      expect(Gitlab::SidekiqCluster)
        .to receive(:start).with([worker_queues], default_options)
        .to receive(:start).with([worker_queues], default_options).and_return([])

      cli.run(%w(*))
    end
@@ -135,6 +137,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
      it 'when given', 'starts Sidekiq workers with given timeout' do
        expect(Gitlab::SidekiqCluster).to receive(:start)
          .with([['foo']], default_options.merge(timeout: 10))
          .and_return([])

        cli.run(%w(foo --timeout 10))
      end
@@ -142,6 +145,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
      it 'when not given', 'starts Sidekiq workers with default timeout' do
        expect(Gitlab::SidekiqCluster).to receive(:start)
          .with([['foo']], default_options.merge(timeout: Gitlab::SidekiqCluster::DEFAULT_SOFT_TIMEOUT_SECONDS))
          .and_return([])

        cli.run(%w(foo))
      end
@@ -257,7 +261,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
        .to receive(:worker_queues).and_return(worker_queues)

      expect(Gitlab::SidekiqCluster)
        .to receive(:start).with([worker_queues], default_options)
        .to receive(:start).with([worker_queues], default_options).and_return([])

      cli.run(%w(--queue-selector *))
    end
@@ -292,16 +296,15 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
    context 'starting the server' do
      context 'without --dryrun' do
        before do
          allow(Gitlab::SidekiqCluster).to receive(:start).and_return([])
          allow(Gitlab::ProcessManagement).to receive(:write_pid)
          allow_next_instance_of(Gitlab::ProcessSupervisor) { |it| allow(it).to receive(:supervise) }
        end

        context 'when there are no sidekiq_health_checks settings set' do
          let(:sidekiq_exporter_enabled) { true }

          before do
            allow(Gitlab::SidekiqCluster).to receive(:start)
            allow(cli).to receive(:write_pid)
            allow(cli).to receive(:trap_signals)
            allow(cli).to receive(:start_loop)
          end

          it 'does not start a sidekiq metrics server' do
            expect(MetricsServer).not_to receive(:fork)

@@ -312,13 +315,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
        context 'when the sidekiq_exporter.port setting is not set' do
          let(:sidekiq_exporter_enabled) { true }

          before do
            allow(Gitlab::SidekiqCluster).to receive(:start)
            allow(cli).to receive(:write_pid)
            allow(cli).to receive(:trap_signals)
            allow(cli).to receive(:start_loop)
          end

          it 'does not start a sidekiq metrics server' do
            expect(MetricsServer).not_to receive(:fork)

@@ -342,13 +338,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
            }
          end

          before do
            allow(Gitlab::SidekiqCluster).to receive(:start)
            allow(cli).to receive(:write_pid)
            allow(cli).to receive(:trap_signals)
            allow(cli).to receive(:start_loop)
          end

          it 'does not start a sidekiq metrics server' do
            expect(MetricsServer).not_to receive(:fork)

@@ -368,13 +357,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
            }
          end

          before do
            allow(Gitlab::SidekiqCluster).to receive(:start)
            allow(cli).to receive(:write_pid)
            allow(cli).to receive(:trap_signals)
            allow(cli).to receive(:start_loop)
          end

          it 'does not start a sidekiq metrics server' do
            expect(MetricsServer).not_to receive(:fork)

@@ -397,13 +379,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
        end

        with_them do
          before do
            allow(Gitlab::SidekiqCluster).to receive(:start)
            allow(cli).to receive(:write_pid)
            allow(cli).to receive(:trap_signals)
            allow(cli).to receive(:start_loop)
          end

          specify do
            if start_metrics_server
              expect(MetricsServer).to receive(:fork).with('sidekiq', metrics_dir: metrics_dir, wipe_metrics_dir: true, reset_signals: trapped_signals)

@@ -415,6 +390,23 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
          end
        end
      end

      context 'when a PID is specified' do
        it 'writes the PID to a file' do
          expect(Gitlab::ProcessManagement).to receive(:write_pid).with('/dev/null')

          cli.option_parser.parse!(%w(-P /dev/null))
          cli.run(%w(foo))
        end
      end

      context 'when no PID is specified' do
        it 'does not write a PID' do
          expect(Gitlab::ProcessManagement).not_to receive(:write_pid)

          cli.run(%w(foo))
        end
      end
    end

    context 'with --dryrun set' do

@@ -427,130 +419,55 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
        end
      end
    end

    context 'supervising the server' do
      let(:sidekiq_exporter_enabled) { true }
      let(:sidekiq_health_checks_port) { '3907' }

      before do
        allow(cli).to receive(:sleep).with(a_kind_of(Numeric))
        allow(MetricsServer).to receive(:fork).and_return(99)
        cli.start_metrics_server
      end

      it 'stops the metrics server when one of the processes has been terminated' do
        allow(Gitlab::ProcessManagement).to receive(:process_died?).and_return(false)
        allow(Gitlab::ProcessManagement).to receive(:all_alive?).with(an_instance_of(Array)).and_return(false)
        allow(Gitlab::ProcessManagement).to receive(:signal_processes).with(an_instance_of(Array), :TERM)

        expect(Process).to receive(:kill).with(:TERM, 99)

        cli.start_loop
      end

      it 'starts the metrics server when it is down' do
        allow(Gitlab::ProcessManagement).to receive(:process_died?).and_return(true)
        allow(Gitlab::ProcessManagement).to receive(:all_alive?).with(an_instance_of(Array)).and_return(false)
        allow(cli).to receive(:stop_metrics_server)

        expect(MetricsServer).to receive(:fork).with(
          'sidekiq', metrics_dir: metrics_dir, wipe_metrics_dir: false, reset_signals: trapped_signals
        )

        cli.start_loop
      end
    end
  end
end

  describe '#write_pid' do
    context 'when a PID is specified' do
      it 'writes the PID to a file' do
        expect(Gitlab::ProcessManagement).to receive(:write_pid).with('/dev/null')

        cli.option_parser.parse!(%w(-P /dev/null))
        cli.write_pid
      end
    end

    context 'when no PID is specified' do
      it 'does not write a PID' do
        expect(Gitlab::ProcessManagement).not_to receive(:write_pid)
    context 'supervising the cluster' do
      let(:sidekiq_exporter_enabled) { true }
      let(:sidekiq_health_checks_port) { '3907' }
      let(:metrics_server_pid) { 99 }
      let(:sidekiq_worker_pids) { [2, 42] }

        cli.write_pid
      end
    end
  end

  describe '#wait_for_termination' do
    it 'waits for termination of all sub-processes and succeeds after 3 checks' do
      expect(Gitlab::ProcessManagement).to receive(:any_alive?)
        .with(an_instance_of(Array)).and_return(true, true, true, false)

      expect(Gitlab::ProcessManagement).to receive(:pids_alive)
        .with([]).and_return([])

      expect(Gitlab::ProcessManagement).to receive(:signal_processes)
        .with([], "-KILL")

      stub_const("Gitlab::SidekiqCluster::CHECK_TERMINATE_INTERVAL_SECONDS", 0.1)
      allow(cli).to receive(:terminate_timeout_seconds) { 1 }

      cli.wait_for_termination
    end

    context 'with hanging workers' do
      before do
        expect(cli).to receive(:write_pid)
        expect(cli).to receive(:trap_signals)
        expect(cli).to receive(:start_loop)
        allow(Gitlab::SidekiqCluster).to receive(:start).and_return(sidekiq_worker_pids)
        allow(Gitlab::ProcessManagement).to receive(:write_pid)
      end

      it 'hard kills workers after timeout expires' do
        worker_pids = [101, 102, 103]
        expect(Gitlab::SidekiqCluster).to receive(:start)
          .with([['foo']], default_options)
          .and_return(worker_pids)
      it 'stops the entire process cluster if one of the workers has been terminated' do
        allow_next_instance_of(Gitlab::ProcessSupervisor) do |it|
          allow(it).to receive(:supervise).and_yield([2])
        end

        expect(Gitlab::ProcessManagement).to receive(:any_alive?)
          .with(worker_pids).and_return(true).at_least(10).times

        expect(Gitlab::ProcessManagement).to receive(:pids_alive)
          .with(worker_pids).and_return([102])

        expect(Gitlab::ProcessManagement).to receive(:signal_processes)
          .with([102], "-KILL")
        expect(MetricsServer).to receive(:fork).once.and_return(metrics_server_pid)
        expect(Gitlab::ProcessManagement).to receive(:signal_processes).with([42, 99], :TERM)

        cli.run(%w(foo))

        stub_const("Gitlab::SidekiqCluster::CHECK_TERMINATE_INTERVAL_SECONDS", 0.1)
        allow(cli).to receive(:terminate_timeout_seconds) { 1 }

        cli.wait_for_termination
      end
    end
  end

  describe '#trap_signals' do
    it 'traps termination and sidekiq specific signals' do
      expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i[INT TERM])
      expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i[TTIN USR1 USR2 HUP])
      context 'when the supervisor is alive' do
        it 'restarts the metrics server when it is down' do
          allow_next_instance_of(Gitlab::ProcessSupervisor) do |it|
            allow(it).to receive(:alive).and_return(true)
            allow(it).to receive(:supervise).and_yield([metrics_server_pid])
          end

      cli.trap_signals
    end
  end
          expect(MetricsServer).to receive(:fork).twice.and_return(metrics_server_pid)

  describe '#start_loop' do
    it 'runs until one of the processes has been terminated' do
      allow(cli).to receive(:sleep).with(a_kind_of(Numeric))
          cli.run(%w(foo))
        end
      end

      expect(Gitlab::ProcessManagement).to receive(:all_alive?)
        .with(an_instance_of(Array)).and_return(false)
      context 'when the supervisor is shutting down' do
        it 'does not restart the metrics server' do
          allow_next_instance_of(Gitlab::ProcessSupervisor) do |it|
            allow(it).to receive(:alive).and_return(false)
            allow(it).to receive(:supervise).and_yield([metrics_server_pid])
          end

      expect(Gitlab::ProcessManagement).to receive(:signal_processes)
        .with(an_instance_of(Array), :TERM)
          expect(MetricsServer).to receive(:fork).once.and_return(metrics_server_pid)

      cli.start_loop
          cli.run(%w(foo))
        end
      end
    end
  end
end
@@ -199,15 +199,6 @@ RSpec.describe Projects::ForksController do
      expect(json_response['namespaces'][1]['id']).to eq(group.id)
    end

    it 'responds with group only when fork_project_form feature flag is disabled' do
      stub_feature_flags(fork_project_form: false)
      do_request

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response['namespaces'].length).to eq(1)
      expect(json_response['namespaces'][0]['id']).to eq(group.id)
    end

    context 'N+1 queries' do
      before do
        create(:fork_network, root_project: project)
@@ -164,199 +164,4 @@ RSpec.describe 'Project fork' do
        end
      end
    end

  context 'with fork_project_form feature flag disabled' do
    before do
      stub_feature_flags(fork_project_form: false)
      sign_in(user)
    end

    it_behaves_like 'fork button on project page'

    context 'user has exceeded personal project limit' do
      before do
        user.update!(projects_limit: 0)
      end

      context 'with a group to fork to' do
        let!(:group) { create(:group).tap { |group| group.add_owner(user) } }

        it 'allows user to fork only to the group on fork page', :js do
          visit new_project_fork_path(project)

          to_personal_namespace = find('[data-qa-selector=fork_namespace_button].disabled') # rubocop:disable QA/SelectorUsage
          to_group = find(".fork-groups button[data-qa-name=#{group.name}]") # rubocop:disable QA/SelectorUsage

          expect(to_personal_namespace).not_to be_nil
          expect(to_group).not_to be_disabled
        end
      end
    end

    it_behaves_like 'create fork page', ' Select a namespace to fork the project '

    it 'forks the project', :sidekiq_might_not_need_inline do
      visit project_path(project)

      click_link 'Fork'

      page.within '.fork-thumbnail-container' do
        click_link 'Select'
      end

      expect(page).to have_content 'Forked from'

      visit project_path(project)

      expect(page).to have_content(/new merge request/i)

      page.within '.nav-sidebar' do
        first(:link, 'Merge requests').click
      end

      expect(page).to have_content(/new merge request/i)

      page.within '#content-body' do
        click_link('New merge request')
      end

      expect(current_path).to have_content(/#{user.namespace.path}/i)
    end

    it 'shows avatars when Gravatar is disabled' do
      stub_application_setting(gravatar_enabled: false)

      visit project_path(project)

      click_link 'Fork'

      page.within('.fork-thumbnail-container') do
        expect(page).to have_css('span.identicon')
      end
    end

    it 'shows the forked project on the list' do
      visit project_path(project)

      click_link 'Fork'

      page.within '.fork-thumbnail-container' do
        click_link 'Select'
      end

      visit project_forks_path(project)

      forked_project = user.fork_of(project.reload)

      page.within('.js-projects-list-holder') do
        expect(page).to have_content("#{forked_project.namespace.human_name} / #{forked_project.name}")
      end

      forked_project.update!(path: 'test-crappy-path')

      visit project_forks_path(project)

      page.within('.js-projects-list-holder') do
        expect(page).to have_content("#{forked_project.namespace.human_name} / #{forked_project.name}")
      end
    end

    context 'when the project is private' do
      let(:project) { create(:project, :repository) }
      let(:another_user) { create(:user, name: 'Mike') }

      before do
        project.add_reporter(user)
        project.add_reporter(another_user)
      end

      it 'renders private forks of the project' do
        visit project_path(project)

        another_project_fork = Projects::ForkService.new(project, another_user).execute

        click_link 'Fork'

        page.within '.fork-thumbnail-container' do
          click_link 'Select'
        end

        visit project_forks_path(project)

        page.within('.js-projects-list-holder') do
          user_project_fork = user.fork_of(project.reload)
          expect(page).to have_content("#{user_project_fork.namespace.human_name} / #{user_project_fork.name}")
        end

        expect(page).not_to have_content("#{another_project_fork.namespace.human_name} / #{another_project_fork.name}")
      end
    end

    context 'when the user already forked the project' do
      before do
        create(:project, :repository, name: project.name, namespace: user.namespace)
      end

      it 'renders error' do
        visit project_path(project)

        click_link 'Fork'

        page.within '.fork-thumbnail-container' do
          click_link 'Select'
        end

        expect(page).to have_content "Name has already been taken"
      end
    end

    context 'maintainer in group' do
      let(:group) { create(:group) }

      before do
        group.add_maintainer(user)
      end

      it 'allows user to fork project to group or to user namespace', :js do
        visit project_path(project)
        wait_for_requests

        expect(page).not_to have_css('a.disabled', text: 'Fork')

        click_link 'Fork'

        expect(page).to have_css('.fork-thumbnail')
        expect(page).to have_css('.group-row')
        expect(page).not_to have_css('.fork-thumbnail.disabled')
      end

      it 'allows user to fork project to group and not user when exceeded project limit', :js do
        user.projects_limit = 0
        user.save!

        visit project_path(project)
        wait_for_requests

        expect(page).not_to have_css('a.disabled', text: 'Fork')

        click_link 'Fork'

        expect(page).to have_css('.fork-thumbnail.disabled')
        expect(page).to have_css('.group-row')
      end

      it 'links to the fork if the project was already forked within that namespace', :sidekiq_might_not_need_inline, :js do
        forked_project = fork_project(project, user, namespace: group, repository: true)

        visit new_project_fork_path(project)
        wait_for_requests

        expect(page).to have_css('.group-row a.btn', text: 'Go to fork')

        click_link 'Go to fork'

        expect(current_path).to eq(project_path(forked_project))
      end
    end
  end
end
@@ -1,19 +1,12 @@
import { shallowMount } from '@vue/test-utils';
import App from '~/pages/projects/forks/new/components/app.vue';
import ForkForm from '~/pages/projects/forks/new/components/fork_form.vue';

describe('App component', () => {
  let wrapper;

  const DEFAULT_PROPS = {
    forkIllustration: 'illustrations/project-create-new-sm.svg',
    endpoint: '/some/project-full-path/-/forks/new.json',
    projectFullPath: '/some/project-full-path',
    projectId: '10',
    projectName: 'Project Name',
    projectPath: 'project-name',
    projectDescription: 'some project description',
    projectVisibility: 'private',
    restrictedVisibilityLevels: [],
  };

  const createComponent = (props = {}) => {
@@ -37,7 +30,7 @@ describe('App component', () => {
    expect(wrapper.find('img').attributes('src')).toBe('illustrations/project-create-new-sm.svg');
  });

  it('renders ForkForm component with prop', () => {
    expect(wrapper.props()).toEqual(expect.objectContaining(DEFAULT_PROPS));
  it('renders ForkForm component', () => {
    expect(wrapper.findComponent(ForkForm).exists()).toBe(true);
  });
});
@@ -40,7 +40,9 @@ describe('ForkForm component', () => {
    },
  ];

  const DEFAULT_PROPS = {
  const DEFAULT_PROVIDE = {
    newGroupPath: 'some/groups/path',
    visibilityHelpPath: 'some/visibility/help/path',
    endpoint: '/some/project-full-path/-/forks/new.json',
    projectFullPath: '/some/project-full-path',
    projectId: '10',
@@ -52,18 +54,14 @@ describe('ForkForm component', () => {
  };

  const mockGetRequest = (data = {}, statusCode = httpStatus.OK) => {
    axiosMock.onGet(DEFAULT_PROPS.endpoint).replyOnce(statusCode, data);
    axiosMock.onGet(DEFAULT_PROVIDE.endpoint).replyOnce(statusCode, data);
  };

  const createComponentFactory = (mountFn) => (props = {}, data = {}) => {
  const createComponentFactory = (mountFn) => (provide = {}, data = {}) => {
    wrapper = mountFn(ForkForm, {
      provide: {
        newGroupPath: 'some/groups/path',
        visibilityHelpPath: 'some/visibility/help/path',
      },
      propsData: {
        ...DEFAULT_PROPS,
        ...props,
        ...DEFAULT_PROVIDE,
        ...provide,
      },
      data() {
        return {
@@ -111,7 +109,7 @@ describe('ForkForm component', () => {
      mockGetRequest();
      createComponent();

      const { projectFullPath } = DEFAULT_PROPS;
      const { projectFullPath } = DEFAULT_PROVIDE;
      const cancelButton = wrapper.find('[data-testid="cancel-button"]');

      expect(cancelButton.attributes('href')).toBe(projectFullPath);
@@ -130,10 +128,10 @@ describe('ForkForm component', () => {
      mockGetRequest();
      createComponent();

      expect(findForkNameInput().attributes('value')).toBe(DEFAULT_PROPS.projectName);
      expect(findForkSlugInput().attributes('value')).toBe(DEFAULT_PROPS.projectPath);
      expect(findForkNameInput().attributes('value')).toBe(DEFAULT_PROVIDE.projectName);
      expect(findForkSlugInput().attributes('value')).toBe(DEFAULT_PROVIDE.projectPath);
      expect(findForkDescriptionTextarea().attributes('value')).toBe(
        DEFAULT_PROPS.projectDescription,
        DEFAULT_PROVIDE.projectDescription,
      );
    });

@@ -164,7 +162,7 @@ describe('ForkForm component', () => {
    it('make GET request from endpoint', async () => {
      await axios.waitForAll();

      expect(axiosMock.history.get[0].url).toBe(DEFAULT_PROPS.endpoint);
      expect(axiosMock.history.get[0].url).toBe(DEFAULT_PROVIDE.endpoint);
    });

    it('generate default option', async () => {
@@ -469,7 +467,7 @@ describe('ForkForm component', () => {
        projectName,
        projectPath,
        projectVisibility,
      } = DEFAULT_PROPS;
      } = DEFAULT_PROVIDE;

      const url = `/api/${GON_API_VERSION}/projects/${projectId}/fork`;
      const project = {
@@ -1,73 +0,0 @@
import { GlBadge, GlButton, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import ForkGroupsListItem from '~/pages/projects/forks/new/components/fork_groups_list_item.vue';

describe('Fork groups list item component', () => {
  let wrapper;

  const DEFAULT_GROUP_DATA = {
    id: 22,
    name: 'Gitlab Org',
    description: 'Ad et ipsam earum id aut nobis.',
    visibility: 'public',
    full_name: 'Gitlab Org',
    created_at: '2020-06-22T03:32:05.664Z',
    updated_at: '2020-06-22T03:32:05.664Z',
    avatar_url: null,
    fork_path: '/twitter/typeahead-js/-/forks?namespace_key=22',
    forked_project_path: null,
    permission: 'Owner',
    relative_path: '/gitlab-org',
    markdown_description:
      '<p data-sourcepos="1:1-1:31" dir="auto">Ad et ipsam earum id aut nobis.</p>',
    can_create_project: true,
    marked_for_deletion: false,
  };

  const DUMMY_PATH = '/dummy/path';

  const createWrapper = (propsData) => {
    wrapper = shallowMount(ForkGroupsListItem, {
      propsData: {
        ...propsData,
      },
    });
  };

  it('renders pending deletion badge if applicable', () => {
    createWrapper({ group: { ...DEFAULT_GROUP_DATA, marked_for_deletion: true } });

    expect(wrapper.find(GlBadge).text()).toBe('pending deletion');
  });

  it('renders go to fork button if has forked project', () => {
    createWrapper({ group: { ...DEFAULT_GROUP_DATA, forked_project_path: DUMMY_PATH } });

    expect(wrapper.find(GlButton).text()).toBe('Go to fork');
    expect(wrapper.find(GlButton).attributes().href).toBe(DUMMY_PATH);
  });

  it('renders select button if has no forked project', () => {
    createWrapper({
      group: { ...DEFAULT_GROUP_DATA, forked_project_path: null, fork_path: DUMMY_PATH },
    });

    expect(wrapper.find(GlButton).text()).toBe('Select');
    expect(wrapper.find('form').attributes().action).toBe(DUMMY_PATH);
  });

  it('renders link to current group', () => {
    const DUMMY_FULL_NAME = 'dummy';
    createWrapper({
      group: { ...DEFAULT_GROUP_DATA, relative_path: DUMMY_PATH, full_name: DUMMY_FULL_NAME },
    });

    expect(
      wrapper
        .findAll(GlLink)
        .filter((w) => w.text() === DUMMY_FULL_NAME)
        .at(0)
        .attributes().href,
    ).toBe(DUMMY_PATH);
  });
});
@@ -1,123 +0,0 @@
import { GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import ForkGroupsList from '~/pages/projects/forks/new/components/fork_groups_list.vue';
import ForkGroupsListItem from '~/pages/projects/forks/new/components/fork_groups_list_item.vue';

jest.mock('~/flash');

describe('Fork groups list component', () => {
  let wrapper;
  let axiosMock;

  const DEFAULT_PROPS = {
    endpoint: '/dummy',
  };

  const replyWith = (...args) => axiosMock.onGet(DEFAULT_PROPS.endpoint).reply(...args);

  const createWrapper = (propsData) => {
    wrapper = shallowMount(ForkGroupsList, {
      propsData: {
        ...DEFAULT_PROPS,
        ...propsData,
      },
      stubs: {
        GlTabs: {
          template: '<div><slot></slot><slot name="tabs-end"></slot></div>',
        },
      },
    });
  };

  beforeEach(() => {
    axiosMock = new AxiosMockAdapter(axios);
  });

  afterEach(() => {
    axiosMock.reset();

    if (wrapper) {
      wrapper.destroy();
      wrapper = null;
    }
  });

  it('fires load groups request on mount', async () => {
    replyWith(200, { namespaces: [] });
    createWrapper();

    await waitForPromises();

    expect(axiosMock.history.get[0].url).toBe(DEFAULT_PROPS.endpoint);
  });

  it('displays flash if loading groups fails', async () => {
    replyWith(500);
    createWrapper();

    await waitForPromises();

    expect(createFlash).toHaveBeenCalled();
  });

  it('displays loading indicator while loading groups', () => {
    replyWith(() => new Promise(() => {}));
    createWrapper();

    expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
  });

  it('displays empty text if no groups are available', async () => {
    const EMPTY_TEXT = 'No available groups to fork the project.';
    replyWith(200, { namespaces: [] });
    createWrapper();

    await waitForPromises();

    expect(wrapper.text()).toContain(EMPTY_TEXT);
  });

  it('displays filter field when groups are available', async () => {
    replyWith(200, { namespaces: [{ name: 'dummy1' }, { name: 'dummy2' }] });
    createWrapper();

    await waitForPromises();

    expect(wrapper.find(GlSearchBoxByType).exists()).toBe(true);
  });

  it('renders list items for each available group', async () => {
    const namespaces = [{ name: 'dummy1' }, { name: 'dummy2' }, { name: 'otherdummy' }];

    replyWith(200, { namespaces });
    createWrapper();

    await waitForPromises();

    expect(wrapper.findAll(ForkGroupsListItem)).toHaveLength(namespaces.length);

    namespaces.forEach((namespace, idx) => {
      expect(wrapper.findAll(ForkGroupsListItem).at(idx).props()).toStrictEqual({
        group: namespace,
      });
    });
  });

  it('filters repositories on the fly', async () => {
    replyWith(200, {
      namespaces: [{ name: 'dummy1' }, { name: 'dummy2' }, { name: 'otherdummy' }],
    });
    createWrapper();
    await waitForPromises();
    wrapper.find(GlSearchBoxByType).vm.$emit('input', 'other');
    await nextTick();

    expect(wrapper.findAll(ForkGroupsListItem)).toHaveLength(1);
    expect(wrapper.findAll(ForkGroupsListItem).at(0).props().group.name).toBe('otherdummy');
  });
});
spec/lib/atlassian/jira_connect_spec.rb (new file, 29 lines)

@@ -0,0 +1,29 @@
# frozen_string_literal: true

require 'fast_spec_helper'

RSpec.describe Atlassian::JiraConnect do
  describe '.app_name' do
    subject { described_class.app_name }

    it { is_expected.to eq('GitLab for Jira (localhost)') }
  end

  describe '.app_key' do
    subject(:app_key) { described_class.app_key }

    it { is_expected.to eq('gitlab-jira-connect-localhost') }

    context 'host name is too long' do
      before do
        hostname = 'x' * 100

        stub_config(gitlab: { host: hostname })
      end

      it 'truncates the key to be no longer than 64 characters', :aggregate_failures do
        expect(app_key).to eq('gitlab-jira-connect-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx')
      end
    end
  end
end
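The new spec pins down both the key format and the 64-character cap on the Jira Connect app key. A minimal sketch of the truncation behaviour being asserted, in plain Ruby (illustrative only; `app_key_for` is a hypothetical stand-in for `Atlassian::JiraConnect.app_key`):

    # Hypothetical stand-in for the behaviour the spec asserts above.
    KEY_PREFIX = 'gitlab-jira-connect-'

    def app_key_for(host)
      # Keys are derived from the GitLab host and capped at 64 characters.
      "#{KEY_PREFIX}#{host}"[0, 64]
    end

    app_key_for('localhost')      # => "gitlab-jira-connect-localhost"
    app_key_for('x' * 100).length # => 64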
spec/lib/gitlab/process_supervisor_spec.rb (new file, 127 lines)

@@ -0,0 +1,127 @@
# frozen_string_literal: true

require_relative '../../../lib/gitlab/process_supervisor'

RSpec.describe Gitlab::ProcessSupervisor do
  let(:health_check_interval_seconds) { 0.1 }
  let(:check_terminate_interval_seconds) { 1 }
  let(:forwarded_signals) { [] }
  let(:process_id) do
    Process.spawn('while true; do sleep 1; done').tap do |pid|
      Process.detach(pid)
    end
  end

  subject(:supervisor) do
    described_class.new(
      health_check_interval_seconds: health_check_interval_seconds,
      check_terminate_interval_seconds: check_terminate_interval_seconds,
      terminate_timeout_seconds: 1 + check_terminate_interval_seconds,
      forwarded_signals: forwarded_signals
    )
  end

  after do
    if Gitlab::ProcessManagement.process_alive?(process_id)
      Process.kill('KILL', process_id)
    end
  end

  describe '#supervise' do
    context 'while supervised process is alive' do
      it 'does not invoke callback' do
        expect(Gitlab::ProcessManagement.process_alive?(process_id)).to be(true)
        pids_killed = []

        thread = Thread.new do
          supervisor.supervise(process_id) do |dead_pids|
            pids_killed = dead_pids
            []
          end
        end

        # Wait several times the poll frequency of the supervisor.
        sleep health_check_interval_seconds * 10
        thread.terminate

        expect(pids_killed).to be_empty
        expect(Gitlab::ProcessManagement.process_alive?(process_id)).to be(true)
      end
    end

    context 'when supervised process dies' do
      it 'triggers callback with the dead PIDs' do
        expect(Gitlab::ProcessManagement.process_alive?(process_id)).to be(true)
        pids_killed = []

        thread = Thread.new do
          supervisor.supervise(process_id) do |dead_pids|
            pids_killed = dead_pids
            []
          end
        end

        # Terminate the supervised process.
        Process.kill('TERM', process_id)

        await_condition(sleep_sec: health_check_interval_seconds) do
          pids_killed == [process_id]
        end
        thread.terminate

        expect(Gitlab::ProcessManagement.process_alive?(process_id)).to be(false)
      end
    end

    context 'signal handling' do
      before do
        allow(supervisor).to receive(:sleep)
        allow(Gitlab::ProcessManagement).to receive(:trap_signals)
        allow(Gitlab::ProcessManagement).to receive(:all_alive?).and_return(false)
        allow(Gitlab::ProcessManagement).to receive(:signal_processes).with([process_id], anything)
      end

      context 'termination signals' do
        context 'when TERM results in timely shutdown of processes' do
          it 'forwards them to observed processes without waiting for grace period to expire' do
            allow(Gitlab::ProcessManagement).to receive(:any_alive?).and_return(false)

            expect(Gitlab::ProcessManagement).to receive(:trap_signals).ordered.with(%i(INT TERM)).and_yield(:TERM)
            expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with([process_id], :TERM)
            expect(supervisor).not_to receive(:sleep).with(check_terminate_interval_seconds)

            supervisor.supervise(process_id) { [] }
          end
        end

        context 'when TERM does not result in timely shutdown of processes' do
          it 'issues a KILL signal after the grace period expires' do
            expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i(INT TERM)).and_yield(:TERM)
            expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with([process_id], :TERM)
            expect(supervisor).to receive(:sleep).ordered.with(check_terminate_interval_seconds).at_least(:once)
            expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with([process_id], '-KILL')

            supervisor.supervise(process_id) { [] }
          end
        end
      end

      context 'forwarded signals' do
        let(:forwarded_signals) { %i(USR1) }

        it 'forwards given signals to the observed processes' do
          expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i(USR1)).and_yield(:USR1)
          expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with([process_id], :USR1)

          supervisor.supervise(process_id) { [] }
        end
      end
    end
  end

  def await_condition(timeout_sec: 5, sleep_sec: 0.1)
    Timeout.timeout(timeout_sec) do
      sleep sleep_sec until yield
    end
  end
end
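Read together, these examples describe the supervisor contract: `supervise` polls the watched PIDs, yields the ones that died to the caller (which may return replacement PIDs to watch), and forwards trapped signals. A minimal, self-contained polling loop in that spirit (illustrative only; this is not GitLab's implementation, and `alive?` is a hypothetical helper):

    # Minimal supervision-loop sketch; not the GitLab implementation.
    def alive?(pid)
      Process.kill(0, pid)
      true
    rescue Errno::ESRCH
      false
    end

    def supervise(pids, interval: 0.1)
      loop do
        dead = pids.reject { |pid| alive?(pid) }
        # Yield the dead PIDs; the caller returns replacement PIDs to keep watching.
        pids = (pids - dead) + yield(dead) unless dead.empty?
        break if pids.empty?

        sleep interval
      end
    end

    # supervise([worker_pid]) { |dead| [] } would stop once the worker exits.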
@@ -50,7 +50,8 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
        'importer',
        'network_policies',
        'geo',
        'growth'
        'growth',
        'work_items'
      )
    end
  end
@@ -0,0 +1,51 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter, :clean_gitlab_redis_shared_state do
  let(:user) { build(:user, id: 1) }

  shared_examples 'counter that does not track the event' do
    it 'does not track the event' do
      expect { 3.times { track_event } }.to not_change {
        Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
          event_names: event_name,
          start_date: 2.weeks.ago,
          end_date: 2.weeks.from_now
        )
      }
    end
  end

  describe '.track_work_item_title_changed_action' do
    subject(:track_event) { described_class.track_work_item_title_changed_action(author: user) }

    let(:event_name) { described_class::WORK_ITEM_TITLE_CHANGED }

    context 'when track_work_items_activity FF is enabled' do
      it 'tracks a unique event only once' do
        expect { 3.times { track_event } }.to change {
          Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
            event_names: described_class::WORK_ITEM_TITLE_CHANGED,
            start_date: 2.weeks.ago,
            end_date: 2.weeks.from_now
          )
        }.by(1)
      end

      context 'when author is nil' do
        let(:user) { nil }

        it_behaves_like 'counter that does not track the event'
      end
    end

    context 'when track_work_items_activity FF is disabled' do
      before do
        stub_feature_flags(track_work_items_activity: false)
      end

      it_behaves_like 'counter that does not track the event'
    end
  end
end
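The 'only once' expectation reflects how unique-event counters behave: repeated events from the same author inside the reporting window still count as a single distinct user. A conceptual stand-in using a Set, in plain Ruby (illustrative only; the real counter is Redis HyperLogLog based, and the event name below is a placeholder, not the value of WORK_ITEM_TITLE_CHANGED):

    require 'set'

    # Placeholder event name; the spec uses described_class::WORK_ITEM_TITLE_CHANGED.
    events = Hash.new { |hash, key| hash[key] = Set.new }

    def track(events, name, author_id)
      # Nil authors are ignored, mirroring the 'when author is nil' example.
      events[name] << author_id unless author_id.nil?
    end

    3.times { track(events, 'work_item_title_changed', 1) }
    events['work_item_title_changed'].size # => 1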
@@ -4249,6 +4249,29 @@ RSpec.describe MergeRequest, factory_default: :keep do
    end
  end

  describe '#eager_fetch_ref!' do
    let(:project) { create(:project, :repository) }

    # We use build instead of create to test that an IID is allocated
    subject { build(:merge_request, source_project: project) }

    it 'fetches the ref correctly' do
      expect(subject.iid).to be_nil

      expect { subject.eager_fetch_ref! }.to change { subject.iid.to_i }.by(1)

      expect(subject.target_project.repository.ref_exists?(subject.ref_path)).to be_truthy
    end

    it 'only fetches the ref once after saved' do
      expect(subject.target_project.repository).to receive(:fetch_source_branch!).once.and_call_original

      subject.save!

      expect(subject.target_project.repository.ref_exists?(subject.ref_path)).to be_truthy
    end
  end

  describe 'removing a merge request' do
    it 'refreshes the number of open merge requests of the target project' do
      project = subject.target_project
@@ -59,26 +59,4 @@ RSpec.describe ForkNamespaceEntity do
  it 'exposes human readable permission level' do
    expect(json[:permission]).to eql 'Developer'
  end

  it 'exposes can_create_project' do
    expect(json[:can_create_project]).to be true
  end

  context 'when fork_project_form feature flag is disabled' do
    before do
      stub_feature_flags(fork_project_form: false)
    end

    it 'sets can_create_project to true when user can create projects in namespace' do
      allow(user).to receive(:can?).with(:create_projects, namespace).and_return(true)

      expect(json[:can_create_project]).to be true
    end

    it 'sets can_create_project to false when user is not allowed create projects in namespace' do
      allow(user).to receive(:can?).with(:create_projects, namespace).and_return(false)

      expect(json[:can_create_project]).to be false
    end
  end
end
@@ -454,7 +454,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
      end
    end

    context 'when source and target projects are different' do
    shared_examples 'when source and target projects are different' do |eager_fetch_ref_enabled|
      let(:target_project) { fork_project(project, nil, repository: true) }

      let(:opts) do
@@ -497,9 +497,18 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
      end

      it 'creates the merge request', :sidekiq_might_not_need_inline do
        expect_next_instance_of(MergeRequest) do |instance|
          if eager_fetch_ref_enabled
            expect(instance).to receive(:eager_fetch_ref!).and_call_original
          else
            expect(instance).not_to receive(:eager_fetch_ref!)
          end
        end

        merge_request = described_class.new(project: project, current_user: user, params: opts).execute

        expect(merge_request).to be_persisted
        expect(merge_request.iid).to be > 0
      end

      it 'does not create the merge request when the target project is archived' do

@@ -511,6 +520,18 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
      end
    end

    context 'when merge_request_eager_fetch_ref is enabled' do
      it_behaves_like 'when source and target projects are different', true
    end

    context 'when merge_request_eager_fetch_ref is disabled' do
      before do
        stub_feature_flags(merge_request_eager_fetch_ref: false)
      end

      it_behaves_like 'when source and target projects are different', false
    end

    context 'when user sets source project id' do
      let(:another_project) { create(:project) }
@@ -23,6 +23,9 @@ RSpec.describe WorkItems::UpdateService do

    it 'triggers issuable_title_updated graphql subscription' do
      expect(GraphqlTriggers).to receive(:issuable_title_updated).with(work_item).and_call_original
      expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).to receive(:track_work_item_title_changed_action).with(author: current_user)
      # During the work item transition we also want to track work items as issues
      expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_title_changed_action)

      update_work_item
    end

@@ -33,6 +36,7 @@ RSpec.describe WorkItems::UpdateService do

    it 'does not trigger issuable_title_updated graphql subscription' do
      expect(GraphqlTriggers).not_to receive(:issuable_title_updated)
      expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).not_to receive(:track_work_item_title_changed_action)

      update_work_item
    end
@@ -1 +1 @@
golang 1.17.6
golang 1.17.7