Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-02-10 18:09:02 +00:00
parent 6cffe9ea21
commit 577bb49691
70 changed files with 1566 additions and 1628 deletions

View file

@ -693,37 +693,6 @@ RSpec/TimecopTravel:
Graphql/Descriptions:
Exclude:
- 'ee/app/graphql/ee/types/board_list_type.rb'
- 'ee/app/graphql/ee/types/board_type.rb'
- 'ee/app/graphql/ee/types/boards/board_issue_input_base_type.rb'
- 'ee/app/graphql/ee/types/boards/board_issue_input_type.rb'
- 'ee/app/graphql/ee/types/ci/pipeline_type.rb'
- 'ee/app/graphql/ee/types/group_type.rb'
- 'ee/app/graphql/ee/types/issue_connection_type.rb'
- 'ee/app/graphql/ee/types/merge_request_type.rb'
- 'ee/app/graphql/ee/types/namespace_type.rb'
- 'ee/app/graphql/ee/types/project_type.rb'
- 'ee/app/graphql/ee/types/query_type.rb'
- 'ee/app/graphql/types/admin/analytics/devops_adoption/segment_type.rb'
- 'ee/app/graphql/types/admin/analytics/devops_adoption/snapshot_type.rb'
- 'ee/app/graphql/types/boards/board_epic_type.rb'
- 'ee/app/graphql/types/boards/epic_board_type.rb'
- 'ee/app/graphql/types/boards/epic_user_preferences_type.rb'
- 'ee/app/graphql/types/burnup_chart_daily_totals_type.rb'
- 'ee/app/graphql/types/ci_configuration/sast/analyzers_entity_input_type.rb'
- 'ee/app/graphql/types/ci_configuration/sast/analyzers_entity_type.rb'
- 'ee/app/graphql/types/ci_configuration/sast/entity_input_type.rb'
- 'ee/app/graphql/types/ci_configuration/sast/input_type.rb'
- 'ee/app/graphql/types/clusters/agent_token_type.rb'
- 'ee/app/graphql/types/clusters/agent_type.rb'
- 'ee/app/graphql/types/compliance_management/compliance_framework_type.rb'
- 'ee/app/graphql/types/dast_scanner_profile_type.rb'
- 'ee/app/graphql/types/dast_site_profile_type.rb'
- 'ee/app/graphql/types/dast_site_validation_type.rb'
- 'ee/app/graphql/types/epic_descendant_count_type.rb'
- 'ee/app/graphql/types/epic_descendant_weight_sum_type.rb'
- 'ee/app/graphql/types/epic_health_status_type.rb'
- 'ee/app/graphql/types/epic_issue_type.rb'
- 'ee/app/graphql/types/epic_tree/epic_tree_node_input_type.rb'
- 'ee/app/graphql/types/external_issue_type.rb'
- 'ee/app/graphql/types/geo/geo_node_type.rb'

View file

@ -1 +1 @@
8.61.0
8.62.0

View file

@ -1,187 +0,0 @@
<script>
import {
GlButton,
GlFormGroup,
GlFormInput,
GlLink,
GlModal,
GlModalDirective,
GlSprintf,
} from '@gitlab/ui';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import ToggleButton from '~/vue_shared/components/toggle_button.vue';
import axios from '~/lib/utils/axios_utils';
import { s__, __ } from '~/locale';
import { deprecatedCreateFlash as createFlash } from '~/flash';
export default {
i18n: {
usageSection: s__(
'AlertService|You must provide this URL and authorization key to authorize an external service to send alerts to GitLab. You can provide this URL and key to multiple services. After configuring an external service, alerts from your service will display on the GitLab %{linkStart}Alerts%{linkEnd} page.',
),
setupSection: s__(
"AlertService|Review your external service's documentation to learn where to provide this information to your external service, and the %{linkStart}GitLab documentation%{linkEnd} to learn more about configuring your endpoint.",
),
},
COPY_TO_CLIPBOARD: __('Copy'),
RESET_KEY: __('Reset key'),
components: {
GlButton,
GlFormGroup,
GlFormInput,
GlLink,
GlModal,
GlSprintf,
ClipboardButton,
ToggleButton,
},
directives: {
'gl-modal': GlModalDirective,
},
props: {
alertsSetupUrl: {
type: String,
required: true,
},
alertsUsageUrl: {
type: String,
required: true,
},
initialAuthorizationKey: {
type: String,
required: false,
default: '',
},
formPath: {
type: String,
required: true,
},
url: {
type: String,
required: true,
},
initialActivated: {
type: Boolean,
required: true,
},
isDisabled: {
type: Boolean,
required: false,
default: false,
},
},
data() {
return {
activated: this.initialActivated,
loadingActivated: false,
authorizationKey: this.initialAuthorizationKey,
};
},
computed: {
sections() {
return [
{
text: this.$options.i18n.usageSection,
url: this.alertsUsageUrl,
},
{
text: this.$options.i18n.setupSection,
url: this.alertsSetupUrl,
},
];
},
},
methods: {
resetKey() {
return axios
.put(this.formPath, { service: { token: '' } })
.then((res) => {
this.authorizationKey = res.data.token;
})
.catch(() => {
createFlash(__('Failed to reset key. Please try again.'));
});
},
toggleActivated(value) {
this.loadingActivated = true;
return axios
.put(this.formPath, { service: { active: value } })
.then(() => {
this.activated = value;
this.loadingActivated = false;
})
.catch(() => {
createFlash(__('Update failed. Please try again.'));
this.loadingActivated = false;
});
},
},
};
</script>
<template>
<div>
<div data-testid="description">
<p v-for="section in sections" :key="section.text">
<gl-sprintf :message="section.text">
<template #link="{ content }">
<gl-link :href="section.url" target="_blank">{{ content }}</gl-link>
</template>
</gl-sprintf>
</p>
</div>
<gl-form-group :label="__('Active')" label-for="activated" label-class="label-bold">
<toggle-button
id="activated"
:disabled-input="loadingActivated || isDisabled"
:is-loading="loadingActivated"
:value="activated"
@change="toggleActivated"
/>
</gl-form-group>
<gl-form-group :label="__('URL')" label-for="url" label-class="label-bold">
<div class="input-group">
<gl-form-input id="url" :readonly="true" :value="url" />
<span class="input-group-append">
<clipboard-button
:text="url"
:title="$options.COPY_TO_CLIPBOARD"
:disabled="isDisabled"
/>
</span>
</div>
</gl-form-group>
<gl-form-group
:label="__('Authorization key')"
label-for="authorization-key"
label-class="label-bold"
>
<div class="input-group">
<gl-form-input id="authorization-key" :readonly="true" :value="authorizationKey" />
<span class="input-group-append">
<clipboard-button
:text="authorizationKey"
:title="$options.COPY_TO_CLIPBOARD"
:disabled="isDisabled"
/>
</span>
</div>
<gl-button v-gl-modal.authKeyModal class="gl-mt-2" :disabled="isDisabled">{{
$options.RESET_KEY
}}</gl-button>
<gl-modal
modal-id="authKeyModal"
:title="$options.RESET_KEY"
:ok-title="$options.RESET_KEY"
ok-variant="danger"
@ok="resetKey"
>
{{
__(
'Resetting the authorization key for this project will require updating the authorization key in every alert source it is enabled in.',
)
}}
</gl-modal>
</gl-form-group>
</div>
</template>

View file

@ -1,39 +0,0 @@
import Vue from 'vue';
import { parseBoolean } from '~/lib/utils/common_utils';
import AlertsServiceForm from './components/alerts_service_form.vue';
export default (el) => {
if (!el) {
return null;
}
const {
activated: activatedStr,
alertsSetupUrl,
alertsUsageUrl,
formPath,
authorizationKey,
url,
disabled,
} = el.dataset;
const activated = parseBoolean(activatedStr);
const isDisabled = parseBoolean(disabled);
return new Vue({
el,
render(createElement) {
return createElement(AlertsServiceForm, {
props: {
alertsSetupUrl,
alertsUsageUrl,
initialActivated: activated,
formPath,
initialAuthorizationKey: authorizationKey,
url,
isDisabled,
},
});
},
});
};

View file

@ -54,7 +54,7 @@ export default {
<template>
<div>
<div class="title-container">
<h2 v-safe-html="issuable.titleHtml" class="title qa-title" dir="auto"></h2>
<h2 v-safe-html="issuable.titleHtml || issuable.title" class="title qa-title" dir="auto"></h2>
<gl-button
v-if="enableEdit"
v-gl-tooltip.bottom

View file

@ -1,6 +1,5 @@
import IntegrationSettingsForm from '~/integrations/integration_settings_form';
import PrometheusMetrics from '~/prometheus_metrics/prometheus_metrics';
import initAlertsSettings from '~/alerts_service_settings';
document.addEventListener('DOMContentLoaded', () => {
const prometheusSettingsWrapper = document.querySelector('.js-prometheus-metrics-monitoring');
@ -11,6 +10,4 @@ document.addEventListener('DOMContentLoaded', () => {
const prometheusMetrics = new PrometheusMetrics('.js-prometheus-metrics-monitoring');
prometheusMetrics.loadActiveMetrics();
}
initAlertsSettings(document.querySelector('.js-alerts-service-settings'));
});

View file

@ -1,9 +1,6 @@
import IntegrationSettingsForm from '~/integrations/integration_settings_form';
import initAlertsSettings from '~/alerts_service_settings';
document.addEventListener('DOMContentLoaded', () => {
const integrationSettingsForm = new IntegrationSettingsForm('.js-integration-settings-form');
integrationSettingsForm.init();
initAlertsSettings(document.querySelector('.js-alerts-service-settings'));
});

View file

@ -1,6 +1,5 @@
import IntegrationSettingsForm from '~/integrations/integration_settings_form';
import PrometheusMetrics from '~/prometheus_metrics/prometheus_metrics';
import initAlertsSettings from '~/alerts_service_settings';
document.addEventListener('DOMContentLoaded', () => {
const prometheusSettingsWrapper = document.querySelector('.js-prometheus-metrics-monitoring');
@ -11,6 +10,4 @@ document.addEventListener('DOMContentLoaded', () => {
const prometheusMetrics = new PrometheusMetrics('.js-prometheus-metrics-monitoring');
prometheusMetrics.loadActiveMetrics();
}
initAlertsSettings(document.querySelector('.js-alerts-service-settings'));
});

View file

@ -1,7 +1,6 @@
import IntegrationSettingsForm from '~/integrations/integration_settings_form';
import CustomMetrics from '~/prometheus_metrics/custom_metrics';
import PrometheusAlerts from '~/prometheus_alerts';
import initAlertsSettings from '~/alerts_service_settings';
document.addEventListener('DOMContentLoaded', () => {
const integrationSettingsForm = new IntegrationSettingsForm('.js-integration-settings-form');
@ -15,5 +14,4 @@ document.addEventListener('DOMContentLoaded', () => {
}
PrometheusAlerts();
initAlertsSettings(document.querySelector('.js-alerts-service-settings'));
});

View file

@ -18,6 +18,7 @@ import CreateSnippetMutation from '../mutations/createSnippet.mutation.graphql';
import { getSnippetMixin } from '../mixins/snippets';
import { SNIPPET_CREATE_MUTATION_ERROR, SNIPPET_UPDATE_MUTATION_ERROR } from '../constants';
import { markBlobPerformance } from '../utils/blob';
import { getErrorMessage } from '../utils/error';
import SnippetBlobActionsEdit from './snippet_blob_actions_edit.vue';
import SnippetVisibilityEdit from './snippet_visibility_edit.vue';
@ -190,7 +191,10 @@ export default {
}
})
.catch((e) => {
this.flashAPIFailure(e);
// eslint-disable-next-line no-console
console.error('[gitlab] unexpected error while updating snippet', e);
this.flashAPIFailure(getErrorMessage(e));
});
},
updateActions(actions) {

View file

@ -11,10 +11,14 @@ export const getSnippetMixin = {
ids: [this.snippetGid],
};
},
update: (data) => {
update(data) {
const res = data.snippets.nodes[0];
// Set `snippet.blobs` since some child components are coupled to this.
if (res) {
res.blobs = res.blobs.nodes;
// It's possible for us to not get any blobs in a response.
// In this case, we should default to current blobs.
res.blobs = res.blobs ? res.blobs.nodes : this.blobs;
}
return res;

View file

@ -0,0 +1,15 @@
import { isString } from 'lodash';
import { __ } from '~/locale';
export const UNEXPECTED_ERROR = __('Unexpected error');
export const getErrorMessage = (e) => {
if (!e) {
return UNEXPECTED_ERROR;
}
if (isString(e)) {
return e;
}
return e.message || e.networkError || UNEXPECTED_ERROR;
};

View file

@ -0,0 +1,51 @@
# frozen_string_literal: true
module Repositories
# A finder class for getting the tag of the last release before a given
# version.
#
# Imagine a project with the following tags:
#
# * v1.0.0
# * v1.1.0
# * v2.0.0
#
# If the version supplied is 2.1.0, the tag returned will be v2.0.0. When the
# version is 1.1.1 or 1.2.0, the returned tag will be v1.1.0.
#
# This finder expects all tags it considers to meet the following
# requirements:
#
# * They start with the letter "v"
# * They use semantic versioning for the tag format
#
# Tags not meeting these requirements are ignored.
class PreviousTagFinder
TAG_REGEX = /\Av(?<version>#{Gitlab::Regex.unbounded_semver_regex})\z/.freeze
def initialize(project)
@project = project
end
def execute(new_version)
tags = {}
versions = [new_version]
@project.repository.tags.each do |tag|
matches = tag.name.match(TAG_REGEX)
next unless matches
version = matches[:version]
tags[version] = tag
versions << version
end
VersionSorter.sort!(versions)
index = versions.index(new_version)
tags[versions[index - 1]] if index&.positive?
end
end
end
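A minimal usage sketch of the finder above; the `project` variable and the inline results are illustrative and assume the tags from the example comment (`v1.0.0`, `v1.1.0`, `v2.0.0`):

```ruby
# Hypothetical usage of Repositories::PreviousTagFinder (not part of the diff);
# `project` is assumed to expose `repository.tags` as the class above expects.
finder = Repositories::PreviousTagFinder.new(project)

previous_tag = finder.execute('2.1.0')
previous_tag&.name              # => "v2.0.0" given the tags v1.0.0, v1.1.0, v2.0.0
previous_tag&.target_commit&.id # SHA the changelog service uses as the start of the range

finder.execute('1.2.0')&.name   # => "v1.1.0"
finder.execute('0.9.0')         # => nil when no earlier matching tag exists
```

The safe-navigation calls mirror the fact that `execute` returns `nil` when the supplied version sorts first, which is exactly the case `start_of_commit_range` in the service below turns into a `Gitlab::Changelog::Error`.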

View file

@ -39,8 +39,8 @@ module Repositories
project,
user,
version:,
from:,
to:,
from: nil,
date: DateTime.now,
branch: project.default_branch_or_master,
trailer: DEFAULT_TRAILER,
@ -61,6 +61,8 @@ module Repositories
# rubocop: enable Metrics/ParameterLists
def execute
from = start_of_commit_range
# For every entry we want to only include the merge request that
# originally introduced the commit, which is the oldest merge request that
# contains the commit. We fetch these merge requests in batches, reducing
@ -71,7 +73,7 @@ module Repositories
.new(version: @version, date: @date, config: config)
commits =
CommitsWithTrailerFinder.new(project: @project, from: @from, to: @to)
CommitsWithTrailerFinder.new(project: @project, from: from, to: @to)
commits.each_page(@trailer) do |page|
mrs = mrs_finder.execute(page)
@ -95,5 +97,19 @@ module Repositories
.new(@project, @user)
.commit(release: release, file: @file, branch: @branch, message: @message)
end
def start_of_commit_range
return @from if @from
if (prev_tag = PreviousTagFinder.new(@project).execute(@version))
return prev_tag.target_commit.id
end
raise(
Gitlab::Changelog::Error,
'The commit start range is unspecified, and no previous tag ' \
'could be found to use instead'
)
end
end
end

View file

@ -7,13 +7,13 @@
- if current_user.two_factor_otp_enabled?
.row.gl-mb-3
.col-md-5
%button#js-setup-token-2fa-device.gl-button.btn.btn-info= _("Set up new device")
%button#js-setup-token-2fa-device.gl-button.btn.btn-confirm= _("Set up new device")
.col-md-7
%p= _("Your device needs to be set up. Plug it in (if needed) and click the button on the left.")
- else
.row.gl-mb-3
.col-md-4
%button#js-setup-token-2fa-device.gl-button.btn.btn-info.btn-block{ disabled: true }= _("Set up new device")
%button#js-setup-token-2fa-device.gl-button.btn.btn-confirm.btn-block{ disabled: true }= _("Set up new device")
.col-md-8
%p= _("You need to register a two-factor authentication app before you can set up a device.")

View file

@ -25,7 +25,7 @@
%p
#{_('Status')}: #{current_user.two_factor_enabled? ? _('Enabled') : _('Disabled')}
- if current_user.two_factor_enabled?
= link_to _('Manage two-factor authentication'), profile_two_factor_auth_path, class: 'gl-button btn btn-info'
= link_to _('Manage two-factor authentication'), profile_two_factor_auth_path, class: 'gl-button btn btn-confirm'
- else
.gl-mb-3
= link_to _('Enable two-factor authentication'), profile_two_factor_auth_path, class: 'gl-button btn btn-success', data: { qa_selector: 'enable_2fa_button' }

View file

@ -22,7 +22,7 @@
data: { confirm: webauthn_enabled ? _('Are you sure? This will invalidate your registered applications and U2F / WebAuthn devices.') : _('Are you sure? This will invalidate your registered applications and U2F devices.') },
class: 'gl-button btn btn-danger gl-mr-3'
= form_tag codes_profile_two_factor_auth_path, {style: 'display: inline-block', method: :post} do |f|
= submit_tag _('Regenerate recovery codes'), class: 'btn'
= submit_tag _('Regenerate recovery codes'), class: 'gl-button btn btn-default'
- else
%p

View file

@ -0,0 +1,5 @@
---
title: Apply new GitLab UI for buttons in 2FA and change deprecated info variant to confirm
merge_request: 52580
author: Yogi (@yo)
type: other

View file

@ -0,0 +1,5 @@
---
title: Filter deployments by finished_at in Value Stream Analytics
merge_request: 53861
author:
type: changed

View file

@ -0,0 +1,5 @@
---
title: Improve error message reporting in snippet create or update
merge_request: 53576
author:
type: other

View file

@ -0,0 +1,5 @@
---
title: Update GitLab Workhorse to v8.62.0
merge_request: 53864
author:
type: other

View file

@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/300649
milestone: '13.9'
type: development
group: group::optimize
default_enabled: false
default_enabled: true

View file

@ -562,8 +562,7 @@ supported by consolidated configuration form, refer to the following guides:
If you're working to [scale out](reference_architectures/index.md) your GitLab implementation,
or add fault tolerance and redundancy, you may be
looking at removing dependencies on block or network file systems.
See the following additional guides and
[note that Pages requires disk storage](#gitlab-pages-requires-nfs):
See the following additional guides:
1. Make sure the [`git` user home directory](https://docs.gitlab.com/omnibus/settings/configuration.html#moving-the-home-directory-for-a-user) is on local disk.
1. Configure [database lookup of SSH keys](operations/fast_ssh_key_lookup.md)
@ -598,17 +597,6 @@ with the Fog library that GitLab uses. Symptoms include an error in `production.
411 Length Required
```
### GitLab Pages requires NFS
If you're working to add more GitLab servers for [scaling or fault tolerance](reference_architectures/index.md)
and one of your requirements is [GitLab Pages](../user/project/pages/index.md) this currently requires
NFS. There is [work in progress](https://gitlab.com/gitlab-org/gitlab-pages/-/issues/196)
to remove this dependency. In the future, GitLab Pages may use
[object storage](https://gitlab.com/gitlab-org/gitlab/-/issues/208135).
The dependency on disk storage also prevents Pages being deployed using the
[GitLab Helm chart](https://gitlab.com/gitlab-org/charts/gitlab/-/issues/37).
### Incremental logging is required for CI to use object storage
If you configure GitLab to use object storage for CI logs and artifacts,

View file

@ -27,7 +27,7 @@ can be started.
## Start multiple processes
> - [Introduced](https://gitlab.com/gitlab-org/omnibus-gitlab/-/merge_requests/4006) in GitLab 12.10, starting multiple processes with Sidekiq cluster.
> - [Sidekiq cluster moved](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/181) to GitLab [Free](https://about.gitlab.com/pricing/) in GitLab 12.10.
> - [Sidekiq cluster moved](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/181) to GitLab Free in 12.10.
> - [Sidekiq cluster became default](https://gitlab.com/gitlab-org/omnibus-gitlab/-/merge_requests/4140) in GitLab 13.0.
To start multiple processes:
@ -112,8 +112,8 @@ you list:
## Queue selector
> - [Introduced](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/45) in [GitLab Starter](https://about.gitlab.com/pricing/) 12.8.
> - [Sidekiq cluster including queue selector moved](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/181) to GitLab [Free](https://about.gitlab.com/pricing/) in GitLab 12.10.
> - [Introduced](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/45) in GitLab 12.8.
> - [Sidekiq cluster, including queue selector, moved](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/181) to GitLab Free in 12.10.
> - [Renamed from `experimental_queue_selector` to `queue_selector`](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/147) in GitLab 13.6.
In addition to selecting queues by name, as above, the `queue_selector`

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -271,19 +271,19 @@ Represents a project or group board.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `assignee` | User | The board assignee |
| `epics` | BoardEpicConnection | Epics associated with board issues |
| `assignee` | User | The board assignee. |
| `epics` | BoardEpicConnection | Epics associated with board issues. |
| `hideBacklogList` | Boolean | Whether or not backlog list is hidden. |
| `hideClosedList` | Boolean | Whether or not closed list is hidden. |
| `id` | ID! | ID (global ID) of the board. |
| `iteration` | Iteration | The board iteration. |
| `labels` | LabelConnection | Labels of the board |
| `labels` | LabelConnection | Labels of the board. |
| `lists` | BoardListConnection | Lists of the board. |
| `milestone` | Milestone | The board milestone |
| `milestone` | Milestone | The board milestone. |
| `name` | String | Name of the board. |
| `webPath` | String! | Web path of the board. |
| `webUrl` | String! | Web URL of the board. |
| `weight` | Int | Weight of the board |
| `weight` | Int | Weight of the board. |
### BoardEpic
@ -334,7 +334,7 @@ Represents an epic on an issue board.
| `userDiscussionsCount` | Int! | Number of user discussions in the epic. |
| `userNotesCount` | Int! | Number of user notes of the epic. |
| `userPermissions` | EpicPermissions! | Permissions for the current user on the resource |
| `userPreferences` | BoardEpicUserPreferences | User preferences for the epic on the issue board |
| `userPreferences` | BoardEpicUserPreferences | User preferences for the epic on the issue board. |
| `webPath` | String! | Web path of the epic. |
| `webUrl` | String! | Web URL of the epic. |
@ -344,7 +344,7 @@ Represents user preferences for a board epic.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `collapsed` | Boolean! | Indicates epic should be displayed as collapsed |
| `collapsed` | Boolean! | Indicates epic should be displayed as collapsed. |
### BoardList
@ -352,21 +352,21 @@ Represents a list for an issue board.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `assignee` | User | Assignee in the list |
| `assignee` | User | Assignee in the list. |
| `collapsed` | Boolean | Indicates if list is collapsed for this user. |
| `id` | ID! | ID (global ID) of the list. |
| `issues` | IssueConnection | Board issues. |
| `issuesCount` | Int | Count of issues in the list. |
| `iteration` | Iteration | Iteration of the list |
| `iteration` | Iteration | Iteration of the list. |
| `label` | Label | Label of the list. |
| `limitMetric` | ListLimitMetric | The current limit metric for the list |
| `limitMetric` | ListLimitMetric | The current limit metric for the list. |
| `listType` | String! | Type of the list. |
| `maxIssueCount` | Int | Maximum number of issues in the list |
| `maxIssueWeight` | Int | Maximum weight of issues in the list |
| `milestone` | Milestone | Milestone of the list |
| `maxIssueCount` | Int | Maximum number of issues in the list. |
| `maxIssueWeight` | Int | Maximum weight of issues in the list. |
| `milestone` | Milestone | Milestone of the list. |
| `position` | Int | Position of list within the board. |
| `title` | String! | Title of the list. |
| `totalWeight` | Int | Total weight of all issues in the list |
| `totalWeight` | Int | Total weight of all issues in the list. |
### BoardListCreatePayload
@ -401,11 +401,11 @@ Represents the total number of issues and their weights for a particular day.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `completedCount` | Int! | Number of closed issues as of this day |
| `completedWeight` | Int! | Total weight of closed issues as of this day |
| `date` | ISO8601Date! | Date for burnup totals |
| `scopeCount` | Int! | Number of issues as of this day |
| `scopeWeight` | Int! | Total weight of issues as of this day |
| `completedCount` | Int! | Number of closed issues as of this day. |
| `completedWeight` | Int! | Total weight of closed issues as of this day. |
| `date` | ISO8601Date! | Date for burnup totals. |
| `scopeCount` | Int! | Number of issues as of this day. |
| `scopeWeight` | Int! | Total weight of issues as of this day. |
### CiApplicationSettings
@ -521,12 +521,12 @@ Autogenerated return type of CiCdSettingsUpdate.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `createdAt` | Time | Timestamp the cluster agent was created |
| `id` | ID! | ID of the cluster agent |
| `name` | String | Name of the cluster agent |
| `project` | Project | The project this cluster agent is associated with |
| `tokens` | ClusterAgentTokenConnection | Tokens associated with the cluster agent |
| `updatedAt` | Time | Timestamp the cluster agent was updated |
| `createdAt` | Time | Timestamp the cluster agent was created. |
| `id` | ID! | ID of the cluster agent. |
| `name` | String | Name of the cluster agent. |
| `project` | Project | The project this cluster agent is associated with. |
| `tokens` | ClusterAgentTokenConnection | Tokens associated with the cluster agent. |
| `updatedAt` | Time | Timestamp the cluster agent was updated. |
### ClusterAgentDeletePayload
@ -541,9 +541,9 @@ Autogenerated return type of ClusterAgentDelete.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `clusterAgent` | ClusterAgent | Cluster agent this token is associated with |
| `createdAt` | Time | Timestamp the token was created |
| `id` | ClustersAgentTokenID! | Global ID of the token |
| `clusterAgent` | ClusterAgent | Cluster agent this token is associated with. |
| `createdAt` | Time | Timestamp the token was created. |
| `id` | ClustersAgentTokenID! | Global ID of the token. |
### ClusterAgentTokenCreatePayload
@ -623,10 +623,10 @@ Represents a ComplianceFramework associated with a Project.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `color` | String! | Hexadecimal representation of compliance framework's label color |
| `description` | String! | Description of the compliance framework |
| `id` | ID! | Compliance framework ID |
| `name` | String! | Name of the compliance framework |
| `color` | String! | Hexadecimal representation of compliance framework's label color. |
| `description` | String! | Description of the compliance framework. |
| `id` | ID! | Compliance framework ID. |
| `name` | String! | Name of the compliance framework. |
| `pipelineConfigurationFullPath` | String | Full path of the compliance pipeline configuration stored in a project repository, such as `.gitlab/compliance/soc2/.gitlab-ci.yml`. |
### ComposerMetadata
@ -947,14 +947,14 @@ Represents a DAST scanner profile.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `editPath` | String | Relative web path to the edit page of a scanner profile |
| `editPath` | String | Relative web path to the edit page of a scanner profile. |
| `globalId` **{warning-solid}** | DastScannerProfileID! | **Deprecated:** Use `id`. Deprecated in 13.6. |
| `id` | DastScannerProfileID! | ID of the DAST scanner profile |
| `profileName` | String | Name of the DAST scanner profile |
| `id` | DastScannerProfileID! | ID of the DAST scanner profile. |
| `profileName` | String | Name of the DAST scanner profile. |
| `scanType` | DastScanTypeEnum | Indicates the type of DAST scan that will run. Either a Passive Scan or an Active Scan. |
| `showDebugMessages` | Boolean! | Indicates if debug messages should be included in DAST console output. True to include the debug messages. |
| `spiderTimeout` | Int | The maximum number of minutes allowed for the spider to traverse the site |
| `targetTimeout` | Int | The maximum number of seconds allowed for the site under test to respond to a request |
| `spiderTimeout` | Int | The maximum number of minutes allowed for the spider to traverse the site. |
| `targetTimeout` | Int | The maximum number of seconds allowed for the site under test to respond to a request. |
| `useAjaxSpider` | Boolean! | Indicates if the AJAX spider should be used to crawl the target site. True to run the AJAX spider in addition to the traditional spider, and false to run only the traditional spider. |
### DastScannerProfileCreatePayload
@ -993,13 +993,13 @@ Represents a DAST Site Profile.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `editPath` | String | Relative web path to the edit page of a site profile |
| `id` | DastSiteProfileID! | ID of the site profile |
| `normalizedTargetUrl` | String | Normalized URL of the target to be scanned |
| `profileName` | String | The name of the site profile |
| `targetUrl` | String | The URL of the target to be scanned |
| `editPath` | String | Relative web path to the edit page of a site profile. |
| `id` | DastSiteProfileID! | ID of the site profile. |
| `normalizedTargetUrl` | String | Normalized URL of the target to be scanned. |
| `profileName` | String | The name of the site profile. |
| `targetUrl` | String | The URL of the target to be scanned. |
| `userPermissions` | DastSiteProfilePermissions! | Permissions for the current user on the resource |
| `validationStatus` | DastSiteProfileValidationStatusEnum | The current validation status of the site profile |
| `validationStatus` | DastSiteProfileValidationStatusEnum | The current validation status of the site profile. |
### DastSiteProfileCreatePayload
@ -1056,9 +1056,9 @@ Represents a DAST Site Validation.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `id` | DastSiteValidationID! | Global ID of the site validation |
| `normalizedTargetUrl` | String | Normalized URL of the target to be validated |
| `status` | DastSiteProfileValidationStatusEnum! | Status of the site validation |
| `id` | DastSiteValidationID! | Global ID of the site validation. |
| `normalizedTargetUrl` | String | Normalized URL of the target to be validated. |
| `status` | DastSiteProfileValidationStatusEnum! | Status of the site validation. |
### DastSiteValidationCreatePayload
@ -1302,10 +1302,10 @@ Segment.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `groups` | Group! => Array | Assigned groups |
| `id` | ID! | ID of the segment |
| `latestSnapshot` | DevopsAdoptionSnapshot | The latest adoption metrics for the segment |
| `name` | String! | Name of the segment |
| `groups` | Group! => Array | Assigned groups. |
| `id` | ID! | ID of the segment. |
| `latestSnapshot` | DevopsAdoptionSnapshot | The latest adoption metrics for the segment. |
| `name` | String! | Name of the segment. |
### DevopsAdoptionSnapshot
@ -1313,16 +1313,16 @@ Snapshot.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `deploySucceeded` | Boolean! | At least one deployment succeeded |
| `endTime` | Time! | The end time for the snapshot where the data points were collected |
| `issueOpened` | Boolean! | At least one issue was opened |
| `mergeRequestApproved` | Boolean! | At least one merge request was approved |
| `mergeRequestOpened` | Boolean! | At least one merge request was opened |
| `pipelineSucceeded` | Boolean! | At least one pipeline succeeded |
| `recordedAt` | Time! | The time the snapshot was recorded |
| `runnerConfigured` | Boolean! | At least one runner was used |
| `securityScanSucceeded` | Boolean! | At least one security scan succeeded |
| `startTime` | Time! | The start time for the snapshot where the data points were collected |
| `deploySucceeded` | Boolean! | At least one deployment succeeded. |
| `endTime` | Time! | The end time for the snapshot where the data points were collected. |
| `issueOpened` | Boolean! | At least one issue was opened. |
| `mergeRequestApproved` | Boolean! | At least one merge request was approved. |
| `mergeRequestOpened` | Boolean! | At least one merge request was opened. |
| `pipelineSucceeded` | Boolean! | At least one pipeline succeeded. |
| `recordedAt` | Time! | The time the snapshot was recorded. |
| `runnerConfigured` | Boolean! | At least one runner was used. |
| `securityScanSucceeded` | Boolean! | At least one security scan succeeded. |
| `startTime` | Time! | The start time for the snapshot where the data points were collected. |
### DiffPosition
@ -1523,10 +1523,10 @@ Counts of descendent epics.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `closedEpics` | Int | Number of closed child epics |
| `closedIssues` | Int | Number of closed epic issues |
| `openedEpics` | Int | Number of opened child epics |
| `openedIssues` | Int | Number of opened epic issues |
| `closedEpics` | Int | Number of closed child epics. |
| `closedIssues` | Int | Number of closed epic issues. |
| `openedEpics` | Int | Number of opened child epics. |
| `openedIssues` | Int | Number of opened epic issues. |
### EpicDescendantWeights
@ -1534,8 +1534,8 @@ Total weight of open and closed descendant issues.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `closedIssues` | Int | Total weight of completed (closed) issues in this epic, including epic descendants |
| `openedIssues` | Int | Total weight of opened issues in this epic, including epic descendants |
| `closedIssues` | Int | Total weight of completed (closed) issues in this epic, including epic descendants. |
| `openedIssues` | Int | Total weight of opened issues in this epic, including epic descendants. |
### EpicHealthStatus
@ -1543,9 +1543,9 @@ Health status of child issues.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `issuesAtRisk` | Int | Number of issues at risk |
| `issuesNeedingAttention` | Int | Number of issues that need attention |
| `issuesOnTrack` | Int | Number of issues on track |
| `issuesAtRisk` | Int | Number of issues at risk. |
| `issuesNeedingAttention` | Int | Number of issues that need attention. |
| `issuesOnTrack` | Int | Number of issues on track. |
### EpicIssue
@ -1572,11 +1572,11 @@ Relationship between an epic and an issue.
| `dueDate` | Time | Due date of the issue. |
| `emailsDisabled` | Boolean! | Indicates if a project has email notifications disabled: `true` if email notifications are disabled. |
| `epic` | Epic | Epic to which this issue belongs. |
| `epicIssueId` | ID! | ID of the epic-issue relation |
| `epicIssueId` | ID! | ID of the epic-issue relation. |
| `healthStatus` | HealthStatus | Current health status. |
| `humanTimeEstimate` | String | Human-readable time estimate of the issue. |
| `humanTotalTimeSpent` | String | Human-readable total time reported as spent on the issue. |
| `id` | ID | Global ID of the epic-issue relation |
| `id` | ID | Global ID of the epic-issue relation. |
| `iid` | ID! | Internal ID of the issue. |
| `iteration` | Iteration | Iteration of the issue. |
| `labels` | LabelConnection | Labels of the issue. |
@ -1587,7 +1587,7 @@ Relationship between an epic and an issue.
| `notes` | NoteConnection! | All notes on this noteable. |
| `participants` | UserConnection | List of participants in the issue. |
| `reference` | String! | Internal reference of the issue. Returned in shortened format by default. |
| `relationPath` | String | URI path of the epic-issue relation |
| `relationPath` | String | URI path of the epic-issue relation. |
| `relativePosition` | Int | Relative position of the issue (used for positioning in epic tree and issue boards). |
| `severity` | IssuableSeverity | Severity level of the incident. |
| `slaDueAt` | Time | Timestamp of when the issue SLA expires. |
@ -1727,34 +1727,34 @@ Autogenerated return type of GitlabSubscriptionActivate.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `actualRepositorySizeLimit` | Float | Size limit for repositories in the namespace in bytes |
| `additionalPurchasedStorageSize` | Float | Additional storage purchased for the root namespace in bytes |
| `actualRepositorySizeLimit` | Float | Size limit for repositories in the namespace in bytes. |
| `additionalPurchasedStorageSize` | Float | Additional storage purchased for the root namespace in bytes. |
| `autoDevopsEnabled` | Boolean | Indicates whether Auto DevOps is enabled for all projects within this group. |
| `avatarUrl` | String | Avatar URL of the group. |
| `board` | Board | A single board of the group. |
| `boards` | BoardConnection | Boards of the group. |
| `codeCoverageActivities` | CodeCoverageActivityConnection | Represents the code coverage activity for this group |
| `codeCoverageActivities` | CodeCoverageActivityConnection | Represents the code coverage activity for this group. |
| `complianceFrameworks` | ComplianceFrameworkConnection | Compliance frameworks available to projects in this namespace. Available only when feature flag `ff_custom_compliance_frameworks` is enabled. |
| `containerRepositories` | ContainerRepositoryConnection | Container repositories of the group. |
| `containerRepositoriesCount` | Int! | Number of container repositories in the group. |
| `containsLockedProjects` | Boolean! | Includes at least one project where the repository size exceeds the limit |
| `containsLockedProjects` | Boolean! | Includes at least one project where the repository size exceeds the limit. |
| `customEmoji` | CustomEmojiConnection | Custom emoji within this namespace. Available only when feature flag `custom_emoji` is enabled. |
| `description` | String | Description of the namespace. |
| `descriptionHtml` | String | The GitLab Flavored Markdown rendering of `description` |
| `emailsDisabled` | Boolean | Indicates if a group has email notifications disabled. |
| `epic` | Epic | Find a single epic |
| `epicBoard` | EpicBoard | Find a single epic board |
| `epicBoards` | EpicBoardConnection | Find epic boards |
| `epics` | EpicConnection | Find epics |
| `epic` | Epic | Find a single epic. |
| `epicBoard` | EpicBoard | Find a single epic board. |
| `epicBoards` | EpicBoardConnection | Find epic boards. |
| `epics` | EpicConnection | Find epics. |
| `epicsEnabled` | Boolean | Indicates if Epics are enabled for namespace |
| `fullName` | String! | Full name of the namespace. |
| `fullPath` | ID! | Full path of the namespace. |
| `groupMembers` | GroupMemberConnection | A membership of a user within this group. |
| `groupTimelogsEnabled` | Boolean | Indicates if Group timelogs are enabled for namespace |
| `id` | ID! | ID of the namespace. |
| `isTemporaryStorageIncreaseEnabled` | Boolean! | Status of the temporary storage increase |
| `isTemporaryStorageIncreaseEnabled` | Boolean! | Status of the temporary storage increase. |
| `issues` | IssueConnection | Issues for projects in this group. |
| `iterations` | IterationConnection | Find iterations |
| `iterations` | IterationConnection | Find iterations. |
| `label` | Label | A label available on this group. |
| `labels` | LabelConnection | Labels available on this group. |
| `lfsEnabled` | Boolean | Indicates if Large File Storage (LFS) is enabled for namespace. |
@ -1767,27 +1767,27 @@ Autogenerated return type of GitlabSubscriptionActivate.
| `path` | String! | Path of the namespace. |
| `projectCreationLevel` | String | The permission level required to create projects in the group. |
| `projects` | ProjectConnection! | Projects within this namespace. |
| `repositorySizeExcessProjectCount` | Int! | Number of projects in the root namespace where the repository size exceeds the limit |
| `repositorySizeExcessProjectCount` | Int! | Number of projects in the root namespace where the repository size exceeds the limit. |
| `requestAccessEnabled` | Boolean | Indicates if users can request access to namespace. |
| `requireTwoFactorAuthentication` | Boolean | Indicates if all users in this group are required to set up two-factor authentication. |
| `rootStorageStatistics` | RootStorageStatistics | Aggregated storage statistics of the namespace. Only available for root namespaces. |
| `shareWithGroupLock` | Boolean | Indicates if sharing a project with another group within this group is prevented. |
| `stats` | GroupStats | Group statistics |
| `storageSizeLimit` | Float | Total storage limit of the root namespace in bytes |
| `stats` | GroupStats | Group statistics. |
| `storageSizeLimit` | Float | Total storage limit of the root namespace in bytes. |
| `subgroupCreationLevel` | String | The permission level required to create subgroups within the group. |
| `temporaryStorageIncreaseEndsOn` | Time | Date until the temporary storage increase is active |
| `timelogs` | TimelogConnection! | Time logged in issues by group members |
| `totalRepositorySize` | Float | Total repository size of all projects in the root namespace in bytes |
| `totalRepositorySizeExcess` | Float | Total excess repository size of all projects in the root namespace in bytes |
| `temporaryStorageIncreaseEndsOn` | Time | Date until the temporary storage increase is active. |
| `timelogs` | TimelogConnection! | Time logged in issues by group members. |
| `totalRepositorySize` | Float | Total repository size of all projects in the root namespace in bytes. |
| `totalRepositorySizeExcess` | Float | Total excess repository size of all projects in the root namespace in bytes. |
| `twoFactorGracePeriod` | Int | Time before two-factor authentication is enforced. |
| `userPermissions` | GroupPermissions! | Permissions for the current user on the resource |
| `visibility` | String | Visibility of the namespace. |
| `vulnerabilities` | VulnerabilityConnection | Vulnerabilities reported on the projects in the group and its subgroups |
| `vulnerabilitiesCountByDay` | VulnerabilitiesCountByDayConnection | Number of vulnerabilities per day for the projects in the group and its subgroups |
| `vulnerabilities` | VulnerabilityConnection | Vulnerabilities reported on the projects in the group and its subgroups. |
| `vulnerabilitiesCountByDay` | VulnerabilitiesCountByDayConnection | Number of vulnerabilities per day for the projects in the group and its subgroups. |
| `vulnerabilitiesCountByDayAndSeverity` **{warning-solid}** | VulnerabilitiesCountByDayAndSeverityConnection | **Deprecated:** Use `vulnerabilitiesCountByDay`. Deprecated in 13.3. |
| `vulnerabilityGrades` | VulnerableProjectsByGrade! => Array | Represents vulnerable project counts for each grade |
| `vulnerabilityScanners` | VulnerabilityScannerConnection | Vulnerability scanners reported on the project vulnerabilities of the group and its subgroups |
| `vulnerabilitySeveritiesCount` | VulnerabilitySeveritiesCount | Counts for each vulnerability severity in the group and its subgroups |
| `vulnerabilityGrades` | VulnerableProjectsByGrade! => Array | Represents vulnerable project counts for each grade. |
| `vulnerabilityScanners` | VulnerabilityScannerConnection | Vulnerability scanners reported on the project vulnerabilities of the group and its subgroups. |
| `vulnerabilitySeveritiesCount` | VulnerabilitySeveritiesCount | Counts for each vulnerability severity in the group and its subgroups. |
| `webUrl` | String! | Web URL of the group. |
### GroupMember
@ -2235,8 +2235,8 @@ Autogenerated return type of MarkAsSpamSnippet.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `allowCollaboration` | Boolean | Indicates if members of the target project can push to the fork. |
| `approvalsLeft` | Int | Number of approvals left |
| `approvalsRequired` | Int | Number of approvals required |
| `approvalsLeft` | Int | Number of approvals left. |
| `approvalsRequired` | Int | Number of approvals required. |
| `approved` | Boolean! | Indicates if the merge request has all the required approvals. Returns true if no required approvals are configured. |
| `approvedBy` | UserConnection | Users who approved the merge request. |
| `assignees` | UserConnection | Assignees of the merge request. |
@ -2272,7 +2272,7 @@ Autogenerated return type of MarkAsSpamSnippet.
| `mergeError` | String | Error message due to a merge error. |
| `mergeOngoing` | Boolean! | Indicates if a merge is currently occurring. |
| `mergeStatus` | String | Status of the merge request. |
| `mergeTrainsCount` | Int | |
| `mergeTrainsCount` | Int | Number of merge requests in the merge train. |
| `mergeUser` | User | User who merged this merge request. |
| `mergeWhenPipelineSucceeds` | Boolean | Indicates if the merge has been set to be merged when its pipeline succeeds (MWPS). |
| `mergeable` | Boolean! | Indicates if the merge request is mergeable. |
@ -2509,28 +2509,28 @@ Contains statistics about a milestone.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `actualRepositorySizeLimit` | Float | Size limit for repositories in the namespace in bytes |
| `additionalPurchasedStorageSize` | Float | Additional storage purchased for the root namespace in bytes |
| `actualRepositorySizeLimit` | Float | Size limit for repositories in the namespace in bytes. |
| `additionalPurchasedStorageSize` | Float | Additional storage purchased for the root namespace in bytes. |
| `complianceFrameworks` | ComplianceFrameworkConnection | Compliance frameworks available to projects in this namespace. Available only when feature flag `ff_custom_compliance_frameworks` is enabled. |
| `containsLockedProjects` | Boolean! | Includes at least one project where the repository size exceeds the limit |
| `containsLockedProjects` | Boolean! | Includes at least one project where the repository size exceeds the limit. |
| `description` | String | Description of the namespace. |
| `descriptionHtml` | String | The GitLab Flavored Markdown rendering of `description` |
| `fullName` | String! | Full name of the namespace. |
| `fullPath` | ID! | Full path of the namespace. |
| `id` | ID! | ID of the namespace. |
| `isTemporaryStorageIncreaseEnabled` | Boolean! | Status of the temporary storage increase |
| `isTemporaryStorageIncreaseEnabled` | Boolean! | Status of the temporary storage increase. |
| `lfsEnabled` | Boolean | Indicates if Large File Storage (LFS) is enabled for namespace. |
| `name` | String! | Name of the namespace. |
| `packageSettings` | PackageSettings | The package settings for the namespace. |
| `path` | String! | Path of the namespace. |
| `projects` | ProjectConnection! | Projects within this namespace. |
| `repositorySizeExcessProjectCount` | Int! | Number of projects in the root namespace where the repository size exceeds the limit |
| `repositorySizeExcessProjectCount` | Int! | Number of projects in the root namespace where the repository size exceeds the limit. |
| `requestAccessEnabled` | Boolean | Indicates if users can request access to namespace. |
| `rootStorageStatistics` | RootStorageStatistics | Aggregated storage statistics of the namespace. Only available for root namespaces. |
| `storageSizeLimit` | Float | Total storage limit of the root namespace in bytes |
| `temporaryStorageIncreaseEndsOn` | Time | Date until the temporary storage increase is active |
| `totalRepositorySize` | Float | Total repository size of all projects in the root namespace in bytes |
| `totalRepositorySizeExcess` | Float | Total excess repository size of all projects in the root namespace in bytes |
| `storageSizeLimit` | Float | Total storage limit of the root namespace in bytes. |
| `temporaryStorageIncreaseEndsOn` | Time | Date until the temporary storage increase is active. |
| `totalRepositorySize` | Float | Total repository size of all projects in the root namespace in bytes. |
| `totalRepositorySizeExcess` | Float | Total excess repository size of all projects in the root namespace in bytes. |
| `visibility` | String | Visibility of the namespace. |
### NamespaceIncreaseStorageTemporarilyPayload
@ -2751,7 +2751,7 @@ Information about pagination in a connection..
| `path` | String | Relative path to the pipeline's page. |
| `project` | Project | Project the pipeline belongs to. |
| `retryable` | Boolean! | Specifies if a pipeline can be retried. |
| `securityReportSummary` | SecurityReportSummary | Vulnerability and scanned resource counts for each security scanner of the pipeline |
| `securityReportSummary` | SecurityReportSummary | Vulnerability and scanned resource counts for each security scanner of the pipeline. |
| `sha` | String! | SHA of the pipeline's commit. |
| `sourceJob` | CiJob | Job where pipeline was triggered from. |
| `stages` | CiStageConnection | Stages of the pipeline. |
@ -2818,12 +2818,12 @@ Autogenerated return type of PipelineRetry.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `actualRepositorySizeLimit` | Float | Size limit for the repository in bytes |
| `actualRepositorySizeLimit` | Float | Size limit for the repository in bytes. |
| `alertManagementAlert` | AlertManagementAlert | A single Alert Management alert of the project. |
| `alertManagementAlertStatusCounts` | AlertManagementAlertStatusCountsType | Counts of alerts by status for the project. |
| `alertManagementAlerts` | AlertManagementAlertConnection | Alert Management alerts of the project. |
| `alertManagementIntegrations` | AlertManagementIntegrationConnection | Integrations which can receive alerts for the project. |
| `alertManagementPayloadFields` | AlertManagementPayloadAlertField! => Array | Extract alert fields from payload for custom mapping |
| `alertManagementPayloadFields` | AlertManagementPayloadAlertField! => Array | Extract alert fields from payload for custom mapping. |
| `allowMergeOnSkippedPipeline` | Boolean | If `only_allow_merge_if_pipeline_succeeds` is true, indicates if merge requests of the project can also be merged with skipped jobs. |
| `apiFuzzingCiConfiguration` | ApiFuzzingCiConfiguration | API fuzzing configuration for the project. Available only when feature flag `api_fuzzing_configuration_ui` is enabled. |
| `archived` | Boolean | Indicates the archived status of the project. |
@ -2832,10 +2832,10 @@ Autogenerated return type of PipelineRetry.
| `board` | Board | A single board of the project. |
| `boards` | BoardConnection | Boards of the project. |
| `ciCdSettings` | ProjectCiCdSetting | CI/CD settings for the project. |
| `clusterAgent` | ClusterAgent | Find a single cluster agent by name |
| `clusterAgents` | ClusterAgentConnection | Cluster agents associated with the project |
| `codeCoverageSummary` | CodeCoverageSummary | Code coverage summary associated with the project |
| `complianceFrameworks` | ComplianceFrameworkConnection | Compliance frameworks associated with the project |
| `clusterAgent` | ClusterAgent | Find a single cluster agent by name. |
| `clusterAgents` | ClusterAgentConnection | Cluster agents associated with the project. |
| `codeCoverageSummary` | CodeCoverageSummary | Code coverage summary associated with the project. |
| `complianceFrameworks` | ComplianceFrameworkConnection | Compliance frameworks associated with the project. |
| `containerExpirationPolicy` | ContainerExpirationPolicy | The container expiration policy of the project. |
| `containerRegistryEnabled` | Boolean | Indicates if the project stores Docker container images in a container registry. |
| `containerRepositories` | ContainerRepositoryConnection | Container repositories of the project. |
@ -2857,12 +2857,12 @@ Autogenerated return type of PipelineRetry.
| `httpUrlToRepo` | String | URL to connect to the project via HTTPS. |
| `id` | ID! | ID of the project. |
| `importStatus` | String | Status of import background job of the project. |
| `incidentManagementOncallSchedules` | IncidentManagementOncallScheduleConnection | Incident Management On-call schedules of the project |
| `incidentManagementOncallSchedules` | IncidentManagementOncallScheduleConnection | Incident Management On-call schedules of the project. |
| `issue` | Issue | A single issue of the project. |
| `issueStatusCounts` | IssueStatusCountsType | Counts of issues by status for the project. |
| `issues` | IssueConnection | Issues of the project. |
| `issuesEnabled` | Boolean | Indicates if Issues are enabled for the current user |
| `iterations` | IterationConnection | Find iterations |
| `iterations` | IterationConnection | Find iterations. |
| `jiraImportStatus` | String | Status of Jira import background job of the project. |
| `jiraImports` | JiraImportConnection | Jira imports into the project. |
| `jobsEnabled` | Boolean | Indicates if CI/CD pipeline jobs are enabled for the current user. |
@ -2893,14 +2893,14 @@ Autogenerated return type of PipelineRetry.
| `releases` | ReleaseConnection | Releases of the project. |
| `removeSourceBranchAfterMerge` | Boolean | Indicates if `Delete source branch` option should be enabled by default for all new merge requests of the project. |
| `repository` | Repository | Git repository of the project. |
| `repositorySizeExcess` | Float | Size of repository that exceeds the limit in bytes |
| `repositorySizeExcess` | Float | Size of repository that exceeds the limit in bytes. |
| `requestAccessEnabled` | Boolean | Indicates if users can request member access to the project. |
| `requirement` | Requirement | Find a single requirement |
| `requirementStatesCount` | RequirementStatesCount | Number of requirements for the project by their state |
| `requirements` | RequirementConnection | Find requirements |
| `requirement` | Requirement | Find a single requirement. |
| `requirementStatesCount` | RequirementStatesCount | Number of requirements for the project by their state. |
| `requirements` | RequirementConnection | Find requirements. |
| `sastCiConfiguration` | SastCiConfiguration | SAST CI configuration for the project. |
| `securityDashboardPath` | String | Path to project's security dashboard |
| `securityScanners` | SecurityScanners | Information about security analyzers used in the project |
| `securityDashboardPath` | String | Path to project's security dashboard. |
| `securityScanners` | SecurityScanners | Information about security analyzers used in the project. |
| `sentryDetailedError` | SentryDetailedError | Detailed version of a Sentry error on the project. |
| `sentryErrors` | SentryErrorCollection | Paginated collection of Sentry errors on the project. |
| `serviceDeskAddress` | String | E-mail address of the service desk. |
@ -2919,10 +2919,10 @@ Autogenerated return type of PipelineRetry.
| `terraformStates` | TerraformStateConnection | Terraform states associated with the project. |
| `userPermissions` | ProjectPermissions! | Permissions for the current user on the resource |
| `visibility` | String | Visibility of the project. |
| `vulnerabilities` | VulnerabilityConnection | Vulnerabilities reported on the project |
| `vulnerabilitiesCountByDay` | VulnerabilitiesCountByDayConnection | Number of vulnerabilities per day for the project |
| `vulnerabilityScanners` | VulnerabilityScannerConnection | Vulnerability scanners reported on the project vulnerabilities |
| `vulnerabilitySeveritiesCount` | VulnerabilitySeveritiesCount | Counts for each vulnerability severity in the project |
| `vulnerabilities` | VulnerabilityConnection | Vulnerabilities reported on the project. |
| `vulnerabilitiesCountByDay` | VulnerabilitiesCountByDayConnection | Number of vulnerabilities per day for the project. |
| `vulnerabilityScanners` | VulnerabilityScannerConnection | Vulnerability scanners reported on the project vulnerabilities. |
| `vulnerabilitySeveritiesCount` | VulnerabilitySeveritiesCount | Counts for each vulnerability severity in the project. |
| `webUrl` | String | Web URL of the project. |
| `wikiEnabled` | Boolean | Indicates if Wikis are enabled for the current user |

View file

@ -309,7 +309,7 @@ Supported attributes:
| Attribute | Type | Required | Description |
| :-------- | :------- | :--------- | :---------- |
| `version` | string | yes | The version to generate the changelog for. The format must follow [semantic versioning](https://semver.org/). |
| `from` | string | yes | The start of the range of commits (as a SHA) to use for generating the changelog. This commit itself isn't included in the list. |
| `from` | string | no | The start of the range of commits (as a SHA) to use for generating the changelog. This commit itself isn't included in the list. |
| `to` | string | yes | The end of the range of commits (as a SHA) to use for the changelog. This commit _is_ included in the list. |
| `date` | datetime | no | The date and time of the release, defaults to the current time. |
| `branch` | string | no | The branch to commit the changelog changes to, defaults to the project's default branch. |
@ -317,6 +317,29 @@ Supported attributes:
| `file` | string | no | The file to commit the changes to, defaults to `CHANGELOG.md`. |
| `message` | string | no | The commit message to produce when committing the changes, defaults to `Add changelog for version X` where X is the value of the `version` argument. |
If the `from` attribute is unspecified, GitLab uses the Git tag of the last
version that came before the version specified in the `version` attribute. For
this to work, your project must create Git tags for versions using the
following format:
```plaintext
vX.Y.Z
```
Where `X.Y.Z` is a version that follows semantic versioning. For example,
consider a project with the following tags:
- v1.0.0
- v1.1.0
- v2.0.0
If the `version` attribute is `2.1.0`, GitLab uses tag v2.0.0. When the
version is `1.1.1` or `1.2.0`, GitLab uses tag v1.1.0.
If `from` is unspecified and no tag to use is found, the API produces an error.
To solve such an error, you must explicitly specify a value for the `from`
attribute.
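For illustration only, a minimal Ruby sketch of the tag selection rule described above; it approximates GitLab's semantic-version ordering with `Gem::Version`, which is an assumption made to keep the snippet self-contained:

```ruby
# Tags from the example above, keyed by the version string they encode.
tags = { '1.0.0' => 'v1.0.0', '1.1.0' => 'v1.1.0', '2.0.0' => 'v2.0.0' }

def previous_tag_for(tags, new_version)
  versions = (tags.keys + [new_version]).sort_by { |v| Gem::Version.new(v) }
  index = versions.index(new_version)
  tags[versions[index - 1]] if index.positive?
end

previous_tag_for(tags, '2.1.0') # => "v2.0.0"
previous_tag_for(tags, '1.1.1') # => "v1.1.0"
previous_tag_for(tags, '0.1.0') # => nil, the case where the API produces an error
```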
### How it works
Changelogs are generated based on commit titles. Commits are only included if

View file

@ -0,0 +1,183 @@
---
stage: none
group: unassigned
comments: false
description: 'GraphQL API architecture foundation'
---
# GraphQL API
[GraphQL](https://graphql.org/) is a data query and manipulation language for
APIs, and a runtime for fulfilling queries with existing data.
At GitLab we want to adopt GraphQL to make it easier for the wider community to
interact with GitLab in a reliable way, but also to advance our own product by
modeling communication between backend and frontend components using GraphQL.
We've recently increased the pace of adoption by defining quarterly OKRs
related to GraphQL migration. This resulted in us spending more time on
GraphQL development and helped to surface the need to improve the tooling we
use to extend the new API.
This document describes the work needed to build a stable foundation that
will support our development efforts and large-scale usage of the [GraphQL
API](https://docs.gitlab.com/ee/api/graphql/index.html).
## Summary
The GraphQL initiative at GitLab [started around three years ago](https://gitlab.com/gitlab-org/gitlab/-/commit/9c6c17cbcdb8bf8185fc1b873dcfd08f723e4df5).
Most of the work around the GraphQL ecosystem has been done by volunteers who are
[GraphQL experts](https://gitlab.com/groups/gitlab-org/graphql-experts/-/group_members?with_inherited_permissions=exclude).
The [retrospective on our progress](https://gitlab.com/gitlab-org/gitlab/-/issues/235659)
surfaced a few opportunities to streamline our GraphQL development efforts and
to reduce the risk of performance degradations and possible outages caused by
gaps in the essential mechanisms needed to make the GraphQL API observable and
operable at scale.
Alongside small improvements to the GraphQL engine itself, we want to build a
comprehensive monitoring dashboard that will enable team members to make sense
of what is happening inside our GraphQL API. We want to make it possible to
define SLOs, triage breached SLIs, and zoom into relevant details using
Grafana and Elastic. We want to see historical data and predict future usage.
It is an opportunity to learn from our experience in evolving the REST API at
scale, and to apply this knowledge to our GraphQL development efforts. We can
do that by building query-to-feature correlation mechanisms, adding scalable
state synchronization support, and aligning GraphQL with other
architectural initiatives being executed in parallel, like [the support for
direct uploads](https://gitlab.com/gitlab-org/gitlab/-/issues/280819).
GraphQL should be secure by default. We can avoid common security mistakes by
building mechanisms that will help us to enforce [OWASP GraphQL
recommendations](https://cheatsheetseries.owasp.org/cheatsheets/GraphQL_Cheat_Sheet.html)
that are relevant to us.
Understanding the needs of the wider community will also allow us to plan
deprecation policies better and to design parity between the GraphQL and REST
APIs that suits those needs.
## Challenges
### Make sense of what is happening in GraphQL
Being able to see how GraphQL performs in a production environment is a
prerequisite for improving performance and reliability of that service.
We do not yet have tools that would make it possible for us to answer the
question of how GraphQL performs and which bottlenecks we should optimize.
This, combined with the pace of GraphQL adoption and the scale at which we
expect it to operate, imposes a risk of an increased rate of production
incidents that will be difficult to resolve.
We want to build a comprehensive Grafana dashboard that will focus on
delivering insights into how the GraphQL endpoint performs, while still
empowering team members with the capability to zoom into details. We want to
improve logging to make it possible to better correlate GraphQL queries with
features using Elastic, and to index them in a way that lets performance
problems be detected early.
- Build a comprehensive Grafana dashboard for GraphQL
- Build GraphQL query-to-feature correlation mechanisms
- Improve logging of GraphQL queries in Elastic
- Redesign error handling on the frontend to surface warnings
### Manage volatile GraphQL data structures
Our GraphQL API will evolve with time. GraphQL has been designed to make such
evolution easier. GraphQL APIs are easier to extend because of how composable
GraphQL is. On the other hand, this is also a reason why versioning of GraphQL
APIs is considered unnecessary. Instead of versioning the API, we want to mark
some fields as deprecated, but we need a way to understand how deprecated
fields and types are used, and a way to visualize that usage in an
easy-to-understand form. We might want to detect usage of deprecated fields and
notify users that we plan to remove them.
- Define a data-informed deprecation policy that will serve our users better
- Build a dashboard showing usage frequency of deprecated GraphQL fields
- Build mechanisms required to send deprecated field usage in the usage ping
### Ensure consistency with the rest of the codebase
GraphQL is not the only thing we work on, but it cuts across the entire
application. It is being used to expose data collected and processed in almost
every part of our product. This makes it tightly coupled with our monolithic
codebase.
We need to ensure that how we use GraphQL is consistent with other mechanisms
we've designed to improve performance and reliability of GitLab.
We have extensive experience with evolving our REST API. We want to apply
this knowledge to GraphQL and make it performant and secure by default.
- Design direct uploads for GraphQL
- Build GraphQL query depth and complexity histograms
- Visualize the number of GraphQL queries reaching limits
- Add support for GraphQL etags for existing features
### Design GraphQL interoperability with REST API
We do not plan to deprecate our REST API. It is a simple way to interact with
GitLab, and GraphQL might never become a full replacement for a traditional
REST API. The two APIs will need to coexist. We will need to remove duplication
between them to make their codebases maintainable. This symbiosis, however, is
not only a technical challenge we need to resolve on the backend. Users might
want to use the two APIs interchangeably, or even at the same time. Exposing a
common scheme for resource identifiers is a prerequisite for this
interoperability.
- Make GraphQL and REST API interoperable
- Design common resource identifiers for both APIs
### Design scalable state synchronization mechanisms
One of the most important goals related to GraphQL adoption at GitLab is using
it to model interactions between GitLab backend and frontend components. This
is an ongoing process that has already surfaced the need to build better
state synchronization mechanisms and to hook into existing ones.
- Design a scalable state synchronization mechanism
- Evaluate state synchronization through pub/sub and websockets
- Build a generic support for GraphQL feature correlation and feature etags
- Redesign frontend code responsible for managing shared global state
## Iterations
1. [Build comprehensive Grafana dashboard for GraphQL](https://gitlab.com/groups/gitlab-com/-/epics/1343)
1. [Improve logging of GraphQL requests in Elastic](https://gitlab.com/groups/gitlab-org/-/epics/4646)
1. [Build a scalable state synchronization for GraphQL](https://gitlab.com/groups/gitlab-org/-/epics/5319)
1. [Build GraphQL feature-to-query correlation mechanisms](https://gitlab.com/groups/gitlab-org/-/epics/5320)
1. [Design a better data-informed deprecation policy](https://gitlab.com/groups/gitlab-org/-/epics/5321)
1. [Add support for direct uploads for GraphQL](https://gitlab.com/gitlab-org/gitlab/-/issues/280819)
1. [Review GraphQL design choices related to security](https://gitlab.com/gitlab-org/security/gitlab/-/issues/339)
## Status
Current status: in progress.
## Who
Proposal:
<!-- vale gitlab.Spelling = NO -->
| Role | Who
|------------------------------|-------------------------|
| Author | Grzegorz Bizon |
| Architecture Evolution Coach | Kamil Trzciński |
| Engineering Leader | Darva Satcher |
| Product Manager | Patrick Deuley |
| Domain Expert / GraphQL | Charlie Ablett |
| Domain Expert / GraphQL | Alex Kalderimis |
| Domain Expert / GraphQL | Natalia Tepluhina |
| Domain Expert / Scalability | Bob Van Landuyt |
DRIs:
| Role | Who
|------------------------------|------------------------|
| Leadership | Darva Satcher |
| Product | Patrick Deuley |
| Engineering | |
<!-- vale gitlab.Spelling = YES -->

View file

@ -2024,8 +2024,10 @@ This example creates four paths of execution:
- For GitLab.com, the limit is 50. For more information, see our
[infrastructure issue](https://gitlab.com/gitlab-com/gl-infra/infrastructure/-/issues/7541).
- For self-managed instances, the limit is 50. This limit [can be changed](#changing-the-needs-job-limit).
- If `needs:` refers to a job that is marked as `parallel:`.
the current job depends on all parallel jobs being created.
- If `needs:` refers to a job that uses the [`parallel`](#parallel) keyword,
it depends on all jobs created in parallel, not just one job. It also downloads
artifacts from all the parallel jobs by default. If the artifacts have the same
name, they overwrite each other and only the last one downloaded is saved.
- `needs:` is similar to `dependencies:` in that it must use jobs from prior stages,
meaning it's impossible to create circular dependencies. Depending on jobs in the
current stage is not possible either, but support [is planned](https://gitlab.com/gitlab-org/gitlab/-/issues/30632).

View file

@ -121,6 +121,7 @@ In these cases, use the following workflow:
- [User Experience (UX)](https://about.gitlab.com/handbook/engineering/ux/)
- [Security](https://about.gitlab.com/handbook/engineering/security/)
- [Quality](https://about.gitlab.com/handbook/engineering/quality/)
- [Engineering Productivity](https://about.gitlab.com/handbook/engineering/quality/engineering-productivity-team/)
- [Infrastructure](https://about.gitlab.com/handbook/engineering/infrastructure/)
- [Technical Writing](https://about.gitlab.com/handbook/engineering/ux/technical-writing/)

View file

@ -1,6 +1,6 @@
---
stage: none
group: unassigned
stage: Enablement
group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---

View file

@ -161,95 +161,69 @@ Nanoc layout), which is displayed at the top of the page if defined:
## Move or rename a page
Moving or renaming a document is the same as changing its location.
Be sure to assign a technical writer to any MR that renames or moves a page. Technical
Writers can help with any questions and can review your change.
Moving or renaming a document is the same as changing its location. Be sure to
assign a technical writer to any merge request that renames or moves a page.
Technical Writers can help with any questions and can review your change.
When moving or renaming a page, you must redirect browsers to the new page. This
ensures users find the new page, and have the opportunity to update their bookmarks.
When moving or renaming a page, you must redirect browsers to the new page.
This ensures users find the new page, and have the opportunity to update their
bookmarks.
There are two types of redirects:
- Redirect files added into the docs themselves, for users who view the docs in `/help`
on self-managed instances. For example, [`/help` on GitLab.com](https://gitlab.com/help).
- Redirects in a [`_redirects`](../../user/project/pages/redirects.md) file, for users
who view the docs on <https://docs.gitlab.com>.
- Redirect codes added into the documentation files themselves, for users who
view the docs in `/help` on self-managed instances. For example,
[`/help` on GitLab.com](https://gitlab.com/help).
- [GitLab Pages redirects](../../user/project/pages/redirects.md),
for users who view the docs on [`docs.gitlab.com`](https://docs.gitlab.com).
The Technical Writing team manages the [process](https://gitlab.com/gitlab-org/technical-writing/-/blob/master/.gitlab/issue_templates/tw-monthly-tasks.md)
to regularly update the [`redirects.yaml`](https://gitlab.com/gitlab-org/gitlab-docs/-/blob/master/content/_data/redirects.yaml)
file.
To add a redirect:
1. In an MR in one of the internal docs projects (`gitlab`, `gitlab-runner`, `omnibus-gitlab`
or `charts`):
1. Move or rename the doc, but do not delete the old doc.
1. In the old doc, add the redirect code for `/help`. Use the following template exactly,
and only change the links and date. Use relative paths and `.md` for a redirect
to another docs page. Use the full URL to redirect to a different project or site:
1. Create a merge request in one of the internal docs projects (`gitlab`,
`gitlab-runner`, `omnibus-gitlab`, or `charts`), depending on the location of
the file that's being moved, renamed, or removed.
1. To move or rename the documentation file, create a new file with the new
name or location, but don't delete the existing documentation file.
1. In the original documentation file, add the redirect code for
`/help`. Use the following template exactly, and change only the links and
date. Use relative paths and `.md` for a redirect to another documentation
page. Use the full URL (with `https://`) to redirect to a different project or
site:
```markdown
---
redirect_to: '../path/to/file/index.md'
---
```markdown
---
redirect_to: '../path/to/file/index.md'
---
This document was moved to [another location](../path/to/file/index.md).
This document was moved to [another location](../path/to/file/index.md).
<!-- This redirect file can be deleted after <YYYY-MM-DD>. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
```
<!-- This redirect file can be deleted after <YYYY-MM-DD>. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->
```
Redirect files linking to docs in any of the 4 internal docs projects can be
removed after 3 months. Redirect files linking to external sites can be removed
after 1 year.
Redirect files linking to docs in any of the internal documentation projects
are removed after three months. Redirect files linking to external sites are
removed after one year.
1. If the document being moved has any Disqus comments on it, follow the steps
described in [Redirections for pages with Disqus comments](#redirections-for-pages-with-disqus-comments).
1. If a documentation page you're removing includes images that aren't used
with any other documentation pages, be sure to use your MR to delete
those images from the repository.
1. Assign the MR to a technical writer for review and merge.
1. If the redirect is to one of the 4 internal docs projects (not an external URL),
create an MR in [`gitlab-docs`](https://gitlab.com/gitlab-org/gitlab-docs):
1. Update [`content/_data/redirects.yaml`](https://gitlab.com/gitlab-org/gitlab-docs/-/blob/master/content/_data/redirects.yaml)
with one redirect entry for each renamed or moved file. This code works for
<https://docs.gitlab.com> links only. Keep them alphabetically sorted:
```yaml
- from: /ee/path/to/old_file.html
to: /ee/path/to/new_file.html
remove_date: YYYY-MM-DD
```
The path must start with the internal project directory `/ee`,
`/runner`, `/omnibus` or `/charts`, and end with either `.html` or `/`
for a clean URL.
If the `from:` redirect is an `index.html` file, add a duplicate entry for
the `/` URL (without `index.html`). For example:
```yaml
- from: /ee/user/project/operations/index.html
to: /ee/operations/index.html
remove_date: 2021-11-01
- from: /ee/user/project/operations/
to: /ee/operations/index.html
remove_date: 2021-11-01
```
The `remove_date` should be one year after the redirect is submitted.
1. Run the Rake task in the `gitlab-docs` project to populate the `_redirects` file:
```shell
bundle exec rake redirects
```
1. Add both `content/_redirects` and `content/_data/redirects.yaml` to your MR.
1. Search for links to the old file. You must find and update all links to the old file:
1. If the documentation page being moved has any Disqus comments, follow the steps
described in [Redirections for pages with Disqus comments](#redirections-for-pages-with-disqus-comments).
1. If a documentation page you're removing includes images that aren't used
with any other documentation pages, be sure to use your merge request to delete
those images from the repository.
1. Assign the merge request to a technical writer for review and merge.
1. Search for links to the original documentation file. You must find and update all
links that point to the original documentation file:
- In <https://gitlab.com/gitlab-com/www-gitlab-com>, search for full URLs:
`grep -r "docs.gitlab.com/ee/path/to/file.html" .`
- In <https://gitlab.com/gitlab-org/gitlab-docs/-/tree/master/content/_data>,
search the navigation bar configuration files for the path with `.html`:
`grep -r "path/to/file.html" .`
- In any of the 4 internal projects. This includes searching for links in the docs
- In any of the four internal projects. This includes searching for links in the docs
and codebase. Search for all variations, including full URL and just the path.
In macOS for example, go to the root directory of the `gitlab` project and run:
@ -260,8 +234,8 @@ To add a redirect:
grep -r "path/to/file" .
```
You may need to try variations of relative links as well, such as `../path/to/file`
or even `../file` to find every case.
You may need to try variations of relative links, such as `../path/to/file` or
`../file` to find every case.
### Redirections for pages with Disqus comments

View file

@ -6,28 +6,75 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Frontend dependencies
## Package manager
We use [yarn@1](https://classic.yarnpkg.com/lang/en/) to manage frontend dependencies.
We use [Yarn](https://yarnpkg.com/) to manage frontend dependencies. There are a few exceptions, stored in `vendor/assets/`.
There are a few exceptions in the GitLab repository, stored in `vendor/assets/`.
## What are production and development dependencies?
These dependencies are defined in two groups within `package.json`: `dependencies` and `devDependencies`.
For our purposes, we consider anything that is required to compile our production assets a "production" dependency.
That is, anything required to run the `webpack` script with `NODE_ENV=production`.
Tools like `eslint`, `jest`, and various plugins and tools used in development are considered `devDependencies`.
This distinction is used by omnibus to determine which dependencies it requires when building GitLab.
Exceptions are made for some tools that we require in the
`compile-production-assets` CI job such as `webpack-bundle-analyzer` to analyze our
production assets post-compile.
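As a minimal sketch of how this distinction plays out in practice (the package names below are placeholders, not recommendations), the group a package lands in depends on the flag passed to Yarn:
```shell
# Needed to compile production assets: recorded under "dependencies".
yarn add some-runtime-package
# Used only for development or CI tooling: recorded under "devDependencies".
yarn add --dev some-dev-tool
```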
## Updating dependencies
### Renovate GitLab Bot
We use the [Renovate GitLab Bot](https://gitlab.com/gitlab-org/frontend/renovate-gitlab-bot) to
automatically create merge requests for updating dependencies of several projects. You can find the
up-to-date list of projects managed by the renovate bot in the projects README. Some key dependencies
updated using renovate are:
automatically create merge requests for updating dependencies of several projects.
You can find the up-to-date list of projects managed by the renovate bot in the project's README.
Some key dependencies updated using renovate are:
- [`@gitlab/ui`](https://gitlab.com/gitlab-org/gitlab-ui)
- [`@gitlab/svgs`](https://gitlab.com/gitlab-org/gitlab-svgs)
- [`@gitlab/eslint-plugin`](https://gitlab.com/gitlab-org/frontend/eslint-plugin)
- And any other package in the `@gitlab/` scope
We have the goal of updating [_all_ dependencies with renovate](https://gitlab.com/gitlab-org/frontend/rfcs/-/issues/21).
Updating dependencies automatically has several benefits; have a look at this [example MR](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/53613):
- MRs will be created automatically when new versions are released
- MRs can easily be rebased and updated with just checking a checkbox in the MR description
- MRs contain changelog summaries and links to compare the different package versions
- MRs can be assigned to people directly responsible for the dependencies
### Community contributions updating dependencies
It is okay to reject Community Contributions that solely bump dependencies.
Simple dependency updates are better done automatically for the reasons provided above.
If a community contribution needs to be rebased, runs into conflicts, or goes stale, the effort required
to instruct the contributor to correct it often outweighs the benefits.
If a dependency update is accompanied by significant migration effort, for example due to a major
version update, a community contribution is acceptable.
Here is a message you can use to explain to community contributors why we reject simple updates:
```markdown
Hello CONTRIBUTOR!
Thank you very much for this contribution. It seems like you are doing a "simple" dependency update.
If a dependency update is as simple as increasing the version number, we'd like a Bot to do this to save you and ourselves some time.
This has certain benefits as outlined in our <a href="https://docs.gitlab.com/ee/development/fe_guide/dependencies.html#updating-dependencies">Frontend development guidelines</a>.
You might find that we do not currently update DEPENDENCY automatically, but we are planning to do so in [the near future](https://gitlab.com/gitlab-org/frontend/rfcs/-/issues/21).
Thank you for understanding. I will close this Merge Request.
/close
```
### Blocked dependencies
We discourage installing some dependencies in [GitLab repository](https://gitlab.com/gitlab-org/gitlab)
because they can create conflicts in the dependency tree. Blocked dependencies are declared in the
`blockDependencies` property of the GitLab [`package.json` file](https://gitlab.com/gitlab-org/gitlab/-/blob/master/package.json).
We discourage installing some dependencies in [GitLab repository](https://gitlab.com/gitlab-org/gitlab) because they can create conflicts in the dependency tree.
Blocked dependencies are declared in the `blockDependencies` property of the GitLab [`package.json`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/package.json).
## Dependency notes

View file

@ -1,38 +1,8 @@
---
stage: none
group: unassigned
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
redirect_to: '../fe_guide/dependencies.md'
---
# Dependencies
This document was moved to [another location](../fe_guide/dependencies.md).
## Adding Dependencies
GitLab uses `yarn` to manage dependencies. These dependencies are defined in
two groups within `package.json`, `dependencies` and `devDependencies`. For
our purposes, we consider anything that is required to compile our production
assets a "production" dependency. That is, anything required to run the
`webpack` script with `NODE_ENV=production`. Tools like `eslint`, `karma`, and
various plugins and tools used in development are considered `devDependencies`.
This distinction is used by omnibus to determine which dependencies it requires
when building GitLab.
Exceptions are made for some tools that we require in the
`gitlab:assets:compile` CI job such as `webpack-bundle-analyzer` to analyze our
production assets post-compile.
To add or upgrade a dependency, run:
```shell
yarn add <your dependency here>
```
This may introduce duplicate dependencies. To de-duplicate `yarn.lock`, run:
```shell
node_modules/.bin/yarn-deduplicate --list --strategy fewer yarn.lock && yarn install
```
---
> TODO: Add Dependencies
<!-- This redirect file can be deleted after <2021-05-14>. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/#move-or-rename-a-page -->

View file

@ -13,10 +13,6 @@ This is a living document, and we welcome contributions, feedback, and suggestio
Guidance on topics related to development.
## [Dependencies](dependencies.md)
Learn about all the dependencies that make up our frontend, including some of our own custom built libraries.
## [Modules](modules/index.md)
Learn about all the internal JavaScript modules that make up our frontend.

View file

@ -106,6 +106,8 @@ environment variable `ENABLE_SHERLOCK` to a non empty value. For example:
ENABLE_SHERLOCK=1 bundle exec rails s
```
Sherlock is also [available through the GitLab GDK](https://gitlab.com/gitlab-org/gitlab-development-kit/-/blob/master/doc/howto/sherlock.md).
Recorded transactions can be found by navigating to `/sherlock/transactions`.
## Bullet

View file

@ -110,7 +110,7 @@ Install the required packages (needed to compile Ruby and native extensions to R
sudo apt-get install -y build-essential zlib1g-dev libyaml-dev libssl-dev libgdbm-dev libre2-dev \
libreadline-dev libncurses5-dev libffi-dev curl openssh-server checkinstall libxml2-dev \
libxslt-dev libcurl4-openssl-dev libicu-dev logrotate rsync python-docutils pkg-config cmake \
runit
runit-systemd
```
Ubuntu 14.04 (Trusty Tahr) doesn't have the `libre2-dev` package available, but

View file

@ -311,6 +311,18 @@ The words "change," "improve," "fix," and "refactor" don't add much information
For example, "Improve XML generation" could be better written as "Properly escape special characters in XML generation."
For more information about formatting commit messages, please see this excellent [blog post by Tim Pope](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).
To add more context to a commit message, consider adding information regarding the
origin of the change: for example, the URL of a GitLab issue or a Jira issue number
that contains more information for users who need in-depth context about the change.
For example:
```plaintext
Properly escape special characters in XML generation.
Issue: gitlab.com/gitlab-org/gitlab/-/issues/1
```
## Testing before merging
![Merge requests showing the test states: red, yellow, and green](img/gitlab_flow_ci_mr.png)

View file

@ -13,25 +13,25 @@ For details about diff files, [View changes between files](../project/merge_requ
## Maximum diff patch size
Diff files which exceed this value will be presented as 'too large' and won't
be expandable. Instead of an expandable view, a link to the blob view will be
Diff files which exceed this value are presented as 'too large' and cannot
be expanded. Instead of an expandable view, a link to the blob view is
shown.
Patches greater than 10% of this size will be automatically collapsed, and a
link to expand the diff will be presented.
Patches greater than 10% of this size are automatically collapsed, and a
link to expand the diff is presented.
This affects merge requests and branch comparison views.
NOTE:
Merge requests and branch comparison views will be affected.
WARNING:
This setting is experimental. An increased maximum will increase resource
consumption of your instance. Keep this in mind when adjusting the maximum.
To set the maximum diff patch size:
1. Go to **Admin Area > Settings > General**.
1. Expand **Diff limits**.
1. Enter a value for **Maximum diff patch size**, measured in bytes.
1. Click on **Save changes**.
WARNING:
This setting is experimental. An increased maximum increases resource
consumption of your instance. Keep this in mind when adjusting the maximum.
<!-- ## Troubleshooting
Include any troubleshooting steps that you can foresee. If you know beforehand what issues

View file

@ -172,9 +172,9 @@ To set a limit on how long personal access tokens are valid:
1. Fill in the **Maximum allowable lifetime for personal access tokens (days)** field.
1. Click **Save changes**.
Once a lifetime for personal access tokens is set, GitLab will:
Once a lifetime for personal access tokens is set, GitLab:
- Apply the lifetime for new personal access tokens, and require users to set an expiration date
- Applies the lifetime for new personal access tokens, and requires users to set an expiration date
and a date no later than the allowed lifetime.
- After three hours, revoke old tokens with no expiration date or with a lifetime longer than the
allowed lifetime. Three hours is given to allow administrators to change the allowed lifetime,

View file

@ -22,10 +22,9 @@ select the project to serve as the custom template repository.
![File templates in the Admin Area](img/file_template_admin_area.png)
Once a project has been selected, you can add custom templates to the repository,
and they will appear in the appropriate places in the
[frontend](../../project/repository/web_editor.md#template-dropdowns) and
[API](../../../api/settings.md).
After that, you can add custom templates to the selected repository and use them for the entire instance.
They will be available on the [Web Editor's dropdown](../../project/repository/web_editor.md#template-dropdowns)
and through the [API settings](../../../api/settings.md).
Templates must be added to a specific subdirectory in the repository,
corresponding to the kind of template. The following types of custom templates
@ -61,9 +60,7 @@ extension and not be empty. So, the hierarchy should look like this:
|-- another_metrics-dashboard.yml
```
Once this is established, the list of custom templates will be included when
creating a new file and the template type is selected. These will appear at the
top of the list.
Your custom templates will be displayed on the dropdown menu when a new file is added through the GitLab UI:
![Custom template dropdown menu](img/file_template_user_dropdown.png)

View file

@ -9,17 +9,17 @@ type: reference
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/31007) in GitLab 12.4.
This allows you to set the number of changes (branches or tags) in a single push
to determine whether individual push events or bulk push event will be created.
Bulk push events will be created if it surpasses that value.
Set the number of branches or tags to limit the number of single push events
allowed at once. If the number of events is greater than this, GitLab creates a
bulk push event instead.
For example, if 4 branches are pushed and the limit is currently set to 3,
you'll see the following in the activity feed:
![Bulk push event](img/bulk_push_event_v12_4.png)
With this feature, when a single push includes a lot of changes (e.g. 1,000
branches), only 1 bulk push event will be created instead of creating 1,000 push
With this feature, when a single push includes a lot of changes (for example, 1,000
branches), only 1 bulk push event is created instead of 1,000 push
events. This helps in maintaining good system performance and preventing spam on
the activity feed.

View file

@ -8,21 +8,21 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Code Review Analytics **(PREMIUM)**
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/38062) in [GitLab Starter](https://about.gitlab.com/pricing/) 12.7.
> - Moved to [GitLab Premium](https://about.gitlab.com/pricing/) due to Starter/Bronze being [discontinued](https://about.gitlab.com/blog/2021/01/26/new-gitlab-product-subscription-model/) in 13.9.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/38062) in GitLab 12.7.
> - Moved to GitLab Premium in 13.9.
Code Review Analytics makes it easy to view the longest-running reviews among open merge requests and
enables you to:
Use Code Review Analytics to view the longest-running reviews among open merge
requests, and:
1. Take action on individual merge requests.
1. Reduce overall cycle time.
- Take action on individual merge requests.
- Reduce overall cycle time.
NOTE:
Initially, no data appears. Data is populated as users comment on open merge requests.
## Overview
Code Review Analytics displays a table of open merge requests that have at least one non-author comment. The review time is measured from the time the first non-author comment was submitted.
Code Review Analytics is available to users with Reporter access and above, and displays a table of open merge requests that have at least one non-author comment. The review time is measured from the time the first non-author comment was submitted.
To access Code Review Analytics, from your project's menu, go to **Project Analytics > Code Review**.
@ -54,8 +54,3 @@ For example:
- Lots of comments or commits? Maybe the code is too complex.
- A particular author is involved? Maybe more training is required.
- Few comments and approvers? Maybe your team is understaffed.
## Permissions
- On [Starter or Bronze tier](https://about.gitlab.com/pricing/) and above.
- By users with Reporter access and above.

View file

@ -581,7 +581,7 @@ Here are examples of regex patterns you may want to use:
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/288812) in GitLab 13.9.
> - It's [deployed behind a feature flag](../../feature_flags.md), disabled by default.
> - It's disabled on GitLab.com.
> - It's enabled on GitLab.com.
> - It's not recommended for production use.
> - To use it in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enable-or-disable-cleanup-policy-limits).

View file

@ -7,13 +7,13 @@ type: reference
# Projects **(FREE)**
In GitLab, you can create projects for hosting
your codebase, use it as an issue tracker, collaborate on code, and continuously
build, test, and deploy your app with built-in GitLab CI/CD.
In GitLab, you can create projects to host
your codebase. You can also use projects to track issues, plan work,
collaborate on code, and use built-in CI/CD to continuously build, test,
and deploy your app.
Your projects can be [available](../../public_access/public_access.md)
publicly, internally, or privately, at your choice. GitLab does not limit
the number of private projects you create.
Projects can be available [publicly, internally, or privately](../../public_access/public_access.md).
GitLab does not limit the number of private projects you can create.
## Project features
@ -21,46 +21,43 @@ Projects include the following [features](https://about.gitlab.com/features/):
**Repositories:**
- [Issue tracker](issues/index.md): Discuss implementations with your team in issues
- [Issue Boards](issue_board.md): Organize and prioritize your workflow
- [Multiple Issue Boards](issue_board.md#multiple-issue-boards): Allow your teams to create their own workflows (Issue Boards) for the same project
- [Repositories](repository/index.md): Host your code in a fully
integrated platform
- [Branches](repository/branches/index.md): use Git branching strategies to
collaborate on code
- [Issue tracker](issues/index.md): Discuss implementations with your team.
- [Issue Boards](issue_board.md): Organize and prioritize your workflow.
- [Multiple Issue Boards](issue_board.md#multiple-issue-boards): Create team-specific workflows (Issue Boards) for a project.
- [Repositories](repository/index.md): Host your code in a fully integrated platform.
- [Branches](repository/branches/index.md): Use Git branching strategies to
collaborate on code.
- [Protected branches](protected_branches.md): Prevent collaborators
from messing with history or pushing code without review
- [Protected tags](protected_tags.md): Control over who has
permission to create tags, and prevent accidental update or deletion
from changing history or pushing code without review.
- [Protected tags](protected_tags.md): Control who has
permission to create tags and prevent accidental updates or deletions.
- [Repository mirroring](repository/repository_mirroring.md)
- [Signing commits](repository/gpg_signed_commits/index.md): use GPG to sign your commits
- [Deploy tokens](deploy_tokens/index.md): Manage project-based deploy tokens that allow permanent access to the repository and Container Registry.
- [Signing commits](repository/gpg_signed_commits/index.md): Use GNU Privacy Guard (GPG) to sign your commits.
- [Deploy tokens](deploy_tokens/index.md): Manage access to the repository and Container Registry.
- [Web IDE](web_ide/index.md)
- [CVE ID Requests](../application_security/cve_id_request.md): Request a CVE identifier to track a
vulnerability in your project.
**Issues and merge requests:**
- [Issue tracker](issues/index.md): Discuss implementations with your team in issues
- [Issue Boards](issue_board.md): Organize and prioritize your workflow
- [Multiple Issue Boards](issue_board.md#multiple-issue-boards): Allow your teams to create their own workflows (Issue Boards) for the same project
- [Merge Requests](merge_requests/index.md): Apply your branching
strategy and get reviewed by your team
- [Issue tracker](issues/index.md): Discuss implementations with your team.
- [Issue Boards](issue_board.md): Organize and prioritize your workflow.
- [Multiple Issue Boards](issue_board.md#multiple-issue-boards): Create team-specific workflows (Issue Boards) for a project.
- [Merge Requests](merge_requests/index.md): Apply a branching
strategy and get reviewed by your team.
- [Merge Request Approvals](merge_requests/merge_request_approvals.md): Ask for approval before
implementing a change
- [Fix merge conflicts from the UI](merge_requests/resolve_conflicts.md):
Your Git diff tool right from the GitLab UI
- [Review Apps](../../ci/review_apps/index.md): Live preview the results
of the changes proposed in a merge request in a per-branch basis
- [Labels](labels.md): Organize issues and merge requests by labels
- [Time Tracking](time_tracking.md): Track estimate time
and time spent on
the conclusion of an issue or merge request
- [Milestones](milestones/index.md): Work towards a target date
implementing a change.
- [Fix merge conflicts from the UI](merge_requests/resolve_conflicts.md): View Git diffs from the GitLab UI.
- [Review Apps](../../ci/review_apps/index.md): By branch, preview the results
of the changes proposed in a merge request.
- [Labels](labels.md): Organize issues and merge requests by labels.
- [Time Tracking](time_tracking.md): Track time estimated and
spent on issues and merge requests.
- [Milestones](milestones/index.md): Work toward a target date.
- [Description templates](description_templates.md): Define context-specific
templates for issue and merge request description fields for your project
- [Slash commands (quick actions)](quick_actions.md): Textual shortcuts for
common actions on issues or merge requests
templates for issue and merge request description fields.
- [Slash commands (quick actions)](quick_actions.md): Create text shortcuts for
common actions.
- [Autocomplete characters](autocomplete_characters.md): Autocomplete
references to users, groups, issues, merge requests, and other GitLab
elements.
@ -68,51 +65,49 @@ Projects include the following [features](https://about.gitlab.com/features/):
**GitLab CI/CD:**
- [GitLab CI/CD](../../ci/README.md): the GitLab built-in [Continuous Integration, Delivery, and Deployment](https://about.gitlab.com/blog/2016/08/05/continuous-integration-delivery-and-deployment-with-gitlab/) tool
- [GitLab CI/CD](../../ci/README.md): Use the built-in [Continuous Integration, Delivery, and Deployment](https://about.gitlab.com/blog/2016/08/05/continuous-integration-delivery-and-deployment-with-gitlab/) tool.
- [Container Registry](../packages/container_registry/index.md): Build and push Docker
images out-of-the-box
images.
- [Auto Deploy](../../topics/autodevops/stages.md#auto-deploy): Configure GitLab CI/CD
to automatically set up your app's deployment
to automatically set up your app's deployment.
- [Enable and disable GitLab CI/CD](../../ci/enable_or_disable_ci.md)
- [Pipelines](../../ci/pipelines/index.md): Configure and visualize
your GitLab CI/CD pipelines from the UI
your GitLab CI/CD pipelines from the UI.
- [Scheduled Pipelines](../../ci/pipelines/schedules.md): Schedule a pipeline
to start at a chosen time
to start at a chosen time.
- [Pipeline Graphs](../../ci/pipelines/index.md#visualize-pipelines): View your
entire pipeline from the UI
pipeline from the UI.
- [Job artifacts](../../ci/pipelines/job_artifacts.md): Define,
browse, and download job artifacts
- [Pipeline settings](../../ci/pipelines/settings.md): Set up Git strategy (choose the default way your repository is fetched from GitLab in a job),
timeout (defines the maximum amount of time in minutes that a job is able run), custom path for `.gitlab-ci.yml`, test coverage parsing, pipeline's visibility, and much more
- [Kubernetes cluster integration](clusters/index.md): Connecting your GitLab project
with a Kubernetes cluster
- [Feature Flags](../../operations/feature_flags.md): Feature flags allow you to ship a project in
different flavors by dynamically toggling certain functionality **(PREMIUM)**
browse, and download job artifacts.
- [Pipeline settings](../../ci/pipelines/settings.md): Set up Git strategy (how jobs fetch your repository),
timeout (the maximum amount of time a job can run), custom path for `.gitlab-ci.yml`, test coverage parsing, pipeline visibility, and more.
- [Kubernetes cluster integration](clusters/index.md): Connect your GitLab project
with a Kubernetes cluster.
- [Feature Flags](../../operations/feature_flags.md): Ship different features
by dynamically toggling functionality. **(PREMIUM)**
- [GitLab Pages](pages/index.md): Build, test, and deploy your static
website with GitLab Pages
website.
**Other features:**
- [Wiki](wiki/index.md): document your GitLab project in an integrated Wiki.
- [Snippets](../snippets.md): store, share and collaborate on code snippets.
- [Value Stream Analytics](../analytics/value_stream_analytics.md): review your development lifecycle.
- [Insights](insights/index.md): configure the Insights that matter for your projects. **(ULTIMATE)**
- [Security Dashboard](../application_security/security_dashboard/index.md): Security Dashboard. **(ULTIMATE)**
- [Syntax highlighting](highlighting.md): an alternative to customize
your code blocks, overriding the GitLab default choice of language.
- [Badges](badges.md): badges for the project overview.
- [Releases](releases/index.md): a way to track deliverables in your project as snapshot in time of
the source, build output, other metadata, and other artifacts
- [Wiki](wiki/index.md): Document your GitLab project in an integrated Wiki.
- [Snippets](../snippets.md): Store, share and collaborate on code snippets.
- [Value Stream Analytics](../analytics/value_stream_analytics.md): Review your development lifecycle.
- [Insights](insights/index.md): Configure the insights that matter for your projects. **(ULTIMATE)**
- [Security Dashboard](../application_security/security_dashboard/index.md) **(ULTIMATE)**
- [Syntax highlighting](highlighting.md): Customize
your code blocks, overriding the default language choice.
- [Badges](badges.md): Add an image to the project overview.
- [Releases](releases/index.md): Take a snapshot of
the source, build output, metadata, and artifacts
associated with a released version of your code.
- [Conan packages](../packages/conan_repository/index.md): your private Conan repository in GitLab.
- [Maven packages](../packages/maven_repository/index.md): your private Maven repository in GitLab.
- [NPM packages](../packages/npm_registry/index.md): your private NPM package registry in GitLab.
- [Code owners](code_owners.md): specify code owners for certain files
- [License Compliance](../compliance/license_compliance/index.md): approve and deny licenses for projects. **(ULTIMATE)**
- [Dependency List](../application_security/dependency_list/index.md): view project dependencies. **(ULTIMATE)**
- [Requirements](requirements/index.md): Requirements allow you to create criteria to check your products against. **(ULTIMATE)**
- [Static Site Editor](static_site_editor/index.md): quickly edit content on static websites without prior knowledge of the codebase or Git commands.
- [Code Intelligence](code_intelligence.md): code navigation features.
- [Package Registry](../packages/package_registry/index.md): Publish and install packages.
- [Code owners](code_owners.md): Specify code owners for specific files.
- [License Compliance](../compliance/license_compliance/index.md): Approve and deny licenses for projects. **(ULTIMATE)**
- [Dependency List](../application_security/dependency_list/index.md): View project dependencies. **(ULTIMATE)**
- [Requirements](requirements/index.md): Create criteria to check your products against. **(ULTIMATE)**
- [Static Site Editor](static_site_editor/index.md): Edit content on static websites without prior knowledge of the codebase or Git commands.
- [Code Intelligence](code_intelligence.md): Navigate code.
## Project integrations
@ -129,13 +124,6 @@ Kubernetes, Slack, and a lot more.
- [Export a project from GitLab](settings/import_export.md#exporting-a-project-and-its-data)
- [Importing and exporting projects between GitLab instances](settings/import_export.md)
## CI/CD for external repositories **(PREMIUM)**
Instead of importing a repository directly to GitLab, you can connect your repository
as a CI/CD project.
Read through the documentation on [CI/CD for external repositories](../../ci/ci_cd_for_external_repos/index.md).
## GitLab Workflow - VS Code extension
To avoid switching between the GitLab UI and VS Code while working in GitLab repositories, you can integrate

View file

@ -16,7 +16,7 @@ pre-push:
markdownlint:
tags: documentation style
files: git diff --name-only $(git merge-base origin/master HEAD)..HEAD
glob: "*.md"
glob: "doc/*.md"
run: yarn markdownlint {files}
scss-lint:
tags: stylesheet css style
@ -32,5 +32,5 @@ pre-push:
vale: # Requires Vale: https://docs.gitlab.com/ee/development/documentation/#install-linters
tags: documentation style
files: git diff --name-only $(git merge-base origin/master HEAD)..HEAD
glob: "*.md"
glob: "doc/*.md"
run: if command -v vale 2> /dev/null; then vale --config .vale.ini --minAlertLevel error {files}; else echo "Vale not found. Install Vale"; fi

View file

@ -180,7 +180,7 @@ module API
regexp: Gitlab::Regex.unbounded_semver_regex,
desc: 'The version of the release, using the semantic versioning format'
requires :from,
optional :from,
type: String,
desc: 'The first commit in the range of commits to use for the changelog'

View file

@ -15,7 +15,7 @@ module Gitlab
private
def deployments_count
if Feature.enabled?(:query_deploymenys_via_finished_at_in_vsa)
if Feature.enabled?(:query_deploymenys_via_finished_at_in_vsa, default_enabled: :yaml)
DeploymentsFinder
.new(project: @project, finished_after: @from, finished_before: @to, status: :success)
.execute

View file

@ -23,7 +23,9 @@ module Gitlab
end
def self.verified?(env)
call(env)
minimal_env = env.slice('REQUEST_METHOD', 'rack.session', 'HTTP_X_CSRF_TOKEN')
.merge('rack.input' => '')
call(minimal_env)
true
rescue ActionController::InvalidAuthenticityToken

View file

@ -2636,12 +2636,6 @@ msgstr ""
msgid "AlertMappingBuilder|Title is a required field for alerts in GitLab. Should the payload field you specified not be available, specifiy which field we should use instead. "
msgstr ""
msgid "AlertService|Review your external service's documentation to learn where to provide this information to your external service, and the %{linkStart}GitLab documentation%{linkEnd} to learn more about configuring your endpoint."
msgstr ""
msgid "AlertService|You must provide this URL and authorization key to authorize an external service to send alerts to GitLab. You can provide this URL and key to multiple services. After configuring an external service, alerts from your service will display on the GitLab %{linkStart}Alerts%{linkEnd} page."
msgstr ""
msgid "AlertSettings|1. Select integration type"
msgstr ""
@ -16676,6 +16670,9 @@ msgstr ""
msgid "JiraService|Jira workflow transition IDs"
msgstr ""
msgid "JiraService|Not all data may be displayed here. To view more details or make changes to this issue, go to %{linkStart}Jira%{linkEnd}."
msgstr ""
msgid "JiraService|Open Jira"
msgstr ""
@ -16700,6 +16697,9 @@ msgstr ""
msgid "JiraService|This feature requires a Premium plan."
msgstr ""
msgid "JiraService|This issue is synchronized with Jira"
msgstr ""
msgid "JiraService|Use a password for server version and an API token for cloud version"
msgstr ""
@ -24961,9 +24961,6 @@ msgstr ""
msgid "Reset to project defaults"
msgstr ""
msgid "Resetting the authorization key for this project will require updating the authorization key in every alert source it is enabled in."
msgstr ""
msgid "Resetting the authorization key will invalidate the previous key. Existing alert configurations will need to be updated with the new key."
msgstr ""
@ -31318,9 +31315,6 @@ msgstr ""
msgid "Update failed"
msgstr ""
msgid "Update failed. Please try again."
msgstr ""
msgid "Update it"
msgstr ""

View file

@ -0,0 +1,41 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Repositories::PreviousTagFinder do
let(:project) { build_stubbed(:project) }
let(:finder) { described_class.new(project) }
describe '#execute' do
context 'when there is a previous tag' do
it 'returns the previous tag' do
tag1 = double(:tag1, name: 'v1.0.0')
tag2 = double(:tag2, name: 'v1.1.0')
tag3 = double(:tag3, name: 'v2.0.0')
tag4 = double(:tag4, name: '1.0.0')
allow(project.repository)
.to receive(:tags)
.and_return([tag1, tag3, tag2, tag4])
expect(finder.execute('2.1.0')).to eq(tag3)
expect(finder.execute('2.0.0')).to eq(tag2)
expect(finder.execute('1.5.0')).to eq(tag2)
expect(finder.execute('1.0.1')).to eq(tag1)
end
end
context 'when there is no previous tag' do
it 'returns nil' do
tag1 = double(:tag1, name: 'v1.0.0')
tag2 = double(:tag2, name: 'v1.1.0')
allow(project.repository)
.to receive(:tags)
.and_return([tag1, tag2])
expect(finder.execute('1.0.0')).to be_nil
end
end
end
end

View file

@ -1,9 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`AlertsServiceForm with default values renders "authorization-key" input 1`] = `"<gl-form-input-stub id=\\"authorization-key\\" readonly=\\"true\\" value=\\"abcedfg123\\"></gl-form-input-stub>"`;
exports[`AlertsServiceForm with default values renders "url" input 1`] = `"<gl-form-input-stub id=\\"url\\" readonly=\\"true\\" value=\\"https://gitlab.com/endpoint-url\\"></gl-form-input-stub>"`;
exports[`AlertsServiceForm with default values renders toggle button 1`] = `"<toggle-button-stub id=\\"activated\\"></toggle-button-stub>"`;
exports[`AlertsServiceForm with default values shows description and docs links 1`] = `"<p><gl-sprintf-stub message=\\"You must provide this URL and authorization key to authorize an external service to send alerts to GitLab. You can provide this URL and key to multiple services. After configuring an external service, alerts from your service will display on the GitLab %{linkStart}Alerts%{linkEnd} page.\\"></gl-sprintf-stub></p><p><gl-sprintf-stub message=\\"Review your external service's documentation to learn where to provide this information to your external service, and the %{linkStart}GitLab documentation%{linkEnd} to learn more about configuring your endpoint.\\"></gl-sprintf-stub></p>"`;

View file

@ -1,152 +0,0 @@
import { nextTick } from 'vue';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { shallowMount } from '@vue/test-utils';
import { GlModal } from '@gitlab/ui';
import AlertsServiceForm from '~/alerts_service_settings/components/alerts_service_form.vue';
import ToggleButton from '~/vue_shared/components/toggle_button.vue';
import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash');
const defaultProps = {
initialAuthorizationKey: 'abcedfg123',
formPath: 'http://invalid',
url: 'https://gitlab.com/endpoint-url',
alertsSetupUrl: 'http://invalid',
alertsUsageUrl: 'http://invalid',
initialActivated: false,
isDisabled: false,
};
describe('AlertsServiceForm', () => {
let wrapper;
let mockAxios;
const createComponent = (props = defaultProps) => {
wrapper = shallowMount(AlertsServiceForm, {
propsData: {
...defaultProps,
...props,
},
});
};
const findUrl = () => wrapper.find('#url');
const findAuthorizationKey = () => wrapper.find('#authorization-key');
const findDescription = () => wrapper.find('[data-testid="description"');
beforeEach(() => {
mockAxios = new MockAdapter(axios);
});
afterEach(() => {
wrapper.destroy();
mockAxios.restore();
});
describe('with default values', () => {
beforeEach(() => {
createComponent();
});
it('renders "url" input', () => {
expect(findUrl().html()).toMatchSnapshot();
});
it('renders "authorization-key" input', () => {
expect(findAuthorizationKey().html()).toMatchSnapshot();
});
it('renders toggle button', () => {
expect(wrapper.find(ToggleButton).html()).toMatchSnapshot();
});
it('shows description and docs links', () => {
expect(findDescription().element.innerHTML).toMatchSnapshot();
});
});
describe('reset key', () => {
it('updates the authorization key on success', async () => {
const formPath = 'some/path';
mockAxios.onPut(formPath).replyOnce(200, { token: 'newToken' });
createComponent({ formPath });
wrapper.find(GlModal).vm.$emit('ok');
await axios.waitForAll();
expect(findAuthorizationKey().attributes('value')).toBe('newToken');
});
it('shows flash message on error', () => {
const formPath = 'some/path';
mockAxios.onPut(formPath).replyOnce(404);
createComponent({ formPath });
return wrapper.vm.resetKey().then(() => {
expect(findAuthorizationKey().attributes('value')).toBe(
defaultProps.initialAuthorizationKey,
);
expect(createFlash).toHaveBeenCalled();
});
});
});
describe('activate toggle', () => {
describe('successfully completes', () => {
describe.each`
initialActivated | value
${false} | ${true}
${true} | ${false}
`(
'when initialActivated=$initialActivated and value=$value',
({ initialActivated, value }) => {
beforeEach(() => {
const formPath = 'some/path';
mockAxios
.onPut(formPath, { service: { active: value } })
.replyOnce(200, { active: value });
createComponent({ initialActivated, formPath });
return wrapper.vm.toggleActivated(value);
});
it(`updates toggle button value to ${value}`, () => {
expect(wrapper.find(ToggleButton).props('value')).toBe(value);
});
},
);
});
describe('error is encountered', () => {
beforeEach(() => {
const formPath = 'some/path';
mockAxios.onPut(formPath).replyOnce(500);
});
it('restores previous value', () => {
createComponent({ initialActivated: false });
return wrapper.vm.toggleActivated(true).then(() => {
expect(wrapper.find(ToggleButton).props('value')).toBe(false);
});
});
});
});
describe('form is disabled', () => {
beforeEach(() => {
createComponent({ isDisabled: true });
});
it('cannot be toggled', () => {
wrapper.find(ToggleButton).vm.$emit('change');
return nextTick().then(() => {
expect(wrapper.find(ToggleButton).props('disabledInput')).toBe(true);
});
});
});
});

View file

@ -2,6 +2,8 @@ import VueApollo, { ApolloMutation } from 'vue-apollo';
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { nextTick } from 'vue';
import { merge } from 'lodash';
import { useFakeDate } from 'helpers/fake_date';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import { stubComponent } from 'helpers/stub_component';
@ -22,162 +24,97 @@ import {
} from '~/snippets/constants';
import UpdateSnippetMutation from '~/snippets/mutations/updateSnippet.mutation.graphql';
import CreateSnippetMutation from '~/snippets/mutations/createSnippet.mutation.graphql';
import { testEntries } from '../test_utils';
import { testEntries, createGQLSnippetsQueryResponse, createGQLSnippet } from '../test_utils';
jest.mock('~/flash');
const TEST_UPLOADED_FILES = ['foo/bar.txt', 'alpha/beta.js'];
const TEST_API_ERROR = 'Ufff';
const TEST_MUTATION_ERROR = 'Bummer';
const TEST_API_ERROR = new Error('TEST_API_ERROR');
const TEST_MUTATION_ERROR = 'Test mutation error';
const TEST_CAPTCHA_RESPONSE = 'i-got-a-captcha';
const TEST_CAPTCHA_SITE_KEY = 'abc123';
const TEST_ACTIONS = {
NO_CONTENT: {
...testEntries.created.diff,
content: '',
},
NO_PATH: {
...testEntries.created.diff,
filePath: '',
},
VALID: {
...testEntries.created.diff,
},
NO_CONTENT: merge({}, testEntries.created.diff, { content: '' }),
NO_PATH: merge({}, testEntries.created.diff, { filePath: '' }),
VALID: merge({}, testEntries.created.diff),
};
const TEST_WEB_URL = '/snippets/7';
const TEST_SNIPPET_GID = 'gid://gitlab/PersonalSnippet/42';
const createTestSnippet = () => ({
webUrl: TEST_WEB_URL,
id: 7,
title: 'Snippet Title',
description: 'Lorem ipsum snippet desc',
visibilityLevel: SNIPPET_VISIBILITY_PRIVATE,
});
describe('Snippet Edit app', () => {
let wrapper;
let fakeApollo;
const captchaSiteKey = 'abc123';
const relativeUrlRoot = '/foo/';
const originalRelativeUrlRoot = gon.relative_url_root;
const GetSnippetQuerySpy = jest.fn().mockResolvedValue({
data: { snippets: { nodes: [createTestSnippet()] } },
const createSnippet = () =>
merge(createGQLSnippet(), {
webUrl: TEST_WEB_URL,
visibilityLevel: SNIPPET_VISIBILITY_PRIVATE,
});
const mutationTypes = {
RESOLVE: jest.fn().mockResolvedValue({
data: {
updateSnippet: {
errors: [],
snippet: createTestSnippet(),
needsCaptchaResponse: null,
captchaSiteKey: null,
},
},
}),
RESOLVE_WITH_ERRORS: jest.fn().mockResolvedValue({
data: {
updateSnippet: {
errors: [TEST_MUTATION_ERROR],
snippet: createTestSnippet(),
needsCaptchaResponse: null,
captchaSiteKey: null,
},
createSnippet: {
errors: [TEST_MUTATION_ERROR],
snippet: null,
needsCaptchaResponse: null,
captchaSiteKey: null,
},
},
}),
// TODO: QUESTION - This has to be wrapped in a factory function in order for the mock to have
// the `mockResolvedValueOnce` counter properly cleared/reset between test `it` examples, by
// ensuring each one gets a fresh mock instance. It's apparently impossible/hard to manually
// clear/reset them (see https://github.com/facebook/jest/issues/7136). So, should
// we convert all the others to factory functions too, to be consistent? And/or move the whole
// `mutationTypes` declaration into a `beforeEach`? (not sure if that will still solve the
// mock reset problem though).
RESOLVE_WITH_NEEDS_CAPTCHA_RESPONSE: () =>
jest
.fn()
// NOTE: There may be a captcha-related error, but it is not used in the GraphQL/Vue flow,
// only a truthy 'needsCaptchaResponse' value is used to trigger the captcha modal showing.
.mockResolvedValueOnce({
data: {
createSnippet: {
errors: ['ignored captcha error message'],
snippet: null,
needsCaptchaResponse: true,
captchaSiteKey,
},
},
})
// After the captcha is solved and the modal is closed, the second form submission should
// be successful and return needsCaptchaResponse = false.
.mockResolvedValueOnce({
data: {
createSnippet: {
errors: ['ignored captcha error message'],
snippet: createTestSnippet(),
needsCaptchaResponse: false,
captchaSiteKey: null,
},
},
}),
REJECT: jest.fn().mockRejectedValue(TEST_API_ERROR),
};
const createQueryResponse = (obj = {}) =>
createGQLSnippetsQueryResponse([merge(createSnippet(), obj)]);
function createComponent({
props = {},
loading = false,
mutationRes = mutationTypes.RESOLVE,
selectedLevel = SNIPPET_VISIBILITY_PRIVATE,
withApollo = false,
} = {}) {
let componentData = {
mocks: {
$apollo: {
queries: {
snippet: { loading },
},
mutate: mutationRes,
const createMutationResponse = (key, obj = {}) => ({
data: {
[key]: merge(
{
errors: [],
snippet: {
__typename: 'Snippet',
webUrl: TEST_WEB_URL,
},
spamLogId: null,
needsCaptchaResponse: false,
captchaSiteKey: null,
},
};
obj,
),
},
});
if (withApollo) {
const localVue = createLocalVue();
localVue.use(VueApollo);
const createMutationResponseWithErrors = (key) =>
createMutationResponse(key, { errors: [TEST_MUTATION_ERROR] });
const requestHandlers = [[GetSnippetQuery, GetSnippetQuerySpy]];
fakeApollo = createMockApollo(requestHandlers);
componentData = {
localVue,
apolloProvider: fakeApollo,
};
}
const createMutationResponseWithRecaptcha = (key) =>
createMutationResponse(key, {
errors: ['ignored captcha error message'],
needsCaptchaResponse: true,
captchaSiteKey: TEST_CAPTCHA_SITE_KEY,
});
wrapper = shallowMount(SnippetEditApp, {
...componentData,
stubs: {
ApolloMutation,
FormFooterActions,
CaptchaModal: stubComponent(CaptchaModal),
},
provide: {
selectedLevel,
},
propsData: {
snippetGid: 'gid://gitlab/PersonalSnippet/42',
markdownPreviewPath: 'http://preview.foo.bar',
markdownDocsPath: 'http://docs.foo.bar',
...props,
},
});
}
const getApiData = ({
id,
title = '',
description = '',
visibilityLevel = SNIPPET_VISIBILITY_PRIVATE,
} = {}) => ({
id,
title,
description,
visibilityLevel,
blobActions: [],
});
const localVue = createLocalVue();
localVue.use(VueApollo);
describe('Snippet Edit app', () => {
useFakeDate();
let wrapper;
let getSpy;
// Mutate spy receives a "key" so that we can:
// - Use the same spy whether we are creating or updating.
// - Build the correct response object
// - Assert which mutation was sent
let mutateSpy;
const relativeUrlRoot = '/foo/';
const originalRelativeUrlRoot = gon.relative_url_root;
beforeEach(() => {
getSpy = jest.fn().mockResolvedValue(createQueryResponse());
// See `mutateSpy` declaration comment for why we send a key
mutateSpy = jest.fn().mockImplementation((key) => Promise.resolve(createMutationResponse(key)));
gon.relative_url_root = relativeUrlRoot;
jest.spyOn(urlUtils, 'redirectTo').mockImplementation();
});
@ -193,7 +130,6 @@ describe('Snippet Edit app', () => {
const findSubmitButton = () => wrapper.find('[data-testid="snippet-submit-btn"]');
const findCancelButton = () => wrapper.find('[data-testid="snippet-cancel-btn"]');
const hasDisabledSubmit = () => Boolean(findSubmitButton().attributes('disabled'));
const clickSubmitBtn = () => wrapper.find('[data-testid="snippet-edit-form"]').trigger('submit');
const triggerBlobActions = (actions) => findBlobActions().vm.$emit('actions', actions);
const setUploadFilesHtml = (paths) => {
@ -201,56 +137,92 @@ describe('Snippet Edit app', () => {
.map((path) => `<input name="files[]" value="${path}">`)
.join('');
};
const getApiData = ({
id,
title = '',
description = '',
visibilityLevel = SNIPPET_VISIBILITY_PRIVATE,
} = {}) => ({
id,
title,
description,
visibilityLevel,
blobActions: [],
});
const setTitle = (val) => wrapper.find(TitleField).vm.$emit('input', val);
const setDescription = (val) => wrapper.find(SnippetDescriptionEdit).vm.$emit('input', val);
// Ideally we wouldn't call this method directly, but we don't have a way to trigger
// apollo responses yet.
const loadSnippet = (...nodes) => {
if (nodes.length) {
wrapper.setData({
snippet: nodes[0],
newSnippet: false,
});
} else {
wrapper.setData({
newSnippet: true,
});
const createComponent = ({ props = {}, selectedLevel = SNIPPET_VISIBILITY_PRIVATE } = {}) => {
if (wrapper) {
throw new Error('wrapper already created');
}
const requestHandlers = [
[GetSnippetQuery, getSpy],
// See `mutateSpy` declaration comment for why we send a key
[UpdateSnippetMutation, (...args) => mutateSpy('updateSnippet', ...args)],
[CreateSnippetMutation, (...args) => mutateSpy('createSnippet', ...args)],
];
const apolloProvider = createMockApollo(requestHandlers);
wrapper = shallowMount(SnippetEditApp, {
apolloProvider,
localVue,
stubs: {
ApolloMutation,
FormFooterActions,
CaptchaModal: stubComponent(CaptchaModal),
},
provide: {
selectedLevel,
},
propsData: {
snippetGid: TEST_SNIPPET_GID,
markdownPreviewPath: 'http://preview.foo.bar',
markdownDocsPath: 'http://docs.foo.bar',
...props,
},
});
};
describe('rendering', () => {
it('renders loader while the query is in flight', () => {
createComponent({ loading: true });
// Creates component and waits for gql load
const createComponentAndLoad = async (...args) => {
createComponent(...args);
await waitForPromises();
};
// Creates loaded component and submits form
const createComponentAndSubmit = async (...args) => {
await createComponentAndLoad(...args);
clickSubmitBtn();
await waitForPromises();
};
describe('when loading', () => {
it('renders loader', () => {
createComponent();
expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
});
});
it.each([[{}], [{ snippetGid: '' }]])(
'should render all required components with %s',
(props) => {
createComponent(props);
describe.each`
snippetGid | expectedQueries
${TEST_SNIPPET_GID} | ${[[{ ids: [TEST_SNIPPET_GID] }]]}
${''} | ${[]}
`('when loaded with snippetGid=$snippetGid', ({ snippetGid, expectedQueries }) => {
beforeEach(() => createComponentAndLoad({ props: { snippetGid } }));
expect(wrapper.find(CaptchaModal).exists()).toBe(true);
expect(wrapper.find(TitleField).exists()).toBe(true);
expect(wrapper.find(SnippetDescriptionEdit).exists()).toBe(true);
expect(wrapper.find(SnippetVisibilityEdit).exists()).toBe(true);
expect(wrapper.find(FormFooterActions).exists()).toBe(true);
expect(findBlobActions().exists()).toBe(true);
},
);
it(`queries with ${JSON.stringify(expectedQueries)}`, () => {
expect(getSpy.mock.calls).toEqual(expectedQueries);
});
it('should render components', () => {
expect(wrapper.find(CaptchaModal).exists()).toBe(true);
expect(wrapper.find(TitleField).exists()).toBe(true);
expect(wrapper.find(SnippetDescriptionEdit).exists()).toBe(true);
expect(wrapper.find(SnippetVisibilityEdit).exists()).toBe(true);
expect(wrapper.find(FormFooterActions).exists()).toBe(true);
expect(findBlobActions().exists()).toBe(true);
});
it('should hide loader', () => {
expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
});
});
describe('default', () => {
it.each`
title | actions | shouldDisable
${''} | ${[]} | ${true}
@ -260,11 +232,12 @@ describe('Snippet Edit app', () => {
${'foo'} | ${[TEST_ACTIONS.VALID, TEST_ACTIONS.NO_CONTENT]} | ${true}
${'foo'} | ${[TEST_ACTIONS.VALID, TEST_ACTIONS.NO_PATH]} | ${false}
`(
'should handle submit disable (title=$title, actions=$actions, shouldDisable=$shouldDisable)',
'should handle submit disable (title="$title", actions="$actions", shouldDisable="$shouldDisable")',
async ({ title, actions, shouldDisable }) => {
createComponent();
getSpy.mockResolvedValue(createQueryResponse({ title }));
await createComponentAndLoad();
loadSnippet({ title });
triggerBlobActions(actions);
await nextTick();
@ -274,244 +247,226 @@ describe('Snippet Edit app', () => {
);
it.each`
projectPath | snippetArg | expectation
${''} | ${[]} | ${urlUtils.joinPaths('/', relativeUrlRoot, '-', 'snippets')}
${'project/path'} | ${[]} | ${urlUtils.joinPaths('/', relativeUrlRoot, 'project/path/-', 'snippets')}
${''} | ${[createTestSnippet()]} | ${TEST_WEB_URL}
${'project/path'} | ${[createTestSnippet()]} | ${TEST_WEB_URL}
projectPath | snippetGid | expectation
${''} | ${''} | ${urlUtils.joinPaths('/', relativeUrlRoot, '-', 'snippets')}
${'project/path'} | ${''} | ${urlUtils.joinPaths('/', relativeUrlRoot, 'project/path/-', 'snippets')}
${''} | ${TEST_SNIPPET_GID} | ${TEST_WEB_URL}
${'project/path'} | ${TEST_SNIPPET_GID} | ${TEST_WEB_URL}
`(
'should set cancel href when (projectPath=$projectPath, snippet=$snippetArg)',
async ({ projectPath, snippetArg, expectation }) => {
createComponent({
props: { projectPath },
'should set cancel href (projectPath="$projectPath", snippetGid="$snippetGid")',
async ({ projectPath, snippetGid, expectation }) => {
await createComponentAndLoad({
props: {
projectPath,
snippetGid,
},
});
loadSnippet(...snippetArg);
await nextTick();
expect(findCancelButton().attributes('href')).toBe(expectation);
},
);
});
describe('functionality', () => {
it('does not fetch snippet when create a new snippet', async () => {
createComponent({ props: { snippetGid: '' }, withApollo: true });
it.each([SNIPPET_VISIBILITY_PRIVATE, SNIPPET_VISIBILITY_INTERNAL, SNIPPET_VISIBILITY_PUBLIC])(
'marks %s visibility by default',
async (visibility) => {
createComponent({
props: { snippetGid: '' },
selectedLevel: visibility,
});
jest.runOnlyPendingTimers();
await nextTick();
expect(GetSnippetQuerySpy).not.toHaveBeenCalled();
});
describe('default visibility', () => {
it.each([SNIPPET_VISIBILITY_PRIVATE, SNIPPET_VISIBILITY_INTERNAL, SNIPPET_VISIBILITY_PUBLIC])(
'marks %s visibility by default',
async (visibility) => {
createComponent({
props: { snippetGid: '' },
selectedLevel: visibility,
});
expect(wrapper.vm.snippet.visibilityLevel).toEqual(visibility);
},
);
});
expect(wrapper.find(SnippetVisibilityEdit).props('value')).toBe(visibility);
},
);
describe('form submission handling', () => {
it.each`
snippetArg | projectPath | uploadedFiles | input | mutation
${[]} | ${'project/path'} | ${[]} | ${{ ...getApiData(), projectPath: 'project/path', uploadedFiles: [] }} | ${CreateSnippetMutation}
${[]} | ${''} | ${[]} | ${{ ...getApiData(), projectPath: '', uploadedFiles: [] }} | ${CreateSnippetMutation}
${[]} | ${''} | ${TEST_UPLOADED_FILES} | ${{ ...getApiData(), projectPath: '', uploadedFiles: TEST_UPLOADED_FILES }} | ${CreateSnippetMutation}
${[createTestSnippet()]} | ${'project/path'} | ${[]} | ${getApiData(createTestSnippet())} | ${UpdateSnippetMutation}
${[createTestSnippet()]} | ${''} | ${[]} | ${getApiData(createTestSnippet())} | ${UpdateSnippetMutation}
snippetGid | projectPath | uploadedFiles | input | mutationType
${''} | ${'project/path'} | ${[]} | ${{ ...getApiData(), projectPath: 'project/path', uploadedFiles: [] }} | ${'createSnippet'}
${''} | ${''} | ${[]} | ${{ ...getApiData(), projectPath: '', uploadedFiles: [] }} | ${'createSnippet'}
${''} | ${''} | ${TEST_UPLOADED_FILES} | ${{ ...getApiData(), projectPath: '', uploadedFiles: TEST_UPLOADED_FILES }} | ${'createSnippet'}
${TEST_SNIPPET_GID} | ${'project/path'} | ${[]} | ${getApiData(createSnippet())} | ${'updateSnippet'}
${TEST_SNIPPET_GID} | ${''} | ${[]} | ${getApiData(createSnippet())} | ${'updateSnippet'}
`(
'should submit mutation with (snippet=$snippetArg, projectPath=$projectPath, uploadedFiles=$uploadedFiles)',
async ({ snippetArg, projectPath, uploadedFiles, mutation, input }) => {
createComponent({
'should submit mutation $mutationType (snippetGid=$snippetGid, projectPath=$projectPath, uploadedFiles=$uploadedFiles)',
async ({ snippetGid, projectPath, uploadedFiles, mutationType, input }) => {
await createComponentAndLoad({
props: {
snippetGid,
projectPath,
},
});
loadSnippet(...snippetArg);
setUploadFilesHtml(uploadedFiles);
await nextTick();
clickSubmitBtn();
expect(mutationTypes.RESOLVE).toHaveBeenCalledWith({
mutation,
variables: {
input,
},
expect(mutateSpy).toHaveBeenCalledTimes(1);
expect(mutateSpy).toHaveBeenCalledWith(mutationType, {
input,
});
},
);
it('should redirect to snippet view on successful mutation', async () => {
createComponent();
loadSnippet(createTestSnippet());
clickSubmitBtn();
await waitForPromises();
await createComponentAndSubmit();
expect(urlUtils.redirectTo).toHaveBeenCalledWith(TEST_WEB_URL);
});
it.each`
snippetArg | projectPath | mutationRes | expectMessage
${[]} | ${'project/path'} | ${mutationTypes.RESOLVE_WITH_ERRORS} | ${`Can't create snippet: ${TEST_MUTATION_ERROR}`}
${[]} | ${''} | ${mutationTypes.RESOLVE_WITH_ERRORS} | ${`Can't create snippet: ${TEST_MUTATION_ERROR}`}
${[]} | ${''} | ${mutationTypes.REJECT} | ${`Can't create snippet: ${TEST_API_ERROR}`}
${[createTestSnippet()]} | ${'project/path'} | ${mutationTypes.RESOLVE_WITH_ERRORS} | ${`Can't update snippet: ${TEST_MUTATION_ERROR}`}
${[createTestSnippet()]} | ${''} | ${mutationTypes.RESOLVE_WITH_ERRORS} | ${`Can't update snippet: ${TEST_MUTATION_ERROR}`}
snippetGid | projectPath | mutationRes | expectMessage
${''} | ${'project/path'} | ${createMutationResponseWithErrors('createSnippet')} | ${`Can't create snippet: ${TEST_MUTATION_ERROR}`}
${''} | ${''} | ${createMutationResponseWithErrors('createSnippet')} | ${`Can't create snippet: ${TEST_MUTATION_ERROR}`}
${TEST_SNIPPET_GID} | ${'project/path'} | ${createMutationResponseWithErrors('updateSnippet')} | ${`Can't update snippet: ${TEST_MUTATION_ERROR}`}
${TEST_SNIPPET_GID} | ${''} | ${createMutationResponseWithErrors('updateSnippet')} | ${`Can't update snippet: ${TEST_MUTATION_ERROR}`}
`(
'should flash error with (snippet=$snippetArg, projectPath=$projectPath)',
async ({ snippetArg, projectPath, mutationRes, expectMessage }) => {
createComponent({
'should flash error with (snippet=$snippetGid, projectPath=$projectPath)',
async ({ snippetGid, projectPath, mutationRes, expectMessage }) => {
mutateSpy.mockResolvedValue(mutationRes);
await createComponentAndSubmit({
props: {
projectPath,
snippetGid,
},
mutationRes,
});
loadSnippet(...snippetArg);
clickSubmitBtn();
await waitForPromises();
expect(urlUtils.redirectTo).not.toHaveBeenCalled();
expect(Flash).toHaveBeenCalledWith(expectMessage);
},
);
describe('with apollo network error', () => {
beforeEach(async () => {
jest.spyOn(console, 'error').mockImplementation();
mutateSpy.mockRejectedValue(TEST_API_ERROR);
await createComponentAndSubmit();
});
it('should not redirect', () => {
expect(urlUtils.redirectTo).not.toHaveBeenCalled();
});
it('should flash', () => {
// Apollo automatically wraps the resolver's error in a NetworkError
expect(Flash).toHaveBeenCalledWith(
`Can't update snippet: Network error: ${TEST_API_ERROR.message}`,
);
});
it('should console error', () => {
// eslint-disable-next-line no-console
expect(console.error).toHaveBeenCalledTimes(1);
// eslint-disable-next-line no-console
expect(console.error).toHaveBeenCalledWith(
'[gitlab] unexpected error while updating snippet',
expect.objectContaining({ message: `Network error: ${TEST_API_ERROR.message}` }),
);
});
});
describe('when needsCaptchaResponse is true', () => {
let modal;
let captchaResponse;
let mutationRes;
beforeEach(async () => {
mutationRes = mutationTypes.RESOLVE_WITH_NEEDS_CAPTCHA_RESPONSE();
createComponent({
props: {
snippetGid: '',
projectPath: '',
},
mutationRes,
});
// await waitForPromises();
mutateSpy
.mockResolvedValueOnce(createMutationResponseWithRecaptcha('updateSnippet'))
.mockResolvedValueOnce(createMutationResponseWithErrors('updateSnippet'));
await createComponentAndSubmit();
modal = findCaptchaModal();
loadSnippet();
clickSubmitBtn();
await waitForPromises();
mutateSpy.mockClear();
});
it('should display captcha modal', () => {
expect(urlUtils.redirectTo).not.toHaveBeenCalled();
expect(modal.props('needsCaptchaResponse')).toEqual(true);
expect(modal.props('captchaSiteKey')).toEqual(captchaSiteKey);
});
describe('when a non-empty captcha response is received', () => {
beforeEach(() => {
captchaResponse = 'xyz123';
});
it('sets needsCaptchaResponse to false', async () => {
modal.vm.$emit('receivedCaptchaResponse', captchaResponse);
await nextTick();
expect(modal.props('needsCaptchaResponse')).toEqual(false);
});
it('resubmits form with captchaResponse', async () => {
modal.vm.$emit('receivedCaptchaResponse', captchaResponse);
await nextTick();
expect(mutationRes.mock.calls[1][0]).toEqual({
mutation: CreateSnippetMutation,
variables: {
input: {
...getApiData(),
captchaResponse,
projectPath: '',
uploadedFiles: [],
},
},
});
expect(modal.props()).toEqual({
needsCaptchaResponse: true,
captchaSiteKey: TEST_CAPTCHA_SITE_KEY,
});
});
describe('when an empty captcha response is received ', () => {
beforeEach(() => {
captchaResponse = '';
describe.each`
response | expectedCalls
${null} | ${[]}
${TEST_CAPTCHA_RESPONSE} | ${[['updateSnippet', { input: { ...getApiData(createSnippet()), captchaResponse: TEST_CAPTCHA_RESPONSE } }]]}
`('when captcha response is $response', ({ response, expectedCalls }) => {
beforeEach(async () => {
modal.vm.$emit('receivedCaptchaResponse', response);
await nextTick();
});
it('sets needsCaptchaResponse to false', async () => {
modal.vm.$emit('receivedCaptchaResponse', captchaResponse);
await nextTick();
it('sets needsCaptchaResponse to false', () => {
expect(modal.props('needsCaptchaResponse')).toEqual(false);
});
it('does not resubmit form', async () => {
modal.vm.$emit('receivedCaptchaResponse', captchaResponse);
await nextTick();
expect(mutationRes.mock.calls.length).toEqual(1);
it(`expected to call times = ${expectedCalls.length}`, () => {
expect(mutateSpy.mock.calls).toEqual(expectedCalls);
});
});
});
});
});
describe('on before unload', () => {
const caseNoActions = () => triggerBlobActions([]);
const caseEmptyAction = () => triggerBlobActions([testEntries.empty.diff]);
const caseSomeActions = () => triggerBlobActions([testEntries.updated.diff]);
const caseTitleIsSet = () => {
caseEmptyAction();
setTitle('test');
};
const caseDescriptionIsSet = () => {
caseEmptyAction();
setDescription('test');
};
const caseClickSubmitBtn = () => {
caseSomeActions();
clickSubmitBtn();
};
it.each`
condition | expectPrevented | action
${'there are no actions'} | ${false} | ${caseNoActions}
${'there is an empty action'} | ${false} | ${caseEmptyAction}
${'there are actions'} | ${true} | ${caseSomeActions}
${'the title is set'} | ${true} | ${caseTitleIsSet}
${'the description is set'} | ${true} | ${caseDescriptionIsSet}
${'the snippet is being saved'} | ${false} | ${caseClickSubmitBtn}
`(
'handles before unload prevent when $condition (expectPrevented=$expectPrevented)',
({ expectPrevented, action }) => {
createComponent();
loadSnippet();
action();
const event = new Event('beforeunload');
const returnValueSetter = jest.spyOn(event, 'returnValue', 'set');
window.dispatchEvent(event);
if (expectPrevented) {
expect(returnValueSetter).toHaveBeenCalledWith(
'Are you sure you want to lose unsaved changes?',
);
} else {
expect(returnValueSetter).not.toHaveBeenCalled();
}
describe('on before unload', () => {
it.each([
['there are no actions', false, () => triggerBlobActions([])],
['there is an empty action', false, () => triggerBlobActions([testEntries.empty.diff])],
['there are actions', true, () => triggerBlobActions([testEntries.updated.diff])],
[
'the title is set',
true,
() => {
triggerBlobActions([testEntries.empty.diff]);
setTitle('test');
},
);
});
],
[
'the description is set',
true,
() => {
triggerBlobActions([testEntries.empty.diff]);
setDescription('test');
},
],
[
'the snippet is being saved',
false,
() => {
triggerBlobActions([testEntries.updated.diff]);
clickSubmitBtn();
},
],
])(
'handles before unload prevent when %s (expectPrevented=%s)',
async (_, expectPrevented, action) => {
await createComponentAndLoad({
props: {
snippetGid: '',
},
});
action();
const event = new Event('beforeunload');
const returnValueSetter = jest.spyOn(event, 'returnValue', 'set');
window.dispatchEvent(event);
if (expectPrevented) {
expect(returnValueSetter).toHaveBeenCalledWith(
'Are you sure you want to lose unsaved changes?',
);
} else {
expect(returnValueSetter).not.toHaveBeenCalled();
}
},
);
});
});

View file

@ -1,3 +1,4 @@
import { TEST_HOST } from 'helpers/test_constants';
import {
SNIPPET_BLOB_ACTION_CREATE,
SNIPPET_BLOB_ACTION_UPDATE,
@ -8,6 +9,51 @@ import {
const CONTENT_1 = 'Lorem ipsum dolar\nSit amit\n\nGoodbye!\n';
const CONTENT_2 = 'Lorem ipsum dolar sit amit.\n\nGoodbye!\n';
export const createGQLSnippet = () => ({
__typename: 'Snippet',
id: 7,
title: 'Snippet Title',
description: 'Lorem ipsum snippet desc',
descriptionHtml: '<p>Lorem ipsum snippet desc</p>',
createdAt: new Date(Date.now() - 1e6),
updatedAt: new Date(Date.now() - 1e3),
httpUrlToRepo: `${TEST_HOST}/repo`,
sshUrlToRepo: 'ssh://ssh.test/repo',
blobs: [],
userPermissions: {
__typename: 'SnippetPermissions',
adminSnippet: true,
updateSnippet: true,
},
project: {
__typename: 'Project',
fullPath: 'group/project',
webUrl: `${TEST_HOST}/group/project`,
},
author: {
__typename: 'User',
id: 1,
avatarUrl: `${TEST_HOST}/avatar.png`,
name: 'root',
username: 'root',
webUrl: `${TEST_HOST}/root`,
status: {
__typename: 'UserStatus',
emoji: '',
message: '',
},
},
});
export const createGQLSnippetsQueryResponse = (snippets) => ({
data: {
snippets: {
__typename: 'SnippetConnection',
nodes: snippets,
},
},
});
export const testEntries = {
created: {
id: 'blob_1',

View file

@ -0,0 +1,16 @@
import { getErrorMessage, UNEXPECTED_ERROR } from '~/snippets/utils/error';
describe('~/snippets/utils/error', () => {
describe('getErrorMessage', () => {
it.each`
input | output
${null} | ${UNEXPECTED_ERROR}
${'message'} | ${'message'}
${new Error('test message')} | ${'test message'}
${{ networkError: 'Network error: test message' }} | ${'Network error: test message'}
${{}} | ${UNEXPECTED_ERROR}
`('with $input, should return "$output"', ({ input, output }) => {
expect(getErrorMessage(input)).toBe(output);
});
});
});

View file

@ -1,10 +1,9 @@
import Vue from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
import ciBadge from '~/vue_shared/components/ci_badge_link.vue';
import { shallowMount } from '@vue/test-utils';
import CiBadge from '~/vue_shared/components/ci_badge_link.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
describe('CI Badge Link Component', () => {
let CIBadge;
let vm;
let wrapper;
const statuses = {
canceled: {
@ -72,29 +71,30 @@ describe('CI Badge Link Component', () => {
},
};
beforeEach(() => {
CIBadge = Vue.extend(ciBadge);
});
const findIcon = () => wrapper.findComponent(CiIcon);
const createComponent = (propsData) => {
wrapper = shallowMount(CiBadge, { propsData });
};
afterEach(() => {
vm.$destroy();
wrapper.destroy();
wrapper = null;
});
it('should render each status badge', () => {
Object.keys(statuses).map((status) => {
vm = mountComponent(CIBadge, { status: statuses[status] });
it.each(Object.keys(statuses))('should render badge for status: %s', (status) => {
createComponent({ status: statuses[status] });
expect(vm.$el.getAttribute('href')).toEqual(statuses[status].details_path);
expect(vm.$el.textContent.trim()).toEqual(statuses[status].text);
expect(vm.$el.getAttribute('class')).toContain(`ci-status ci-${statuses[status].group}`);
expect(vm.$el.querySelector('svg')).toBeDefined();
return vm;
});
expect(wrapper.attributes('href')).toBe(statuses[status].details_path);
expect(wrapper.text()).toBe(statuses[status].text);
expect(wrapper.classes()).toContain('ci-status');
expect(wrapper.classes()).toContain(`ci-${statuses[status].group}`);
expect(findIcon().exists()).toBe(true);
});
it('should not render label', () => {
vm = mountComponent(CIBadge, { status: statuses.canceled, showText: false });
createComponent({ status: statuses.canceled, showText: false });
expect(vm.$el.textContent.trim()).toEqual('');
expect(wrapper.text()).toBe('');
});
});

View file

@ -1,122 +1,51 @@
import Vue from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
import { shallowMount } from '@vue/test-utils';
import { GlIcon } from '@gitlab/ui';
import ciIcon from '~/vue_shared/components/ci_icon.vue';
describe('CI Icon component', () => {
const Component = Vue.extend(ciIcon);
let vm;
let wrapper;
afterEach(() => {
vm.$destroy();
wrapper.destroy();
wrapper = null;
});
it('should render a span element with an svg', () => {
vm = mountComponent(Component, {
status: {
icon: 'status_success',
wrapper = shallowMount(ciIcon, {
propsData: {
status: {
icon: 'status_success',
},
},
});
expect(vm.$el.tagName).toEqual('SPAN');
expect(vm.$el.querySelector('span > svg')).toBeDefined();
expect(wrapper.find('span').exists()).toBe(true);
expect(wrapper.find(GlIcon).exists()).toBe(true);
});
it('should render a success status', () => {
vm = mountComponent(Component, {
status: {
icon: 'status_success',
group: 'success',
},
describe('rendering a status', () => {
it.each`
icon | group | cssClass
${'status_success'} | ${'success'} | ${'ci-status-icon-success'}
${'status_failed'} | ${'failed'} | ${'ci-status-icon-failed'}
${'status_warning'} | ${'warning'} | ${'ci-status-icon-warning'}
${'status_pending'} | ${'pending'} | ${'ci-status-icon-pending'}
${'status_running'} | ${'running'} | ${'ci-status-icon-running'}
${'status_created'} | ${'created'} | ${'ci-status-icon-created'}
${'status_skipped'} | ${'skipped'} | ${'ci-status-icon-skipped'}
${'status_canceled'} | ${'canceled'} | ${'ci-status-icon-canceled'}
${'status_manual'} | ${'manual'} | ${'ci-status-icon-manual'}
`('should render a $group status', ({ icon, group, cssClass }) => {
wrapper = shallowMount(ciIcon, {
propsData: {
status: {
icon,
group,
},
},
});
expect(wrapper.classes()).toContain(cssClass);
});
expect(vm.$el.classList.contains('ci-status-icon-success')).toEqual(true);
});
it('should render a failed status', () => {
vm = mountComponent(Component, {
status: {
icon: 'status_failed',
group: 'failed',
},
});
expect(vm.$el.classList.contains('ci-status-icon-failed')).toEqual(true);
});
it('should render success with warnings status', () => {
vm = mountComponent(Component, {
status: {
icon: 'status_warning',
group: 'warning',
},
});
expect(vm.$el.classList.contains('ci-status-icon-warning')).toEqual(true);
});
it('should render pending status', () => {
vm = mountComponent(Component, {
status: {
icon: 'status_pending',
group: 'pending',
},
});
expect(vm.$el.classList.contains('ci-status-icon-pending')).toEqual(true);
});
it('should render running status', () => {
vm = mountComponent(Component, {
status: {
icon: 'status_running',
group: 'running',
},
});
expect(vm.$el.classList.contains('ci-status-icon-running')).toEqual(true);
});
it('should render created status', () => {
vm = mountComponent(Component, {
status: {
icon: 'status_created',
group: 'created',
},
});
expect(vm.$el.classList.contains('ci-status-icon-created')).toEqual(true);
});
it('should render skipped status', () => {
vm = mountComponent(Component, {
status: {
icon: 'status_skipped',
group: 'skipped',
},
});
expect(vm.$el.classList.contains('ci-status-icon-skipped')).toEqual(true);
});
it('should render canceled status', () => {
vm = mountComponent(Component, {
status: {
icon: 'status_canceled',
group: 'canceled',
},
});
expect(vm.$el.classList.contains('ci-status-icon-canceled')).toEqual(true);
});
it('should render status for manual action', () => {
vm = mountComponent(Component, {
status: {
icon: 'status_manual',
group: 'manual',
},
});
expect(vm.$el.classList.contains('ci-status-icon-manual')).toEqual(true);
});
});

View file

@ -52,6 +52,11 @@ RSpec.describe Gitlab::RequestForgeryProtection, :allow_forgery_protection do
end
describe '.verified?' do
it 'does not modify the env' do
env['REQUEST_METHOD'] = "GET"
expect { described_class.verified?(env) }.not_to change { env }
end
context 'when the request method is GET' do
before do
env['REQUEST_METHOD'] = 'GET'

View file

@ -67,6 +67,62 @@ RSpec.describe Repositories::ChangelogService do
end
end
describe '#start_of_commit_range' do
let(:project) { build_stubbed(:project) }
let(:user) { build_stubbed(:user) }
context 'when the "from" argument is specified' do
it 'returns the value of the argument' do
service = described_class
.new(project, user, version: '1.0.0', from: 'foo', to: 'bar')
expect(service.start_of_commit_range).to eq('foo')
end
end
context 'when the "from" argument is unspecified' do
it 'returns the tag commit of the previous version' do
service = described_class
.new(project, user, version: '1.0.0', to: 'bar')
finder_spy = instance_spy(Repositories::PreviousTagFinder)
tag = double(:tag, target_commit: double(:commit, id: '123'))
allow(Repositories::PreviousTagFinder)
.to receive(:new)
.with(project)
.and_return(finder_spy)
allow(finder_spy)
.to receive(:execute)
.with('1.0.0')
.and_return(tag)
expect(service.start_of_commit_range).to eq('123')
end
it 'raises an error when no tag is found' do
service = described_class
.new(project, user, version: '1.0.0', to: 'bar')
finder_spy = instance_spy(Repositories::PreviousTagFinder)
allow(Repositories::PreviousTagFinder)
.to receive(:new)
.with(project)
.and_return(finder_spy)
allow(finder_spy)
.to receive(:execute)
.with('1.0.0')
.and_return(nil)
expect { service.start_of_commit_range }
.to raise_error(Gitlab::Changelog::Error)
end
end
end
def create_commit(project, user, params)
params = { start_branch: 'master', branch_name: 'master' }.merge(params)
Files::MultiService.new(project, user, params).execute.fetch(:result)

View file

@ -57,7 +57,7 @@ RSpec.describe Search::GlobalService do
let(:scope) { 'issues' }
context 'sorting' do
let!(:project) { create(:project, :public) }
let_it_be(:project) { create(:project, :public) }
let!(:old_result) { create(:issue, project: project, title: 'sorted old', created_at: 1.month.ago) }
let!(:new_result) { create(:issue, project: project, title: 'sorted recent', created_at: 1.day.ago) }

View file

@ -45,8 +45,8 @@ RSpec.describe Search::GroupService do
let(:scope) { 'issues' }
context 'sorting' do
let!(:group) { create(:group) }
let!(:project) { create(:project, :public, group: group) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :public, group: group) }
let!(:old_result) { create(:issue, project: project, title: 'sorted old', created_at: 1.month.ago) }
let!(:new_result) { create(:issue, project: project, title: 'sorted recent', created_at: 1.day.ago) }

vendor/gitignore/C++.gitignore vendored Normal file → Executable file
View file

vendor/gitignore/Java.gitignore vendored Normal file → Executable file
View file

View file

@ -1,5 +1,15 @@
# Changelog for gitlab-workhorse
## v8.62.0
### Added
- Add RubyGems registry upload route
https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/680
### Fixed
- Cleanup Connection headers
https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/678
## v8.61.0
### Fixed

View file

@ -1 +1 @@
8.61.0
8.62.0

View file

@ -6,6 +6,7 @@ import (
"fmt"
"io"
"net/http"
"net/textproto"
"net/url"
"strconv"
"strings"
@ -188,6 +189,8 @@ func (api *API) newRequest(r *http.Request, suffix string) (*http.Request, error
authReq = authReq.WithContext(r.Context())
removeConnectionHeaders(authReq.Header)
// Clean some headers when issuing a new request without body
authReq.Header.Del("Content-Type")
authReq.Header.Del("Content-Encoding")
@ -203,7 +206,9 @@ func (api *API) newRequest(r *http.Request, suffix string) (*http.Request, error
authReq.Header.Del("Proxy-Authenticate")
authReq.Header.Del("Proxy-Authorization")
authReq.Header.Del("Te")
authReq.Header.Del("Trailers")
// "Trailer", not "Trailers" as per rfc2616; See errata https://www.rfc-editor.org/errata_search.php?eid=4522
// See https://httpwg.org/http-core/draft-ietf-httpbis-semantics-latest.html#field.connection
authReq.Header.Del("Trailer")
authReq.Header.Del("Upgrade")
// Also forward the Host header, which is excluded from the Header map by the http library.
@ -290,6 +295,18 @@ func (api *API) doRequestWithoutRedirects(authReq *http.Request) (*http.Response
return signingTripper.RoundTrip(authReq)
}
// removeConnectionHeaders removes hop-by-hop headers listed in the "Connection" header of h.
// See https://tools.ietf.org/html/rfc7230#section-6.1
func removeConnectionHeaders(h http.Header) {
for _, f := range h["Connection"] {
for _, sf := range strings.Split(f, ",") {
if sf = textproto.TrimString(sf); sf != "" {
h.Del(sf)
}
}
}
}
func copyAuthHeader(httpResponse *http.Response, w http.ResponseWriter) {
// Negotiate authentication (Kerberos) may need to return a WWW-Authenticate
// header to the client even in case of success as per RFC4559.
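For reference, the following is a minimal, self-contained Go sketch of how the hop-by-hop cleanup added above behaves. The helper body mirrors the removeConnectionHeaders function from this hunk; the header names and values in main are made up purely for illustration and are not part of the change.
package main
import (
	"fmt"
	"net/http"
	"net/textproto"
	"strings"
)
// Same logic as the removeConnectionHeaders helper added above: every header
// named in the "Connection" field is treated as hop-by-hop and removed.
func removeConnectionHeaders(h http.Header) {
	for _, f := range h["Connection"] {
		for _, sf := range strings.Split(f, ",") {
			if sf = textproto.TrimString(sf); sf != "" {
				h.Del(sf)
			}
		}
	}
}
func main() {
	// Illustrative headers only; these names do not appear in the change.
	h := http.Header{}
	h.Set("Connection", "X-Custom-Hop, Keep-Alive")
	h.Set("X-Custom-Hop", "drop me")
	h.Set("Keep-Alive", "timeout=5")
	h.Set("Authorization", "keep me")
	removeConnectionHeaders(h)
	fmt.Printf("%q\n", h.Get("X-Custom-Hop"))  // "" – listed in Connection, removed
	fmt.Printf("%q\n", h.Get("Keep-Alive"))    // "" – listed in Connection, removed
	fmt.Printf("%q\n", h.Get("Authorization")) // "keep me" – untouched
}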

View file

@ -264,6 +264,9 @@ func (u *upstream) configureRoutes() {
// Debian Artifact Repository
u.route("PUT", apiPattern+`v4/projects/[0-9]+/packages/debian/`, upload.BodyUploader(api, signingProxy, preparers.packages)),
// Gem Artifact Repository
u.route("POST", apiPattern+`v4/projects/[0-9]+/packages/rubygems/`, upload.BodyUploader(api, signingProxy, preparers.packages)),
// We are porting API to disk acceleration
// we need to declare each routes until we have fixed all the routes on the rails codebase.
// Overall status can be seen at https://gitlab.com/groups/gitlab-org/-/epics/1802#current-status
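As a rough illustration of how the new RubyGems route is matched, the sketch below checks the route's regular expression against the request path exercised by the workhorse test further down. The `^/api/` value for apiPattern is an assumption made for this example, not something shown in the hunk.
package main
import (
	"fmt"
	"regexp"
)
func main() {
	// apiPattern is assumed to be "^/api/" here, mirroring how the other
	// package routes are anchored; treat this value as an assumption.
	apiPattern := "^/api/"
	rubyGemsRoute := regexp.MustCompile(apiPattern + `v4/projects/[0-9]+/packages/rubygems/`)
	// Request path used by the new POST case in TestPackageFilesUpload.
	path := "/api/v4/projects/2412/packages/rubygems/api/v1/gems/sample.gem"
	fmt.Println(rubyGemsRoute.MatchString(path)) // true
}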

View file

@ -292,9 +292,9 @@ func TestLfsUpload(t *testing.T) {
require.Equal(t, rspBody, string(rspData))
}
func packageUploadTestServer(t *testing.T, resource string, reqBody string, rspBody string) *httptest.Server {
func packageUploadTestServer(t *testing.T, method string, resource string, reqBody string, rspBody string) *httptest.Server {
return testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) {
require.Equal(t, r.Method, "PUT")
require.Equal(t, r.Method, method)
apiResponse := fmt.Sprintf(
`{"TempPath":%q, "Size": %d}`, scratchDir, len(reqBody),
)
@ -330,17 +330,17 @@ func packageUploadTestServer(t *testing.T, resource string, reqBody string, rspB
})
}
func testPackageFileUpload(t *testing.T, resource string) {
func testPackageFileUpload(t *testing.T, method string, resource string) {
reqBody := "test data"
rspBody := "test success"
ts := packageUploadTestServer(t, resource, reqBody, rspBody)
ts := packageUploadTestServer(t, method, resource, reqBody, rspBody)
defer ts.Close()
ws := startWorkhorseServer(ts.URL)
defer ws.Close()
req, err := http.NewRequest("PUT", ws.URL+resource, strings.NewReader(reqBody))
req, err := http.NewRequest(method, ws.URL+resource, strings.NewReader(reqBody))
require.NoError(t, err)
resp, err := http.DefaultClient.Do(req)
@ -355,15 +355,19 @@ func testPackageFileUpload(t *testing.T, resource string) {
}
func TestPackageFilesUpload(t *testing.T) {
routes := []string{
"/api/v4/packages/conan/v1/files",
"/api/v4/projects/2412/packages/conan/v1/files",
"/api/v4/projects/2412/packages/maven/v1/files",
"/api/v4/projects/2412/packages/generic/mypackage/0.0.1/myfile.tar.gz",
"/api/v4/projects/2412/packages/debian/libsample0_1.2.3~alpha2-1_amd64.deb",
routes := []struct {
method string
resource string
}{
{"PUT", "/api/v4/packages/conan/v1/files"},
{"PUT", "/api/v4/projects/2412/packages/conan/v1/files"},
{"PUT", "/api/v4/projects/2412/packages/maven/v1/files"},
{"PUT", "/api/v4/projects/2412/packages/generic/mypackage/0.0.1/myfile.tar.gz"},
{"PUT", "/api/v4/projects/2412/packages/debian/libsample0_1.2.3~alpha2-1_amd64.deb"},
{"POST", "/api/v4/projects/2412/packages/rubygems/api/v1/gems/sample.gem"},
}
for _, r := range routes {
testPackageFileUpload(t, r)
testPackageFileUpload(t, r.method, r.resource)
}
}