Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent
231a6ae572
commit
22baaecaa8
46 changed files with 633 additions and 386 deletions
|
@ -1,7 +1,9 @@
|
|||
<script>
|
||||
import { GlTooltipDirective, GlModal } from '@gitlab/ui';
|
||||
import createFlash from '~/flash';
|
||||
import { __, s__, sprintf } from '~/locale';
|
||||
import eventHub from '../event_hub';
|
||||
import deleteEnvironmentMutation from '../graphql/mutations/delete_environment.mutation.graphql';
|
||||
|
||||
export default {
|
||||
id: 'delete-environment-modal',
|
||||
|
@ -17,6 +19,11 @@ export default {
|
|||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
graphql: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
},
|
||||
computed: {
|
||||
primaryProps() {
|
||||
|
@ -49,7 +56,29 @@ export default {
|
|||
},
|
||||
methods: {
|
||||
onSubmit() {
|
||||
eventHub.$emit('deleteEnvironment', this.environment);
|
||||
if (this.graphql) {
|
||||
this.$apollo
|
||||
.mutate({
|
||||
mutation: deleteEnvironmentMutation,
|
||||
variables: { environment: this.environment },
|
||||
})
|
||||
.then(([message]) => {
|
||||
if (message) {
|
||||
createFlash({ message });
|
||||
}
|
||||
})
|
||||
.catch((error) =>
|
||||
createFlash({
|
||||
message: s__(
|
||||
'Environments|An error occurred while deleting the environment. Check if the environment stopped; if not, stop it and try again.',
|
||||
),
|
||||
error,
|
||||
captureError: true,
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
eventHub.$emit('deleteEnvironment', this.environment);
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
import { GlDropdownItem, GlModalDirective } from '@gitlab/ui';
|
||||
import { s__ } from '~/locale';
|
||||
import eventHub from '../event_hub';
|
||||
import setEnvironmentToDelete from '../graphql/mutations/set_environment_to_delete.mutation.graphql';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
|
@ -20,6 +21,11 @@ export default {
|
|||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
graphql: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
|
@ -30,14 +36,25 @@ export default {
|
|||
title: s__('Environments|Delete environment'),
|
||||
},
|
||||
mounted() {
|
||||
eventHub.$on('deleteEnvironment', this.onDeleteEnvironment);
|
||||
if (!this.graphql) {
|
||||
eventHub.$on('deleteEnvironment', this.onDeleteEnvironment);
|
||||
}
|
||||
},
|
||||
beforeDestroy() {
|
||||
eventHub.$off('deleteEnvironment', this.onDeleteEnvironment);
|
||||
if (!this.graphql) {
|
||||
eventHub.$off('deleteEnvironment', this.onDeleteEnvironment);
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
onClick() {
|
||||
eventHub.$emit('requestDeleteEnvironment', this.environment);
|
||||
if (this.graphql) {
|
||||
this.$apollo.mutate({
|
||||
mutation: setEnvironmentToDelete,
|
||||
variables: { environment: this.environment },
|
||||
});
|
||||
} else {
|
||||
eventHub.$emit('requestDeleteEnvironment', this.environment);
|
||||
}
|
||||
},
|
||||
onDeleteEnvironment(environment) {
|
||||
if (this.environment.id === environment.id) {
|
||||
|
|
|
@ -0,0 +1,3 @@
|
|||
mutation SetEnvironmentToDelete($environment: Environment) {
|
||||
setEnvironmentToDelete(environment: $environment) @client
|
||||
}
|
|
@ -0,0 +1,7 @@
|
|||
query environmentToDelete {
|
||||
environmentToDelete @client {
|
||||
id
|
||||
name
|
||||
deletePath
|
||||
}
|
||||
}
|
|
@ -3,6 +3,7 @@ import { s__ } from '~/locale';
|
|||
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
|
||||
import pollIntervalQuery from './queries/poll_interval.query.graphql';
|
||||
import environmentToRollbackQuery from './queries/environment_to_rollback.query.graphql';
|
||||
import environmentToDeleteQuery from './queries/environment_to_delete.query.graphql';
|
||||
|
||||
const buildErrors = (errors = []) => ({
|
||||
errors,
|
||||
|
@ -67,7 +68,16 @@ export const resolvers = (endpoint) => ({
|
|||
});
|
||||
},
|
||||
deleteEnvironment(_, { environment: { deletePath } }) {
|
||||
return axios.delete(deletePath);
|
||||
return axios
|
||||
.delete(deletePath)
|
||||
.then(() => buildErrors())
|
||||
.catch(() =>
|
||||
buildErrors([
|
||||
s__(
|
||||
'Environments|An error occurred while deleting the environment. Check if the environment stopped; if not, stop it and try again.',
|
||||
),
|
||||
]),
|
||||
);
|
||||
},
|
||||
rollbackEnvironment(_, { environment, isLastDeployment }) {
|
||||
return axios
|
||||
|
@ -85,6 +95,12 @@ export const resolvers = (endpoint) => ({
|
|||
]);
|
||||
});
|
||||
},
|
||||
setEnvironmentToDelete(_, { environment }, { client }) {
|
||||
client.writeQuery({
|
||||
query: environmentToDeleteQuery,
|
||||
data: { environmentToDelete: environment },
|
||||
});
|
||||
},
|
||||
setEnvironmentToRollback(_, { environment }, { client }) {
|
||||
client.writeQuery({
|
||||
query: environmentToRollbackQuery,
|
||||
|
|
|
@ -58,6 +58,7 @@ type LocalErrors {
|
|||
extend type Query {
|
||||
environmentApp: LocalEnvironmentApp
|
||||
folder(environment: NestedLocalEnvironmentInput): LocalEnvironmentFolder
|
||||
environmentToDelete: LocalEnvironment
|
||||
environmentToRollback: LocalEnvironment
|
||||
isLastDeployment: Boolean
|
||||
}
|
||||
|
@ -67,5 +68,6 @@ extend type Mutation {
|
|||
deleteEnvironment(environment: LocalEnvironmentInput): LocalErrors
|
||||
rollbackEnvironment(environment: LocalEnvironmentInput): LocalErrors
|
||||
cancelAutoStop(environment: LocalEnvironmentInput): LocalErrors
|
||||
setEnvironmentToDelete(environment: LocalEnvironmentInput): LocalErrors
|
||||
setEnvironmentToRollback(environment: LocalEnvironmentInput): LocalErrors
|
||||
}
|
||||
|
|
|
@ -13,3 +13,42 @@ export default function findAndFollowLink(selector) {
|
|||
visitUrl(link);
|
||||
}
|
||||
}
|
||||
|
||||
export function prefetchDocument(url) {
|
||||
const newPrefetchLink = document.createElement('link');
|
||||
newPrefetchLink.rel = 'prefetch';
|
||||
newPrefetchLink.href = url;
|
||||
newPrefetchLink.setAttribute('as', 'document');
|
||||
document.head.appendChild(newPrefetchLink);
|
||||
}
|
||||
|
||||
export function initPrefetchLinks(selector) {
|
||||
document.querySelectorAll(selector).forEach((el) => {
|
||||
let mouseOverTimer;
|
||||
|
||||
const mouseOutHandler = () => {
|
||||
if (mouseOverTimer) {
|
||||
clearTimeout(mouseOverTimer);
|
||||
mouseOverTimer = undefined;
|
||||
}
|
||||
};
|
||||
|
||||
const mouseOverHandler = () => {
|
||||
el.addEventListener('mouseout', mouseOutHandler, { once: true, passive: true });
|
||||
|
||||
mouseOverTimer = setTimeout(() => {
|
||||
if (el.href) prefetchDocument(el.href);
|
||||
|
||||
// Only execute once
|
||||
el.removeEventListener('mouseover', mouseOverHandler, true);
|
||||
|
||||
mouseOverTimer = undefined;
|
||||
}, 100);
|
||||
};
|
||||
|
||||
el.addEventListener('mouseover', mouseOverHandler, {
|
||||
capture: true,
|
||||
passive: true,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
|
|
@ -14,6 +14,7 @@ import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
|
|||
import { initRails } from '~/lib/utils/rails_ujs';
|
||||
import * as popovers from '~/popovers';
|
||||
import * as tooltips from '~/tooltips';
|
||||
import { initPrefetchLinks } from '~/lib/utils/navigation_utility';
|
||||
import initAlertHandler from './alert_handler';
|
||||
import { addDismissFlashClickListener } from './flash';
|
||||
import initTodoToggle from './header';
|
||||
|
@ -90,6 +91,7 @@ function deferredInitialisation() {
|
|||
initTopNav();
|
||||
initBreadcrumbs();
|
||||
initTodoToggle();
|
||||
initPrefetchLinks('.js-prefetch-document');
|
||||
initLogoAnimation();
|
||||
initServicePingConsent();
|
||||
initUserPopovers();
|
||||
|
|
|
@ -77,10 +77,6 @@ class InvitesController < ApplicationController
|
|||
def track_invite_join_click
|
||||
return unless member && initial_invite_email?
|
||||
|
||||
if params[:experiment_name] == 'invite_email_preview_text'
|
||||
experiment(:invite_email_preview_text, actor: member).track(:join_clicked)
|
||||
end
|
||||
|
||||
Gitlab::Tracking.event(self.class.name, 'join_clicked', label: 'invite_email', property: member.id.to_s)
|
||||
end
|
||||
|
||||
|
@ -102,7 +98,6 @@ class InvitesController < ApplicationController
|
|||
session[:invite_email] = member.invite_email
|
||||
|
||||
session[:originating_member_id] = member.id if initial_invite_email?
|
||||
session[:invite_email_experiment_name] = params[:experiment_name] if initial_invite_email? && params[:experiment_name]
|
||||
end
|
||||
|
||||
def initial_invite_email?
|
||||
|
|
|
@ -210,8 +210,6 @@ class RegistrationsController < Devise::RegistrationsController
|
|||
|
||||
return unless member
|
||||
|
||||
experiment_name = session.delete(:invite_email_experiment_name)
|
||||
experiment(:invite_email_preview_text, actor: member).track(:accepted) if experiment_name == 'invite_email_preview_text'
|
||||
Gitlab::Tracking.event(self.class.name, 'accepted', label: 'invite_email', property: member.id.to_s)
|
||||
end
|
||||
|
||||
|
|
|
@ -20,14 +20,4 @@ module NotifyHelper
|
|||
|
||||
(source.description || default_description).truncate(200, separator: ' ')
|
||||
end
|
||||
|
||||
def invited_join_url(token, member)
|
||||
additional_params = { invite_type: Emails::Members::INITIAL_INVITE }
|
||||
|
||||
if experiment(:invite_email_preview_text, actor: member).enabled?
|
||||
additional_params[:experiment_name] = 'invite_email_preview_text'
|
||||
end
|
||||
|
||||
invite_url(token, additional_params)
|
||||
end
|
||||
end
|
||||
|
|
|
@ -46,7 +46,7 @@
|
|||
= sprite_icon(search_menu_item.fetch(:icon))
|
||||
- if header_link?(:issues)
|
||||
= nav_link(path: 'dashboard#issues', html_options: { class: "user-counter" }) do
|
||||
= link_to assigned_issues_dashboard_path, title: _('Issues'), class: 'dashboard-shortcuts-issues', aria: { label: _('Issues') },
|
||||
= link_to assigned_issues_dashboard_path, title: _('Issues'), class: 'dashboard-shortcuts-issues js-prefetch-document', aria: { label: _('Issues') },
|
||||
data: { qa_selector: 'issues_shortcut_button', toggle: 'tooltip', placement: 'bottom',
|
||||
track_label: 'main_navigation',
|
||||
track_action: 'click_issues_link',
|
||||
|
@ -75,18 +75,18 @@
|
|||
%li.dropdown-header
|
||||
= _('Merge requests')
|
||||
%li
|
||||
= link_to assigned_mrs_dashboard_path, class: 'gl-display-flex! gl-align-items-center' do
|
||||
= link_to assigned_mrs_dashboard_path, class: 'gl-display-flex! gl-align-items-center js-prefetch-document' do
|
||||
= _('Assigned to you')
|
||||
%span.badge.gl-badge.badge-pill.badge-muted.merge-request-badge.gl-ml-auto.js-assigned-mr-count{ class: "" }
|
||||
= user_merge_requests_counts[:assigned]
|
||||
%li
|
||||
= link_to reviewer_mrs_dashboard_path, class: 'gl-display-flex! gl-align-items-center' do
|
||||
= link_to reviewer_mrs_dashboard_path, class: 'gl-display-flex! gl-align-items-center js-prefetch-document' do
|
||||
= _('Review requests for you')
|
||||
%span.badge.gl-badge.badge-pill.badge-muted.merge-request-badge.gl-ml-auto.js-reviewer-mr-count{ class: "" }
|
||||
= user_merge_requests_counts[:review_requested]
|
||||
- if header_link?(:todos)
|
||||
= nav_link(controller: 'dashboard/todos', html_options: { class: "user-counter" }) do
|
||||
= link_to dashboard_todos_path, title: _('To-Do List'), aria: { label: _('To-Do List') }, class: 'shortcuts-todos',
|
||||
= link_to dashboard_todos_path, title: _('To-Do List'), aria: { label: _('To-Do List') }, class: 'shortcuts-todos js-prefetch-document',
|
||||
data: { qa_selector: 'todos_shortcut_button', toggle: 'tooltip', placement: 'bottom',
|
||||
track_label: 'main_navigation',
|
||||
track_action: 'click_to_do_link',
|
||||
|
|
|
@ -6,17 +6,15 @@
|
|||
role: member.human_access.downcase }
|
||||
- join_text = s_('InviteEmail|Join now')
|
||||
- inviter_name = member.created_by.name if member.created_by
|
||||
- join_url = invite_url(@token, invite_type: Emails::Members::INITIAL_INVITE)
|
||||
|
||||
- experiment(:invite_email_preview_text, actor: member) do |experiment_instance|
|
||||
- experiment_instance.use {}
|
||||
- experiment_instance.candidate do
|
||||
= content_for :preview_text do
|
||||
%div{ style: "display:none;font-size:1px;line-height:1px;max-height:0px;max-width:0px;opacity:0;overflow:hidden;" }
|
||||
- if member.created_by
|
||||
= s_('InviteEmail|Join your team on GitLab! %{inviter} invited you to %{project_or_group_name}') % { inviter: inviter_name, project_or_group_name: placeholders[:project_or_group_name] }
|
||||
- else
|
||||
= s_('InviteEmail|Join your team on GitLab! You are invited to %{project_or_group_name}') % { project_or_group_name: placeholders[:project_or_group_name] }
|
||||
= gmail_goto_action(join_text, invited_join_url(@token, member))
|
||||
= content_for :preview_text do
|
||||
%div{ style: "display:none;font-size:1px;line-height:1px;max-height:0px;max-width:0px;opacity:0;overflow:hidden;" }
|
||||
- if member.created_by
|
||||
= s_('InviteEmail|Join your team on GitLab! %{inviter} invited you to %{project_or_group_name}') % { inviter: inviter_name, project_or_group_name: placeholders[:project_or_group_name] }
|
||||
- else
|
||||
= s_('InviteEmail|Join your team on GitLab! You are invited to %{project_or_group_name}') % { project_or_group_name: placeholders[:project_or_group_name] }
|
||||
= gmail_goto_action(join_text, join_url)
|
||||
|
||||
%tr
|
||||
%td.text-content{ colspan: 2 }
|
||||
|
@ -32,7 +30,7 @@
|
|||
- else
|
||||
= html_escape(s_("InviteEmail|You are invited to join the %{strong_start}%{project_or_group_name}%{strong_end}%{br_tag}%{project_or_group} as a %{role}")) % placeholders
|
||||
%p.invite-actions
|
||||
= link_to join_text, invited_join_url(@token, member), class: 'invite-btn-join'
|
||||
= link_to join_text, join_url, class: 'invite-btn-join'
|
||||
%tr.border-top
|
||||
%td.text-content.mailer-align-left.half-width
|
||||
%h4
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
.issuable-main-info
|
||||
.merge-request-title.title
|
||||
%span.merge-request-title-text.js-onboarding-mr-item
|
||||
= link_to merge_request.title, merge_request_path(merge_request)
|
||||
= link_to merge_request.title, merge_request_path(merge_request), class: 'js-prefetch-document'
|
||||
- if merge_request.tasks?
|
||||
%span.task-status.d-none.d-sm-inline-block
|
||||
|
||||
|
|
|
@ -1577,7 +1577,7 @@
|
|||
:worker_name: Ci::CreateDownstreamPipelineWorker
|
||||
:feature_category: :continuous_integration
|
||||
:has_external_dependencies:
|
||||
:urgency: :low
|
||||
:urgency: :high
|
||||
:resource_boundary: :cpu
|
||||
:weight: 3
|
||||
:idempotent:
|
||||
|
|
|
@ -7,6 +7,7 @@ module Ci
|
|||
|
||||
sidekiq_options retry: 3
|
||||
worker_resource_boundary :cpu
|
||||
urgency :high
|
||||
|
||||
def perform(bridge_id)
|
||||
::Ci::Bridge.find_by_id(bridge_id).try do |bridge|
|
||||
|
|
|
@ -1,8 +0,0 @@
|
|||
---
|
||||
name: invite_email_preview_text
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/67236
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/growth/team-tasks/-/issues/421
|
||||
milestone: '14.2'
|
||||
type: experiment
|
||||
group: group::expansion
|
||||
default_enabled: false
|
|
@ -245,8 +245,6 @@ The request was processed by `Projects::TreeController`.
|
|||
|
||||
## `api_json.log`
|
||||
|
||||
> Introduced in GitLab 10.0.
|
||||
|
||||
Depending on your installation method, this file is located at:
|
||||
|
||||
- Omnibus GitLab: `/var/log/gitlab/gitlab-rails/api_json.log`
|
||||
|
@ -367,8 +365,6 @@ like this example:
|
|||
|
||||
## `kubernetes.log`
|
||||
|
||||
> Introduced in GitLab 11.6.
|
||||
|
||||
Depending on your installation method, this file is located at:
|
||||
|
||||
- Omnibus GitLab: `/var/log/gitlab/gitlab-rails/kubernetes.log`
|
||||
|
@ -696,8 +692,6 @@ on a project.
|
|||
|
||||
## `importer.log`
|
||||
|
||||
> Introduced in GitLab 11.3.
|
||||
|
||||
Depending on your installation method, this file is located at:
|
||||
|
||||
- Omnibus GitLab: `/var/log/gitlab/gitlab-rails/importer.log`
|
||||
|
@ -915,8 +909,6 @@ For example:
|
|||
|
||||
## `geo.log` **(PREMIUM SELF)**
|
||||
|
||||
> Introduced in 9.5.
|
||||
|
||||
Geo stores structured log messages in a `geo.log` file. For Omnibus GitLab
|
||||
installations, this file is at `/var/log/gitlab/gitlab-rails/geo.log`.
|
||||
|
||||
|
@ -934,8 +926,6 @@ This message shows that Geo detected that a repository update was needed for pro
|
|||
|
||||
## `update_mirror_service_json.log`
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/commit/7f637e2af7006dc2b1b2649d9affc0b86cfb33c4) in GitLab 11.12.
|
||||
|
||||
Depending on your installation method, this file is located at:
|
||||
|
||||
- Omnibus GitLab: `/var/log/gitlab/gitlab-rails/update_mirror_service_json.log`
|
||||
|
|
|
@ -218,8 +218,8 @@ We recommend using the [consolidated object storage settings](../object_storage.
|
|||
|
||||
### Migrating local packages to object storage
|
||||
|
||||
After [configuring the object storage](#using-object-storage), you may use the
|
||||
following task to migrate existing packages from the local storage to the remote one.
|
||||
After [configuring the object storage](#using-object-storage), use the following task to
|
||||
migrate existing packages from the local storage to the remote storage.
|
||||
The processing is done in a background worker and requires **no downtime**.
|
||||
|
||||
For Omnibus GitLab:
|
||||
|
@ -234,11 +234,13 @@ For installations from source:
|
|||
RAILS_ENV=production sudo -u git -H bundle exec rake gitlab:packages:migrate
|
||||
```
|
||||
|
||||
You can optionally track progress and verify that all packages migrated successfully.
|
||||
You can optionally track progress and verify that all packages migrated successfully using the
|
||||
[PostgreSQL console](https://docs.gitlab.com/omnibus/settings/database.html#connecting-to-the-bundled-postgresql-database):
|
||||
|
||||
From the [PostgreSQL console](https://docs.gitlab.com/omnibus/settings/database.html#connecting-to-the-bundled-postgresql-database)
|
||||
(`sudo gitlab-psql -d gitlabhq_production` for Omnibus GitLab), verify that `objectstg` below (where
|
||||
`file_store=2`) has the count of all packages:
|
||||
- `sudo gitlab-rails dbconsole` for Omnibus GitLab instances.
|
||||
- `sudo -u git -H psql -d gitlabhq_production` for source-installed instances.
|
||||
|
||||
Verify `objectstg` below (where `store=2`) has count of all packages:
|
||||
|
||||
```shell
|
||||
gitlabhq_production=# SELECT count(*) AS total, sum(case when file_store = '1' then 1 else 0 end) AS filesystem, sum(case when file_store = '2' then 1 else 0 end) AS objectstg FROM packages_package_files;
|
||||
|
@ -247,3 +249,9 @@ total | filesystem | objectstg
|
|||
------+------------+-----------
|
||||
34 | 0 | 34
|
||||
```
|
||||
|
||||
Verify that there are no files on disk in the `packages` folder:
|
||||
|
||||
```shell
|
||||
sudo find /var/opt/gitlab/gitlab-rails/shared/packages -type f | grep -v tmp | wc -l
|
||||
```
|
||||
|
|
|
@ -258,7 +258,6 @@ control over how the Pages daemon runs and serves content in your environment.
|
|||
| `pages_path` | The directory on disk where pages are stored, defaults to `GITLAB-RAILS/shared/pages`. |
|
||||
| **`pages_nginx[]`** | |
|
||||
| `enable` | Include a virtual host `server{}` block for Pages inside NGINX. Needed for NGINX to proxy traffic back to the Pages daemon. Set to `false` if the Pages daemon should directly receive all requests, for example, when using [custom domains](index.md#custom-domains). |
|
||||
| `FF_ENABLE_REDIRECTS` | Feature flag to enable/disable redirects (enabled by default). Read the [redirects documentation](../../user/project/pages/redirects.md#feature-flag-for-redirects) for more information. |
|
||||
| `FF_ENABLE_PLACEHOLDERS` | Feature flag to enable/disable rewrites (disabled by default). Read the [redirects documentation](../../user/project/pages/redirects.md#feature-flag-for-rewrites) for more information. |
|
||||
| `use_legacy_storage` | Temporarily-introduced parameter allowing to use legacy domain configuration source and storage. [Removed in 14.3](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/6166). |
|
||||
| `rate_limit_source_ip` | Rate limit per source IP in number of requests per second. Set to `0` to disable this feature. |
|
||||
|
@ -1059,11 +1058,11 @@ Source-IP rate limits are enforced using the following:
|
|||
gitlab_pages['rate_limit_source_ip_burst'] = 600
|
||||
```
|
||||
|
||||
1. To reject requests that exceed the specified limits, enable the `FF_ENABLE_RATE_LIMITER` feature flag in
|
||||
1. To reject requests that exceed the specified limits, enable the `FF_ENFORCE_IP_RATE_LIMITS` feature flag in
|
||||
`/etc/gitlab/gitlab.rb`:
|
||||
|
||||
```ruby
|
||||
gitlab_pages['env'] = {'FF_ENABLE_RATE_LIMITER' => 'true'}
|
||||
gitlab_pages['env'] = {'FF_ENFORCE_IP_RATE_LIMITS' => 'true'}
|
||||
```
|
||||
|
||||
1. [Reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure).
|
||||
|
|
|
@ -12,6 +12,7 @@ full list of reference architectures, see
|
|||
|
||||
> - **Supported users (approximate):** 10,000
|
||||
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
|
||||
> - **Estimated Costs:** [GCP](https://cloud.google.com/products/calculator#id=e77713f6-dc0b-4bb3-bcef-cea904ac8efd)
|
||||
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
|
||||
> - **Performance tested daily with the [GitLab Performance Tool](https://gitlab.com/gitlab-org/quality/performance)**:
|
||||
> - **Test requests per second (RPS) rates:** API: 200 RPS, Web: 20 RPS, Git (Pull): 20 RPS, Git (Push): 4 RPS
|
||||
|
@ -133,21 +134,34 @@ monitor .[#7FFFD4,norank]u--> elb
|
|||
@enduml
|
||||
```
|
||||
|
||||
The Google Cloud Platform (GCP) architectures were built and tested using the
|
||||
## Requirements
|
||||
|
||||
Before starting, you should take note of the following requirements / guidance for this reference architecture.
|
||||
|
||||
### Supported CPUs
|
||||
|
||||
This reference architecture was built and tested on Google Cloud Platform (GCP) using the
|
||||
[Intel Xeon E5 v3 (Haswell)](https://cloud.google.com/compute/docs/cpu-platforms)
|
||||
CPU platform. On different hardware you may find that adjustments, either lower
|
||||
or higher, are required for your CPU or node counts. For more information, see
|
||||
our [Sysbench](https://github.com/akopytov/sysbench)-based
|
||||
[CPU benchmarks](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Reference-Architectures/GCP-CPU-Benchmarks).
|
||||
|
||||
Due to better performance and availability, for data objects (such as LFS,
|
||||
uploads, or artifacts), using an [object storage service](#configure-the-object-storage)
|
||||
is recommended.
|
||||
### Supported infrastructure
|
||||
|
||||
It's also worth noting that at this time [Praefect requires its own database server](../gitaly/praefect.md#postgresql) and
|
||||
As a general guidance, GitLab should run on most infrastructure such as reputable Cloud Providers (AWS, GCP, Azure) and their services, or self managed (ESXi) that meet both the specs detailed above, as well as any requirements in this section. However, this does not constitute a guarantee for every potential permutation.
|
||||
|
||||
Be aware of the following specific call outs:
|
||||
|
||||
- [Azure Database for PostgreSQL](https://docs.microsoft.com/en-us/azure/postgresql/#:~:text=Azure%20Database%20for%20PostgreSQL%20is,high%20availability%2C%20and%20dynamic%20scalability.) is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to known performance issues or missing features.
|
||||
- [Azure Blob Storage](https://docs.microsoft.com/en-us/azure/storage/blobs/) is recommended to be configured with [Premium accounts](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-block-blob-premium) to ensure consistent performance.
|
||||
|
||||
### Praefect PostgreSQL
|
||||
|
||||
It's worth noting that at this time [Praefect requires its own database server](../gitaly/praefect.md#postgresql) and
|
||||
that to achieve full High Availability a third-party PostgreSQL database solution will be required.
|
||||
We hope to offer a built in solutions for these restrictions in the future but in the meantime a non HA PostgreSQL server
|
||||
can be set up via Omnibus GitLab, which the above specs reflect. Refer to the following issues for more information: [`omnibus-gitlab#5919`](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5919) & [`gitaly#3398`](https://gitlab.com/gitlab-org/gitaly/-/issues/3398)
|
||||
can be set up via Omnibus GitLab, which the above specs reflect. Refer to the following issues for more information: [`omnibus-gitlab#5919`](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5919) & [`gitaly#3398`](https://gitlab.com/gitlab-org/gitaly/-/issues/3398).
|
||||
|
||||
## Setup components
|
||||
|
||||
|
|
|
@ -29,13 +29,30 @@ many organizations.
|
|||
| Up to 500 | 4 vCPU, 3.6 GB memory | `n1-highcpu-4` | `c5.xlarge` | `F4s v2` |
|
||||
| Up to 1,000 | 8 vCPU, 7.2 GB memory | `n1-highcpu-8` | `c5.2xlarge` | `F8s v2` |
|
||||
|
||||
The Google Cloud Platform (GCP) architectures were built and tested using the
|
||||
## Requirements
|
||||
|
||||
Before starting, you should take note of the following requirements / guidance for this reference architecture.
|
||||
|
||||
### Supported CPUs
|
||||
|
||||
This reference architecture was built and tested on Google Cloud Platform (GCP) using the
|
||||
[Intel Xeon E5 v3 (Haswell)](https://cloud.google.com/compute/docs/cpu-platforms)
|
||||
CPU platform. On different hardware you may find that adjustments, either lower
|
||||
or higher, are required for your CPU or node counts. For more information, see
|
||||
our [Sysbench](https://github.com/akopytov/sysbench)-based
|
||||
[CPU benchmarks](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Reference-Architectures/GCP-CPU-Benchmarks).
|
||||
|
||||
### Supported infrastructure
|
||||
|
||||
As a general guidance, GitLab should run on most infrastructure such as reputable Cloud Providers (AWS, GCP, Azure) and their services, or self managed (ESXi) that meet both the specs detailed above, as well as any requirements in this section. However, this does not constitute a guarantee for every potential permutation.
|
||||
|
||||
Be aware of the following specific call outs:
|
||||
|
||||
- [Azure Database for PostgreSQL](https://docs.microsoft.com/en-us/azure/postgresql/#:~:text=Azure%20Database%20for%20PostgreSQL%20is,high%20availability%2C%20and%20dynamic%20scalability.) is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to known performance issues or missing features.
|
||||
- [Azure Blob Storage](https://docs.microsoft.com/en-us/azure/storage/blobs/) is recommended to be configured with [Premium accounts](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-block-blob-premium) to ensure consistent performance.
|
||||
|
||||
### Swap
|
||||
|
||||
In addition to the stated configurations, we recommend having at least 2 GB of
|
||||
swap on your server, even if you currently have enough available memory. Having
|
||||
swap helps to reduce the chance of errors occurring if your available memory
|
||||
|
|
|
@ -12,6 +12,7 @@ full list of reference architectures, see
|
|||
|
||||
> - **Supported users (approximate):** 25,000
|
||||
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
|
||||
> - **Estimated Costs:** [GCP](https://cloud.google.com/products/calculator#id=925386e1-c01c-4c0a-8d7d-ebde1824b7b0)
|
||||
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
|
||||
> - **Performance tested weekly with the [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance)**:
|
||||
> - **Test requests per second (RPS) rates:** API: 500 RPS, Web: 50 RPS, Git (Pull): 50 RPS, Git (Push): 10 RPS
|
||||
|
@ -133,21 +134,34 @@ monitor .[#7FFFD4,norank]u--> elb
|
|||
@enduml
|
||||
```
|
||||
|
||||
The Google Cloud Platform (GCP) architectures were built and tested using the
|
||||
## Requirements
|
||||
|
||||
Before starting, you should take note of the following requirements / guidance for this reference architecture.
|
||||
|
||||
### Supported CPUs
|
||||
|
||||
This reference architecture was built and tested on Google Cloud Platform (GCP) using the
|
||||
[Intel Xeon E5 v3 (Haswell)](https://cloud.google.com/compute/docs/cpu-platforms)
|
||||
CPU platform. On different hardware you may find that adjustments, either lower
|
||||
or higher, are required for your CPU or node counts. For more information, see
|
||||
our [Sysbench](https://github.com/akopytov/sysbench)-based
|
||||
[CPU benchmarks](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Reference-Architectures/GCP-CPU-Benchmarks).
|
||||
|
||||
Due to better performance and availability, for data objects (such as LFS,
|
||||
uploads, or artifacts), using an [object storage service](#configure-the-object-storage)
|
||||
is recommended.
|
||||
### Supported infrastructure
|
||||
|
||||
It's also worth noting that at this time [Praefect requires its own database server](../gitaly/praefect.md#postgresql) and
|
||||
As a general guidance, GitLab should run on most infrastructure such as reputable Cloud Providers (AWS, GCP, Azure) and their services, or self managed (ESXi) that meet both the specs detailed above, as well as any requirements in this section. However, this does not constitute a guarantee for every potential permutation.
|
||||
|
||||
Be aware of the following specific call outs:
|
||||
|
||||
- [Azure Database for PostgreSQL](https://docs.microsoft.com/en-us/azure/postgresql/#:~:text=Azure%20Database%20for%20PostgreSQL%20is,high%20availability%2C%20and%20dynamic%20scalability.) is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to known performance issues or missing features.
|
||||
- [Azure Blob Storage](https://docs.microsoft.com/en-us/azure/storage/blobs/) is recommended to be configured with [Premium accounts](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-block-blob-premium) to ensure consistent performance.
|
||||
|
||||
### Praefect PostgreSQL
|
||||
|
||||
It's worth noting that at this time [Praefect requires its own database server](../gitaly/praefect.md#postgresql) and
|
||||
that to achieve full High Availability a third-party PostgreSQL database solution will be required.
|
||||
We hope to offer a built in solutions for these restrictions in the future but in the meantime a non HA PostgreSQL server
|
||||
can be set up via Omnibus GitLab, which the above specs reflect. Refer to the following issues for more information: [`omnibus-gitlab#5919`](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5919) & [`gitaly#3398`](https://gitlab.com/gitlab-org/gitaly/-/issues/3398)
|
||||
can be set up via Omnibus GitLab, which the above specs reflect. Refer to the following issues for more information: [`omnibus-gitlab#5919`](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5919) & [`gitaly#3398`](https://gitlab.com/gitlab-org/gitaly/-/issues/3398).
|
||||
|
||||
## Setup components
|
||||
|
||||
|
|
|
@ -13,6 +13,7 @@ For a full list of reference architectures, see
|
|||
> - **Supported users (approximate):** 2,000
|
||||
> - **High Availability:** No. For a highly-available environment, you can
|
||||
> follow a modified [3K reference architecture](3k_users.md#supported-modifications-for-lower-user-counts-ha).
|
||||
> - **Estimated Costs:** [GCP](https://cloud.google.com/products/calculator#id=84d11491-d72a-493c-a16e-650931faa658)
|
||||
> - **Cloud Native Hybrid:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
|
||||
> - **Performance tested daily with the [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance)**:
|
||||
> - **Test requests per second (RPS) rates:** API: 40 RPS, Web: 4 RPS, Git (Pull): 4 RPS, Git (Push): 1 RPS
|
||||
|
@ -27,7 +28,7 @@ For a full list of reference architectures, see
|
|||
| GitLab Rails | 2 | 8 vCPU, 7.2 GB memory | `n1-highcpu-8` | `c5.2xlarge` | `F8s v2` |
|
||||
| Monitoring node | 1 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` | `F2s v2` |
|
||||
| Object storage<sup>4</sup> | n/a | n/a | n/a | n/a | n/a |
|
||||
| NFS server (optional, not recommended) | 1 | 4 vCPU, 3.6 GB memory | `n1-highcpu-4` | `c5.xlarge` | `F4s v2` |
|
||||
| NFS server (non-Gitaly) | 1 | 4 vCPU, 3.6 GB memory | `n1-highcpu-4` | `c5.xlarge` | `F4s v2` |
|
||||
|
||||
<!-- markdownlint-disable MD029 -->
|
||||
1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery.
|
||||
|
@ -69,17 +70,27 @@ monitor .[#7FFFD4,norank]u--> elb
|
|||
@enduml
|
||||
```
|
||||
|
||||
The Google Cloud Platform (GCP) architectures were built and tested using the
|
||||
## Requirements
|
||||
|
||||
Before starting, you should take note of the following requirements / guidance for this reference architecture.
|
||||
|
||||
### Supported CPUs
|
||||
|
||||
This reference architecture was built and tested on Google Cloud Platform (GCP) using the
|
||||
[Intel Xeon E5 v3 (Haswell)](https://cloud.google.com/compute/docs/cpu-platforms)
|
||||
CPU platform. On different hardware you may find that adjustments, either lower
|
||||
or higher, are required for your CPU or node counts. For more information, see
|
||||
our [Sysbench](https://github.com/akopytov/sysbench)-based
|
||||
[CPU benchmarks](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Reference-Architectures/GCP-CPU-Benchmarks).
|
||||
|
||||
Due to better performance and availability, for data objects (such as LFS,
|
||||
uploads, or artifacts), using an [object storage service](#configure-the-object-storage)
|
||||
is recommended instead of using NFS. Using an object storage service also
|
||||
doesn't require you to provision and maintain a node.
|
||||
### Supported infrastructure
|
||||
|
||||
As general guidance, GitLab should run on most infrastructure — such as reputable cloud providers (AWS, GCP, Azure) and their services, or self-managed (ESXi) — that meets both the specs detailed above and any requirements in this section. However, this does not constitute a guarantee for every potential permutation.
|
||||
|
||||
Be aware of the following specific call outs:
|
||||
|
||||
- [Azure Database for PostgreSQL](https://docs.microsoft.com/en-us/azure/postgresql/#:~:text=Azure%20Database%20for%20PostgreSQL%20is,high%20availability%2C%20and%20dynamic%20scalability.) is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to known performance issues or missing features.
|
||||
- [Azure Blob Storage](https://docs.microsoft.com/en-us/azure/storage/blobs/) is recommended to be configured with [Premium accounts](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-block-blob-premium) to ensure consistent performance.
|
||||
|
||||
## Setup components
|
||||
|
||||
|
|
|
@ -22,6 +22,7 @@ For a full list of reference architectures, see
|
|||
|
||||
> - **Supported users (approximate):** 3,000
|
||||
> - **High Availability:** Yes, although [Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution
|
||||
> - **Estimated Costs:** [GCP](https://cloud.google.com/products/calculator/#id=ac4838e6-9c40-4a36-ac43-6d1bc1843e08)
|
||||
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
|
||||
> - **Performance tested weekly with the [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance)**:
|
||||
> - **Test requests per second (RPS) rates:** API: 60 RPS, Web: 6 RPS, Git (Pull): 6 RPS, Git (Push): 1 RPS
|
||||
|
@ -42,7 +43,7 @@ For a full list of reference architectures, see
|
|||
| GitLab Rails | 3 | 8 vCPU, 7.2 GB memory | `n1-highcpu-8` | `c5.2xlarge` | `F8s v2` |
|
||||
| Monitoring node | 1 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` | `F2s v2` |
|
||||
| Object storage<sup>4</sup> | n/a | n/a | n/a | n/a | n/a |
|
||||
| NFS server (optional, not recommended) | 1 | 4 vCPU, 3.6 GB memory | `n1-highcpu-4` | `c5.xlarge` | `F4s v2` |
|
||||
| NFS server (non-Gitaly) | 1 | 4 vCPU, 3.6 GB memory | `n1-highcpu-4` | `c5.xlarge` | `F4s v2` |
|
||||
|
||||
<!-- Disable ordered list rule https://github.com/DavidAnson/markdownlint/blob/main/doc/Rules.md#md029---ordered-list-item-prefix -->
|
||||
<!-- markdownlint-disable MD029 -->
|
||||
|
@ -139,27 +140,34 @@ monitor .[#7FFFD4,norank]u--> elb
|
|||
@enduml
|
||||
```
|
||||
|
||||
The Google Cloud Platform (GCP) architectures were built and tested using the
|
||||
## Requirements
|
||||
|
||||
Before starting, you should take note of the following requirements / guidance for this reference architecture.
|
||||
|
||||
### Supported CPUs
|
||||
|
||||
This reference architecture was built and tested on Google Cloud Platform (GCP) using the
|
||||
[Intel Xeon E5 v3 (Haswell)](https://cloud.google.com/compute/docs/cpu-platforms)
|
||||
CPU platform. On different hardware you may find that adjustments, either lower
|
||||
or higher, are required for your CPU or node counts. For more information, see
|
||||
our [Sysbench](https://github.com/akopytov/sysbench)-based
|
||||
[CPU benchmarks](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Reference-Architectures/GCP-CPU-Benchmarks).
|
||||
|
||||
Due to better performance and availability, for data objects (such as LFS,
|
||||
uploads, or artifacts), using an [object storage service](#configure-the-object-storage)
|
||||
is recommended instead of using NFS. Using an object storage service also
|
||||
doesn't require you to provision and maintain a node.
|
||||
### Supported infrastructure
|
||||
|
||||
[Praefect requires its own database server](../gitaly/praefect.md#postgresql),
|
||||
and a third-party PostgreSQL database solution is required to achieve full
|
||||
high availability. Although we hope to offer a built-in solution for these
|
||||
restrictions in the future, you can set up a non-HA PostgreSQL server by using
|
||||
Omnibus GitLab (which the previous specifications reflect). Refer to the
|
||||
following issues for more information:
|
||||
As general guidance, GitLab should run on most infrastructure — such as reputable cloud providers (AWS, GCP, Azure) and their services, or self-managed (ESXi) — that meets both the specs detailed above and any requirements in this section. However, this does not constitute a guarantee for every potential permutation.
|
||||
|
||||
- [`omnibus-gitlab#5919`](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5919)
|
||||
- [`gitaly#3398`](https://gitlab.com/gitlab-org/gitaly/-/issues/3398)
|
||||
Be aware of the following specific call outs:
|
||||
|
||||
- [Azure Database for PostgreSQL](https://docs.microsoft.com/en-us/azure/postgresql/#:~:text=Azure%20Database%20for%20PostgreSQL%20is,high%20availability%2C%20and%20dynamic%20scalability.) is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to known performance issues or missing features.
|
||||
- [Azure Blob Storage](https://docs.microsoft.com/en-us/azure/storage/blobs/) is recommended to be configured with [Premium accounts](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-block-blob-premium) to ensure consistent performance.
|
||||
|
||||
### Praefect PostgreSQL
|
||||
|
||||
It's worth noting that at this time [Praefect requires its own database server](../gitaly/praefect.md#postgresql) and
|
||||
that to achieve full High Availability a third-party PostgreSQL database solution will be required.
|
||||
We hope to offer a built-in solution for these restrictions in the future, but in the meantime a non-HA PostgreSQL server
|
||||
can be set up via Omnibus GitLab, which the above specs reflect. Refer to the following issues for more information: [`omnibus-gitlab#5919`](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5919) & [`gitaly#3398`](https://gitlab.com/gitlab-org/gitaly/-/issues/3398).
|
||||
|
||||
## Setup components
|
||||
|
||||
|
|
|
@ -12,6 +12,7 @@ full list of reference architectures, see
|
|||
|
||||
> - **Supported users (approximate):** 50,000
|
||||
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
|
||||
> - **Estimated Costs:** [GCP](https://cloud.google.com/products/calculator/#id=8006396b-88ee-40cd-a1c8-77cdefa4d3c8)
|
||||
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
|
||||
> - **Performance tested weekly with the [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance)**:
|
||||
> - **Test requests per second (RPS) rates:** API: 1000 RPS, Web: 100 RPS, Git (Pull): 100 RPS, Git (Push): 20 RPS
|
||||
|
@ -133,21 +134,34 @@ monitor .[#7FFFD4,norank]u--> elb
|
|||
@enduml
|
||||
```
|
||||
|
||||
The Google Cloud Platform (GCP) architectures were built and tested using the
|
||||
## Requirements
|
||||
|
||||
Before starting, you should take note of the following requirements / guidance for this reference architecture.
|
||||
|
||||
### Supported CPUs
|
||||
|
||||
This reference architecture was built and tested on Google Cloud Platform (GCP) using the
|
||||
[Intel Xeon E5 v3 (Haswell)](https://cloud.google.com/compute/docs/cpu-platforms)
|
||||
CPU platform. On different hardware you may find that adjustments, either lower
|
||||
or higher, are required for your CPU or node counts. For more information, see
|
||||
our [Sysbench](https://github.com/akopytov/sysbench)-based
|
||||
[CPU benchmarks](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Reference-Architectures/GCP-CPU-Benchmarks).
|
||||
|
||||
Due to better performance and availability, for data objects (such as LFS,
|
||||
uploads, or artifacts), using an [object storage service](#configure-the-object-storage)
|
||||
is recommended.
|
||||
### Supported infrastructure
|
||||
|
||||
It's also worth noting that at this time [Praefect requires its own database server](../gitaly/praefect.md#postgresql) and
|
||||
As general guidance, GitLab should run on most infrastructure — such as reputable cloud providers (AWS, GCP, Azure) and their services, or self-managed (ESXi) — that meets both the specs detailed above and any requirements in this section. However, this does not constitute a guarantee for every potential permutation.
|
||||
|
||||
Be aware of the following specific call outs:
|
||||
|
||||
- [Azure Database for PostgreSQL](https://docs.microsoft.com/en-us/azure/postgresql/#:~:text=Azure%20Database%20for%20PostgreSQL%20is,high%20availability%2C%20and%20dynamic%20scalability.) is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to known performance issues or missing features.
|
||||
- [Azure Blob Storage](https://docs.microsoft.com/en-us/azure/storage/blobs/) is recommended to be configured with [Premium accounts](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-block-blob-premium) to ensure consistent performance.
|
||||
|
||||
### Praefect PostgreSQL
|
||||
|
||||
It's worth noting that at this time [Praefect requires its own database server](../gitaly/praefect.md#postgresql) and
|
||||
that to achieve full High Availability a third-party PostgreSQL database solution will be required.
|
||||
We hope to offer a built-in solution for these restrictions in the future, but in the meantime a non-HA PostgreSQL server
|
||||
can be set up via Omnibus GitLab, which the above specs reflect. Refer to the following issues for more information: [`omnibus-gitlab#5919`](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5919) & [`gitaly#3398`](https://gitlab.com/gitlab-org/gitaly/-/issues/3398)
|
||||
can be set up via Omnibus GitLab, which the above specs reflect. Refer to the following issues for more information: [`omnibus-gitlab#5919`](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5919) & [`gitaly#3398`](https://gitlab.com/gitlab-org/gitaly/-/issues/3398).
|
||||
|
||||
## Setup components
|
||||
|
||||
|
|
|
@ -19,6 +19,7 @@ costly-to-operate environment by using the
|
|||
|
||||
> - **Supported users (approximate):** 5,000
|
||||
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
|
||||
> - **Estimated Costs:** [GCP](https://cloud.google.com/products/calculator/#id=8742e8ea-c08f-4e0a-b058-02f3a1c38a2f)
|
||||
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
|
||||
> - **Performance tested weekly with the [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance)**:
|
||||
> - **Test requests per second (RPS) rates:** API: 100 RPS, Web: 10 RPS, Git (Pull): 10 RPS, Git (Push): 2 RPS
|
||||
|
@ -39,7 +40,7 @@ costly-to-operate environment by using the
|
|||
| GitLab Rails | 3 | 16 vCPU, 14.4 GB memory | `n1-highcpu-16` | `c5.4xlarge` | `F16s v2`|
|
||||
| Monitoring node | 1 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` | `F2s v2` |
|
||||
| Object storage<sup>4</sup> | n/a | n/a | n/a | n/a | n/a |
|
||||
| NFS server (optional, not recommended) | 1 | 4 vCPU, 3.6 GB memory | `n1-highcpu-4` | `c5.xlarge` | `F4s v2` |
|
||||
| NFS server (non-Gitaly) | 1 | 4 vCPU, 3.6 GB memory | `n1-highcpu-4` | `c5.xlarge` | `F4s v2` |
|
||||
|
||||
<!-- Disable ordered list rule https://github.com/DavidAnson/markdownlint/blob/main/doc/Rules.md#md029---ordered-list-item-prefix -->
|
||||
<!-- markdownlint-disable MD029 -->
|
||||
|
@ -136,22 +137,34 @@ monitor .[#7FFFD4,norank]u--> elb
|
|||
@enduml
|
||||
```
|
||||
|
||||
The Google Cloud Platform (GCP) architectures were built and tested using the
|
||||
## Requirements
|
||||
|
||||
Before starting, you should take note of the following requirements / guidance for this reference architecture.
|
||||
|
||||
### Supported CPUs
|
||||
|
||||
This reference architecture was built and tested on Google Cloud Platform (GCP) using the
|
||||
[Intel Xeon E5 v3 (Haswell)](https://cloud.google.com/compute/docs/cpu-platforms)
|
||||
CPU platform. On different hardware you may find that adjustments, either lower
|
||||
or higher, are required for your CPU or node counts. For more information, see
|
||||
our [Sysbench](https://github.com/akopytov/sysbench)-based
|
||||
[CPU benchmarks](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Reference-Architectures/GCP-CPU-Benchmarks).
|
||||
|
||||
Due to better performance and availability, for data objects (such as LFS,
|
||||
uploads, or artifacts), using an [object storage service](#configure-the-object-storage)
|
||||
is recommended instead of using NFS. Using an object storage service also
|
||||
doesn't require you to provision and maintain a node.
|
||||
### Supported infrastructure
|
||||
|
||||
It's also worth noting that at this time [Praefect requires its own database server](../gitaly/praefect.md#postgresql) and
|
||||
As general guidance, GitLab should run on most infrastructure — such as reputable cloud providers (AWS, GCP, Azure) and their services, or self-managed (ESXi) — that meets both the specs detailed above and any requirements in this section. However, this does not constitute a guarantee for every potential permutation.
|
||||
|
||||
Be aware of the following specific call outs:
|
||||
|
||||
- [Azure Database for PostgreSQL](https://docs.microsoft.com/en-us/azure/postgresql/#:~:text=Azure%20Database%20for%20PostgreSQL%20is,high%20availability%2C%20and%20dynamic%20scalability.) is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to known performance issues or missing features.
|
||||
- [Azure Blob Storage](https://docs.microsoft.com/en-us/azure/storage/blobs/) is recommended to be configured with [Premium accounts](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-block-blob-premium) to ensure consistent performance.
|
||||
|
||||
### Praefect PostgreSQL
|
||||
|
||||
It's worth noting that at this time [Praefect requires its own database server](../gitaly/praefect.md#postgresql) and
|
||||
that to achieve full High Availability a third-party PostgreSQL database solution will be required.
|
||||
We hope to offer a built-in solution for these restrictions in the future, but in the meantime a non-HA PostgreSQL server
|
||||
can be set up via Omnibus GitLab, which the above specs reflect. Refer to the following issues for more information: [`omnibus-gitlab#5919`](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5919) & [`gitaly#3398`](https://gitlab.com/gitlab-org/gitaly/-/issues/3398)
|
||||
can be set up via Omnibus GitLab, which the above specs reflect. Refer to the following issues for more information: [`omnibus-gitlab#5919`](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5919) & [`gitaly#3398`](https://gitlab.com/gitlab-org/gitaly/-/issues/3398).
|
||||
|
||||
## Setup components
|
||||
|
||||
|
|
|
@ -191,3 +191,32 @@ The reference architectures for user counts [3,000](3k_users.md) and up support
|
|||
In the specific case you have the requirement to achieve HA but have a lower user count, select modifications to the [3,000 user](3k_users.md) architecture are supported.
|
||||
|
||||
For more details, [refer to this section in the architecture's documentation](3k_users.md#supported-modifications-for-lower-user-counts-ha).
|
||||
|
||||
## Testing process and results
|
||||
|
||||
The [Quality Engineering - Enablement team](https://about.gitlab.com/handbook/engineering/quality/quality-engineering/) does regular smoke and performance tests for the reference architectures to ensure they remain compliant.
|
||||
|
||||
In this section, we detail some of the process as well as the results.
|
||||
|
||||
Note the following about the testing process:
|
||||
|
||||
- Testing occurs against all main reference architectures and cloud providers in an automated and ad-hoc fashion.
|
||||
This is achieved through two tools built by the team:
|
||||
- The [GitLab Environment Toolkit](https://gitlab.com/gitlab-org/quality/gitlab-environment-toolkit) for building the environments.
|
||||
- The [GitLab Performance Tool](https://gitlab.com/gitlab-org/quality/performance) for performance testing.
|
||||
- Network latency on the test environments between components on all Cloud Providers were measured at <5ms. Note that this is shared as an observation and not as an implicit recommendation.
|
||||
- We aim to have a "test smart" approach where architectures tested have a good range that can also apply to others. Testing focuses on 10k Omnibus on GCP as the testing has shown this is a good bellwether for the other architectures and cloud providers as well as Cloud Native Hybrids.
|
||||
- Testing is done publicly and all results are shared.
|
||||
|
||||
The following table details the testing done against the reference architectures along with the frequency and results.
|
||||
Testing is continuously evaluated and iterated on, so the table is constantly updated.
|
||||
|
||||
| Reference architecture size | GCP | AWS | Azure |
|
||||
|-----------------------------|-----------------------------------------------------------------------------------------------------|-----|-------|
|
||||
| 1k | [Omnibus - Daily](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/1k) | - | - |
|
||||
| 2k | [Omnibus - Daily](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/2k) | - | - |
|
||||
| 3k | [Omnibus - Weekly](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/3k) | - | - |
|
||||
| 5k | [Omnibus - Weekly](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/5k) | - | - |
|
||||
| 10k | [Omnibus - Daily](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/10k) <br/> [Omnibus (inc Cloud Services) - Ad-Hoc](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Past-Results/10k) <br/> [Cloud Native Hybrid - Ad-Hoc](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Past-Results/10k-Cloud-Native-Hybrid) | [Omnibus (inc Cloud Services) - Ad-Hoc](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Past-Results/10k) | [Omnibus - Ad-Hoc](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Past-Results/10k) |
|
||||
| 25k | [Omnibus - Weekly](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/25k) | - | [Omnibus - Ad-Hoc](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Past-Results/25k) |
|
||||
| 50k | [Omnibus - Weekly](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/50k) | [Omnibus (inc Cloud Services) - Ad-Hoc](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Past-Results/50k) | - |
|
||||
|
|
|
@ -20,6 +20,19 @@ There are no fixed endpoints and no data model, so you can add
|
|||
to the API without creating [breaking changes](../../development/contributing/#breaking-changes).
|
||||
This enables us to have a [versionless API](https://graphql.org/learn/best-practices/#versioning).
|
||||
|
||||
## Vision
|
||||
|
||||
We want the GraphQL API to be the **primary** means of interacting
|
||||
programmatically with GitLab. To achieve this, it needs full coverage - anything
|
||||
possible in the REST API should also be possible in the GraphQL API.
|
||||
|
||||
To help us meet this vision, the frontend should use GraphQL in preference to
|
||||
the REST API for new features.
|
||||
|
||||
There are no plans to deprecate the REST API. To reduce the technical burden of
|
||||
supporting two APIs in parallel, they should share implementations as much as
|
||||
possible.
|
||||
|
||||
## Work with GraphQL
|
||||
|
||||
If you're new to the GitLab GraphQL API, see [Get started with GitLab GraphQL API](getting_started.md).
|
||||
|
@ -57,72 +70,60 @@ To generate the required documentation and schema, see
|
|||
|
||||
Run the commands using the [GitLab Development Kit](https://gitlab.com/gitlab-org/gitlab-development-kit/).
|
||||
|
||||
## Vision
|
||||
|
||||
We want the GraphQL API to be the **primary** means of interacting
|
||||
programmatically with GitLab. To achieve this, it needs full coverage - anything
|
||||
possible in the REST API should also be possible in the GraphQL API.
|
||||
|
||||
To help us meet this vision, the frontend should use GraphQL in preference to
|
||||
the REST API for new features.
|
||||
|
||||
There are no plans to deprecate the REST API. To reduce the technical burden of
|
||||
supporting two APIs in parallel, they should share implementations as much as
|
||||
possible.
|
||||
|
||||
## Breaking changes
|
||||
|
||||
The GitLab GraphQL API is [versionless](https://graphql.org/learn/best-practices/#versioning) and
|
||||
changes are made to the API in a way that maintains backwards-compatibility.
|
||||
The GitLab GraphQL API is [versionless](https://graphql.org/learn/best-practices/#versioning) and changes to the API are primarily backward-compatible.
|
||||
|
||||
Occasionally GitLab needs to change the GraphQL API in a way that is not backwards-compatible.
|
||||
These changes include the removal or renaming of fields, arguments or other parts of the schema.
|
||||
However, GitLab sometimes changes the GraphQL API in a way that is not backward-compatible. These changes are considered breaking changes, and
|
||||
can include removing or renaming fields, arguments, or other parts of the schema.
|
||||
When creating a breaking change, GitLab follows a [deprecation and removal process](#deprecation-and-removal-process).
|
||||
|
||||
In these situations, GitLab follows a [Deprecation and removal process](#deprecation-and-removal-process)
|
||||
where the deprecated part of the schema is supported for a period of time before being removed.
|
||||
Learn more about [breaking changes](../../development/contributing/#breaking-changes).
|
||||
|
||||
There are some changes which are explicitly [not considered breaking](../../development/contributing/#breaking-changes).
|
||||
Fields behind a feature flag and disabled by default do not follow the deprecation and removal process, and can be removed at any time without notice.
|
||||
|
||||
Clients should familiarize themselves with the process to avoid breaking changes affecting their integrations.
|
||||
To avoid having a breaking change affect your integrations, you should
|
||||
familiarize yourself with the deprecation and removal process.
|
||||
|
||||
WARNING:
|
||||
While GitLab will make all attempts to follow the [deprecation and removal process](#deprecation-and-removal-process),
|
||||
GitLab may on very rare occasions need to make immediate breaking changes to the GraphQL API to patch critical security or performance
|
||||
concerns and where the deprecation process would be considered to pose significant risk.
|
||||
GitLab makes all attempts to follow the [deprecation and removal process](#deprecation-and-removal-process).
|
||||
On rare occasions, GitLab might make immediate breaking changes to the GraphQL
|
||||
API to patch critical security or performance concerns if the deprecation
|
||||
process would pose significant risk.
|
||||
|
||||
NOTE:
|
||||
Fields behind a feature flag and disabled by default are exempt from the deprecation process,
|
||||
and can be removed at any time without notice.
|
||||
|
||||
### Deprecation and Removal process
|
||||
### Deprecation and removal process
|
||||
|
||||
The deprecation and removal process for the GitLab GraphQL API aligns with the wider GitLab
|
||||
[deprecation process](https://about.gitlab.com/handbook/product/gitlab-the-product/#breaking-changes-deprecations-and-removing-features).
|
||||
|
||||
Parts of the schema marked for removal from the GitLab GraphQL API are first [deprecated](https://about.gitlab.com/handbook/product/gitlab-the-product/#deprecation) but still available
|
||||
for at least six releases, and then [removed](https://about.gitlab.com/handbook/product/gitlab-the-product/#removal) entirely. Removals occur on `XX.0` major releases.
|
||||
Parts of the schema marked for removal from the GitLab GraphQL API are first
|
||||
[deprecated](https://about.gitlab.com/handbook/product/gitlab-the-product/#deprecation)
|
||||
but still available for at least six releases. They are then [removed](https://about.gitlab.com/handbook/product/gitlab-the-product/#removal)
|
||||
entirely during the next `XX.0` major release.
|
||||
|
||||
Items are marked as deprecated [in the schema](https://spec.graphql.org/October2021/#sec--deprecated),
|
||||
and are also displayed as deprecated in:
|
||||
Items are marked as deprecated in:
|
||||
|
||||
- The [schema](https://spec.graphql.org/October2021/#sec--deprecated).
|
||||
- The [GraphQL API reference](reference/index.md).
|
||||
- The [deprecation feature removal schedule](../../update/deprecations.md), which is linked to in release posts.
|
||||
- The [deprecation feature removal schedule](../../update/deprecations.md), which is linked from release posts.
|
||||
- Introspection queries of the GraphQL API.
|
||||
|
||||
NOTE:
|
||||
Consumers of the GraphQL API are encouraged to remove the use of deprecated schema in their GraphQL
|
||||
If you use the GraphQL API, we recommend you remove the deprecated schema from your GraphQL
|
||||
API calls as soon as possible to avoid experiencing breaking changes.
|
||||
|
||||
If an alternative is provided for the deprecated schema item, then its deprecation message mentions this.
|
||||
The deprecation message provides an alternative for the deprecated schema item,
|
||||
if applicable.
|
||||
|
||||
**Example:**
|
||||
#### Deprecation example
|
||||
|
||||
The following fields could both be removed in `14.0`:
|
||||
The following fields are deprecated in different minor releases, but both
|
||||
removed in GitLab 14.0:
|
||||
|
||||
| Field deprecated in | Reason |
|
||||
| --- | --- |
|
||||
| `12.7` | As GitLab traditionally has 12 minor releases per major release, the next major release that occurs 6 months after the field was deprecated is `14.0`, rather than `13.0`. |
|
||||
| `13.6` | The removal in `14.0` allows for 6 months of deprecation. |
|
||||
| ------------------- | --- |
|
||||
| 12.7 | GitLab traditionally has 12 minor releases per major release. To ensure the field is available for 6 more releases, it is removed in the 14.0 major release (and not 13.0). |
|
||||
| 13.6 | The removal in 14.0 allows for 6 months of availability. |
|
||||
|
||||
### List of removed items
|
||||
|
||||
|
@ -132,16 +133,18 @@ View the [list of items removed](removed_items.md) in previous releases.
|
|||
|
||||
The GraphQL API includes the following queries at the root level:
|
||||
|
||||
1. `project` : Project information, with many of its associations such as issues and merge requests.
|
||||
1. `group` : Basic group information and epics **(ULTIMATE)** are currently supported.
|
||||
1. `user` : Information about a particular user.
|
||||
1. `namespace` : Within a namespace it is also possible to fetch `projects`.
|
||||
1. `currentUser`: Information about the currently logged in user.
|
||||
1. `users`: Information about a collection of users.
|
||||
1. `metaData`: Metadata about GitLab and the GraphQL API.
|
||||
1. `snippets`: Snippets visible to the currently logged in user.
|
||||
Query | Description
|
||||
--------------|------------
|
||||
`project` | Project information and many of its associations, such as issues and merge requests.
|
||||
`group` | Basic group information and epics.
|
||||
`user` | Information about a particular user.
|
||||
`namespace` | The namespace and the `projects` in it.
|
||||
`currentUser` | Information about the signed-in user.
|
||||
`users` | Information about a collection of users.
|
||||
`metaData` | Metadata about GitLab and the GraphQL API.
|
||||
`snippets` | Snippets visible to the signed-in user.
|
||||
|
||||
New associations and root level objects are constantly being added.
|
||||
New associations and root level objects are regularly added.
|
||||
See the [GraphQL API Reference](reference/index.md) for up-to-date information.
|
||||
|
||||
Root-level queries are defined in
|
||||
|
@ -159,41 +162,33 @@ library GitLab uses on the backend.
|
|||
|
||||
The following limits apply to the GitLab GraphQL API.
|
||||
|
||||
### Max page size
|
||||
|
||||
By default, connections return at most `100` records ("nodes") per page,
|
||||
and this limit applies to most connections in the API. Particular connections
|
||||
may have different max page size limits that are higher or lower.
|
||||
Limit | Default
|
||||
---------------------|---------------------------------------------------------------------
|
||||
Max page size | 100 records (nodes) per page. Applies to most connections in the API. Particular connections may have different max page size limits that are higher or lower.
|
||||
[Max query complexity](#max-query-complexity) | `200` for unauthenticated requests and `250` for authenticated requests.
|
||||
Request timeout | 30 seconds.
|
||||
|
||||
### Max query complexity
|
||||
|
||||
The GitLab GraphQL API scores the _complexity_ of a query. Generally, larger
|
||||
queries will have a higher complexity score. This limit is designed to protect
|
||||
queries have a higher complexity score. This limit is designed to protect
|
||||
the API from performing queries that could negatively impact its overall performance.
|
||||
|
||||
The complexity of a single query is limited to a maximum of:
|
||||
You can [query](getting_started.md#query-complexity) the complexity score of a query
|
||||
and the limit for the request.
|
||||
|
||||
- `200` for unauthenticated requests.
|
||||
- `250` for authenticated requests.
|
||||
If a query exceeds the complexity limit, an error message response is
|
||||
returned.
|
||||
|
||||
The complexity score of a query and limit for the request [can be queried for](getting_started.md#query-complexity).
|
||||
|
||||
If a query exceeds the complexity limit an error message response will
|
||||
be returned.
|
||||
|
||||
In general, each field in a query will add `1` to the complexity score, although
|
||||
this can be higher or lower for particular fields. Sometimes the addition of
|
||||
In general, each field in a query adds `1` to the complexity score, although
|
||||
this can be higher or lower for particular fields. Sometimes, adding
|
||||
certain arguments may also increase the complexity of a query.
|
||||
|
||||
NOTE:
|
||||
The complexity limits may be revised in future, and additionally, the complexity
|
||||
of a query may be altered.
|
||||
|
||||
### Request timeout
|
||||
|
||||
Requests time out at 30 seconds.
|
||||
|
||||
### Spam
|
||||
## Spam
|
||||
|
||||
GraphQL mutations can be detected as spam. If this happens, a
|
||||
[GraphQL top-level error](https://spec.graphql.org/June2018/#sec-Errors) is raised. For example:
|
||||
|
@ -218,11 +213,11 @@ GraphQL mutations can be detected as spam. If this happens, a
|
|||
}
|
||||
```
|
||||
|
||||
If mutation is detected as potential spam and a CAPTCHA service is configured:
|
||||
If a mutation is detected as potential spam and a CAPTCHA service is configured:
|
||||
|
||||
- The `captchaSiteKey` should be used to obtain a CAPTCHA response value using the appropriate CAPTCHA API.
|
||||
- Use the `captchaSiteKey` to obtain a CAPTCHA response value using the appropriate CAPTCHA API.
|
||||
Only [Google reCAPTCHA v2](https://developers.google.com/recaptcha/docs/display) is supported.
|
||||
- The request can be resubmitted with the `X-GitLab-Captcha-Response` and `X-GitLab-Spam-Log-Id` headers set.
|
||||
- Resubmit the request with the `X-GitLab-Captcha-Response` and `X-GitLab-Spam-Log-Id` headers set.
|
||||
|
||||
```json
|
||||
{
|
||||
|
|
|
@ -1404,6 +1404,7 @@ Supported attributes:
|
|||
| `jobs_enabled` | boolean | **{dotted-circle}** No | _(Deprecated)_ Enable jobs for this project. Use `builds_access_level` instead. |
|
||||
| `lfs_enabled` | boolean | **{dotted-circle}** No | Enable LFS. |
|
||||
| `merge_commit_template` | string | **{dotted-circle}** No | [Template](../user/project/merge_requests/commit_templates.md) used to create merge commit message in merge requests. _([Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/20263) in GitLab 14.5.)_ |
|
||||
| `squash_commit_template` | string | **{dotted-circle}** No | [Template](../user/project/merge_requests/commit_templates.md) used to create squash commit message in merge requests. _([Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/345275) in GitLab 14.6.)_ |
|
||||
| `merge_method` | string | **{dotted-circle}** No | Set the [merge method](#project-merge-method) used. |
|
||||
| `merge_requests_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private`, or `enabled`. |
|
||||
| `merge_requests_enabled` | boolean | **{dotted-circle}** No | _(Deprecated)_ Enable merge requests for this project. Use `merge_requests_access_level` instead. |
|
||||
|
|
|
@ -154,7 +154,9 @@ To change the namespace linked to a subscription:
|
|||
[linked](#change-the-linked-account) GitLab SaaS account.
|
||||
1. Navigate to the **Manage Purchases** page.
|
||||
1. Select **Change linked namespace**.
|
||||
1. Select the desired group from the **This subscription is for** dropdown.
|
||||
1. Select the desired group from the **This subscription is for** dropdown. For a group to appear
|
||||
here, you must have the Owner [role](../user/permissions.md)
|
||||
for that group.
|
||||
1. Select **Proceed to checkout**.
|
||||
|
||||
Subscription charges are calculated based on the total number of users in a group, including its subgroups and nested projects. If the total number of users exceeds the number of seats in your subscription, your account is charged for the additional users.
|
||||
|
|
|
@ -180,6 +180,7 @@ You can [configure](#customizing-the-container-scanning-settings) analyzers by u
|
|||
| `CS_ANALYZER_IMAGE` | `registry.gitlab.com/security-products/container-scanning:4` | Docker image of the analyzer. | All |
|
||||
| `CS_DEFAULT_BRANCH_IMAGE` | `""` | The name of the `DOCKER_IMAGE` on the default branch. See [Setting the default branch image](#setting-the-default-branch-image) for more details. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/338877) in GitLab 14.5. | All |
|
||||
| `CS_DISABLE_DEPENDENCY_LIST` | `"false"` | Disable Dependency Scanning for packages installed in the scanned image. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/345434) in GitLab 14.6. | All |
|
||||
| `CS_DISABLE_LANGUAGE_VULNERABILITY_SCAN` | `"true"` | Disable scanning for language-specific packages installed in the scanned image. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/345434) in GitLab 14.6. | All |
|
||||
| `CS_DOCKER_INSECURE` | `"false"` | Allow access to secure Docker registries using HTTPS without validating the certificates. | All |
|
||||
| `CS_REGISTRY_INSECURE` | `"false"` | Allow access to insecure registries (HTTP only). Should only be set to `true` when testing the image locally. Works with all scanners, but the registry must listen on port `80/tcp` for Trivy to work. | All |
|
||||
| `CS_SEVERITY_THRESHOLD` | `UNKNOWN` | Severity level threshold. The scanner outputs vulnerabilities with severity level higher than or equal to this threshold. Supported levels are Unknown, Low, Medium, High, and Critical. | Trivy |
|
||||
|
|
|
@ -36,15 +36,11 @@ To authenticate to the Helm repository, you need either:
|
|||
|
||||
## Publish a package
|
||||
|
||||
WARNING:
|
||||
The `helm-push` command is broken in Helm 3.7. For more information, see the [open issue](https://github.com/chartmuseum/helm-push/issues/109)
|
||||
in the Chart Museum project.
|
||||
|
||||
NOTE:
|
||||
You can publish Helm charts with duplicate names or versions. If duplicates exist, GitLab always
|
||||
returns the chart with the latest version.
|
||||
|
||||
Once built, a chart can be uploaded to the desired channel with `curl` or `helm-push`:
|
||||
Once built, a chart can be uploaded to the desired channel with `curl` or `helm cm-push`:
|
||||
|
||||
- With `curl`:
|
||||
|
||||
|
@ -61,11 +57,11 @@ Once built, a chart can be uploaded to the desired channel with `curl` or `helm-
|
|||
[URL-encoded](../../../api/index.md#namespaced-path-encoding) path of the project (like `group%2Fproject`).
|
||||
- `<channel>`: the name of the channel (like `stable`).
|
||||
|
||||
- With the [`helm-push`](https://github.com/chartmuseum/helm-push/#readme) plugin:
|
||||
- With the [`helm cm-push`](https://github.com/chartmuseum/helm-push/#readme) plugin:
|
||||
|
||||
```shell
|
||||
helm repo add --username <username> --password <access_token> project-1 https://gitlab.example.com/api/v4/projects/<project_id>/packages/helm/<channel>
|
||||
helm push mychart-0.1.0.tgz project-1
|
||||
helm cm-push mychart-0.1.0.tgz project-1
|
||||
```
|
||||
|
||||
- `<username>`: the GitLab username or the deploy token username.
|
||||
|
@ -135,12 +131,6 @@ To fix the error, use the correct version syntax and upload the chart again.
|
|||
|
||||
### `helm push` results in an error
|
||||
|
||||
The `helm push` plugin is not yet supported in Helm 3.7. If you try to push a chart using
|
||||
`helm push`, it produces the following error:
|
||||
|
||||
```plaintext
|
||||
Error: this feature has been marked as experimental and is not enabled by default. Please set HELM_EXPERIMENTAL_OCI=1 in your environment to use this feature
|
||||
```
|
||||
|
||||
To continue to use the plugin, you can push an image using [curl](#use-cicd-to-publish-a-helm-package)
|
||||
or downgrade your version of Helm.
|
||||
Helm 3.7 introduced a breaking change for the `helm-push` plugin. You can update the
|
||||
[Chart Museum plugin](https://github.com/chartmuseum/helm-push/#readme)
|
||||
to use `helm cm-push`.
|
||||
|
|
|
@ -7,10 +7,10 @@ info: To determine the technical writer assigned to the Stage/Group associated w
|
|||
|
||||
# Personal access tokens **(FREE)**
|
||||
|
||||
> - Introduced in GitLab 12.6: [Notifications for expiring tokens](https://gitlab.com/gitlab-org/gitlab/-/issues/3649).
|
||||
> - Introduced in GitLab 12.6: [Token lifetime limits](https://gitlab.com/gitlab-org/gitlab/-/issues/3649).
|
||||
> - Introduced in GitLab 13.3: [Additional notifications for expiring tokens](https://gitlab.com/gitlab-org/gitlab/-/issues/214721).
|
||||
> - Introduced in GitLab 14.1: [Prefill token name and scopes](https://gitlab.com/gitlab-org/gitlab/-/issues/334664).
|
||||
> - Notifications for expiring tokens [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/3649) in GitLab 12.6.
|
||||
> - Token lifetime limits [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/3649) in GitLab 12.6.
|
||||
> - Additional notifications for expiring tokens [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/214721) in GitLab 13.3.
|
||||
> - Prefill for token name and scopes [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/334664) in GitLab 14.1.
|
||||
|
||||
Personal access tokens can be an alternative to [OAuth2](../../api/oauth2.md) and used to:
|
||||
|
||||
|
|
|
@ -92,7 +92,7 @@ you can explicitly set your own. The following HTTP codes are supported:
|
|||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-pages/-/merge_requests/458) in GitLab 14.3.
|
||||
> - Enabled on GitLab.com.
|
||||
> - Enabled by default in self-managed GitLab behind the [`FF_ENABLE_REDIRECTS` feature flag](#feature-flag-for-redirects).
|
||||
> - Enabled on self-managed in [GitLab 14.6](https://gitlab.com/gitlab-org/gitlab-pages/-/issues/618).
|
||||
|
||||
To create a redirect, add a rule that includes a `from` path, a `to` path,
|
||||
and an [HTTP status code](#http-status-codes):
|
||||
|
@ -261,36 +261,7 @@ However, there are some minor differences:
|
|||
literal `:placeholder`).
|
||||
- GitLab redirects to `/new/`.
|
||||
|
||||
## Features behind feature flags
|
||||
|
||||
Some Pages features are behind feature flags.
|
||||
|
||||
### Feature flag for redirects
|
||||
|
||||
FLAG:
|
||||
Redirects in GitLab Pages is under development, and is deployed behind a feature flag
|
||||
that is **enabled by default**.
|
||||
|
||||
To disable redirects, for [Omnibus installations](../../../administration/pages/index.md), define the
|
||||
`FF_ENABLE_REDIRECTS` environment variable in the
|
||||
[global settings](../../../administration/pages/index.md#global-settings).
|
||||
Add the following line to `/etc/gitlab/gitlab.rb` and
|
||||
[reconfigure the instance](../../../administration/restart_gitlab.md#omnibus-gitlab-reconfigure).
|
||||
|
||||
```ruby
|
||||
gitlab_pages['env']['FF_ENABLE_REDIRECTS'] = 'false'
|
||||
```
|
||||
|
||||
For [source installations](../../../administration/pages/source.md), define the
|
||||
`FF_ENABLE_REDIRECTS` environment variable, then
|
||||
[restart GitLab](../../../administration/restart_gitlab.md#installations-from-source):
|
||||
|
||||
```shell
|
||||
export FF_ENABLE_REDIRECTS="false"
|
||||
/path/to/pages/bin/gitlab-pages -config gitlab-pages.conf
|
||||
```
|
||||
|
||||
### Feature flag for rewrites
|
||||
## Feature flag for rewrites
|
||||
|
||||
FLAG:
|
||||
Rewrites in GitLab Pages is under development, and is deployed behind a feature flag
|
||||
|
|
|
@ -97,29 +97,6 @@ RSpec.describe InvitesController do
|
|||
)
|
||||
end
|
||||
|
||||
context 'when it is part of the invite_email_preview_text experiment' do
|
||||
let(:extra_params) { { invite_type: 'initial_email', experiment_name: 'invite_email_preview_text' } }
|
||||
|
||||
it 'tracks the initial join click from email' do
|
||||
experiment = double(track: true)
|
||||
allow(controller).to receive(:experiment).with(:invite_email_preview_text, actor: member).and_return(experiment)
|
||||
|
||||
request
|
||||
|
||||
expect(experiment).to have_received(:track).with(:join_clicked)
|
||||
end
|
||||
|
||||
context 'when member does not exist' do
|
||||
let(:raw_invite_token) { '_bogus_token_' }
|
||||
|
||||
it 'does not track the experiment' do
|
||||
expect(controller).not_to receive(:experiment).with(:invite_email_preview_text, actor: member)
|
||||
|
||||
request
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when member does not exist' do
|
||||
let(:raw_invite_token) { '_bogus_token_' }
|
||||
|
||||
|
@ -145,14 +122,6 @@ RSpec.describe InvitesController do
|
|||
label: 'invite_email'
|
||||
)
|
||||
end
|
||||
|
||||
context 'when it is not part of our invite email experiment' do
|
||||
it 'does not track via experiment' do
|
||||
expect(controller).not_to receive(:experiment).with(:invite_email_preview_text, actor: member)
|
||||
|
||||
request
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when not logged in' do
|
||||
|
|
|
@ -159,12 +159,11 @@ RSpec.describe RegistrationsController do
|
|||
let_it_be(:member) { create(:project_member, :invited, invite_email: user_params.dig(:user, :email)) }
|
||||
|
||||
let(:originating_member_id) { member.id }
|
||||
let(:extra_session_params) { {} }
|
||||
let(:session_params) do
|
||||
{
|
||||
invite_email: user_params.dig(:user, :email),
|
||||
originating_member_id: originating_member_id
|
||||
}.merge extra_session_params
|
||||
}
|
||||
end
|
||||
|
||||
context 'when member exists from the session key value' do
|
||||
|
@ -193,40 +192,6 @@ RSpec.describe RegistrationsController do
|
|||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with the invite_email_preview_text experiment', :experiment do
|
||||
let(:extra_session_params) { { invite_email_experiment_name: 'invite_email_preview_text' } }
|
||||
|
||||
context 'when member and invite_email_experiment_name exists from the session key value' do
|
||||
it 'tracks the invite acceptance' do
|
||||
expect(experiment(:invite_email_preview_text)).to track(:accepted)
|
||||
.with_context(actor: member)
|
||||
.on_next_instance
|
||||
|
||||
subject
|
||||
end
|
||||
end
|
||||
|
||||
context 'when member does not exist from the session key value' do
|
||||
let(:originating_member_id) { -1 }
|
||||
|
||||
it 'does not track invite acceptance' do
|
||||
expect(experiment(:invite_email_preview_text)).not_to track(:accepted)
|
||||
|
||||
subject
|
||||
end
|
||||
end
|
||||
|
||||
context 'when invite_email_experiment_name does not exist from the session key value' do
|
||||
let(:extra_session_params) { {} }
|
||||
|
||||
it 'does not track invite acceptance' do
|
||||
expect(experiment(:invite_email_preview_text)).not_to track(:accepted)
|
||||
|
||||
subject
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when invite email matches email used on registration' do
|
||||
|
|
|
@ -226,20 +226,6 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
|
|||
end
|
||||
end
|
||||
|
||||
context 'with invite email acceptance for the invite_email_preview_text experiment', :experiment do
|
||||
let(:extra_params) do
|
||||
{ invite_type: Emails::Members::INITIAL_INVITE, experiment_name: 'invite_email_preview_text' }
|
||||
end
|
||||
|
||||
it 'tracks the accepted invite' do
|
||||
expect(experiment(:invite_email_preview_text)).to track(:accepted)
|
||||
.with_context(actor: group_invite)
|
||||
.on_next_instance
|
||||
|
||||
fill_in_sign_up_form(new_user)
|
||||
end
|
||||
end
|
||||
|
||||
it 'signs up and redirects to the group activity page with all the project/groups invitation automatically accepted' do
|
||||
fill_in_sign_up_form(new_user)
|
||||
fill_in_welcome_form
|
||||
|
|
64
spec/frontend/environments/delete_environment_modal_spec.js
Normal file
64
spec/frontend/environments/delete_environment_modal_spec.js
Normal file
|
@ -0,0 +1,64 @@
|
|||
import { GlModal } from '@gitlab/ui';
|
||||
import { shallowMount } from '@vue/test-utils';
|
||||
import Vue, { nextTick } from 'vue';
|
||||
import VueApollo from 'vue-apollo';
|
||||
import { s__, sprintf } from '~/locale';
|
||||
import DeleteEnvironmentModal from '~/environments/components/delete_environment_modal.vue';
|
||||
import createMockApollo from 'helpers/mock_apollo_helper';
|
||||
import { resolvedEnvironment } from './graphql/mock_data';
|
||||
|
||||
Vue.use(VueApollo);
|
||||
|
||||
describe('~/environments/components/delete_environment_modal.vue', () => {
|
||||
let mockApollo;
|
||||
let deleteResolver;
|
||||
let wrapper;
|
||||
|
||||
const createComponent = ({ props = {}, apolloProvider } = {}) => {
|
||||
wrapper = shallowMount(DeleteEnvironmentModal, {
|
||||
propsData: {
|
||||
graphql: true,
|
||||
environment: resolvedEnvironment,
|
||||
...props,
|
||||
},
|
||||
apolloProvider,
|
||||
});
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
deleteResolver = jest.fn();
|
||||
mockApollo = createMockApollo([], {
|
||||
Mutation: { deleteEnvironment: deleteResolver },
|
||||
});
|
||||
});
|
||||
|
||||
it('should confirm the environment to delete', () => {
|
||||
createComponent({ apolloProvider: mockApollo });
|
||||
|
||||
expect(wrapper.text()).toBe(
|
||||
sprintf(
|
||||
s__(
|
||||
`Environments|Deleting the '%{environmentName}' environment cannot be undone. Do you want to delete it anyway?`,
|
||||
),
|
||||
{
|
||||
environmentName: resolvedEnvironment.name,
|
||||
},
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
it('should send the delete mutation on primary', async () => {
|
||||
createComponent({ apolloProvider: mockApollo });
|
||||
|
||||
wrapper.findComponent(GlModal).vm.$emit('primary');
|
||||
|
||||
await nextTick();
|
||||
|
||||
expect(deleteResolver).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
{ environment: resolvedEnvironment },
|
||||
expect.anything(),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
});
|
|
@ -1,37 +1,71 @@
|
|||
import { GlDropdownItem } from '@gitlab/ui';
|
||||
|
||||
import { shallowMount } from '@vue/test-utils';
|
||||
import Vue from 'vue';
|
||||
import VueApollo from 'vue-apollo';
|
||||
import setEnvironmentToDelete from '~/environments/graphql/mutations/set_environment_to_delete.mutation.graphql';
|
||||
import DeleteComponent from '~/environments/components/environment_delete.vue';
|
||||
import eventHub from '~/environments/event_hub';
|
||||
import createMockApollo from 'helpers/mock_apollo_helper';
|
||||
import { resolvedEnvironment } from './graphql/mock_data';
|
||||
|
||||
describe('External URL Component', () => {
|
||||
let wrapper;
|
||||
|
||||
const createWrapper = () => {
|
||||
const createWrapper = (props = {}, options = {}) => {
|
||||
wrapper = shallowMount(DeleteComponent, {
|
||||
...options,
|
||||
propsData: {
|
||||
environment: {},
|
||||
environment: resolvedEnvironment,
|
||||
...props,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const findDropdownItem = () => wrapper.find(GlDropdownItem);
|
||||
|
||||
beforeEach(() => {
|
||||
jest.spyOn(window, 'confirm');
|
||||
describe('event hub', () => {
|
||||
beforeEach(() => {
|
||||
createWrapper();
|
||||
});
|
||||
|
||||
createWrapper();
|
||||
it('should render a dropdown item to delete the environment', () => {
|
||||
expect(findDropdownItem().exists()).toBe(true);
|
||||
expect(wrapper.text()).toEqual('Delete environment');
|
||||
expect(findDropdownItem().attributes('variant')).toBe('danger');
|
||||
});
|
||||
|
||||
it('emits requestDeleteEnvironment in the event hub when button is clicked', () => {
|
||||
jest.spyOn(eventHub, '$emit');
|
||||
findDropdownItem().vm.$emit('click');
|
||||
expect(eventHub.$emit).toHaveBeenCalledWith('requestDeleteEnvironment', resolvedEnvironment);
|
||||
});
|
||||
});
|
||||
|
||||
it('should render a dropdown item to delete the environment', () => {
|
||||
expect(findDropdownItem().exists()).toBe(true);
|
||||
expect(wrapper.text()).toEqual('Delete environment');
|
||||
expect(findDropdownItem().attributes('variant')).toBe('danger');
|
||||
});
|
||||
describe('graphql', () => {
|
||||
Vue.use(VueApollo);
|
||||
let mockApollo;
|
||||
|
||||
it('emits requestDeleteEnvironment in the event hub when button is clicked', () => {
|
||||
jest.spyOn(eventHub, '$emit');
|
||||
findDropdownItem().vm.$emit('click');
|
||||
expect(eventHub.$emit).toHaveBeenCalledWith('requestDeleteEnvironment', wrapper.vm.environment);
|
||||
beforeEach(() => {
|
||||
mockApollo = createMockApollo();
|
||||
createWrapper(
|
||||
{ graphql: true, environment: resolvedEnvironment },
|
||||
{ apolloProvider: mockApollo },
|
||||
);
|
||||
});
|
||||
|
||||
it('should render a dropdown item to delete the environment', () => {
|
||||
expect(findDropdownItem().exists()).toBe(true);
|
||||
expect(wrapper.text()).toEqual('Delete environment');
|
||||
expect(findDropdownItem().attributes('variant')).toBe('danger');
|
||||
});
|
||||
|
||||
it('emits requestDeleteEnvironment in the event hub when button is clicked', () => {
|
||||
jest.spyOn(mockApollo.defaultClient, 'mutate');
|
||||
findDropdownItem().vm.$emit('click');
|
||||
expect(mockApollo.defaultClient.mutate).toHaveBeenCalledWith({
|
||||
mutation: setEnvironmentToDelete,
|
||||
variables: { environment: resolvedEnvironment },
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -2,6 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
|
|||
import axios from '~/lib/utils/axios_utils';
|
||||
import { resolvers } from '~/environments/graphql/resolvers';
|
||||
import environmentToRollback from '~/environments/graphql/queries/environment_to_rollback.query.graphql';
|
||||
import environmentToDelete from '~/environments/graphql/queries/environment_to_delete.query.graphql';
|
||||
import createMockApollo from 'helpers/mock_apollo_helper';
|
||||
import pollIntervalQuery from '~/environments/graphql/queries/poll_interval.query.graphql';
|
||||
import { TEST_HOST } from 'helpers/test_constants';
|
||||
|
@ -135,4 +136,19 @@ describe('~/frontend/environments/graphql/resolvers', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
describe('setEnvironmentToDelete', () => {
|
||||
it('should write the given environment to the cache', () => {
|
||||
localState.client.writeQuery = jest.fn();
|
||||
mockResolvers.Mutation.setEnvironmentToDelete(
|
||||
null,
|
||||
{ environment: resolvedEnvironment },
|
||||
localState,
|
||||
);
|
||||
|
||||
expect(localState.client.writeQuery).toHaveBeenCalledWith({
|
||||
query: environmentToDelete,
|
||||
data: { environmentToDelete: resolvedEnvironment },
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import findAndFollowLink from '~/lib/utils/navigation_utility';
|
||||
import * as navigationUtils from '~/lib/utils/navigation_utility';
|
||||
import { visitUrl } from '~/lib/utils/url_utility';
|
||||
|
||||
jest.mock('~/lib/utils/url_utility');
|
||||
|
@ -21,3 +22,91 @@ describe('findAndFollowLink', () => {
|
|||
expect(visitUrl).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('prefetchDocument', () => {
|
||||
it('creates a prefetch link tag', () => {
|
||||
const linkElement = document.createElement('link');
|
||||
|
||||
jest.spyOn(document, 'createElement').mockImplementation(() => linkElement);
|
||||
jest.spyOn(document.head, 'appendChild');
|
||||
|
||||
navigationUtils.prefetchDocument('index.htm');
|
||||
|
||||
expect(document.head.appendChild).toHaveBeenCalledWith(linkElement);
|
||||
expect(linkElement.href).toEqual('http://test.host/index.htm');
|
||||
expect(linkElement.rel).toEqual('prefetch');
|
||||
expect(linkElement.getAttribute('as')).toEqual('document');
|
||||
});
|
||||
});
|
||||
|
||||
describe('initPrefetchLinks', () => {
|
||||
let newLink;
|
||||
|
||||
beforeEach(() => {
|
||||
newLink = document.createElement('a');
|
||||
newLink.href = 'index_prefetch.htm';
|
||||
newLink.classList.add('js-test-prefetch-link');
|
||||
document.body.appendChild(newLink);
|
||||
});
|
||||
|
||||
it('adds to all links mouse out handlers when hovered', () => {
|
||||
const mouseOverEvent = new Event('mouseover');
|
||||
|
||||
jest.spyOn(newLink, 'addEventListener');
|
||||
|
||||
navigationUtils.initPrefetchLinks('.js-test-prefetch-link');
|
||||
newLink.dispatchEvent(mouseOverEvent);
|
||||
|
||||
expect(newLink.addEventListener).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('it is not fired when less then 100ms over link', () => {
|
||||
const mouseOverEvent = new Event('mouseover');
|
||||
const mouseOutEvent = new Event('mouseout');
|
||||
|
||||
jest.spyOn(newLink, 'addEventListener');
|
||||
jest.spyOn(navigationUtils, 'prefetchDocument').mockImplementation(() => true);
|
||||
|
||||
navigationUtils.initPrefetchLinks('.js-test-prefetch-link');
|
||||
newLink.dispatchEvent(mouseOverEvent);
|
||||
newLink.dispatchEvent(mouseOutEvent);
|
||||
|
||||
expect(navigationUtils.prefetchDocument).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
describe('executes correctly when hovering long enough', () => {
|
||||
const mouseOverEvent = new Event('mouseover');
|
||||
|
||||
beforeEach(() => {
|
||||
jest.useFakeTimers();
|
||||
|
||||
jest.spyOn(global, 'setTimeout');
|
||||
jest.spyOn(newLink, 'removeEventListener');
|
||||
});
|
||||
|
||||
it('calls prefetchDocument which adds to document', () => {
|
||||
jest.spyOn(document.head, 'appendChild');
|
||||
|
||||
navigationUtils.initPrefetchLinks('.js-test-prefetch-link');
|
||||
newLink.dispatchEvent(mouseOverEvent);
|
||||
|
||||
jest.runAllTimers();
|
||||
|
||||
expect(setTimeout).toHaveBeenCalledWith(expect.any(Function), 100);
|
||||
expect(document.head.appendChild).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('removes Event Listener when fired so only done once', () => {
|
||||
navigationUtils.initPrefetchLinks('.js-test-prefetch-link');
|
||||
newLink.dispatchEvent(mouseOverEvent);
|
||||
|
||||
jest.runAllTimers();
|
||||
|
||||
expect(newLink.removeEventListener).toHaveBeenCalledWith(
|
||||
'mouseover',
|
||||
expect.any(Function),
|
||||
true,
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -55,31 +55,4 @@ RSpec.describe NotifyHelper do
|
|||
def reference_link(entity, url)
|
||||
"<a href=\"#{url}\">#{entity.to_reference}</a>"
|
||||
end
|
||||
|
||||
describe '#invited_join_url' do
|
||||
let_it_be(:member) { create(:project_member) }
|
||||
|
||||
let(:token) { '_token_' }
|
||||
|
||||
context 'when invite_email_preview_text is enabled', :experiment do
|
||||
before do
|
||||
stub_experiments(invite_email_preview_text: :control)
|
||||
end
|
||||
|
||||
it 'has correct params' do
|
||||
expect(helper.invited_join_url(token, member))
|
||||
.to eq("http://test.host/-/invites/#{token}?experiment_name=invite_email_preview_text&invite_type=initial_email")
|
||||
end
|
||||
end
|
||||
|
||||
context 'when invite_email_preview_text is disabled' do
|
||||
before do
|
||||
stub_feature_flags(invite_email_preview_text: false)
|
||||
end
|
||||
|
||||
it 'has correct params' do
|
||||
expect(helper.invited_join_url(token, member)).to eq("http://test.host/-/invites/#{token}?invite_type=initial_email")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -829,7 +829,7 @@ RSpec.describe Notify do
|
|||
end
|
||||
|
||||
it_behaves_like 'an email sent from GitLab'
|
||||
it_behaves_like 'it should not have Gmail Actions links'
|
||||
it_behaves_like 'it should show Gmail Actions Join now link'
|
||||
it_behaves_like "a user cannot unsubscribe through footer link"
|
||||
it_behaves_like 'appearance header and footer enabled'
|
||||
it_behaves_like 'appearance header and footer not enabled'
|
||||
|
@ -867,27 +867,6 @@ RSpec.describe Notify do
|
|||
end
|
||||
end
|
||||
|
||||
context 'with invite_email_preview_text enabled', :experiment do
|
||||
before do
|
||||
stub_experiments(invite_email_preview_text: :control)
|
||||
end
|
||||
|
||||
it 'has the correct invite_url with params' do
|
||||
is_expected.to have_link('Join now',
|
||||
href: invite_url(project_member.invite_token,
|
||||
invite_type: Emails::Members::INITIAL_INVITE,
|
||||
experiment_name: 'invite_email_preview_text'))
|
||||
end
|
||||
|
||||
it 'tracks the sent invite' do
|
||||
expect(experiment(:invite_email_preview_text)).to track(:assignment)
|
||||
.with_context(actor: project_member)
|
||||
.on_next_instance
|
||||
|
||||
invite_email.deliver_now
|
||||
end
|
||||
end
|
||||
|
||||
context 'when invite email sent is tracked', :snowplow do
|
||||
it 'tracks the sent invite' do
|
||||
invite_email.deliver_now
|
||||
|
@ -1461,7 +1440,7 @@ RSpec.describe Notify do
|
|||
subject { described_class.member_invited_email('Group', group_member.id, group_member.invite_token) }
|
||||
|
||||
it_behaves_like 'an email sent from GitLab'
|
||||
it_behaves_like 'it should not have Gmail Actions links'
|
||||
it_behaves_like 'it should show Gmail Actions Join now link'
|
||||
it_behaves_like "a user cannot unsubscribe through footer link"
|
||||
it_behaves_like 'appearance header and footer enabled'
|
||||
it_behaves_like 'appearance header and footer not enabled'
|
||||
|
|
|
@ -11,7 +11,7 @@ RSpec.configure do |config|
|
|||
raise "$SUITE_FLAKY_RSPEC_REPORT_PATH is empty." if ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'].to_s.empty?
|
||||
raise "#{ENV['SUITE_FLAKY_RSPEC_REPORT_PATH']} doesn't exist" unless File.exist?(ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'])
|
||||
|
||||
RspecFlaky::Report.load(ENV['SUITE_FLAKY_RSPEC_REPORT_PATH']).map { |_, flaky_test_data| flaky_test_data["example_id"] }
|
||||
RspecFlaky::Report.load(ENV['SUITE_FLAKY_RSPEC_REPORT_PATH']).map { |_, flaky_test_data| flaky_test_data.to_h[:example_id] }
|
||||
rescue => e # rubocop:disable Style/RescueStandardError
|
||||
puts e
|
||||
[]
|
||||
|
|
|
@ -161,6 +161,12 @@ RSpec.shared_examples 'it should not have Gmail Actions links' do
|
|||
end
|
||||
end
|
||||
|
||||
RSpec.shared_examples 'it should show Gmail Actions Join now link' do
|
||||
it_behaves_like 'it should have Gmail Actions links'
|
||||
|
||||
it { is_expected.to have_body_text('Join now') }
|
||||
end
|
||||
|
||||
RSpec.shared_examples 'it should show Gmail Actions View Issue link' do
|
||||
it_behaves_like 'it should have Gmail Actions links'
|
||||
|
||||
|
|
Loading…
Reference in a new issue