Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-05-23 15:08:42 +00:00
parent 097eb36475
commit 3e53902ee1
40 changed files with 981 additions and 474 deletions

View File

@ -59,7 +59,7 @@ workflow:
variables:
PG_VERSION: "12"
DEFAULT_CI_IMAGE: "${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images:debian-${DEBIAN_VERSION}-ruby-2.7.patched-golang-1.16-git-2.33-lfs-2.9-chrome-97-node-16.14-yarn-1.22-postgresql-${PG_VERSION}-graphicsmagick-1.3.36"
DEFAULT_CI_IMAGE: "${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images/debian-${DEBIAN_VERSION}-ruby-2.7.patched-golang-1.17-node-16.14-postgresql-${PG_VERSION}:git-2.36-lfs-2.9-chrome-101-yarn-1.22-graphicsmagick-1.3.36"
RAILS_ENV: "test"
NODE_ENV: "test"
BUNDLE_WITHOUT: "production:development"

View File

@ -6,6 +6,7 @@ export * from './alert_management_alerts_api';
const PROJECTS_PATH = '/api/:version/projects.json';
const PROJECT_IMPORT_MEMBERS_PATH = '/api/:version/projects/:id/import_project_members/:project_id';
const PROJECT_REPOSITORY_SIZE_PATH = '/api/:version/projects/:id/repository_size';
export function getProjects(query, options, callback = () => {}) {
const url = buildApiUrl(PROJECTS_PATH);
@ -35,3 +36,11 @@ export function importProjectMembers(sourceId, targetId) {
.replace(':project_id', targetId);
return axios.post(url);
}
export function updateRepositorySize(projectPath) {
const url = buildApiUrl(PROJECT_REPOSITORY_SIZE_PATH).replace(
':id',
encodeURIComponent(projectPath),
);
return axios.post(url);
}

View File

@ -249,33 +249,39 @@ class HastToProseMirrorConverterState {
* @returns An object that contains ProseMirror node factories
*/
const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source) => {
const handlers = {
root: (state, hastNode) => state.openNode(schema.topNodeType, hastNode, {}),
text: (state, hastNode) => {
const { factorySpec } = state.top;
const factories = {
root: {
selector: 'root',
handle: (state, hastNode) => state.openNode(schema.topNodeType, hastNode, {}),
},
text: {
selector: 'text',
handle: (state, hastNode) => {
const { factorySpec } = state.top;
if (/^\s+$/.test(hastNode.value)) {
return;
}
if (/^\s+$/.test(hastNode.value)) {
return;
}
if (factorySpec.wrapTextInParagraph === true) {
state.openNode(schema.nodeType('paragraph'));
state.addText(schema, hastNode.value);
state.closeNode();
} else {
state.addText(schema, hastNode.value);
}
if (factorySpec.wrapTextInParagraph === true) {
state.openNode(schema.nodeType('paragraph'));
state.addText(schema, hastNode.value);
state.closeNode();
} else {
state.addText(schema, hastNode.value);
}
},
},
};
for (const [proseMirrorName, factorySpec] of Object.entries(proseMirrorFactorySpecs)) {
const factory = {
selector: factorySpec.selector,
skipChildren: factorySpec.skipChildren,
};
for (const [hastNodeTagName, factorySpec] of Object.entries(proseMirrorFactorySpecs)) {
if (factorySpec.block) {
handlers[hastNodeTagName] = (state, hastNode, parent, ancestors) => {
const nodeType = schema.nodeType(
isFunction(factorySpec.block)
? factorySpec.block(hastNode, parent, ancestors)
: factorySpec.block,
);
if (factorySpec.type === 'block') {
factory.handle = (state, hastNode, parent) => {
const nodeType = schema.nodeType(proseMirrorName);
state.closeUntil(parent);
state.openNode(
@ -297,9 +303,9 @@ const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source)
state.closeNode();
}
};
} else if (factorySpec.inline) {
const nodeType = schema.nodeType(factorySpec.inline);
handlers[hastNodeTagName] = (state, hastNode, parent) => {
} else if (factorySpec.type === 'inline') {
const nodeType = schema.nodeType(proseMirrorName);
factory.handle = (state, hastNode, parent) => {
state.closeUntil(parent);
state.openNode(
nodeType,
@ -310,9 +316,9 @@ const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source)
// Inline nodes do not have children therefore they are immediately closed
state.closeNode();
};
} else if (factorySpec.mark) {
const markType = schema.marks[factorySpec.mark];
handlers[hastNodeTagName] = (state, hastNode, parent) => {
} else if (factorySpec.type === 'mark') {
const markType = schema.marks[proseMirrorName];
factory.handle = (state, hastNode, parent) => {
state.openMark(markType, getAttrs(factorySpec, hastNode, parent, source));
if (factorySpec.inlineContent) {
@ -320,13 +326,26 @@ const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source)
}
};
} else {
throw new RangeError(`Unrecognized node factory spec ${JSON.stringify(factorySpec)}`);
throw new RangeError(
`Unrecognized ProseMirror object type ${JSON.stringify(factorySpec.type)}`,
);
}
factories[proseMirrorName] = factory;
}
return handlers;
return factories;
};
const findFactory = (hastNode, factories) =>
Object.entries(factories).find(([, factorySpec]) => {
const { selector } = factorySpec;
return isFunction(selector)
? selector(hastNode)
: [hastNode.tagName, hastNode.type].includes(selector);
})?.[1];
/**
* Converts a Hast AST to a ProseMirror document based on a series
* of specifications that describe how to map all the nodes of the former
@ -339,8 +358,9 @@ const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source)
* The object should have the following shape:
*
* {
* [hastNode.tagName]: {
* [block|node|mark]: [ProseMirror.Node.name],
* [ProseMirrorNodeOrMarkName]: {
* type: 'block' | 'inline' | 'mark',
* selector: String | hastNode -> Boolean,
* ...configurationOptions
* }
* }
@ -348,57 +368,21 @@ const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source)
* Where each property in the object represents a HAST node with a given tag name, for example:
*
* {
* h1: {},
* h2: {},
* table: {},
* strong: {},
* horizontalRule: {
* type: 'block',
* selector: 'hr',
* },
* heading: {
* type: 'block',
* selector: (hastNode) => ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(hastNode),
* },
* bold: {
* type: 'mark'
* selector: (hastNode) => ['b', 'strong'].includes(hastNode),
* },
* // etc
* }
*
* You can specify the type of ProseMirror object adding one the following
* properties:
*
* 1. "block": A ProseMirror node that contains one or more children.
* 2. "inline": A ProseMirror node that doesnt contain any children although
* it can have inline content like a code block or a reference.
* 3. "mark": A ProseMirror mark.
*
* The value of that property should be the name of the ProseMirror node or mark, i.e:
*
* {
* h1: {
* block: 'heading',
* },
* h2: {
* block: 'heading',
* },
* img: {
* node: 'image',
* },
* strong: {
* mark: 'bold',
* }
* }
*
* You can compute a ProseMirrors node or mark name based on the HAST node
* by passing a function instead of a String. The converter invokes the function
* and provides a HAST node object:
*
* {
* list: {
* block: (hastNode) => {
* let type = 'bulletList';
* if (hastNode.children.some(isTaskItem)) {
* type = 'taskList';
* } else if (hastNode.ordered) {
* type = 'orderedList';
* }
* return type;
* }
* }
* }
*
* Configuration options
* ----------------------
@ -406,6 +390,26 @@ const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source)
* You can customize the conversion process for every node or mark
* setting the following properties in the specification object:
*
* **type**
*
* The `type` property should have one of following three values:
*
* 1. "block": A ProseMirror node that contains one or more children.
* 2. "inline": A ProseMirror node that doesnt contain any children although
* it can have inline content like an image or a mention object.
* 3. "mark": A ProseMirror mark.
*
* **selector**
*
* The `selector` property matches a HastNode to a ProseMirror node or
* Mark. If you assign a string value to this property, the converter
* will match the first hast node with a `tagName` or `type` property
* that equals the string value.
*
* If you assign a function, the converter will invoke the function with
* the hast node. The function should return `true` if the hastNode matches
* the custom criteria implemented in the function
*
* **getAttrs**
*
* Computes a ProseMirror node or mark attributes. The converter will invoke
@ -447,12 +451,9 @@ export const createProseMirrorDocFromMdastTree = ({ schema, factorySpecs, tree,
const state = new HastToProseMirrorConverterState();
visitParents(tree, (hastNode, ancestors) => {
const parent = ancestors[ancestors.length - 1];
const skipChildren = factorySpecs[hastNode.tagName]?.skipChildren;
const factory = findFactory(hastNode, proseMirrorNodeFactories);
const handler = proseMirrorNodeFactories[hastNode.tagName || hastNode.type];
if (!handler) {
if (!factory) {
throw new Error(
`Hast node of type "${
hastNode.tagName || hastNode.type
@ -460,9 +461,11 @@ export const createProseMirrorDocFromMdastTree = ({ schema, factorySpecs, tree,
);
}
handler(state, hastNode, parent, ancestors);
const parent = ancestors[ancestors.length - 1];
return skipChildren === true ? 'skip' : true;
factory.handle(state, hastNode, parent);
return factory.skipChildren === true ? 'skip' : true;
});
let doc;

View File

@ -3,38 +3,24 @@ import { render } from '~/lib/gfm';
import { createProseMirrorDocFromMdastTree } from './hast_to_prosemirror_converter';
const factorySpecs = {
blockquote: { block: 'blockquote' },
p: { block: 'paragraph' },
li: { block: 'listItem', wrapTextInParagraph: true },
ul: { block: 'bulletList' },
ol: { block: 'orderedList' },
h1: {
block: 'heading',
getAttrs: () => ({ level: 1 }),
blockquote: { type: 'block', selector: 'blockquote' },
paragraph: { type: 'block', selector: 'p' },
listItem: { type: 'block', selector: 'li', wrapTextInParagraph: true },
orderedList: { type: 'block', selector: 'ol' },
bulletList: { type: 'block', selector: 'ul' },
heading: {
type: 'block',
selector: (hastNode) => ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(hastNode.tagName),
getAttrs: (hastNode) => {
const level = parseInt(/(\d)$/.exec(hastNode.tagName)?.[1], 10) || 1;
return { level };
},
},
h2: {
block: 'heading',
getAttrs: () => ({ level: 2 }),
},
h3: {
block: 'heading',
getAttrs: () => ({ level: 3 }),
},
h4: {
block: 'heading',
getAttrs: () => ({ level: 4 }),
},
h5: {
block: 'heading',
getAttrs: () => ({ level: 5 }),
},
h6: {
block: 'heading',
getAttrs: () => ({ level: 6 }),
},
pre: {
block: 'codeBlock',
codeBlock: {
type: 'block',
skipChildren: true,
selector: 'pre',
getContent: ({ hastNodeText }) => hastNodeText.replace(/\n$/, ''),
getAttrs: (hastNode) => {
const languageClass = hastNode.children[0]?.properties.className?.[0];
@ -43,23 +29,38 @@ const factorySpecs = {
return { language };
},
},
hr: { inline: 'horizontalRule' },
img: {
inline: 'image',
horizontalRule: {
type: 'block',
selector: 'hr',
},
image: {
type: 'inline',
selector: 'img',
getAttrs: (hastNode) => ({
src: hastNode.properties.src,
title: hastNode.properties.title,
alt: hastNode.properties.alt,
}),
},
br: { inline: 'hardBreak' },
code: { mark: 'code' },
em: { mark: 'italic' },
i: { mark: 'italic' },
strong: { mark: 'bold' },
b: { mark: 'bold' },
a: {
mark: 'link',
hardBreak: {
type: 'inline',
selector: 'br',
},
code: {
type: 'mark',
selector: 'code',
},
italic: {
type: 'mark',
selector: (hastNode) => ['em', 'i'].includes(hastNode.tagName),
},
bold: {
type: 'mark',
selector: (hastNode) => ['strong', 'b'].includes(hastNode.tagName),
},
link: {
type: 'mark',
selector: 'a',
getAttrs: (hastNode) => ({
href: hastNode.properties.href,
title: hastNode.properties.title,

View File

@ -8,13 +8,6 @@ input {
background-color: $input-bg;
}
input,
textarea {
&:focus {
@include gl-focus;
}
}
input[type='text'].danger {
background: $input-danger-bg !important;
border-color: $red-400;

View File

@ -83,6 +83,11 @@
}
}
}
// Remove custom focus from element
.inputarea {
@include gl-shadow-none;
}
}
.active-line-text {

View File

@ -56,7 +56,7 @@
position: relative;
font-family: $monospace-font;
$left: 12px;
overflow: hidden; // See https://gitlab.com/gitlab-org/gitlab-foss/issues/13987
.max-width-marker {
width: 72ch;
color: $commit-max-width-marker-color;

View File

@ -750,8 +750,12 @@
.add-issuable-form-input-wrapper {
&.focus {
border-color: $blue-300;
box-shadow: 0 0 4px $dropdown-input-focus-shadow;
border-color: $gray-700;
@include gl-focus;
input {
@include gl-shadow-none;
}
}
.gl-show-field-errors &.form-control:not(textarea) {

View File

@ -77,7 +77,7 @@
// Disable inner focus
textarea:focus {
box-shadow: none;
@include gl-shadow-none;
}
}
}

View File

@ -9,7 +9,8 @@ module BulkImports
).freeze
LFS_OBJECTS_RELATION = 'lfs_objects'
REPOSITORY_BUNDLE_RELATION = 'repository_bundle'
REPOSITORY_BUNDLE_RELATION = 'repository'
DESIGN_BUNDLE_RELATION = 'design'
def import_export_yaml
::Gitlab::ImportExport.config_file
@ -20,7 +21,12 @@ module BulkImports
end
def file_relations
[UPLOADS_RELATION, LFS_OBJECTS_RELATION, REPOSITORY_BUNDLE_RELATION]
[
UPLOADS_RELATION,
LFS_OBJECTS_RELATION,
REPOSITORY_BUNDLE_RELATION,
DESIGN_BUNDLE_RELATION
]
end
end
end

View File

@ -26,7 +26,7 @@ class ProjectStatistics < ApplicationRecord
pipeline_artifacts_size: %i[storage_size],
snippets_size: %i[storage_size]
}.freeze
NAMESPACE_RELATABLE_COLUMNS = [:repository_size, :wiki_size, :lfs_objects_size, :uploads_size].freeze
NAMESPACE_RELATABLE_COLUMNS = [:repository_size, :wiki_size, :lfs_objects_size, :uploads_size, :container_registry_size].freeze
scope :for_project_ids, ->(project_ids) { where(project_id: project_ids) }

View File

@ -31,7 +31,9 @@ module BulkImports
when FileTransfer::ProjectConfig::LFS_OBJECTS_RELATION
LfsObjectsExportService.new(portable, export_path)
when FileTransfer::ProjectConfig::REPOSITORY_BUNDLE_RELATION
RepositoryBundleExportService.new(portable, export_path)
RepositoryBundleExportService.new(portable.repository, export_path, relation)
when FileTransfer::ProjectConfig::DESIGN_BUNDLE_RELATION
RepositoryBundleExportService.new(portable.design_repository, export_path, relation)
else
raise BulkImports::Error, 'Unsupported relation export type'
end

View File

@ -2,12 +2,10 @@
module BulkImports
class RepositoryBundleExportService
FILENAME = 'project.bundle'
def initialize(portable, export_path)
@portable = portable
def initialize(repository, export_path, export_filename)
@repository = repository
@export_path = export_path
@repository = portable.repository
@export_filename = export_filename
end
def execute
@ -16,10 +14,10 @@ module BulkImports
private
attr_reader :portable, :export_path, :repository
attr_reader :repository, :export_path, :export_filename
def bundle_filepath
File.join(export_path, FILENAME)
File.join(export_path, "#{export_filename}.bundle")
end
end
end

View File

@ -21,7 +21,7 @@
.text-secondary
= sprite_icon("rocket", size: 12)
= _("Release")
= link_to release.name, project_releases_path(@project, anchor: release.tag), class: 'gl-text-blue-600!'
= link_to release.name, ::Feature.enabled?(:fix_release_path_in_tag_index_page, @project) ? project_release_path(@project, release) : project_releases_path(@project, anchor: release.tag), class: 'gl-text-blue-600!'
- if tag.message.present?
%pre.wrap

View File

@ -1,5 +1,11 @@
- page_title s_("UsageQuota|Usage")
= render Pajamas::AlertComponent.new(title: _('Repository size recalculation started'),
variant: :info,
alert_class: 'js-recalculation-started-alert gl-mt-4 gl-mb-5 gl-display-none') do |c|
= c.body do
= _('Refresh the page in a few minutes to view usage.')
%h3.page-title
= s_('UsageQuota|Usage Quotas')

View File

@ -0,0 +1,8 @@
---
name: fix_release_path_in_tag_index_page
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87736
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/362915
milestone: '15.1'
type: development
group: group::release
default_enabled: false

View File

@ -13,7 +13,7 @@ a warm-standby as part of a disaster recovery strategy.
WARNING:
Geo undergoes significant changes from release to release. Upgrades are
supported and [documented](#updating-geo), but you should ensure that you're
supported and [documented](#upgrading-geo), but you should ensure that you're
using the right version of the documentation for your installation.
Fetching large repositories can take a long time for teams located far from a single GitLab instance.
@ -234,9 +234,9 @@ After installing GitLab on the **secondary** sites and performing the initial co
For information on configuring Geo, see [Geo configuration](replication/configuration.md).
### Updating Geo
### Upgrading Geo
For information on how to update your Geo sites to the latest GitLab version, see [Updating the Geo sites](replication/updating_the_geo_sites.md).
For information on how to update your Geo sites to the latest GitLab version, see [Upgrading the Geo sites](replication/upgrading_the_geo_sites.md).
### Pausing and resuming replication
@ -252,7 +252,7 @@ WARNING:
Pausing and resuming of replication is only supported for Geo installations using an
Omnibus GitLab-managed database. External databases are not supported.
In some circumstances, like during [upgrades](replication/updating_the_geo_sites.md) or a [planned failover](disaster_recovery/planned_failover.md), it is desirable to pause replication between the primary and secondary.
In some circumstances, like during [upgrades](replication/upgrading_the_geo_sites.md) or a [planned failover](disaster_recovery/planned_failover.md), it is desirable to pause replication between the primary and secondary.
Pausing and resuming replication is done via a command line tool from the node in the secondary site where the `postgresql` service is enabled.

View File

@ -384,7 +384,7 @@ when:
## Upgrading Geo
See the [updating the Geo sites document](updating_the_geo_sites.md).
See the [upgrading the Geo sites document](upgrading_the_geo_sites.md).
## Troubleshooting

View File

@ -5,27 +5,27 @@ info: To determine the technical writer assigned to the Stage/Group associated w
type: howto
---
# Updating the Geo sites **(PREMIUM SELF)**
# Upgrading the Geo sites **(PREMIUM SELF)**
WARNING:
Read these sections carefully before updating your Geo sites. Not following
version-specific update steps may result in unexpected downtime. If you have
version-specific upgrade steps may result in unexpected downtime. If you have
any specific questions, [contact Support](https://about.gitlab.com/support/#contact-support).
Updating Geo sites involves performing:
Upgrading Geo sites involves performing:
1. [Version-specific update steps](version_specific_updates.md), depending on the
version being updated to or from.
1. [General update steps](#general-update-steps), for all updates.
1. [Version-specific upgrade steps](version_specific_upgrades.md), depending on the
version being upgraded to or from.
1. [General upgrade steps](#general-upgrade-steps), for all upgrades.
## General update steps
## General upgrade steps
NOTE:
These general update steps are not intended for multi-site deployments,
These general upgrade steps are not intended for multi-site deployments,
and will cause downtime. If you want to avoid downtime, consider using
[zero downtime upgrades](../../../update/zero_downtime.md#multi-node--ha-deployment-with-geo).
To update the Geo sites when a new GitLab version is released, update **primary**
To upgrade the Geo sites when a new GitLab version is released, upgrade **primary**
and all **secondary** sites:
1. **Optional:** [Pause replication on each **secondary** sites.](../index.md#pausing-and-resuming-replication)
@ -34,11 +34,11 @@ and all **secondary** sites:
1. SSH into each node of **secondary** sites.
1. [Upgrade GitLab on each **secondary** site](../../../update/package/index.md#upgrade-using-the-official-repositories).
1. If you paused replication in step 1, [resume replication on each **secondary**](../index.md#pausing-and-resuming-replication)
1. [Test](#check-status-after-updating) **primary** and **secondary** sites, and check version in each.
1. [Test](#check-status-after-upgrading) **primary** and **secondary** sites, and check version in each.
### Check status after updating
### Check status after upgrading
Now that the update process is complete, you may want to check whether
Now that the upgrade process is complete, you may want to check whether
everything is working correctly:
1. Run the Geo Rake task on an application node for the primary and secondary sites. Everything should be green:

View File

@ -4,21 +4,21 @@ group: Geo
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Version-specific update instructions **(PREMIUM SELF)**
# Version-specific upgrade instructions **(PREMIUM SELF)**
Review this page for update instructions for your version. These steps
accompany the [general steps](updating_the_geo_sites.md#general-update-steps)
for updating Geo sites.
Review this page for upgrade instructions for your version. These steps
accompany the [general steps](upgrading_the_geo_sites.md#general-upgrade-steps)
for upgrading Geo sites.
## Updating to 14.9
## Upgrading to 14.9
**DO NOT** update to GitLab 14.9.0. Instead, use 14.9.1 or later.
**DO NOT** upgrade to GitLab 14.9.0. Instead, use 14.9.1 or later.
We've discovered an issue with Geo's CI verification feature that may [cause job traces to be lost](https://gitlab.com/gitlab-com/gl-infra/production/-/issues/6664). This issue was fixed in [the GitLab 14.9.1 patch release](https://about.gitlab.com/releases/2022/03/23/gitlab-14-9-1-released/).
If you have already updated to GitLab 14.9.0, you can disable the feature causing the issue by [disabling the `geo_job_artifact_replication` feature flag](../../feature_flags.md#how-to-enable-and-disable-features-behind-flags).
If you have already upgraded to GitLab 14.9.0, you can disable the feature causing the issue by [disabling the `geo_job_artifact_replication` feature flag](../../feature_flags.md#how-to-enable-and-disable-features-behind-flags).
## Updating to 14.2 through 14.7
## Upgrading to 14.2 through 14.7
There is [an issue in GitLab 14.2 through 14.7](https://gitlab.com/gitlab-org/gitlab/-/issues/299819#note_822629467)
that affects Geo when the GitLab-managed object storage replication is used, causing blob object types to fail synchronization.
@ -33,11 +33,11 @@ results in a loop that consistently fails for all objects stored in object stora
For information on how to fix this, see
[Troubleshooting - Failed syncs with GitLab-managed object storage replication](troubleshooting.md#failed-syncs-with-gitlab-managed-object-storage-replication).
## Updating to 14.4
## Upgrading to 14.4
There is [an issue in GitLab 14.4.0 through 14.4.2](../../../update/index.md#1440) that can affect Geo and other features that rely on cronjobs. We recommend upgrading to GitLab 14.4.3 or later.
## Updating to 14.1, 14.2, 14.3
## Upgrading to 14.1, 14.2, 14.3
### Multi-arch images
@ -72,13 +72,13 @@ Otherwise, for each **secondary** site, on a Rails application node, open a [Rai
end
```
If you are running a version prior to 14.1 and are using Geo and multi-arch containers in your Container Registry, we recommend [upgrading](updating_the_geo_sites.md) to at least GitLab 14.1.
If you are running a version prior to 14.1 and are using Geo and multi-arch containers in your Container Registry, we recommend [upgrading](upgrading_the_geo_sites.md) to at least GitLab 14.1.
### Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) causes Geo secondary site statuses to appear to stop updating and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) causes Geo secondary site statuses to appear to stop upgrading and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
## Updating to GitLab 14.0/14.1
## Upgrading to GitLab 14.0/14.1
### Primary sites can not be removed from the UI
@ -90,13 +90,13 @@ If you are running an affected version and need to remove your Primary site, you
### Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) causes Geo secondary site statuses to appear to stop updating and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) causes Geo secondary site statuses to appear to stop upgrading and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
## Updating to GitLab 13.12
## Upgrading to GitLab 13.12
### Secondary sites re-download all LFS files upon update
### Secondary sites re-download all LFS files upon upgrade
We found an issue where [secondary sites re-download all LFS files](https://gitlab.com/gitlab-org/gitlab/-/issues/334550) upon update. This bug:
We found an issue where [secondary sites re-download all LFS files](https://gitlab.com/gitlab-org/gitlab/-/issues/334550) upon upgrade. This bug:
- Only applies to Geo secondary sites that have replicated LFS objects.
- Is _not_ a data loss risk.
@ -104,9 +104,9 @@ We found an issue where [secondary sites re-download all LFS files](https://gitl
- May impact performance for GitLab installations with a large number of LFS files.
If you don't have many LFS objects or can stand a bit of churn, then it is safe to let the secondary sites re-download LFS objects.
If you do have many LFS objects, or many Geo secondary sites, or limited bandwidth, or a combination of them all, then we recommend you skip GitLab 13.12.0 through 13.12.6 and update to GitLab 13.12.7 or newer.
If you do have many LFS objects, or many Geo secondary sites, or limited bandwidth, or a combination of them all, then we recommend you skip GitLab 13.12.0 through 13.12.6 and upgrade to GitLab 13.12.7 or newer.
#### If you have already updated to an affected version, and the re-sync is ongoing
#### If you have already upgraded to an affected version, and the re-sync is ongoing
You can manually migrate the legacy sync state to the new state column by running the following command in a [Rails console](../../operations/rails_console.md). It should take under a minute:
@ -116,29 +116,29 @@ Geo::LfsObjectRegistry.where(state: 0, success: true).update_all(state: 2)
### Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) causes Geo secondary site statuses to appear to stop updating and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) causes Geo secondary site statuses to appear to stop upgrading and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
## Updating to GitLab 13.11
## Upgrading to GitLab 13.11
We found an [issue with Git clone/pull through HTTP(s)](https://gitlab.com/gitlab-org/gitlab/-/issues/330787) on Geo secondaries and on any GitLab instance if maintenance mode is enabled. This was caused by a regression in GitLab Workhorse. This is fixed in the [GitLab 13.11.4 patch release](https://about.gitlab.com/releases/2021/05/14/gitlab-13-11-4-released/). To avoid this issue, upgrade to GitLab 13.11.4 or later.
### Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) causes Geo secondary site statuses to appear to stop updating and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) causes Geo secondary site statuses to appear to stop upgrading and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
## Updating to GitLab 13.10
## Upgrading to GitLab 13.10
### Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) causes Geo secondary site statuses to appear to stop updating and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) causes Geo secondary site statuses to appear to stop upgrading and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
## Updating to GitLab 13.9
## Upgrading to GitLab 13.9
### Error during zero-downtime update: "cannot drop column asset_proxy_whitelist"
### Error during zero-downtime upgrade: "cannot drop column asset_proxy_whitelist"
We've detected an issue [with a column rename](https://gitlab.com/gitlab-org/gitlab/-/issues/324160)
that prevents upgrades to GitLab 13.9.0, 13.9.1, 13.9.2 and 13.9.3 when following the zero-downtime steps. It is necessary
to perform the following additional steps for the zero-downtime update:
to perform the following additional steps for the zero-downtime upgrade:
1. Before running the final `sudo gitlab-rake db:migrate` command on the deploy node,
execute the following queries using the PostgreSQL console (or `sudo gitlab-psql`)
@ -169,27 +169,27 @@ PG::DependentObjectsStillExist: ERROR: cannot drop column asset_proxy_whitelist
DETAIL: trigger trigger_0d588df444c8 on table application_settings depends on column asset_proxy_whitelist of table application_settings
```
To work around this bug, follow the previous steps to complete the update.
To work around this bug, follow the previous steps to complete the upgrade.
More details are available [in this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/324160).
### Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) causes Geo secondary site statuses to appear to stop updating and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) causes Geo secondary site statuses to appear to stop updating and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
## Updating to GitLab 13.7
## Upgrading to GitLab 13.7
We've detected an issue with the `FetchRemove` call used by Geo secondaries.
This causes performance issues as we execute reference transaction hooks for
each updated reference. Delay any upgrade attempts until this is in the
each updated reference. Delay any upgrade attempts until this is in the
[13.7.5 patch release.](https://gitlab.com/gitlab-org/gitaly/-/merge_requests/3002).
More details are available [in this issue](https://gitlab.com/gitlab-org/git/-/issues/79).
## Updating to GitLab 13.5
## Upgrading to GitLab 13.5
GitLab 13.5 has a [regression that prevents viewing a list of container repositories and registries](https://gitlab.com/gitlab-org/gitlab/-/issues/285475)
on Geo secondaries. This issue is fixed in GitLab 13.6.1 and later.
## Updating to GitLab 13.3
## Upgrading to GitLab 13.3
In GitLab 13.3, Geo removed the PostgreSQL [Foreign Data Wrapper](https://www.postgresql.org/docs/11/postgres-fdw.html)
dependency for the tracking database.
@ -219,61 +219,61 @@ when using `--force` or `--skip-preflight-checks`, due to [an issue](https://git
The [troubleshooting steps](troubleshooting.md#errors-when-using---skip-preflight-checks-or---force)
contain a workaround if you run into errors during the failover.
## Updating to GitLab 13.2
## Upgrading to GitLab 13.2
In GitLab 13.2, promoting a secondary site to a primary while the secondary is
paused fails. Do not pause replication before promoting a secondary. If the
site is paused, be sure to resume before promoting. To avoid this issue,
upgrade to GitLab 13.4 or later.
## Updating to GitLab 13.0
## Upgrading to GitLab 13.0
Upgrading to GitLab 13.0 requires GitLab 12.10 to already be using PostgreSQL
version 11. For the recommended procedure, see the
[Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
## Updating to GitLab 12.10
## Upgrading to GitLab 12.10
GitLab 12.10 doesn't attempt to update the embedded PostgreSQL server when
GitLab 12.10 doesn't attempt to upgrade the embedded PostgreSQL server when
using Geo, because the PostgreSQL upgrade requires downtime for secondaries
while reinitializing streaming replication. It must be upgraded manually. For
the recommended procedure, see the
[Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
## Updating to GitLab 12.9
## Upgrading to GitLab 12.9
WARNING:
GitLab 12.9.0 through GitLab 12.9.3 are affected by [a bug that stops
repository verification](https://gitlab.com/gitlab-org/gitlab/-/issues/213523).
The issue is fixed in GitLab 12.9.4. Upgrade to GitLab 12.9.4 or later.
By default, GitLab 12.9 attempts to update the embedded PostgreSQL server
By default, GitLab 12.9 attempts to upgrade the embedded PostgreSQL server
version from 9.6 to 10.12, which requires downtime on secondaries while
reinitializing streaming replication. For the recommended procedure, see the
[Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
You can temporarily disable this behavior by running the following before
updating:
upgrading:
```shell
sudo touch /etc/gitlab/disable-postgresql-upgrade
```
## Updating to GitLab 12.8
## Upgrading to GitLab 12.8
By default, GitLab 12.8 attempts to update the embedded PostgreSQL server
By default, GitLab 12.8 attempts to upgrade the embedded PostgreSQL server
version from 9.6 to 10.12, which requires downtime on secondaries while
reinitializing streaming replication. For the recommended procedure, see the
[Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
You can temporarily disable this behavior by running the following before
updating:
upgrading:
```shell
sudo touch /etc/gitlab/disable-postgresql-upgrade
```
## Updating to GitLab 12.7
## Upgrading to GitLab 12.7
WARNING:
Only upgrade to GitLab 12.7.5 or later. Do not upgrade to versions 12.7.0
@ -281,65 +281,65 @@ through 12.7.4 because there is [an initialization order bug](https://gitlab.com
[The fix](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24021) was
shipped in 12.7.5.
By default, GitLab 12.7 attempts to update the embedded PostgreSQL server
By default, GitLab 12.7 attempts to upgrade the embedded PostgreSQL server
version from 9.6 to 10.9, which requires downtime on secondaries while
reinitializing streaming replication. For the recommended procedure, see the
[Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
You can temporarily disable this behavior by running the following before
updating:
upgrading:
```shell
sudo touch /etc/gitlab/disable-postgresql-upgrade
```
## Updating to GitLab 12.6
## Upgrading to GitLab 12.6
By default, GitLab 12.6 attempts to update the embedded PostgreSQL server
By default, GitLab 12.6 attempts to upgrade the embedded PostgreSQL server
version from 9.6 to 10.9, which requires downtime on secondaries while
reinitializing streaming replication. For the recommended procedure, see the
[Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
You can temporarily disable this behavior by running the following before
updating:
upgrading:
```shell
sudo touch /etc/gitlab/disable-postgresql-upgrade
```
## Updating to GitLab 12.5
## Upgrading to GitLab 12.5
By default, GitLab 12.5 attempts to update the embedded PostgreSQL server
By default, GitLab 12.5 attempts to upgrade the embedded PostgreSQL server
version from 9.6 to 10.9, which requires downtime on secondaries while
reinitializing streaming replication. For the recommended procedure, see the
[Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
You can temporarily disable this behavior by running the following before
updating:
upgrading:
```shell
sudo touch /etc/gitlab/disable-postgresql-upgrade
```
## Updating to GitLab 12.4
## Upgrading to GitLab 12.4
By default, GitLab 12.4 attempts to update the embedded PostgreSQL server
By default, GitLab 12.4 attempts to upgrade the embedded PostgreSQL server
version from 9.6 to 10.9, which requires downtime on secondaries while
reinitializing streaming replication. For the recommended procedure, see the
[Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
You can temporarily disable this behavior by running the following before
updating:
upgrading:
```shell
sudo touch /etc/gitlab/disable-postgresql-upgrade
```
## Updating to GitLab 12.3
## Upgrading to GitLab 12.3
WARNING:
If the existing PostgreSQL server version is 9.6.x, we recommend upgrading to
GitLab 12.4 or later. By default, GitLab 12.3 attempts to update the embedded
GitLab 12.4 or later. By default, GitLab 12.3 attempts to upgrade the embedded
PostgreSQL server version from 9.6 to 10.9. In certain circumstances, it can
fail. For more information, see the
[Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
@ -349,11 +349,11 @@ requires downtime for secondaries while reinitializing streaming replication.
For the recommended procedure, see the
[Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
## Updating to GitLab 12.2
## Upgrading to GitLab 12.2
WARNING:
If the existing PostgreSQL server version is 9.6.x, we recommend upgrading to
GitLab 12.4 or later. By default, GitLab 12.2 attempts to update the embedded
GitLab 12.4 or later. By default, GitLab 12.2 attempts to upgrade the embedded
PostgreSQL server version from 9.6 to 10.9. In certain circumstances, it can
fail. For more information, see the
[Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
@ -363,12 +363,12 @@ requires downtime for secondaries while reinitializing streaming replication.
For the recommended procedure, see the
[Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
GitLab 12.2 includes the following minor PostgreSQL updates:
GitLab 12.2 includes the following minor PostgreSQL upgrades:
- To version `9.6.14`, if you run PostgreSQL 9.6.
- To version `10.9`, if you run PostgreSQL 10.
This update occurs even if major PostgreSQL updates are disabled.
This upgrade occurs even if major PostgreSQL upgrades are disabled.
Before [refreshing Foreign Data Wrapper during a Geo upgrade](../../../update/zero_downtime.md#step-4-run-post-deployment-migrations-and-checks),
restart the Geo tracking database:
@ -380,11 +380,11 @@ sudo gitlab-ctl restart geo-postgresql
The restart avoids a version mismatch when PostgreSQL tries to load the FDW
extension.
## Updating to GitLab 12.1
## Upgrading to GitLab 12.1
WARNING:
If the existing PostgreSQL server version is 9.6.x, we recommend upgrading to
GitLab 12.4 or later. By default, GitLab 12.1 attempts to update the embedded
GitLab 12.4 or later. By default, GitLab 12.1 attempts to upgrade the embedded
PostgreSQL server version from 9.6 to 10.9. In certain circumstances, it can
fail. For more information, see the
[Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
@ -394,7 +394,7 @@ requires downtime for secondaries while reinitializing streaming replication.
For the recommended procedure, see the
[Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
## Updating to GitLab 12.0
## Upgrading to GitLab 12.0
WARNING:
This version is affected by a [bug that results in new LFS objects not being

View File

@ -82,10 +82,10 @@ you [configure it manually](../../../user/project/integrations/prometheus.md#man
## Take action on Prometheus alerts **(ULTIMATE)**
You can [add a webhook](../../../operations/metrics/alerts.md#external-prometheus-instances)
to the Prometheus configuration for GitLab to receive notifications of any alerts.
You can [add a Prometheus integration](../../../operations/incident_management/integrations.md)
to GitLab to receive notifications of any alerts.
Once the webhook is setup, you can
Once the integration is set up, you can
[take action on incoming alerts](../../../operations/metrics/alerts.md#trigger-actions-from-alerts).
## Add custom metrics to the self monitoring project

View File

@ -1,6 +1,6 @@
---
stage: Growth
group: Activation
stage: Fulfillment
group: Utilization
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---

View File

@ -1,6 +1,6 @@
---
stage: Growth
group: Activation
stage: Fulfillment
group: Utilization
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---

View File

@ -78,6 +78,8 @@ sequenceDiagram
Snowflake DW->>Sisense Dashboards: Data available for querying
```
For more details about the architecture, see [Snowplow infrastructure](infrastructure.md).
## Structured event taxonomy
Click events must be consistent. If each feature captures events differently, it can be difficult
@ -184,19 +186,6 @@ LIMIT 100
Snowplow JavaScript adds [web-specific parameters](https://docs.snowplowanalytics.com/docs/collecting-data/collecting-from-own-applications/snowplow-tracker-protocol/#Web-specific_parameters) to all web events by default.
## Snowplow monitoring
For different stages in the processing pipeline, there are several tools that monitor Snowplow events tracking:
- [Product Intelligence Grafana dashboard](https://dashboards.gitlab.net/d/product-intelligence-main/product-intelligence-product-intelligence?orgId=1) monitors backend events sent from GitLab.com instance to collectors fleet. This dashboard provides information about:
- The number of events that successfully reach Snowplow collectors.
- The number of events that failed to reach Snowplow collectors.
- The number of backend events that were sent.
- [AWS CloudWatch dashboard](https://console.aws.amazon.com/cloudwatch/home?region=us-east-1#dashboards:name=SnowPlow;start=P3D) monitors the state of the events processing pipeline. The pipeline starts from Snowplow collectors, through to enrichers and pseudonymization, and up to persistence on S3 bucket from which events are imported to Snowflake Data Warehouse. To view this dashboard AWS access is required, follow this [instruction](https://gitlab.com/gitlab-org/growth/product-intelligence/snowplow-pseudonymization#monitoring) if you are interested in getting one.
- [SiSense dashboard](https://app.periscopedata.com/app/gitlab/417669/Snowplow-Summary-Dashboard) provides information about the number of good and bad events imported into the Data Warehouse, in addition to the total number of imported Snowplow events.
For more information, see this [video walk-through](https://www.youtube.com/watch?v=NxPS0aKa_oU).
## Related topics
- [Snowplow data structure](https://docs.snowplowanalytics.com/docs/understanding-your-pipeline/canonical-event/)

View File

@ -0,0 +1,101 @@
---
stage: Growth
group: Product Intelligence
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Snowplow infrastructure
Snowplow events on GitLab SaaS fired by a [tracker](implementation.md) go through an AWS pipeline, managed by GitLab.
## Event flow in the AWS pipeline
Every event goes through a collector, enricher, and pseudonymization lambda. The event is then dumped to S3 storage where it can be picked up by the Snowflake data warehouse.
Deploying and managing the infrastructure is automated using Terraform in the current [Terraform repository](https://gitlab.com/gitlab-com/gl-infra/config-mgmt/-/tree/master/environments/aws-snowplow).
```mermaid
graph LR
GL[GitLab.com]-->COL
subgraph aws-cloud[AWS]
COL[Collector]-->|snowplow-raw-good|ENR
COL[Collector]-->|snowplow-raw-bad|FRBE
subgraph firehoserbe[Firehose]
FRBE[AWS Lambda]
end
FRBE-->S3RBE
ENR[Enricher]-->|snowplow-enriched-bad|FEBE
subgraph firehoseebe[Firehose]
FEBE[AWS Lambda]
end
FEBE-->S3EBE
ENR[Enricher]-->|snowplow-enriched-good|FRGE
subgraph firehosege[Firehose]
FRGE[AWS Lambda]
end
FRGE-->S3GE
end
subgraph snowflake[Data warehouse]
S3RBE[S3 raw-bad]-->BE[gitlab_bad_events]
S3EBE[S3 enriched-bad]-->BE[gitlab_bad_events]
S3GE[S3 output]-->GE[gitlab_events]
end
```
See [Snowplow technology 101](https://github.com/snowplow/snowplow/#snowplow-technology-101) for Snowplow's own documentation and an overview of how collectors and enrichers work.
### Pseudonymization
In contrast to a typical Snowplow pipeline, after enrichment, GitLab Snowplow events go through a [pseudonymization service](https://gitlab.com/gitlab-org/growth/product-intelligence/snowplow-pseudonymization) in the form of an AWS Lambda service before they are stored in S3 storage.
#### Why events need to be pseudonymized
GitLab is bound by its [obligations to community](https://about.gitlab.com/handbook/product/product-intelligence-guide/service-usage-data-commitment/)
and by [legal regulations](https://about.gitlab.com/handbook/legal/privacy/services-usage-data/) to protect the privacy of its users.
GitLab must provide valuable insights for business decisions, and there is a need
for a better understanding of different users' behavior patterns. The
pseudonymization process helps you find a compromise between these two requirements.
Pseudonymization processes personally identifiable information inside a Snowplow event in an irreversible fashion,
maintaining deterministic output for a given input, while masking any relation to that input.
#### How events are pseudonymized
Pseudonymization uses an allowlist that provides privacy by default. Therefore, each
attribute received as part of a Snowplow event is pseudonymized unless the attribute
is an allowed exception.
Pseudonymization is done using the HMAC-SHA256 keyed hash algorithm.
Attributes are combined with a secret salt to replace each piece of identifiable information with a pseudonym.
### S3 bucket data lake to Snowflake
See [Data team's Snowplow Overview](https://about.gitlab.com/handbook/business-technology/data-team/platform/snowplow/) for further details on how data is ingested into our Snowflake data warehouse.
## Monitoring
There are several tools that monitor Snowplow events tracking in different stages of the processing pipeline:
- [Product Intelligence Grafana dashboard](https://dashboards.gitlab.net/d/product-intelligence-main/product-intelligence-product-intelligence?orgId=1) monitors backend events sent from a GitLab.com instance to a collectors fleet. This dashboard provides information about:
- The number of events that successfully reach Snowplow collectors.
- The number of events that failed to reach Snowplow collectors.
- The number of backend events that were sent.
- [AWS CloudWatch dashboard](https://console.aws.amazon.com/cloudwatch/home?region=us-east-1#dashboards:name=SnowPlow;start=P3D) monitors the state of the events in a processing pipeline. The pipeline starts from Snowplow collectors, goes through to enrichers and pseudonymization, and then up to persistence in an S3 bucket. From S3, the events are imported into the Snowflake Data Warehouse. You must have AWS access rights to view this dashboard. For more information, see [monitoring](https://gitlab.com/gitlab-org/growth/product-intelligence/snowplow-pseudonymization#monitoring) in the Snowplow Events pseudonymization service documentation.
- [Sisense dashboard](https://app.periscopedata.com/app/gitlab/417669/Snowplow-Summary-Dashboard) provides information about the number of good and bad events imported into the Data Warehouse, in addition to the total number of imported Snowplow events.
For more information, see this [video walk-through](https://www.youtube.com/watch?v=NxPS0aKa_oU).
## Related topics
- [Snowplow technology 101](https://github.com/snowplow/snowplow/#snowplow-technology-101)
- [Snowplow pseudonymization AWS Lambda project](https://gitlab.com/gitlab-org/growth/product-intelligence/snowplow-pseudonymization)
- [Product Intelligence Guide](https://about.gitlab.com/handbook/product/product-intelligence-guide/)
- [Data Infrastructure](https://about.gitlab.com/handbook/business-technology/data-team/platform/infrastructure/)
- [Snowplow architecture overview (internal)](https://www.youtube.com/watch?v=eVYJjzspsLU)
- [Snowplow architecture overview slide deck (internal)](https://docs.google.com/presentation/d/16gQEO5CAg8Tx4NBtfnZj-GF4juFI6HfEPWcZgH4Rn14/edit?usp=sharing)
- [AWS Lambda implementation (internal)](https://youtu.be/cQd0mdMhkQA)

View File

@ -10,9 +10,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/42640) from GitLab Ultimate to GitLab Free in 12.8.
GitLab can accept alerts from any source via a webhook receiver. This can be configured
generically or, in GitLab versions 13.1 and greater, you can configure
[External Prometheus instances](../metrics/alerts.md#external-prometheus-instances)
to use this endpoint.
generically.
## Integrations list
@ -27,8 +25,7 @@ the integration name, type, and status (enabled or disabled):
## Configuration
GitLab can receive alerts via a HTTP endpoint that you configure,
or the [Prometheus integration](#external-prometheus-integration).
GitLab can receive alerts via a HTTP endpoint that you configure.
### Single HTTP Endpoint
@ -89,12 +86,6 @@ GitLab fields when you [create an HTTP endpoint](#http-endpoints):
![Alert Management List](img/custom_alert_mapping_v13_11.png)
### External Prometheus integration
For GitLab versions 13.1 and greater, read
[External Prometheus Instances](../metrics/alerts.md#external-prometheus-instances)
to configure alerts for this integration.
## Customize the alert payload outside of GitLab
For HTTP Endpoints without [custom mappings](#map-fields-in-custom-alerts), you can customize the payload by sending the following

View File

@ -17,46 +17,15 @@ your team when environment performance falls outside of the boundaries you set.
Alerts are not currently supported for [Prometheus cluster integrations](../../user/clusters/integrations.md).
## External Prometheus instances
<!--- start_remove The following content will be removed on remove_date: '2022-09-22' -->
> [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/42640) to GitLab Free in 12.10.
## External Prometheus instances (removed)
For manually configured Prometheus servers, GitLab provides a notify endpoint for
use with Prometheus webhooks. If you have manual configuration enabled, an
**Alerts** section is added to **Settings > Integrations > Prometheus**.
This section contains the needed **URL** and **Authorization Key**. The
**Reset Key** button invalidates the key and generates a new one.
This feature was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/219142) in GitLab 13.2 and [removed](https://gitlab.com/gitlab-org/gitlab/-/issues/338834) in 15.0.
To manually configure a Prometheus server, we recommend
you use the [generic alerts integration](../incident_management/integrations.md).
![Prometheus integration configuration of Alerts](img/prometheus_integration_alerts.png)
To send GitLab alert notifications, copy the **URL** and **Authorization Key** into the
[`webhook_configs`](https://prometheus.io/docs/alerting/latest/configuration/#webhook_config)
section of your Prometheus Alertmanager configuration:
```yaml
receivers:
- name: gitlab
webhook_configs:
- http_config:
authorization:
type: Bearer
credentials: 9e1cbfcd546896a9ea8be557caf13a76
send_resolved: true
url: http://192.168.178.31:3001/root/manual_prometheus/prometheus/alerts/notify.json
# Rest of configuration omitted
# ...
```
For GitLab to associate your alerts with an [environment](../../ci/environments/index.md),
you must configure a `gitlab_environment_name` label on the alerts you set up in
Prometheus. The value of this should match the name of your environment in GitLab.
You can display alerts with a `gitlab_environment_name` of `production`
[on a dashboard](../../user/operations_dashboard/index.md#adding-a-project-to-the-dashboard).
In GitLab versions 13.1 and greater, you can configure your manually configured
Prometheus server to use the
[Generic alerts integration](../incident_management/integrations.md).
<!--- end_remove -->
## Trigger actions from alerts **(ULTIMATE)**

View File

@ -157,10 +157,10 @@ version prior to upgrading the application server.
If you're using Geo:
- Review [Geo upgrade documentation](../administration/geo/replication/updating_the_geo_sites.md).
- Read about the [Geo version-specific update instructions](../administration/geo/replication/version_specific_updates.md).
- Review Geo-specific steps when [updating the database](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
- Create an upgrade and rollback plan for _each_ Geo node (primary and each secondary).
- Review [Geo upgrade documentation](../administration/geo/replication/upgrading_the_geo_sites.md).
- Read about the [Geo version-specific update instructions](../administration/geo/replication/version_specific_upgrades.md).
- Review Geo-specific steps when [upgrading the database](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance).
- Create an upgrade and rollback plan for _each_ Geo site (primary and each secondary).
#### Runners

View File

@ -20,8 +20,8 @@ have a [GitLab Premium](https://about.gitlab.com/pricing/) plan.
To add a project to the dashboard:
1. Ensure your alerts
[populate the `gitlab_environment_name` field](../../operations/metrics/alerts.md#external-prometheus-instances).
1. Ensure you populate the `gitlab_environment_name` label on the alerts you set up in Prometheus.
The value of this should match the name of your environment in GitLab.
In GitLab 13.9, you can display alerts for the `production` environment only.
1. Click the **Add projects** button in the home screen of the dashboard.
1. Search and add one or more projects using the **Search your projects** field.

View File

@ -21,10 +21,12 @@ When a branch is protected, the default behavior enforces these restrictions on
| Protect a branch | At least the Maintainer role. |
| Push to the branch | GitLab administrators and anyone with **Allowed** permission. (1) |
| Force push to the branch | No one. |
| Delete the branch | No one. |
| Delete the branch | No one. (2) |
1. Users with the Developer role can create a project in a group, but might not be allowed to
initially push to the [default branch](repository/branches/default.md).
1. No one can delete a protected branch using Git commands, however, users with at least Maintainer
role can [delete a protected branch from the UI or API](#delete-a-protected-branch).
### Set the default branch protection level

View File

@ -31249,6 +31249,9 @@ msgstr ""
msgid "Refresh the page and try again."
msgstr ""
msgid "Refresh the page in a few minutes to view usage."
msgstr ""
msgid "Refreshing in a second to show the updated status..."
msgid_plural "Refreshing in %d seconds to show the updated status..."
msgstr[0] ""
@ -32142,6 +32145,9 @@ msgstr ""
msgid "Repository size limit (MB)"
msgstr ""
msgid "Repository size recalculation started"
msgstr ""
msgid "Repository storage"
msgstr ""
@ -40878,6 +40884,9 @@ msgstr ""
msgid "UsageQuota|Purchased storage used"
msgstr ""
msgid "UsageQuota|Recalculate repository usage"
msgstr ""
msgid "UsageQuota|Repository"
msgstr ""

View File

@ -11,11 +11,12 @@ import Italic from '~/content_editor/extensions/italic';
import Link from '~/content_editor/extensions/link';
import ListItem from '~/content_editor/extensions/list_item';
import OrderedList from '~/content_editor/extensions/ordered_list';
import Paragraph from '~/content_editor/extensions/paragraph';
import Sourcemap from '~/content_editor/extensions/sourcemap';
import remarkMarkdownDeserializer from '~/content_editor/services/remark_markdown_deserializer';
import markdownSerializer from '~/content_editor/services/markdown_serializer';
import { createTestEditor } from './test_utils';
import { createTestEditor, createDocBuilder } from './test_utils';
const tiptapEditor = createTestEditor({
extensions: [
@ -36,6 +37,44 @@ const tiptapEditor = createTestEditor({
],
});
// Build ProseMirror document-builder helpers for this spec. `createDocBuilder`
// maps each entry in `names` to the node or mark type registered by the
// corresponding content-editor extension on `tiptapEditor`, and returns builder
// functions (destructured below) that the tests use to declaratively construct
// expected documents, e.g. `doc(paragraph(bold('text')))`.
const {
  builders: {
    doc,
    paragraph,
    bold,
    blockquote,
    bulletList,
    code,
    codeBlock,
    hardBreak,
    heading,
    horizontalRule,
    image,
    italic,
    link,
    listItem,
    orderedList,
  },
} = createDocBuilder({
  tiptapEditor,
  names: {
    // Marks are registered via `markType`; block/inline nodes via `nodeType`.
    blockquote: { nodeType: Blockquote.name },
    bold: { markType: Bold.name },
    bulletList: { nodeType: BulletList.name },
    code: { markType: Code.name },
    codeBlock: { nodeType: CodeBlockHighlight.name },
    hardBreak: { nodeType: HardBreak.name },
    heading: { nodeType: Heading.name },
    horizontalRule: { nodeType: HorizontalRule.name },
    image: { nodeType: Image.name },
    italic: { nodeType: Italic.name },
    link: { markType: Link.name },
    listItem: { nodeType: ListItem.name },
    orderedList: { nodeType: OrderedList.name },
    paragraph: { nodeType: Paragraph.name },
  },
});
describe('Client side Markdown processing', () => {
const deserialize = async (content) => {
const { document } = await remarkMarkdownDeserializer().deserialize({
@ -52,197 +91,486 @@ describe('Client side Markdown processing', () => {
pristineDoc: document,
});
const sourceAttrs = (sourceMapKey, sourceMarkdown) => ({
sourceMapKey,
sourceMarkdown,
});
it.each([
{
markdown: '__bold text__',
expectedDoc: doc(
paragraph(
sourceAttrs('0:13', '__bold text__'),
bold(sourceAttrs('0:13', '__bold text__'), 'bold text'),
),
),
},
{
markdown: '**bold text**',
expectedDoc: doc(
paragraph(
sourceAttrs('0:13', '**bold text**'),
bold(sourceAttrs('0:13', '**bold text**'), 'bold text'),
),
),
},
{
markdown: '<strong>bold text</strong>',
expectedDoc: doc(
paragraph(
sourceAttrs('0:26', '<strong>bold text</strong>'),
bold(sourceAttrs('0:26', '<strong>bold text</strong>'), 'bold text'),
),
),
},
{
markdown: '<b>bold text</b>',
expectedDoc: doc(
paragraph(
sourceAttrs('0:16', '<b>bold text</b>'),
bold(sourceAttrs('0:16', '<b>bold text</b>'), 'bold text'),
),
),
},
{
markdown: '_italic text_',
expectedDoc: doc(
paragraph(
sourceAttrs('0:13', '_italic text_'),
italic(sourceAttrs('0:13', '_italic text_'), 'italic text'),
),
),
},
{
markdown: '*italic text*',
expectedDoc: doc(
paragraph(
sourceAttrs('0:13', '*italic text*'),
italic(sourceAttrs('0:13', '*italic text*'), 'italic text'),
),
),
},
{
markdown: '<em>italic text</em>',
expectedDoc: doc(
paragraph(
sourceAttrs('0:20', '<em>italic text</em>'),
italic(sourceAttrs('0:20', '<em>italic text</em>'), 'italic text'),
),
),
},
{
markdown: '<i>italic text</i>',
expectedDoc: doc(
paragraph(
sourceAttrs('0:18', '<i>italic text</i>'),
italic(sourceAttrs('0:18', '<i>italic text</i>'), 'italic text'),
),
),
},
{
markdown: '`inline code`',
expectedDoc: doc(
paragraph(
sourceAttrs('0:13', '`inline code`'),
code(sourceAttrs('0:13', '`inline code`'), 'inline code'),
),
),
},
{
markdown: '**`inline code bold`**',
expectedDoc: doc(
paragraph(
sourceAttrs('0:22', '**`inline code bold`**'),
bold(
sourceAttrs('0:22', '**`inline code bold`**'),
code(sourceAttrs('2:20', '`inline code bold`'), 'inline code bold'),
),
),
),
},
{
markdown: '__`inline code italics`__',
markdown: '_`inline code italics`_',
expectedDoc: doc(
paragraph(
sourceAttrs('0:23', '_`inline code italics`_'),
italic(
sourceAttrs('0:23', '_`inline code italics`_'),
code(sourceAttrs('1:22', '`inline code italics`'), 'inline code italics'),
),
),
),
},
{
markdown: '[GitLab](https://gitlab.com "Go to GitLab")',
expectedDoc: doc(
paragraph(
sourceAttrs('0:43', '[GitLab](https://gitlab.com "Go to GitLab")'),
link(
{
...sourceAttrs('0:43', '[GitLab](https://gitlab.com "Go to GitLab")'),
href: 'https://gitlab.com',
title: 'Go to GitLab',
},
'GitLab',
),
),
),
},
{
markdown: '**[GitLab](https://gitlab.com "Go to GitLab")**',
expectedDoc: doc(
paragraph(
sourceAttrs('0:47', '**[GitLab](https://gitlab.com "Go to GitLab")**'),
bold(
sourceAttrs('0:47', '**[GitLab](https://gitlab.com "Go to GitLab")**'),
link(
{
...sourceAttrs('2:45', '[GitLab](https://gitlab.com "Go to GitLab")'),
href: 'https://gitlab.com',
title: 'Go to GitLab',
},
'GitLab',
),
),
),
),
},
{
markdown: `
This is a paragraph with a\\
hard line break`,
expectedDoc: doc(
paragraph(
sourceAttrs('0:43', 'This is a paragraph with a\\\nhard line break'),
'This is a paragraph with a',
hardBreak(sourceAttrs('26:28', '\\\n')),
'\nhard line break',
),
),
},
{
markdown: '![GitLab Logo](https://gitlab.com/logo.png "GitLab Logo")',
expectedDoc: doc(
paragraph(
sourceAttrs('0:57', '![GitLab Logo](https://gitlab.com/logo.png "GitLab Logo")'),
image({
...sourceAttrs('0:57', '![GitLab Logo](https://gitlab.com/logo.png "GitLab Logo")'),
alt: 'GitLab Logo',
src: 'https://gitlab.com/logo.png',
title: 'GitLab Logo',
}),
),
),
},
{
markdown: '---',
expectedDoc: doc(horizontalRule(sourceAttrs('0:3', '---'))),
},
{
markdown: '***',
expectedDoc: doc(horizontalRule(sourceAttrs('0:3', '***'))),
},
{
markdown: '___',
expectedDoc: doc(horizontalRule(sourceAttrs('0:3', '___'))),
},
{
markdown: '<hr>',
expectedDoc: doc(horizontalRule(sourceAttrs('0:4', '<hr>'))),
},
{
markdown: '# Heading 1',
expectedDoc: doc(heading({ ...sourceAttrs('0:11', '# Heading 1'), level: 1 }, 'Heading 1')),
},
{
markdown: '## Heading 2',
expectedDoc: doc(heading({ ...sourceAttrs('0:12', '## Heading 2'), level: 2 }, 'Heading 2')),
},
{
markdown: '### Heading 3',
expectedDoc: doc(heading({ ...sourceAttrs('0:13', '### Heading 3'), level: 3 }, 'Heading 3')),
},
{
markdown: '#### Heading 4',
expectedDoc: doc(
heading({ ...sourceAttrs('0:14', '#### Heading 4'), level: 4 }, 'Heading 4'),
),
},
{
markdown: '##### Heading 5',
expectedDoc: doc(
heading({ ...sourceAttrs('0:15', '##### Heading 5'), level: 5 }, 'Heading 5'),
),
},
{
markdown: '###### Heading 6',
expectedDoc: doc(
heading({ ...sourceAttrs('0:16', '###### Heading 6'), level: 6 }, 'Heading 6'),
),
},
{
markdown: `
Heading
one
======
`,
expectedDoc: doc(
heading({ ...sourceAttrs('0:18', 'Heading\none\n======'), level: 1 }, 'Heading\none'),
),
},
{
markdown: `
Heading
two
-------
`,
expectedDoc: doc(
heading({ ...sourceAttrs('0:19', 'Heading\ntwo\n-------'), level: 2 }, 'Heading\ntwo'),
),
},
{
markdown: `
- List item 1
- List item 2
`,
expectedDoc: doc(
bulletList(
sourceAttrs('0:27', '- List item 1\n- List item 2'),
listItem(sourceAttrs('0:13', '- List item 1'), paragraph('List item 1')),
listItem(sourceAttrs('14:27', '- List item 2'), paragraph('List item 2')),
),
),
},
{
markdown: `
* List item 1
* List item 2
`,
expectedDoc: doc(
bulletList(
sourceAttrs('0:27', '* List item 1\n* List item 2'),
listItem(sourceAttrs('0:13', '* List item 1'), paragraph('List item 1')),
listItem(sourceAttrs('14:27', '* List item 2'), paragraph('List item 2')),
),
),
},
{
markdown: `
+ List item 1
+ List item 2
`,
expectedDoc: doc(
bulletList(
sourceAttrs('0:27', '+ List item 1\n+ List item 2'),
listItem(sourceAttrs('0:13', '+ List item 1'), paragraph('List item 1')),
listItem(sourceAttrs('14:27', '+ List item 2'), paragraph('List item 2')),
),
),
},
{
markdown: `
1. List item 1
1. List item 2
`,
expectedDoc: doc(
orderedList(
sourceAttrs('0:29', '1. List item 1\n1. List item 2'),
listItem(sourceAttrs('0:14', '1. List item 1'), paragraph('List item 1')),
listItem(sourceAttrs('15:29', '1. List item 2'), paragraph('List item 2')),
),
),
},
{
markdown: `
1. List item 1
2. List item 2
`,
expectedDoc: doc(
orderedList(
sourceAttrs('0:29', '1. List item 1\n2. List item 2'),
listItem(sourceAttrs('0:14', '1. List item 1'), paragraph('List item 1')),
listItem(sourceAttrs('15:29', '2. List item 2'), paragraph('List item 2')),
),
),
},
{
markdown: `
1) List item 1
2) List item 2
`,
expectedDoc: doc(
orderedList(
sourceAttrs('0:29', '1) List item 1\n2) List item 2'),
listItem(sourceAttrs('0:14', '1) List item 1'), paragraph('List item 1')),
listItem(sourceAttrs('15:29', '2) List item 2'), paragraph('List item 2')),
),
),
},
{
markdown: `
- List item 1
- Sub list item 1
`,
expectedDoc: doc(
bulletList(
sourceAttrs('0:33', '- List item 1\n - Sub list item 1'),
listItem(
sourceAttrs('0:33', '- List item 1\n - Sub list item 1'),
paragraph('List item 1\n'),
bulletList(
sourceAttrs('16:33', '- Sub list item 1'),
listItem(sourceAttrs('16:33', '- Sub list item 1'), paragraph('Sub list item 1')),
),
),
),
),
},
{
markdown: `
- List item 1 paragraph 1
{
markdown: `
Heading
one
======
`,
List item 1 paragraph 2
- List item 2
`,
expectedDoc: doc(
bulletList(
sourceAttrs(
'0:66',
'- List item 1 paragraph 1\n\n List item 1 paragraph 2\n- List item 2',
),
listItem(
sourceAttrs('0:52', '- List item 1 paragraph 1\n\n List item 1 paragraph 2'),
paragraph(sourceAttrs('2:25', 'List item 1 paragraph 1'), 'List item 1 paragraph 1'),
paragraph(sourceAttrs('29:52', 'List item 1 paragraph 2'), 'List item 1 paragraph 2'),
),
listItem(
sourceAttrs('53:66', '- List item 2'),
paragraph(sourceAttrs('55:66', 'List item 2'), 'List item 2'),
),
),
),
},
{
markdown: `
Heading
two
-------
`,
> This is a blockquote
`,
expectedDoc: doc(
blockquote(
sourceAttrs('0:22', '> This is a blockquote'),
paragraph(sourceAttrs('2:22', 'This is a blockquote'), 'This is a blockquote'),
),
),
},
{
markdown: `
- List item 1
- List item 2
`,
> - List item 1
> - List item 2
`,
expectedDoc: doc(
blockquote(
sourceAttrs('0:31', '> - List item 1\n> - List item 2'),
bulletList(
sourceAttrs('2:31', '- List item 1\n> - List item 2'),
listItem(sourceAttrs('2:15', '- List item 1'), paragraph('List item 1')),
listItem(sourceAttrs('18:31', '- List item 2'), paragraph('List item 2')),
),
),
),
},
{
markdown: `
* List item 1
* List item 2
`,
},
{
markdown: `
+ List item 1
+ List item 2
`,
},
{
markdown: `
1. List item 1
1. List item 2
`,
},
{
markdown: `
1. List item 1
2. List item 2
`,
},
{
markdown: `
1) List item 1
2) List item 2
`,
},
{
markdown: `
- List item 1
- Sub list item 1
`,
},
{
markdown: `
- List item 1 paragraph 1
code block
List item 1 paragraph 2
- List item 2
`,
},
{
markdown: `
> This is a blockquote
`,
},
{
markdown: `
> - List item 1
> - List item 2
`,
},
{
markdown: `
const fn = () => 'GitLab';
`,
},
{
markdown: `
\`\`\`javascript
const fn = () => 'GitLab';
\`\`\`\
`,
},
{
markdown: `
~~~javascript
const fn = () => 'GitLab';
~~~
`,
},
{
markdown: `
\`\`\`
\`\`\`\
`,
},
{
markdown: `
\`\`\`javascript
const fn = () => 'GitLab';
const fn = () => 'GitLab';
\`\`\`\
`,
`,
expectedDoc: doc(
paragraph(sourceAttrs('0:10', 'code block'), 'code block'),
codeBlock(
{
...sourceAttrs('12:42', " const fn = () => 'GitLab';"),
class: 'code highlight',
language: null,
},
"const fn = () => 'GitLab';",
),
),
},
])('processes %s correctly', async ({ markdown }) => {
{
markdown: `
\`\`\`javascript
const fn = () => 'GitLab';
\`\`\`\
`,
expectedDoc: doc(
codeBlock(
{
...sourceAttrs('0:44', "```javascript\nconst fn = () => 'GitLab';\n```"),
class: 'code highlight',
language: 'javascript',
},
"const fn = () => 'GitLab';",
),
),
},
{
markdown: `
~~~javascript
const fn = () => 'GitLab';
~~~
`,
expectedDoc: doc(
codeBlock(
{
...sourceAttrs('0:44', "~~~javascript\nconst fn = () => 'GitLab';\n~~~"),
class: 'code highlight',
language: 'javascript',
},
"const fn = () => 'GitLab';",
),
),
},
{
markdown: `
\`\`\`
\`\`\`\
`,
expectedDoc: doc(
codeBlock(
{
...sourceAttrs('0:7', '```\n```'),
class: 'code highlight',
language: null,
},
'',
),
),
},
{
markdown: `
\`\`\`javascript
const fn = () => 'GitLab';
\`\`\`\
`,
expectedDoc: doc(
codeBlock(
{
...sourceAttrs('0:45', "```javascript\nconst fn = () => 'GitLab';\n\n```"),
class: 'code highlight',
language: 'javascript',
},
"const fn = () => 'GitLab';\n",
),
),
},
])('processes %s correctly', async ({ markdown, expectedDoc }) => {
const trimmed = markdown.trim();
const document = await deserialize(trimmed);
expect(document.toJSON()).toEqual(expectedDoc.toJSON());
expect(serialize(document)).toEqual(trimmed);
});
});

View File

@ -3,19 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::ProjectTemplate do
include ProjectTemplateTestHelper
describe '.all' do
it 'returns all templates' do
expected = %w[
rails spring express iosswift dotnetcore android
gomicro gatsby hugo jekyll plainhtml gitbook
hexo middleman gitpod_spring_petclinic nfhugo
nfjekyll nfplainhtml nfgitbook nfhexo salesforcedx
serverless_framework tencent_serverless_framework
jsonnet cluster_management kotlin_native_linux
]
expect(described_class.all).to be_an(Array)
expect(described_class.all.map(&:name)).to match_array(expected)
expect(described_class.all.map(&:name)).to match_array(all_templates)
end
end

View File

@ -94,7 +94,7 @@ RSpec.describe BulkImports::FileTransfer::ProjectConfig do
describe '#file_relations' do
it 'returns project file relations' do
expect(subject.file_relations).to contain_exactly('uploads', 'lfs_objects', 'repository_bundle')
expect(subject.file_relations).to contain_exactly('uploads', 'lfs_objects', 'repository', 'design')
end
end
end

View File

@ -54,6 +54,18 @@ RSpec.describe ProjectStatistics do
end
end
describe 'namespace relatable columns' do
it 'treats the correct columns as namespace relatable' do
expect(described_class::NAMESPACE_RELATABLE_COLUMNS).to match_array %i[
repository_size
wiki_size
lfs_objects_size
uploads_size
container_registry_size
]
end
end
describe '#total_repository_size' do
it "sums repository and LFS object size" do
statistics.repository_size = 2

View File

@ -4,56 +4,34 @@ require 'spec_helper'
RSpec.describe BulkImports::FileExportService do
let_it_be(:project) { create(:project) }
let_it_be(:export_path) { Dir.mktmpdir }
let(:relation) { BulkImports::FileTransfer::BaseConfig::UPLOADS_RELATION }
subject(:service) { described_class.new(project, export_path, relation) }
describe '#execute' do
it 'executes export service and archives exported data' do
expect_next_instance_of(BulkImports::UploadsExportService) do |service|
expect(service).to receive(:execute)
end
it 'executes export service and archives exported data for each file relation' do
relations = {
'uploads' => BulkImports::UploadsExportService,
'lfs_objects' => BulkImports::LfsObjectsExportService,
'repository' => BulkImports::RepositoryBundleExportService,
'design' => BulkImports::RepositoryBundleExportService
}
expect(subject).to receive(:tar_cf).with(archive: File.join(export_path, 'uploads.tar'), dir: export_path)
relations.each do |relation, klass|
Dir.mktmpdir do |export_path|
service = described_class.new(project, export_path, relation)
subject.execute
end
expect_next_instance_of(klass) do |service|
expect(service).to receive(:execute)
end
context 'when relation is lfs objects' do
let(:relation) { BulkImports::FileTransfer::ProjectConfig::LFS_OBJECTS_RELATION }
expect(service).to receive(:tar_cf).with(archive: File.join(export_path, "#{relation}.tar"), dir: export_path)
it 'executes lfs objects export service' do
expect_next_instance_of(BulkImports::LfsObjectsExportService) do |service|
expect(service).to receive(:execute)
service.execute
end
expect(subject).to receive(:tar_cf).with(archive: File.join(export_path, 'lfs_objects.tar'), dir: export_path)
subject.execute
end
end
context 'when relation is repository bundle' do
let(:relation) { BulkImports::FileTransfer::ProjectConfig::REPOSITORY_BUNDLE_RELATION }
it 'executes repository bundle export service' do
expect_next_instance_of(BulkImports::RepositoryBundleExportService) do |service|
expect(service).to receive(:execute)
end
expect(subject)
.to receive(:tar_cf)
.with(archive: File.join(export_path, 'repository_bundle.tar'), dir: export_path)
subject.execute
end
end
context 'when unsupported relation is passed' do
it 'raises an error' do
service = described_class.new(project, export_path, 'unsupported')
service = described_class.new(project, nil, 'unsupported')
expect { service.execute }.to raise_error(BulkImports::Error, 'Unsupported relation export type')
end
@ -62,7 +40,9 @@ RSpec.describe BulkImports::FileExportService do
describe '#exported_filename' do
it 'returns filename of the exported file' do
expect(subject.exported_filename).to eq('uploads.tar')
service = described_class.new(project, nil, 'uploads')
expect(service.exported_filename).to eq('uploads.tar')
end
end
end

View File

@ -3,32 +3,44 @@
require 'spec_helper'
RSpec.describe BulkImports::RepositoryBundleExportService do
let(:project) { build(:project) }
let(:project) { create(:project) }
let(:export_path) { Dir.mktmpdir }
subject(:service) { described_class.new(project, export_path) }
subject(:service) { described_class.new(repository, export_path, export_filename) }
after do
FileUtils.remove_entry(export_path) if Dir.exist?(export_path)
end
describe '#execute' do
context 'when repository exists' do
it 'bundles repository to disk' do
allow(project.repository).to receive(:exists?).and_return(true)
expect(project.repository).to receive(:bundle_to_disk).with(File.join(export_path, 'project.bundle'))
shared_examples 'repository export' do
context 'when repository exists' do
it 'bundles repository to disk' do
allow(repository).to receive(:exists?).and_return(true)
expect(repository).to receive(:bundle_to_disk).with(File.join(export_path, "#{export_filename}.bundle"))
service.execute
service.execute
end
end
context 'when repository does not exist' do
it 'does not bundle repository to disk' do
allow(repository).to receive(:exists?).and_return(false)
expect(repository).not_to receive(:bundle_to_disk)
service.execute
end
end
end
context 'when repository does not exist' do
it 'does not bundle repository to disk' do
allow(project.repository).to receive(:exists?).and_return(false)
expect(project.repository).not_to receive(:bundle_to_disk)
include_examples 'repository export' do
let(:repository) { project.repository }
let(:export_filename) { 'repository' }
end
service.execute
end
include_examples 'repository export' do
let(:repository) { project.design_repository }
let(:export_filename) { 'design' }
end
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
module ProjectTemplateTestHelper
def all_templates
%w[
rails spring express iosswift dotnetcore android
gomicro gatsby hugo jekyll plainhtml gitbook
hexo middleman gitpod_spring_petclinic nfhugo
nfjekyll nfplainhtml nfgitbook nfhexo salesforcedx
serverless_framework tencent_serverless_framework
jsonnet cluster_management kotlin_native_linux
]
end
end
ProjectTemplateTestHelper.prepend_mod

View File

@ -0,0 +1,35 @@
# frozen_string_literal: true
RSpec.shared_examples 'correct pagination' do
it 'paginates correctly to page 3 and back' do
expect(page).to have_selector(item_selector, count: per_page)
page1_item_text = page.find(item_selector).text
click_next_page(next_button_selector)
expect(page).to have_selector(item_selector, count: per_page)
page2_item_text = page.find(item_selector).text
click_next_page(next_button_selector)
expect(page).to have_selector(item_selector, count: per_page)
page3_item_text = page.find(item_selector).text
click_prev_page(prev_button_selector)
expect(page3_item_text).not_to eql(page2_item_text)
expect(page.find(item_selector).text).to eql(page2_item_text)
click_prev_page(prev_button_selector)
expect(page.find(item_selector).text).to eql(page1_item_text)
expect(page).to have_selector(item_selector, count: per_page)
end
def click_next_page(next_button_selector)
page.find(next_button_selector).click
wait_for_requests
end
def click_prev_page(prev_button_selector)
page.find(prev_button_selector).click
wait_for_requests
end
end

View File

@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe 'projects/tags/index.html.haml' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:tags) { project.repository.tags }
let_it_be(:git_tag) { project.repository.tags.last }
let_it_be(:release) do
create(:release, project: project,
@ -25,9 +24,41 @@ RSpec.describe 'projects/tags/index.html.haml' do
allow(view).to receive(:current_user).and_return(project.namespace.owner)
end
it 'renders links to the Releases page for tags associated with a release' do
render
expect(rendered).to have_link(release.name, href: project_releases_path(project, anchor: release.tag))
context 'when tag is associated with a release' do
context 'with feature flag disabled' do
before do
stub_feature_flags(fix_release_path_in_tag_index_page: false)
end
it 'renders a link to the release page with anchor' do
render
expect(rendered).to have_link(release.name, href: project_releases_path(project, anchor: release))
end
end
context 'with feature flag enabled' do
before do
stub_feature_flags(fix_release_path_in_tag_index_page: true)
end
context 'when name contains backslash' do
let_it_be(:release) { create(:release, project: project, tag: 'test/v1') }
before_all do
project.repository.add_tag(project.owner, 'test/v1', project.default_branch_or_main)
project.repository.expire_tags_cache
project.releases.reload
assign(:tags, Kaminari.paginate_array(tags).page(0))
end
it 'renders a link to the release page with backslash escaped' do
render
expect(rendered).to have_link(release.name, href: project_release_path(project, release))
end
end
end
end
context 'when the most recent build for a tag has artifacts' do
@ -104,4 +135,8 @@ RSpec.describe 'projects/tags/index.html.haml' do
)
end
end
def tags
project.repository.tags
end
end