Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-10-14 15:11:00 +00:00
parent a0d49dc011
commit 1ab98e892c
38 changed files with 757 additions and 422 deletions

View File

@ -15,6 +15,7 @@ Gitlab/ServiceResponse:
- 'app/services/ci/retry_pipeline_service.rb'
- 'app/services/ci/runners/assign_runner_service.rb'
- 'app/services/ci/runners/register_runner_service.rb'
- 'app/services/ci/runners/set_runner_associated_projects_service.rb'
- 'app/services/concerns/alert_management/responses.rb'
- 'app/services/concerns/services/return_service_responses.rb'
- 'app/services/container_expiration_policies/update_service.rb'
@ -64,15 +65,18 @@ Gitlab/ServiceResponse:
- 'ee/app/services/vulnerability_issue_links/create_service.rb'
- 'ee/app/services/vulnerability_issue_links/delete_service.rb'
- 'ee/spec/graphql/mutations/security/finding/dismiss_spec.rb'
- 'spec/controllers/boards/issues_controller_spec.rb'
- 'spec/controllers/import/bulk_imports_controller_spec.rb'
- 'spec/controllers/import/fogbugz_controller_spec.rb'
- 'spec/controllers/projects/alerting/notifications_controller_spec.rb'
- 'spec/controllers/projects/issues_controller_spec.rb'
- 'spec/controllers/projects/pipelines_controller_spec.rb'
- 'spec/controllers/projects/prometheus/alerts_controller_spec.rb'
- 'spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb'
- 'spec/requests/api/ci/pipelines_spec.rb'
- 'spec/requests/api/ci/runner/runners_post_spec.rb'
- 'spec/requests/api/group_export_spec.rb'
- 'spec/requests/api/issues/issues_spec.rb'
- 'spec/requests/api/project_export_spec.rb'
- 'spec/requests/api/project_import_spec.rb'
- 'spec/requests/projects/incident_management/pagerduty_incidents_spec.rb'

View File

@ -1,21 +1,11 @@
import $ from 'jquery';
import { escape } from 'lodash';
import { groupsPath } from '~/vue_shared/components/group_select/utils';
import { __ } from '~/locale';
import Api from './api';
import { loadCSSFile } from './lib/utils/css_utils';
import { select2AxiosTransport } from './lib/utils/select2_utils';
// Resolve the API path used to fetch groups for the select2 widget.
// `groupsFilter` narrows the query to descendants or subgroups of
// `parentGroupID`; with no filter, the global groups endpoint is used.
const groupsPath = (groupsFilter, parentGroupID) => {
  if (groupsFilter === 'descendant_groups') {
    return Api.descendantGroupsPath.replace(':id', parentGroupID);
  }
  if (groupsFilter === 'subgroups') {
    return Api.subgroupsPath.replace(':id', parentGroupID);
  }
  return Api.groupsPath;
};
const groupsSelect = () => {
loadCSSFile(gon.select2_css_path)
.then(() => {

View File

@ -41,7 +41,7 @@ export default {
</gl-sprintf>
<gl-sprintf v-else-if="!updatedAt" :message="__('Edited by %{author}')">
<template #author>
<a :href="updatedByPath" class="author-link">
<a :href="updatedByPath" class="author-link gl-hover-text-decoration-underline">
<span>{{ updatedByName }}</span>
</a>
</template>
@ -51,7 +51,7 @@ export default {
<time-ago-tooltip :time="updatedAt" tooltip-placement="bottom" />
</template>
<template #author>
<a :href="updatedByPath" class="author-link">
<a :href="updatedByPath" class="author-link gl-hover-text-decoration-underline">
<span>{{ updatedByName }}</span>
</a>
</template>

View File

@ -229,7 +229,7 @@ export default {
</script>
<template>
<div class="detail-page-header-actions gl-display-flex">
<div class="detail-page-header-actions gl-display-flex gl-align-self-start">
<gl-dropdown
v-if="hasMobileDropdown"
class="gl-sm-display-none! w-100"

View File

@ -1,5 +1,6 @@
<script>
import { GlFormCheckbox } from '@gitlab/ui';
import { s__ } from '~/locale';
import checkedRunnerIdsQuery from '../graphql/list/checked_runner_ids.query.graphql';
export default {
@ -34,6 +35,9 @@ export default {
indeterminate() {
return !this.checked && this.runners.some(this.isChecked);
},
label() {
return this.checked ? s__('Runners|Unselect all') : s__('Runners|Select all');
},
},
methods: {
isChecked({ id }) {
@ -51,6 +55,7 @@ export default {
<template>
<gl-form-checkbox
:aria-label="label"
:indeterminate="indeterminate"
:checked="checked"
:disabled="disabled"

View File

@ -33,7 +33,7 @@ export default {
:title="confidentialTooltip"
icon="eye-slash"
variant="warning"
class="gl-display-inline gl-mr-2"
class="gl-display-inline gl-mr-3"
>{{ __('Confidential') }}</gl-badge
>
</template>

View File

@ -0,0 +1,15 @@
import Api from '~/api';
// Resolve the API path used to fetch groups.
// `groupsFilter` narrows the query to descendants or subgroups of
// `parentGroupID`; with no filter, the global groups endpoint is used.
// Throws when a filter is given without a parent group to scope it to.
export const groupsPath = (groupsFilter, parentGroupID) => {
  if (groupsFilter !== undefined && parentGroupID === undefined) {
    throw new Error('Cannot use groupsFilter without a parentGroupID');
  }
  if (groupsFilter === 'descendant_groups') {
    return Api.descendantGroupsPath.replace(':id', parentGroupID);
  }
  if (groupsFilter === 'subgroups') {
    return Api.subgroupsPath.replace(':id', parentGroupID);
  }
  return Api.groupsPath;
};

View File

@ -5,6 +5,7 @@ import { helpPagePath } from '~/helpers/help_page_helper';
import { getDraft, clearDraft, updateDraft } from '~/lib/utils/autosave';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import { __, s__ } from '~/locale';
import EditedAt from '~/issues/show/components/edited.vue';
import Tracking from '~/tracking';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
import workItemQuery from '../graphql/work_item.query.graphql';
@ -16,6 +17,7 @@ export default {
SafeHtml: GlSafeHtmlDirective,
},
components: {
EditedAt,
GlButton,
GlFormGroup,
MarkdownField,
@ -89,6 +91,15 @@ export default {
workItemType() {
return this.workItem?.workItemType?.name;
},
lastEditedAt() {
return this.workItemDescription?.lastEditedAt;
},
lastEditedByName() {
return this.workItemDescription?.lastEditedBy?.name;
},
lastEditedByPath() {
return this.workItemDescription?.lastEditedBy?.webPath;
},
markdownPreviewPath() {
return `${gon.relative_url_root || ''}/${this.fullPath}/preview_markdown?target_type=${
this.workItemType
@ -235,5 +246,11 @@ export default {
<div v-if="descriptionEmpty" class="gl-text-secondary gl-mb-5">{{ __('None') }}</div>
<div v-else v-safe-html="descriptionHtml" class="md gl-mb-5 gl-min-h-8"></div>
<edited-at
v-if="lastEditedAt"
:updated-at="lastEditedAt"
:updated-by-name="lastEditedByName"
:updated-by-path="lastEditedByPath"
/>
</div>
</template>

View File

@ -6,6 +6,11 @@ fragment WorkItemWidgets on WorkItemWidget {
type
description
descriptionHtml
lastEditedAt
lastEditedBy {
name
webPath
}
}
... on WorkItemWidgetAssignees {
type

View File

@ -157,9 +157,9 @@ module IssuablesHelper
if issuable.respond_to?(:work_item_type) && WorkItems::Type::WI_TYPES_WITH_CREATED_HEADER.include?(issuable.work_item_type.base_type)
output << content_tag(:span, sprite_icon("#{issuable.work_item_type.icon_name}", css_class: 'gl-icon gl-vertical-align-middle gl-text-gray-500'), class: 'gl-mr-2', aria: { hidden: 'true' })
output << s_('IssuableStatus|%{wi_type} created %{created_at} by ').html_safe % { wi_type: issuable.issue_type.capitalize, created_at: time_ago_with_tooltip(issuable.created_at) }
output << content_tag(:span, s_('IssuableStatus|%{wi_type} created %{created_at} by ').html_safe % { wi_type: issuable.issue_type.capitalize, created_at: time_ago_with_tooltip(issuable.created_at) }, class: 'gl-mr-2' )
else
output << s_('IssuableStatus|Created %{created_at} by').html_safe % { created_at: time_ago_with_tooltip(issuable.created_at) }
output << content_tag(:span, s_('IssuableStatus|Created %{created_at} by').html_safe % { created_at: time_ago_with_tooltip(issuable.created_at) }, class: 'gl-mr-2' )
end
if issuable.is_a?(Issue) && issuable.service_desk_reply_to

View File

@ -15,7 +15,77 @@ module Integrations
TAG_KEY_VALUE_RE = %r{\A [\w-]+ : .*\S.* \z}x.freeze
prop_accessor :datadog_site, :api_url, :api_key, :datadog_service, :datadog_env, :datadog_tags
field :datadog_site,
placeholder: DEFAULT_DOMAIN,
help: -> do
ERB::Util.html_escape(
s_('DatadogIntegration|The Datadog site to send data to. To send data to the EU site, use %{codeOpen}datadoghq.eu%{codeClose}.')
) % {
codeOpen: '<code>'.html_safe,
codeClose: '</code>'.html_safe
}
end
field :api_url,
exposes_secrets: true,
title: -> { s_('DatadogIntegration|API URL') },
help: -> { s_('DatadogIntegration|(Advanced) The full URL for your Datadog site.') }
field :api_key,
type: 'password',
title: -> { _('API key') },
non_empty_password_title: -> { s_('ProjectService|Enter new API key') },
non_empty_password_help: -> { s_('ProjectService|Leave blank to use your current API key') },
help: -> do
ERB::Util.html_escape(
s_('DatadogIntegration|%{linkOpen}API key%{linkClose} used for authentication with Datadog.')
) % {
linkOpen: %Q{<a href="#{URL_API_KEYS_DOCS}" target="_blank" rel="noopener noreferrer">}.html_safe,
linkClose: '</a>'.html_safe
}
end,
required: true
field :archive_trace_events,
storage: :attribute,
type: 'checkbox',
title: -> { s_('Logs') },
checkbox_label: -> { s_('Enable logs collection') },
help: -> { s_('When enabled, job logs are collected by Datadog and displayed along with pipeline execution traces.') }
field :datadog_service,
title: -> { s_('DatadogIntegration|Service') },
placeholder: 'gitlab-ci',
help: -> { s_('DatadogIntegration|Tag all data from this GitLab instance in Datadog. Useful when managing several self-managed deployments.') }
field :datadog_env,
title: -> { s_('DatadogIntegration|Environment') },
placeholder: 'ci',
help: -> do
ERB::Util.html_escape(
s_('DatadogIntegration|For self-managed deployments, set the %{codeOpen}env%{codeClose} tag for all the data sent to Datadog. %{linkOpen}How do I use tags?%{linkClose}')
) % {
codeOpen: '<code>'.html_safe,
codeClose: '</code>'.html_safe,
linkOpen: '<a href="https://docs.datadoghq.com/getting_started/tagging/#using-tags" target="_blank" rel="noopener noreferrer">'.html_safe,
linkClose: '</a>'.html_safe
}
end
field :datadog_tags,
type: 'textarea',
title: -> { s_('DatadogIntegration|Tags') },
placeholder: "tag:value\nanother_tag:value",
help: -> do
ERB::Util.html_escape(
s_('DatadogIntegration|Custom tags in Datadog. Enter one tag per line in the %{codeOpen}key:value%{codeClose} format. %{linkOpen}How do I use tags?%{linkClose}')
) % {
codeOpen: '<code>'.html_safe,
codeClose: '</code>'.html_safe,
linkOpen: '<a href="https://docs.datadoghq.com/getting_started/tagging/#using-tags" target="_blank" rel="noopener noreferrer">'.html_safe,
linkClose: '</a>'.html_safe
}
end
before_validation :strip_properties
@ -68,87 +138,6 @@ module Integrations
'datadog'
end
def fields
[
{
type: 'text',
name: 'datadog_site',
placeholder: DEFAULT_DOMAIN,
help: ERB::Util.html_escape(
s_('DatadogIntegration|The Datadog site to send data to. To send data to the EU site, use %{codeOpen}datadoghq.eu%{codeClose}.')
) % {
codeOpen: '<code>'.html_safe,
codeClose: '</code>'.html_safe
},
required: false
},
{
type: 'text',
name: 'api_url',
title: s_('DatadogIntegration|API URL'),
help: s_('DatadogIntegration|(Advanced) The full URL for your Datadog site.'),
required: false
},
{
type: 'password',
name: 'api_key',
title: _('API key'),
non_empty_password_title: s_('ProjectService|Enter new API key'),
non_empty_password_help: s_('ProjectService|Leave blank to use your current API key'),
help: ERB::Util.html_escape(
s_('DatadogIntegration|%{linkOpen}API key%{linkClose} used for authentication with Datadog.')
) % {
linkOpen: %Q{<a href="#{URL_API_KEYS_DOCS}" target="_blank" rel="noopener noreferrer">}.html_safe,
linkClose: '</a>'.html_safe
},
required: true
},
{
type: 'checkbox',
name: 'archive_trace_events',
title: s_('Logs'),
checkbox_label: s_('Enable logs collection'),
help: s_('When enabled, job logs are collected by Datadog and displayed along with pipeline execution traces.'),
required: false
},
{
type: 'text',
name: 'datadog_service',
title: s_('DatadogIntegration|Service'),
placeholder: 'gitlab-ci',
help: s_('DatadogIntegration|Tag all data from this GitLab instance in Datadog. Useful when managing several self-managed deployments.')
},
{
type: 'text',
name: 'datadog_env',
title: s_('DatadogIntegration|Environment'),
placeholder: 'ci',
help: ERB::Util.html_escape(
s_('DatadogIntegration|For self-managed deployments, set the %{codeOpen}env%{codeClose} tag for all the data sent to Datadog. %{linkOpen}How do I use tags?%{linkClose}')
) % {
codeOpen: '<code>'.html_safe,
codeClose: '</code>'.html_safe,
linkOpen: '<a href="https://docs.datadoghq.com/getting_started/tagging/#using-tags" target="_blank" rel="noopener noreferrer">'.html_safe,
linkClose: '</a>'.html_safe
}
},
{
type: 'textarea',
name: 'datadog_tags',
title: s_('DatadogIntegration|Tags'),
placeholder: "tag:value\nanother_tag:value",
help: ERB::Util.html_escape(
s_('DatadogIntegration|Custom tags in Datadog. Enter one tag per line in the %{codeOpen}key:value%{codeClose} format. %{linkOpen}How do I use tags?%{linkClose}')
) % {
codeOpen: '<code>'.html_safe,
codeClose: '</code>'.html_safe,
linkOpen: '<a href="https://docs.datadoghq.com/getting_started/tagging/#using-tags" target="_blank" rel="noopener noreferrer">'.html_safe,
linkClose: '</a>'.html_safe
}
}
]
end
override :hook_url
def hook_url
url = api_url.presence || sprintf(URL_TEMPLATE, datadog_domain: datadog_domain)

View File

@ -2,7 +2,7 @@
- badge_classes = 'issuable-status-badge gl-mr-3'
.detail-page-header
.detail-page-header-body
.detail-page-header-body.gl-flex-wrap-wrap
= gl_badge_tag({ variant: :info, icon: 'issue-closed', icon_classes: 'gl-mr-0!' }, { class: "#{issue_status_visibility(issuable, status_box: :closed)} #{badge_classes} issuable-status-badge-closed" }) do
.gl-display-none.gl-sm-display-block.gl-ml-2
= issue_closed_text(issuable, current_user)
@ -13,9 +13,8 @@
%span.gl-display-none.gl-sm-display-block.gl-ml-2
= _('Open')
.issuable-meta
#js-issuable-header-warnings{ data: { hidden: issue_hidden?(issuable).to_s } }
= issuable_meta(issuable, @project)
#js-issuable-header-warnings{ data: { hidden: issue_hidden?(issuable).to_s } }
= issuable_meta(issuable, @project)
%a.btn.gl-button.btn-default.btn-icon.float-right.gl-display-block.d-sm-none.gutter-toggle.issuable-gutter-toggle.js-sidebar-toggle{ href: "#" }
= sprite_icon('chevron-double-lg-left')

29
bin/diagnostic-reports-uploader Executable file
View File

@ -0,0 +1,29 @@
#!/usr/bin/env ruby
# frozen_string_literal: true

# Standalone entry point that uploads Puma diagnostic reports to a GCS
# bucket and then cleans them up. Configuration comes entirely from ENV;
# the script fails fast with a descriptive error when anything is missing.
require 'fog/google'
require_relative '../lib/gitlab/memory/reports_uploader'
require_relative '../lib/gitlab/memory/upload_and_cleanup_reports'
require_relative '../lib/gitlab/memory/diagnostic_reports_logger'

# Fail fast if the necessary ENV vars are not set.
# NOTE: `.to_s` guarantees a String, so only emptiness/existence needs checking.
reports_path = ENV["GITLAB_DIAGNOSTIC_REPORTS_PATH"].to_s
raise 'GITLAB_DIAGNOSTIC_REPORTS_PATH dir is missing' unless Dir.exist?(reports_path)

gcs_key = ENV["GITLAB_GCP_KEY_PATH"].to_s
raise "GCS keyfile not found: #{gcs_key}" unless File.exist?(gcs_key)

gcs_project = ENV["GITLAB_DIAGNOSTIC_REPORTS_PROJECT"].to_s
raise 'GITLAB_DIAGNOSTIC_REPORTS_PROJECT is missing' if gcs_project.empty?

gcs_bucket = ENV["GITLAB_DIAGNOSTIC_REPORTS_BUCKET"].to_s
raise 'GITLAB_DIAGNOSTIC_REPORTS_BUCKET is missing' if gcs_bucket.empty?

# Structured JSON log lives alongside the Rails logs.
rails_root = File.expand_path("..", __dir__)
log_file = File.expand_path('log/diagnostic_reports_json.log', rails_root)
logger = Gitlab::Memory::DiagnosticReportsLogger.new(log_file)

uploader = Gitlab::Memory::ReportsUploader.new(gcs_key: gcs_key, gcs_project: gcs_project, gcs_bucket: gcs_bucket,
  logger: logger)

Gitlab::Memory::UploadAndCleanupReports.new(uploader: uploader, reports_path: reports_path, logger: logger).call

View File

@ -1,8 +0,0 @@
---
name: gitlab_diagnostic_reports_uploader
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97155
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/372771
milestone: '15.4'
type: ops
group: group::application performance
default_enabled: false

View File

@ -6,13 +6,4 @@ return unless Gitlab::Runtime.puma?
Gitlab::Cluster::LifecycleEvents.on_worker_start do
Gitlab::Memory::ReportsDaemon.instance.start
# Avoid concurrent uploads, so thread out from a single worker.
# We want only one uploader thread running for the Puma cluster.
# We do not spawn a thread from the `master`, to keep its state pristine.
# This should have a minimal impact on the given worker.
if ::Prometheus::PidProvider.worker_id == 'puma_0'
reports_watcher = Gitlab::Memory::UploadAndCleanupReports.new
Gitlab::BackgroundTask.new(reports_watcher).start
end
end

View File

@ -140,3 +140,7 @@ rules apply:
For example, if you purchase a license for 100 users, you can have 110 users when you add
your license. However, if you have 111 users, you must purchase more users before you can add
the license.
### `Start GitLab Ultimate trial` still displays after adding license
To fix this issue, restart [Puma or your entire GitLab instance](../../administration/restart_gitlab.md).

View File

@ -84,7 +84,7 @@ You can authenticate using:
- Your GitLab username and password.
- A [personal access token](../../../user/profile/personal_access_tokens.md) with the scope set to `read_registry` and `write_registry`.
- A [group deploy token](../../../user/project/deploy_tokens/index.md#group-deploy-token) with the scope set to `read_registry` and `write_registry`.
- A [group deploy token](../../../user/project/deploy_tokens/index.md) with the scope set to `read_registry` and `write_registry`.
Users accessing the Dependency Proxy with a personal access token or username and password must
have at least the Guest role for the group they pull images from.

View File

@ -53,7 +53,7 @@ For most package types, the following credential types are valid:
- [Project deploy token](../../project/deploy_tokens/index.md):
allows access to all packages in a project. Good for granting and revoking project access to many
users.
- [Group deploy token](../../project/deploy_tokens/index.md#group-deploy-token):
- [Group deploy token](../../project/deploy_tokens/index.md):
allows access to all packages in a group and its subgroups. Good for granting and revoking access
to a large number of packages to sets of users.
- [Job token](../../../ci/jobs/ci_job_token.md):

Binary file not shown.

Before

Width:  |  Height:  |  Size: 34 KiB

View File

@ -83,158 +83,150 @@ Deploy tokens can't be used with the GitLab public API. However, you can use dep
endpoints, such as those from the Package Registry. For more information, see
[Authenticate with the registry](../../packages/package_registry/index.md#authenticate-with-the-registry).
## Creating a Deploy token
## Create a deploy token
You can create as many deploy tokens as you need from the settings of your
project. Alternatively, you can also create [group-scoped deploy tokens](#group-deploy-token).
Create a deploy token to automate deployment tasks that can run independently of a user account.
Prerequisites:
- You must have at least the Maintainer role for the project or group.
1. Sign in to your GitLab account.
1. On the top bar, select **Main menu**, and:
- For a project, select **Projects** and find your project.
- For a group, select **Groups** and find your group.
- For a project deploy token, select **Projects** and find your project.
- For a group deploy token, select **Groups** and find your group.
1. On the left sidebar, select **Settings > Repository**.
1. Expand **Deploy tokens**.
1. Choose a name, and optionally, an expiration date and username for the token.
1. Choose the [desired scopes](#scope).
1. Complete the fields, and select the desired [scopes](#scope).
1. Select **Create deploy token**.
Save the deploy token somewhere safe. After you leave or refresh
the page, **you can't access it again**.
Record the deploy token's values. After you leave or refresh the page, **you cannot access it
again**.
![Deploy tokens page](img/deploy_tokens_ui.png)
## Revoke a deploy token
## Revoking a deploy token
Revoke a token when it's no longer required.
Prerequisites:
- You must have at least the Maintainer role for the project or group.
To revoke a deploy token:
1. On the top bar, select **Main menu**, and:
- For a project, select **Projects** and find your project.
- For a group, select **Groups** and find your group.
- For a project deploy token, select **Projects** and find your project.
- For a group deploy token, select **Groups** and find your group.
1. On the left sidebar, select **Settings > Repository**.
1. Expand **Deploy tokens**.
1. In the **Active Deploy Tokens** section, by the token you want to revoke, select **Revoke**.
## Usage
## Clone a repository
### Git clone a repository
You can use a deploy token to clone a repository.
To download a repository using a deploy token:
Prerequisites:
1. Create a deploy token with `read_repository` as a scope.
1. Take note of your `username` and `token`.
1. `git clone` the project using the deploy token:
- A deploy token with the `read_repository` scope.
```shell
git clone https://<username>:<deploy_token>@gitlab.example.com/tanuki/awesome_project.git
```
Example of using a deploy token to clone a repository:
Replace `<username>` and `<deploy_token>` with the proper values.
```shell
git clone https://<username>:<deploy_token>@gitlab.example.com/tanuki/awesome_project.git
```
### Read Container Registry images
## Pull images from a container registry
To read the container registry images, you must:
You can use a deploy token to pull images from a container registry.
1. Create a deploy token with `read_registry` as a scope.
1. Take note of your `username` and `token`.
1. Sign in to the GitLab Container Registry using the deploy token:
Prerequisites:
- A deploy token with the `read_registry` scope.
Example of using a deploy token to pull images from a container registry:
```shell
docker login -u <username> -p <deploy_token> registry.example.com
docker pull $CONTAINER_TEST_IMAGE
```
Replace `<username>` and `<deploy_token>` with the proper values. You can now
pull images from your Container Registry.
## Push images to a container registry
### Push Container Registry images
You can use a deploy token to push images to a container registry.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/22743) in GitLab 12.10.
Prerequisites:
To push the container registry images, you must:
- A deploy token with the `write_registry` scope.
1. Create a deploy token with `write_registry` as a scope.
1. Take note of your `username` and `token`.
1. Sign in to the GitLab Container Registry using the deploy token:
Example of using a deploy token to push an image to a container registry:
```shell
docker login -u <username> -p <deploy_token> registry.example.com
```
```shell
docker login -u <username> -p <deploy_token> registry.example.com
docker push $CONTAINER_TEST_IMAGE
```
Replace `<username>` and `<deploy_token>` with the proper values. You can now
push images to your Container Registry.
### Read or pull packages
## Pull packages from a package registry
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/213566) in GitLab 13.0.
To pull packages in the GitLab package registry, you must:
You can use a deploy token to pull packages from a package registry.
1. Create a deploy token with `read_package_registry` as a scope.
1. Take note of your `username` and `token`.
1. For the [package type of your choice](../../packages/index.md), follow the
authentication instructions for deploy tokens.
Prerequisites:
Example request publishing a NuGet package using a deploy token:
- A deploy token with the `read_package_registry` scope.
For the [package type of your choice](../../packages/index.md), follow the authentication
instructions for deploy tokens.
Example of installing a NuGet package from a GitLab registry:
```shell
nuget source Add -Name GitLab -Source "https://gitlab.example.com/api/v4/projects/10/packages/nuget/index.json" -UserName deploy-token-username -Password 12345678asdf
nuget source Add -Name GitLab -Source "https://gitlab.example.com/api/v4/projects/10/packages/nuget/index.json" -UserName <username> -Password <deploy_token>
nuget install mypkg.nupkg
```
## Push packages to a package repository
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/213566) in GitLab 13.0.
You can use a deploy token to push packages to a GitLab package registry.
Prerequisites:
- A deploy token with the `write_package_registry` scope.
For the [package type of your choice](../../packages/index.md), follow the authentication
instructions for deploy tokens.
Example of publishing a NuGet package to a package registry:
```shell
nuget source Add -Name GitLab -Source "https://gitlab.example.com/api/v4/projects/10/packages/nuget/index.json" -UserName <username> -Password <deploy_token>
nuget push mypkg.nupkg -Source GitLab
```
### Push or upload packages
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/213566) in GitLab 13.0.
To upload packages in the GitLab package registry, you must:
1. Create a deploy token with `write_package_registry` as a scope.
1. Take note of your `username` and `token`.
1. For the [package type of your choice](../../packages/index.md), follow the
authentication instructions for deploy tokens.
### Group deploy token
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/21765) in GitLab 12.9.
A deploy token created at the group level can be used across all projects that
belong either to the specific group or to one of its subgroups.
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
For an overview, see [Group Deploy Tokens](https://youtu.be/8kxTJvaD9ks).
The Group deploy tokens UI is now accessible under **Settings > Repository**,
not **Settings > CI/CD** as indicated in the video.
To use a group deploy token:
1. [Create](#creating-a-deploy-token) a deploy token for a group.
1. Use it the same way you use a project deploy token when
[cloning a repository](#git-clone-a-repository).
The scopes applied to a group deploy token (such as `read_repository`)
apply consistently when cloning the repository of related projects.
### Pull images from the Dependency Proxy
## Pull images from the dependency proxy
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/280586) in GitLab 14.2.
To pull images from the Dependency Proxy, you must:
You can use a deploy token to pull images from the dependency proxy.
1. Create a group deploy token with both `read_registry` and `write_registry` scopes.
1. Take note of your `username` and `token`.
1. Follow the Dependency Proxy [authentication instructions](../../packages/dependency_proxy/index.md).
Prerequisites:
- A deploy token with `read_registry` and `write_registry` scopes.
Follow the dependency proxy [authentication instructions](../../packages/dependency_proxy/index.md).
## Troubleshooting
### Group deploy tokens and LFS
### Error: `api error: Repository or object not found:`
A bug
[prevents Group Deploy Tokens from cloning LFS objects](https://gitlab.com/gitlab-org/gitlab/-/issues/235398).
If you receive `404 Not Found` responses together with this error message,
use a project deploy token to work around the bug:
When using a group deploy token to clone from LFS objects, you might get `404 Not Found` responses
and this error message. This occurs because of a bug, documented in
[issue 235398](https://gitlab.com/gitlab-org/gitlab/-/issues/235398).
```plaintext
api error: Repository or object not found:
https://<URL-with-token>.git/info/lfs/objects/batch
Check that it exists and that you have proper access to it
```
The workaround is to use a project deploy token.

View File

@ -118,6 +118,22 @@ To change the assignee on a task:
1. From the dropdown list, select the users to add as an assignee.
1. Select any area outside the dropdown list.
## Assign labels to a task
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/339756) in GitLab 15.5 [with a flag](../administration/feature_flags.md) named `work_items_mvc_2`. Disabled by default.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available, ask an administrator to [enable the feature flag](../administration/feature_flags.md) named `work_items_mvc_2`.
On GitLab.com, this feature is not available.
This feature is not ready for production use.
To add [labels](project/labels.md) to a task:
1. In the issue description, in the **Tasks** section, select the title of the task you want to edit. The task window opens.
1. Next to **Labels**, select **Add labels**.
1. From the dropdown list, select the labels to add.
1. Select any area outside the dropdown list.
## Set a start and due date
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/365399) in GitLab 15.4 [with a flag](../administration/feature_flags.md) named `work_items_mvc_2`. Disabled by default.

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true

require 'logger'
require 'json' # JSON.generate is used below; do not rely on Rails having loaded it
require 'time' # provides Time#iso8601, which plain 'logger' does not pull in

module Gitlab
  module Memory
    # Logger that emits one JSON object per line (severity, UTC timestamp,
    # plus the fields of the Hash passed as the log message), suitable for
    # ingestion by structured-log pipelines.
    class DiagnosticReportsLogger < ::Logger
      # @param severity [String] log level name, e.g. "INFO"
      # @param timestamp [Time] time of the event; serialized as UTC ISO8601 with ms
      # @param progname [String, nil] unused, kept for the ::Logger formatter contract
      # @param message [Hash] structured payload merged into the JSON line
      # @return [String] a single newline-terminated JSON document
      def format_message(severity, timestamp, progname, message)
        data = {}
        data[:severity] = severity
        data[:time] = timestamp.utc.iso8601(3)
        data.merge!(message)

        "#{JSON.generate(data)}\n" # rubocop:disable Gitlab/Json
      end
    end
  end
end

View File

@ -1,35 +1,52 @@
# frozen_string_literal: true
require_relative '../metrics/system'
module Gitlab
module Memory
class ReportsUploader
# This is no-op currently, it will only write logs.
# The uploader implementation will be done in the next MR(s). For more details, check:
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97155#note_1099244930
def initialize(gcs_key:, gcs_project:, gcs_bucket:, logger:)
@gcs_bucket = gcs_bucket
@fog = Fog::Storage::Google.new(google_project: gcs_project, google_json_key_location: gcs_key)
@logger = logger
end
def upload(path)
log_upload_requested(path)
start_monotonic_time = Gitlab::Metrics::System.monotonic_time
false # nothing is uploaded in the current implementation
File.open(path.to_s) { |file| fog.put_object(gcs_bucket, File.basename(path), file) }
duration_s = Gitlab::Metrics::System.monotonic_time - start_monotonic_time
log_upload_success(path, duration_s)
rescue StandardError, Errno::ENOENT => error
log_exception(error)
end
private
attr_reader :gcs_bucket, :fog, :logger
def log_upload_requested(path)
Gitlab::AppLogger.info(log_labels.merge(perf_report_status: 'upload requested', perf_report_path: path))
logger.info(log_labels.merge(perf_report_status: 'upload requested', perf_report_path: path))
end
def log_upload_success(path, duration_s)
logger.info(log_labels.merge(perf_report_status: 'upload success', perf_report_path: path,
duration_s: duration_s))
end
def log_exception(error)
logger.error(log_labels.merge(perf_report_status: "error", error: error.message))
end
def log_labels
{
message: "Diagnostic reports",
class: self.class.name,
pid: $$,
worker_id: worker_id
pid: $$
}
end
def worker_id
::Prometheus::PidProvider.worker_id
end
end
end
end

View File

@ -6,32 +6,26 @@ module Gitlab
DEFAULT_SLEEP_TIME_SECONDS = 900 # 15 minutes
def initialize(
sleep_time_seconds: ENV['GITLAB_DIAGNOSTIC_REPORTS_UPLOADER_SLEEP_S']&.to_i || DEFAULT_SLEEP_TIME_SECONDS,
reports_path: ENV["GITLAB_DIAGNOSTIC_REPORTS_PATH"])
uploader:,
reports_path:,
logger:,
sleep_time_seconds: ENV['GITLAB_DIAGNOSTIC_REPORTS_UPLOADER_SLEEP_S']&.to_i || DEFAULT_SLEEP_TIME_SECONDS)
@sleep_time_seconds = sleep_time_seconds
@uploader = uploader
@reports_path = reports_path
unless @reports_path.present?
log_error_reports_path_missing
return
end
@uploader = ReportsUploader.new
@sleep_time_seconds = sleep_time_seconds
@alive = true
@logger = logger
end
attr_reader :sleep_time_seconds, :reports_path, :uploader, :alive
attr_reader :uploader, :reports_path, :sleep_time_seconds, :logger
def call
log_started
while alive
loop do
sleep(sleep_time_seconds)
next unless Feature.enabled?(:gitlab_diagnostic_reports_uploader, type: :ops)
files_to_process.each { |path| upload_and_cleanup!(path) }
end
end
@ -39,9 +33,11 @@ module Gitlab
private
def upload_and_cleanup!(path)
cleanup!(path) if uploader.upload(path)
rescue StandardError => error
uploader.upload(path)
rescue StandardError, Errno::ENOENT => error
log_exception(error)
ensure
cleanup!(path)
end
def cleanup!(path)
@ -56,30 +52,21 @@ module Gitlab
.select { |path| File.file?(path) }
end
def log_error_reports_path_missing
Gitlab::AppLogger.error(log_labels.merge(perf_report_status: "path is not configured"))
end
def log_started
Gitlab::AppLogger.info(log_labels.merge(perf_report_status: "started"))
logger.info(log_labels.merge(perf_report_status: "started"))
end
def log_exception(error)
Gitlab::ErrorTracking.log_exception(error, log_labels)
logger.error(log_labels.merge(perf_report_status: "error", error: error.message))
end
def log_labels
{
message: "Diagnostic reports",
class: self.class.name,
pid: $$,
worker_id: worker_id
pid: $$
}
end
def worker_id
::Prometheus::PidProvider.worker_id
end
end
end
end

View File

@ -8,6 +8,7 @@ module Gitlab
# but if they weren't, the routes will be drawn and available for the rest of
# application.
API::API.compile!
API::API.reset_routes!
API::API.routes.select { |route| route.app.options[:for] < API::Base }
end

View File

@ -15302,6 +15302,9 @@ msgstr ""
msgid "Epics|Add an existing epic"
msgstr ""
msgid "Epics|Are you sure you want to remove %{bStart}%{targetEpicTitle}%{bEnd} from %{bStart}%{parentEpicTitle}%{bEnd}?"
msgstr ""
msgid "Epics|Are you sure you want to remove %{bStart}%{targetIssueTitle}%{bEnd} from %{bStart}%{parentEpicTitle}%{bEnd}?"
msgstr ""
@ -34954,6 +34957,9 @@ msgstr ""
msgid "Runners|Runs untagged jobs"
msgstr ""
msgid "Runners|Select all"
msgstr ""
msgid "Runners|Select projects to assign to this runner"
msgstr ""
@ -35040,6 +35046,9 @@ msgstr ""
msgid "Runners|Token expiry"
msgstr ""
msgid "Runners|Unselect all"
msgstr ""
msgid "Runners|Up to date"
msgstr ""

View File

@ -0,0 +1,86 @@
# frozen_string_literal: true
require 'fast_spec_helper'
require 'tempfile'
RSpec.describe 'bin/diagnostic-reports-uploader' do
let(:reports_dir) { Dir.mktmpdir }
let(:gcs_key) { Tempfile.new }
let(:gcs_project) { 'test_gcs_project' }
let(:gcs_bucket) { 'test_gcs_bucket' }
after do
FileUtils.remove_entry(reports_dir)
FileUtils.remove_entry(gcs_key)
end
subject(:load_bin) { load File.expand_path('../../bin/diagnostic-reports-uploader', __dir__) }
context 'when necessary ENV vars are set' do
before do
stub_env('GITLAB_DIAGNOSTIC_REPORTS_PATH', reports_dir)
stub_env('GITLAB_GCP_KEY_PATH', gcs_key.path)
stub_env('GITLAB_DIAGNOSTIC_REPORTS_PROJECT', gcs_project)
stub_env('GITLAB_DIAGNOSTIC_REPORTS_BUCKET', gcs_bucket)
end
let(:reports_uploader) { instance_double(Gitlab::Memory::ReportsUploader) }
let(:upload_and_cleanup_reports) { instance_double(Gitlab::Memory::UploadAndCleanupReports) }
let(:logger) { instance_double(Gitlab::Memory::DiagnosticReportsLogger) }
it 'runs successfully' do
expect(Gitlab::Memory::DiagnosticReportsLogger).to receive(:new).and_return(logger)
expect(Gitlab::Memory::ReportsUploader)
.to receive(:new).with(gcs_key: gcs_key.path, gcs_project: gcs_project, gcs_bucket: gcs_bucket, logger: logger)
.and_return(reports_uploader)
expect(Gitlab::Memory::UploadAndCleanupReports)
.to receive(:new).with(uploader: reports_uploader, reports_path: reports_dir, logger: logger)
.and_return(upload_and_cleanup_reports)
expect(upload_and_cleanup_reports).to receive(:call)
load_bin
end
end
context 'when GITLAB_DIAGNOSTIC_REPORTS_PATH is missing' do
it 'raises RuntimeError' do
expect { load_bin }.to raise_error(RuntimeError, 'GITLAB_DIAGNOSTIC_REPORTS_PATH dir is missing')
end
end
context 'when GITLAB_GCP_KEY_PATH is missing' do
before do
stub_env('GITLAB_DIAGNOSTIC_REPORTS_PATH', reports_dir)
end
it 'raises RuntimeError' do
expect { load_bin }.to raise_error(RuntimeError, /GCS keyfile not found/)
end
end
context 'when GITLAB_DIAGNOSTIC_REPORTS_PROJECT is missing' do
before do
stub_env('GITLAB_DIAGNOSTIC_REPORTS_PATH', reports_dir)
stub_env('GITLAB_GCP_KEY_PATH', gcs_key.path)
end
it 'raises RuntimeError' do
expect { load_bin }.to raise_error(RuntimeError, 'GITLAB_DIAGNOSTIC_REPORTS_PROJECT is missing')
end
end
context 'when GITLAB_DIAGNOSTIC_REPORTS_BUCKET is missing' do
before do
stub_env('GITLAB_DIAGNOSTIC_REPORTS_PATH', reports_dir)
stub_env('GITLAB_GCP_KEY_PATH', gcs_key.path)
stub_env('GITLAB_DIAGNOSTIC_REPORTS_PROJECT', gcs_project)
end
it 'raises RuntimeError' do
expect { load_bin }.to raise_error(RuntimeError, 'GITLAB_DIAGNOSTIC_REPORTS_BUCKET is missing')
end
end
end

View File

@ -0,0 +1,83 @@
# frozen_string_literal: true
require 'fast_spec_helper'
require 'tempfile'
# We need to capture pid from Process.spawn and then clean up by killing the process, which requires instance variables.
# rubocop: disable RSpec/InstanceVariable
RSpec.describe 'bin/diagnostic-reports-uploader' do
# This is a smoke test for 'bin/diagnostic-reports-uploader'.
# We intend to run this binary with `ruby bin/diagnostic-reports-uploader`, without preloading the entire Rails app.
# Also, we use inline gemfile, to avoid pulling full Gemfile from the main app into memory.
# The goal of that test is to confirm that the binary starts that way.
# The implementation logic is covered in 'spec/bin/diagnostic_reports_uploader_spec.rb'
include FastRailsRoot
let(:gcs_bucket) { 'test_bucket' }
let(:gcs_project) { 'test_project' }
let(:gcs_key) { Tempfile.new }
let(:reports_dir) { Dir.mktmpdir }
let(:report) { Tempfile.new('report.json', reports_dir) }
let(:env) do
{
'GITLAB_DIAGNOSTIC_REPORTS_BUCKET' => gcs_bucket,
'GITLAB_DIAGNOSTIC_REPORTS_PROJECT' => gcs_project,
'GITLAB_GCP_KEY_PATH' => gcs_key.path,
'GITLAB_DIAGNOSTIC_REPORTS_PATH' => reports_dir,
'GITLAB_DIAGNOSTIC_REPORTS_UPLOADER_SLEEP_S' => '1'
}
end
before do
gcs_key.write(
{
type: "service_account",
client_email: 'test@gitlab.com',
private_key_id: "test_id",
private_key: File.read(rails_root_join('spec/fixtures/ssl_key.pem'))
}.to_json
)
gcs_key.rewind
FileUtils.touch(report.path)
end
after do
if @pid
Timeout.timeout(10) do
Process.kill('TERM', @pid)
Process.waitpid(@pid)
end
end
rescue Errno::ESRCH, Errno::ECHILD => _
# 'No such process' or 'No child processes' means the process died before
ensure
gcs_key.unlink
FileUtils.rm_rf(reports_dir, secure: true)
end
it 'starts successfully' do
expect(File.exist?(report.path)).to be true
bin_path = rails_root_join("bin/diagnostic-reports-uploader")
cmd = ['bundle', 'exec', 'ruby', bin_path]
@pid = Process.spawn(env, *cmd)
expect(Gitlab::ProcessManagement.process_alive?(@pid)).to be true
expect do
Timeout.timeout(10) do
# Uploader will remove the file, no matter the upload result. We are waiting for exactly that.
# The report being removed means the uploader loop works. We are not attempting real upload.
attempted_upload_and_cleanup = false
until attempted_upload_and_cleanup
sleep 1
attempted_upload_and_cleanup = !File.exist?(report.path)
end
end
end.not_to raise_error
end
end
# rubocop: enable RSpec/InstanceVariable

View File

@ -51,11 +51,9 @@ RSpec.describe ProductAnalyticsTracking, :snowplow do
end
end
def expect_tracking(user: self.user)
def expect_redis_hll_tracking
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to have_received(:track_event)
.with('g_analytics_valuestream', values: instance_of(String))
expect_snowplow_tracking(user)
end
def expect_snowplow_tracking(user)
@ -85,7 +83,8 @@ RSpec.describe ProductAnalyticsTracking, :snowplow do
it 'tracks the event' do
get :index
expect_tracking
expect_redis_hll_tracking
expect_snowplow_tracking(user)
end
context 'when FF is disabled' do
@ -105,7 +104,8 @@ RSpec.describe ProductAnalyticsTracking, :snowplow do
get :index
expect_tracking
expect_redis_hll_tracking
expect_snowplow_tracking(user)
end
it 'does not track the event if DNT is enabled' do
@ -145,7 +145,8 @@ RSpec.describe ProductAnalyticsTracking, :snowplow do
get :show, params: { id: 1 }
expect_tracking(user: nil)
expect_redis_hll_tracking
expect_snowplow_tracking(nil)
end
end
@ -159,16 +160,24 @@ RSpec.describe ProductAnalyticsTracking, :snowplow do
it 'tracks the event when there is custom id' do
get :show, params: { id: 1 }
expect_tracking(user: nil)
expect_redis_hll_tracking
expect_snowplow_tracking(nil)
end
it 'does not track the HLL event when there is no custom id' do
allow(controller).to receive(:get_custom_id).and_return(nil)
context 'when there is no custom_id set' do
before do
allow(controller).to receive(:get_custom_id).and_return(nil)
get :show, params: { id: 2 }
get :show, params: { id: 2 }
end
expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
expect_snowplow_tracking(nil)
it 'does not track the HLL event' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
end
it 'tracks Snowplow event' do
expect_snowplow_tracking(nil)
end
end
end
end

View File

@ -66,10 +66,26 @@ RSpec.describe "Admin Runners" do
it 'has all necessary texts' do
expect(page).to have_text "Register an instance runner"
expect(page).to have_text "#{s_('Runners|All')} 3"
expect(page).to have_text "#{s_('Runners|Online')} 1"
expect(page).to have_text "#{s_('Runners|Offline')} 2"
expect(page).to have_text "#{s_('Runners|Stale')} 1"
end
describe 'delete all runners in bulk' do
before do
check s_('Runners|Select all')
click_button s_('Runners|Delete selected')
within_modal do
click_on 'Permanently delete 3 runners'
end
wait_for_requests
end
it_behaves_like 'shows no runners registered'
end
end
it 'shows a job count' do

View File

@ -0,0 +1,24 @@
import { groupsPath } from '~/vue_shared/components/group_select/utils';
describe('group_select utils', () => {
describe('groupsPath', () => {
it.each`
groupsFilter | parentGroupID | expectedPath
${undefined} | ${undefined} | ${'/api/:version/groups.json'}
${undefined} | ${1} | ${'/api/:version/groups.json'}
${'descendant_groups'} | ${1} | ${'/api/:version/groups/1/descendant_groups'}
${'subgroups'} | ${1} | ${'/api/:version/groups/1/subgroups'}
`(
'returns $expectedPath with groupsFilter = $groupsFilter and parentGroupID = $parentGroupID',
({ groupsFilter, parentGroupID, expectedPath }) => {
expect(groupsPath(groupsFilter, parentGroupID)).toBe(expectedPath);
},
);
});
it('throws if groupsFilter is passed but parentGroupID is undefined', () => {
expect(() => {
groupsPath('descendant_groups');
}).toThrow('Cannot use groupsFilter without a parentGroupID');
});
});

View File

@ -4,6 +4,7 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mockTracking } from 'helpers/tracking_helper';
import waitForPromises from 'helpers/wait_for_promises';
import EditedAt from '~/issues/show/components/edited.vue';
import { updateDraft } from '~/lib/utils/autosave';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
@ -35,6 +36,7 @@ describe('WorkItemDescription', () => {
const findEditButton = () => wrapper.find('[data-testid="edit-description"]');
const findMarkdownField = () => wrapper.findComponent(MarkdownField);
const findEditedAt = () => wrapper.findComponent(EditedAt);
const editDescription = (newText) => wrapper.find('textarea').setValue(newText);
@ -44,9 +46,9 @@ describe('WorkItemDescription', () => {
const createComponent = async ({
mutationHandler = mutationSuccessHandler,
canUpdate = true,
workItemResponse = workItemResponseFactory({ canUpdate }),
isEditing = false,
} = {}) => {
const workItemResponse = workItemResponseFactory({ canUpdate });
const workItemResponseHandler = jest.fn().mockResolvedValue(workItemResponse);
const { id } = workItemQueryResponse.data.workItem;
@ -100,6 +102,33 @@ describe('WorkItemDescription', () => {
});
describe('editing description', () => {
it('shows edited by text', async () => {
const lastEditedAt = '2022-09-21T06:18:42Z';
const lastEditedBy = {
name: 'Administrator',
webPath: '/root',
};
await createComponent({
workItemResponse: workItemResponseFactory({
lastEditedAt,
lastEditedBy,
}),
});
expect(findEditedAt().props()).toEqual({
updatedAt: lastEditedAt,
updatedByName: lastEditedBy.name,
updatedByPath: lastEditedBy.webPath,
});
});
it('does not show edited by text', async () => {
await createComponent();
expect(findEditedAt().exists()).toBe(false);
});
it('cancels when clicking cancel', async () => {
await createComponent({
isEditing: true,

View File

@ -69,6 +69,8 @@ export const workItemQueryResponse = {
description: 'some **great** text',
descriptionHtml:
'<p data-sourcepos="1:1-1:19" dir="auto">some <strong>great</strong> text</p>',
lastEditedAt: null,
lastEditedBy: null,
},
{
__typename: 'WorkItemWidgetAssignees',
@ -187,6 +189,8 @@ export const workItemResponseFactory = ({
confidential = false,
canInviteMembers = false,
allowsScopedLabels = false,
lastEditedAt = null,
lastEditedBy = null,
parent = mockParent.parent,
} = {}) => ({
data: {
@ -221,6 +225,8 @@ export const workItemResponseFactory = ({
description: 'some **great** text',
descriptionHtml:
'<p data-sourcepos="1:1-1:19" dir="auto">some <strong>great</strong> text</p>',
lastEditedAt,
lastEditedBy,
},
assigneesWidgetPresent
? {
@ -362,6 +368,11 @@ export const createWorkItemFromTaskMutationResponse = {
type: 'DESCRIPTION',
description: 'New description',
descriptionHtml: '<p>New description</p>',
lastEditedAt: '2022-09-21T06:18:42Z',
lastEditedBy: {
name: 'Administrator',
webPath: '/root',
},
},
],
},

View File

@ -43,43 +43,6 @@ RSpec.describe 'diagnostic reports' do
load_initializer
end
context 'with `Gitlab::Memory::UploadAndCleanupReports` added into initializer' do
before do
allow(Gitlab::Memory::ReportsDaemon).to receive(:instance).and_return(report_daemon)
allow(report_daemon).to receive(:start)
end
context 'when run from `puma_0` worker process' do
let(:uploader) { instance_double(Gitlab::Memory::UploadAndCleanupReports) }
let(:background_task) { instance_double(Gitlab::BackgroundTask) }
before do
allow(Prometheus::PidProvider).to receive(:worker_id).and_return('puma_0')
end
it 'sets up `Gitlab::Memory::UploadAndCleanupReports` as `BackgroundTask`' do
expect(Gitlab::Memory::UploadAndCleanupReports).to receive(:new).and_return(uploader)
expect(Gitlab::BackgroundTask).to receive(:new).with(uploader).and_return(background_task)
expect(background_task).to receive(:start)
load_initializer
end
end
context 'when run from worker process other than `puma_0`' do
before do
allow(Prometheus::PidProvider).to receive(:worker_id).and_return('puma_1')
end
it 'does not set up `Gitlab::Memory::UploadAndCleanupReports`' do
expect(Gitlab::Memory::UploadAndCleanupReports).not_to receive(:new)
expect(Gitlab::BackgroundTask).not_to receive(:new)
load_initializer
end
end
end
end
context 'when run in non-Puma context, such as rails console, tests, Sidekiq' do

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
require 'fast_spec_helper'
RSpec.describe Gitlab::Memory::DiagnosticReportsLogger do
subject { described_class.new('/dev/null') }
let(:now) { Time.current }
describe '#format_message' do
it 'formats incoming hash properly' do
output = subject.format_message('INFO', now, 'test', { hello: 1 })
# Disabling the cop because it is not relevant, we encode with `JSON.generate`. Allows `fast_spec_helper`.
data = JSON.parse(output) # rubocop: disable Gitlab/Json
expect(data['severity']).to eq('INFO')
expect(data['time']).to eq(now.utc.iso8601(3))
expect(data['hello']).to eq(1)
expect(data['message']).to be_nil
end
end
end

View File

@ -3,19 +3,78 @@
require 'spec_helper'
RSpec.describe Gitlab::Memory::ReportsUploader, :aggregate_failures do
let(:uploader) { described_class.new }
let(:gcs_key) { 'test_gcs_key' }
let(:gcs_project) { 'test_gcs_project' }
let(:gcs_bucket) { 'test_gcs_bucket' }
let(:logger) { instance_double(Gitlab::Memory::DiagnosticReportsLogger) }
let(:path) { '/report/to/upload' }
let(:uploader) do
described_class.new(gcs_key: gcs_key, gcs_project: gcs_project, gcs_bucket: gcs_bucket, logger: logger)
end
# rubocop: disable RSpec/VerifiedDoubles
# `Fog::Storage::Google` does not implement `put_object` itself, so it is tricky to pinpoint particular method
# with instance_double without revealing `Fog::Storage::Google` internals. For simplicity, we use a simple double.
let(:fog) { double("Fog::Storage::Google") }
# rubocop: enable RSpec/VerifiedDoubles
let(:report) { Tempfile.new("report.1.worker_1.#{Time.current.to_i}.json") }
after do
FileUtils.remove_entry(report)
end
describe '#upload' do
# currently no-op
it 'logs and returns false' do
expect(Gitlab::AppLogger)
.to receive(:info)
.with(hash_including(:pid, :worker_id, message: "Diagnostic reports", perf_report_status: "upload requested",
class: 'Gitlab::Memory::ReportsUploader', perf_report_path: path))
before do
allow(Fog::Storage::Google)
.to receive(:new)
.with(google_project: gcs_project, google_json_key_location: gcs_key)
.and_return(fog)
end
expect(uploader.upload(path)).to be false
it 'calls fog, logs upload requested and success with duration' do
expect(logger)
.to receive(:info)
.with(hash_including(:pid, message: "Diagnostic reports", perf_report_status: "upload requested",
class: 'Gitlab::Memory::ReportsUploader', perf_report_path: report.path))
.ordered
expect(fog).to receive(:put_object).with(gcs_bucket, File.basename(report), instance_of(File))
expect(logger)
.to receive(:info)
.with(hash_including(:pid, :duration_s,
message: "Diagnostic reports", perf_report_status: "upload success",
class: 'Gitlab::Memory::ReportsUploader', perf_report_path: report.path))
.ordered
uploader.upload(report.path)
end
context 'when Google API responds with an error' do
let(:invalid_bucket) { 'WRONG BUCKET' }
let(:uploader) do
described_class.new(gcs_key: gcs_key, gcs_project: gcs_project, gcs_bucket: invalid_bucket, logger: logger)
end
it 'logs error raised by Fog and do not re-raise' do
expect(logger)
.to receive(:info)
.with(hash_including(:pid, message: "Diagnostic reports", perf_report_status: "upload requested",
class: 'Gitlab::Memory::ReportsUploader', perf_report_path: report.path))
expect(fog).to receive(:put_object).with(invalid_bucket, File.basename(report), instance_of(File))
.and_raise(Google::Apis::ClientError.new("invalid: Invalid bucket name: #{invalid_bucket}"))
expect(logger)
.to receive(:error)
.with(hash_including(:pid,
message: "Diagnostic reports", class: 'Gitlab::Memory::ReportsUploader',
perf_report_status: 'error', error: "invalid: Invalid bucket name: #{invalid_bucket}"))
expect { uploader.upload(report.path) }.not_to raise_error
end
end
end
end

View File

@ -3,158 +3,106 @@
require 'spec_helper'
RSpec.describe Gitlab::Memory::UploadAndCleanupReports, :aggregate_failures do
let(:uploader) { instance_double(Gitlab::Memory::ReportsUploader) }
let(:logger) { instance_double(Gitlab::Memory::DiagnosticReportsLogger) }
describe '#initalize' do
context 'when settings are passed through the environment' do
let(:reports_path) { '/path/to/reports' }
context 'when sleep_time_seconds is passed through the environment' do
before do
stub_env('GITLAB_DIAGNOSTIC_REPORTS_UPLOADER_SLEEP_S', '600')
stub_env('GITLAB_DIAGNOSTIC_REPORTS_PATH', '/path/to/reports')
end
it 'initializes with these settings' do
upload_and_cleanup = described_class.new
upload_and_cleanup = described_class.new(uploader: uploader, reports_path: reports_path, logger: logger)
expect(upload_and_cleanup.sleep_time_seconds).to eq(600)
expect(upload_and_cleanup.reports_path).to eq('/path/to/reports')
expect(upload_and_cleanup.alive).to be true
end
end
context 'when settings are passed through the initializer' do
context 'when sleep_time_seconds is passed through the initializer' do
it 'initializes with these settings' do
upload_and_cleanup = described_class.new(sleep_time_seconds: 600, reports_path: '/path/to/reports')
upload_and_cleanup = described_class.new(uploader: uploader, reports_path: reports_path, sleep_time_seconds: 60,
logger: logger)
expect(upload_and_cleanup.sleep_time_seconds).to eq(600)
expect(upload_and_cleanup.reports_path).to eq('/path/to/reports')
expect(upload_and_cleanup.alive).to be true
expect(upload_and_cleanup.sleep_time_seconds).to eq(60)
end
end
context 'when `sleep_time_seconds` is not passed' do
it 'initialized with the default' do
upload_and_cleanup = described_class.new(reports_path: '/path/to/reports')
upload_and_cleanup = described_class.new(uploader: uploader, reports_path: reports_path, logger: logger)
expect(upload_and_cleanup.sleep_time_seconds).to eq(described_class::DEFAULT_SLEEP_TIME_SECONDS)
expect(upload_and_cleanup.alive).to be true
end
end
shared_examples 'checks reports_path presence' do
it 'logs error and does not set `alive`' do
expect(Gitlab::AppLogger).to receive(:error)
.with(hash_including(
:pid, :worker_id,
message: "Diagnostic reports",
class: 'Gitlab::Memory::UploadAndCleanupReports',
perf_report_status: 'path is not configured'))
upload_and_cleanup = described_class.new(sleep_time_seconds: 600, reports_path: path)
expect(upload_and_cleanup.alive).to be_falsey
end
end
context 'when `reports_path` is nil' do
let(:path) { nil }
it_behaves_like 'checks reports_path presence'
end
context 'when `reports_path` is blank' do
let(:path) { '' }
it_behaves_like 'checks reports_path presence'
end
end
describe '#call' do
let(:upload_and_cleanup) do
described_class.new(sleep_time_seconds: 600, reports_path: dir).tap do |instance|
allow(instance).to receive(:sleep).and_return(nil)
allow(instance).to receive(:alive).and_return(true, false)
described_class.new(sleep_time_seconds: 600, reports_path: dir, uploader: uploader,
logger: logger).tap do |instance|
allow(instance).to receive(:loop).and_yield
allow(instance).to receive(:sleep)
end
end
let_it_be(:dir) { Dir.mktmpdir }
let(:dir) { Dir.mktmpdir }
after(:all) do
let(:reports_count) { 3 }
let(:reports) do
(1..reports_count).map do |i|
Tempfile.new("report.1.worker_#{i}.#{Time.current.to_i}.json", dir)
end
end
after do
FileUtils.remove_entry(dir)
end
context 'when `gitlab_diagnostic_reports_uploader` ops FF is enabled' do
let_it_be(:reports_count) { 3 }
it 'invokes the uploader and cleans the files' do
expect(logger)
.to receive(:info)
.with(hash_including(:pid,
message: "Diagnostic reports",
class: 'Gitlab::Memory::UploadAndCleanupReports',
perf_report_status: 'started'))
let_it_be(:reports) do
(1..reports_count).map do |i|
Tempfile.new("report.1.worker_#{i}.#{Time.current.to_i}.json", dir)
end
reports.each do |report|
expect(upload_and_cleanup.uploader).to receive(:upload).with(report.path)
end
let_it_be(:unfinished_report) do
unfinished_reports_dir = File.join(dir, 'tmp')
FileUtils.mkdir_p(unfinished_reports_dir)
Tempfile.new("report.10.worker_0.#{Time.current.to_i}.json", unfinished_reports_dir)
end
expect { upload_and_cleanup.call }
.to change { Dir.entries(dir).count { |e| e.match(/report.*/) } }
.from(reports_count).to(0)
end
let_it_be(:failed_to_upload_report) do
Tempfile.new("report.100.worker_0.#{Time.current.to_i}.json", dir)
end
context 'when there is an exception' do
let(:report) { Tempfile.new("report.1.worker_1.#{Time.current.to_i}.json", dir) }
it 'invokes the uploader and cleans only successfully uploaded files' do
expect(Gitlab::AppLogger)
it 'logs it and does not crash the loop' do
expect(logger)
.to receive(:info)
.with(hash_including(:pid, :worker_id,
.with(hash_including(:pid,
message: "Diagnostic reports",
class: 'Gitlab::Memory::UploadAndCleanupReports',
perf_report_status: 'started'))
.ordered
reports.each do |report|
expect(upload_and_cleanup.uploader).to receive(:upload).with(report.path).and_return(true)
end
expect(upload_and_cleanup.uploader)
.to receive(:upload)
.with(report.path)
.and_raise(StandardError, 'Error Message')
expect(upload_and_cleanup.uploader).not_to receive(:upload).with(unfinished_report.path)
expect(logger)
.to receive(:error)
.with(hash_including(:pid, message: "Diagnostic reports", class: 'Gitlab::Memory::UploadAndCleanupReports',
perf_report_status: 'error', error: 'Error Message'))
.ordered
expect(upload_and_cleanup.uploader).to receive(:upload).with(failed_to_upload_report.path).and_return(false)
expect { upload_and_cleanup.call }
.to change { Dir.entries(dir).count { |e| e.match(/report.*/) } }
.from(reports_count + 1).to(1)
end
context 'when there is an exception' do
it 'logs it and does not crash the loop' do
expect(upload_and_cleanup.uploader)
.to receive(:upload)
.at_least(:once)
.and_raise(StandardError, 'Error Message')
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
.with(an_instance_of(StandardError),
hash_including(:pid, :worker_id, message: "Diagnostic reports",
class: 'Gitlab::Memory::UploadAndCleanupReports'))
.at_least(:once)
expect { upload_and_cleanup.call }.not_to raise_error
end
end
end
context 'when `gitlab_diagnostic_reports_uploader` ops FF is disabled' do
let(:dir) { Dir.mktmpdir }
before do
stub_feature_flags(gitlab_diagnostic_reports_uploader: false)
Tempfile.new("report.1.worker_1.#{Time.current.to_i}.json", dir)
end
after do
FileUtils.remove_entry(dir)
end
it 'does not upload and remove any files' do
expect(upload_and_cleanup.uploader).not_to receive(:upload)
expect { upload_and_cleanup.call }.not_to change { Dir.entries(dir).count }
expect { upload_and_cleanup.call }.not_to raise_error
end
end
end

View File

@ -47,6 +47,10 @@ RSpec.describe Integrations::Datadog do
Gitlab::DataBuilder::ArchiveTrace.build(build)
end
it_behaves_like Integrations::ResetSecretFields do
let(:integration) { instance }
end
it_behaves_like Integrations::HasWebHook do
let(:integration) { instance }
let(:hook_url) { "#{described_class::URL_TEMPLATE % { datadog_domain: dd_site }}?dd-api-key={api_key}&env=#{dd_env}&service=#{dd_service}" }