Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-07-30 06:09:55 +00:00
parent 2dfc108817
commit 0629b10324
33 changed files with 584 additions and 154 deletions

View File

@@ -1 +1 @@
-f3778a1784e50c9640086fb3ef9cbf7ba4d80513
+8e731456707441f9e22bfb3b668885f0f983c449

View File

@@ -1 +1 @@
-2.5.0
+2.6.0

View File

@ -0,0 +1,33 @@
<script>
import { GlButton, GlTooltipDirective } from '@gitlab/ui';
import { s__ } from '~/locale';
import routes from '../router/constants';
export default {
components: {
GlButton,
},
directives: {
GlTooltip: GlTooltipDirective,
},
i18n: {
backToDashboard: s__('Metrics|Back to dashboard'),
},
routes,
};
</script>
<template>
<div class="gl-display-flex gl-align-items-baseline">
<gl-button
v-gl-tooltip
icon="go-back"
:to="{ name: $options.routes.DASHBOARD_PAGE, params: { dashboard: $route.params.dashboard } }"
:aria-label="$options.i18n.backToDashboard"
:title="$options.i18n.backToDashboard"
class="gl-mr-5"
/>
<h1 class="gl-mt-5 gl-font-size-h1">{{ s__('Metrics|Add panel') }}</h1>
<!-- TODO: Add components. See https://gitlab.com/groups/gitlab-org/-/epics/2882 -->
</div>
</template>

View File

@@ -1,4 +1,7 @@
-export const BASE_DASHBOARD_PAGE = 'dashboard';
-export const CUSTOM_DASHBOARD_PAGE = 'custom_dashboard';
+export const DASHBOARD_PAGE = 'dashboard';
+export const PANEL_NEW_PAGE = 'panel_new';

-export default {};
+export default {
+  DASHBOARD_PAGE,
+  PANEL_NEW_PAGE,
+};

View File

@@ -1,6 +1,7 @@
import DashboardPage from '../pages/dashboard_page.vue';
+import PanelNewPage from '../pages/panel_new_page.vue';
-import { BASE_DASHBOARD_PAGE, CUSTOM_DASHBOARD_PAGE } from './constants';
+import { DASHBOARD_PAGE, PANEL_NEW_PAGE } from './constants';

/**
 * Because the cluster health page uses the dashboard
@@ -11,13 +12,13 @@ import { DASHBOARD_PAGE, PANEL_NEW_PAGE } from './constants';
 */
export default [
  {
-    name: BASE_DASHBOARD_PAGE,
-    path: '/',
-    component: DashboardPage,
+    name: PANEL_NEW_PAGE,
+    path: '/:dashboard(.*)?/panel/new',
+    component: PanelNewPage,
  },
  {
-    name: CUSTOM_DASHBOARD_PAGE,
-    path: '/:dashboard(.*)',
+    name: DASHBOARD_PAGE,
+    path: '/:dashboard(.*)?',
    component: DashboardPage,
  },
];

View File

@@ -5,7 +5,7 @@ import SidebarTimeTracking from './components/time_tracking/sidebar_time_trackin
import SidebarAssignees from './components/assignees/sidebar_assignees.vue';
import ConfidentialIssueSidebar from './components/confidential/confidential_issue_sidebar.vue';
import SidebarMoveIssue from './lib/sidebar_move_issue';
-import LockIssueSidebar from './components/lock/lock_issue_sidebar.vue';
+import IssuableLockForm from './components/lock/issuable_lock_form.vue';
import sidebarParticipants from './components/participants/sidebar_participants.vue';
import sidebarSubscriptions from './components/subscriptions/sidebar_subscriptions.vue';
import Translate from '../vue_shared/translate';
@@ -95,7 +95,7 @@ function mountLockComponent() {
      fullPath,
    },
    render: createElement =>
-      createElement(LockIssueSidebar, {
+      createElement(IssuableLockForm, {
        props: {
          isEditable: initialData.is_editable,
        },

View File

@@ -1,7 +1,6 @@
<script>
-import { GlDeprecatedButton } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import { __ } from '~/locale';
-import Icon from '~/vue_shared/components/icon.vue';

/**
 * Port of detail_behavior expand button.
@@ -16,8 +15,7 @@ import Icon from '~/vue_shared/components/icon.vue';
export default {
  name: 'ExpandButton',
  components: {
-    GlDeprecatedButton,
-    Icon,
+    GlButton,
  },
  data() {
    return {
@@ -41,25 +39,23 @@ export default {
</script>
<template>
  <span>
-    <gl-deprecated-button
+    <gl-button
      v-show="isCollapsed"
      :aria-label="ariaLabel"
      type="button"
      class="js-text-expander-prepend text-expander btn-blank"
+      icon="ellipsis_h"
      @click="onClick"
-    >
-      <icon :size="12" name="ellipsis_h" />
-    </gl-deprecated-button>
+    />
    <span v-if="isCollapsed"> <slot name="short"></slot> </span>
    <span v-if="!isCollapsed"> <slot name="expanded"></slot> </span>
-    <gl-deprecated-button
+    <gl-button
      v-show="!isCollapsed"
      :aria-label="ariaLabel"
      type="button"
      class="js-text-expander-append text-expander btn-blank"
+      icon="ellipsis_h"
      @click="onClick"
-    >
-      <icon :size="12" name="ellipsis_h" />
-    </gl-deprecated-button>
  </span>
</template>

View File

@@ -14,6 +14,10 @@ module Projects
    end

    def show
+      if params[:page].present? && !Feature.enabled?(:metrics_dashboard_new_panel_page, project)
+        return render_404
+      end
+
      if environment
        render 'projects/environments/metrics'
      else
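Editorial aside (not part of this commit): the new guard only returns a 404 while the `metrics_dashboard_new_panel_page` feature flag is disabled for the project. A minimal sketch of toggling the flag per project from a Rails console; the project path is a placeholder:

```ruby
# Enable the flag for a single project; the path below is hypothetical.
project = Project.find_by_full_path('my-group/my-project')

Feature.enable(:metrics_dashboard_new_panel_page, project)
Feature.enabled?(:metrics_dashboard_new_panel_page, project) # => true
```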

View File

@@ -34,6 +34,8 @@ module AlertManagement
        else
          create_alert_management_alert
        end
+
+        process_incident_alert
      end

      def reset_alert_management_alert_status
@@ -47,16 +49,17 @@ module AlertManagement
      end

      def create_alert_management_alert
-        am_alert = AlertManagement::Alert.new(am_alert_params.merge(ended_at: nil))
+        new_alert = AlertManagement::Alert.new(am_alert_params.merge(ended_at: nil))

-        if am_alert.save
-          am_alert.execute_services
+        if new_alert.save
+          new_alert.execute_services
+          @am_alert = new_alert
          return
        end

        logger.warn(
          message: 'Unable to create AlertManagement::Alert',
          project_id: project.id,
-          alert_errors: am_alert.errors.messages
+          alert_errors: new_alert.errors.messages
        )
      end
@@ -89,12 +92,21 @@ module AlertManagement
        SystemNoteService.auto_resolve_prometheus_alert(issue, project, User.alert_bot) if issue.reset.closed?
      end

+      def process_incident_alert
+        return unless am_alert
+        return if am_alert.issue
+
+        IncidentManagement::ProcessAlertWorker.perform_async(nil, nil, am_alert.id)
+      end
+
      def logger
        @logger ||= Gitlab::AppLogger
      end

      def am_alert
-        @am_alert ||= AlertManagement::Alert.not_resolved.for_fingerprint(project, gitlab_fingerprint).first
+        strong_memoize(:am_alert) do
+          AlertManagement::Alert.not_resolved.for_fingerprint(project, gitlab_fingerprint).first
+        end
      end

      def bad_request
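Editorial aside (not part of this commit): the switch from `@am_alert ||=` to `strong_memoize` matters because the fingerprint lookup can legitimately return `nil`. With `||=` the query would be re-executed on every call in that case, while `strong_memoize` caches the first result even when it is `nil`. A minimal sketch of the difference; the class and the `expensive_lookup` stub are made up for illustration:

```ruby
class MemoizationExample
  include Gitlab::Utils::StrongMemoize

  # Stand-in for the AlertManagement::Alert fingerprint query; returns nil here
  # to mimic the "no open alert yet" case.
  def expensive_lookup
    puts 'querying...'
    nil
  end

  def with_or_equals
    @value ||= expensive_lookup # prints 'querying...' on every call
  end

  def with_strong_memoize
    strong_memoize(:value) { expensive_lookup } # prints 'querying...' only once
  end
end
```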

View File

@@ -30,7 +30,11 @@ module IncidentManagement
    end

    def parsed_payload(alert)
-      Gitlab::Alerting::NotificationPayloadParser.call(alert.payload.to_h, alert.project)
+      if alert.prometheus?
+        alert.payload
+      else
+        Gitlab::Alerting::NotificationPayloadParser.call(alert.payload.to_h, alert.project)
+      end
    end

    def create_issue_for(alert)

View File

@ -0,0 +1,5 @@
---
title: Fix automatic issue creation via Prometheus alerts
merge_request: 37884
author:
type: fixed

View File

@@ -25,7 +25,7 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
  # Use this scope for all new project routes.
  scope '-' do
    get 'archive/*id', constraints: { format: Gitlab::PathRegex.archive_formats_regex, id: /.+?/ }, to: 'repositories#archive', as: 'archive'
-    get 'metrics(/:dashboard_path)', constraints: { dashboard_path: /.+\.yml/ },
+    get 'metrics(/:dashboard_path)(/:page)', constraints: { dashboard_path: /.+\.yml/, page: 'panel/new' },
      to: 'metrics_dashboard#show', as: :metrics_dashboard, format: false

    resources :artifacts, only: [:index, :destroy]

View File

@@ -344,11 +344,11 @@ have [duplicate pipelines](#differences-between-rules-and-onlyexcept).
Useful workflow rules clauses:

| Clause                                                                      | Details                                                  |
|-----------------------------------------------------------------------------|----------------------------------------------------------|
| `if: '$CI_PIPELINE_SOURCE == "merge_request_event"'`                        | Allow or block merge request pipelines.                  |
| `if: '$CI_PIPELINE_SOURCE == "push"'`                                       | Allow or block both branch pipelines and tag pipelines.  |
-| `if: $CI_COMMIT_BEFORE_SHA == '0000000000000000000000000000000000000000'`   | Allow or block pipeline creation when new branches are created or pushed with no commits. |
+| `if: $CI_COMMIT_BEFORE_SHA == '0000000000000000000000000000000000000000'`   | Allow or block pipeline creation when new branches are created or pushed with no commits. This will also skip tag and scheduled pipelines. See [common `rules:if` clauses](#common-if-clauses-for-rules) for examples on how to define these rules more strictly. |

#### `workflow:rules` templates
@@ -1297,8 +1297,10 @@ Some details regarding the logic that determines the `when` for the job:
- You can define `when` once per rule, or once at the job-level, which applies to
  all rules. You can't mix `when` at the job-level with `when` in rules.

+##### Common `if` clauses for `rules`
+
For behavior similar to the [`only`/`except` keywords](#onlyexcept-basic), you can
-check the value of the `$CI_PIPELINE_SOURCE` variable.
+check the value of the `$CI_PIPELINE_SOURCE` variable:

| Value                  | Description |
|------------------------|-------------|
@@ -1308,7 +1310,7 @@ check the value of the `$CI_PIPELINE_SOURCE` variable.
| `schedule`             | For [scheduled pipelines](../pipelines/schedules.md). |
| `api`                  | For pipelines triggered by the [pipelines API](../../api/pipelines.md#create-a-new-pipeline). |
| `external`             | When using CI services other than GitLab. |
| `pipeline`             | For multi-project pipelines created by [using the API with `CI_JOB_TOKEN`](../triggers/README.md#when-used-with-multi-project-pipelines). |
| `chat`                 | For pipelines created by using a [GitLab ChatOps](../chatops/README.md) command. |
| `webide`               | For pipelines created by using the [WebIDE](../../user/project/web_ide/index.md). |
| `merge_request_event`  | For pipelines created when a merge request is created or updated. Required to enable [merge request pipelines](../merge_request_pipelines/index.md), [merged results pipelines](../merge_request_pipelines/pipelines_for_merged_results/index.md), and [merge trains](../merge_request_pipelines/pipelines_for_merged_results/merge_trains/index.md). |
@@ -1358,6 +1360,29 @@ Other commonly used variables for `if` clauses:
- `if: '$CUSTOM_VARIABLE == "value1"'`: If the custom variable `CUSTOM_VARIABLE` is
  exactly `value1`.

+To avoid running pipelines when a branch is created without any changes,
+check the value of `$CI_COMMIT_BEFORE_SHA`. It has a value of
+`0000000000000000000000000000000000000000`:
+
+- In branches with no commits.
+- In tag pipelines and scheduled pipelines. You should define rules very
+  narrowly if you don't want to skip these.
+
+To skip pipelines on all empty branches, and also on tags and schedules:
+
+```yaml
+rules:
+  - if: $CI_COMMIT_BEFORE_SHA == '0000000000000000000000000000000000000000'
+    when: never
+```
+
+To skip branch pipelines only when the branch is empty:
+
+```yaml
+rules:
+  - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BEFORE_SHA != '0000000000000000000000000000000000000000'
+```
+
##### `rules:changes`

To determine if jobs should be added to a pipeline, `rules: changes` clauses check

View File

@@ -56,6 +56,7 @@ Complementary reads:
- [Danger bot](dangerbot.md)
- [Generate a changelog entry with `bin/changelog`](changelog.md)
- [Requesting access to Chatops on GitLab.com](chatops_on_gitlabcom.md#requesting-access) (for GitLab team members)
+- [Patch release process for developers](https://gitlab.com/gitlab-org/release/docs/blob/master/general/patch/process.md#process-for-developers)

## UX and Frontend guides

View File

@@ -429,6 +429,52 @@ module Types
end
```
## JSON
When data to be returned by GraphQL is stored as
[JSON](migration_style_guide.md#storing-json-in-database), we should continue to use
GraphQL types whenever possible. Avoid using the `GraphQL::Types::JSON` type unless
the JSON data returned is _truly_ unstructured.
If the structure of the JSON data varies, but will be one of a set of known possible
structures, use a
[union](https://graphql-ruby.org/type_definitions/unions.html).
An example of the use of a union for this purpose is
[!30129](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/30129).
Field names can be mapped to hash data keys using the `hash_key:` keyword if needed.
For example, given the following simple JSON data:
```json
{
"title": "My chart",
"data": [
{ "x": 0, "y": 1 },
{ "x": 1, "y": 1 },
{ "x": 2, "y": 2 }
]
}
```
We can use GraphQL types like this:
```ruby
module Types
class ChartType < BaseObject
field :title, GraphQL::STRING_TYPE, null: true, description: 'Title of the chart'
field :data, [Types::ChartDatumType], null: true, description: 'Data of the chart'
end
end
module Types
class ChartDatumType < BaseObject
field :x, GraphQL::INT_TYPE, null: true, description: 'X-axis value of the chart datum'
field :y, GraphQL::INT_TYPE, null: true, description: 'Y-axis value of the chart datum'
end
end
```
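Editorial aside (not part of this commit's diff): the `hash_key:` mapping mentioned above is not shown in the example. A minimal sketch of exposing a field whose GraphQL name differs from the underlying JSON key; the `chart_title` field name is hypothetical:

```ruby
module Types
  class ChartType < BaseObject
    # Resolves from the "title" key of the JSON hash while exposing the field
    # as `chartTitle` in the schema. The field name is chosen only for illustration.
    field :chart_title, GraphQL::STRING_TYPE, null: true,
          hash_key: 'title', description: 'Title of the chart'
  end
end
```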
## Descriptions

All fields and arguments

View File

@@ -205,7 +205,7 @@ If you were running GitLab and GitLab CI on the same server you can skip this
step.

Copy your CI data archive to your GitLab server. There are many ways to do
-this, below we use SSH agent forwarding and 'scp', which will be easy and fast
+this, below we use SSH agent forwarding and `scp`, which will be easy and fast
for most setups. You can also copy the data archive first from the CI server to
your laptop and then from your laptop to the GitLab server.

View File

@@ -604,25 +604,25 @@ msgstr[1] ""
msgid "%{remaining_approvals} left"
msgstr ""

-msgid "%{reportType} %{status} detected %{criticalStart}%{critical} new critical%{criticalEnd} and %{highStart}%{high} new high%{highEnd} severity vulnerabilities out of %{total}."
+msgid "%{reportType} %{status} detected %{criticalStart}%{critical} critical%{criticalEnd} and %{highStart}%{high} high%{highEnd} severity vulnerabilities out of %{total}."
msgstr ""

-msgid "%{reportType} %{status} detected %{criticalStart}%{critical} new critical%{criticalEnd} and %{highStart}%{high} new high%{highEnd} severity vulnerabilities."
+msgid "%{reportType} %{status} detected %{criticalStart}%{critical} critical%{criticalEnd} and %{highStart}%{high} high%{highEnd} severity vulnerabilities."
msgstr ""

-msgid "%{reportType} %{status} detected %{criticalStart}%{critical} new critical%{criticalEnd} severity vulnerabilities out of %{total}."
+msgid "%{reportType} %{status} detected %{criticalStart}%{critical} critical%{criticalEnd} severity vulnerabilities out of %{total}."
msgstr ""

-msgid "%{reportType} %{status} detected %{criticalStart}%{critical} new critical%{criticalEnd} severity vulnerability."
-msgid_plural "%{reportType} %{status} detected %{criticalStart}%{critical} new critical%{criticalEnd} severity vulnerabilities."
+msgid "%{reportType} %{status} detected %{criticalStart}%{critical} critical%{criticalEnd} severity vulnerability."
+msgid_plural "%{reportType} %{status} detected %{criticalStart}%{critical} critical%{criticalEnd} severity vulnerabilities."
msgstr[0] ""
msgstr[1] ""

-msgid "%{reportType} %{status} detected %{highStart}%{high} new high%{highEnd} severity vulnerabilities out of %{total}."
+msgid "%{reportType} %{status} detected %{highStart}%{high} high%{highEnd} severity vulnerabilities out of %{total}."
msgstr ""

-msgid "%{reportType} %{status} detected %{highStart}%{high} new high%{highEnd} severity vulnerability."
-msgid_plural "%{reportType} %{status} detected %{highStart}%{high} new high%{highEnd} severity vulnerabilities."
+msgid "%{reportType} %{status} detected %{highStart}%{high} high%{highEnd} severity vulnerability."
+msgid_plural "%{reportType} %{status} detected %{highStart}%{high} high%{highEnd} severity vulnerabilities."
msgstr[0] ""
msgstr[1] ""
@@ -631,7 +631,7 @@ msgid_plural "%{reportType} %{status} detected %{other} vulnerabilities."
msgstr[0] ""
msgstr[1] ""

-msgid "%{reportType} %{status} detected no new vulnerabilities."
+msgid "%{reportType} %{status} detected no vulnerabilities."
msgstr ""

msgid "%{retryButtonStart}Try again%{retryButtonEnd} or %{newFileButtonStart}attach a new file%{newFileButtonEnd}"
@@ -14984,9 +14984,15 @@ msgstr ""
msgid "Metrics|Add metric"
msgstr ""

+msgid "Metrics|Add panel"
+msgstr ""
+
msgid "Metrics|Avg"
msgstr ""

+msgid "Metrics|Back to dashboard"
+msgstr ""
+
msgid "Metrics|Cancel"
msgstr ""

View File

@@ -6,6 +6,8 @@ module QA
    class Client
      attr_reader :address, :user

+      AuthorizationError = Class.new(RuntimeError)
+
      def initialize(address = :gitlab, personal_access_token: nil, is_new_session: true, user: nil, ip_limits: false)
        @address = address
        @personal_access_token = personal_access_token

View File

@@ -10,16 +10,24 @@ module QA
        RepositoryStorageMovesError = Class.new(RuntimeError)

        def has_status?(project, status, destination_storage = Env.additional_repository_storage)
-          all.any? do |move|
-            move[:project][:path_with_namespace] == project.path_with_namespace &&
+          find_any do |move|
+            next unless move[:project][:path_with_namespace] == project.path_with_namespace
+
+            QA::Runtime::Logger.debug("Move data: #{move}")
+
            move[:state] == status &&
              move[:destination_storage_name] == destination_storage
          end
        end

-        def all
+        def find_any
          Logger.debug('Getting repository storage moves')
-          parse_body(get(Request.new(api_client, '/project_repository_storage_moves').url))
+
+          Support::Waiter.wait_until do
+            with_paginated_response_body(Request.new(api_client, '/project_repository_storage_moves', per_page: '100').url) do |page|
+              break true if page.any? { |item| yield item }
+            end
+          end
        end

        private

View File

@@ -61,6 +61,10 @@ module QA
        ENV['QA_ADDITIONAL_REPOSITORY_STORAGE']
      end

+      def non_cluster_repository_storage
+        ENV['QA_GITALY_NON_CLUSTER_STORAGE'] || 'gitaly'
+      end
+
      def praefect_repository_storage
        ENV['QA_PRAEFECT_REPOSITORY_STORAGE']
      end

View File

@@ -5,6 +5,8 @@ module QA
      class PraefectManager
        include Service::Shellout

+        attr_accessor :gitlab
+
        def initialize
          @gitlab = 'gitlab-gitaly-ha'
          @praefect = 'praefect'
@@ -100,6 +102,14 @@ module QA
          enable_writes
        end

+        def verify_storage_move(source_storage, destination_storage)
+          return if QA::Runtime::Env.dot_com?
+
+          repo_path = verify_storage_move_from_gitaly(source_storage[:name])
+
+          destination_storage[:type] == :praefect ? verify_storage_move_to_praefect(repo_path, destination_storage[:name]) : verify_storage_move_to_gitaly(repo_path, destination_storage[:name])
+        end
+
        def wait_for_praefect
          wait_until_shell_command_matches(
            "docker exec #{@praefect} bash -c 'cat /var/log/gitlab/praefect/current'",
@@ -163,16 +173,48 @@ module QA
        private

-        def wait_until_shell_command(cmd)
-          Support::Waiter.wait_until do
+        def verify_storage_move_from_gitaly(storage)
+          wait_until_shell_command("docker exec #{@gitlab} bash -c 'tail -n 50 /var/log/gitlab/gitaly/current'") do |line|
+            log = JSON.parse(line)
+
+            break log['grpc.request.repoPath'] if log['grpc.method'] == 'RenameRepository' && log['grpc.request.repoStorage'] == storage && !log['grpc.request.repoPath'].include?('wiki')
+          rescue JSON::ParserError
+            # Ignore lines that can't be parsed as JSON
+          end
+        end
+
+        def verify_storage_move_to_praefect(repo_path, virtual_storage)
+          wait_until_shell_command("docker exec #{@gitlab} bash -c 'tail -n 50 /var/log/gitlab/praefect/current'") do |line|
+            log = JSON.parse(line)
+
+            log['grpc.method'] == 'ReplicateRepository' && log['virtual_storage'] == virtual_storage && log['relative_path'] == repo_path
+          rescue JSON::ParserError
+            # Ignore lines that can't be parsed as JSON
+          end
+        end
+
+        def verify_storage_move_to_gitaly(repo_path, storage)
+          wait_until_shell_command("docker exec #{@gitlab} bash -c 'tail -n 50 /var/log/gitlab/gitaly/current'") do |line|
+            log = JSON.parse(line)
+
+            log['grpc.method'] == 'ReplicateRepository' && log['grpc.request.repoStorage'] == storage && log['grpc.request.repoPath'] == repo_path
+          rescue JSON::ParserError
+            # Ignore lines that can't be parsed as JSON
+          end
+        end
+
+        def wait_until_shell_command(cmd, **kwargs)
+          sleep_interval = kwargs.delete(:sleep_interval) || 1
+
+          Support::Waiter.wait_until(sleep_interval: sleep_interval, **kwargs) do
            shell cmd do |line|
              break true if yield line
            end
          end
        end

-        def wait_until_shell_command_matches(cmd, regex)
-          wait_until_shell_command(cmd) do |line|
+        def wait_until_shell_command_matches(cmd, regex, **kwargs)
+          wait_until_shell_command(cmd, kwargs) do |line|
            QA::Runtime::Logger.info(line.chomp)
            line =~ regex

View File

@@ -3,13 +3,14 @@
module QA
  RSpec.describe 'Create' do
    describe 'Changing Gitaly repository storage', :requires_admin do
+      praefect_manager = Service::PraefectManager.new
+      praefect_manager.gitlab = 'gitlab'
+
      shared_examples 'repository storage move' do
        it 'confirms a `finished` status after moving project repository storage' do
          expect(project).to have_file('README.md')

-          project.change_repository_storage(destination_storage)
-          expect(Runtime::API::RepositoryStorageMoves).to have_status(project, 'finished', destination_storage)
+          expect { project.change_repository_storage(destination_storage[:name]) }.not_to raise_error
+          expect { praefect_manager.verify_storage_move(source_storage, destination_storage) }.not_to raise_error

          Resource::Repository::ProjectPush.fabricate! do |push|
            push.project = project
@@ -25,28 +26,35 @@ module QA
      end

      context 'when moving from one Gitaly storage to another', :orchestrated, :repository_storage do
+        let(:source_storage) { { type: :gitaly, name: 'default' } }
+        let(:destination_storage) { { type: :gitaly, name: QA::Runtime::Env.additional_repository_storage } }
+
        let(:project) do
          Resource::Project.fabricate_via_api! do |project|
            project.name = 'repo-storage-move-status'
            project.initialize_with_readme = true
+            project.api_client = Runtime::API::Client.as_admin
          end
        end

-        let(:destination_storage) { QA::Runtime::Env.additional_repository_storage }
-
        it_behaves_like 'repository storage move'
      end

      # Note: This test doesn't have the :orchestrated tag because it runs in the Test::Integration::Praefect
      # scenario with other tests that aren't considered orchestrated.
-      context 'when moving from Gitaly to Gitaly Cluster', :requires_praefect, quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/227127', type: :investigating } do
+      # It also runs on staging using nfs-file07 as non-cluster storage and nfs-file22 as cluster/praefect storage
+      context 'when moving from Gitaly to Gitaly Cluster', :requires_praefect do
+        let(:source_storage) { { type: :gitaly, name: QA::Runtime::Env.non_cluster_repository_storage } }
+        let(:destination_storage) { { type: :praefect, name: QA::Runtime::Env.praefect_repository_storage } }
+
        let(:project) do
          Resource::Project.fabricate_via_api! do |project|
            project.name = 'repo-storage-move'
            project.initialize_with_readme = true
-            project.repository_storage = 'gitaly'
+            project.repository_storage = source_storage[:name]
+            project.api_client = Runtime::API::Client.as_admin
          end
        end

-        let(:destination_storage) { QA::Runtime::Env.praefect_repository_storage }
-
        it_behaves_like 'repository storage move'
      end

View File

@@ -77,6 +77,30 @@ module QA
error.response
end
def with_paginated_response_body(url)
loop do
response = get(url)
QA::Runtime::Logger.debug("Fetching page #{response.headers[:x_page]} of #{response.headers[:x_total_pages]}...")
yield parse_body(response)
next_link = pagination_links(response).find { |link| link[:rel] == 'next' }
break unless next_link
url = next_link[:url]
end
end
def pagination_links(response)
response.headers[:link].split(',').map do |link|
match = link.match(/\<(?<url>.*)\>\; rel=\"(?<rel>\w+)\"/)
break nil unless match
{ url: match[:url], rel: match[:rel] }
end.compact
end
end
end
end
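Editorial aside (not part of this commit's diff): the helper above walks the API's `Link` response header, fetching page after page until no `rel="next"` entry remains, and yields each parsed page to the caller. A hedged usage sketch, mirroring the call made in `RepositoryStorageMoves#find_any` earlier in this commit; the block body is an assumption for illustration:

```ruby
# Illustrative only: iterate every page of an endpoint, 100 records at a time.
url = Request.new(api_client, '/project_repository_storage_moves', per_page: '100').url

with_paginated_response_body(url) do |page|
  # `page` is the parsed JSON array for one page of results.
  page.each { |move| QA::Runtime::Logger.debug(move[:state]) }
end
```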

View File

@@ -101,6 +101,16 @@ FactoryBot.define do
trait :prometheus do
monitoring_tool { Gitlab::AlertManagement::AlertParams::MONITORING_TOOLS[:prometheus] }
payload do
{
annotations: {
title: 'This is a prometheus error',
summary: 'Summary of the error',
description: 'Description of the error'
},
startsAt: started_at
}.with_indifferent_access
end
end

trait :all_fields do
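Editorial aside (not part of the diff): the new trait composes like any other FactoryBot trait, so specs elsewhere in this commit can build a Prometheus-flavoured alert with a realistic payload in one line. A minimal sketch, assuming `project` is an existing project record:

```ruby
# Builds an alert whose monitoring_tool and payload come from the :prometheus trait above.
alert = create(:alert_management_alert, :prometheus, project: project, started_at: Time.current)

alert.payload['annotations']['title'] # => "This is a prometheus error"
```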

View File

@ -0,0 +1,59 @@
import { shallowMount } from '@vue/test-utils';
import { GlButton } from '@gitlab/ui';
import { DASHBOARD_PAGE } from '~/monitoring/router/constants';
import PanelNewPage from '~/monitoring/pages/panel_new_page.vue';
const dashboard = 'dashboard.yml';
// Button stub that can accept `to` as router links do
// https://bootstrap-vue.org/docs/components/button#comp-ref-b-button-props
const GlButtonStub = {
extends: GlButton,
props: {
to: [String, Object],
},
};
describe('monitoring/pages/panel_new_page', () => {
let wrapper;
let $route;
const mountComponent = (propsData = {}, routeParams = { dashboard }) => {
$route = {
params: routeParams,
};
wrapper = shallowMount(PanelNewPage, {
propsData,
stubs: {
GlButton: GlButtonStub,
},
mocks: {
$route,
},
});
};
const findBackButton = () => wrapper.find(GlButtonStub);
afterEach(() => {
wrapper.destroy();
});
describe('back to dashboard button', () => {
it('is rendered', () => {
mountComponent();
expect(findBackButton().exists()).toBe(true);
expect(findBackButton().props('icon')).toBe('go-back');
});
it('links back to the dashboard', () => {
const dashboardLocation = {
name: DASHBOARD_PAGE,
params: { dashboard },
};
expect(findBackButton().props('to')).toEqual(dashboardLocation);
});
});
});

View File

@ -1,18 +1,28 @@
import { mount, createLocalVue } from '@vue/test-utils'; import { mount, createLocalVue } from '@vue/test-utils';
import VueRouter from 'vue-router'; import VueRouter from 'vue-router';
import DashboardPage from '~/monitoring/pages/dashboard_page.vue'; import DashboardPage from '~/monitoring/pages/dashboard_page.vue';
import PanelNewPage from '~/monitoring/pages/panel_new_page.vue';
import Dashboard from '~/monitoring/components/dashboard.vue'; import Dashboard from '~/monitoring/components/dashboard.vue';
import { createStore } from '~/monitoring/stores'; import { createStore } from '~/monitoring/stores';
import createRouter from '~/monitoring/router'; import createRouter from '~/monitoring/router';
import { dashboardProps } from './fixture_data'; import { dashboardProps } from './fixture_data';
import { dashboardHeaderProps } from './mock_data'; import { dashboardHeaderProps } from './mock_data';
const LEGACY_BASE_PATH = '/project/my-group/test-project/-/environments/71146/metrics';
const BASE_PATH = '/project/my-group/test-project/-/metrics';
const MockApp = {
data() {
return {
dashboardProps: { ...dashboardProps, ...dashboardHeaderProps },
};
},
template: `<router-view :dashboard-props="dashboardProps"/>`,
};
describe('Monitoring router', () => { describe('Monitoring router', () => {
let router; let router;
let store; let store;
const propsData = { dashboardProps: { ...dashboardProps, ...dashboardHeaderProps } };
const NEW_BASE_PATH = '/project/my-group/test-project/-/metrics';
const OLD_BASE_PATH = '/project/my-group/test-project/-/environments/71146/metrics';
const createWrapper = (basePath, routeArg) => { const createWrapper = (basePath, routeArg) => {
const localVue = createLocalVue(); const localVue = createLocalVue();
@ -23,11 +33,10 @@ describe('Monitoring router', () => {
router.push(routeArg); router.push(routeArg);
} }
return mount(DashboardPage, { return mount(MockApp, {
localVue, localVue,
store, store,
router, router,
propsData,
}); });
}; };
@ -40,26 +49,32 @@ describe('Monitoring router', () => {
window.location.hash = ''; window.location.hash = '';
}); });
describe('support old URL with full dashboard path', () => { describe('support legacy URLs with full dashboard path to visit dashboard page', () => {
it.each` it.each`
route | currentDashboard path | currentDashboard
${'/dashboard.yml'} | ${'dashboard.yml'} ${'/dashboard.yml'} | ${'dashboard.yml'}
${'/folder1/dashboard.yml'} | ${'folder1/dashboard.yml'} ${'/folder1/dashboard.yml'} | ${'folder1/dashboard.yml'}
${'/?dashboard=dashboard.yml'} | ${'dashboard.yml'} ${'/?dashboard=dashboard.yml'} | ${'dashboard.yml'}
`('sets component as $componentName for path "$route"', ({ route, currentDashboard }) => { `('"$path" renders page with dashboard "$currentDashboard"', ({ path, currentDashboard }) => {
const wrapper = createWrapper(OLD_BASE_PATH, route); const wrapper = createWrapper(LEGACY_BASE_PATH, path);
expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/setCurrentDashboard', { expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/setCurrentDashboard', {
currentDashboard, currentDashboard,
}); });
expect(wrapper.find(Dashboard)).toExist(); expect(wrapper.find(DashboardPage).exists()).toBe(true);
expect(
wrapper
.find(DashboardPage)
.find(Dashboard)
.exists(),
).toBe(true);
}); });
}); });
describe('supports new URL with short dashboard path', () => { describe('supports URLs to visit dashboard page', () => {
it.each` it.each`
route | currentDashboard path | currentDashboard
${'/'} | ${null} ${'/'} | ${null}
${'/dashboard.yml'} | ${'dashboard.yml'} ${'/dashboard.yml'} | ${'dashboard.yml'}
${'/folder1/dashboard.yml'} | ${'folder1/dashboard.yml'} ${'/folder1/dashboard.yml'} | ${'folder1/dashboard.yml'}
@ -68,14 +83,35 @@ describe('Monitoring router', () => {
${'/config/prometheus/common_metrics.yml'} | ${'config/prometheus/common_metrics.yml'} ${'/config/prometheus/common_metrics.yml'} | ${'config/prometheus/common_metrics.yml'}
${'/config/prometheus/pod_metrics.yml'} | ${'config/prometheus/pod_metrics.yml'} ${'/config/prometheus/pod_metrics.yml'} | ${'config/prometheus/pod_metrics.yml'}
${'/config%2Fprometheus%2Fpod_metrics.yml'} | ${'config/prometheus/pod_metrics.yml'} ${'/config%2Fprometheus%2Fpod_metrics.yml'} | ${'config/prometheus/pod_metrics.yml'}
`('sets component as $componentName for path "$route"', ({ route, currentDashboard }) => { `('"$path" renders page with dashboard "$currentDashboard"', ({ path, currentDashboard }) => {
const wrapper = createWrapper(NEW_BASE_PATH, route); const wrapper = createWrapper(BASE_PATH, path);
expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/setCurrentDashboard', { expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/setCurrentDashboard', {
currentDashboard, currentDashboard,
}); });
expect(wrapper.find(Dashboard)).toExist(); expect(wrapper.find(DashboardPage).exists()).toBe(true);
expect(
wrapper
.find(DashboardPage)
.find(Dashboard)
.exists(),
).toBe(true);
});
});
describe('supports URLs to visit new panel page', () => {
it.each`
path | currentDashboard
${'/panel/new'} | ${undefined}
${'/dashboard.yml/panel/new'} | ${'dashboard.yml'}
${'/config/prometheus/common_metrics.yml/panel/new'} | ${'config/prometheus/common_metrics.yml'}
${'/config%2Fprometheus%2Fcommon_metrics.yml/panel/new'} | ${'config/prometheus/common_metrics.yml'}
`('"$path" renders page with dashboard "$currentDashboard"', ({ path, currentDashboard }) => {
const wrapper = createWrapper(BASE_PATH, path);
expect(wrapper.vm.$route.params.dashboard).toBe(currentDashboard);
expect(wrapper.find(PanelNewPage).exists()).toBe(true);
}); });
}); });
}); });

View File

@ -1,15 +1,14 @@
import { shallowMount } from '@vue/test-utils'; import { shallowMount } from '@vue/test-utils';
import { mockTracking, triggerEvent } from 'helpers/tracking_helper'; import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
import LockIssueSidebar from '~/sidebar/components/lock/lock_issue_sidebar.vue'; import IssuableLockForm from '~/sidebar/components/lock/issuable_lock_form.vue';
import EditForm from '~/sidebar/components/lock/edit_form.vue'; import EditForm from '~/sidebar/components/lock/edit_form.vue';
import createStore from '~/notes/stores'; import createStore from '~/notes/stores';
import { createStore as createMrStore } from '~/mr_notes/stores'; import { createStore as createMrStore } from '~/mr_notes/stores';
import { ISSUABLE_TYPE_ISSUE, ISSUABLE_TYPE_MR } from './constants'; import { ISSUABLE_TYPE_ISSUE, ISSUABLE_TYPE_MR } from './constants';
describe('LockIssueSidebar', () => { describe('IssuableLockForm', () => {
let wrapper; let wrapper;
let store; let store;
let mediator;
let issuableType; // Either ISSUABLE_TYPE_ISSUE or ISSUABLE_TYPE_MR let issuableType; // Either ISSUABLE_TYPE_ISSUE or ISSUABLE_TYPE_MR
const setIssuableType = pageType => { const setIssuableType = pageType => {
@ -21,15 +20,6 @@ describe('LockIssueSidebar', () => {
const findEditLink = () => wrapper.find('[data-testid="edit-link"]'); const findEditLink = () => wrapper.find('[data-testid="edit-link"]');
const findEditForm = () => wrapper.find(EditForm); const findEditForm = () => wrapper.find(EditForm);
const initMediator = () => {
mediator = {
service: {
update: Promise.resolve(true),
},
store: {},
};
};
const initStore = isLocked => { const initStore = isLocked => {
if (issuableType === ISSUABLE_TYPE_ISSUE) { if (issuableType === ISSUABLE_TYPE_ISSUE) {
store = createStore(); store = createStore();
@ -41,11 +31,10 @@ describe('LockIssueSidebar', () => {
}; };
const createComponent = ({ props = {} }) => { const createComponent = ({ props = {} }) => {
wrapper = shallowMount(LockIssueSidebar, { wrapper = shallowMount(IssuableLockForm, {
store, store,
propsData: { propsData: {
isEditable: true, isEditable: true,
mediator,
...props, ...props,
}, },
}); });
@ -62,7 +51,6 @@ describe('LockIssueSidebar', () => {
`('In $pageType page', ({ pageType }) => { `('In $pageType page', ({ pageType }) => {
beforeEach(() => { beforeEach(() => {
setIssuableType(pageType); setIssuableType(pageType);
initMediator();
}); });
describe.each` describe.each`

View File

@ -4,20 +4,22 @@ exports[`Expand button on click when short text is provided renders button after
<span> <span>
<button <button
aria-label="Click to expand text" aria-label="Click to expand text"
class="btn js-text-expander-prepend text-expander btn-blank btn-secondary btn-md" class="btn js-text-expander-prepend text-expander btn-blank btn-default btn-md btn-icon button-ellipsis-horizontal gl-button"
style="display: none;" style="display: none;"
type="button" type="button"
> >
<!----> <!---->
<svg <svg
aria-hidden="true" class="gl-icon s16"
class="s12 ic-ellipsis_h" data-testid="ellipsis_h-icon"
> >
<use <use
xlink:href="#ellipsis_h" href="#ellipsis_h"
/> />
</svg> </svg>
<!---->
</button> </button>
<!----> <!---->
@ -30,20 +32,22 @@ exports[`Expand button on click when short text is provided renders button after
<button <button
aria-label="Click to expand text" aria-label="Click to expand text"
class="btn js-text-expander-append text-expander btn-blank btn-secondary btn-md" class="btn js-text-expander-append text-expander btn-blank btn-default btn-md btn-icon button-ellipsis-horizontal gl-button"
style="" style=""
type="button" type="button"
> >
<!----> <!---->
<svg <svg
aria-hidden="true" class="gl-icon s16"
class="s12 ic-ellipsis_h" data-testid="ellipsis_h-icon"
> >
<use <use
xlink:href="#ellipsis_h" href="#ellipsis_h"
/> />
</svg> </svg>
<!---->
</button> </button>
</span> </span>
`; `;
@ -52,19 +56,21 @@ exports[`Expand button when short text is provided renders button before text 1`
<span> <span>
<button <button
aria-label="Click to expand text" aria-label="Click to expand text"
class="btn js-text-expander-prepend text-expander btn-blank btn-secondary btn-md" class="btn js-text-expander-prepend text-expander btn-blank btn-default btn-md btn-icon button-ellipsis-horizontal gl-button"
type="button" type="button"
> >
<!----> <!---->
<svg <svg
aria-hidden="true" class="gl-icon s16"
class="s12 ic-ellipsis_h" data-testid="ellipsis_h-icon"
> >
<use <use
xlink:href="#ellipsis_h" href="#ellipsis_h"
/> />
</svg> </svg>
<!---->
</button> </button>
<span> <span>
@ -77,20 +83,22 @@ exports[`Expand button when short text is provided renders button before text 1`
<button <button
aria-label="Click to expand text" aria-label="Click to expand text"
class="btn js-text-expander-append text-expander btn-blank btn-secondary btn-md" class="btn js-text-expander-append text-expander btn-blank btn-default btn-md btn-icon button-ellipsis-horizontal gl-button"
style="display: none;" style="display: none;"
type="button" type="button"
> >
<!----> <!---->
<svg <svg
aria-hidden="true" class="gl-icon s16"
class="s12 ic-ellipsis_h" data-testid="ellipsis_h-icon"
> >
<use <use
xlink:href="#ellipsis_h" href="#ellipsis_h"
/> />
</svg> </svg>
<!---->
</button> </button>
</span> </span>
`; `;

View File

@@ -79,6 +79,26 @@ RSpec.describe 'metrics dashboard page' do
end
end
describe 'GET :/namespace/:project/-/metrics/:page' do
it 'returns 200 with path param page and feature flag enabled' do
stub_feature_flags(metrics_dashboard_new_panel_page: true)
# send_request(page: 'panel/new') cannot be used because it encodes '/'
get "/#{project.namespace.to_param}/#{project.to_param}/-/metrics/panel/new"
expect(response).to have_gitlab_http_status(:ok)
end
it 'returns 404 with path param page and feature flag disabled' do
stub_feature_flags(metrics_dashboard_new_panel_page: false)
# send_request(page: 'panel/new') cannot be used because it encodes '/'
get "/#{project.namespace.to_param}/#{project.to_param}/-/metrics/panel/new"
expect(response).to have_gitlab_http_status(:not_found)
end
end
def send_request(params = {})
get namespace_project_metrics_dashboard_path(namespace_id: project.namespace, project_id: project, **params)
end

View File

@@ -823,4 +823,66 @@ RSpec.describe 'project routing' do
project_id: 'gitlabhq', snippet_id: '1', ref: 'master', path: 'lib/version.rb')
end
end
describe Projects::MetricsDashboardController, 'routing' do
it 'routes to #show with no dashboard_path and no page' do
expect(get: "/gitlab/gitlabhq/-/metrics").to route_to(
"projects/metrics_dashboard#show",
**base_params
)
end
it 'routes to #show with only dashboard_path' do
expect(get: "/gitlab/gitlabhq/-/metrics/dashboard1.yml").to route_to(
"projects/metrics_dashboard#show",
dashboard_path: 'dashboard1.yml',
**base_params
)
end
it 'routes to #show with only page' do
expect(get: "/gitlab/gitlabhq/-/metrics/panel/new").to route_to(
"projects/metrics_dashboard#show",
page: 'panel/new',
**base_params
)
end
it 'routes to #show with dashboard_path and page' do
expect(get: "/gitlab/gitlabhq/-/metrics/config%2Fprometheus%2Fcommon_metrics.yml/panel/new").to route_to(
"projects/metrics_dashboard#show",
dashboard_path: 'config/prometheus/common_metrics.yml',
page: 'panel/new',
**base_params
)
end
it 'routes to 404 with invalid page' do
expect(get: "/gitlab/gitlabhq/-/metrics/invalid_page").to route_to(
'application#route_not_found',
unmatched_route: 'gitlab/gitlabhq/-/metrics/invalid_page'
)
end
it 'routes to 404 with invalid dashboard_path' do
expect(get: "/gitlab/gitlabhq/-/metrics/invalid_dashboard").to route_to(
'application#route_not_found',
unmatched_route: 'gitlab/gitlabhq/-/metrics/invalid_dashboard'
)
end
it 'routes to 404 with invalid dashboard_path and valid page' do
expect(get: "/gitlab/gitlabhq/-/metrics/dashboard1/panel/new").to route_to(
'application#route_not_found',
unmatched_route: 'gitlab/gitlabhq/-/metrics/dashboard1/panel/new'
)
end
it 'routes to 404 with valid dashboard_path and invalid page' do
expect(get: "/gitlab/gitlabhq/-/metrics/dashboard1.yml/invalid_page").to route_to(
'application#route_not_found',
unmatched_route: 'gitlab/gitlabhq/-/metrics/dashboard1.yml/invalid_page'
)
end
end
end

View File

@@ -83,6 +83,15 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
context 'when alert does not exist' do
context 'when alert can be created' do
it_behaves_like 'creates an alert management alert'
it 'processes the incident alert' do
expect(IncidentManagement::ProcessAlertWorker)
.to receive(:perform_async)
.with(nil, nil, kind_of(Integer))
.once
expect(subject).to be_success
end
end

context 'when alert cannot be created' do
@@ -102,6 +111,13 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
execute
end
it 'does not create incident issue' do
expect(IncidentManagement::ProcessAlertWorker)
.not_to receive(:perform_async)
expect(subject).to be_success
end
end

it { is_expected.to be_success }

View File

@@ -16,16 +16,62 @@ RSpec.describe IncidentManagement::ProcessAlertWorker do
    subject { described_class.new.perform(nil, nil, alert.id) }

    before do
+      allow(Gitlab::AppLogger).to receive(:warn).and_call_original
+
      allow(IncidentManagement::CreateIssueService)
        .to receive(:new).with(alert.project, parsed_payload)
        .and_call_original
    end

-    it 'creates an issue' do
-      expect(IncidentManagement::CreateIssueService)
-        .to receive(:new).with(alert.project, parsed_payload)
+    shared_examples 'creates issue successfully' do
+      it 'creates an issue' do
+        expect(IncidentManagement::CreateIssueService)
+          .to receive(:new).with(alert.project, parsed_payload)

-      expect { subject }.to change { Issue.count }.by(1)
+        expect { subject }.to change { Issue.count }.by(1)
end
it 'updates AlertManagement::Alert#issue_id' do
subject
expect(alert.reload.issue_id).to eq(created_issue.id)
end
it 'does not write a warning to log' do
subject
expect(Gitlab::AppLogger).not_to have_received(:warn)
end
end
context 'with valid alert' do
it_behaves_like 'creates issue successfully'
context 'when alert cannot be updated' do
let_it_be(:alert) { create(:alert_management_alert, :with_validation_errors, project: project, payload: payload) }
it 'updates AlertManagement::Alert#issue_id' do
expect { subject }.not_to change { alert.reload.issue_id }
end
it 'logs a warning' do
subject
expect(Gitlab::AppLogger).to have_received(:warn).with(
message: 'Cannot link an Issue with Alert',
issue_id: created_issue.id,
alert_id: alert.id,
alert_errors: { hosts: ['hosts array is over 255 chars'] }
)
end
end
context 'prometheus alert' do
let_it_be(:alert) { create(:alert_management_alert, :prometheus, project: project, started_at: started_at) }
let_it_be(:parsed_payload) { alert.payload }
it_behaves_like 'creates issue successfully'
end
end end
context 'with invalid alert' do
@@ -39,44 +85,5 @@ RSpec.describe IncidentManagement::ProcessAlertWorker do
expect { subject }.not_to change { Issue.count }
end
end
context 'with valid alert' do
before do
allow(Gitlab::AppLogger).to receive(:warn).and_call_original
end
context 'when alert can be updated' do
it 'updates AlertManagement::Alert#issue_id' do
subject
expect(alert.reload.issue_id).to eq(created_issue.id)
end
it 'does not write a warning to log' do
subject
expect(Gitlab::AppLogger).not_to have_received(:warn)
end
context 'when alert cannot be updated' do
let_it_be(:alert) { create(:alert_management_alert, :with_validation_errors, project: project, payload: payload) }
it 'updates AlertManagement::Alert#issue_id' do
expect { subject }.not_to change { alert.reload.issue_id }
end
it 'logs a warning' do
subject
expect(Gitlab::AppLogger).to have_received(:warn).with(
message: 'Cannot link an Issue with Alert',
issue_id: created_issue.id,
alert_id: alert.id,
alert_errors: { hosts: ['hosts array is over 255 chars'] }
)
end
end
end
end
end
end