Add latest changes from gitlab-org/gitlab@master

parent b941629bbf
commit d91ff791fb

34 changed files with 271 additions and 124 deletions

@@ -323,6 +323,7 @@ export default {
        v-gl-tooltip
        :title="__('Weight')"
        class="d-none d-sm-inline-block js-weight"
        data-testid="weight"
      >
        <gl-icon name="weight" class="align-text-bottom" />
        {{ issuable.weight }}

@@ -48,7 +48,10 @@ export default {
      return this.result.values.map(val => {
        const [yLabel] = val;

        return formatDate(new Date(yLabel), { format: formats.shortTime, timezone: this.timezone });
        return formatDate(new Date(yLabel), {
          format: formats.shortTime,
          timezone: this.timezone,
        });
      });
    },
    result() {

@@ -76,11 +76,14 @@ export const getYAxisOptions = ({
  };
};

export const getTimeAxisOptions = ({ timezone = timezones.LOCAL } = {}) => ({
export const getTimeAxisOptions = ({
  timezone = timezones.LOCAL,
  format = formats.shortDateTime,
} = {}) => ({
  name: __('Time'),
  type: axisTypes.time,
  axisLabel: {
    formatter: date => formatDate(date, { format: formats.shortTime, timezone }),
    formatter: date => formatDate(date, { format, timezone }),
  },
  axisPointer: {
    snap: false,
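
The hunk above makes the x-axis label format injectable via a new `format` option that defaults to `formats.shortDateTime`. As a rough sketch of what callers can now do (the import paths below are assumptions for illustration, not part of this commit):

```javascript
// Sketch only: module paths are assumed; option and format names come from the hunks above.
import { getTimeAxisOptions } from '~/monitoring/components/charts/options';
import { formats, timezones } from '~/monitoring/format_date';

// Default behaviour now labels the axis with date and time ('m/d h:MM TT').
const xAxisWithDate = getTimeAxisOptions({ timezone: timezones.UTC });

// Charts that prefer the previous time-only labels pass the old format explicitly,
// as the time-series chart does in a later hunk of this commit.
const xAxisTimeOnly = getTimeAxisOptions({
  timezone: timezones.UTC,
  format: formats.shortTime, // 'h:MM TT'
});
```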

@@ -6,7 +6,7 @@ import { chartHeight, legendLayoutTypes } from '../../constants';
import { s__ } from '~/locale';
import { graphDataValidatorForValues } from '../../utils';
import { getTimeAxisOptions, axisTypes } from './options';
import { timezones } from '../../format_date';
import { formats, timezones } from '../../format_date';

export default {
  components: {

@@ -97,7 +97,7 @@ export default {
    chartOptions() {
      return {
        xAxis: {
          ...getTimeAxisOptions({ timezone: this.timezone }),
          ...getTimeAxisOptions({ timezone: this.timezone, format: formats.shortTime }),
          type: this.xAxisType,
        },
        dataZoom: [this.dataZoomConfig],

@@ -14,6 +14,7 @@ export const timezones = {

export const formats = {
  shortTime: 'h:MM TT',
  shortDateTime: 'm/d h:MM TT',
  default: 'dd mmm yyyy, h:MMTT (Z)',
};
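
With the new `shortDateTime` entry, a caller of `formatDate` gets both the date and the time in a single label. A minimal sketch, assuming the helper lives in the same module (the import path and sample date are illustrative only):

```javascript
// Sketch only: the import path is an assumption, not part of this commit.
import { formatDate, formats, timezones } from '~/monitoring/format_date';

// 'm/d h:MM TT' renders the date plus a 12-hour time.
const label = formatDate(new Date('2020-05-26T20:00:00.000Z'), {
  format: formats.shortDateTime,
  timezone: timezones.UTC,
});
// => '5/26 8:00 PM', the value the updated x-axis specs below expect.
```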

@@ -22,4 +22,8 @@ module AutoDevopsHelper
      s_('CICD|instance enabled')
    end
  end

  def auto_devops_settings_path(project)
    project_settings_ci_cd_path(project, anchor: 'autodevops-settings')
  end
end

@@ -8,6 +8,11 @@ module Clusters
      include ::Clusters::Concerns::ApplicationCore
      include ::Clusters::Concerns::ApplicationStatus

      # Cilium can only be installed and uninstalled through the
      # cluster-applications project by triggering CI pipeline for a
      # management project. UI operations are not available for such
      # applications. More information:
      # https://docs.gitlab.com/ee/user/clusters/management_project.html
      def allowed_to_uninstall?
        false
      end

@@ -1697,6 +1697,10 @@ class User < ApplicationRecord
    impersonator.present?
  end

  def created_recently?
    created_at > Devise.confirm_within.ago
  end

  protected

  # override, from Devise::Validatable

@@ -5,7 +5,7 @@ module Clusters
    include Gitlab::Utils::StrongMemoize

    MAX_ACCEPTABLE_ARTIFACT_SIZE = 5.kilobytes
    RELEASE_NAMES = %w[prometheus].freeze
    RELEASE_NAMES = %w[prometheus cilium].freeze

    def initialize(job, current_user)
      @job = job

@@ -1,7 +1,7 @@
- confirmation_link = confirmation_url(@resource, confirmation_token: @token)
- if @resource.unconfirmed_email.present?
- if @resource.unconfirmed_email.present? || !@resource.created_recently?
  #content
    = email_default_heading(@resource.unconfirmed_email)
    = email_default_heading(@resource.unconfirmed_email || @resource.email)
    %p Click the link below to confirm your email address.
    #cta
      = link_to 'Confirm your email address', confirmation_link

@@ -1,6 +1,5 @@
<% if @resource.unconfirmed_email.present? %>
<%= @resource.unconfirmed_email %>,

<% if @resource.unconfirmed_email.present? || !@resource.created_recently? %>
<%= @resource.unconfirmed_email || @resource.email %>,
Use the link below to confirm your email address.
<% else %>
  <% if Gitlab.com? %>

@@ -0,0 +1,5 @@
---
title: Replace misleading text in re-confirmation emails
merge_request: 36634
author:
type: security

@@ -0,0 +1,5 @@
---
title: Add default and non-default branch jobs for secret detection
merge_request: 36570
author:
type: added

@@ -0,0 +1,5 @@
---
title: Add DOCKERFILE_PATH to Auto DevOps workflow:rules
merge_request: 36475
author:
type: fixed

@@ -0,0 +1,5 @@
---
title: Add date to x-axes timestamps
merge_request: 36675
author:
type: changed

@@ -0,0 +1,5 @@
---
title: Add Cilium to the ParseClusterApplicationsArtifactService
merge_request: 34695
author:
type: added

changelogs/unreleased/jsx-analyzer.yml (new file, 5 lines)

@@ -0,0 +1,5 @@
---
title: Update eslint secure analyzer to analyze jsx
merge_request: 36505
author:
type: changed

@@ -1455,7 +1455,7 @@ lines with an inserted line break. Splitting product or feature names across
lines makes searching for these items more difficult, and can cause problems if
names change.

For example, the followng Markdown content is *not* formatted correctly:
For example, the following Markdown content is *not* formatted correctly:

```markdown
When entering a product or feature name that includes a space (such as GitLab

@@ -70,7 +70,7 @@ The following configuration is required:
The following example shows a basic request/response flow between the following components:

- Snowplow JS / Ruby Trackers on GitLab.com
- [GitLab.com Snowplow Collector](https://about.gitlab.com/handbook/engineering/infrastructure/library/snowplow/)
- [GitLab.com Snowplow Collector](https://gitlab.com/gitlab-com/gl-infra/readiness/-/blob/master/library/snowplow/index.md)
- GitLab's S3 Bucket
- GitLab's Snowflake Data Warehouse
- Sisense:
|
@ -1556,3 +1556,16 @@ The number and size of nodes might not have enough IP addresses to run or instal
|
|||
|
||||
For reference, all the AWS instance IP limits are found
|
||||
[in this AWS repository on GitHub](https://github.com/aws/amazon-vpc-cni-k8s/blob/master/pkg/awsutils/vpc_ip_resource_limit.go) (search for `InstanceENIsAvailable`).
|
||||
|
||||
### Unable to install Prometheus
|
||||
|
||||
Installing Prometheus is failing with the following error:
|
||||
|
||||
```shell
|
||||
# kubectl -n gitlab-managed-apps logs install-prometheus
|
||||
...
|
||||
Error: Could not get apiVersions from Kubernetes: unable to retrieve the complete list of server APIs: admission.certmanager.k8s.io/v1beta1: the server is currently unable to handle the request
|
||||
```
|
||||
|
||||
This is a bug that was introduced in Helm `2.15` and fixed in `3.0.2`. As a workaround, you'll need
|
||||
to make sure that [`cert-manager`](#cert-manager) is installed successfully prior to installing Prometheus.
|
||||
|
|
|

@@ -75,7 +75,7 @@ stages:

workflow:
  rules:
    - if: '$BUILDPACK_URL || $AUTO_DEVOPS_EXPLICITLY_ENABLED == "1"'
    - if: '$BUILDPACK_URL || $AUTO_DEVOPS_EXPLICITLY_ENABLED == "1" || $DOCKERFILE_PATH'

    - exists:
        - Dockerfile

@@ -94,6 +94,7 @@ eslint-sast:
      exists:
        - '**/*.html'
        - '**/*.js'
        - '**/*.jsx'

flawfinder-sast:
  extends: .sast-analyzer

@@ -8,17 +8,33 @@ variables:
  SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers"
  SECRETS_ANALYZER_VERSION: "3"

secret_detection:
.secret-analyzer:
  stage: test
  image: "$SECURE_ANALYZERS_PREFIX/secrets:$SECRETS_ANALYZER_VERSION"
  services: []
  rules:
    - if: $SECRET_DETECTION_DISABLED
      when: never
    - if: $CI_COMMIT_BRANCH && $GITLAB_FEATURES =~ /\bsecret_detection\b/
      when: on_success
  artifacts:
    reports:
      secret_detection: gl-secret-detection-report.json

secret_detection_default_branch:
  extends: .secret-analyzer
  rules:
    - if: $SECRET_DETECTION_DISABLED
      when: never
    - if: $CI_DEFAULT_BRANCH == $CI_COMMIT_BRANCH &&
          $GITLAB_FEATURES =~ /\bsecret_detection\b/
  script:
    - /analyzer run

secret_detection:
  extends: .secret-analyzer
  rules:
    - if: $SECRET_DETECTION_DISABLED
      when: never
    - if: $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH &&
          $GITLAB_FEATURES =~ /\bsecret_detection\b/
  script:
    - git fetch origin $CI_DEFAULT_BRANCH $CI_BUILD_REF_NAME
    - export SECRET_DETECTION_COMMIT_TO=$(git log --left-right --cherry-pick --pretty=format:"%H" refs/remotes/origin/$CI_DEFAULT_BRANCH...refs/remotes/origin/$CI_BUILD_REF_NAME | tail -n 1)
    - export SECRET_DETECTION_COMMIT_FROM=$CI_COMMIT_SHA
    - /analyzer run

@@ -7,6 +7,19 @@ module Gitlab
  module Metrics
    module Dashboard
      class Finder
        # Dashboards that should not be part of the list of all dashboards
        # displayed on the metrics dashboard page.
        PREDEFINED_DASHBOARD_EXCLUSION_LIST = [
          # This dashboard is only useful in the self monitoring project.
          ::Metrics::Dashboard::SelfMonitoringDashboardService,

          # This dashboard is displayed on the K8s cluster settings health page.
          ::Metrics::Dashboard::ClusterDashboardService,

          # This dashboard is not yet ready for the world.
          ::Metrics::Dashboard::PodDashboardService
        ].freeze

        class << self
          # Returns a formatted dashboard packed with DB info.
          # @param project [Project]

@@ -67,12 +80,32 @@ module Gitlab
          def find_all_paths_from_source(project)
            Gitlab::Metrics::Dashboard::Cache.delete_all!

            default_dashboard_path(project)
              .+ project_service.all_dashboard_paths(project)
            user_facing_dashboard_services(project).flat_map do |service|
              service.all_dashboard_paths(project)
            end
          end

          private

          def user_facing_dashboard_services(project)
            predefined_dashboard_services_for(project) + [project_service]
          end

          def predefined_dashboard_services_for(project)
            # Only list the self monitoring dashboard on the self monitoring project,
            # since it is the only dashboard (at time of writing) that shows data
            # about GitLab itself.
            if project.self_monitoring?
              return [self_monitoring_service]
            end

            predefined_dashboard_services
          end

          def predefined_dashboard_services
            ::Metrics::Dashboard::PredefinedDashboardService.descendants - PREDEFINED_DASHBOARD_EXCLUSION_LIST
          end

          def system_service
            ::Metrics::Dashboard::SystemDashboardService
          end

@@ -85,14 +118,6 @@ module Gitlab
            ::Metrics::Dashboard::SelfMonitoringDashboardService
          end

          def default_dashboard_path(project)
            if project.self_monitoring?
              self_monitoring_service.all_dashboard_paths(project)
            else
              system_service.all_dashboard_paths(project)
            end
          end

          def service_for(options)
            Gitlab::Metrics::Dashboard::ServiceSelector.call(options)
          end

spec/fixtures/helm/helm_list_v2_cilium_deployed.json.gz (new binary file, not shown)
spec/fixtures/helm/helm_list_v2_cilium_failed.json.gz (new binary file, not shown)
spec/fixtures/helm/helm_list_v2_cilium_missing.json.gz (new binary file, not shown)

@@ -63,8 +63,8 @@ describe('Column component', () => {
      return formatter(date);
    };

    it('x-axis is formatted correctly in AM/PM format', () => {
      expect(useXAxisFormatter(mockDate)).toEqual('8:00 PM');
    it('x-axis is formatted correctly in m/d h:MM TT format', () => {
      expect(useXAxisFormatter(mockDate)).toEqual('5/26 8:00 PM');
    });

    describe('when in PT timezone', () => {

@@ -78,17 +78,17 @@ describe('Column component', () => {

      it('by default, values are formatted in PT', () => {
        createWrapper();
        expect(useXAxisFormatter(mockDate)).toEqual('1:00 PM');
        expect(useXAxisFormatter(mockDate)).toEqual('5/26 1:00 PM');
      });

      it('when the chart uses local timezone, y-axis is formatted in PT', () => {
        createWrapper({ timezone: 'LOCAL' });
        expect(useXAxisFormatter(mockDate)).toEqual('1:00 PM');
        expect(useXAxisFormatter(mockDate)).toEqual('5/26 1:00 PM');
      });

      it('when the chart uses UTC, y-axis is formatted in UTC', () => {
        createWrapper({ timezone: 'UTC' });
        expect(useXAxisFormatter(mockDate)).toEqual('8:00 PM');
        expect(useXAxisFormatter(mockDate)).toEqual('5/26 8:00 PM');
      });
    });
  });

@@ -527,8 +527,8 @@ describe('Time series component', () => {
      return formatter(date);
    };

    it('x-axis is formatted correctly in AM/PM format', () => {
      expect(useXAxisFormatter(mockDate)).toEqual('8:00 PM');
    it('x-axis is formatted correctly in m/d h:MM TT format', () => {
      expect(useXAxisFormatter(mockDate)).toEqual('5/26 8:00 PM');
    });

    describe('when in PT timezone', () => {

@@ -542,17 +542,17 @@ describe('Time series component', () => {

      it('by default, values are formatted in PT', () => {
        createWrapper();
        expect(useXAxisFormatter(mockDate)).toEqual('1:00 PM');
        expect(useXAxisFormatter(mockDate)).toEqual('5/26 1:00 PM');
      });

      it('when the chart uses local timezone, y-axis is formatted in PT', () => {
        createWrapper({ timezone: 'LOCAL' });
        expect(useXAxisFormatter(mockDate)).toEqual('1:00 PM');
        expect(useXAxisFormatter(mockDate)).toEqual('5/26 1:00 PM');
      });

      it('when the chart uses UTC, y-axis is formatted in UTC', () => {
        createWrapper({ timezone: 'UTC' });
        expect(useXAxisFormatter(mockDate)).toEqual('8:00 PM');
        expect(useXAxisFormatter(mockDate)).toEqual('5/26 8:00 PM');
      });
    });
  });

@@ -93,6 +93,12 @@ RSpec.describe AutoDevopsHelper do
    end
  end

  describe '#auto_devops_settings_path' do
    it 'returns auto devops settings path' do
      expect(helper.auto_devops_settings_path(project)).to eql(project_settings_ci_cd_path(project, anchor: 'autodevops-settings'))
    end
  end

  describe '#badge_for_auto_devops_scope' do
    subject { helper.badge_for_auto_devops_scope(receiver) }

@@ -205,6 +205,7 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do
        'Buildpack' | { 'README.md' => '' } | { 'BUILDPACK_URL' => 'http://example.com' } | %w(build test) | %w()
        'Explicit set' | { 'README.md' => '' } | { 'AUTO_DEVOPS_EXPLICITLY_ENABLED' => '1' } | %w(build test) | %w()
        'Explicit unset' | { 'README.md' => '' } | { 'AUTO_DEVOPS_EXPLICITLY_ENABLED' => '0' } | %w() | %w(build test)
        'DOCKERFILE_PATH' | { 'README.md' => '' } | { 'DOCKERFILE_PATH' => 'Docker.file' } | %w(build test) | %w()
        'Dockerfile' | { 'Dockerfile' => '' } | {} | %w(build test) | %w()
        'Clojure' | { 'project.clj' => '' } | {} | %w(build test) | %w()
        'Go modules' | { 'go.mod' => '' } | {} | %w(build test) | %w()

spec/mailers/devise_mailer_spec.rb (new file, 27 lines)

@@ -0,0 +1,27 @@
# frozen_string_literal: true

require 'spec_helper'
require 'email_spec'

RSpec.describe DeviseMailer do
  describe "#confirmation_instructions" do
    subject { described_class.confirmation_instructions(user, 'faketoken', {}) }

    context "when confirming the unconfirmed_email" do
      let(:user) { build(:user, unconfirmed_email: 'jdoe@example.com') }

      it "shows the unconfirmed_email" do
        expect(subject.body.encoded).to have_text user.unconfirmed_email
        expect(subject.body.encoded).not_to have_text user.email
      end
    end

    context "when re-confirming the primary email after a security issue" do
      let(:user) { build(:user, created_at: 10.days.ago, unconfirmed_email: nil) }

      it "shows the primary email" do
        expect(subject.body.encoded).to have_text user.email
      end
    end
  end
end

@@ -120,90 +120,9 @@ RSpec.describe Clusters::ParseClusterApplicationsArtifactService do
      end
    end

    context 'release is missing' do
      let(:fixture) { 'spec/fixtures/helm/helm_list_v2_prometheus_missing.json.gz' }
      let(:file) { fixture_file_upload(Rails.root.join(fixture)) }
      let(:artifact) { create(:ci_job_artifact, :cluster_applications, job: job, file: file) }

      context 'application does not exist' do
        it 'does not create or destroy an application' do
          expect do
            described_class.new(job, user).execute(artifact)
          end.not_to change(Clusters::Applications::Prometheus, :count)
        end
      end

      context 'application exists' do
        before do
          create(:clusters_applications_prometheus, :installed, cluster: cluster)
        end

        it 'marks the application as uninstalled' do
          described_class.new(job, user).execute(artifact)

          cluster.application_prometheus.reload
          expect(cluster.application_prometheus).to be_uninstalled
        end
      end
    end

    context 'release is deployed' do
      let(:fixture) { 'spec/fixtures/helm/helm_list_v2_prometheus_deployed.json.gz' }
      let(:file) { fixture_file_upload(Rails.root.join(fixture)) }
      let(:artifact) { create(:ci_job_artifact, :cluster_applications, job: job, file: file) }

      context 'application does not exist' do
        it 'creates an application and marks it as installed' do
          expect do
            described_class.new(job, user).execute(artifact)
          end.to change(Clusters::Applications::Prometheus, :count)

          expect(cluster.application_prometheus).to be_persisted
          expect(cluster.application_prometheus).to be_installed
        end
      end

      context 'application exists' do
        before do
          create(:clusters_applications_prometheus, :errored, cluster: cluster)
        end

        it 'marks the application as installed' do
          described_class.new(job, user).execute(artifact)

          expect(cluster.application_prometheus).to be_installed
        end
      end
    end

    context 'release is failed' do
      let(:fixture) { 'spec/fixtures/helm/helm_list_v2_prometheus_failed.json.gz' }
      let(:file) { fixture_file_upload(Rails.root.join(fixture)) }
      let(:artifact) { create(:ci_job_artifact, :cluster_applications, job: job, file: file) }

      context 'application does not exist' do
        it 'creates an application and marks it as errored' do
          expect do
            described_class.new(job, user).execute(artifact)
          end.to change(Clusters::Applications::Prometheus, :count)

          expect(cluster.application_prometheus).to be_persisted
          expect(cluster.application_prometheus).to be_errored
          expect(cluster.application_prometheus.status_reason).to eq('Helm release failed to install')
        end
      end

      context 'application exists' do
        before do
          create(:clusters_applications_prometheus, :installed, cluster: cluster)
        end

        it 'marks the application as errored' do
          described_class.new(job, user).execute(artifact)

          expect(cluster.application_prometheus).to be_errored
          expect(cluster.application_prometheus.status_reason).to eq('Helm release failed to install')
        end
    Clusters::ParseClusterApplicationsArtifactService::RELEASE_NAMES.each do |release_name|
      context release_name do
        include_examples 'parse cluster applications artifact', release_name
      end
    end
  end

@@ -0,0 +1,89 @@
# frozen_string_literal: true

RSpec.shared_examples 'parse cluster applications artifact' do |release_name|
  let(:application_class) { Clusters::Cluster::APPLICATIONS[release_name] }
  let(:cluster_application) { cluster.public_send("application_#{release_name}") }
  let(:file) { fixture_file_upload(Rails.root.join(fixture)) }
  let(:artifact) { create(:ci_job_artifact, :cluster_applications, job: job, file: file) }

  context 'release is missing' do
    let(:fixture) { "spec/fixtures/helm/helm_list_v2_#{release_name}_missing.json.gz" }

    context 'application does not exist' do
      it 'does not create or destroy an application' do
        expect do
          described_class.new(job, user).execute(artifact)
        end.not_to change(application_class, :count)
      end
    end

    context 'application exists' do
      before do
        create("clusters_applications_#{release_name}".to_sym, :installed, cluster: cluster)
      end

      it 'marks the application as uninstalled' do
        described_class.new(job, user).execute(artifact)

        cluster_application.reload
        expect(cluster_application).to be_uninstalled
      end
    end
  end

  context 'release is deployed' do
    let(:fixture) { "spec/fixtures/helm/helm_list_v2_#{release_name}_deployed.json.gz" }

    context 'application does not exist' do
      it 'creates an application and marks it as installed' do
        expect do
          described_class.new(job, user).execute(artifact)
        end.to change(application_class, :count)

        expect(cluster_application).to be_persisted
        expect(cluster_application).to be_installed
      end
    end

    context 'application exists' do
      before do
        create("clusters_applications_#{release_name}".to_sym, :errored, cluster: cluster)
      end

      it 'marks the application as installed' do
        described_class.new(job, user).execute(artifact)

        expect(cluster_application).to be_installed
      end
    end
  end

  context 'release is failed' do
    let(:fixture) { "spec/fixtures/helm/helm_list_v2_#{release_name}_failed.json.gz" }

    context 'application does not exist' do
      it 'creates an application and marks it as errored' do
        expect do
          described_class.new(job, user).execute(artifact)
        end.to change(application_class, :count)

        expect(cluster_application).to be_persisted
        expect(cluster_application).to be_errored
        expect(cluster_application.status_reason).to eq('Helm release failed to install')
      end
    end

    context 'application exists' do
      before do
        create("clusters_applications_#{release_name}".to_sym, :installed, cluster: cluster)
      end

      it 'marks the application as errored' do
        described_class.new(job, user).execute(artifact)

        expect(cluster_application).to be_errored
        expect(cluster_application.status_reason).to eq('Helm release failed to install')
      end
    end
  end
end