Add latest changes from gitlab-org/gitlab@master
parent 7a5662b39b
commit 1ab35c9208
34 changed files with 489 additions and 638 deletions
@@ -90,7 +90,7 @@ export const fetchData = ({ dispatch }) => {
    * ready after the BE piece is implemented.
    * https://gitlab.com/gitlab-org/gitlab/-/issues/211330
    */
-  if (isFeatureFlagEnabled('metrics_dashboard_annotations')) {
+  if (isFeatureFlagEnabled('metricsDashboardAnnotations')) {
     dispatch('fetchAnnotations');
   }
 };
@@ -283,8 +283,6 @@ export const receiveEnvironmentsDataFailure = ({ commit }) => {
 };

 export const fetchAnnotations = ({ state, dispatch }) => {
-  dispatch('requestAnnotations');
-
   return gqClient
     .mutate({
       mutation: getAnnotations,
@@ -309,9 +307,6 @@ export const fetchAnnotations = ({ state, dispatch }) => {
     });
 };

-// While this commit does not update the state it will
-// eventually be useful to show a loading state
-export const requestAnnotations = ({ commit }) => commit(types.REQUEST_ANNOTATIONS);
 export const receiveAnnotationsSuccess = ({ commit }, data) =>
   commit(types.RECEIVE_ANNOTATIONS_SUCCESS, data);
 export const receiveAnnotationsFailure = ({ commit }) => commit(types.RECEIVE_ANNOTATIONS_FAILURE);
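A brief aside on the feature flag rename in the `fetchData` hunk above: GitLab exposes backend feature flags to the frontend on `gon.features`, where keys are camelCased, so a snake_case lookup can never match. A sketch of the lookup this implies (the project's real `isFeatureFlagEnabled` helper may be implemented differently):

```javascript
// Sketch only: assumes the helper reads from `gon.features`, where
// `push_frontend_feature_flag(:metrics_dashboard_annotations)` on the
// Rails side surfaces the key as `metricsDashboardAnnotations`.
const isFeatureFlagEnabled = flag => Boolean(window.gon?.features?.[flag]);

isFeatureFlagEnabled('metrics_dashboard_annotations'); // always false: no such key
isFeatureFlagEnabled('metricsDashboardAnnotations'); // true when the flag is on
```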
@@ -4,7 +4,6 @@ export const RECEIVE_METRICS_DASHBOARD_SUCCESS = 'RECEIVE_METRICS_DASHBOARD_SUCC
 export const RECEIVE_METRICS_DASHBOARD_FAILURE = 'RECEIVE_METRICS_DASHBOARD_FAILURE';

 // Annotations
-export const REQUEST_ANNOTATIONS = 'REQUEST_ANNOTATIONS';
 export const RECEIVE_ANNOTATIONS_SUCCESS = 'RECEIVE_ANNOTATIONS_SUCCESS';
 export const RECEIVE_ANNOTATIONS_FAILURE = 'RECEIVE_ANNOTATIONS_FAILURE';
@@ -3,7 +3,6 @@
 module Ci
   class Bridge < Ci::Processable
     include Ci::Contextable
-    include Ci::PipelineDelegator
     include Ci::Metadatable
     include Importable
     include AfterCommitQueue
@@ -4,7 +4,6 @@ module Ci
   class Build < Ci::Processable
     include Ci::Metadatable
     include Ci::Contextable
-    include Ci::PipelineDelegator
     include TokenAuthenticatable
     include AfterCommitQueue
     include ObjectStorage::BackgroundMove
@@ -591,13 +590,7 @@ module Ci

     def merge_request
       strong_memoize(:merge_request) do
-        merge_requests = MergeRequest.includes(:latest_merge_request_diff)
-          .where(source_branch: ref, source_project: pipeline.project)
-          .reorder(iid: :desc)
-
-        merge_requests.find do |merge_request|
-          merge_request.commit_shas.include?(pipeline.sha)
-        end
+        pipeline.all_merge_requests.order(iid: :asc).first
       end
     end

@@ -51,6 +51,12 @@ module Ci
     validates :type, presence: true
     validates :scheduling_type, presence: true, on: :create, if: :validate_scheduling_type?

+    delegate :merge_request?,
+      :merge_request_ref?,
+      :legacy_detached_merge_request_pipeline?,
+      :merge_train_pipeline?,
+      to: :pipeline
+
     def aggregated_needs_names
       read_attribute(:aggregated_needs_names)
     end
@@ -2,7 +2,7 @@

 ##
 # We will disable `ref` and `sha` attributes in `Ci::Build` in the future
-# and remove this module in favor of Ci::PipelineDelegator.
+# and remove this module in favor of Ci::Processable.
 module Ci
   module HasRef
     extend ActiveSupport::Concern
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-##
-# This module is mainly used by child associations of `Ci::Pipeline` that needs to look up
-# single source of truth. For example, `Ci::Build` has `git_ref` method, which behaves
-# slightly different from `Ci::Pipeline`'s `git_ref`. This is very confusing as
-# the system could behave differently time to time.
-# We should have a single interface in `Ci::Pipeline` and access the method always.
-module Ci
-  module PipelineDelegator
-    extend ActiveSupport::Concern
-
-    included do
-      delegate :merge_request?,
-        :merge_request_ref?,
-        :legacy_detached_merge_request_pipeline?,
-        :merge_train_pipeline?, to: :pipeline
-    end
-  end
-end
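Since the deleted `Ci::PipelineDelegator` above and the new block in `Ci::Processable` both rely on ActiveSupport's `delegate` macro, it may help to note what that macro generates: roughly the following hand-written forwarding methods (a sketch, ignoring `delegate`'s edge-case handling):

```ruby
# Approximate expansion of `delegate :merge_request?, ..., to: :pipeline`.
def merge_request?
  pipeline.merge_request?
end

def merge_train_pipeline?
  pipeline.merge_train_pipeline?
end
# ...and likewise for :merge_request_ref? and
# :legacy_detached_merge_request_pipeline?
```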
changelogs/unreleased/update-ci-build-merge-request.yml (new file, +5)
@@ -0,0 +1,5 @@
+---
+title: Use Ci::Pipeline#all_merge_requests.first as Ci::Build#merge_request
+merge_request: 27968
+author:
+type: fixed
@@ -4,6 +4,7 @@
 def get_karma_files(files)
   files.select do |file|
     file.start_with?('ee/spec/javascripts', 'spec/javascripts') &&
+      file.end_with?('_spec.js') &&
       !file.end_with?('browser_spec.js')
   end
 end
@@ -9560,6 +9560,11 @@ type Vulnerability {
   """
   location: JSON

+  """
+  The project on which the vulnerability was found
+  """
+  project: Project
+
   """
   Type of the security report that found the vulnerability (SAST, DEPENDENCY_SCANNING, CONTAINER_SCANNING, DAST)
   """
@@ -28863,6 +28863,20 @@
         "isDeprecated": false,
         "deprecationReason": null
       },
+      {
+        "name": "project",
+        "description": "The project on which the vulnerability was found",
+        "args": [
+
+        ],
+        "type": {
+          "kind": "OBJECT",
+          "name": "Project",
+          "ofType": null
+        },
+        "isDeprecated": false,
+        "deprecationReason": null
+      },
       {
         "name": "reportType",
         "description": "Type of the security report that found the vulnerability (SAST, DEPENDENCY_SCANNING, CONTAINER_SCANNING, DAST)",
@@ -1502,6 +1502,7 @@ Represents a vulnerability.
 | `description` | String | Description of the vulnerability |
 | `id` | ID! | GraphQL ID of the vulnerability |
 | `location` | JSON | The JSON location metadata for the vulnerability. Its format depends on the type of the security scan that found the vulnerability |
+| `project` | Project | The project on which the vulnerability was found |
 | `reportType` | VulnerabilityReportType | Type of the security report that found the vulnerability (SAST, DEPENDENCY_SCANNING, CONTAINER_SCANNING, DAST) |
 | `severity` | VulnerabilitySeverity | Severity of the vulnerability (INFO, UNKNOWN, LOW, MEDIUM, HIGH, CRITICAL) |
 | `state` | VulnerabilityState | State of the vulnerability (DETECTED, DISMISSED, RESOLVED, CONFIRMED) |
@@ -118,7 +118,7 @@ not without its own challenges:
   instance of Docker engine so they won't conflict with each other. But this
   also means that jobs can be slower because there's no caching of layers.
 - By default, Docker 17.09 and higher uses `--storage-driver overlay2` which is
-  the recommended storage driver. See [Using the overlayfs driver](#using-the-overlayfs-driver)
+  the recommended storage driver. See [Using the overlayfs driver](#use-the-overlayfs-driver)
   for details.
 - Since the `docker:19.03.8-dind` container and the Runner container don't share their
   root filesystem, the job's working directory can be used as a mount point for
@@ -448,7 +448,7 @@ The steps in the `script` section for the `build` stage can be summed up to:
 1. The last two commands push the tagged Docker images to the container registry
    so that they may also be used as cache for subsequent builds.

-## Using the OverlayFS driver
+## Use the OverlayFS driver

 NOTE: **Note:**
 The shared Runners on GitLab.com use the `overlay2` driver by default.
@@ -480,18 +480,22 @@ which can be avoided if a different driver is used, for example `overlay2`.
 overlay
 ```

-### Use driver per project
+### Use the OverlayFS driver per project

-You can enable the driver for each project individually by editing the project's `.gitlab-ci.yml`:
+You can enable the driver for each project individually by using the `DOCKER_DRIVER`
+environment [variable](../yaml/README.md#variables) in `.gitlab-ci.yml`:

 ```yaml
 variables:
   DOCKER_DRIVER: overlay2
 ```

-### Use driver for every project
+### Use the OverlayFS driver for every project

-To enable the driver for every project, you can set the environment variable for every build by adding `environment` in the `[[runners]]` section of `config.toml`:
+If you use your own [GitLab Runners](https://docs.gitlab.com/runner/), you
+can enable the driver for every project by setting the `DOCKER_DRIVER`
+environment variable in the
+[`[[runners]]` section of `config.toml`](https://docs.gitlab.com/runner/configuration/advanced-configuration.html#the-runners-section):

 ```toml
 environment = ["DOCKER_DRIVER=overlay2"]
@@ -499,11 +503,9 @@ environment = ["DOCKER_DRIVER=overlay2"]

 If you're running multiple Runners you will have to modify all configuration files.

-> **Notes:**
->
-> - More information about the Runner configuration is available in the [Runner documentation](https://docs.gitlab.com/runner/configuration/).
-> - For more information about using OverlayFS with Docker, you can read
->   [Use the OverlayFS storage driver](https://docs.docker.com/engine/userguide/storagedriver/overlayfs-driver/).
+NOTE: **Note:**
+Read more about the [Runner configuration](https://docs.gitlab.com/runner/configuration/)
+and [using the OverlayFS storage driver](https://docs.docker.com/engine/userguide/storagedriver/overlayfs-driver/).

 ## Using the GitLab Container Registry
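For context on the `config.toml` fragment above: the `environment` entry sits inside a `[[runners]]` block. A minimal sketch of the surrounding file, with placeholder name, URL, and token (these placeholders are not part of the commit):

```toml
concurrent = 1

[[runners]]
  name = "docker-runner"               # placeholder
  url = "https://gitlab.example.com/"  # placeholder
  token = "RUNNER_TOKEN"               # placeholder
  executor = "docker"
  # Every job on this Runner builds with the overlay2 storage driver:
  environment = ["DOCKER_DRIVER=overlay2"]
  [runners.docker]
    image = "docker:19.03.8"
```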
@@ -242,7 +242,7 @@ For a demo of the Azure SAML setup including SCIM, see [SCIM Provisioning on Azu
 |--------------|----------------|
 | Identifier | Identifier (Entity ID) |
 | Assertion consumer service URL | Reply URL (Assertion Consumer Service URL) |
-| Identity provider single sign on URL | Login URL |
+| Identity provider single sign on URL | Sign on URL |
 | Certificate fingerprint | Thumbprint |

 We recommend:
@@ -1,6 +1,7 @@
 import { mount } from '@vue/test-utils';
 import { setTestTimeout } from 'helpers/timeout';
 import { GlLink } from '@gitlab/ui';
+import { TEST_HOST } from 'jest/helpers/test_constants';
 import {
   GlAreaChart,
   GlLineChart,
@@ -12,23 +13,16 @@ import { shallowWrapperContainsSlotText } from 'helpers/vue_test_utils_helper';
 import { createStore } from '~/monitoring/stores';
 import TimeSeries from '~/monitoring/components/charts/time_series.vue';
 import * as types from '~/monitoring/stores/mutation_types';
+import { deploymentData, mockProjectDir } from '../../mock_data';
 import {
-  deploymentData,
-  mockedQueryResultFixture,
+  metricsDashboardPayload,
   metricsDashboardViewModel,
-  mockProjectDir,
-  mockHost,
-} from '../../mock_data';
+  metricResultStatus,
+} from '../../fixture_data';
 import * as iconUtils from '~/lib/utils/icon_utils';
-import { getJSONFixture } from '../../../helpers/fixtures';

 const mockSvgPathContent = 'mockSvgPathContent';

-const metricsDashboardFixture = getJSONFixture(
-  'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
-
 jest.mock('lodash/throttle', () =>
   // this throttle mock executes immediately
   jest.fn(func => {
@@ -51,7 +45,7 @@ describe('Time series component', () => {
       graphData: { ...graphData, type },
       deploymentData: store.state.monitoringDashboard.deploymentData,
       annotations: store.state.monitoringDashboard.annotations,
-      projectPath: `${mockHost}${mockProjectDir}`,
+      projectPath: `${TEST_HOST}${mockProjectDir}`,
     },
     store,
     stubs: {
@@ -74,7 +68,7 @@ describe('Time series component', () => {

       store.commit(
         `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
-        mockedQueryResultFixture,
+        metricResultStatus,
       );
       // dashboard is a dynamically generated fixture and stored at environment_metrics_dashboard.json
       [mockGraphData] = store.state.monitoringDashboard.dashboard.panelGroups[1].panels;
@@ -606,7 +600,7 @@ describe('Time series component', () => {
       store = createStore();
       const graphData = cloneDeep(metricsDashboardViewModel.panelGroups[0].panels[3]);
       graphData.metrics.forEach(metric =>
-        Object.assign(metric, { result: mockedQueryResultFixture.result }),
+        Object.assign(metric, { result: metricResultStatus.result }),
       );

       timeSeriesChart = makeTimeSeriesChart(graphData, 'area-chart');
@@ -1,4 +1,4 @@
-import { shallowMount, createLocalVue, mount } from '@vue/test-utils';
+import { shallowMount, mount } from '@vue/test-utils';
 import { GlDropdownItem, GlDeprecatedButton } from '@gitlab/ui';
 import VueDraggable from 'vuedraggable';
 import MockAdapter from 'axios-mock-adapter';
@@ -6,7 +6,6 @@ import axios from '~/lib/utils/axios_utils';
 import statusCodes from '~/lib/utils/http_status';
 import { metricStates } from '~/monitoring/constants';
 import Dashboard from '~/monitoring/components/dashboard.vue';
-import { getJSONFixture } from '../../../../spec/frontend/helpers/fixtures';

 import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
 import DashboardsDropdown from '~/monitoring/components/dashboards_dropdown.vue';
@@ -14,21 +13,9 @@ import GroupEmptyState from '~/monitoring/components/group_empty_state.vue';
 import PanelType from 'ee_else_ce/monitoring/components/panel_type.vue';
 import { createStore } from '~/monitoring/stores';
 import * as types from '~/monitoring/stores/mutation_types';
-import { setupComponentStore, propsData } from '../init_utils';
-import {
-  metricsDashboardViewModel,
-  environmentData,
-  dashboardGitResponse,
-  mockedQueryResultFixture,
-} from '../mock_data';
-
-const localVue = createLocalVue();
-const expectedPanelCount = 4;
-
-const metricsDashboardFixture = getJSONFixture(
-  'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
+import { setupStoreWithDashboard, setMetricResult, setupStoreWithData } from '../store_utils';
+import { environmentData, dashboardGitResponse, propsData } from '../mock_data';
+import { metricsDashboardViewModel, metricsDashboardPanelCount } from '../fixture_data';

 describe('Dashboard', () => {
   let store;
@@ -43,7 +30,6 @@ describe('Dashboard', () => {

   const createShallowWrapper = (props = {}, options = {}) => {
     wrapper = shallowMount(Dashboard, {
-      localVue,
       propsData: { ...propsData, ...props },
       methods: {
         fetchData: jest.fn(),
@@ -55,7 +41,6 @@ describe('Dashboard', () => {

   const createMountedWrapper = (props = {}, options = {}) => {
     wrapper = mount(Dashboard, {
-      localVue,
       propsData: { ...propsData, ...props },
       methods: {
         fetchData: jest.fn(),
@@ -144,7 +129,7 @@ describe('Dashboard', () => {
         { stubs: ['graph-group', 'panel-type'] },
       );

-      setupComponentStore(wrapper);
+      setupStoreWithData(wrapper.vm.$store);

       return wrapper.vm.$nextTick().then(() => {
         expect(wrapper.vm.showEmptyState).toEqual(false);
@@ -172,7 +157,7 @@ describe('Dashboard', () => {
     beforeEach(() => {
       createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });

-      setupComponentStore(wrapper);
+      setupStoreWithData(wrapper.vm.$store);

       return wrapper.vm.$nextTick();
     });
@@ -201,14 +186,7 @@ describe('Dashboard', () => {
     it('hides the environments dropdown list when there is no environments', () => {
       createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });

-      wrapper.vm.$store.commit(
-        `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
-        metricsDashboardPayload,
-      );
-      wrapper.vm.$store.commit(
-        `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
-        mockedQueryResultFixture,
-      );
+      setupStoreWithDashboard(wrapper.vm.$store);

       return wrapper.vm.$nextTick().then(() => {
         expect(findAllEnvironmentsDropdownItems()).toHaveLength(0);
@@ -218,7 +196,7 @@ describe('Dashboard', () => {
     it('renders the datetimepicker dropdown', () => {
       createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });

-      setupComponentStore(wrapper);
+      setupStoreWithData(wrapper.vm.$store);

       return wrapper.vm.$nextTick().then(() => {
         expect(wrapper.find(DateTimePicker).exists()).toBe(true);
@@ -228,7 +206,7 @@ describe('Dashboard', () => {
     it('renders the refresh dashboard button', () => {
       createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });

-      setupComponentStore(wrapper);
+      setupStoreWithData(wrapper.vm.$store);

       return wrapper.vm.$nextTick().then(() => {
         const refreshBtn = wrapper.findAll({ ref: 'refreshDashboardBtn' });
@@ -241,7 +219,11 @@ describe('Dashboard', () => {
     describe('when one of the metrics is missing', () => {
       beforeEach(() => {
         createShallowWrapper({ hasMetrics: true });
-        setupComponentStore(wrapper);

+        const { $store } = wrapper.vm;
+
+        setupStoreWithDashboard($store);
+        setMetricResult({ $store, result: [], panel: 2 });
+
         return wrapper.vm.$nextTick();
       });
@@ -273,7 +255,7 @@ describe('Dashboard', () => {
         },
       );

-      setupComponentStore(wrapper);
+      setupStoreWithData(wrapper.vm.$store);

       return wrapper.vm.$nextTick();
     });
@@ -348,14 +330,14 @@ describe('Dashboard', () => {
     beforeEach(() => {
       createShallowWrapper({ hasMetrics: true });

-      setupComponentStore(wrapper);
+      setupStoreWithData(wrapper.vm.$store);

       return wrapper.vm.$nextTick();
     });

     it('wraps vuedraggable', () => {
       expect(findDraggablePanels().exists()).toBe(true);
-      expect(findDraggablePanels().length).toEqual(expectedPanelCount);
+      expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount);
     });

     it('is disabled by default', () => {
@@ -411,11 +393,11 @@ describe('Dashboard', () => {
     it('shows a remove button, which removes a panel', () => {
       expect(findFirstDraggableRemoveButton().isEmpty()).toBe(false);

-      expect(findDraggablePanels().length).toEqual(expectedPanelCount);
+      expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount);
       findFirstDraggableRemoveButton().trigger('click');

       return wrapper.vm.$nextTick(() => {
-        expect(findDraggablePanels().length).toEqual(expectedPanelCount - 1);
+        expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount - 1);
       });
     });

@@ -534,7 +516,7 @@ describe('Dashboard', () => {
     beforeEach(() => {
       createShallowWrapper({ hasMetrics: true, currentDashboard });

-      setupComponentStore(wrapper);
+      setupStoreWithData(wrapper.vm.$store);

       return wrapper.vm.$nextTick();
     });
@@ -3,7 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
 import axios from '~/lib/utils/axios_utils';
 import Dashboard from '~/monitoring/components/dashboard.vue';
 import { createStore } from '~/monitoring/stores';
-import { propsData } from '../init_utils';
+import { propsData } from '../mock_data';

 jest.mock('~/lib/utils/url_utility');
@@ -9,12 +9,11 @@ import {
   updateHistory,
 } from '~/lib/utils/url_utility';
 import axios from '~/lib/utils/axios_utils';
-import { mockProjectDir } from '../mock_data';
+import { mockProjectDir, propsData } from '../mock_data';

 import Dashboard from '~/monitoring/components/dashboard.vue';
 import { createStore } from '~/monitoring/stores';
 import { defaultTimeRange } from '~/vue_shared/constants';
-import { propsData } from '../init_utils';

 jest.mock('~/flash');
 jest.mock('~/lib/utils/url_utility');
spec/frontend/monitoring/fixture_data.js (new file, +25)
@@ -0,0 +1,25 @@
+import { mapToDashboardViewModel } from '~/monitoring/stores/utils';
+import { metricsResult } from './mock_data';
+
+// Use globally available `getJSONFixture` so this file can be imported by both karma and jest specs
+export const metricsDashboardResponse = getJSONFixture(
+  'metrics_dashboard/environment_metrics_dashboard.json',
+);
+export const metricsDashboardPayload = metricsDashboardResponse.dashboard;
+export const metricsDashboardViewModel = mapToDashboardViewModel(metricsDashboardPayload);
+
+export const metricsDashboardPanelCount = 22;
+export const metricResultStatus = {
+  // First metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
+  metricId: 'NO_DB_response_metrics_nginx_ingress_throughput_status_code',
+  result: metricsResult,
+};
+export const metricResultPods = {
+  // Second metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
+  metricId: 'NO_DB_response_metrics_nginx_ingress_latency_pod_average',
+  result: metricsResult,
+};
+export const metricResultEmpty = {
+  metricId: 'NO_DB_response_metrics_nginx_ingress_16_throughput_status_code',
+  result: [],
+};
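A note on the `getJSONFixture` global used in the new fixture file: because `fixture_data.js` has to load under both Karma and Jest, it relies on each test environment registering the helper globally instead of importing it. A setup file could register such a global along these lines (the fixture root path here is an assumption, not GitLab's actual configuration):

```javascript
// Sketch: register a global JSON fixture loader before specs run.
const fs = require('fs');
const path = require('path');

// Assumed fixture root; the real setup may resolve this differently.
const FIXTURES_ROOT = path.resolve(__dirname, 'tmp/tests/frontend/fixtures');

global.getJSONFixture = relativePath =>
  JSON.parse(fs.readFileSync(path.join(FIXTURES_ROOT, relativePath), 'utf8'));
```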
@@ -1,57 +0,0 @@
-import * as types from '~/monitoring/stores/mutation_types';
-import {
-  metricsDashboardPayload,
-  mockedEmptyResult,
-  mockedQueryResultPayload,
-  mockedQueryResultPayloadCoresTotal,
-  mockApiEndpoint,
-  environmentData,
-} from './mock_data';
-
-export const propsData = {
-  hasMetrics: false,
-  documentationPath: '/path/to/docs',
-  settingsPath: '/path/to/settings',
-  clustersPath: '/path/to/clusters',
-  tagsPath: '/path/to/tags',
-  projectPath: '/path/to/project',
-  logsPath: '/path/to/logs',
-  defaultBranch: 'master',
-  metricsEndpoint: mockApiEndpoint,
-  deploymentsEndpoint: null,
-  emptyGettingStartedSvgPath: '/path/to/getting-started.svg',
-  emptyLoadingSvgPath: '/path/to/loading.svg',
-  emptyNoDataSvgPath: '/path/to/no-data.svg',
-  emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
-  emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
-  currentEnvironmentName: 'production',
-  customMetricsAvailable: false,
-  customMetricsPath: '',
-  validateQueryPath: '',
-};
-
-export const setupComponentStore = wrapper => {
-  wrapper.vm.$store.commit(
-    `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
-    metricsDashboardPayload,
-  );
-
-  // Load 3 panels to the dashboard, one with an empty result
-  wrapper.vm.$store.commit(
-    `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
-    mockedEmptyResult,
-  );
-  wrapper.vm.$store.commit(
-    `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
-    mockedQueryResultPayload,
-  );
-  wrapper.vm.$store.commit(
-    `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
-    mockedQueryResultPayloadCoresTotal,
-  );
-
-  wrapper.vm.$store.commit(
-    `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
-    environmentData,
-  );
-};
@@ -1,13 +1,47 @@
-import { mapToDashboardViewModel } from '~/monitoring/stores/utils';
-
+// This import path needs to be relative for now because this mock data is used in
+// Karma specs too, where the helpers/test_constants alias can not be resolved
+import { TEST_HOST } from '../helpers/test_constants';

-export const mockHost = 'http://test.host';
 export const mockProjectDir = '/frontend-fixtures/environments-project';
 export const mockApiEndpoint = `${TEST_HOST}/monitoring/mock`;

+export const propsData = {
+  hasMetrics: false,
+  documentationPath: '/path/to/docs',
+  settingsPath: '/path/to/settings',
+  clustersPath: '/path/to/clusters',
+  tagsPath: '/path/to/tags',
+  projectPath: '/path/to/project',
+  logsPath: '/path/to/logs',
+  defaultBranch: 'master',
+  metricsEndpoint: mockApiEndpoint,
+  deploymentsEndpoint: null,
+  emptyGettingStartedSvgPath: '/path/to/getting-started.svg',
+  emptyLoadingSvgPath: '/path/to/loading.svg',
+  emptyNoDataSvgPath: '/path/to/no-data.svg',
+  emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
+  emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
+  currentEnvironmentName: 'production',
+  customMetricsAvailable: false,
+  customMetricsPath: '',
+  validateQueryPath: '',
+};
+
+const customDashboardsData = new Array(30).fill(null).map((_, idx) => ({
+  default: false,
+  display_name: `Custom Dashboard ${idx}`,
+  can_edit: true,
+  system_dashboard: false,
+  project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_${idx}.yml`,
+  path: `.gitlab/dashboards/dashboard_${idx}.yml`,
+}));
+
+export const mockDashboardsErrorResponse = {
+  all_dashboards: customDashboardsData,
+  message: "Each 'panel_group' must define an array :panels",
+  status: 'error',
+};
+
 export const anomalyDeploymentData = [
   {
     id: 111,
@@ -266,77 +300,6 @@ export const metricsNewGroupsAPIResponse = [
   },
 ];

-const metricsResult = [
-  {
-    metric: {},
-    values: [
-      [1563272065.589, '10.396484375'],
-      [1563272125.589, '10.333984375'],
-      [1563272185.589, '10.333984375'],
-      [1563272245.589, '10.333984375'],
-      [1563272305.589, '10.333984375'],
-      [1563272365.589, '10.333984375'],
-      [1563272425.589, '10.38671875'],
-      [1563272485.589, '10.333984375'],
-      [1563272545.589, '10.333984375'],
-      [1563272605.589, '10.333984375'],
-      [1563272665.589, '10.333984375'],
-      [1563272725.589, '10.333984375'],
-      [1563272785.589, '10.396484375'],
-      [1563272845.589, '10.333984375'],
-      [1563272905.589, '10.333984375'],
-      [1563272965.589, '10.3984375'],
-      [1563273025.589, '10.337890625'],
-      [1563273085.589, '10.34765625'],
-      [1563273145.589, '10.337890625'],
-      [1563273205.589, '10.337890625'],
-      [1563273265.589, '10.337890625'],
-      [1563273325.589, '10.337890625'],
-      [1563273385.589, '10.337890625'],
-      [1563273445.589, '10.337890625'],
-      [1563273505.589, '10.337890625'],
-      [1563273565.589, '10.337890625'],
-      [1563273625.589, '10.337890625'],
-      [1563273685.589, '10.337890625'],
-      [1563273745.589, '10.337890625'],
-      [1563273805.589, '10.337890625'],
-      [1563273865.589, '10.390625'],
-      [1563273925.589, '10.390625'],
-    ],
-  },
-];
-
-export const mockedEmptyResult = {
-  metricId: '1_response_metrics_nginx_ingress_throughput_status_code',
-  result: [],
-};
-
-export const mockedEmptyThroughputResult = {
-  metricId: 'NO_DB_response_metrics_nginx_ingress_16_throughput_status_code',
-  result: [],
-};
-
-export const mockedQueryResultPayload = {
-  metricId: '12_system_metrics_kubernetes_container_memory_total',
-  result: metricsResult,
-};
-
-export const mockedQueryResultPayloadCoresTotal = {
-  metricId: '13_system_metrics_kubernetes_container_cores_total',
-  result: metricsResult,
-};
-
-export const mockedQueryResultFixture = {
-  // First metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
-  metricId: 'NO_DB_response_metrics_nginx_ingress_throughput_status_code',
-  result: metricsResult,
-};
-
-export const mockedQueryResultFixtureStatusCode = {
-  metricId: 'NO_DB_response_metrics_nginx_ingress_latency_pod_average',
-  result: metricsResult,
-};
-
 const extraEnvironmentData = new Array(15).fill(null).map((_, idx) => ({
   id: `gid://gitlab/Environments/${150 + idx}`,
   name: `no-deployment/noop-branch-${idx}`,
@@ -384,158 +347,6 @@ export const environmentData = [
   },
 ].concat(extraEnvironmentData);

-export const metricsDashboardPayload = {
-  dashboard: 'Environment metrics',
-  priority: 1,
-  panel_groups: [
-    {
-      group: 'System metrics (Kubernetes)',
-      priority: 5,
-      panels: [
-        {
-          title: 'Memory Usage (Total)',
-          type: 'area-chart',
-          y_label: 'Total Memory Used',
-          weight: 4,
-          y_axis: {
-            format: 'megabytes',
-          },
-          metrics: [
-            {
-              id: 'system_metrics_kubernetes_container_memory_total',
-              query_range:
-                'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1000/1000',
-              label: 'Total',
-              unit: 'MB',
-              metric_id: 12,
-              prometheus_endpoint_path: 'http://test',
-            },
-          ],
-        },
-        {
-          title: 'Core Usage (Total)',
-          type: 'area-chart',
-          y_label: 'Total Cores',
-          weight: 3,
-          metrics: [
-            {
-              id: 'system_metrics_kubernetes_container_cores_total',
-              query_range:
-                'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
-              label: 'Total',
-              unit: 'cores',
-              metric_id: 13,
-            },
-          ],
-        },
-        {
-          title: 'Memory Usage (Pod average)',
-          type: 'line-chart',
-          y_label: 'Memory Used per Pod',
-          weight: 2,
-          metrics: [
-            {
-              id: 'system_metrics_kubernetes_container_memory_average',
-              query_range:
-                'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
-              label: 'Pod average',
-              unit: 'MB',
-              metric_id: 14,
-            },
-          ],
-        },
-        {
-          title: 'memories',
-          type: 'area-chart',
-          y_label: 'memories',
-          metrics: [
-            {
-              id: 'metric_of_ages_1000',
-              label: 'memory_1000',
-              unit: 'count',
-              prometheus_endpoint_path: '/root',
-              metric_id: 20,
-            },
-            {
-              id: 'metric_of_ages_1001',
-              label: 'memory_1000',
-              unit: 'count',
-              prometheus_endpoint_path: '/root',
-              metric_id: 21,
-            },
-            {
-              id: 'metric_of_ages_1002',
-              label: 'memory_1000',
-              unit: 'count',
-              prometheus_endpoint_path: '/root',
-              metric_id: 22,
-            },
-            {
-              id: 'metric_of_ages_1003',
-              label: 'memory_1000',
-              unit: 'count',
-              prometheus_endpoint_path: '/root',
-              metric_id: 23,
-            },
-            {
-              id: 'metric_of_ages_1004',
-              label: 'memory_1004',
-              unit: 'count',
-              prometheus_endpoint_path: '/root',
-              metric_id: 24,
-            },
-          ],
-        },
-      ],
-    },
-    {
-      group: 'Response metrics (NGINX Ingress VTS)',
-      priority: 10,
-      panels: [
-        {
-          metrics: [
-            {
-              id: 'response_metrics_nginx_ingress_throughput_status_code',
-              label: 'Status Code',
-              metric_id: 1,
-              prometheus_endpoint_path:
-                '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=sum%28rate%28nginx_upstream_responses_total%7Bupstream%3D~%22%25%7Bkube_namespace%7D-%25%7Bci_environment_slug%7D-.%2A%22%7D%5B2m%5D%29%29+by+%28status_code%29',
-              query_range:
-                'sum(rate(nginx_upstream_responses_total{upstream=~"%{kube_namespace}-%{ci_environment_slug}-.*"}[2m])) by (status_code)',
-              unit: 'req / sec',
-            },
-          ],
-          title: 'Throughput',
-          type: 'area-chart',
-          weight: 1,
-          y_label: 'Requests / Sec',
-        },
-      ],
-    },
-  ],
-};
-
-/**
- * Mock of response of metrics_dashboard.json
- */
-export const metricsDashboardResponse = {
-  all_dashboards: [],
-  dashboard: metricsDashboardPayload,
-  metrics_data: {},
-  status: 'success',
-};
-
-export const metricsDashboardViewModel = mapToDashboardViewModel(metricsDashboardPayload);
-
-const customDashboardsData = new Array(30).fill(null).map((_, idx) => ({
-  default: false,
-  display_name: `Custom Dashboard ${idx}`,
-  can_edit: true,
-  system_dashboard: false,
-  project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_${idx}.yml`,
-  path: `.gitlab/dashboards/dashboard_${idx}.yml`,
-}));
-
 export const dashboardGitResponse = [
   {
     default: true,
@@ -548,11 +359,47 @@ export const dashboardGitResponse = [
   ...customDashboardsData,
 ];

-export const mockDashboardsErrorResponse = {
-  all_dashboards: customDashboardsData,
-  message: "Each 'panel_group' must define an array :panels",
-  status: 'error',
-};
+// Metrics mocks
+
+export const metricsResult = [
+  {
+    metric: {},
+    values: [
+      [1563272065.589, '10.396484375'],
+      [1563272125.589, '10.333984375'],
+      [1563272185.589, '10.333984375'],
+      [1563272245.589, '10.333984375'],
+      [1563272305.589, '10.333984375'],
+      [1563272365.589, '10.333984375'],
+      [1563272425.589, '10.38671875'],
+      [1563272485.589, '10.333984375'],
+      [1563272545.589, '10.333984375'],
+      [1563272605.589, '10.333984375'],
+      [1563272665.589, '10.333984375'],
+      [1563272725.589, '10.333984375'],
+      [1563272785.589, '10.396484375'],
+      [1563272845.589, '10.333984375'],
+      [1563272905.589, '10.333984375'],
+      [1563272965.589, '10.3984375'],
+      [1563273025.589, '10.337890625'],
+      [1563273085.589, '10.34765625'],
+      [1563273145.589, '10.337890625'],
+      [1563273205.589, '10.337890625'],
+      [1563273265.589, '10.337890625'],
+      [1563273325.589, '10.337890625'],
+      [1563273385.589, '10.337890625'],
+      [1563273445.589, '10.337890625'],
+      [1563273505.589, '10.337890625'],
+      [1563273565.589, '10.337890625'],
+      [1563273625.589, '10.337890625'],
+      [1563273685.589, '10.337890625'],
+      [1563273745.589, '10.337890625'],
+      [1563273805.589, '10.337890625'],
+      [1563273865.589, '10.390625'],
+      [1563273925.589, '10.390625'],
+    ],
+  },
+];

 export const graphDataPrometheusQuery = {
   title: 'Super Chart A2',
@@ -31,11 +31,14 @@ import {
   deploymentData,
   environmentData,
   annotationsData,
-  metricsDashboardResponse,
-  metricsDashboardViewModel,
   dashboardGitResponse,
   mockDashboardsErrorResponse,
 } from '../mock_data';
+import {
+  metricsDashboardResponse,
+  metricsDashboardViewModel,
+  metricsDashboardPanelCount,
+} from '../fixture_data';

 jest.mock('~/flash');

@@ -257,10 +260,7 @@ describe('Monitoring store actions', () => {
         null,
         state,
         [],
-        [
-          { type: 'requestAnnotations' },
-          { type: 'receiveAnnotationsSuccess', payload: annotationsData },
-        ],
+        [{ type: 'receiveAnnotationsSuccess', payload: annotationsData }],
         () => {
           expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
         },
@@ -285,7 +285,7 @@ describe('Monitoring store actions', () => {
         null,
         state,
         [],
-        [{ type: 'requestAnnotations' }, { type: 'receiveAnnotationsFailure' }],
+        [{ type: 'receiveAnnotationsFailure' }],
         () => {
           expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
         },
@@ -553,7 +553,7 @@ describe('Monitoring store actions', () => {

       fetchDashboardData({ state, commit, dispatch })
         .then(() => {
-          expect(dispatch).toHaveBeenCalledTimes(10); // one per metric plus 1 for deployments
+          expect(dispatch).toHaveBeenCalledTimes(metricsDashboardPanelCount + 1); // plus 1 for deployments
           expect(dispatch).toHaveBeenCalledWith('fetchDeploymentsData');
           expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetric', {
             metric,
@@ -581,11 +581,13 @@ describe('Monitoring store actions', () => {
       let metric;
       let state;
       let data;
+      let prometheusEndpointPath;

       beforeEach(() => {
         state = storeState();
-        [metric] = metricsDashboardResponse.dashboard.panel_groups[0].panels[0].metrics;
-        metric = convertObjectPropsToCamelCase(metric, { deep: true });
+        [metric] = metricsDashboardViewModel.panelGroups[0].panels[0].metrics;
+
+        prometheusEndpointPath = metric.prometheusEndpointPath;

         data = {
           metricId: metric.metricId,
@@ -594,7 +596,7 @@ describe('Monitoring store actions', () => {
       });

       it('commits result', done => {
-        mock.onGet('http://test').reply(200, { data }); // One attempt
+        mock.onGet(prometheusEndpointPath).reply(200, { data }); // One attempt

         testAction(
           fetchPrometheusMetric,
@@ -631,7 +633,7 @@ describe('Monitoring store actions', () => {
         };

         it('uses calculated step', done => {
-          mock.onGet('http://test').reply(200, { data }); // One attempt
+          mock.onGet(prometheusEndpointPath).reply(200, { data }); // One attempt

           testAction(
             fetchPrometheusMetric,
@@ -673,7 +675,7 @@ describe('Monitoring store actions', () => {
         };

         it('uses metric step', done => {
-          mock.onGet('http://test').reply(200, { data }); // One attempt
+          mock.onGet(prometheusEndpointPath).reply(200, { data }); // One attempt

           testAction(
             fetchPrometheusMetric,
@@ -705,10 +707,10 @@ describe('Monitoring store actions', () => {

       it('commits result, when waiting for results', done => {
         // Mock multiple attempts while the cache is filling up
-        mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
-        mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
-        mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
-        mock.onGet('http://test').reply(200, { data }); // 4th attempt
+        mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+        mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+        mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+        mock.onGet(prometheusEndpointPath).reply(200, { data }); // 4th attempt

         testAction(
           fetchPrometheusMetric,
@@ -739,10 +741,10 @@ describe('Monitoring store actions', () => {

       it('commits failure, when waiting for results and getting a server error', done => {
         // Mock multiple attempts while the cache is filling up and fails
-        mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
-        mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
-        mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
-        mock.onGet('http://test').reply(500); // 4th attempt
+        mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+        mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+        mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+        mock.onGet(prometheusEndpointPath).reply(500); // 4th attempt

         const error = new Error('Request failed with status code 500');
@@ -3,18 +3,13 @@ import * as getters from '~/monitoring/stores/getters';
 import mutations from '~/monitoring/stores/mutations';
 import * as types from '~/monitoring/stores/mutation_types';
 import { metricStates } from '~/monitoring/constants';
+import { environmentData, metricsResult } from '../mock_data';
 import {
-  environmentData,
-  mockedEmptyThroughputResult,
-  mockedQueryResultFixture,
-  mockedQueryResultFixtureStatusCode,
-} from '../mock_data';
-import { getJSONFixture } from '../../helpers/fixtures';
-
-const metricsDashboardFixture = getJSONFixture(
-  'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
+  metricsDashboardPayload,
+  metricResultStatus,
+  metricResultPods,
+  metricResultEmpty,
+} from '../fixture_data';

 describe('Monitoring store Getters', () => {
   describe('getMetricStates', () => {
@@ -22,6 +17,21 @@ describe('Monitoring store Getters', () => {
     let state;
     let getMetricStates;

+    const setMetricSuccess = ({ result = metricsResult, group = 0, panel = 0, metric = 0 }) => {
+      const { metricId } = state.dashboard.panelGroups[group].panels[panel].metrics[metric];
+      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, {
+        metricId,
+        result,
+      });
+    };
+
+    const setMetricFailure = ({ group = 0, panel = 0, metric = 0 }) => {
+      const { metricId } = state.dashboard.panelGroups[group].panels[panel].metrics[metric];
+      mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
+        metricId,
+      });
+    };
+
     beforeEach(() => {
       setupState = (initState = {}) => {
         state = initState;
@@ -61,31 +71,30 @@ describe('Monitoring store Getters', () => {

     it('on an empty metric with no result, returns NO_DATA', () => {
       mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
-      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyThroughputResult);
+      setMetricSuccess({ result: [], group: 2 });

       expect(getMetricStates()).toEqual([metricStates.NO_DATA]);
     });

     it('on a metric with a result, returns OK', () => {
       mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
-      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
+      setMetricSuccess({ group: 1 });

       expect(getMetricStates()).toEqual([metricStates.OK]);
     });

     it('on a metric with an error, returns an error', () => {
       mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
-      mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
-        metricId: groups[0].panels[0].metrics[0].metricId,
-      });
+      setMetricFailure({});

       expect(getMetricStates()).toEqual([metricStates.UNKNOWN_ERROR]);
     });

     it('on multiple metrics with results, returns OK', () => {
       mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
-      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
-      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);

+      setMetricSuccess({ group: 1 });
+      setMetricSuccess({ group: 1, panel: 1 });
+
       expect(getMetricStates()).toEqual([metricStates.OK]);

@@ -96,15 +105,8 @@ describe('Monitoring store Getters', () => {
     it('on multiple metrics errors', () => {
       mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);

-      mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
-        metricId: groups[0].panels[0].metrics[0].metricId,
-      });
-      mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
-        metricId: groups[0].panels[0].metrics[0].metricId,
-      });
-      mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
-        metricId: groups[1].panels[0].metrics[0].metricId,
-      });
+      setMetricFailure({});
+      setMetricFailure({ group: 1 });

       // Entire dashboard fails
       expect(getMetricStates()).toEqual([metricStates.UNKNOWN_ERROR]);
@@ -116,14 +118,11 @@ describe('Monitoring store Getters', () => {
       mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);

       // An success in 1 group
-      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
+      setMetricSuccess({ group: 1 });

       // An error in 2 groups
-      mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
-        metricId: groups[1].panels[1].metrics[0].metricId,
-      });
-      mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
-        metricId: groups[2].panels[0].metrics[0].metricId,
-      });
+      setMetricFailure({ group: 1, panel: 1 });
+      setMetricFailure({ group: 2, panel: 0 });

       expect(getMetricStates()).toEqual([metricStates.OK, metricStates.UNKNOWN_ERROR]);
       expect(getMetricStates(groups[1].key)).toEqual([
@@ -182,38 +181,35 @@ describe('Monitoring store Getters', () => {

     it('an empty metric, returns empty', () => {
       mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
-      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyThroughputResult);
+      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultEmpty);

       expect(metricsWithData()).toEqual([]);
     });

     it('a metric with results, it returns a metric', () => {
       mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
-      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
+      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultStatus);

-      expect(metricsWithData()).toEqual([mockedQueryResultFixture.metricId]);
+      expect(metricsWithData()).toEqual([metricResultStatus.metricId]);
     });

     it('multiple metrics with results, it return multiple metrics', () => {
       mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
-      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
-      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
+      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultStatus);
+      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultPods);

-      expect(metricsWithData()).toEqual([
-        mockedQueryResultFixture.metricId,
-        mockedQueryResultFixtureStatusCode.metricId,
-      ]);
+      expect(metricsWithData()).toEqual([metricResultStatus.metricId, metricResultPods.metricId]);
     });

     it('multiple metrics with results, it returns metrics filtered by group', () => {
       mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
-      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
-      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
+      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultStatus);
+      mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultPods);

       // First group has metrics
       expect(metricsWithData(state.dashboard.panelGroups[1].key)).toEqual([
-        mockedQueryResultFixture.metricId,
-        mockedQueryResultFixtureStatusCode.metricId,
+        metricResultStatus.metricId,
+        metricResultPods.metricId,
       ]);

       // Second group has no metrics
@@ -6,12 +6,7 @@ import state from '~/monitoring/stores/state';
 import { metricStates } from '~/monitoring/constants';

 import { deploymentData, dashboardGitResponse } from '../mock_data';
-import { getJSONFixture } from '../../helpers/fixtures';
-
-const metricsDashboardFixture = getJSONFixture(
-  'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
+import { metricsDashboardPayload } from '../fixture_data';

 describe('Monitoring mutations', () => {
   let stateCopy;
spec/frontend/monitoring/store_utils.js (new file, +34)
@@ -0,0 +1,34 @@
+import * as types from '~/monitoring/stores/mutation_types';
+import { metricsResult, environmentData } from './mock_data';
+import { metricsDashboardPayload } from './fixture_data';
+
+export const setMetricResult = ({ $store, result, group = 0, panel = 0, metric = 0 }) => {
+  const { dashboard } = $store.state.monitoringDashboard;
+  const { metricId } = dashboard.panelGroups[group].panels[panel].metrics[metric];
+
+  $store.commit(`monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`, {
+    metricId,
+    result,
+  });
+};
+
+const setEnvironmentData = $store => {
+  $store.commit(`monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`, environmentData);
+};
+
+export const setupStoreWithDashboard = $store => {
+  $store.commit(
+    `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
+    metricsDashboardPayload,
+  );
+};
+
+export const setupStoreWithData = $store => {
+  setupStoreWithDashboard($store);
+
+  setMetricResult({ $store, result: [], panel: 0 });
+  setMetricResult({ $store, result: metricsResult, panel: 1 });
+  setMetricResult({ $store, result: metricsResult, panel: 2 });
+
+  setEnvironmentData($store);
+};
@@ -1,7 +1,7 @@
 import * as monitoringUtils from '~/monitoring/utils';
 import { queryToObject, mergeUrlParams, removeParams } from '~/lib/utils/url_utility';
+import { TEST_HOST } from 'jest/helpers/test_constants';
 import {
-  mockHost,
   mockProjectDir,
   graphDataPrometheusQuery,
   graphDataPrometheusQueryRange,
@@ -11,7 +11,7 @@ import {

 jest.mock('~/lib/utils/url_utility');

-const mockPath = `${mockHost}${mockProjectDir}/-/environments/29/metrics`;
+const mockPath = `${TEST_HOST}${mockProjectDir}/-/environments/29/metrics`;

 const generatedLink = 'http://chart.link.com';
spec/frontend/sidebar/sidebar_assignees_spec.js (new file, +74)
@@ -0,0 +1,74 @@
+import { shallowMount } from '@vue/test-utils';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import axios from 'axios';
+import SidebarAssignees from '~/sidebar/components/assignees/sidebar_assignees.vue';
+import Assigness from '~/sidebar/components/assignees/assignees.vue';
+import SidebarMediator from '~/sidebar/sidebar_mediator';
+import SidebarService from '~/sidebar/services/sidebar_service';
+import SidebarStore from '~/sidebar/stores/sidebar_store';
+import Mock from './mock_data';
+
+describe('sidebar assignees', () => {
+  let wrapper;
+  let mediator;
+  let axiosMock;
+
+  const createComponent = () => {
+    wrapper = shallowMount(SidebarAssignees, {
+      propsData: {
+        mediator,
+        field: '',
+      },
+      // Attaching to document is required because this component emits something from the parent element :/
+      attachToDocument: true,
+    });
+  };
+
+  beforeEach(() => {
+    axiosMock = new AxiosMockAdapter(axios);
+    mediator = new SidebarMediator(Mock.mediator);
+
+    jest.spyOn(mediator, 'saveAssignees');
+    jest.spyOn(mediator, 'assignYourself');
+
+    createComponent();
+  });
+
+  afterEach(() => {
+    wrapper.destroy();
+    wrapper = null;
+
+    SidebarService.singleton = null;
+    SidebarStore.singleton = null;
+    SidebarMediator.singleton = null;
+    axiosMock.restore();
+  });
+
+  it('calls the mediator when saves the assignees', () => {
+    expect(mediator.saveAssignees).not.toHaveBeenCalled();
+
+    wrapper.vm.saveAssignees();
+
+    expect(mediator.saveAssignees).toHaveBeenCalled();
+  });
+
+  it('calls the mediator when "assignSelf" method is called', () => {
+    expect(mediator.assignYourself).not.toHaveBeenCalled();
+    expect(mediator.store.assignees.length).toBe(0);
+
+    wrapper.vm.assignSelf();
+
+    expect(mediator.assignYourself).toHaveBeenCalled();
+    expect(mediator.store.assignees.length).toBe(1);
+  });
+
+  it('hides assignees until fetched', () => {
+    expect(wrapper.find(Assigness).exists()).toBe(false);
+
+    wrapper.vm.store.isFetching.assignees = false;
+
+    return wrapper.vm.$nextTick(() => {
+      expect(wrapper.find(Assigness).exists()).toBe(true);
+    });
+  });
+});
|
@ -2,66 +2,13 @@ import Vue from 'vue';
import { createLocalVue } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import Dashboard from '~/monitoring/components/dashboard.vue';
-import * as types from '~/monitoring/stores/mutation_types';
import { createStore } from '~/monitoring/stores';
import axios from '~/lib/utils/axios_utils';
-import {
-  metricsDashboardPayload,
-  mockedEmptyResult,
-  mockedQueryResultPayload,
-  mockedQueryResultPayloadCoresTotal,
-  mockApiEndpoint,
-  environmentData,
-} from '../mock_data';
+import { mockApiEndpoint, propsData } from '../mock_data';
+import { metricsDashboardPayload } from '../fixture_data';
+import { setupStoreWithData } from '../store_utils';

const localVue = createLocalVue();
-const propsData = {
-  hasMetrics: false,
-  documentationPath: '/path/to/docs',
-  settingsPath: '/path/to/settings',
-  clustersPath: '/path/to/clusters',
-  tagsPath: '/path/to/tags',
-  projectPath: '/path/to/project',
-  defaultBranch: 'master',
-  metricsEndpoint: mockApiEndpoint,
-  deploymentsEndpoint: null,
-  emptyGettingStartedSvgPath: '/path/to/getting-started.svg',
-  emptyLoadingSvgPath: '/path/to/loading.svg',
-  emptyNoDataSvgPath: '/path/to/no-data.svg',
-  emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
-  emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
-  currentEnvironmentName: 'production',
-  customMetricsAvailable: false,
-  customMetricsPath: '',
-  validateQueryPath: '',
-};
-
-function setupComponentStore(component) {
-  // Load 2 panel groups
-  component.$store.commit(
-    `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
-    metricsDashboardPayload,
-  );
-
-  // Load 3 panels to the dashboard, one with an empty result
-  component.$store.commit(
-    `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
-    mockedEmptyResult,
-  );
-  component.$store.commit(
-    `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
-    mockedQueryResultPayload,
-  );
-  component.$store.commit(
-    `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
-    mockedQueryResultPayloadCoresTotal,
-  );
-
-  component.$store.commit(
-    `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
-    environmentData,
-  );
-}
-
describe('Dashboard', () => {
  let DashboardComponent;
@@ -109,7 +56,7 @@ describe('Dashboard', () => {
      store,
    });

-    setupComponentStore(component);
+    setupStoreWithData(component.$store);

    return Vue.nextTick().then(() => {
      [promPanel] = component.$el.querySelectorAll('.prometheus-panel');
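The shared `setupStoreWithData` helper that replaces the local `setupComponentStore` is not shown in this diff. Judging from the mutations the removed function committed, a plausible sketch of the shared helper follows; the mock payload names are taken from the old import block, and the real `store_utils.js` may differ:

    import * as types from '~/monitoring/stores/mutation_types';
    import { metricsDashboardPayload } from './fixture_data';
    import { mockedEmptyResult, mockedQueryResultPayload, environmentData } from './mock_data';

    // Commits the same mutations the removed setupComponentStore did,
    // but takes the store directly instead of a component instance.
    export const setupStoreWithData = store => {
      store.commit(`monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`, metricsDashboardPayload);
      store.commit(`monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`, mockedEmptyResult);
      store.commit(`monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`, mockedQueryResultPayload);
      store.commit(`monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`, environmentData);
    };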

spec/javascripts/monitoring/fixture_data.js (new file)
@@ -0,0 +1 @@
export * from '../../frontend/monitoring/fixture_data';

spec/javascripts/monitoring/store_utils.js (new file)
@@ -0,0 +1 @@
export * from '../../frontend/monitoring/store_utils';
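These one-line shims let the remaining Karma specs reuse the Jest suite's fixtures and store helpers instead of keeping duplicate copies. A hypothetical consumer under spec/javascripts would import them through its usual relative paths (file name illustrative, not part of this commit):

    // spec/javascripts/monitoring/example_spec.js (illustrative only)
    import { metricsDashboardPayload } from './fixture_data';
    import { setupStoreWithData } from './store_utils';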

@@ -1,64 +0,0 @@
import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import SidebarAssignees from '~/sidebar/components/assignees/sidebar_assignees.vue';
import SidebarMediator from '~/sidebar/sidebar_mediator';
import SidebarService from '~/sidebar/services/sidebar_service';
import SidebarStore from '~/sidebar/stores/sidebar_store';
import Mock from './mock_data';

describe('sidebar assignees', () => {
  let vm;
  let mediator;
  let sidebarAssigneesEl;
  preloadFixtures('issues/open-issue.html');

  beforeEach(() => {
    loadFixtures('issues/open-issue.html');

    mediator = new SidebarMediator(Mock.mediator);
    spyOn(mediator, 'saveAssignees').and.callThrough();
    spyOn(mediator, 'assignYourself').and.callThrough();

    const SidebarAssigneeComponent = Vue.extend(SidebarAssignees);
    sidebarAssigneesEl = document.querySelector('#js-vue-sidebar-assignees');
    vm = mountComponent(
      SidebarAssigneeComponent,
      {
        mediator,
        field: sidebarAssigneesEl.dataset.field,
      },
      sidebarAssigneesEl,
    );
  });

  afterEach(() => {
    SidebarService.singleton = null;
    SidebarStore.singleton = null;
    SidebarMediator.singleton = null;
  });

  it('calls the mediator when it saves the assignees', () => {
    vm.saveAssignees();

    expect(mediator.saveAssignees).toHaveBeenCalled();
  });

  it('calls the mediator when "assignSelf" method is called', () => {
    vm.assignSelf();

    expect(mediator.assignYourself).toHaveBeenCalled();
    expect(mediator.store.assignees.length).toEqual(1);
  });

  it('hides assignees until fetched', done => {
    const currentAssignee = sidebarAssigneesEl.querySelector('.value');

    expect(currentAssignee).toBe(null);

    vm.store.isFetching.assignees = false;
    Vue.nextTick(() => {
      expect(vm.$el.querySelector('.value')).toBeVisible();
      done();
    });
  });
});
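One detail behind the Karma-to-Jest migration completed by the deletion above: Jasmine's `spyOn` stubs the method out unless `.and.callThrough()` is chained, whereas `jest.spyOn` calls through to the original implementation by default, which is why the Jest version of this spec drops the chaining. A standalone illustration, not part of this commit:

    it('records calls while preserving the original implementation', () => {
      const mediator = { saveAssignees: () => 'saved' };

      // No .and.callThrough() needed: jest.spyOn keeps the original behavior.
      const spy = jest.spyOn(mediator, 'saveAssignees');

      expect(mediator.saveAssignees()).toBe('saved');
      expect(spy).toHaveBeenCalledTimes(1);
    });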

@@ -17,8 +17,6 @@ describe Ci::Bridge do
    { trigger: { project: 'my/project', branch: 'master' } }
  end

-  it { is_expected.to include_module(Ci::PipelineDelegator) }
-
  it 'has many sourced pipelines' do
    expect(bridge).to have_many(:sourced_pipelines)
  end
@@ -37,8 +37,6 @@ describe Ci::Build do
  it { is_expected.to delegate_method(:merge_request_ref?).to(:pipeline) }
  it { is_expected.to delegate_method(:legacy_detached_merge_request_pipeline?).to(:pipeline) }

-  it { is_expected.to include_module(Ci::PipelineDelegator) }
-
  describe 'associations' do
    it 'has a bidirectional relationship with projects' do
      expect(described_class.reflect_on_association(:project).has_inverse?).to eq(:builds)
@@ -1818,64 +1816,65 @@ describe Ci::Build do
  end

  describe '#merge_request' do
-    def create_mr(build, pipeline, factory: :merge_request, created_at: Time.now)
-      create(factory, source_project: pipeline.project,
-                      target_project: pipeline.project,
-                      source_branch: build.ref,
-                      created_at: created_at)
-    end
+    subject { pipeline.builds.take.merge_request }

-    context 'when a MR has a reference to the pipeline' do
-      before do
-        @merge_request = create_mr(build, pipeline, factory: :merge_request)
+    context 'on a branch pipeline' do
+      let!(:pipeline) { create(:ci_pipeline, :with_job, project: project, ref: 'fix') }

-        commits = [double(id: pipeline.sha)]
-        allow(@merge_request).to receive(:commits).and_return(commits)
-        allow(MergeRequest).to receive_message_chain(:includes, :where, :reorder).and_return([@merge_request])
+      context 'with no merge request' do
+        it { is_expected.to be_nil }
      end

-      it 'returns the single associated MR' do
-        expect(build.merge_request.id).to eq(@merge_request.id)
+      context 'with an open merge request from the same ref name' do
+        let!(:merge_request) { create(:merge_request, source_project: project, source_branch: 'fix') }
+
+        # If no diff exists, the pipeline commit was not part of the merge
+        # request and may have simply incidentally used the same ref name.
+        context 'without a merge request diff containing the pipeline commit' do
+          it { is_expected.to be_nil }
        end
+
+        # If the merge request was truly opened from the branch that the
+        # pipeline ran on, that head sha will be present in a diff.
+        context 'with a merge request diff containing the pipeline commit' do
+          let!(:mr_diff) { create(:merge_request_diff, merge_request: merge_request) }
+          let!(:mr_diff_commit) { create(:merge_request_diff_commit, sha: build.sha, merge_request_diff: mr_diff) }
+
+          it { is_expected.to eq(merge_request) }
+        end
      end

+      context 'with multiple open merge requests' do
+        let!(:merge_request) { create(:merge_request, source_project: project, source_branch: 'fix') }
+        let!(:mr_diff) { create(:merge_request_diff, merge_request: merge_request) }
+        let!(:mr_diff_commit) { create(:merge_request_diff_commit, sha: build.sha, merge_request_diff: mr_diff) }
+
+        let!(:new_merge_request) { create(:merge_request, source_project: project, source_branch: 'fix', target_branch: 'staging') }
+        let!(:new_mr_diff) { create(:merge_request_diff, merge_request: new_merge_request) }
+        let!(:new_mr_diff_commit) { create(:merge_request_diff_commit, sha: build.sha, merge_request_diff: new_mr_diff) }
+
+        it 'returns the first merge request' do
+          expect(subject).to eq(merge_request)
+        end
+      end
+    end

-    context 'when there is not a MR referencing the pipeline' do
-      it 'returns nil' do
-        expect(build.merge_request).to be_nil
-      end
+    context 'on a detached merge request pipeline' do
+      let(:pipeline) { create(:ci_pipeline, :detached_merge_request_pipeline, :with_job) }
+
+      it { is_expected.to eq(pipeline.merge_request) }
    end

-    context 'when more than one MR have a reference to the pipeline' do
-      before do
-        @merge_request = create_mr(build, pipeline, factory: :merge_request)
-        @merge_request.close!
-        @merge_request2 = create_mr(build, pipeline, factory: :merge_request)
+    context 'on a legacy detached merge request pipeline' do
+      let(:pipeline) { create(:ci_pipeline, :legacy_detached_merge_request_pipeline, :with_job) }

-        commits = [double(id: pipeline.sha)]
-        allow(@merge_request).to receive(:commits).and_return(commits)
-        allow(@merge_request2).to receive(:commits).and_return(commits)
-        allow(MergeRequest).to receive_message_chain(:includes, :where, :reorder).and_return([@merge_request, @merge_request2])
-      end
-
-      it 'returns the first MR' do
-        expect(build.merge_request.id).to eq(@merge_request.id)
-      end
+      it { is_expected.to eq(pipeline.merge_request) }
    end

-    context 'when a Build is created after the MR' do
-      before do
-        @merge_request = create_mr(build, pipeline, factory: :merge_request_with_diffs)
-        pipeline2 = create(:ci_pipeline, project: project)
-        @build2 = create(:ci_build, pipeline: pipeline2)
+    context 'on a pipeline for merged results' do
+      let(:pipeline) { create(:ci_pipeline, :merged_result_pipeline, :with_job) }

-        allow(@merge_request).to receive(:commit_shas)
-          .and_return([pipeline.sha, pipeline2.sha])
-        allow(MergeRequest).to receive_message_chain(:includes, :where, :reorder).and_return([@merge_request])
-      end
-
-      it 'returns the current MR' do
-        expect(@build2.merge_request.id).to eq(@merge_request.id)
-      end
+      it { is_expected.to eq(pipeline.merge_request) }
    end
  end

@@ -6,6 +6,18 @@ describe Ci::Processable do
  let_it_be(:project) { create(:project) }
  let_it_be(:pipeline) { create(:ci_pipeline, project: project) }

+  let_it_be(:detached_merge_request_pipeline) do
+    create(:ci_pipeline, :detached_merge_request_pipeline, :with_job, project: project)
+  end
+
+  let_it_be(:legacy_detached_merge_request_pipeline) do
+    create(:ci_pipeline, :legacy_detached_merge_request_pipeline, :with_job, project: project)
+  end
+
+  let_it_be(:merged_result_pipeline) do
+    create(:ci_pipeline, :merged_result_pipeline, :with_job, project: project)
+  end
+
  describe '#aggregated_needs_names' do
    let(:with_aggregated_needs) { pipeline.processables.select_with_aggregated_needs(project) }
@@ -155,4 +167,70 @@ describe Ci::Processable do
      end
    end
  end
+
+  describe '#merge_request?' do
+    subject { pipeline.processables.first.merge_request? }
+
+    context 'in a detached merge request pipeline' do
+      let(:pipeline) { detached_merge_request_pipeline }
+
+      it { is_expected.to eq(pipeline.merge_request?) }
+    end
+
+    context 'in a legacy detached merge request pipeline' do
+      let(:pipeline) { legacy_detached_merge_request_pipeline }
+
+      it { is_expected.to eq(pipeline.merge_request?) }
+    end
+
+    context 'in a pipeline for merged results' do
+      let(:pipeline) { merged_result_pipeline }
+
+      it { is_expected.to eq(pipeline.merge_request?) }
+    end
+  end
+
+  describe '#merge_request_ref?' do
+    subject { pipeline.processables.first.merge_request_ref? }
+
+    context 'in a detached merge request pipeline' do
+      let(:pipeline) { detached_merge_request_pipeline }
+
+      it { is_expected.to eq(pipeline.merge_request_ref?) }
+    end
+
+    context 'in a legacy detached merge request pipeline' do
+      let(:pipeline) { legacy_detached_merge_request_pipeline }
+
+      it { is_expected.to eq(pipeline.merge_request_ref?) }
+    end
+
+    context 'in a pipeline for merged results' do
+      let(:pipeline) { merged_result_pipeline }
+
+      it { is_expected.to eq(pipeline.merge_request_ref?) }
+    end
+  end
+
+  describe '#legacy_detached_merge_request_pipeline?' do
+    subject { pipeline.processables.first.legacy_detached_merge_request_pipeline? }
+
+    context 'in a detached merge request pipeline' do
+      let(:pipeline) { detached_merge_request_pipeline }
+
+      it { is_expected.to eq(pipeline.legacy_detached_merge_request_pipeline?) }
+    end
+
+    context 'in a legacy detached merge request pipeline' do
+      let(:pipeline) { legacy_detached_merge_request_pipeline }
+
+      it { is_expected.to eq(pipeline.legacy_detached_merge_request_pipeline?) }
+    end
+
+    context 'in a pipeline for merged results' do
+      let(:pipeline) { merged_result_pipeline }
+
+      it { is_expected.to eq(pipeline.legacy_detached_merge_request_pipeline?) }
+    end
+  end
 end