Add latest changes from gitlab-org/gitlab@master

GitLab Bot authored on 2020-03-17 18:09:44 +00:00
parent 8e129497b2
commit 2c156e3c7b
116 changed files with 3132 additions and 290 deletions

View File

@ -1,5 +1,5 @@
<script>
import { createNamespacedHelpers, mapState, mapActions } from 'vuex';
import { createNamespacedHelpers, mapState, mapActions, mapGetters } from 'vuex';
import { escape as esc } from 'lodash';
import { GlFormInput, GlFormCheckbox } from '@gitlab/ui';
import { sprintf, s__ } from '~/locale';
@ -61,6 +61,7 @@ export default {
'gitlabManagedCluster',
'isCreatingCluster',
]),
...mapGetters(['subnetValid']),
...mapRolesState({
roles: 'items',
isLoadingRoles: 'isLoadingItems',
@ -119,7 +120,7 @@ export default {
!this.selectedRegion ||
!this.selectedKeyPair ||
!this.selectedVpc ||
!this.selectedSubnet ||
!this.subnetValid ||
!this.selectedRole ||
!this.selectedSecurityGroup ||
!this.selectedInstanceType ||
@ -127,6 +128,9 @@ export default {
this.isCreatingCluster
);
},
displaySubnetError() {
return Boolean(this.loadingSubnetsError) || this.selectedSubnet?.length === 1;
},
createClusterButtonLabel() {
return this.isCreatingCluster
? s__('ClusterIntegration|Creating Kubernetes cluster')
@ -216,6 +220,13 @@ export default {
false,
);
},
subnetValidationErrorText() {
if (this.loadingSubnetsError) {
return s__('ClusterIntegration|Could not load subnets for the selected VPC');
}
return s__('ClusterIntegration|You should select at least two subnets');
},
securityGroupDropdownHelpText() {
return sprintf(
s__(
@ -289,14 +300,14 @@ export default {
this.setRegion({ region });
this.setVpc({ vpc: null });
this.setKeyPair({ keyPair: null });
this.setSubnet({ subnet: null });
this.setSubnet({ subnet: [] });
this.setSecurityGroup({ securityGroup: null });
this.fetchVpcs({ region });
this.fetchKeyPairs({ region });
},
setVpcAndFetchSubnets(vpc) {
this.setVpc({ vpc });
this.setSubnet({ subnet: null });
this.setSubnet({ subnet: [] });
this.setSecurityGroup({ securityGroup: null });
this.fetchSubnets({ vpc, region: this.selectedRegion });
this.fetchSecurityGroups({ vpc, region: this.selectedRegion });
@ -436,8 +447,8 @@ export default {
:placeholder="s__('ClusterIntergation|Select a subnet')"
:search-field-placeholder="s__('ClusterIntegration|Search subnets')"
:empty-text="s__('ClusterIntegration|No subnet found')"
:has-errors="Boolean(loadingSubnetsError)"
:error-message="s__('ClusterIntegration|Could not load subnets for the selected VPC')"
:has-errors="displaySubnetError"
:error-message="subnetValidationErrorText"
@input="setSubnet({ subnet: $event })"
/>
<p class="form-text text-muted" v-html="subnetDropdownHelpText"></p>

View File

@ -0,0 +1,3 @@
// eslint-disable-next-line import/prefer-default-export
export const subnetValid = ({ selectedSubnet }) =>
Array.isArray(selectedSubnet) && selectedSubnet.length >= 2;
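
For orientation, a small illustration of how this getter behaves (the subnet IDs are hypothetical and the import path is assumed): it accepts only an array with two or more entries, which matches the form's new `displaySubnetError`/`subnetValidationErrorText` logic that flags a single selected subnet.

```javascript
// Illustrative only: hypothetical subnet IDs, not real AWS values.
import { subnetValid } from './getters'; // assumed relative path to this file

subnetValid({ selectedSubnet: [] });                        // false - nothing selected yet
subnetValid({ selectedSubnet: ['subnet-a'] });              // false - a single subnet triggers the error message
subnetValid({ selectedSubnet: ['subnet-a', 'subnet-b'] });  // true  - at least two subnets are required
subnetValid({ selectedSubnet: 'subnet-a' });                // false - non-array values are rejected
```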

View File

@ -21,7 +21,7 @@ export default () => ({
selectedRole: '',
selectedKeyPair: '',
selectedVpc: '',
selectedSubnet: '',
selectedSubnet: [],
selectedSecurityGroup: '',
selectedInstanceType: 'm5.large',
nodeCount: '3',

View File

@ -50,6 +50,11 @@ export default {
type: String,
required: true,
},
endpointCoverage: {
type: String,
required: false,
default: '',
},
projectPath: {
type: String,
required: true,
@ -169,6 +174,7 @@ export default {
endpoint: this.endpoint,
endpointMetadata: this.endpointMetadata,
endpointBatch: this.endpointBatch,
endpointCoverage: this.endpointCoverage,
projectPath: this.projectPath,
dismissEndpoint: this.dismissEndpoint,
showSuggestPopover: this.showSuggestPopover,
@ -218,6 +224,7 @@ export default {
'fetchDiffFiles',
'fetchDiffFilesMeta',
'fetchDiffFilesBatch',
'fetchCoverageFiles',
'startRenderDiffsQueue',
'assignDiscussionsToDiff',
'setHighlightedRow',
@ -292,6 +299,10 @@ export default {
});
}
if (this.endpointCoverage) {
this.fetchCoverageFiles();
}
if (!this.isNotesFetched) {
eventHub.$emit('fetchNotesData');
}

View File

@ -54,7 +54,7 @@ export default {
colspan: {
type: Number,
required: false,
default: 3,
default: 4,
},
},
computed: {

View File

@ -51,7 +51,7 @@ export default {
<template>
<tr v-if="shouldRender" :class="className" class="notes_holder">
<td class="notes-content" colspan="3">
<td class="notes-content" colspan="4">
<div class="content">
<diff-discussions
v-if="line.discussions.length"

View File

@ -1,5 +1,6 @@
<script>
import { mapActions, mapState } from 'vuex';
import { mapActions, mapGetters, mapState } from 'vuex';
import { GlTooltipDirective } from '@gitlab/ui';
import DiffTableCell from './diff_table_cell.vue';
import {
MATCH_LINE_TYPE,
@ -15,11 +16,18 @@ export default {
components: {
DiffTableCell,
},
directives: {
GlTooltip: GlTooltipDirective,
},
props: {
fileHash: {
type: String,
required: true,
},
filePath: {
type: String,
required: true,
},
contextLinesPath: {
type: String,
required: true,
@ -40,6 +48,7 @@ export default {
};
},
computed: {
...mapGetters('diffs', ['fileLineCoverage']),
...mapState({
isHighlighted(state) {
return this.line.line_code !== null && this.line.line_code === state.diffs.highlightedRow;
@ -62,6 +71,9 @@ export default {
isMatchLine() {
return this.line.type === MATCH_LINE_TYPE;
},
coverageState() {
return this.fileLineCoverage(this.filePath, this.line.new_line);
},
},
created() {
this.newLineType = NEW_LINE_TYPE;
@ -113,6 +125,12 @@ export default {
:is-highlighted="isHighlighted"
class="diff-line-num new_line qa-new-diff-line"
/>
<td
v-gl-tooltip.hover
:title="coverageState.text"
:class="[line.type, coverageState.class, { hll: isHighlighted }]"
class="line-coverage"
></td>
<td
:class="[
line.type,
@ -120,7 +138,7 @@ export default {
hll: isHighlighted,
},
]"
class="line_content"
class="line_content with-coverage"
v-html="line.rich_text"
></td>
</tr>

View File

@ -48,6 +48,7 @@ export default {
<colgroup>
<col style="width: 50px;" />
<col style="width: 50px;" />
<col style="width: 8px;" />
<col />
</colgroup>
<tbody>
@ -63,6 +64,7 @@ export default {
<inline-diff-table-row
:key="`${line.line_code || index}`"
:file-hash="diffFile.file_hash"
:file-path="diffFile.file_path"
:context-lines-path="diffFile.context_lines_path"
:line="line"
:is-bottom="index + 1 === diffLinesLength"

View File

@ -122,7 +122,7 @@ export default {
<template>
<tr v-if="shouldRender" :class="className" class="notes_holder">
<td class="notes-content parallel old" colspan="2">
<td class="notes-content parallel old" colspan="3">
<div v-if="shouldRenderDiscussionsOnLeft" class="content">
<diff-discussions
:discussions="line.left.discussions"
@ -147,7 +147,7 @@ export default {
</template>
</diff-discussion-reply>
</td>
<td class="notes-content parallel new" colspan="2">
<td class="notes-content parallel new" colspan="3">
<div v-if="shouldRenderDiscussionsOnRight" class="content">
<diff-discussions
:discussions="line.right.discussions"

View File

@ -49,7 +49,7 @@ export default {
:line="line.left"
:is-top="isTop"
:is-bottom="isBottom"
:colspan="4"
:colspan="6"
/>
</template>
</tr>

View File

@ -1,6 +1,7 @@
<script>
import { mapActions, mapState } from 'vuex';
import { mapActions, mapGetters, mapState } from 'vuex';
import $ from 'jquery';
import { GlTooltipDirective } from '@gitlab/ui';
import DiffTableCell from './diff_table_cell.vue';
import {
MATCH_LINE_TYPE,
@ -18,11 +19,18 @@ export default {
components: {
DiffTableCell,
},
directives: {
GlTooltip: GlTooltipDirective,
},
props: {
fileHash: {
type: String,
required: true,
},
filePath: {
type: String,
required: true,
},
contextLinesPath: {
type: String,
required: true,
@ -44,6 +52,7 @@ export default {
};
},
computed: {
...mapGetters('diffs', ['fileLineCoverage']),
...mapState({
isHighlighted(state) {
const lineCode =
@ -82,6 +91,9 @@ export default {
isMatchLineRight() {
return this.line.right && this.line.right.type === MATCH_LINE_TYPE;
},
coverageState() {
return this.fileLineCoverage(this.filePath, this.line.right.new_line);
},
},
created() {
this.newLineType = NEW_LINE_TYPE;
@ -99,7 +111,7 @@ export default {
const allCellsInHoveringRow = Array.from(e.currentTarget.children);
const hoverIndex = allCellsInHoveringRow.indexOf(hoveringCell);
if (hoverIndex >= 2) {
if (hoverIndex >= 3) {
this.isRightHover = isHover;
} else {
this.isLeftHover = isHover;
@ -143,17 +155,19 @@ export default {
line-position="left"
class="diff-line-num old_line"
/>
<td :class="parallelViewLeftLineType" class="line-coverage left-side"></td>
<td
:id="line.left.line_code"
:class="parallelViewLeftLineType"
class="line_content parallel left-side"
class="line_content with-coverage parallel left-side"
@mousedown="handleParallelLineMouseDown"
v-html="line.left.rich_text"
></td>
</template>
<template v-else>
<td class="diff-line-num old_line empty-cell"></td>
<td class="line_content parallel left-side empty-cell"></td>
<td class="line-coverage left-side empty-cell"></td>
<td class="line_content with-coverage parallel left-side empty-cell"></td>
</template>
<template v-if="line.right && !isMatchLineRight">
<diff-table-cell
@ -169,6 +183,12 @@ export default {
line-position="right"
class="diff-line-num new_line"
/>
<td
v-gl-tooltip.hover
:title="coverageState.text"
:class="[line.right.type, coverageState.class, { hll: isHighlighted }]"
class="line-coverage right-side"
></td>
<td
:id="line.right.line_code"
:class="[
@ -177,14 +197,15 @@ export default {
hll: isHighlighted,
},
]"
class="line_content parallel right-side"
class="line_content with-coverage parallel right-side"
@mousedown="handleParallelLineMouseDown"
v-html="line.right.rich_text"
></td>
</template>
<template v-else>
<td class="diff-line-num old_line empty-cell"></td>
<td class="line_content parallel right-side empty-cell"></td>
<td class="line-coverage right-side empty-cell"></td>
<td class="line_content with-coverage parallel right-side empty-cell"></td>
</template>
</tr>
</template>

View File

@ -47,8 +47,10 @@ export default {
>
<colgroup>
<col style="width: 50px;" />
<col style="width: 8px;" />
<col />
<col style="width: 50px;" />
<col style="width: 8px;" />
<col />
</colgroup>
<tbody>
@ -64,6 +66,7 @@ export default {
<parallel-diff-table-row
:key="line.line_code"
:file-hash="diffFile.file_hash"
:file-path="diffFile.file_path"
:context-lines-path="diffFile.context_lines_path"
:line="line"
:is-bottom="index + 1 === diffLinesLength"

View File

@ -69,6 +69,7 @@ export default function initDiffsApp(store) {
endpoint: dataset.endpoint,
endpointMetadata: dataset.endpointMetadata || '',
endpointBatch: dataset.endpointBatch || '',
endpointCoverage: dataset.endpointCoverage || '',
projectPath: dataset.projectPath,
helpPagePath: dataset.helpPagePath,
currentUser: JSON.parse(dataset.currentUserData) || {},
@ -104,6 +105,7 @@ export default function initDiffsApp(store) {
endpoint: this.endpoint,
endpointMetadata: this.endpointMetadata,
endpointBatch: this.endpointBatch,
endpointCoverage: this.endpointCoverage,
currentUser: this.currentUser,
projectPath: this.projectPath,
helpPagePath: this.helpPagePath,

View File

@ -1,8 +1,10 @@
import Vue from 'vue';
import Cookies from 'js-cookie';
import Poll from '~/lib/utils/poll';
import axios from '~/lib/utils/axios_utils';
import httpStatusCodes from '~/lib/utils/http_status';
import createFlash from '~/flash';
import { s__ } from '~/locale';
import { __, s__ } from '~/locale';
import { handleLocationHash, historyPushState, scrollToElement } from '~/lib/utils/common_utils';
import { mergeUrlParams, getLocationHash } from '~/lib/utils/url_utility';
import TreeWorker from '../workers/tree_worker';
@ -43,6 +45,7 @@ export const setBaseConfig = ({ commit }, options) => {
endpoint,
endpointMetadata,
endpointBatch,
endpointCoverage,
projectPath,
dismissEndpoint,
showSuggestPopover,
@ -52,6 +55,7 @@ export const setBaseConfig = ({ commit }, options) => {
endpoint,
endpointMetadata,
endpointBatch,
endpointCoverage,
projectPath,
dismissEndpoint,
showSuggestPopover,
@ -170,6 +174,26 @@ export const fetchDiffFilesMeta = ({ commit, state }) => {
.catch(() => worker.terminate());
};
export const fetchCoverageFiles = ({ commit, state }) => {
const coveragePoll = new Poll({
resource: {
getCoverageReports: endpoint => axios.get(endpoint),
},
data: state.endpointCoverage,
method: 'getCoverageReports',
successCallback: ({ status, data }) => {
if (status === httpStatusCodes.OK) {
commit(types.SET_COVERAGE_DATA, data);
coveragePoll.stop();
}
},
errorCallback: () => createFlash(__('Something went wrong on our end. Please try again!')),
});
coveragePoll.makeRequest();
};
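
A sketch of what a successful poll produces, assuming the endpoint responds with the `{ files: { '<path>': { '<line>': <hits> } } }` shape that the `fileLineCoverage` getter (added in `getters.js` later in this commit) reads: the response body is committed as-is and polling stops on the first `200 OK`.

```javascript
// Hypothetical payload committed via SET_COVERAGE_DATA; the file path and
// hit counts are made up. Each file maps line numbers to hit counts.
const exampleCoverageData = {
  files: {
    'app/assets/javascripts/example.js': {
      '1': 3, // hit three times by tests
      '2': 0, // loaded but never executed
    },
  },
};
// After commit(types.SET_COVERAGE_DATA, exampleCoverageData), the diffs store
// module's `coverageFiles` state equals this object.
```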
export const setHighlightedRow = ({ commit }, lineCode) => {
const fileHash = lineCode.split('_')[0];
commit(types.SET_HIGHLIGHTED_ROW, lineCode);

View File

@ -1,3 +1,4 @@
import { __, n__ } from '~/locale';
import { PARALLEL_DIFF_VIEW_TYPE, INLINE_DIFF_VIEW_TYPE } from '../constants';
export const isParallelView = state => state.diffViewType === PARALLEL_DIFF_VIEW_TYPE;
@ -98,6 +99,29 @@ export const allBlobs = (state, getters) =>
export const getCommentFormForDiffFile = state => fileHash =>
state.commentForms.find(form => form.fileHash === fileHash);
/**
* Returns the test coverage hits for a specific line of a given file
* @param {string} file
* @param {number} line
* @returns {number}
*/
export const fileLineCoverage = state => (file, line) => {
if (!state.coverageFiles.files) return {};
const fileCoverage = state.coverageFiles.files[file];
if (!fileCoverage) return {};
const lineCoverage = fileCoverage[String(line)];
if (lineCoverage === 0) {
return { text: __('No test coverage'), class: 'no-coverage' };
} else if (lineCoverage >= 0) {
return {
text: n__('Test coverage: %d hit', 'Test coverage: %d hits', lineCoverage),
class: 'coverage',
};
}
return {};
};
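
A few worked cases for this getter against a stubbed state (the file path and hit counts are illustrative, and the tooltip text assumes `n__` interpolates `%d` with the count, as GitLab's locale helpers do): a positive hit count yields the `coverage` class, zero hits yields `no-coverage`, and unknown lines or files fall through to an empty object so the coverage cell gets no tooltip text.

```javascript
// Illustrative calls; 'spec/example.js' and the hit counts are made up.
import { fileLineCoverage } from './getters'; // assumed relative path

const state = {
  coverageFiles: { files: { 'spec/example.js': { '10': 2, '11': 0 } } },
};

fileLineCoverage(state)('spec/example.js', 10);
// => { text: 'Test coverage: 2 hits', class: 'coverage' }
fileLineCoverage(state)('spec/example.js', 11);
// => { text: 'No test coverage', class: 'no-coverage' }
fileLineCoverage(state)('spec/example.js', 12); // line not instrumented
// => {}
fileLineCoverage(state)('other/file.js', 1);    // file not in the report
// => {}
```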
/**
* Returns index of a currently selected diff in diffFiles
* @returns {number}

View File

@ -17,6 +17,7 @@ export default () => ({
commit: null,
startVersion: null,
diffFiles: [],
coverageFiles: {},
mergeRequestDiffs: [],
mergeRequestDiff: null,
diffViewType: viewTypeFromQueryString || viewTypeFromCookie || defaultViewType,

View File

@ -5,6 +5,7 @@ export const SET_RETRIEVING_BATCHES = 'SET_RETRIEVING_BATCHES';
export const SET_DIFF_DATA = 'SET_DIFF_DATA';
export const SET_DIFF_DATA_BATCH = 'SET_DIFF_DATA_BATCH';
export const SET_DIFF_VIEW_TYPE = 'SET_DIFF_VIEW_TYPE';
export const SET_COVERAGE_DATA = 'SET_COVERAGE_DATA';
export const SET_MERGE_REQUEST_DIFFS = 'SET_MERGE_REQUEST_DIFFS';
export const TOGGLE_LINE_HAS_FORM = 'TOGGLE_LINE_HAS_FORM';
export const ADD_CONTEXT_LINES = 'ADD_CONTEXT_LINES';

View File

@ -16,6 +16,7 @@ export default {
endpoint,
endpointMetadata,
endpointBatch,
endpointCoverage,
projectPath,
dismissEndpoint,
showSuggestPopover,
@ -25,6 +26,7 @@ export default {
endpoint,
endpointMetadata,
endpointBatch,
endpointCoverage,
projectPath,
dismissEndpoint,
showSuggestPopover,
@ -69,6 +71,10 @@ export default {
});
},
[types.SET_COVERAGE_DATA](state, coverageFiles) {
Object.assign(state, { coverageFiles });
},
[types.RENDER_FILE](state, file) {
Object.assign(file, {
renderIt: true,

View File

@ -29,3 +29,15 @@
color: $link;
}
}
@mixin line-coverage-border-color($coverage, $no-coverage) {
transition: border-left 0.1s ease-out;
&.coverage {
border-left: 3px solid $coverage;
}
&.no-coverage {
border-left: 3px solid $no-coverage;
}
}

View File

@ -24,6 +24,8 @@ $dark-pre-hll-bg: #373b41;
$dark-hll-bg: #373b41;
$dark-over-bg: #9f9ab5;
$dark-expanded-bg: #3e3e3e;
$dark-coverage: #b5bd68;
$dark-no-coverage: #de935f;
$dark-c: #969896;
$dark-err: #c66;
$dark-k: #b294bb;
@ -124,12 +126,18 @@ $dark-il: #de935f;
}
td.diff-line-num.hll:not(.empty-cell),
td.line-coverage.hll:not(.empty-cell),
td.line_content.hll:not(.empty-cell) {
background-color: $dark-diff-not-empty-bg;
border-color: darken($dark-diff-not-empty-bg, 15%);
}
.line-coverage {
@include line-coverage-border-color($dark-coverage, $dark-no-coverage);
}
.diff-line-num.new,
.line-coverage.new,
.line_content.new {
@include diff-background($dark-new-bg, $dark-new-idiff, $dark-border);
@ -140,6 +148,7 @@ $dark-il: #de935f;
}
.diff-line-num.old,
.line-coverage.old,
.line_content.old {
@include diff-background($dark-old-bg, $dark-old-idiff, $dark-border);
@ -168,6 +177,7 @@ $dark-il: #de935f;
&:not(.diff-expanded) + .diff-expanded,
&.diff-expanded + .line_holder:not(.diff-expanded) {
> .diff-line-num,
> .line-coverage,
> .line_content {
border-top: 1px solid $black;
}
@ -175,6 +185,7 @@ $dark-il: #de935f;
&.diff-expanded {
> .diff-line-num,
> .line-coverage,
> .line_content {
background: $dark-expanded-bg;
border-color: $dark-expanded-bg;

View File

@ -17,6 +17,8 @@ $monokai-diff-border: #808080;
$monokai-highlight-bg: #ffe792;
$monokai-over-bg: #9f9ab5;
$monokai-expanded-bg: #3e3e3e;
$monokai-coverage: #a6e22e;
$monokai-no-coverage: #fd971f;
$monokai-new-bg: rgba(166, 226, 46, 0.1);
$monokai-new-idiff: rgba(166, 226, 46, 0.15);
@ -124,12 +126,18 @@ $monokai-gi: #a6e22e;
}
td.diff-line-num.hll:not(.empty-cell),
td.line-coverage.hll:not(.empty-cell),
td.line_content.hll:not(.empty-cell) {
background-color: $monokai-line-empty-bg;
border-color: $monokai-line-empty-border;
}
.line-coverage {
@include line-coverage-border-color($monokai-coverage, $monokai-no-coverage);
}
.diff-line-num.new,
.line-coverage.new,
.line_content.new {
@include diff-background($monokai-new-bg, $monokai-new-idiff, $monokai-diff-border);
@ -140,6 +148,7 @@ $monokai-gi: #a6e22e;
}
.diff-line-num.old,
.line-coverage.old,
.line_content.old {
@include diff-background($monokai-old-bg, $monokai-old-idiff, $monokai-diff-border);
@ -168,6 +177,7 @@ $monokai-gi: #a6e22e;
&:not(.diff-expanded) + .diff-expanded,
&.diff-expanded + .line_holder:not(.diff-expanded) {
> .diff-line-num,
> .line-coverage,
> .line_content {
border-top: 1px solid $black;
}
@ -175,6 +185,7 @@ $monokai-gi: #a6e22e;
&.diff-expanded {
> .diff-line-num,
> .line-coverage,
> .line_content {
background: $monokai-expanded-bg;
border-color: $monokai-expanded-bg;

View File

@ -51,6 +51,15 @@
@include match-line;
}
.line-coverage {
@include line-coverage-border-color($green-500, $orange-500);
&.old,
&.new {
background-color: $white-normal;
}
}
.diff-line-num {
&.old {
a {
@ -83,6 +92,7 @@
&:not(.diff-expanded) + .diff-expanded,
&.diff-expanded + .line_holder:not(.diff-expanded) {
> .diff-line-num,
> .line-coverage,
> .line_content {
border-top: 1px solid $none-expanded-border;
}
@ -90,6 +100,7 @@
&.diff-expanded {
> .diff-line-num,
> .line-coverage,
> .line_content {
background: $none-expanded-bg;
border-color: $none-expanded-bg;

View File

@ -21,6 +21,8 @@ $solarized-dark-highlight: #094554;
$solarized-dark-hll-bg: #174652;
$solarized-dark-over-bg: #9f9ab5;
$solarized-dark-expanded-bg: #010d10;
$solarized-dark-coverage: #859900;
$solarized-dark-no-coverage: #cb4b16;
$solarized-dark-c: #586e75;
$solarized-dark-err: #93a1a1;
$solarized-dark-g: #93a1a1;
@ -128,12 +130,18 @@ $solarized-dark-il: #2aa198;
}
td.diff-line-num.hll:not(.empty-cell),
td.line-coverage.hll:not(.empty-cell),
td.line_content.hll:not(.empty-cell) {
background-color: $solarized-dark-hll-bg;
border-color: darken($solarized-dark-hll-bg, 15%);
}
.line-coverage {
@include line-coverage-border-color($solarized-dark-coverage, $solarized-dark-no-coverage);
}
.diff-line-num.new,
.line-coverage.new,
.line_content.new {
@include diff-background($solarized-dark-new-bg, $solarized-dark-new-idiff, $solarized-dark-border);
@ -144,6 +152,7 @@ $solarized-dark-il: #2aa198;
}
.diff-line-num.old,
.line-coverage.old,
.line_content.old {
@include diff-background($solarized-dark-old-bg, $solarized-dark-old-idiff, $solarized-dark-border);
@ -172,6 +181,7 @@ $solarized-dark-il: #2aa198;
&:not(.diff-expanded) + .diff-expanded,
&.diff-expanded + .line_holder:not(.diff-expanded) {
> .diff-line-num,
> .line-coverage,
> .line_content {
border-top: 1px solid $black;
}
@ -179,6 +189,7 @@ $solarized-dark-il: #2aa198;
&.diff-expanded {
> .diff-line-num,
> .line-coverage,
> .line_content {
background: $solarized-dark-expanded-bg;
border-color: $solarized-dark-expanded-bg;

View File

@ -23,6 +23,8 @@ $solarized-light-hll-bg: #ddd8c5;
$solarized-light-over-bg: #ded7fc;
$solarized-light-expanded-border: #d2cdbd;
$solarized-light-expanded-bg: #ece6d4;
$solarized-light-coverage: #859900;
$solarized-light-no-coverage: #cb4b16;
$solarized-light-c: #93a1a1;
$solarized-light-err: #586e75;
$solarized-light-g: #586e75;
@ -135,12 +137,18 @@ $solarized-light-il: #2aa198;
}
td.diff-line-num.hll:not(.empty-cell),
td.line-coverage.hll:not(.empty-cell),
td.line_content.hll:not(.empty-cell) {
background-color: $solarized-light-hll-bg;
border-color: darken($solarized-light-hll-bg, 15%);
}
.line-coverage {
@include line-coverage-border-color($solarized-light-coverage, $solarized-light-no-coverage);
}
.diff-line-num.new,
.line-coverage.new,
.line_content.new {
@include diff-background($solarized-light-new-bg,
$solarized-light-new-idiff, $solarized-light-border);
@ -152,6 +160,7 @@ $solarized-light-il: #2aa198;
}
.diff-line-num.old,
.line-coverage.old,
.line_content.old {
@include diff-background($solarized-light-old-bg, $solarized-light-old-idiff, $solarized-light-border);
@ -180,6 +189,7 @@ $solarized-light-il: #2aa198;
&:not(.diff-expanded) + .diff-expanded,
&.diff-expanded + .line_holder:not(.diff-expanded) {
> .diff-line-num,
> .line-coverage,
> .line_content {
border-top: 1px solid $solarized-light-expanded-border;
}
@ -187,6 +197,7 @@ $solarized-light-il: #2aa198;
&.diff-expanded {
> .diff-line-num,
> .line-coverage,
> .line_content {
background: $solarized-light-expanded-bg;
border-color: $solarized-light-expanded-bg;

View File

@ -151,6 +151,7 @@ pre.code,
&:not(.diff-expanded) + .diff-expanded,
&.diff-expanded + .line_holder:not(.diff-expanded) {
> .diff-line-num,
> .line-coverage,
> .line_content {
border-top: 1px solid $white-expanded-border;
}
@ -158,6 +159,7 @@ pre.code,
&.diff-expanded {
> .diff-line-num,
> .line-coverage,
> .line_content {
background: $white-expanded-bg;
border-color: $white-expanded-bg;
@ -197,6 +199,22 @@ pre.code,
background-color: $line-select-yellow;
}
}
.line-coverage {
@include line-coverage-border-color($green-500, $orange-500);
&.old {
background-color: $line-removed;
}
&.new {
background-color: $line-added;
}
&.hll:not(.empty-cell) {
background-color: $line-select-yellow;
}
}
}
// highlight line via anchor

View File

@ -514,6 +514,10 @@ table.code {
position: absolute;
left: 0.5em;
}
&.with-coverage::before {
left: 0;
}
}
&.new {
@ -522,6 +526,10 @@ table.code {
position: absolute;
left: 0.5em;
}
&.with-coverage::before {
left: 0;
}
}
}
}

View File

@ -14,7 +14,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
skip_before_action :merge_request, only: [:index, :bulk_update]
before_action :whitelist_query_limiting, only: [:assign_related_issues, :update]
before_action :authorize_update_issuable!, only: [:close, :edit, :update, :remove_wip, :sort]
before_action :authorize_read_actual_head_pipeline!, only: [:test_reports, :exposed_artifacts]
before_action :authorize_read_actual_head_pipeline!, only: [:test_reports, :exposed_artifacts, :coverage_reports]
before_action :set_issuables_index, only: [:index]
before_action :authenticate_user!, only: [:assign_related_issues]
before_action :check_user_can_push_to_source_branch!, only: [:rebase]
@ -63,6 +63,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
@issuable_sidebar = serializer.represent(@merge_request, serializer: 'sidebar')
@current_user_data = UserSerializer.new(project: @project).represent(current_user, {}, MergeRequestUserEntity).to_json
@show_whitespace_default = current_user.nil? || current_user.show_whitespace_in_diffs
@coverage_path = coverage_reports_project_merge_request_path(@project, @merge_request, format: :json) if @merge_request.has_coverage_reports?
set_pipeline_variables
@ -131,6 +132,14 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
reports_response(@merge_request.compare_test_reports)
end
def coverage_reports
if @merge_request.has_coverage_reports?
reports_response(@merge_request.find_coverage_reports)
else
head :no_content
end
end
def exposed_artifacts
if @merge_request.has_exposed_artifacts?
reports_response(@merge_request.find_exposed_artifacts)

View File

@ -916,6 +916,14 @@ module Ci
end
end
def collect_coverage_reports!(coverage_report)
each_report(Ci::JobArtifact::COVERAGE_REPORT_FILE_TYPES) do |file_type, blob|
Gitlab::Ci::Parsers.fabricate!(file_type).parse!(blob, coverage_report)
end
coverage_report
end
def report_artifacts
job_artifacts.with_reports
end

View File

@ -11,6 +11,7 @@ module Ci
NotSupportedAdapterError = Class.new(StandardError)
TEST_REPORT_FILE_TYPES = %w[junit].freeze
COVERAGE_REPORT_FILE_TYPES = %w[cobertura].freeze
NON_ERASABLE_FILE_TYPES = %w[trace].freeze
DEFAULT_FILE_NAMES = {
archive: nil,
@ -29,7 +30,8 @@ module Ci
performance: 'performance.json',
metrics: 'metrics.txt',
lsif: 'lsif.json',
dotenv: '.env'
dotenv: '.env',
cobertura: 'cobertura-coverage.xml'
}.freeze
INTERNAL_TYPES = {
@ -45,6 +47,7 @@ module Ci
network_referee: :gzip,
lsif: :gzip,
dotenv: :gzip,
cobertura: :gzip,
# All these file formats use `raw` as we need to store them uncompressed
# for Frontend to fetch the files and do analysis
@ -92,6 +95,10 @@ module Ci
with_file_types(TEST_REPORT_FILE_TYPES)
end
scope :coverage_reports, -> do
with_file_types(COVERAGE_REPORT_FILE_TYPES)
end
scope :erasable, -> do
types = self.file_types.reject { |file_type| NON_ERASABLE_FILE_TYPES.include?(file_type) }.values
@ -121,7 +128,8 @@ module Ci
metrics_referee: 13, ## runner referees
network_referee: 14, ## runner referees
lsif: 15, # LSIF data for code navigation
dotenv: 16
dotenv: 16,
cobertura: 17
}
enum file_format: {

View File

@ -820,6 +820,14 @@ module Ci
end
end
def coverage_reports
Gitlab::Ci::Reports::CoverageReports.new.tap do |coverage_reports|
builds.latest.with_reports(Ci::JobArtifact.coverage_reports).each do |build|
build.collect_coverage_reports!(coverage_reports)
end
end
end
def has_exposed_artifacts?
complete? && builds.latest.with_exposed_artifacts.exists?
end

View File

@ -135,29 +135,11 @@ class Event < ApplicationRecord
super(presenter_class: ::EventPresenter)
end
# rubocop:disable Metrics/CyclomaticComplexity
# rubocop:disable Metrics/PerceivedComplexity
def visible_to_user?(user = nil)
if push_action? || commit_note?
Ability.allowed?(user, :download_code, project)
elsif membership_changed?
Ability.allowed?(user, :read_project, project)
elsif created_project_action?
Ability.allowed?(user, :read_project, project)
elsif issue? || issue_note?
Ability.allowed?(user, :read_issue, note? ? note_target : target)
elsif merge_request? || merge_request_note?
Ability.allowed?(user, :read_merge_request, note? ? note_target : target)
elsif personal_snippet_note? || project_snippet_note?
Ability.allowed?(user, :read_snippet, note_target)
elsif milestone?
Ability.allowed?(user, :read_milestone, project)
else
false # No other event types are visible
end
return false unless capability.present?
Ability.allowed?(user, capability, permission_object)
end
# rubocop:enable Metrics/PerceivedComplexity
# rubocop:enable Metrics/CyclomaticComplexity
def resource_parent
project || group
@ -364,8 +346,38 @@ class Event < ApplicationRecord
Event._to_partial_path
end
protected
def capability
@capability ||= begin
if push_action? || commit_note?
:download_code
elsif membership_changed? || created_project_action?
:read_project
elsif issue? || issue_note?
:read_issue
elsif merge_request? || merge_request_note?
:read_merge_request
elsif personal_snippet_note? || project_snippet_note?
:read_snippet
elsif milestone?
:read_milestone
end
end
end
private
def permission_object
if note?
note_target
elsif target_id.present?
target
else
project
end
end
def push_action_name
if new_ref?
"pushed new"

View File

@ -567,6 +567,10 @@ class MergeRequest < ApplicationRecord
diffs.modified_paths
end
def new_paths
diffs.diff_files.map(&:new_path)
end
def diff_base_commit
if merge_request_diff.persisted?
merge_request_diff.base_commit
@ -1295,6 +1299,24 @@ class MergeRequest < ApplicationRecord
compare_reports(Ci::CompareTestReportsService)
end
def has_coverage_reports?
return false unless Feature.enabled?(:coverage_report_view, project)
actual_head_pipeline&.has_reports?(Ci::JobArtifact.coverage_reports)
end
# TODO: this method and compare_test_reports use the same
# result type, which is handled by the controller's #reports_response.
# we should minimize mistakes by isolating the common parts.
# issue: https://gitlab.com/gitlab-org/gitlab/issues/34224
def find_coverage_reports
unless has_coverage_reports?
return { status: :error, status_reason: 'This merge request does not have coverage reports' }
end
compare_reports(Ci::GenerateCoverageReportsService)
end
def has_exposed_artifacts?
return false unless Feature.enabled?(:ci_expose_arbitrary_artifacts_in_mr, default_enabled: true)
@ -1318,7 +1340,7 @@ class MergeRequest < ApplicationRecord
# issue: https://gitlab.com/gitlab-org/gitlab/issues/34224
def compare_reports(service_class, current_user = nil)
with_reactive_cache(service_class.name, current_user&.id) do |data|
unless service_class.new(project, current_user)
unless service_class.new(project, current_user, id: id)
.latest?(base_pipeline, actual_head_pipeline, data)
raise InvalidateReactiveCache
end
@ -1335,7 +1357,7 @@ class MergeRequest < ApplicationRecord
raise NameError, service_class unless service_class < Ci::CompareReportsBaseService
current_user = User.find_by(id: current_user_id)
service_class.new(project, current_user).execute(base_pipeline, actual_head_pipeline)
service_class.new(project, current_user, id: id).execute(base_pipeline, actual_head_pipeline)
end
def all_commits

View File

@ -1672,6 +1672,16 @@ class User < ApplicationRecord
callouts.any?
end
def gitlab_employee?
strong_memoize(:gitlab_employee) do
if Gitlab.com?
Mail::Address.new(email).domain == "gitlab.com"
else
false
end
end
end
# @deprecated
alias_method :owned_or_masters_groups, :owned_or_maintainers_groups

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
module Projects
module ImportExport
class ProjectExportPresenter < Gitlab::View::Presenter::Delegated
include ActiveModel::Serializers::JSON
presents :project
def project_members
super + converted_group_members
end
def description
self.respond_to?(:override_description) ? override_description : super
end
private
def converted_group_members
group_members.each do |group_member|
group_member.source_type = 'Project' # Make group members project members of the future import
end
end
# rubocop: disable CodeReuse/ActiveRecord
def group_members
return [] unless current_user.can?(:admin_group, project.group)
# We need `.where.not(user_id: nil)` here otherwise when a group has an
# invitee, it would make the following query return 0 rows since a NULL
# user_id would be present in the subquery
# See http://stackoverflow.com/questions/129077/not-in-clause-and-null-values
non_null_user_ids = project.project_members.where.not(user_id: nil).select(:user_id)
GroupMembersFinder.new(project.group).execute.where.not(user_id: non_null_user_ids)
end
# rubocop: enable CodeReuse/ActiveRecord
end
end
end

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true
module Ci
# TODO: a couple of points with this approach:
# + reuses existing architecture and reactive caching
# - it's not a report comparison and some comparing features must be turned off.
# see CompareReportsBaseService for more notes.
# issue: https://gitlab.com/gitlab-org/gitlab/issues/34224
class GenerateCoverageReportsService < CompareReportsBaseService
def execute(base_pipeline, head_pipeline)
merge_request = MergeRequest.find_by_id(params[:id])
{
status: :parsed,
key: key(base_pipeline, head_pipeline),
data: head_pipeline.coverage_reports.pick(merge_request.new_paths)
}
rescue => e
Gitlab::ErrorTracking.track_exception(e, project_id: project.id)
{
status: :error,
key: key(base_pipeline, head_pipeline),
status_reason: _('An error occurred while fetching coverage reports.')
}
end
def latest?(base_pipeline, head_pipeline, data)
data&.fetch(:key, nil) == key(base_pipeline, head_pipeline)
end
end
end

View File

@ -54,7 +54,16 @@ module Projects
end
def project_tree_saver
Gitlab::ImportExport::Project::TreeSaver.new(project: project, current_user: current_user, shared: shared, params: params)
tree_saver_class.new(project: project, current_user: current_user, shared: shared, params: params)
end
def tree_saver_class
if ::Feature.enabled?(:streaming_serializer, project)
Gitlab::ImportExport::Project::TreeSaver
else
# Once we remove :streaming_serializer feature flag, Project::LegacyTreeSaver should be removed as well
Gitlab::ImportExport::Project::LegacyTreeSaver
end
end
def uploads_saver

View File

@ -78,6 +78,7 @@
endpoint: diffs_project_merge_request_path(@project, @merge_request, 'json', request.query_parameters),
endpoint_metadata: diffs_metadata_project_json_merge_request_path(@project, @merge_request, 'json', request.query_parameters),
endpoint_batch: diffs_batch_project_json_merge_request_path(@project, @merge_request, 'json', request.query_parameters),
endpoint_coverage: @coverage_path,
help_page_path: suggest_changes_help_path,
current_user_data: @current_user_data,
project_path: project_path(@merge_request.project),

View File

@ -1,91 +1,9 @@
#!/bin/sh
cd $(dirname $0)/..
app_root=$(pwd)
sidekiq_pidfile="$app_root/tmp/pids/sidekiq.pid"
sidekiq_logfile="$app_root/log/sidekiq.log"
sidekiq_config="$app_root/config/sidekiq_queues.yml"
gitlab_user=$(ls -l config.ru | awk '{print $3}')
warn()
{
echo "$@" 1>&2
}
stop()
{
bundle exec sidekiqctl stop $sidekiq_pidfile >> $sidekiq_logfile 2>&1
}
killall()
{
pkill -u $gitlab_user -f 'sidekiq [0-9]'
}
restart()
{
if [ -f $sidekiq_pidfile ]; then
stop
fi
killall
start_sidekiq -P $sidekiq_pidfile -d -L $sidekiq_logfile >> $sidekiq_logfile 2>&1
}
start_no_deamonize()
{
start_sidekiq >> $sidekiq_logfile 2>&1
}
start_sidekiq()
{
cmd="exec"
chpst=$(which chpst)
if [ -n "$chpst" ]; then
cmd="${cmd} ${chpst} -P"
fi
${cmd} bundle exec sidekiq -C "${sidekiq_config}" -e $RAILS_ENV "$@"
}
load_ok()
{
sidekiq_pid=$(cat $sidekiq_pidfile)
if [ -z "$sidekiq_pid" ] ; then
warn "Could not find a PID in $sidekiq_pidfile"
exit 0
fi
if (ps -p $sidekiq_pid -o args | grep '\([0-9]\+\) of \1 busy' 1>&2) ; then
warn "Too many busy Sidekiq workers"
exit 1
fi
exit 0
}
case "$1" in
stop)
stop
;;
start)
restart
;;
start_no_deamonize)
start_no_deamonize
;;
start_foreground)
start_sidekiq
;;
restart)
restart
;;
killall)
killall
;;
load_ok)
load_ok
;;
*)
echo "Usage: RAILS_ENV=your_env $0 {stop|start|start_no_deamonize|restart|killall|load_ok}"
esac
if [ -n "$SIDEKIQ_WORKERS" ] ; then
exec bin/background_jobs_sk_cluster "$@"
else
exec bin/background_jobs_sk "$@"
fi

bin/background_jobs_sk: new executable file (67 lines)
View File

@ -0,0 +1,67 @@
#!/bin/sh
cd $(dirname $0)/..
app_root=$(pwd)
sidekiq_pidfile="$app_root/tmp/pids/sidekiq.pid"
sidekiq_logfile="$app_root/log/sidekiq.log"
sidekiq_config="$app_root/config/sidekiq_queues.yml"
gitlab_user=$(ls -l config.ru | awk '{print $3}')
warn()
{
echo "$@" 1>&2
}
stop()
{
bundle exec sidekiqctl stop $sidekiq_pidfile >> $sidekiq_logfile 2>&1
}
restart()
{
if [ -f $sidekiq_pidfile ]; then
stop
fi
pkill -u $gitlab_user -f 'sidekiq [0-9]'
start_sidekiq -P $sidekiq_pidfile -d -L $sidekiq_logfile >> $sidekiq_logfile 2>&1
}
# Starts in the foreground but writes output to the logfile instead of stdout.
start_silent()
{
start_sidekiq >> $sidekiq_logfile 2>&1
}
start_sidekiq()
{
cmd="exec"
chpst=$(which chpst)
if [ -n "$chpst" ]; then
cmd="${cmd} ${chpst} -P"
fi
${cmd} bundle exec sidekiq -C "${sidekiq_config}" -e $RAILS_ENV "$@"
}
case "$1" in
stop)
stop
;;
start)
restart
;;
start_silent)
warn "Deprecated: Will be removed at 13.0 (see https://gitlab.com/gitlab-org/gitlab/-/issues/196731)."
start_silent
;;
start_foreground)
start_sidekiq
;;
restart)
restart
;;
*)
echo "Usage: RAILS_ENV=<env> $0 {stop|start|start_silent|start_foreground|restart}"
esac

bin/background_jobs_sk_cluster: new executable file (76 lines)
View File

@ -0,0 +1,76 @@
#!/bin/sh
cd $(dirname $0)/..
app_root=$(pwd)
sidekiq_pidfile="$app_root/tmp/pids/sidekiq-cluster.pid"
sidekiq_logfile="$app_root/log/sidekiq.log"
gitlab_user=$(ls -l config.ru | awk '{print $3}')
warn()
{
echo "$@" 1>&2
}
get_sidekiq_pid()
{
if [ ! -f $sidekiq_pidfile ]; then
warn "No pidfile found at $sidekiq_pidfile; is Sidekiq running?"
return
fi
cat $sidekiq_pidfile
}
stop()
{
sidekiq_pid=$(get_sidekiq_pid)
if [ $sidekiq_pid ]; then
kill -TERM $sidekiq_pid
fi
}
restart()
{
if [ -f $sidekiq_pidfile ]; then
stop
fi
warn "Sidekiq output will be written to $sidekiq_logfile"
start_sidekiq >> $sidekiq_logfile 2>&1
}
start_sidekiq()
{
cmd="exec"
chpst=$(which chpst)
if [ -n "$chpst" ]; then
cmd="${cmd} ${chpst} -P"
fi
# sidekiq-cluster expects '*' '*' arguments (one wildcard for each process).
for (( i=1; i<=$SIDEKIQ_WORKERS; i++ ))
do
processes_args+=("*")
done
${cmd} bin/sidekiq-cluster "${processes_args[@]}" -P $sidekiq_pidfile -e $RAILS_ENV
}
case "$1" in
stop)
stop
;;
start)
restart &
;;
start_foreground)
start_sidekiq
;;
restart)
restart &
;;
*)
echo "Usage: RAILS_ENV=<env> SIDEKIQ_WORKERS=<n> $0 {stop|start|start_foreground|restart}"
esac

View File

@ -0,0 +1,5 @@
---
title: Enable customizable_cycle_analytics feature flag by default
merge_request: 27418
author:
type: changed

View File

@ -0,0 +1,5 @@
---
title: Add Cobertura XML coverage visualization to merge request diff view
merge_request: 21791
author: Fabio Huser
type: added

View File

@ -0,0 +1,5 @@
---
title: Support sidekiq-cluster supervision through bin/background_jobs
merge_request: 27042
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Validate that users select at least two subnets in EKS Form
merge_request: 26936
author:
type: fixed

View File

@ -14,6 +14,7 @@ resources :merge_requests, concerns: :awardable, except: [:new, :create, :show],
post :rebase
get :test_reports
get :exposed_artifacts
get :coverage_reports
scope constraints: ->(req) { req.format == :json }, as: :json do
get :commits

View File

@ -1,9 +1,9 @@
# frozen_string_literal: true
TELEMETRY_CHANGED_FILES_MESSAGE = <<~MSG
This merge request adds or changes files that require a
review from the Data team and Telemetry team @gitlab-org/growth/telemetry.
The specific group is mentioned in order to send a notification to team members.
This merge request adds or changes files for which a
review from the Data team and Telemetry team is recommended.
The @gitlab-org/growth/telemetry group is mentioned in order to notify team members.
MSG
usage_data_changed_files = git.modified_files.grep(%r{usage_data})
@ -12,7 +12,7 @@ if usage_data_changed_files.any?
warn format(TELEMETRY_CHANGED_FILES_MESSAGE)
USAGE_DATA_FILES_MESSAGE = <<~MSG
The following files require a review from the [Data team and Telemetry team](https://gitlab.com/groups/gitlab-org/growth/telemetry/-/group_members?with_inherited_permissions=exclude):
For the following files, a review from the [Data team and Telemetry team](https://gitlab.com/groups/gitlab-org/growth/telemetry/-/group_members?with_inherited_permissions=exclude) is recommended:
MSG
markdown(USAGE_DATA_FILES_MESSAGE + helper.markdown_list(usage_data_changed_files))

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class AddCiPipelineSchedulesToPlanLimits < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
DOWNTIME = false
def up
add_column_with_default(:plan_limits, :ci_pipeline_schedules, :integer, default: 0, allow_null: false)
end
def down
remove_column(:plan_limits, :ci_pipeline_schedules)
end
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
class InsertCiPipelineSchedulesPlanLimits < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
return unless Gitlab.com?
create_or_update_plan_limit('ci_pipeline_schedules', 'free', 10)
create_or_update_plan_limit('ci_pipeline_schedules', 'bronze', 50)
create_or_update_plan_limit('ci_pipeline_schedules', 'silver', 50)
create_or_update_plan_limit('ci_pipeline_schedules', 'gold', 50)
end
def down
return unless Gitlab.com?
create_or_update_plan_limit('ci_pipeline_schedules', 'free', 0)
create_or_update_plan_limit('ci_pipeline_schedules', 'bronze', 0)
create_or_update_plan_limit('ci_pipeline_schedules', 'silver', 0)
create_or_update_plan_limit('ci_pipeline_schedules', 'gold', 0)
end
end

View File

@ -3148,6 +3148,7 @@ ActiveRecord::Schema.define(version: 2020_03_12_163407) do
t.integer "project_hooks", default: 0, null: false
t.integer "group_hooks", default: 0, null: false
t.integer "ci_project_subscriptions", default: 0, null: false
t.integer "ci_pipeline_schedules", default: 0, null: false
t.index ["plan_id"], name: "index_plan_limits_on_plan_id", unique: true
end

View File

@ -109,6 +109,29 @@ Plan.default.limits.update!(ci_project_subscriptions: 500)
NOTE: **Note:** Set the limit to `0` to disable it.
### Number of pipeline schedules
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/29566) in GitLab 12.10.
The total number of pipeline schedules can be limited per project. The limit is
checked each time a new pipeline schedule is created; if the new schedule would
push the total number of pipeline schedules over the limit, it is not created.
On GitLab.com, different limits are [defined per plan](../user/gitlab_com/index.md#gitlab-cicd),
and they affect all projects under that plan.
On self-managed instances ([GitLab Starter](https://about.gitlab.com/pricing/#self-managed)
or higher tiers), this limit is defined for the `default` plan that affects all
projects. By default, there is no limit.
To set this limit on a self-managed installation, run the following in the
[GitLab Rails console](https://docs.gitlab.com/omnibus/maintenance/#starting-a-rails-console-session):
```ruby
Plan.default.limits.update!(ci_pipeline_schedules: 100)
```
## Environment data on Deploy Boards
[Deploy Boards](../user/project/deploy_boards.md) load information from Kubernetes about

View File

@ -120,6 +120,7 @@ The following API resources are available outside of project and group contexts
| [Events](events.md) | `/events`, `/users/:id/events` (also available for projects) |
| [Feature flags](features.md) | `/features` |
| [Geo Nodes](geo_nodes.md) **(PREMIUM ONLY)** | `/geo_nodes` |
| [Group Activity Analytics](group_activity_analytics.md) **(STARTER)** | `/analytics/group_activity/{issues_count | merge_requests_count}` |
| [Import repository from GitHub](import.md) | `/import/github` |
| [Issues](issues.md) | `/issues` (also available for groups and projects) |
| [Issues Statistics](issues_statistics.md) | `/issues_statistics` (also available for groups and projects) |

View File

@ -1988,8 +1988,7 @@ type Epic implements Noteable {
descendantCounts: EpicDescendantCount
"""
Total weight of open and closed issues in the epic and its descendants.
Available only when feature flag `unfiltered_epic_aggregates` is enabled.
Total weight of open and closed issues in the epic and its descendants
"""
descendantWeightSum: EpicDescendantWeights

View File

@ -5847,7 +5847,7 @@
},
{
"name": "descendantWeightSum",
"description": "Total weight of open and closed issues in the epic and its descendants. Available only when feature flag `unfiltered_epic_aggregates` is enabled.",
"description": "Total weight of open and closed issues in the epic and its descendants",
"args": [
],

View File

@ -327,7 +327,7 @@ Represents an epic.
| `closedAt` | Time | Timestamp of the epic's closure |
| `createdAt` | Time | Timestamp of the epic's creation |
| `descendantCounts` | EpicDescendantCount | Number of open and closed descendant epics and issues |
| `descendantWeightSum` | EpicDescendantWeights | Total weight of open and closed issues in the epic and its descendants. Available only when feature flag `unfiltered_epic_aggregates` is enabled. |
| `descendantWeightSum` | EpicDescendantWeights | Total weight of open and closed issues in the epic and its descendants |
| `description` | String | Description of the epic |
| `downvotes` | Int! | Number of downvotes the epic has received |
| `dueDate` | Time | Due date of the epic |

View File

@ -0,0 +1,55 @@
# Group Activity Analytics API
> **Note:** This feature was [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/26460) in GitLab 12.9.
## Get count of recently created issues for group
```plaintext
GET /analytics/group_activity/issues_count
```
Parameters:
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `group_path` | string | yes | Group path |
Example request:
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/analytics/group_activity/issues_count?group_path=gitlab-org
```
Example response:
```json
[
{ "issues_count": 10 }
]
```
## Get count of recently created merge requests for group
```plaintext
GET /analytics/group_activity/merge_requests_count
```
Parameters:
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `group_path` | string | yes | Group path |
Example request:
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/analytics/group_activity/merge_requests_count?group_path=gitlab-org
```
Example response:
```json
[
{ "merge_requests_count": 10 }
]
```

View File

@ -107,7 +107,7 @@ The following table lists available parameters for jobs:
| [`when`](#when) | When to run job. Also available: `when:manual` and `when:delayed`. |
| [`environment`](#environment) | Name of an environment to which the job deploys. Also available: `environment:name`, `environment:url`, `environment:on_stop`, `environment:auto_stop_in` and `environment:action`. |
| [`cache`](#cache) | List of files that should be cached between subsequent runs. Also available: `cache:paths`, `cache:key`, `cache:untracked`, and `cache:policy`. |
| [`artifacts`](#artifacts) | List of files and directories to attach to a job on success. Also available: `artifacts:paths`, `artifacts:expose_as`, `artifacts:name`, `artifacts:untracked`, `artifacts:when`, `artifacts:expire_in`, `artifacts:reports`, and `artifacts:reports:junit`.<br><br>In GitLab [Enterprise Edition](https://about.gitlab.com/pricing/), these are available: `artifacts:reports:codequality`, `artifacts:reports:sast`, `artifacts:reports:dependency_scanning`, `artifacts:reports:container_scanning`, `artifacts:reports:dast`, `artifacts:reports:license_management`, `artifacts:reports:performance` and `artifacts:reports:metrics`. |
| [`artifacts`](#artifacts) | List of files and directories to attach to a job on success. Also available: `artifacts:paths`, `artifacts:expose_as`, `artifacts:name`, `artifacts:untracked`, `artifacts:when`, `artifacts:expire_in`, `artifacts:reports`, `artifacts:reports:junit`, and `artifacts:reports:cobertura`.<br><br>In GitLab [Enterprise Edition](https://about.gitlab.com/pricing/), these are available: `artifacts:reports:codequality`, `artifacts:reports:sast`, `artifacts:reports:dependency_scanning`, `artifacts:reports:container_scanning`, `artifacts:reports:dast`, `artifacts:reports:license_management`, `artifacts:reports:performance` and `artifacts:reports:metrics`. |
| [`dependencies`](#dependencies) | Restrict which artifacts are passed to a specific job by providing a list of jobs to fetch artifacts from. |
| [`coverage`](#coverage) | Code coverage settings for a given job. |
| [`retry`](#retry) | When and how many times a job can be auto-retried in case of a failure. |
@ -2286,6 +2286,18 @@ There are a couple of limitations on top of the [original dotenv rules](https://
- It doesn't support empty lines and comments (`#`) in dotenv file.
- It doesn't support quote escape, spaces in a quote, a new line expansion in a quote, in dotenv file.
##### `artifacts:reports:cobertura`
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/3708) in GitLab 12.9.
> Requires [GitLab Runner](https://docs.gitlab.com/runner/) 11.5 and above.
The `cobertura` report collects [Cobertura coverage XML files](../../user/project/merge_requests/test_coverage_visualization.md).
The collected Cobertura coverage reports are uploaded to GitLab as an artifact
and are shown automatically in merge requests.
Cobertura was originally developed for Java, but there are many
third-party ports for other languages such as JavaScript, Python, and Ruby.
##### `artifacts:reports:codequality` **(STARTER)**
> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above.

View File

@ -76,6 +76,7 @@ Below are the current settings regarding [GitLab CI/CD](../../ci/README.md).
| Artifacts [expiry time](../../ci/yaml/README.md#artifactsexpire_in) | kept forever | deleted after 30 days unless otherwise specified |
| Scheduled Pipeline Cron | `*/5 * * * *` | `19 * * * *` |
| [Max jobs in active pipelines](../../administration/instance_limits.md#number-of-jobs-in-active-pipelines) | `500` for Free tier, unlimited otherwise | Unlimited
| [Max pipeline schedules in projects](../../administration/instance_limits.md#number-of-pipeline-schedules) | `10` for Free tier, `50` for all paid tiers | Unlimited |
## Repository size limit

Binary image file added (54 KiB); contents not shown.

View File

@ -102,6 +102,7 @@ or link to useful information directly in the merge request page:
| [Multi-Project pipelines](../../../ci/multi_project_pipelines.md) **(PREMIUM)** | When you set up GitLab CI/CD across multiple projects, you can visualize the entire pipeline, including all cross-project interdependencies. |
| [Pipelines for merge requests](../../../ci/merge_request_pipelines/index.md) | Customize a specific pipeline structure for merge requests in order to speed the cycle up by running only important jobs. |
| [Pipeline Graphs](../../../ci/pipelines/index.md#visualizing-pipelines) | View the status of pipelines within the merge request, including the deployment process. |
| [Test Coverage visualization](test_coverage_visualization.md) | See test coverage results for merge requests, within the file diff. |
### Security Reports **(ULTIMATE)**

View File

@ -0,0 +1,78 @@
---
type: reference, howto
---
# Test Coverage Visualization
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/3708) in GitLab 12.9.
With the help of [GitLab CI/CD](../../../ci/README.md), you can collect the test
coverage information from your favorite testing or coverage-analysis tool and visualize
it inside the file diff view of your merge requests (MRs). This lets you see which
lines are covered by tests, and which lines still require coverage, before the
MR is merged.
![Test Coverage Visualization Diff View](img/test_coverage_visualization_v12_9.png)
## How test coverage visualization works
Coverage information is collected via GitLab CI/CD's
[artifacts reports feature](../../../ci/yaml/README.md#artifactsreports).
You can specify one or more coverage reports to collect, including wildcard paths.
GitLab then takes the coverage information from all the files and combines it.
For the coverage analysis to work, you have to provide a properly formatted
[Cobertura XML](https://cobertura.github.io/cobertura/) report to
[`artifacts:reports:cobertura`](../../../ci/yaml/README.md#artifactsreportscobertura).
This format was originally developed for Java, but most coverage analysis frameworks
for other languages have plugins to add support for it, like:
- [simplecov-cobertura](https://rubygems.org/gems/simplecov-cobertura) (Ruby)
- [gocover-cobertura](https://github.com/t-yuki/gocover-cobertura) (Golang)
Other coverage analysis frameworks support the format out of the box, for example:
- [Istanbul](https://istanbul.js.org/docs/advanced/alternative-reporters/#cobertura) (JavaScript)
- [Coverage.py](https://coverage.readthedocs.io/en/coverage-5.0/cmd.html#xml-reporting) (Python)
Once configured, if you create a merge request that triggers a pipeline which collects
coverage reports, the coverage will be shown in the diff view. This includes reports
from any job in any stage in the pipeline. The coverage will be displayed for each line:
- `covered` (green): lines which have been checked at least once by tests
- `no test coverage` (orange): lines which are loaded but never executed
- no coverage information: lines which are non-instrumented or not loaded
Hovering over the coverage bar will provide further information, such as the number
of times the line was checked by tests.
## Example test coverage configuration
The following [`gitlab-ci.yml`](../../../ci/yaml/README.md) example uses [Mocha](https://mochajs.org/)
JavaScript testing and [NYC](https://github.com/istanbuljs/nyc) coverage-tooling to
generate the coverage artifact:
```yaml
test:
script:
- npm install
- npx nyc --reporter cobertura mocha
artifacts:
reports:
cobertura: coverage/cobertura-coverage.xml
```
## Enabling the feature
This feature ships with the `:coverage_report_view` feature flag disabled by
default because of performance issues with very large data sets. When
[the performance issue](https://gitlab.com/gitlab-org/gitlab/issues/37725)
is resolved, the feature will be enabled by default.
To enable this feature, ask a GitLab administrator with Rails console access to
run the following command:
```ruby
Feature.enable(:coverage_report_view)
```

View File

@ -5,7 +5,7 @@ module API
class Discussion < Grape::Entity
expose :id
expose :individual_note?, as: :individual_note
expose :notes, using: Entities::Note
expose :notes, using: Entities::NoteWithGitlabEmployeeBadge
end
end
end

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
module API
module Entities
class NoteWithGitlabEmployeeBadge < Note
expose :author, using: Entities::UserWithGitlabEmployeeBadge
expose :resolved_by, using: Entities::UserWithGitlabEmployeeBadge, if: ->(note, options) { note.resolvable? }
end
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
module API
module Entities
class UserWithGitlabEmployeeBadge < UserBasic
expose :gitlab_employee?, as: :is_gitlab_employee, if: ->(user, options) { ::Feature.enabled?(:gitlab_employee_badge) && user.gitlab_employee? }
end
end
end

View File

@ -14,7 +14,7 @@ module Gitlab
ALLOWED_KEYS =
%i[junit codequality sast dependency_scanning container_scanning
dast performance license_management license_scanning metrics lsif
dotenv].freeze
dotenv cobertura].freeze
attributes ALLOWED_KEYS
@ -35,6 +35,7 @@ module Gitlab
validates :metrics, array_of_strings_or_string: true
validates :lsif, array_of_strings_or_string: true
validates :dotenv, array_of_strings_or_string: true
validates :cobertura, array_of_strings_or_string: true
end
end

View File

@ -9,7 +9,8 @@ module Gitlab
def self.parsers
{
junit: ::Gitlab::Ci::Parsers::Test::Junit
junit: ::Gitlab::Ci::Parsers::Test::Junit,
cobertura: ::Gitlab::Ci::Parsers::Coverage::Cobertura
}
end

View File

@ -0,0 +1,64 @@
# frozen_string_literal: true
module Gitlab
module Ci
module Parsers
module Coverage
class Cobertura
CoberturaParserError = Class.new(Gitlab::Ci::Parsers::ParserError)
def parse!(xml_data, coverage_report)
root = Hash.from_xml(xml_data)
parse_all(root, coverage_report)
rescue Nokogiri::XML::SyntaxError
raise CoberturaParserError, "XML parsing failed"
rescue
raise CoberturaParserError, "Cobertura parsing failed"
end
private
def parse_all(root, coverage_report)
return unless root.present?
root.each do |key, value|
parse_node(key, value, coverage_report)
end
end
def parse_node(key, value, coverage_report)
if key == 'class'
Array.wrap(value).each do |item|
parse_class(item, coverage_report)
end
elsif value.is_a?(Hash)
parse_all(value, coverage_report)
elsif value.is_a?(Array)
value.each do |item|
parse_all(item, coverage_report)
end
end
end
def parse_class(file, coverage_report)
return unless file["filename"].present? && file["lines"].present?
parsed_lines = parse_lines(file["lines"])
coverage_report.add_file(file["filename"], Hash[parsed_lines])
end
def parse_lines(lines)
line_array = Array.wrap(lines["line"])
line_array.map do |line|
# Using `Integer()` here to raise exception on invalid values
[Integer(line["number"]), Integer(line["hits"])]
end
end
end
end
end
end
end

View File

@ -0,0 +1,43 @@
# frozen_string_literal: true
module Gitlab
module Ci
module Reports
class CoverageReports
attr_reader :files
def initialize
@files = {}
end
def pick(keys)
coverage_files = files.select do |key|
keys.include?(key)
end
{ files: coverage_files }
end
def add_file(name, line_coverage)
if files[name].present?
line_coverage.each { |line, hits| combine_lines(name, line, hits) }
else
files[name] = line_coverage
end
end
private
def combine_lines(name, line, hits)
if files[name][line].present?
files[name][line] += hits
else
files[name][line] = hits
end
end
end
end
end
end
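To illustrate the merging semantics above with made-up values: hits for the same file and line are summed, and `pick` filters the report down to the given file names:

```ruby
report = Gitlab::Ci::Reports::CoverageReports.new
report.add_file('app.rb',    { 1 => 2, 2 => 0 })
report.add_file('app.rb',    { 2 => 1, 6 => 1 }) # merged into the existing entry
report.add_file('helper.rb', { 1 => 1 })

report.pick(['app.rb'])
# => { files: { 'app.rb' => { 1 => 2, 2 => 1, 6 => 1 } } }
```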

View File

@ -49,7 +49,7 @@ module Gitlab
end
def tree_saver
@tree_saver ||= RelationTreeSaver.new
@tree_saver ||= LegacyRelationTreeSaver.new
end
end
end

View File

@ -0,0 +1,73 @@
# frozen_string_literal: true
module Gitlab
module ImportExport
module JSON
class LegacyWriter
include Gitlab::ImportExport::CommandLineUtil
attr_reader :path
def initialize(path)
@path = path
@last_array = nil
@keys = Set.new
mkdir_p(File.dirname(@path))
file.write('{}')
end
def close
@file&.close
@file = nil
end
def set(hash)
hash.each do |key, value|
write(key, value)
end
end
def write(key, value)
raise ArgumentError, "key '#{key}' already written" if @keys.include?(key)
# rewind by one byte, to overwrite '}'
file.pos = file.size - 1
file.write(',') if @keys.any?
file.write(key.to_json)
file.write(':')
file.write(value.to_json)
file.write('}')
@keys.add(key)
@last_array = nil
@last_array_count = nil
end
def append(key, value)
unless @last_array == key
write(key, [])
@last_array = key
@last_array_count = 0
end
# rewind by two bytes, to overwrite ']}'
file.pos = file.size - 2
file.write(',') if @last_array_count > 0
file.write(value.to_json)
file.write(']}')
@last_array_count += 1
end
private
def file
@file ||= File.open(@path, "wb")
end
end
end
end
end
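A rough sketch of how this writer incrementally builds the export file (the path is hypothetical; `append` keeps extending the array for the most recently appended key):

```ruby
writer = Gitlab::ImportExport::JSON::LegacyWriter.new('/tmp/export/project.json')
writer.write('description', 'My project')
writer.append('issues', { 'title' => 'First issue' })
writer.append('issues', { 'title' => 'Second issue' })
writer.close

# /tmp/export/project.json now contains:
# {"description":"My project","issues":[{"title":"First issue"},{"title":"Second issue"}]}
```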

View File

@ -0,0 +1,82 @@
# frozen_string_literal: true
module Gitlab
module ImportExport
module JSON
class StreamingSerializer
include Gitlab::ImportExport::CommandLineUtil
BATCH_SIZE = 100
class Raw < String
def to_json(*_args)
to_s
end
end
def initialize(exportable, relations_schema, json_writer)
@exportable = exportable
@relations_schema = relations_schema
@json_writer = json_writer
end
def execute
serialize_root
includes.each do |relation_definition|
serialize_relation(relation_definition)
end
end
private
attr_reader :json_writer, :relations_schema, :exportable
def serialize_root
attributes = exportable.as_json(
relations_schema.merge(include: nil, preloads: nil))
json_writer.set(attributes)
end
def serialize_relation(definition)
raise ArgumentError, 'definition needs to be Hash' unless definition.is_a?(Hash)
raise ArgumentError, 'definition needs to have exactly one Hash element' unless definition.one?
key, options = definition.first
record = exportable.public_send(key) # rubocop: disable GitlabSecurity/PublicSend
if record.is_a?(ActiveRecord::Relation)
serialize_many_relations(key, record, options)
else
serialize_single_relation(key, record, options)
end
end
def serialize_many_relations(key, records, options)
key_preloads = preloads&.dig(key)
records = records.preload(key_preloads) if key_preloads
records.find_each(batch_size: BATCH_SIZE) do |record|
json = Raw.new(record.to_json(options))
json_writer.append(key, json)
end
end
def serialize_single_relation(key, record, options)
json = Raw.new(record.to_json(options))
json_writer.write(key, json)
end
def includes
relations_schema[:include]
end
def preloads
relations_schema[:preload]
end
end
end
end
end
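A sketch of how the streaming serializer gets wired together; the schema fragment and variable names here are placeholders, since in the real export the schema comes from `Gitlab::ImportExport::Reader#project_tree` (see the updated tree saver below):

```ruby
# `project` stands in for the exported Project record (placeholder)
schema = {
  include: [{ issues: { include: [] } }],
  preload: { issues: nil }
}

json_writer = Gitlab::ImportExport::JSON::LegacyWriter.new('/tmp/export/project.json')
Gitlab::ImportExport::JSON::StreamingSerializer
  .new(project, schema, json_writer)
  .execute # writes root attributes first, then each relation in batches of 100
json_writer.close
```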

View File

@ -2,7 +2,7 @@
module Gitlab
module ImportExport
class RelationTreeSaver
class LegacyRelationTreeSaver
include Gitlab::ImportExport::CommandLineUtil
def serialize(exportable, relations_tree)

View File

@ -0,0 +1,68 @@
# frozen_string_literal: true
module Gitlab
module ImportExport
module Project
class LegacyTreeSaver
attr_reader :full_path
def initialize(project:, current_user:, shared:, params: {})
@params = params
@project = project
@current_user = current_user
@shared = shared
@full_path = File.join(@shared.export_path, ImportExport.project_filename)
end
def save
project_tree = tree_saver.serialize(@project, reader.project_tree)
fix_project_tree(project_tree)
tree_saver.save(project_tree, @shared.export_path, ImportExport.project_filename)
true
rescue => e
@shared.error(e)
false
end
private
# Be aware that the resulting hash needs to be a pure hash and
# must not include any AR objects anymore, only objects that respond to `.to_json`
def fix_project_tree(project_tree)
if @params[:description].present?
project_tree['description'] = @params[:description]
end
project_tree['project_members'] += group_members_array
end
def reader
@reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
end
def group_members_array
group_members.as_json(reader.group_members_tree).each do |group_member|
group_member['source_type'] = 'Project' # Make group members project members of the future import
end
end
def group_members
return [] unless @current_user.can?(:admin_group, @project.group)
# We need `.where.not(user_id: nil)` here otherwise when a group has an
# invitee, it would make the following query return 0 rows since a NULL
# user_id would be present in the subquery
# See http://stackoverflow.com/questions/129077/not-in-clause-and-null-values
non_null_user_ids = @project.project_members.where.not(user_id: nil).select(:user_id)
GroupMembersFinder.new(@project.group).execute.where.not(user_id: non_null_user_ids)
end
def tree_saver
@tree_saver ||= Gitlab::ImportExport::LegacyRelationTreeSaver.new
end
end
end
end
end

View File

@ -15,52 +15,40 @@ module Gitlab
end
def save
project_tree = tree_saver.serialize(@project, reader.project_tree)
fix_project_tree(project_tree)
tree_saver.save(project_tree, @shared.export_path, ImportExport.project_filename)
json_writer = ImportExport::JSON::LegacyWriter.new(@full_path)
serializer = ImportExport::JSON::StreamingSerializer.new(exportable, reader.project_tree, json_writer)
serializer.execute
true
rescue => e
@shared.error(e)
false
ensure
json_writer&.close
end
private
# Aware that the resulting hash needs to be pure-hash and
# does not include any AR objects anymore, only objects that run `.to_json`
def fix_project_tree(project_tree)
if @params[:description].present?
project_tree['description'] = @params[:description]
end
project_tree['project_members'] += group_members_array
end
def reader
@reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
end
def group_members_array
group_members.as_json(reader.group_members_tree).each do |group_member|
group_member['source_type'] = 'Project' # Make group members project members of the future import
end
def exportable
@project.present(exportable_params)
end
def group_members
return [] unless @current_user.can?(:admin_group, @project.group)
# We need `.where.not(user_id: nil)` here otherwise when a group has an
# invitee, it would make the following query return 0 rows since a NULL
# user_id would be present in the subquery
# See http://stackoverflow.com/questions/129077/not-in-clause-and-null-values
non_null_user_ids = @project.project_members.where.not(user_id: nil).select(:user_id)
GroupMembersFinder.new(@project.group).execute.where.not(user_id: non_null_user_ids)
def exportable_params
params = {
presenter_class: presenter_class,
current_user: @current_user
}
params[:override_description] = @params[:description] if @params[:description].present?
params
end
def tree_saver
@tree_saver ||= RelationTreeSaver.new
def presenter_class
Projects::ImportExport::ProjectExportPresenter
end
end
end

View File

@ -19,7 +19,6 @@ namespace :gitlab do
if ENV['EXPORT_DEBUG'].present?
ActiveRecord::Base.logger = logger
Gitlab::Metrics::Exporter::SidekiqExporter.instance.start
logger.level = Logger::DEBUG
else
logger.level = Logger::INFO

View File

@ -23,7 +23,6 @@ namespace :gitlab do
if ENV['IMPORT_DEBUG'].present?
ActiveRecord::Base.logger = logger
Gitlab::Metrics::Exporter::SidekiqExporter.instance.start
logger.level = Logger::DEBUG
else
logger.level = Logger::INFO

View File

@ -33,6 +33,6 @@ namespace :sidekiq do
task :launchd do
deprecation_warning!
system(*%w(bin/background_jobs start_no_deamonize))
system(*%w(bin/background_jobs start_silent))
end
end

View File

@ -1810,6 +1810,9 @@ msgstr ""
msgid "An error occurred while enabling Service Desk."
msgstr ""
msgid "An error occurred while fetching coverage reports."
msgstr ""
msgid "An error occurred while fetching environments."
msgstr ""
@ -4822,6 +4825,9 @@ msgstr ""
msgid "ClusterIntegration|You must have an RBAC-enabled cluster to install Knative."
msgstr ""
msgid "ClusterIntegration|You should select at least two subnets"
msgstr ""
msgid "ClusterIntegration|Your account must have %{link_to_kubernetes_engine}"
msgstr ""
@ -13325,6 +13331,9 @@ msgstr ""
msgid "No template"
msgstr ""
msgid "No test coverage"
msgstr ""
msgid "No thanks, don't show this again"
msgstr ""
@ -19472,6 +19481,11 @@ msgstr ""
msgid "Test coverage parsing"
msgstr ""
msgid "Test coverage: %d hit"
msgid_plural "Test coverage: %d hits"
msgstr[0] ""
msgstr[1] ""
msgid "Test failed."
msgstr ""

View File

@ -984,6 +984,136 @@ describe Projects::MergeRequestsController do
end
end
describe 'GET coverage_reports' do
let(:merge_request) do
create(:merge_request,
:with_merge_request_pipeline,
target_project: project,
source_project: project)
end
let(:pipeline) do
create(:ci_pipeline,
:success,
project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha)
end
before do
allow_any_instance_of(MergeRequest)
.to receive(:find_coverage_reports)
.and_return(report)
allow_any_instance_of(MergeRequest)
.to receive(:actual_head_pipeline)
.and_return(pipeline)
end
subject do
get :coverage_reports, params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid
},
format: :json
end
describe 'permissions on a public project with private CI/CD' do
let(:project) { create :project, :repository, :public, :builds_private }
let(:report) { { status: :parsed, data: [] } }
context 'while signed out' do
before do
sign_out(user)
end
it 'responds with a 404' do
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
context 'while signed in as an unrelated user' do
before do
sign_in(create(:user))
end
it 'responds with a 404' do
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
end
context 'when pipeline has jobs with coverage reports' do
before do
allow_any_instance_of(MergeRequest)
.to receive(:has_coverage_reports?)
.and_return(true)
end
context 'when processing coverage reports is in progress' do
let(:report) { { status: :parsing } }
it 'sends polling interval' do
expect(Gitlab::PollingInterval).to receive(:set_header)
subject
end
it 'returns 204 HTTP status' do
subject
expect(response).to have_gitlab_http_status(:no_content)
end
end
context 'when processing coverage reports is completed' do
let(:report) { { status: :parsed, data: pipeline.coverage_reports } }
it 'returns coverage reports' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq({ 'files' => {} })
end
end
context 'when user created corrupted coverage reports' do
let(:report) { { status: :error, status_reason: 'Failed to parse coverage reports' } }
it 'does not send polling interval' do
expect(Gitlab::PollingInterval).not_to receive(:set_header)
subject
end
it 'returns 400 HTTP status' do
subject
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to eq({ 'status_reason' => 'Failed to parse coverage reports' })
end
end
end
context 'when pipeline does not have jobs with coverage reports' do
let(:report) { double }
it 'returns no content' do
subject
expect(response).to have_gitlab_http_status(:no_content)
expect(response.body).to be_empty
end
end
end
describe 'GET test_reports' do
let(:merge_request) do
create(:merge_request,

View File

@ -311,6 +311,12 @@ FactoryBot.define do
end
end
trait :coverage_reports do
after(:build) do |build|
build.job_artifacts << create(:ci_job_artifact, :cobertura, job: build)
end
end
trait :expired do
artifacts_expire_at { 1.minute.ago }
end
@ -355,6 +361,8 @@ FactoryBot.define do
options { {} }
end
# TODO: move Security traits to ee_ci_build
# https://gitlab.com/gitlab-org/gitlab/-/issues/210486
trait :dast do
options do
{
@ -395,6 +403,14 @@ FactoryBot.define do
end
end
trait :license_scanning do
options do
{
artifacts: { reports: { license_management: 'gl-license-scanning-report.json' } }
}
end
end
trait :non_playable do
status { 'created' }
self.when { 'manual' }

View File

@ -129,6 +129,36 @@ FactoryBot.define do
end
end
trait :cobertura do
file_type { :cobertura }
file_format { :gzip }
after(:build) do |artifact, evaluator|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/cobertura/coverage.xml.gz'), 'application/x-gzip')
end
end
trait :coverage_gocov_xml do
file_type { :cobertura }
file_format { :gzip }
after(:build) do |artifact, evaluator|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/cobertura/coverage_gocov_xml.xml.gz'), 'application/x-gzip')
end
end
trait :coverage_with_corrupted_data do
file_type { :cobertura }
file_format { :gzip }
after(:build) do |artifact, evaluator|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/cobertura/coverage_with_corrupted_data.xml.gz'), 'application/x-gzip')
end
end
trait :codequality do
file_type { :codequality }
file_format { :raw }

View File

@ -67,6 +67,14 @@ FactoryBot.define do
end
end
trait :with_coverage_reports do
status { :success }
after(:build) do |pipeline, evaluator|
pipeline.builds << build(:ci_build, :coverage_reports, pipeline: pipeline, project: pipeline.project)
end
end
trait :with_exposed_artifacts do
status { :success }

View File

@ -121,6 +121,18 @@ FactoryBot.define do
end
end
trait :with_coverage_reports do
after(:build) do |merge_request|
merge_request.head_pipeline = build(
:ci_pipeline,
:success,
:with_coverage_reports,
project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha)
end
end
trait :with_exposed_artifacts do
after(:build) do |merge_request|
merge_request.head_pipeline = build(

View File

@ -190,7 +190,7 @@ describe 'Merge request > User sees avatars on diff notes', :js do
def find_line(line_code)
line = find("[id='#{line_code}']")
line = line.find(:xpath, 'preceding-sibling::*[1][self::td]') if line.tag_name == 'td'
line = line.find(:xpath, 'preceding-sibling::*[1][self::td]/preceding-sibling::*[1][self::td]') if line.tag_name == 'td'
line
end
end

View File

@ -17,7 +17,8 @@
"path": { "type": "string" },
"name": { "type": "string" },
"username": { "type": "string" },
"status_tooltip_html": { "$ref": "../types/nullable_string.json" }
"status_tooltip_html": { "$ref": "../types/nullable_string.json" },
"is_gitlab_employee": { "type": "boolean" }
},
"additionalProperties": false
}

spec/fixtures/cobertura/coverage.xml vendored Normal file
View File

@ -0,0 +1,43 @@
<?xml version='1.0'?>
<!DOCTYPE coverage SYSTEM "http://cobertura.sourceforge.net/xml/coverage-04.dtd">
<!-- cobertura example file - generated by simplecov-cobertura - subset of gitlab-org/gitlab - manually modified -->
<!-- Generated by simplecov-cobertura version 1.3.1 (https://github.com/dashingrocket/simplecov-cobertura) -->
<coverage line-rate="0.5" branch-rate="0" lines-covered="73865" lines-valid="147397" branches-covered="0" branches-valid="0" complexity="0" version="0" timestamp="1577128350">
<sources>
<source>/tmp/projects/gitlab-ce/gitlab</source>
</sources>
<packages>
<package name="Controllers" line-rate="0.43" branch-rate="0" complexity="0">
<classes>
<class name="abuse_reports_controller" filename="app/controllers/abuse_reports_controller.rb" line-rate="0.3" branch-rate="0" complexity="0">
<methods/>
<lines>
<line number="3" branch="false" hits="1"/>
<line number="4" branch="false" hits="1"/>
<line number="6" branch="false" hits="1"/>
<line number="7" branch="false" hits="0"/>
<line number="8" branch="false" hits="0"/>
<line number="9" branch="false" hits="0"/>
<line number="12" branch="false" hits="1"/>
<line number="13" branch="false" hits="0"/>
<line number="14" branch="false" hits="0"/>
<line number="16" branch="false" hits="0"/>
<line number="17" branch="false" hits="0"/>
<line number="19" branch="false" hits="0"/>
<line number="20" branch="false" hits="0"/>
<line number="22" branch="false" hits="0"/>
<line number="26" branch="false" hits="1"/>
<line number="28" branch="false" hits="1"/>
<line number="29" branch="false" hits="0"/>
<line number="36" branch="false" hits="1"/>
<line number="37" branch="false" hits="0"/>
<line number="39" branch="false" hits="0"/>
<line number="40" branch="false" hits="0"/>
<line number="41" branch="false" hits="0"/>
<line number="42" branch="false" hits="0"/>
</lines>
</class>
</classes>
</package>
</packages>
</coverage>

spec/fixtures/cobertura/coverage.xml.gz vendored Normal file

Binary file not shown.

View File

@ -0,0 +1,216 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE coverage SYSTEM "http://cobertura.sourceforge.net/xml/coverage-04.dtd">
<!-- cobertura example file - generated by gocov-xml - subset of gitlab-org/gitaly -->
<coverage line-rate="0.7966102" branch-rate="0" lines-covered="47" lines-valid="59" branches-covered="0" branches-valid="0" complexity="0" version="" timestamp="1577127162320">
<packages>
<package name="gitlab.com/gitlab-org/gitaly/auth" line-rate="0.7966102" branch-rate="0" complexity="0" line-count="59" line-hits="47">
<classes>
<class name="-" filename="auth/rpccredentials.go" line-rate="0.2" branch-rate="0" complexity="0" line-count="5" line-hits="1">
<methods>
<method name="RPCCredentials" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
<lines>
<line number="17" hits="1"></line>
</lines>
</method>
<method name="RPCCredentialsV2" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
<lines>
<line number="34" hits="0"></line>
</lines>
</method>
<method name="hmacToken" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
<lines>
<line number="52" hits="0"></line>
<line number="53" hits="0"></line>
<line number="55" hits="0"></line>
</lines>
</method>
</methods>
<lines>
<line number="17" hits="1"></line>
<line number="34" hits="0"></line>
<line number="52" hits="0"></line>
<line number="53" hits="0"></line>
<line number="55" hits="0"></line>
</lines>
</class>
<class name="rpcCredentials" filename="auth/rpccredentials.go" line-rate="0.5" branch-rate="0" complexity="0" line-count="2" line-hits="1">
<methods>
<method name="RequireTransportSecurity" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
<lines>
<line number="24" hits="0"></line>
</lines>
</method>
<method name="GetRequestMetadata" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
<lines>
<line number="27" hits="1"></line>
</lines>
</method>
</methods>
<lines>
<line number="24" hits="0"></line>
<line number="27" hits="1"></line>
</lines>
</class>
<class name="rpcCredentialsV2" filename="auth/rpccredentials.go" line-rate="0" branch-rate="0" complexity="0" line-count="3" line-hits="0">
<methods>
<method name="RequireTransportSecurity" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
<lines>
<line number="41" hits="0"></line>
</lines>
</method>
<method name="GetRequestMetadata" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
<lines>
<line number="44" hits="0"></line>
</lines>
</method>
<method name="hmacToken" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
<lines>
<line number="48" hits="0"></line>
</lines>
</method>
</methods>
<lines>
<line number="41" hits="0"></line>
<line number="44" hits="0"></line>
<line number="48" hits="0"></line>
</lines>
</class>
<class name="-" filename="auth/token.go" line-rate="0.9183673" branch-rate="0" complexity="0" line-count="49" line-hits="45">
<methods>
<method name="init" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
<lines>
<line number="38" hits="1"></line>
</lines>
</method>
<method name="CheckToken" signature="" line-rate="0.9285714" branch-rate="0" complexity="0" line-count="0" line-hits="0">
<lines>
<line number="52" hits="1"></line>
<line number="53" hits="0"></line>
<line number="56" hits="1"></line>
<line number="57" hits="1"></line>
<line number="58" hits="1"></line>
<line number="61" hits="1"></line>
<line number="63" hits="1"></line>
<line number="64" hits="1"></line>
<line number="65" hits="1"></line>
<line number="68" hits="1"></line>
<line number="69" hits="1"></line>
<line number="72" hits="1"></line>
<line number="73" hits="1"></line>
<line number="77" hits="1"></line>
</lines>
</method>
<method name="tokensEqual" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
<lines>
<line number="81" hits="1"></line>
</lines>
</method>
<method name="ExtractAuthInfo" signature="" line-rate="0.90909094" branch-rate="0" complexity="0" line-count="0" line-hits="0">
<lines>
<line number="86" hits="1"></line>
<line number="88" hits="1"></line>
<line number="89" hits="1"></line>
<line number="92" hits="1"></line>
<line number="96" hits="1"></line>
<line number="97" hits="1"></line>
<line number="100" hits="1"></line>
<line number="101" hits="1"></line>
<line number="102" hits="1"></line>
<line number="103" hits="0"></line>
<line number="106" hits="1"></line>
</lines>
</method>
<method name="countV2Error" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
<lines>
<line number="109" hits="1"></line>
</lines>
</method>
<method name="v2HmacInfoValid" signature="" line-rate="0.8888889" branch-rate="0" complexity="0" line-count="0" line-hits="0">
<lines>
<line number="112" hits="1"></line>
<line number="113" hits="1"></line>
<line number="114" hits="1"></line>
<line number="115" hits="1"></line>
<line number="118" hits="1"></line>
<line number="119" hits="1"></line>
<line number="120" hits="0"></line>
<line number="121" hits="0"></line>
<line number="124" hits="1"></line>
<line number="125" hits="1"></line>
<line number="126" hits="1"></line>
<line number="128" hits="1"></line>
<line number="129" hits="1"></line>
<line number="130" hits="1"></line>
<line number="133" hits="1"></line>
<line number="134" hits="1"></line>
<line number="135" hits="1"></line>
<line number="138" hits="1"></line>
</lines>
</method>
<method name="hmacSign" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
<lines>
<line number="142" hits="1"></line>
<line number="143" hits="1"></line>
<line number="145" hits="1"></line>
</lines>
</method>
</methods>
<lines>
<line number="38" hits="1"></line>
<line number="52" hits="1"></line>
<line number="53" hits="0"></line>
<line number="56" hits="1"></line>
<line number="57" hits="1"></line>
<line number="58" hits="1"></line>
<line number="61" hits="1"></line>
<line number="63" hits="1"></line>
<line number="64" hits="1"></line>
<line number="65" hits="1"></line>
<line number="68" hits="1"></line>
<line number="69" hits="1"></line>
<line number="72" hits="1"></line>
<line number="73" hits="1"></line>
<line number="77" hits="1"></line>
<line number="81" hits="1"></line>
<line number="86" hits="1"></line>
<line number="88" hits="1"></line>
<line number="89" hits="1"></line>
<line number="92" hits="1"></line>
<line number="96" hits="1"></line>
<line number="97" hits="1"></line>
<line number="100" hits="1"></line>
<line number="101" hits="1"></line>
<line number="102" hits="1"></line>
<line number="103" hits="0"></line>
<line number="106" hits="1"></line>
<line number="109" hits="1"></line>
<line number="112" hits="1"></line>
<line number="113" hits="1"></line>
<line number="114" hits="1"></line>
<line number="115" hits="1"></line>
<line number="118" hits="1"></line>
<line number="119" hits="1"></line>
<line number="120" hits="0"></line>
<line number="121" hits="0"></line>
<line number="124" hits="1"></line>
<line number="125" hits="1"></line>
<line number="126" hits="1"></line>
<line number="128" hits="1"></line>
<line number="129" hits="1"></line>
<line number="130" hits="1"></line>
<line number="133" hits="1"></line>
<line number="134" hits="1"></line>
<line number="135" hits="1"></line>
<line number="138" hits="1"></line>
<line number="142" hits="1"></line>
<line number="143" hits="1"></line>
<line number="145" hits="1"></line>
</lines>
</class>
</classes>
</package>
</packages>
<sources>
<source>/tmp/projects/gitlab-ce/gitaly/src/gitlab.com/gitlab-org/gitaly</source>
</sources>
</coverage>

Binary file not shown.

View File

@ -0,0 +1,50 @@
<?xml version="1.0" ?>
<!DOCTYPE coverage SYSTEM "http://cobertura.sourceforge.net/xml/coverage-04.dtd">
<!-- cobertura example file - generated by NYC - manually modified -->
<coverage lines-valid="22" lines-covered="16" line-rate="0.7273000000000001" branches-valid="4" branches-covered="2" branch-rate="0.5" timestamp="1576756029756" complexity="0" version="0.1">
<sources>
<source>/tmp/projects/coverage-test</source>
</sources>
<packages>
<package name="coverage-test" line-rate="0.6842" branch-rate="0.5">
<classes>
<class name="index.js" filename="index.js" line-rate="0.6842" branch-rate="0.5">
<methods>
<method name="(anonymous_3)" hits="0" signature="()V">
<lines>
<line number="21" hits="0"/>
</lines>
</method>
</methods>
<lines>
<line number="21" hits="1" branch="false"/>
<line number="22" hits="0" branch="false"/>
<line number="25" hits="1" branch="true" condition-coverage="50% (1/2)"/>
<line number="26" hits="0" branch="false"/>
<line number="27" hits="0" branch="false"/>
<line number="28" hits="0" branch="false"/>
<line number="29" hits="0" branch="false"/>
</lines>
</class>
</classes>
</package>
<package name="coverage-test.lib.math" line-rate="1" branch-rate="1">
<classes>
<class name="add.js" filename="lib/math/add.js" line-rate="1" branch-rate="1">
<methods>
<method name="(anonymous_0)" hits="1" signature="()V">
<lines>
<line number="1" hits="1"/>
</lines>
</method>
</methods>
<lines>
<line null="test" hits="1" branch="false"/>
<line number="2" hits="1" branch="false"/>
<line number="3" hits="1" branch="false"/>
</lines>
</class>
</classes>
</package>
</packages>
</coverage>

Binary file not shown.

View File

@ -13,6 +13,7 @@ localVue.use(Vuex);
describe('EksClusterConfigurationForm', () => {
let store;
let actions;
let getters;
let state;
let rolesState;
let regionsState;
@ -29,8 +30,7 @@ describe('EksClusterConfigurationForm', () => {
let securityGroupsActions;
let vm;
beforeEach(() => {
state = eksClusterFormState();
const createStore = (config = {}) => {
actions = {
createCluster: jest.fn(),
setClusterName: jest.fn(),
@ -64,29 +64,44 @@ describe('EksClusterConfigurationForm', () => {
securityGroupsActions = {
fetchItems: jest.fn(),
};
state = {
...eksClusterFormState(),
...config.initialState,
};
rolesState = {
...clusterDropdownStoreState(),
...config.rolesState,
};
regionsState = {
...clusterDropdownStoreState(),
...config.regionsState,
};
vpcsState = {
...clusterDropdownStoreState(),
...config.vpcsState,
};
subnetsState = {
...clusterDropdownStoreState(),
...config.subnetsState,
};
keyPairsState = {
...clusterDropdownStoreState(),
...config.keyPairsState,
};
securityGroupsState = {
...clusterDropdownStoreState(),
...config.securityGroupsState,
};
instanceTypesState = {
...clusterDropdownStoreState(),
...config.instanceTypesState,
};
getters = {
subnetValid: config?.getters?.subnetValid || (() => false),
};
store = new Vuex.Store({
state,
getters,
actions,
modules: {
vpcs: {
@ -125,9 +140,29 @@ describe('EksClusterConfigurationForm', () => {
},
},
});
});
};
beforeEach(() => {
const createValidStateStore = initialState => {
createStore({
initialState: {
clusterName: 'cluster name',
environmentScope: '*',
selectedRegion: 'region',
selectedRole: 'role',
selectedKeyPair: 'key pair',
selectedVpc: 'vpc',
selectedSubnet: ['subnet 1', 'subnet 2'],
selectedSecurityGroup: 'group',
selectedInstanceType: 'small-1',
...initialState,
},
getters: {
subnetValid: () => true,
},
});
};
const buildWrapper = () => {
vm = shallowMount(EksClusterConfigurationForm, {
localVue,
store,
@ -137,27 +172,17 @@ describe('EksClusterConfigurationForm', () => {
externalLinkIcon: '',
},
});
};
beforeEach(() => {
createStore();
buildWrapper();
});
afterEach(() => {
vm.destroy();
});
const setAllConfigurationFields = () => {
store.replaceState({
...state,
clusterName: 'cluster name',
environmentScope: '*',
selectedRegion: 'region',
selectedRole: 'role',
selectedKeyPair: 'key pair',
selectedVpc: 'vpc',
selectedSubnet: 'subnet',
selectedSecurityGroup: 'group',
selectedInstanceType: 'small-1',
});
};
const findCreateClusterButton = () => vm.find('.js-create-cluster');
const findClusterNameInput = () => vm.find('[id=eks-cluster-name]');
const findEnvironmentScopeInput = () => vm.find('[id=eks-environment-scope]');
@ -310,12 +335,29 @@ describe('EksClusterConfigurationForm', () => {
expect(findSubnetDropdown().props('items')).toBe(subnetsState.items);
});
it('sets SubnetDropdown hasErrors to true when loading subnets fails', () => {
subnetsState.loadingItemsError = new Error();
return Vue.nextTick().then(() => {
expect(findSubnetDropdown().props('hasErrors')).toEqual(true);
it('displays a validation error in the subnet dropdown when loading subnets fails', () => {
createStore({
subnetsState: {
loadingItemsError: new Error(),
},
});
buildWrapper();
expect(findSubnetDropdown().props('hasErrors')).toEqual(true);
});
it('displays a validation error in the subnet dropdown when a single subnet is selected', () => {
createStore({
initialState: {
selectedSubnet: ['subnet 1'],
},
});
buildWrapper();
expect(findSubnetDropdown().props('hasErrors')).toEqual(true);
expect(findSubnetDropdown().props('errorMessage')).toEqual(
'You should select at least two subnets',
);
});
it('disables SecurityGroupDropdown when no vpc is selected', () => {
@ -386,11 +428,7 @@ describe('EksClusterConfigurationForm', () => {
});
it('cleans selected subnet', () => {
expect(actions.setSubnet).toHaveBeenCalledWith(
expect.anything(),
{ subnet: null },
undefined,
);
expect(actions.setSubnet).toHaveBeenCalledWith(expect.anything(), { subnet: [] }, undefined);
});
it('cleans selected security group', () => {
@ -464,11 +502,7 @@ describe('EksClusterConfigurationForm', () => {
});
it('cleans selected subnet', () => {
expect(actions.setSubnet).toHaveBeenCalledWith(
expect.anything(),
{ subnet: null },
undefined,
);
expect(actions.setSubnet).toHaveBeenCalledWith(expect.anything(), { subnet: [] }, undefined);
});
it('cleans selected security group', () => {
@ -573,22 +607,19 @@ describe('EksClusterConfigurationForm', () => {
});
describe('when all cluster configuration fields are set', () => {
beforeEach(() => {
setAllConfigurationFields();
});
it('enables create cluster button', () => {
createValidStateStore();
buildWrapper();
expect(findCreateClusterButton().props('disabled')).toBe(false);
});
});
describe('when at least one cluster configuration field is not set', () => {
beforeEach(() => {
setAllConfigurationFields();
store.replaceState({
...state,
clusterName: '',
createValidStateStore({
clusterName: null,
});
buildWrapper();
});
it('disables create cluster button', () => {
@ -596,13 +627,12 @@ describe('EksClusterConfigurationForm', () => {
});
});
describe('when isCreatingCluster', () => {
describe('when is creating cluster', () => {
beforeEach(() => {
setAllConfigurationFields();
store.replaceState({
...state,
createValidStateStore({
isCreatingCluster: true,
});
buildWrapper();
});
it('sets create cluster button as loading', () => {

View File

@ -0,0 +1,13 @@
import { subnetValid } from '~/create_cluster/eks_cluster/store/getters';
describe('EKS Cluster Store Getters', () => {
describe('subnetValid', () => {
it('returns true if there are 2 or more selected subnets', () => {
expect(subnetValid({ selectedSubnet: [1, 2] })).toBe(true);
});
it.each([[[]], [[1]]])('returns false if there are 1 or fewer selected subnets', subnets => {
expect(subnetValid({ selectedSubnet: subnets })).toBe(false);
});
});
});

View File

@ -41,6 +41,7 @@ describe('diffs/components/app', () => {
endpoint: TEST_ENDPOINT,
endpointMetadata: `${TEST_HOST}/diff/endpointMetadata`,
endpointBatch: `${TEST_HOST}/diff/endpointBatch`,
endpointCoverage: `${TEST_HOST}/diff/endpointCoverage`,
projectPath: 'namespace/project',
currentUser: {},
changesEmptyStateIllustration: '',
@ -95,6 +96,7 @@ describe('diffs/components/app', () => {
jest.spyOn(wrapper.vm, 'fetchDiffFiles').mockImplementation(fetchResolver);
jest.spyOn(wrapper.vm, 'fetchDiffFilesMeta').mockImplementation(fetchResolver);
jest.spyOn(wrapper.vm, 'fetchDiffFilesBatch').mockImplementation(fetchResolver);
jest.spyOn(wrapper.vm, 'fetchCoverageFiles').mockImplementation(fetchResolver);
jest.spyOn(wrapper.vm, 'setDiscussions').mockImplementation(() => {});
jest.spyOn(wrapper.vm, 'startRenderDiffsQueue').mockImplementation(() => {});
jest.spyOn(wrapper.vm, 'unwatchDiscussions').mockImplementation(() => {});
@ -250,6 +252,7 @@ describe('diffs/components/app', () => {
expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesMeta).not.toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesBatch).not.toHaveBeenCalled();
expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
expect(wrapper.vm.diffFilesLength).toEqual(100);
expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
@ -269,6 +272,7 @@ describe('diffs/components/app', () => {
expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
expect(wrapper.vm.diffFilesLength).toEqual(100);
expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
@ -286,6 +290,7 @@ describe('diffs/components/app', () => {
expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
expect(wrapper.vm.diffFilesLength).toEqual(100);
expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();

View File

@ -12,6 +12,7 @@ describe('InlineDiffTableRow', () => {
vm = createComponentWithStore(Vue.extend(InlineDiffTableRow), createStore(), {
line: thisLine,
fileHash: diffFileMockData.file_hash,
filePath: diffFileMockData.file_path,
contextLinesPath: 'contextLinesPath',
isHighlighted: false,
}).$mount();
@ -39,4 +40,64 @@ describe('InlineDiffTableRow', () => {
.then(done)
.catch(done.fail);
});
describe('sets coverage title and class', () => {
it('for lines with coverage', done => {
vm.$nextTick()
.then(() => {
const name = diffFileMockData.file_path;
const line = thisLine.new_line;
vm.$store.state.diffs.coverageFiles = { files: { [name]: { [line]: 5 } } };
return vm.$nextTick();
})
.then(() => {
const coverage = vm.$el.querySelector('.line-coverage');
expect(coverage.title).toContain('Test coverage: 5 hits');
expect(coverage.classList).toContain('coverage');
})
.then(done)
.catch(done.fail);
});
it('for lines without coverage', done => {
vm.$nextTick()
.then(() => {
const name = diffFileMockData.file_path;
const line = thisLine.new_line;
vm.$store.state.diffs.coverageFiles = { files: { [name]: { [line]: 0 } } };
return vm.$nextTick();
})
.then(() => {
const coverage = vm.$el.querySelector('.line-coverage');
expect(coverage.title).toContain('No test coverage');
expect(coverage.classList).toContain('no-coverage');
})
.then(done)
.catch(done.fail);
});
it('for unknown lines', done => {
vm.$nextTick()
.then(() => {
vm.$store.state.diffs.coverageFiles = {};
return vm.$nextTick();
})
.then(() => {
const coverage = vm.$el.querySelector('.line-coverage');
expect(coverage.title).not.toContain('Coverage');
expect(coverage.classList).not.toContain('coverage');
expect(coverage.classList).not.toContain('no-coverage');
})
.then(done)
.catch(done.fail);
});
});
});

View File

@ -14,6 +14,7 @@ describe('ParallelDiffTableRow', () => {
vm = createComponentWithStore(Vue.extend(ParallelDiffTableRow), createStore(), {
line: thisLine,
fileHash: diffFileMockData.file_hash,
filePath: diffFileMockData.file_path,
contextLinesPath: 'contextLinesPath',
isHighlighted: false,
}).$mount();
@ -52,6 +53,7 @@ describe('ParallelDiffTableRow', () => {
vm = createComponentWithStore(Vue.extend(ParallelDiffTableRow), createStore(), {
line: thisLine,
fileHash: diffFileMockData.file_hash,
filePath: diffFileMockData.file_path,
contextLinesPath: 'contextLinesPath',
isHighlighted: false,
}).$mount();
@ -81,5 +83,65 @@ describe('ParallelDiffTableRow', () => {
.then(done)
.catch(done.fail);
});
describe('sets coverage title and class', () => {
it('for lines with coverage', done => {
vm.$nextTick()
.then(() => {
const name = diffFileMockData.file_path;
const line = rightLine.new_line;
vm.$store.state.diffs.coverageFiles = { files: { [name]: { [line]: 5 } } };
return vm.$nextTick();
})
.then(() => {
const coverage = vm.$el.querySelector('.line-coverage.right-side');
expect(coverage.title).toContain('Test coverage: 5 hits');
expect(coverage.classList).toContain('coverage');
})
.then(done)
.catch(done.fail);
});
it('for lines without coverage', done => {
vm.$nextTick()
.then(() => {
const name = diffFileMockData.file_path;
const line = rightLine.new_line;
vm.$store.state.diffs.coverageFiles = { files: { [name]: { [line]: 0 } } };
return vm.$nextTick();
})
.then(() => {
const coverage = vm.$el.querySelector('.line-coverage.right-side');
expect(coverage.title).toContain('No test coverage');
expect(coverage.classList).toContain('no-coverage');
})
.then(done)
.catch(done.fail);
});
it('for unknown lines', done => {
vm.$nextTick()
.then(() => {
vm.$store.state.diffs.coverageFiles = {};
return vm.$nextTick();
})
.then(() => {
const coverage = vm.$el.querySelector('.line-coverage.right-side');
expect(coverage.title).not.toContain('Coverage');
expect(coverage.classList).not.toContain('coverage');
expect(coverage.classList).not.toContain('no-coverage');
})
.then(done)
.catch(done.fail);
});
});
});
});

View File

@ -12,6 +12,7 @@ import actions, {
fetchDiffFiles,
fetchDiffFilesBatch,
fetchDiffFilesMeta,
fetchCoverageFiles,
assignDiscussionsToDiff,
removeDiscussionsFromDiff,
startRenderDiffsQueue,
@ -73,6 +74,7 @@ describe('DiffsStoreActions', () => {
const endpoint = '/diffs/set/endpoint';
const endpointMetadata = '/diffs/set/endpoint/metadata';
const endpointBatch = '/diffs/set/endpoint/batch';
const endpointCoverage = '/diffs/set/coverage_reports';
const projectPath = '/root/project';
const dismissEndpoint = '/-/user_callouts';
const showSuggestPopover = false;
@ -84,6 +86,7 @@ describe('DiffsStoreActions', () => {
endpoint,
endpointBatch,
endpointMetadata,
endpointCoverage,
projectPath,
dismissEndpoint,
showSuggestPopover,
@ -93,6 +96,7 @@ describe('DiffsStoreActions', () => {
endpoint: '',
endpointBatch: '',
endpointMetadata: '',
endpointCoverage: '',
projectPath: '',
dismissEndpoint: '',
showSuggestPopover: true,
@ -105,6 +109,7 @@ describe('DiffsStoreActions', () => {
endpoint,
endpointMetadata,
endpointBatch,
endpointCoverage,
projectPath,
dismissEndpoint,
showSuggestPopover,
@ -318,6 +323,44 @@ describe('DiffsStoreActions', () => {
});
});
describe('fetchCoverageFiles', () => {
let mock;
const endpointCoverage = '/fetch';
beforeEach(() => {
mock = new MockAdapter(axios);
});
afterEach(() => mock.restore());
it('should commit SET_COVERAGE_DATA with received response', done => {
const data = { files: { 'app.js': { '1': 0, '2': 1 } } };
mock.onGet(endpointCoverage).reply(200, { data });
testAction(
fetchCoverageFiles,
{},
{ endpointCoverage },
[{ type: types.SET_COVERAGE_DATA, payload: { data } }],
[],
done,
);
});
it('should show flash on API error', done => {
const flashSpy = spyOnDependency(actions, 'createFlash');
mock.onGet(endpointCoverage).reply(400);
testAction(fetchCoverageFiles, {}, { endpointCoverage }, [], [], () => {
expect(flashSpy).toHaveBeenCalledTimes(1);
expect(flashSpy).toHaveBeenCalledWith(jasmine.stringMatching('Something went wrong'));
done();
});
});
});
describe('setHighlightedRow', () => {
it('should mark currently selected diff and set lineHash and fileHash of highlightedRow', () => {
testAction(setHighlightedRow, 'ABC_123', {}, [

View File

@ -282,4 +282,34 @@ describe('Diffs Module Getters', () => {
expect(getters.currentDiffIndex(localState)).toEqual(0);
});
});
describe('fileLineCoverage', () => {
beforeEach(() => {
Object.assign(localState.coverageFiles, { files: { 'app.js': { '1': 0, '2': 5 } } });
});
it('returns empty object when no coverage data is available', () => {
Object.assign(localState.coverageFiles, {});
expect(getters.fileLineCoverage(localState)('test.js', 2)).toEqual({});
});
it('returns empty object when unknown filename is passed', () => {
expect(getters.fileLineCoverage(localState)('test.js', 2)).toEqual({});
});
it('returns no-coverage info when correct filename and line is passed', () => {
expect(getters.fileLineCoverage(localState)('app.js', 1)).toEqual({
text: 'No test coverage',
class: 'no-coverage',
});
});
it('returns coverage info when correct filename and line is passed', () => {
expect(getters.fileLineCoverage(localState)('app.js', 2)).toEqual({
text: 'Test coverage: 5 hits',
class: 'coverage',
});
});
});
});

View File

@ -123,6 +123,17 @@ describe('DiffsStoreMutations', () => {
});
});
describe('SET_COVERAGE_DATA', () => {
it('should set coverage data properly', () => {
const state = { coverageFiles: {} };
const coverage = { 'app.js': { '1': 0, '2': 1 } };
mutations[types.SET_COVERAGE_DATA](state, coverage);
expect(state.coverageFiles).toEqual(coverage);
});
});
describe('SET_DIFF_VIEW_TYPE', () => {
it('should set diff view type properly', () => {
const state = {};

View File

@ -45,6 +45,7 @@ describe Gitlab::Ci::Config::Entry::Reports do
:performance | 'performance.json'
:lsif | 'lsif.json'
:dotenv | 'build.dotenv'
:cobertura | 'cobertura-coverage.xml'
end
with_them do

View File

@ -0,0 +1,176 @@
# frozen_string_literal: true
require 'fast_spec_helper'
describe Gitlab::Ci::Parsers::Coverage::Cobertura do
describe '#parse!' do
subject { described_class.new.parse!(cobertura, coverage_report) }
let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
context 'when data is Cobertura style XML' do
context 'when there is no <class>' do
let(:cobertura) { '' }
it 'parses XML and returns empty coverage' do
expect { subject }.not_to raise_error
expect(coverage_report.files).to eq({})
end
end
context 'when there is a single <class>' do
context 'with no lines' do
let(:cobertura) do
<<-EOF.strip_heredoc
<classes><class filename="app.rb"></class></classes>
EOF
end
it 'parses XML and returns empty coverage' do
expect { subject }.not_to raise_error
expect(coverage_report.files).to eq({})
end
end
context 'with a single line' do
let(:cobertura) do
<<-EOF.strip_heredoc
<classes>
<class filename="app.rb"><lines>
<line number="1" hits="2"/>
</lines></class>
</classes>
EOF
end
it 'parses XML and returns a single file with coverage' do
expect { subject }.not_to raise_error
expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2 } })
end
end
context 'with multiple lines and methods info' do
let(:cobertura) do
<<-EOF.strip_heredoc
<classes>
<class filename="app.rb"><methods/><lines>
<line number="1" hits="2"/>
<line number="2" hits="0"/>
</lines></class>
</classes>
EOF
end
it 'parses XML and returns a single file with coverage' do
expect { subject }.not_to raise_error
expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0 } })
end
end
end
context 'when there are multiple <class>' do
context 'with the same filename and different lines' do
let(:cobertura) do
<<-EOF.strip_heredoc
<classes>
<class filename="app.rb"><methods/><lines>
<line number="1" hits="2"/>
<line number="2" hits="0"/>
</lines></class>
<class filename="app.rb"><methods/><lines>
<line number="6" hits="1"/>
<line number="7" hits="1"/>
</lines></class>
</classes>
EOF
end
it 'parses XML and returns a single file with merged coverage' do
expect { subject }.not_to raise_error
expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0, 6 => 1, 7 => 1 } })
end
end
context 'with the same filename and lines' do
let(:cobertura) do
<<-EOF.strip_heredoc
<packages><package><classes>
<class filename="app.rb"><methods/><lines>
<line number="1" hits="2"/>
<line number="2" hits="0"/>
</lines></class>
<class filename="app.rb"><methods/><lines>
<line number="1" hits="1"/>
<line number="2" hits="1"/>
</lines></class>
</classes></package></packages>
EOF
end
it 'parses XML and returns a single file with summed-up coverage' do
expect { subject }.not_to raise_error
expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 3, 2 => 1 } })
end
end
context 'with missing filename' do
let(:cobertura) do
<<-EOF.strip_heredoc
<classes>
<class filename="app.rb"><methods/><lines>
<line number="1" hits="2"/>
<line number="2" hits="0"/>
</lines></class>
<class><methods/><lines>
<line number="6" hits="1"/>
<line number="7" hits="1"/>
</lines></class>
</classes>
EOF
end
it 'parses XML and ignores class with missing name' do
expect { subject }.not_to raise_error
expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0 } })
end
end
context 'with invalid line information' do
let(:cobertura) do
<<-EOF.strip_heredoc
<classes>
<class filename="app.rb"><methods/><lines>
<line number="1" hits="2"/>
<line number="2" hits="0"/>
</lines></class>
<class filename="app.rb"><methods/><lines>
<line null="test" hits="1"/>
<line number="7" hits="1"/>
</lines></class>
</classes>
EOF
end
it 'raises an error' do
expect { subject }.to raise_error(described_class::CoberturaParserError)
end
end
end
end
context 'when data is not Cobertura style XML' do
let(:cobertura) { { coverage: '12%' }.to_json }
it 'raises an error' do
expect { subject }.to raise_error(described_class::CoberturaParserError)
end
end
end
end

Some files were not shown because too many files have changed in this diff.