Add latest changes from gitlab-org/gitlab@master
Parent: 9b09561f47
Commit: 03d56c8af0
99 changed files with 2109 additions and 394 deletions
@ -602,8 +602,6 @@
.rails:rules:detect-tests:
  rules:
    - <<: *if-not-ee
      when: never
    - <<: *if-default-refs
      changes: *code-backstage-patterns
    - <<: *if-merge-request-title-run-all-rspec
@ -1 +1 @@
c0ea152ccad891cda5fd255c1fea78562aae5e4a
14b4e7cba593bccd9093fd231cdbd3f016688451
@ -1,5 +1,10 @@
<script>
import UsersTable from './users_table.vue';

export default {
  components: {
    UsersTable,
  },
  props: {
    users: {
      type: Array,
@ -16,6 +21,6 @@ export default {

<template>
  <div>
    <!-- Temporary empty app -->
    <users-table :users="users" :paths="paths" />
  </div>
</template>
@ -0,0 +1,63 @@
<script>
import { GlTable } from '@gitlab/ui';
import { __ } from '~/locale';

const DEFAULT_TH_CLASSES =
  'gl-bg-transparent! gl-border-b-solid! gl-border-b-gray-100! gl-p-5! gl-border-b-1!';
const thWidthClass = width => `gl-w-${width}p ${DEFAULT_TH_CLASSES}`;

export default {
  components: {
    GlTable,
  },
  props: {
    users: {
      type: Array,
      required: true,
    },
    paths: {
      type: Object,
      required: true,
    },
  },
  fields: [
    {
      key: 'name',
      label: __('Name'),
      thClass: thWidthClass(40),
    },
    {
      key: 'projectsCount',
      label: __('Projects'),
      thClass: thWidthClass(10),
    },
    {
      key: 'createdAt',
      label: __('Created on'),
      thClass: thWidthClass(15),
    },
    {
      key: 'lastActivityOn',
      label: __('Last activity'),
      thClass: thWidthClass(15),
    },
    {
      key: 'settings',
      label: '',
      thClass: thWidthClass(20),
    },
  ],
};
</script>

<template>
  <div>
    <gl-table
      :items="users"
      :fields="$options.fields"
      :empty-text="s__('AdminUsers|No users found')"
      show-empty
      stacked="md"
    />
  </div>
</template>
@ -2,7 +2,6 @@
|
|||
import { mapGetters, mapActions, mapState } from 'vuex';
|
||||
import BoardListHeader from 'ee_else_ce/boards/components/board_list_header_new.vue';
|
||||
import BoardList from './board_list_new.vue';
|
||||
import { ListType } from '../constants';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
|
@ -36,16 +35,11 @@ export default {
|
|||
listIssues() {
|
||||
return this.getIssuesByList(this.list.id);
|
||||
},
|
||||
shouldFetchIssues() {
|
||||
return this.list.type !== ListType.blank;
|
||||
},
|
||||
},
|
||||
watch: {
|
||||
filterParams: {
|
||||
handler() {
|
||||
if (this.shouldFetchIssues) {
|
||||
this.fetchIssuesForList({ listId: this.list.id });
|
||||
}
|
||||
this.fetchIssuesForList({ listId: this.list.id });
|
||||
},
|
||||
deep: true,
|
||||
immediate: true,
|
||||
|
|
|
@ -72,9 +72,7 @@ export default {
|
|||
return this.list?.label?.description || this.list.title || '';
|
||||
},
|
||||
showListHeaderButton() {
|
||||
return (
|
||||
!this.disabled && this.listType !== ListType.closed && this.listType !== ListType.blank
|
||||
);
|
||||
return !this.disabled && this.listType !== ListType.closed;
|
||||
},
|
||||
showMilestoneListDetails() {
|
||||
return (
|
||||
|
@ -106,9 +104,6 @@ export default {
|
|||
this.listType !== ListType.backlog && this.showListHeaderButton && this.list.isExpanded
|
||||
);
|
||||
},
|
||||
showBoardListAndBoardInfo() {
|
||||
return this.listType !== ListType.blank;
|
||||
},
|
||||
uniqueKey() {
|
||||
// eslint-disable-next-line @gitlab/require-i18n-strings
|
||||
return `boards.${this.boardId}.${this.listType}.${this.list.id}`;
|
||||
|
@ -286,7 +281,6 @@ export default {
|
|||
</gl-tooltip>
|
||||
|
||||
<div
|
||||
v-if="showBoardListAndBoardInfo"
|
||||
class="issue-count-badge gl-display-inline-flex gl-pr-0 no-drag text-secondary"
|
||||
:class="{
|
||||
'gl-display-none!': !list.isExpanded && isSwimlanesHeader,
|
||||
|
|
|
@ -75,9 +75,7 @@ export default {
|
|||
return this.list?.label?.description || this.list.title || '';
|
||||
},
|
||||
showListHeaderButton() {
|
||||
return (
|
||||
!this.disabled && this.listType !== ListType.closed && this.listType !== ListType.blank
|
||||
);
|
||||
return !this.disabled && this.listType !== ListType.closed;
|
||||
},
|
||||
showMilestoneListDetails() {
|
||||
return (
|
||||
|
@ -111,9 +109,6 @@ export default {
|
|||
this.listType !== ListType.backlog && this.showListHeaderButton && this.list.isExpanded
|
||||
);
|
||||
},
|
||||
showBoardListAndBoardInfo() {
|
||||
return this.listType !== ListType.blank;
|
||||
},
|
||||
uniqueKey() {
|
||||
// eslint-disable-next-line @gitlab/require-i18n-strings
|
||||
return `boards.${this.boardId}.${this.listType}.${this.list.id}`;
|
||||
|
@ -299,7 +294,6 @@ export default {
|
|||
<!-- EE end -->
|
||||
|
||||
<div
|
||||
v-if="showBoardListAndBoardInfo"
|
||||
class="issue-count-badge gl-display-inline-flex gl-pr-0 no-drag gl-text-gray-500"
|
||||
:class="{
|
||||
'gl-display-none!': !list.isExpanded && isSwimlanesHeader,
|
||||
|
|
|
@ -9,7 +9,6 @@ export const ListType = {
|
|||
backlog: 'backlog',
|
||||
closed: 'closed',
|
||||
label: 'label',
|
||||
blank: 'blank',
|
||||
};
|
||||
|
||||
export const inactiveId = 0;
|
||||
|
@ -17,11 +16,7 @@ export const inactiveId = 0;
|
|||
export const ISSUABLE = 'issuable';
|
||||
export const LIST = 'list';
|
||||
|
||||
/* eslint-disable-next-line @gitlab/require-i18n-strings */
|
||||
export const DEFAULT_LABELS = ['to do', 'doing'];
|
||||
|
||||
export default {
|
||||
BoardType,
|
||||
ListType,
|
||||
DEFAULT_LABELS,
|
||||
};
|
||||
|
|
|
@ -181,7 +181,6 @@ export default () => {
|
|||
.then(res => res.data)
|
||||
.then(lists => {
|
||||
lists.forEach(list => boardsStore.addList(list));
|
||||
boardsStore.addBlankState();
|
||||
this.loading = false;
|
||||
})
|
||||
.catch(() => {
|
||||
|
|
|
@ -3,7 +3,7 @@ import { pick } from 'lodash';
|
|||
import boardListsQuery from 'ee_else_ce/boards/graphql/board_lists.query.graphql';
|
||||
import createGqClient, { fetchPolicies } from '~/lib/graphql';
|
||||
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
|
||||
import { BoardType, ListType, inactiveId, DEFAULT_LABELS } from '~/boards/constants';
|
||||
import { BoardType, ListType, inactiveId } from '~/boards/constants';
|
||||
import * as types from './mutation_types';
|
||||
import {
|
||||
formatBoardLists,
|
||||
|
@ -89,7 +89,6 @@ export default {
|
|||
if (!lists.nodes.find(l => l.listType === ListType.backlog) && !hideBacklogList) {
|
||||
dispatch('createList', { backlog: true });
|
||||
}
|
||||
dispatch('generateDefaultLists');
|
||||
})
|
||||
.catch(() => commit(types.RECEIVE_BOARD_LISTS_FAILURE));
|
||||
},
|
||||
|
@ -150,31 +149,6 @@ export default {
|
|||
.catch(() => commit(types.RECEIVE_LABELS_FAILURE));
|
||||
},
|
||||
|
||||
generateDefaultLists: async ({ state, commit, dispatch }) => {
|
||||
if (state.disabled) {
|
||||
return;
|
||||
}
|
||||
if (
|
||||
Object.entries(state.boardLists).find(
|
||||
([, list]) => list.type !== ListType.backlog && list.type !== ListType.closed,
|
||||
)
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
const fetchLabelsAndCreateList = label => {
|
||||
return dispatch('fetchLabels', label)
|
||||
.then(res => {
|
||||
if (res.length > 0) {
|
||||
dispatch('createList', { labelId: res[0].id });
|
||||
}
|
||||
})
|
||||
.catch(() => commit(types.GENERATE_DEFAULT_LISTS_FAILURE));
|
||||
};
|
||||
|
||||
await Promise.all(DEFAULT_LABELS.map(fetchLabelsAndCreateList));
|
||||
},
|
||||
|
||||
moveList: (
|
||||
{ state, commit, dispatch },
|
||||
{ listId, replacedListId, newIndex, adjustmentValue },
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
/* global ListIssue */
|
||||
import { sortBy, pick } from 'lodash';
|
||||
import Vue from 'vue';
|
||||
import Cookies from 'js-cookie';
|
||||
import BoardsStoreEE from 'ee_else_ce/boards/stores/boards_store_ee';
|
||||
import {
|
||||
urlParamsToObject,
|
||||
|
@ -125,20 +124,6 @@ const boardsStore = {
|
|||
.querySelector(`.js-board-list-${getIdFromGraphQLId(listId)}`)
|
||||
?.classList.remove('is-active');
|
||||
},
|
||||
shouldAddBlankState() {
|
||||
// Decide whether to add the blank state
|
||||
return !this.state.lists.filter(list => list.type !== 'backlog' && list.type !== 'closed')[0];
|
||||
},
|
||||
addBlankState() {
|
||||
if (!this.shouldAddBlankState() || this.welcomeIsHidden()) return;
|
||||
|
||||
this.generateDefaultLists()
|
||||
.then(res => res.data)
|
||||
.then(data => Promise.all(data.map(list => this.addList(list))))
|
||||
.catch(() => {
|
||||
this.removeList(undefined, 'label');
|
||||
});
|
||||
},
|
||||
|
||||
findIssueLabel(issue, findLabel) {
|
||||
return issue.labels.find(label => label.id === findLabel.id);
|
||||
|
@ -202,9 +187,6 @@ const boardsStore = {
|
|||
return list.issues.find(issue => issue.id === id);
|
||||
},
|
||||
|
||||
welcomeIsHidden() {
|
||||
return parseBoolean(Cookies.get('issue_board_welcome_hidden'));
|
||||
},
|
||||
removeList(id, type = 'blank') {
|
||||
const list = this.findList('id', id, type);
|
||||
|
||||
|
@ -562,10 +544,6 @@ const boardsStore = {
|
|||
return axios.get(this.state.endpoints.listsEndpoint);
|
||||
},
|
||||
|
||||
generateDefaultLists() {
|
||||
return axios.post(this.state.endpoints.listsEndpointGenerate, {});
|
||||
},
|
||||
|
||||
createList(entityId, entityType) {
|
||||
const list = {
|
||||
[entityType]: entityId,
|
||||
|
|
|
@ -32,11 +32,10 @@ export const addIssueToList = ({ state, listId, issueId, moveBeforeId, moveAfter
|
|||
|
||||
export default {
|
||||
[mutationTypes.SET_INITIAL_BOARD_DATA](state, data) {
|
||||
const { boardType, disabled, showPromotion, ...endpoints } = data;
|
||||
const { boardType, disabled, ...endpoints } = data;
|
||||
state.endpoints = endpoints;
|
||||
state.boardType = boardType;
|
||||
state.disabled = disabled;
|
||||
state.showPromotion = showPromotion;
|
||||
},
|
||||
|
||||
[mutationTypes.RECEIVE_BOARD_LISTS_SUCCESS]: (state, lists) => {
|
||||
|
|
|
@ -4,7 +4,6 @@ export default () => ({
|
|||
endpoints: {},
|
||||
boardType: null,
|
||||
disabled: false,
|
||||
showPromotion: false,
|
||||
isShowingLabels: true,
|
||||
activeId: inactiveId,
|
||||
sidebarType: '',
|
||||
|
|
|
@ -2,7 +2,6 @@ import Vue from 'vue';
|
|||
import Translate from '../vue_shared/translate';
|
||||
import ImportProjectsTable from './components/import_projects_table.vue';
|
||||
import { parseBoolean } from '../lib/utils/common_utils';
|
||||
import { queryToObject } from '../lib/utils/url_utility';
|
||||
import createStore from './store';
|
||||
|
||||
Vue.use(Translate);
|
||||
|
@ -20,18 +19,12 @@ export function initStoreFromElement(element) {
|
|||
paginatable,
|
||||
} = element.dataset;
|
||||
|
||||
const params = queryToObject(document.location.search);
|
||||
const page = parseInt(params.page ?? 1, 10);
|
||||
|
||||
return createStore({
|
||||
initialState: {
|
||||
defaultTargetNamespace: gon.current_username,
|
||||
ciCdOnly: parseBoolean(ciCdOnly),
|
||||
canSelectNamespace: parseBoolean(canSelectNamespace),
|
||||
provider,
|
||||
pageInfo: {
|
||||
page,
|
||||
},
|
||||
},
|
||||
endpoints: {
|
||||
reposPath,
|
||||
|
|
|
@ -1,66 +1,202 @@
|
|||
<script>
|
||||
import dateFormat from 'dateformat';
|
||||
import { GlColumnChart } from '@gitlab/ui/dist/charts';
|
||||
import { __, sprintf } from '~/locale';
|
||||
import { GlAlert } from '@gitlab/ui';
|
||||
import { __, s__, sprintf } from '~/locale';
|
||||
import { getDateInPast } from '~/lib/utils/datetime_utility';
|
||||
import getPipelineCountByStatus from '../graphql/queries/get_pipeline_count_by_status.query.graphql';
|
||||
import getProjectPipelineStatistics from '../graphql/queries/get_project_pipeline_statistics.query.graphql';
|
||||
import StatisticsList from './statistics_list.vue';
|
||||
import PipelinesAreaChart from './pipelines_area_chart.vue';
|
||||
import {
|
||||
CHART_CONTAINER_HEIGHT,
|
||||
INNER_CHART_HEIGHT,
|
||||
X_AXIS_LABEL_ROTATION,
|
||||
X_AXIS_TITLE_OFFSET,
|
||||
CHART_DATE_FORMAT,
|
||||
DEFAULT,
|
||||
INNER_CHART_HEIGHT,
|
||||
LOAD_ANALYTICS_FAILURE,
|
||||
LOAD_PIPELINES_FAILURE,
|
||||
ONE_WEEK_AGO_DAYS,
|
||||
ONE_MONTH_AGO_DAYS,
|
||||
PARSE_FAILURE,
|
||||
UNSUPPORTED_DATA,
|
||||
X_AXIS_LABEL_ROTATION,
|
||||
X_AXIS_TITLE_OFFSET,
|
||||
} from '../constants';
|
||||
|
||||
const defaultCountValues = {
|
||||
totalPipelines: {
|
||||
count: 0,
|
||||
},
|
||||
successfulPipelines: {
|
||||
count: 0,
|
||||
},
|
||||
};
|
||||
|
||||
const defaultAnalyticsValues = {
|
||||
weekPipelinesTotals: [],
|
||||
weekPipelinesLabels: [],
|
||||
weekPipelinesSuccessful: [],
|
||||
monthPipelinesLabels: [],
|
||||
monthPipelinesTotals: [],
|
||||
monthPipelinesSuccessful: [],
|
||||
yearPipelinesLabels: [],
|
||||
yearPipelinesTotals: [],
|
||||
yearPipelinesSuccessful: [],
|
||||
pipelineTimesLabels: [],
|
||||
pipelineTimesValues: [],
|
||||
};
|
||||
|
||||
export default {
|
||||
components: {
|
||||
StatisticsList,
|
||||
GlAlert,
|
||||
GlColumnChart,
|
||||
StatisticsList,
|
||||
PipelinesAreaChart,
|
||||
},
|
||||
props: {
|
||||
counts: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
timesChartData: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
lastWeekChartData: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
lastMonthChartData: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
lastYearChartData: {
|
||||
type: Object,
|
||||
required: true,
|
||||
inject: {
|
||||
projectPath: {
|
||||
type: String,
|
||||
default: '',
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
timesChartTransformedData: [
|
||||
{
|
||||
name: 'full',
|
||||
data: this.mergeLabelsAndValues(this.timesChartData.labels, this.timesChartData.values),
|
||||
},
|
||||
],
|
||||
counts: {
|
||||
...defaultCountValues,
|
||||
},
|
||||
analytics: {
|
||||
...defaultAnalyticsValues,
|
||||
},
|
||||
showFailureAlert: false,
|
||||
failureType: null,
|
||||
};
|
||||
},
|
||||
apollo: {
|
||||
counts: {
|
||||
query: getPipelineCountByStatus,
|
||||
variables() {
|
||||
return {
|
||||
projectPath: this.projectPath,
|
||||
};
|
||||
},
|
||||
update(data) {
|
||||
return data?.project;
|
||||
},
|
||||
error() {
|
||||
this.reportFailure(LOAD_PIPELINES_FAILURE);
|
||||
},
|
||||
},
|
||||
analytics: {
|
||||
query: getProjectPipelineStatistics,
|
||||
variables() {
|
||||
return {
|
||||
projectPath: this.projectPath,
|
||||
};
|
||||
},
|
||||
update(data) {
|
||||
return data?.project?.pipelineAnalytics;
|
||||
},
|
||||
error() {
|
||||
this.reportFailure(LOAD_ANALYTICS_FAILURE);
|
||||
},
|
||||
},
|
||||
},
|
||||
computed: {
|
||||
failure() {
|
||||
switch (this.failureType) {
|
||||
case LOAD_ANALYTICS_FAILURE:
|
||||
return {
|
||||
text: this.$options.errorTexts[LOAD_ANALYTICS_FAILURE],
|
||||
variant: 'danger',
|
||||
};
|
||||
case PARSE_FAILURE:
|
||||
return {
|
||||
text: this.$options.errorTexts[PARSE_FAILURE],
|
||||
variant: 'danger',
|
||||
};
|
||||
case UNSUPPORTED_DATA:
|
||||
return {
|
||||
text: this.$options.errorTexts[UNSUPPORTED_DATA],
|
||||
variant: 'info',
|
||||
};
|
||||
default:
|
||||
return {
|
||||
text: this.$options.errorTexts[DEFAULT],
|
||||
variant: 'danger',
|
||||
};
|
||||
}
|
||||
},
|
||||
successRatio() {
|
||||
const { successfulPipelines, failedPipelines } = this.counts;
|
||||
const successfulCount = successfulPipelines?.count;
|
||||
const failedCount = failedPipelines?.count;
|
||||
const ratio = (successfulCount / (successfulCount + failedCount)) * 100;
|
||||
|
||||
return failedCount === 0 ? 100 : ratio;
|
||||
},
|
||||
formattedCounts() {
|
||||
const {
|
||||
totalPipelines,
|
||||
successfulPipelines,
|
||||
failedPipelines,
|
||||
totalPipelineDuration,
|
||||
} = this.counts;
|
||||
|
||||
return {
|
||||
total: totalPipelines?.count,
|
||||
success: successfulPipelines?.count,
|
||||
failed: failedPipelines?.count,
|
||||
successRatio: this.successRatio,
|
||||
totalDuration: totalPipelineDuration,
|
||||
};
|
||||
},
|
||||
areaCharts() {
|
||||
const { lastWeek, lastMonth, lastYear } = this.$options.chartTitles;
|
||||
let areaChartsData = [];
|
||||
|
||||
try {
|
||||
areaChartsData = [
|
||||
this.buildAreaChartData(lastWeek, this.lastWeekChartData),
|
||||
this.buildAreaChartData(lastMonth, this.lastMonthChartData),
|
||||
this.buildAreaChartData(lastYear, this.lastYearChartData),
|
||||
];
|
||||
} catch {
|
||||
areaChartsData = [];
|
||||
this.reportFailure(PARSE_FAILURE);
|
||||
}
|
||||
|
||||
return areaChartsData;
|
||||
},
|
||||
lastWeekChartData() {
|
||||
return {
|
||||
labels: this.analytics.weekPipelinesLabels,
|
||||
totals: this.analytics.weekPipelinesTotals,
|
||||
success: this.analytics.weekPipelinesSuccessful,
|
||||
};
|
||||
},
|
||||
lastMonthChartData() {
|
||||
return {
|
||||
labels: this.analytics.monthPipelinesLabels,
|
||||
totals: this.analytics.monthPipelinesTotals,
|
||||
success: this.analytics.monthPipelinesSuccessful,
|
||||
};
|
||||
},
|
||||
lastYearChartData() {
|
||||
return {
|
||||
labels: this.analytics.yearPipelinesLabels,
|
||||
totals: this.analytics.yearPipelinesTotals,
|
||||
success: this.analytics.yearPipelinesSuccessful,
|
||||
};
|
||||
},
|
||||
timesChartTransformedData() {
|
||||
return [
|
||||
this.buildAreaChartData(lastWeek, this.lastWeekChartData),
|
||||
this.buildAreaChartData(lastMonth, this.lastMonthChartData),
|
||||
this.buildAreaChartData(lastYear, this.lastYearChartData),
|
||||
{
|
||||
name: 'full',
|
||||
data: this.mergeLabelsAndValues(
|
||||
this.analytics.pipelineTimesLabels,
|
||||
this.analytics.pipelineTimesValues,
|
||||
),
|
||||
},
|
||||
];
|
||||
},
|
||||
},
|
||||
|
@ -85,6 +221,13 @@ export default {
|
|||
],
|
||||
};
|
||||
},
|
||||
hideAlert() {
|
||||
this.showFailureAlert = false;
|
||||
},
|
||||
reportFailure(type) {
|
||||
this.showFailureAlert = true;
|
||||
this.failureType = type;
|
||||
},
|
||||
},
|
||||
chartContainerHeight: CHART_CONTAINER_HEIGHT,
|
||||
timesChartOptions: {
|
||||
|
@ -96,6 +239,16 @@ export default {
|
|||
nameGap: X_AXIS_TITLE_OFFSET,
|
||||
},
|
||||
},
|
||||
errorTexts: {
|
||||
[LOAD_ANALYTICS_FAILURE]: s__(
|
||||
'PipelineCharts|An error has occurred when retrieving the analytics data',
|
||||
),
|
||||
[LOAD_PIPELINES_FAILURE]: s__(
|
||||
'PipelineCharts|An error has occurred when retrieving the pipelines data',
|
||||
),
|
||||
[PARSE_FAILURE]: s__('PipelineCharts|There was an error parsing the data for the charts.'),
|
||||
[DEFAULT]: s__('PipelineCharts|An unknown error occurred while processing CI/CD analytics.'),
|
||||
},
|
||||
get chartTitles() {
|
||||
const today = dateFormat(new Date(), CHART_DATE_FORMAT);
|
||||
const pastDate = timeScale =>
|
||||
|
@ -116,13 +269,16 @@ export default {
|
|||
</script>
|
||||
<template>
|
||||
<div>
|
||||
<div class="mb-3">
|
||||
<gl-alert v-if="showFailureAlert" :variant="failure.variant" @dismiss="hideAlert">
|
||||
{{ failure.text }}
|
||||
</gl-alert>
|
||||
<div class="gl-mb-3">
|
||||
<h3>{{ s__('PipelineCharts|CI / CD Analytics') }}</h3>
|
||||
</div>
|
||||
<h4 class="my-4">{{ s__('PipelineCharts|Overall statistics') }}</h4>
|
||||
<h4 class="gl-my-4">{{ s__('PipelineCharts|Overall statistics') }}</h4>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<statistics-list :counts="counts" />
|
||||
<statistics-list :counts="formattedCounts" />
|
||||
</div>
|
||||
<div class="col-md-6">
|
||||
<strong>
|
||||
|
@ -139,7 +295,7 @@ export default {
|
|||
</div>
|
||||
</div>
|
||||
<hr />
|
||||
<h4 class="my-4">{{ __('Pipelines charts') }}</h4>
|
||||
<h4 class="gl-my-4">{{ __('Pipelines charts') }}</h4>
|
||||
<pipelines-area-chart
|
||||
v-for="(chart, index) in areaCharts"
|
||||
:key="index"
|
||||
|
|
|
@ -0,0 +1,151 @@
|
|||
<script>
|
||||
import dateFormat from 'dateformat';
|
||||
import { GlColumnChart } from '@gitlab/ui/dist/charts';
|
||||
import { __, sprintf } from '~/locale';
|
||||
import { getDateInPast } from '~/lib/utils/datetime_utility';
|
||||
import StatisticsList from './statistics_list.vue';
|
||||
import PipelinesAreaChart from './pipelines_area_chart.vue';
|
||||
import {
|
||||
CHART_CONTAINER_HEIGHT,
|
||||
INNER_CHART_HEIGHT,
|
||||
X_AXIS_LABEL_ROTATION,
|
||||
X_AXIS_TITLE_OFFSET,
|
||||
CHART_DATE_FORMAT,
|
||||
ONE_WEEK_AGO_DAYS,
|
||||
ONE_MONTH_AGO_DAYS,
|
||||
} from '../constants';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
StatisticsList,
|
||||
GlColumnChart,
|
||||
PipelinesAreaChart,
|
||||
},
|
||||
props: {
|
||||
counts: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
timesChartData: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
lastWeekChartData: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
lastMonthChartData: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
lastYearChartData: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
timesChartTransformedData: [
|
||||
{
|
||||
name: 'full',
|
||||
data: this.mergeLabelsAndValues(this.timesChartData.labels, this.timesChartData.values),
|
||||
},
|
||||
],
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
areaCharts() {
|
||||
const { lastWeek, lastMonth, lastYear } = this.$options.chartTitles;
|
||||
|
||||
return [
|
||||
this.buildAreaChartData(lastWeek, this.lastWeekChartData),
|
||||
this.buildAreaChartData(lastMonth, this.lastMonthChartData),
|
||||
this.buildAreaChartData(lastYear, this.lastYearChartData),
|
||||
];
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
mergeLabelsAndValues(labels, values) {
|
||||
return labels.map((label, index) => [label, values[index]]);
|
||||
},
|
||||
buildAreaChartData(title, data) {
|
||||
const { labels, totals, success } = data;
|
||||
|
||||
return {
|
||||
title,
|
||||
data: [
|
||||
{
|
||||
name: 'all',
|
||||
data: this.mergeLabelsAndValues(labels, totals),
|
||||
},
|
||||
{
|
||||
name: 'success',
|
||||
data: this.mergeLabelsAndValues(labels, success),
|
||||
},
|
||||
],
|
||||
};
|
||||
},
|
||||
},
|
||||
chartContainerHeight: CHART_CONTAINER_HEIGHT,
|
||||
timesChartOptions: {
|
||||
height: INNER_CHART_HEIGHT,
|
||||
xAxis: {
|
||||
axisLabel: {
|
||||
rotate: X_AXIS_LABEL_ROTATION,
|
||||
},
|
||||
nameGap: X_AXIS_TITLE_OFFSET,
|
||||
},
|
||||
},
|
||||
get chartTitles() {
|
||||
const today = dateFormat(new Date(), CHART_DATE_FORMAT);
|
||||
const pastDate = timeScale =>
|
||||
dateFormat(getDateInPast(new Date(), timeScale), CHART_DATE_FORMAT);
|
||||
return {
|
||||
lastWeek: sprintf(__('Pipelines for last week (%{oneWeekAgo} - %{today})'), {
|
||||
oneWeekAgo: pastDate(ONE_WEEK_AGO_DAYS),
|
||||
today,
|
||||
}),
|
||||
lastMonth: sprintf(__('Pipelines for last month (%{oneMonthAgo} - %{today})'), {
|
||||
oneMonthAgo: pastDate(ONE_MONTH_AGO_DAYS),
|
||||
today,
|
||||
}),
|
||||
lastYear: __('Pipelines for last year'),
|
||||
};
|
||||
},
|
||||
};
|
||||
</script>
|
||||
<template>
|
||||
<div>
|
||||
<div class="mb-3">
|
||||
<h3>{{ s__('PipelineCharts|CI / CD Analytics') }}</h3>
|
||||
</div>
|
||||
<h4 class="my-4">{{ s__('PipelineCharts|Overall statistics') }}</h4>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<statistics-list :counts="counts" />
|
||||
</div>
|
||||
<div class="col-md-6">
|
||||
<strong>
|
||||
{{ __('Duration for the last 30 commits') }}
|
||||
</strong>
|
||||
<gl-column-chart
|
||||
:height="$options.chartContainerHeight"
|
||||
:option="$options.timesChartOptions"
|
||||
:bars="timesChartTransformedData"
|
||||
:y-axis-title="__('Minutes')"
|
||||
:x-axis-title="__('Commit')"
|
||||
x-axis-type="category"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<hr />
|
||||
<h4 class="my-4">{{ __('Pipelines charts') }}</h4>
|
||||
<pipelines-area-chart
|
||||
v-for="(chart, index) in areaCharts"
|
||||
:key="index"
|
||||
:chart-data="chart.data"
|
||||
>
|
||||
{{ chart.title }}
|
||||
</pipelines-area-chart>
|
||||
</div>
|
||||
</template>
|
|
@ -1,7 +1,10 @@
|
|||
<script>
|
||||
import { formatTime } from '~/lib/utils/datetime_utility';
|
||||
import { SUPPORTED_FORMATS, getFormatter } from '~/lib/utils/unit_format';
|
||||
import { s__, n__ } from '~/locale';
|
||||
|
||||
const defaultPrecision = 2;
|
||||
|
||||
export default {
|
||||
props: {
|
||||
counts: {
|
||||
|
@ -14,6 +17,8 @@ export default {
|
|||
return formatTime(this.counts.totalDuration);
|
||||
},
|
||||
statistics() {
|
||||
const formatter = getFormatter(SUPPORTED_FORMATS.percentHundred);
|
||||
|
||||
return [
|
||||
{
|
||||
title: s__('PipelineCharts|Total:'),
|
||||
|
@ -29,7 +34,7 @@ export default {
|
|||
},
|
||||
{
|
||||
title: s__('PipelineCharts|Success ratio:'),
|
||||
value: `${this.counts.successRatio}%`,
|
||||
value: formatter(this.counts.successRatio, defaultPrecision),
|
||||
},
|
||||
{
|
||||
title: s__('PipelineCharts|Total duration:'),
|
||||
|
|
|
@ -11,3 +11,9 @@ export const ONE_WEEK_AGO_DAYS = 7;
export const ONE_MONTH_AGO_DAYS = 31;

export const CHART_DATE_FORMAT = 'dd mmm';

export const DEFAULT = 'default';
export const PARSE_FAILURE = 'parse_failure';
export const LOAD_ANALYTICS_FAILURE = 'load_analytics_failure';
export const LOAD_PIPELINES_FAILURE = 'load_pipelines_failure';
export const UNSUPPORTED_DATA = 'unsupported_data';
@ -0,0 +1,14 @@
query getPipelineCountByStatus($projectPath: ID!) {
  project(fullPath: $projectPath) {
    totalPipelines: pipelines {
      count
    }
    successfulPipelines: pipelines(status: SUCCESS) {
      count
    }
    failedPipelines: pipelines(status: FAILED) {
      count
    }
    totalPipelineDuration
  }
}
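This query backs the `counts` Apollo query in `app.vue` above. As a hedged sketch of what it returns, the same document can also be executed against the backend schema directly, for example from a Rails console or a request spec; the project path and user below are placeholders, not part of this commit:

```ruby
# Illustrative only: running the pipeline-count query through the backend
# schema (e.g. from a Rails console or a request spec).
query = <<~GRAPHQL
  query getPipelineCountByStatus($projectPath: ID!) {
    project(fullPath: $projectPath) {
      totalPipelines: pipelines { count }
      successfulPipelines: pipelines(status: SUCCESS) { count }
      failedPipelines: pipelines(status: FAILED) { count }
      totalPipelineDuration
    }
  }
GRAPHQL

result = GitlabSchema.execute(
  query,
  variables: { projectPath: 'group/project' },        # placeholder path
  context: { current_user: User.first }               # any user who can read the project
)

result.to_h.dig('data', 'project', 'successfulPipelines', 'count')
```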
@ -0,0 +1,17 @@
query getProjectPipelineStatistics($projectPath: ID!) {
  project(fullPath: $projectPath) {
    pipelineAnalytics {
      weekPipelinesTotals
      weekPipelinesLabels
      weekPipelinesSuccessful
      monthPipelinesLabels
      monthPipelinesTotals
      monthPipelinesSuccessful
      yearPipelinesLabels
      yearPipelinesTotals
      yearPipelinesSuccessful
      pipelineTimesLabels
      pipelineTimesValues
    }
  }
}
@ -1,8 +1,20 @@
|
|||
import Vue from 'vue';
|
||||
import VueApollo from 'vue-apollo';
|
||||
import createDefaultClient from '~/lib/graphql';
|
||||
import ProjectPipelinesChartsLegacy from './components/app_legacy.vue';
|
||||
import ProjectPipelinesCharts from './components/app.vue';
|
||||
|
||||
export default () => {
|
||||
const el = document.querySelector('#js-project-pipelines-charts-app');
|
||||
Vue.use(VueApollo);
|
||||
|
||||
const apolloProvider = new VueApollo({
|
||||
defaultClient: createDefaultClient(),
|
||||
});
|
||||
|
||||
const mountPipelineChartsApp = el => {
|
||||
// Not all of the values will be defined since some of them will be
|
||||
// empty depending on the value of the graphql_pipeline_analytics
|
||||
// feature flag, once the rollout of the feature flag is completed
|
||||
// the undefined values will be deleted
|
||||
const {
|
||||
countsFailed,
|
||||
countsSuccess,
|
||||
|
@ -20,22 +32,48 @@ export default () => {
|
|||
lastYearChartLabels,
|
||||
lastYearChartTotals,
|
||||
lastYearChartSuccess,
|
||||
projectPath,
|
||||
} = el.dataset;
|
||||
|
||||
const parseAreaChartData = (labels, totals, success) => ({
|
||||
labels: JSON.parse(labels),
|
||||
totals: JSON.parse(totals),
|
||||
success: JSON.parse(success),
|
||||
});
|
||||
const parseAreaChartData = (labels, totals, success) => {
|
||||
let parsedData = {};
|
||||
|
||||
try {
|
||||
parsedData = {
|
||||
labels: JSON.parse(labels),
|
||||
totals: JSON.parse(totals),
|
||||
success: JSON.parse(success),
|
||||
};
|
||||
} catch {
|
||||
parsedData = {};
|
||||
}
|
||||
|
||||
return parsedData;
|
||||
};
|
||||
|
||||
if (gon?.features?.graphqlPipelineAnalytics) {
|
||||
return new Vue({
|
||||
el,
|
||||
name: 'ProjectPipelinesChartsApp',
|
||||
components: {
|
||||
ProjectPipelinesCharts,
|
||||
},
|
||||
apolloProvider,
|
||||
provide: {
|
||||
projectPath,
|
||||
},
|
||||
render: createElement => createElement(ProjectPipelinesCharts, {}),
|
||||
});
|
||||
}
|
||||
|
||||
return new Vue({
|
||||
el,
|
||||
name: 'ProjectPipelinesChartsApp',
|
||||
name: 'ProjectPipelinesChartsAppLegacy',
|
||||
components: {
|
||||
ProjectPipelinesCharts,
|
||||
ProjectPipelinesChartsLegacy,
|
||||
},
|
||||
render: createElement =>
|
||||
createElement(ProjectPipelinesCharts, {
|
||||
createElement(ProjectPipelinesChartsLegacy, {
|
||||
props: {
|
||||
counts: {
|
||||
failed: countsFailed,
|
||||
|
@ -67,3 +105,8 @@ export default () => {
|
|||
}),
|
||||
});
|
||||
};
|
||||
|
||||
export default () => {
|
||||
const el = document.querySelector('#js-project-pipelines-charts-app');
|
||||
return !el ? {} : mountPipelineChartsApp(el);
|
||||
};
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
.project-last-commit {
|
||||
min-height: 4.75rem;
|
||||
}
|
||||
|
||||
.tree-holder {
|
||||
.nav-block {
|
||||
margin: 16px 0;
|
||||
|
|
|
@ -143,3 +143,16 @@
|
|||
flex-direction: column !important;
|
||||
}
|
||||
}
|
||||
|
||||
// These will be moved to @gitlab/ui in https://gitlab.com/gitlab-org/gitlab-ui/-/issues/1091
|
||||
.gl-w-10p {
|
||||
width: 10%;
|
||||
}
|
||||
|
||||
.gl-w-20p {
|
||||
width: 20%;
|
||||
}
|
||||
|
||||
.gl-w-40p {
|
||||
width: 40%;
|
||||
}
|
||||
|
|
|
@ -27,29 +27,9 @@ class JiraConnect::AppDescriptorController < JiraConnect::ApplicationController
|
|||
authentication: {
|
||||
type: 'jwt'
|
||||
},
|
||||
modules: modules,
|
||||
scopes: %w(READ WRITE DELETE),
|
||||
apiVersion: 1,
|
||||
modules: {
|
||||
jiraDevelopmentTool: {
|
||||
key: 'gitlab-development-tool',
|
||||
application: {
|
||||
value: 'GitLab'
|
||||
},
|
||||
name: {
|
||||
value: 'GitLab'
|
||||
},
|
||||
url: 'https://gitlab.com',
|
||||
logoUrl: view_context.image_url('gitlab_logo.png'),
|
||||
capabilities: %w(branch commit pull_request)
|
||||
},
|
||||
postInstallPage: {
|
||||
key: 'gitlab-configuration',
|
||||
name: {
|
||||
value: 'GitLab Configuration'
|
||||
},
|
||||
url: relative_to_base_path(jira_connect_subscriptions_path)
|
||||
}
|
||||
},
|
||||
apiMigrations: {
|
||||
gdpr: true
|
||||
}
|
||||
|
@ -58,6 +38,55 @@ class JiraConnect::AppDescriptorController < JiraConnect::ApplicationController
|
|||
|
||||
private
|
||||
|
||||
HOME_URL = 'https://gitlab.com'
|
||||
DOC_URL = 'https://docs.gitlab.com/ee/user/project/integrations/jira.html#gitlab-jira-integration'
|
||||
|
||||
def modules
|
||||
modules = {
|
||||
jiraDevelopmentTool: {
|
||||
key: 'gitlab-development-tool',
|
||||
application: {
|
||||
value: 'GitLab'
|
||||
},
|
||||
name: {
|
||||
value: 'GitLab'
|
||||
},
|
||||
url: HOME_URL,
|
||||
logoUrl: logo_url,
|
||||
capabilities: %w(branch commit pull_request)
|
||||
},
|
||||
postInstallPage: {
|
||||
key: 'gitlab-configuration',
|
||||
name: {
|
||||
value: 'GitLab Configuration'
|
||||
},
|
||||
url: relative_to_base_path(jira_connect_subscriptions_path)
|
||||
}
|
||||
}
|
||||
|
||||
modules.merge!(build_information_module)
|
||||
|
||||
modules
|
||||
end
|
||||
|
||||
def logo_url
|
||||
view_context.image_url('gitlab_logo.png')
|
||||
end
|
||||
|
||||
# See: https://developer.atlassian.com/cloud/jira/software/modules/build/
|
||||
def build_information_module
|
||||
{
|
||||
jiraBuildInfoProvider: {
|
||||
homeUrl: HOME_URL,
|
||||
logoUrl: logo_url,
|
||||
documentationUrl: DOC_URL,
|
||||
actions: {},
|
||||
name: { value: "GitLab CI" },
|
||||
key: "gitlab-ci"
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
def relative_to_base_path(full_path)
|
||||
full_path.sub(/^#{jira_connect_base_path}/, '')
|
||||
end
|
||||
|
|
|
@ -17,6 +17,7 @@ class Projects::PipelinesController < Projects::ApplicationController
|
|||
push_frontend_feature_flag(:new_pipeline_form, project, default_enabled: true)
|
||||
push_frontend_feature_flag(:graphql_pipeline_header, project, type: :development, default_enabled: false)
|
||||
push_frontend_feature_flag(:graphql_pipeline_details, project, type: :development, default_enabled: false)
|
||||
push_frontend_feature_flag(:graphql_pipeline_analytics, project, type: :development)
|
||||
push_frontend_feature_flag(:new_pipeline_form_prefilled_vars, project, type: :development)
|
||||
end
|
||||
before_action :ensure_pipeline, only: [:show]
|
||||
|
|
|
@ -71,8 +71,6 @@ class ProjectsController < Projects::ApplicationController
|
|||
@project = ::Projects::CreateService.new(current_user, project_params(attributes: project_params_create_attributes)).execute
|
||||
|
||||
if @project.saved?
|
||||
cookies[:issue_board_welcome_hidden] = { path: project_path(@project), value: nil, expires: Time.zone.at(0) }
|
||||
|
||||
redirect_to(
|
||||
project_path(@project, custom_import_params),
|
||||
notice: _("Project '%{project_name}' was successfully created.") % { project_name: @project.name }
|
||||
|
|
|
@ -23,7 +23,10 @@ module Resolvers
|
|||
|
||||
def resolve(platform:, architecture:, **args)
|
||||
instructions = Gitlab::Ci::RunnerInstructions.new(
|
||||
{ current_user: current_user, os: platform, arch: architecture }.merge(target_param(args))
|
||||
current_user: current_user,
|
||||
os: platform,
|
||||
arch: architecture,
|
||||
**target_param(args)
|
||||
)
|
||||
|
||||
{
|
||||
|
|
|
@ -367,11 +367,11 @@ class ApplicationSetting < ApplicationRecord
|
|||
|
||||
validates :eks_access_key_id,
|
||||
length: { in: 16..128 },
|
||||
if: :eks_integration_enabled?
|
||||
if: -> (setting) { setting.eks_integration_enabled? && setting.eks_access_key_id.present? }
|
||||
|
||||
validates :eks_secret_access_key,
|
||||
presence: true,
|
||||
if: :eks_integration_enabled?
|
||||
if: -> (setting) { setting.eks_integration_enabled? && setting.eks_access_key_id.present? }
|
||||
|
||||
validates_with X509CertificateCredentialsValidator,
|
||||
certificate: :external_auth_client_cert,
|
||||
|
|
|
@ -259,6 +259,16 @@ module Ci
|
|||
end
|
||||
end
|
||||
|
||||
after_transition any => any do |pipeline|
|
||||
next unless Feature.enabled?(:jira_sync_builds, pipeline.project)
|
||||
|
||||
pipeline.run_after_commit do
|
||||
# Passing the seq-id ensures this is idempotent
|
||||
seq_id = ::Atlassian::JiraConnect::Client.generate_update_sequence_id
|
||||
::JiraConnect::SyncBuildsWorker.perform_async(pipeline.id, seq_id)
|
||||
end
|
||||
end
|
||||
|
||||
after_transition any => [:success, :failed] do |pipeline|
|
||||
ref_status = pipeline.ci_ref&.update_status_by!(pipeline)
|
||||
|
||||
|
|
|
@ -304,14 +304,12 @@ module Issuable
|
|||
end
|
||||
|
||||
def order_labels_priority(direction = 'ASC', excluded_labels: [], extra_select_columns: [], with_cte: false)
|
||||
params = {
|
||||
highest_priority = highest_label_priority(
|
||||
target_type: name,
|
||||
target_column: "#{table_name}.id",
|
||||
project_column: "#{table_name}.#{project_foreign_key}",
|
||||
excluded_labels: excluded_labels
|
||||
}
|
||||
|
||||
highest_priority = highest_label_priority(params).to_sql
|
||||
).to_sql
|
||||
|
||||
# When using CTE make sure to select the same columns that are on the group_by clause.
|
||||
# This prevents errors when ignored columns are present in the database.
|
||||
|
|
|
@ -257,7 +257,7 @@ class Label < ApplicationRecord
  end

  def present(attributes)
    super(attributes.merge(presenter_class: ::LabelPresenter))
    super(**attributes.merge(presenter_class: ::LabelPresenter))
  end

  private
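The `**` added here (and the matching changes in `Todo.order_by_labels_priority`, `Ci::CreatePipelineService#execute!`, `Git::BaseHooksService`, and `Clusters::Aws` below) looks like preparation for Ruby 2.7/3.0 keyword-argument separation, where a trailing hash is no longer implicitly converted into keyword arguments. A minimal sketch of that behaviour, assuming nothing beyond plain Ruby:

```ruby
# Not GitLab code - a minimal illustration of keyword-argument separation.
def present(attributes = {}, presenter_class: nil)
  [attributes, presenter_class]
end

opts = { presenter_class: :label_presenter }

# Ruby 2.6 implicitly promoted the trailing hash to keywords and returned
# [{}, :label_presenter]; Ruby 3.0 keeps it positional instead:
present(opts)   #=> [{ presenter_class: :label_presenter }, nil]

# Splatting states the keyword intent explicitly and behaves the same on both:
present(**opts) #=> [{}, :label_presenter]
```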
@ -139,13 +139,11 @@ class Todo < ApplicationRecord
|
|||
# Todos with highest priority first then oldest todos
|
||||
# Need to order by created_at last because of differences on Mysql and Postgres when joining by type "Merge_request/Issue"
|
||||
def order_by_labels_priority
|
||||
params = {
|
||||
highest_priority = highest_label_priority(
|
||||
target_type_column: "todos.target_type",
|
||||
target_column: "todos.target_id",
|
||||
project_column: "todos.project_id"
|
||||
}
|
||||
|
||||
highest_priority = highest_label_priority(params).to_sql
|
||||
).to_sql
|
||||
|
||||
select("#{table_name}.*, (#{highest_priority}) AS highest_priority")
|
||||
.order(Gitlab::Database.nulls_last_order('highest_priority', 'ASC'))
|
||||
|
|
|
@ -91,7 +91,9 @@ module Ci
|
|||
# rubocop: enable Metrics/ParameterLists
|
||||
|
||||
def execute!(*args, &block)
|
||||
execute(*args, &block).tap do |pipeline|
|
||||
source, params = args[0], Hash(args[1])
|
||||
|
||||
execute(source, **params, &block).tap do |pipeline|
|
||||
unless pipeline.persisted?
|
||||
raise CreateError, pipeline.full_error_messages
|
||||
end
|
||||
|
|
|
@ -30,10 +30,17 @@ module Clusters
    attr_reader :provider, :region

    def client
      ::Aws::STS::Client.new(credentials: gitlab_credentials, region: region)
      ::Aws::STS::Client.new(**client_args)
    end

    def client_args
      { region: region, credentials: gitlab_credentials }.compact
    end

    def gitlab_credentials
      # These are not needed for IAM instance profiles
      return unless access_key_id.present? && secret_access_key.present?

      ::Aws::Credentials.new(access_key_id, secret_access_key)
    end
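The `.compact` in `client_args` drops the `credentials:` key entirely when no access keys are stored, which lets the AWS SDK fall back to its default credential provider chain (environment variables, shared config, or an instance profile), matching the changelog entry for instance-profile support. A small hedged sketch of that behaviour; the region and key values are placeholders:

```ruby
require 'aws-sdk-core'

# Illustration of why the nil credentials are compacted away.
def client_args(region, credentials)
  { region: region, credentials: credentials }.compact
end

# With stored keys, explicit credentials are passed through to the client.
creds = Aws::Credentials.new('AKIAEXAMPLE', 'secret')
client_args('us-east-1', creds) #=> { region: "us-east-1", credentials: #<Aws::Credentials ...> }

# Without stored keys the :credentials entry disappears, so
# Aws::STS::Client.new(**client_args('us-east-1', nil)) resolves credentials
# from the SDK's default provider chain (including the instance profile).
client_args('us-east-1', nil)   #=> { region: "us-east-1" }
```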
@ -135,11 +135,12 @@ module Git
|
|||
# We only need the last commit for the event push, and we don't
|
||||
# need the full deltas either.
|
||||
@event_push_data ||= Gitlab::DataBuilder::Push.build(
|
||||
push_data_params(commits: commits.last, with_changed_files: false))
|
||||
**push_data_params(commits: commits.last, with_changed_files: false)
|
||||
)
|
||||
end
|
||||
|
||||
def push_data
|
||||
@push_data ||= Gitlab::DataBuilder::Push.build(push_data_params(commits: limited_commits))
|
||||
@push_data ||= Gitlab::DataBuilder::Push.build(**push_data_params(commits: limited_commits))
|
||||
|
||||
# Dependent code may modify the push data, so return a duplicate each time
|
||||
@push_data.dup
|
||||
|
|
|
@ -6,13 +6,15 @@ module JiraConnect
|
|||
self.project = project
|
||||
end
|
||||
|
||||
def execute(commits: nil, branches: nil, merge_requests: nil, update_sequence_id: nil)
|
||||
JiraConnectInstallation.for_project(project).each do |installation|
|
||||
# Parameters: see Atlassian::JiraConnect::Client#send_info
|
||||
# Includes: update_sequence_id, commits, branches, merge_requests, pipelines
|
||||
def execute(**args)
|
||||
JiraConnectInstallation.for_project(project).flat_map do |installation|
|
||||
client = Atlassian::JiraConnect::Client.new(installation.base_url, installation.shared_secret)
|
||||
|
||||
response = client.store_dev_info(project: project, commits: commits, branches: branches, merge_requests: merge_requests, update_sequence_id: update_sequence_id)
|
||||
responses = client.send_info(project: project, **args)
|
||||
|
||||
log_response(response)
|
||||
responses.each { |r| log_response(r) }
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -29,7 +31,7 @@ module JiraConnect
|
|||
jira_response: response&.to_json
|
||||
}
|
||||
|
||||
if response && response['errorMessages']
|
||||
if response && (response['errorMessages'] || response['rejectedBuilds'].present?)
|
||||
logger.error(message)
|
||||
else
|
||||
logger.info(message)
|
||||
|
|
|
@ -12,6 +12,7 @@
class JsonSchemaValidator < ActiveModel::EachValidator
  FILENAME_ALLOWED = /\A[a-z0-9_-]*\Z/.freeze
  FilenameError = Class.new(StandardError)
  JSON_VALIDATOR_MAX_DRAFT_VERSION = 4

  def initialize(options)
    raise ArgumentError, "Expected 'filename' as an argument" unless options[:filename]
@ -29,10 +30,18 @@ class JsonSchemaValidator < ActiveModel::EachValidator
  private

  def valid_schema?(value)
    JSON::Validator.validate(schema_path, value)
    if draft_version > JSON_VALIDATOR_MAX_DRAFT_VERSION
      JSONSchemer.schema(Pathname.new(schema_path)).valid?(value)
    else
      JSON::Validator.validate(schema_path, value)
    end
  end

  def schema_path
    Rails.root.join('app', 'validators', 'json_schemas', "#{options[:filename]}.json").to_s
  end

  def draft_version
    options[:draft] || JSON_VALIDATOR_MAX_DRAFT_VERSION
  end
end
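With the `draft` option added above, a model can opt in to draft-07 validation (via JSONSchemer) while existing callers keep the draft-4 `JSON::Validator` path. A hedged usage sketch; the models, columns, and schema filenames are invented for illustration and would need matching files under `app/validators/json_schemas/`:

```ruby
# Hypothetical models using the validator above.
class BuildReportResult < ApplicationRecord
  # Default path: draft-4 validation through JSON::Validator.
  validates :data, json_schema: { filename: 'build_report_result_data' }
end

class SecurityFinding < ApplicationRecord
  # draft > 4 routes valid_schema? through JSONSchemer instead.
  validates :finding_data, json_schema: { filename: 'security_finding_data', draft: 7 }
end
```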
@ -24,8 +24,13 @@
|
|||
.form-group
|
||||
= f.label :eks_access_key_id, 'Access key ID', class: 'label-bold'
|
||||
= f.text_field :eks_access_key_id, class: 'form-control'
|
||||
.form-text.text-muted
|
||||
= _('AWS Access Key. Only required if not using role instance credentials')
|
||||
|
||||
.form-group
|
||||
= f.label :eks_secret_access_key, 'Secret access key', class: 'label-bold'
|
||||
= f.password_field :eks_secret_access_key, autocomplete: 'off', class: 'form-control'
|
||||
.form-text.text-muted
|
||||
= _('AWS Secret Access Key. Only required if not using role instance credentials')
|
||||
|
||||
= f.submit 'Save changes', class: "gl-button btn btn-success"
|
||||
|
|
|
@ -69,13 +69,13 @@
|
|||
= link_to admin_users_path(sort: value, filter: params[:filter], search_query: params[:search_query]) do
|
||||
= title
|
||||
|
||||
- if @users.empty?
|
||||
.nothing-here-block.border-top-0
|
||||
= s_('AdminUsers|No users found')
|
||||
- elsif Feature.enabled?(:vue_admin_users)
|
||||
- if Feature.enabled?(:vue_admin_users)
|
||||
#js-admin-users-app{ data: admin_users_data_attributes(@users) }
|
||||
.gl-spinner-container.gl-my-7
|
||||
%span.gl-vertical-align-bottom.gl-spinner.gl-spinner-dark.gl-spinner-lg{ aria: { label: _('Loading') } }
|
||||
- elsif @users.empty?
|
||||
.nothing-here-block.border-top-0
|
||||
= s_('AdminUsers|No users found')
|
||||
- else
|
||||
.table-holder
|
||||
.thead-white.text-nowrap.gl-responsive-table-row.table-row-header{ role: 'row' }
|
||||
|
|
|
@ -11,6 +11,9 @@
|
|||
= render 'projects/tree/tree_header', tree: @tree
|
||||
|
||||
#js-last-commit
|
||||
.info-well.gl-display-none.gl-display-sm-flex.project-last-commit
|
||||
.gl-spinner-container.m-auto
|
||||
= loading_icon(size: 'md', color: 'dark', css_class: 'align-text-bottom')
|
||||
|
||||
- if is_project_overview
|
||||
.project-buttons.gl-mb-3.js-show-on-project-root
|
||||
|
|
|
@ -1,7 +1,10 @@
|
|||
- page_title _('CI / CD Analytics')
|
||||
|
||||
#js-project-pipelines-charts-app{ data: { counts: @counts, success_ratio: success_ratio(@counts),
|
||||
times_chart: { labels: @charts[:pipeline_times].labels, values: @charts[:pipeline_times].pipeline_times },
|
||||
last_week_chart: { labels: @charts[:week].labels, totals: @charts[:week].total, success: @charts[:week].success },
|
||||
last_month_chart: { labels: @charts[:month].labels, totals: @charts[:month].total, success: @charts[:month].success },
|
||||
last_year_chart: { labels: @charts[:year].labels, totals: @charts[:year].total, success: @charts[:year].success } } }
|
||||
- if Feature.enabled?(:graphql_pipeline_analytics)
|
||||
#js-project-pipelines-charts-app{ data: { project_path: @project.full_path } }
|
||||
- else
|
||||
#js-project-pipelines-charts-app{ data: { counts: @counts, success_ratio: success_ratio(@counts),
|
||||
times_chart: { labels: @charts[:pipeline_times].labels, values: @charts[:pipeline_times].pipeline_times },
|
||||
last_week_chart: { labels: @charts[:week].labels, totals: @charts[:week].total, success: @charts[:week].success },
|
||||
last_month_chart: { labels: @charts[:month].labels, totals: @charts[:month].total, success: @charts[:month].success },
|
||||
last_year_chart: { labels: @charts[:year].labels, totals: @charts[:year].total, success: @charts[:year].success } } }
|
||||
|
|
|
@ -877,15 +877,23 @@
|
|||
:tags: []
|
||||
- :name: jira_connect:jira_connect_sync_branch
|
||||
:feature_category: :integrations
|
||||
:has_external_dependencies:
|
||||
:has_external_dependencies: true
|
||||
:urgency: :low
|
||||
:resource_boundary: :unknown
|
||||
:weight: 1
|
||||
:idempotent:
|
||||
:tags: []
|
||||
- :name: jira_connect:jira_connect_sync_builds
|
||||
:feature_category: :integrations
|
||||
:has_external_dependencies: true
|
||||
:urgency: :low
|
||||
:resource_boundary: :unknown
|
||||
:weight: 1
|
||||
:idempotent: true
|
||||
:tags: []
|
||||
- :name: jira_connect:jira_connect_sync_merge_request
|
||||
:feature_category: :integrations
|
||||
:has_external_dependencies:
|
||||
:has_external_dependencies: true
|
||||
:urgency: :low
|
||||
:resource_boundary: :unknown
|
||||
:weight: 1
|
||||
|
|
|
@ -7,6 +7,7 @@ module JiraConnect
|
|||
queue_namespace :jira_connect
|
||||
feature_category :integrations
|
||||
loggable_arguments 1, 2
|
||||
worker_has_external_dependencies!
|
||||
|
||||
def perform(project_id, branch_name, commit_shas, update_sequence_id = nil)
|
||||
project = Project.find_by_id(project_id)
|
||||
|
|
app/workers/jira_connect/sync_builds_worker.rb (new file, 24 lines)
@ -0,0 +1,24 @@
# frozen_string_literal: true

module JiraConnect
  class SyncBuildsWorker
    include ApplicationWorker

    idempotent!
    worker_has_external_dependencies!

    queue_namespace :jira_connect
    feature_category :integrations

    def perform(pipeline_id, sequence_id)
      pipeline = Ci::Pipeline.find_by_id(pipeline_id)

      return unless pipeline
      return unless Feature.enabled?(:jira_sync_builds, pipeline.project)

      ::JiraConnect::SyncService
        .new(pipeline.project)
        .execute(pipelines: [pipeline], update_sequence_id: sequence_id)
    end
  end
end
@ -7,6 +7,8 @@ module JiraConnect
|
|||
queue_namespace :jira_connect
|
||||
feature_category :integrations
|
||||
|
||||
worker_has_external_dependencies!
|
||||
|
||||
def perform(merge_request_id, update_sequence_id = nil)
|
||||
merge_request = MergeRequest.find_by_id(merge_request_id)
|
||||
|
||||
|
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Remove unneeded pagination code for project importers.
|
||||
merge_request: 49589
|
||||
author:
|
||||
type: changed
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Add validating jsonb fields with json schema draft-07
|
||||
merge_request: 49451
|
||||
author:
|
||||
type: added
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Boards - Remove default labels lists generation
|
||||
merge_request: 49071
|
||||
author:
|
||||
type: changed
|
changelogs/unreleased/sh-aws-sdk-use-iam-profile.yml (new file, 5 lines)
@ -0,0 +1,5 @@
---
title: Support instance profiles for IAM role for Amazon EKS integration
merge_request: 49212
author:
type: added
changelogs/unreleased/tz-reduce-last-commit-cls.yml (new file, 5 lines)
@ -0,0 +1,5 @@
---
title: Rendering Loading State of Last Commit earlier
merge_request: 49362
author:
type: performance
@ -0,0 +1,8 @@
---
name: graphql_pipeline_analytics
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/48267
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/290153
milestone: '13.7'
type: development
group: group::continuous integration
default_enabled: false
config/feature_flags/development/jira_sync_builds.yml (new file, 8 lines)
@ -0,0 +1,8 @@
---
name: jira_sync_builds
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/49348
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/292013
milestone: '13.7'
type: development
group: group::ecosystem
default_enabled: false
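Both new flags are development flags that default to off. As a hedged illustration (the console usage and project path are assumptions, not part of this commit), they would typically be enabled like this while testing:

```ruby
# Hypothetical Rails console session.
project = Project.find_by_full_path('group/project')

# Jira build sync is checked against the pipeline's project, so it can be
# scoped to a single project first.
Feature.enable(:jira_sync_builds, project)

# The CI/CD analytics view checks the flag without an actor, so enable it
# globally to see the GraphQL-backed charts.
Feature.enable(:graphql_pipeline_analytics)

# Roll back at any time.
Feature.disable(:jira_sync_builds, project)
```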
@ -132,7 +132,8 @@ Example response:
|
|||
|
||||
The Group Audit Events API allows you to retrieve [group audit events](../administration/audit_events.md#group-events).
|
||||
|
||||
To retrieve group audit events using the API, you must [authenticate yourself](README.md#authentication) as an Administrator or an owner of the group.
|
||||
A user with an Owner role (or above) can retrieve group audit events of all users.
|
||||
A user with a Developer or Maintainer role is limited to group audit events based on their individual actions.
|
||||
|
||||
### Retrieve all group audit events
|
||||
|
||||
|
@ -238,7 +239,8 @@ Example response:
|
|||
|
||||
The Project Audit Events API allows you to retrieve [project audit events](../administration/audit_events.md#project-events).
|
||||
|
||||
To retrieve project audit events using the API, you must [authenticate yourself](README.md#authentication) as a Maintainer or an Owner of the project.
|
||||
A user with a Maintainer role (or above) can retrieve project audit events of all users.
|
||||
A user with a Developer role is limited to project audit events based on their individual actions.
|
||||
|
||||
### Retrieve all project audit events
|
||||
|
||||
|
|
|
@ -160,6 +160,7 @@ See [database guidelines](database/index.md).
|
|||
- [Security Scanners](integrations/secure.md)
|
||||
- [Secure Partner Integration](integrations/secure_partner_integration.md)
|
||||
- [How to run Jenkins in development environment](integrations/jenkins.md)
|
||||
- [How to run local Codesandbox integration for Web IDE Live Preview](integrations/codesandbox.md)
|
||||
|
||||
## Testing guides
|
||||
|
||||
|
|
|
@ -1811,9 +1811,9 @@ Tier badges are displayed as orange text next to a heading. For example:
|
|||
You must assign a tier badge:
|
||||
|
||||
- To [all H1 topic headings](#product-tier-badges-on-headings).
|
||||
- To all H2 or higher topic headings that apply to a tier other than Core.
|
||||
- To topic headings that don't apply to the same tier as the H1.
|
||||
- To [sections of a topic](#product-tier-badges-on-other-content),
|
||||
if they apply to a tier other than Core.
|
||||
if they apply to a tier other than what applies to the H1.
|
||||
|
||||
#### Product tier badges on headings
|
||||
|
||||
|
|
doc/development/integrations/codesandbox.md (new file, 140 lines)
@ -0,0 +1,140 @@
|
|||
# Set up local Codesandbox development environment
|
||||
|
||||
This guide walks through setting up a local [Codesandbox repository](https://github.com/codesandbox/codesandbox-client) and integrating it with a local GitLab instance. Codesandbox
|
||||
is used to power the Web IDE's [Live Preview feature](../../user/project/web_ide/index.md#live-preview). Having a local Codesandbox setup is useful for debugging upstream issues or
|
||||
creating upstream contributions like [this one](https://github.com/codesandbox/codesandbox-client/pull/5137).
|
||||
|
||||
## Initial setup
|
||||
|
||||
Before using Codesandbox with your local GitLab instance, you must:
|
||||
|
||||
1. Enable HTTPS on your GDK. Codesandbox uses Service Workers that require `https`.
|
||||
Follow the GDK [NGINX configuration instructions](https://gitlab.com/gitlab-org/gitlab-development-kit/-/blob/master/doc/howto/nginx.md) to enable HTTPS for GDK.
|
||||
1. Clone the [`codesandbox-client` project](https://github.com/codesandbox/codesandbox-client)
|
||||
locally. If you plan on contributing upstream, you might want to fork and clone first.
|
||||
1. (Optional) Use correct `python` and `nodejs` versions. Otherwise, `yarn` may fail to
|
||||
install or build some packages. If you're using `asdf` you can run the following commands:
|
||||
|
||||
```shell
|
||||
asdf local nodejs 10.14.2
|
||||
asdf local python 2.7.18
|
||||
```
|
||||
|
||||
1. Run the following commands in the `codesandbox-client` project checkout:
|
||||
|
||||
```shell
|
||||
# This might be necessary for the `prepublishOnly` job that is run later
|
||||
yarn global add lerna
|
||||
|
||||
# Install packages
|
||||
yarn
|
||||
```
|
||||
|
||||
You can run `yarn build:clean` to clean up the build assets.
|
||||
|
||||
## Use local GitLab instance with local Codesandbox
|
||||
|
||||
GitLab integrates with two parts of Codesandbox:
|
||||
|
||||
- An NPM package called `smooshpack` (called `sandpack` in the `codesandbox-client` project).
|
||||
This exposes an entrypoint for us to kick off Codesandbox's bundler.
|
||||
- A server that houses Codesandbox assets for bundling and previewing. This is hosted
|
||||
on a separate server for security.
|
||||
|
||||
Each time you want to run GitLab and Codesandbox together, you need to perform the
|
||||
steps in the following sections.
|
||||
|
||||
### Use local `smooshpack` for GitLab
|
||||
|
||||
GitLab usually satisfies its `smooshpack` dependency with a remote module, but we want
|
||||
to use a locally-built module. To build and use a local `smooshpack` module:
|
||||
|
||||
1. In the `codesandbox-client` project directory, run:
|
||||
|
||||
```shell
|
||||
cd standalone-packages/sandpack
|
||||
yarn link
|
||||
|
||||
# (Optional) you might want to start a development build
|
||||
yarn run start
|
||||
```
|
||||
|
||||
Now, in the GitLab project, you can run `yarn link "smooshpack"`. `yarn` looks
|
||||
for `smooshpack` **on disk** as opposed to the one hosted remotely.
|
||||
|
||||
1. In the `gitlab` project directory, run:
|
||||
|
||||
```shell
|
||||
# Remove and reinstall node_modules just to be safe
|
||||
rm -rf node_modules
|
||||
yarn install
|
||||
|
||||
# Use the "smooshpack" package on disk
|
||||
yarn link "smooshpack"
|
||||
```
|
||||
|
||||
### Fix possible GDK webpack problem
|
||||
|
||||
`webpack` in GDK can fail to find packages inside a linked package. This step can help
|
||||
you avoid `webpack` breaking with messages saying that it can't resolve packages from
|
||||
`smooshpack/dist/sandpack.es5.js`.
|
||||
|
||||
In the `codesandbox-client` project directory, run:
|
||||
|
||||
```shell
|
||||
cd standalone-packages
|
||||
|
||||
mkdir node_modules
|
||||
ln -s $PATH_TO_LOCAL_GITLAB/node_modules/core-js ./node_modules/core-js
|
||||
```
|
||||
|
||||
### Start building codesandbox app assets
|
||||
|
||||
In the `codesandbox-client` project directory:
|
||||
|
||||
```shell
|
||||
cd packages/app
|
||||
|
||||
yarn start:sandpack-sandbox
|
||||
```
|
||||
|
||||
### Create HTTPS proxy for Codesandbox `sandpack` assets
|
||||
|
||||
Because we need `https`, we need to create a proxy to the webpack server. We can use
|
||||
[`http-server`](https://www.npmjs.com/package/http-server), which can do this proxying
|
||||
out of the box:
|
||||
|
||||
```shell
|
||||
npx http-server --proxy http://localhost:3000 -S -C $PATH_TO_CERT_PEM -K $PATH_TO_KEY_PEM -p 8044 -d false
|
||||
```
|
||||
|
||||
### Update `bundler_url` setting in GitLab
|
||||
|
||||
We need to update our `application_setting_implementation.rb` to point to the server that hosts the
|
||||
Codesandbox `sandpack` assets. For instance, if these assets are hosted by a server at `https://sandpack.local:8044`:
|
||||
|
||||
```patch
|
||||
diff --git a/app/models/application_setting_implementation.rb b/app/models/application_setting_implementation.rb
|
||||
index 6eed627b502..1824669e881 100644
|
||||
--- a/app/models/application_setting_implementation.rb
|
||||
+++ b/app/models/application_setting_implementation.rb
|
||||
@@ -391,7 +391,7 @@ def static_objects_external_storage_enabled?
|
||||
# This will eventually be configurable
|
||||
# https://gitlab.com/gitlab-org/gitlab/issues/208161
|
||||
def web_ide_clientside_preview_bundler_url
|
||||
- 'https://sandbox-prod.gitlab-static.net'
|
||||
+ 'https://sandpack.local:8044'
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
```
|
||||
|
||||
NOTE:
|
||||
You can apply this patch by copying it to your clipboard and running `pbpaste | git apply`.
|
||||
|
||||
You might want to restart the GitLab Rails server after making this change:
|
||||
|
||||
```shell
|
||||
gdk restart rails-web
|
||||
```
|
|
@ -154,6 +154,17 @@ Below is a list of supported `data-track-*` attributes:
|
|||
| `data-track-value` | false | The `value` as described in our [Structured event taxonomy](#structured-event-taxonomy). If omitted, this is the element's `value` property or an empty string. For checkboxes, the default value is the element's checked attribute or `false` when unchecked. |
|
||||
| `data-track-context` | false | The `context` as described in our [Structured event taxonomy](#structured-event-taxonomy). |
|
||||
|
||||
#### Caveats
|
||||
|
||||
When using the GitLab helper method [`nav_link`](https://gitlab.com/gitlab-org/gitlab/-/blob/898b286de322e5df6a38d257b10c94974d580df8/app/helpers/tab_helper.rb#L69), be sure to wrap `html_options` under the `html_options` keyword argument.
|
||||
Be careful not to confuse this with the `ActionView` helper method [`link_to`](https://api.rubyonrails.org/v5.2.3/classes/ActionView/Helpers/UrlHelper.html#method-i-link_to), which does not require additional wrapping of `html_options`.
|
||||
|
||||
`nav_link(controller: ['dashboard/groups', 'explore/groups'], html_options: { data: { track_label: "groups_dropdown", track_event: "click_dropdown" } })`
|
||||
|
||||
vs
|
||||
|
||||
`link_to assigned_issues_dashboard_path, title: _('Issues'), data: { track_label: 'main_navigation', track_event: 'click_issues_link' }`
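For reference, a minimal sketch showing the two calls side by side (taken directly from the examples above), with comments marking where the tracking attributes must live:

```ruby
# `nav_link` is a GitLab helper: the tracking data attributes must be nested
# under the `html_options:` keyword argument.
nav_link(controller: ['dashboard/groups', 'explore/groups'],
         html_options: { data: { track_label: "groups_dropdown", track_event: "click_dropdown" } })

# `link_to` is a Rails ActionView helper: the options hash is passed directly,
# so no extra `html_options` wrapping is needed.
link_to assigned_issues_dashboard_path,
        title: _('Issues'),
        data: { track_label: 'main_navigation', track_event: 'click_issues_link' }
```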
|
||||
|
||||
### Tracking within Vue components
|
||||
|
||||
There's a tracking Vue mixin that can be used in components if more complex tracking is required. To use it, first import the `Tracking` library and request a mixin.
|
||||
|
|
|
@ -351,9 +351,8 @@ Implemented using Redis methods [PFADD](https://redis.io/commands/pfadd) and [PF
|
|||
be `{i_compliance_credential_inventory}-2020-34`.
|
||||
- `expiry`: expiry time in days. Default: 29 days for daily aggregation and 6 weeks for weekly
|
||||
aggregation.
|
||||
- `aggregation`: aggregation `:daily` or `:weekly`. The argument defines how we build the Redis
|
||||
keys for data storage. For `daily` we keep a key for metric per day of the year, for `weekly` we
|
||||
keep a key for metric per week of the year.
|
||||
- `aggregation`: may be set to a `:daily` or `:weekly` key. Defines how counting data is stored in Redis.
|
||||
Aggregation on a `daily` basis does not pull more fine-grained data.
|
||||
- `feature_flag`: optional. For details, see our [GitLab internal Feature flags](../feature_flags/) documentation.
|
||||
|
||||
1. Track the event in a controller using the `RedisTracking` module with `track_redis_hll_event(*controller_actions, name:, feature:, feature_default_enabled: false)`, as in the sketch below.
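A minimal controller sketch of this step, using the `track_redis_hll_event` signature above. The controller class and feature flag name are hypothetical; the event name reuses the `i_compliance_credential_inventory` example from earlier in this section:

```ruby
class Projects::ComplianceController < Projects::ApplicationController
  include RedisTracking

  # Records a unique-user hit for the event whenever the `index` or `show`
  # action is rendered, guarded by the (hypothetical) feature flag.
  track_redis_hll_event :index, :show,
    name: 'i_compliance_credential_inventory',
    feature: :track_compliance_credential_inventory,
    feature_default_enabled: true

  def index
    # regular controller code
  end

  def show
    # regular controller code
  end
end
```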
|
||||
|
|
|
@ -33,20 +33,19 @@ CI/CD templates, which you can use to get started, are in [this repo](https://gi
|
|||
|
||||
Learn more about using CI/CD to build:
|
||||
|
||||
- [Maven packages](../maven_repository/index.md#create-maven-packages-with-gitlab-cicd)
|
||||
- [NPM packages](../npm_registry/index.md#publish-an-npm-package-by-using-cicd)
|
||||
- [Composer packages](../composer_repository/index.md#publish-a-composer-package-by-using-cicd)
|
||||
- [NuGet packages](../nuget_repository/index.md#publish-a-nuget-package-by-using-cicd)
|
||||
- [Conan packages](../conan_repository/index.md#publish-a-conan-package-by-using-cicd)
|
||||
- [Generic packages](../generic_packages/index.md#publish-a-generic-package-by-using-cicd)
|
||||
- [Maven packages](../maven_repository/index.md#create-maven-packages-with-gitlab-cicd)
|
||||
- [NPM packages](../npm_registry/index.md#publish-an-npm-package-by-using-cicd)
|
||||
- [NuGet packages](../nuget_repository/index.md#publish-a-nuget-package-by-using-cicd)
|
||||
|
||||
If you use CI/CD to build a package, extended activity information is displayed
|
||||
when you view the package details:
|
||||
|
||||
![Package CI/CD activity](img/package_activity_v12_10.png)
|
||||
|
||||
When using Maven and NPM, you can view which pipeline published the package, and
|
||||
the commit and user who triggered it.
|
||||
You can view which pipeline published the package, and the commit and user who triggered it. However, the history is limited to five updates of a given package.
|
||||
|
||||
## Download a package
|
||||
|
||||
|
|
Binary file not shown.
Before Width: | Height: | Size: 14 KiB |
BIN
doc/user/project/img/protected_branches_deploy_keys_v13_5.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 45 KiB |
|
@ -394,19 +394,6 @@ status.
|
|||
If you're not able to do some of the things above, make sure you have the right
|
||||
[permissions](#permissions).
|
||||
|
||||
### First time using an issue board
|
||||
|
||||
> The automatic creation of the **To Do** and **Doing** lists was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/202144) in GitLab 13.5.
|
||||
|
||||
The first time you open an issue board, you are presented with the default lists
|
||||
(**Open**, **To Do**, **Doing**, and **Closed**).
|
||||
|
||||
If the **To Do** and **Doing** labels don't exist in the project or group, they are created, and
|
||||
their lists appear as empty. If either of them already exists, the list is filled with the issues that
|
||||
have that label.
|
||||
|
||||
![issue board default lists](img/issue_board_default_lists_v13_4.png)
|
||||
|
||||
### Create a new list
|
||||
|
||||
Create a new list by clicking the **Add list** dropdown button in the upper right corner of the issue board.
|
||||
|
@ -566,6 +553,22 @@ To select and move multiple cards:
|
|||
|
||||
![Multi-select Issue Cards](img/issue_boards_multi_select_v12_4.png)
|
||||
|
||||
### First time using an issue board
|
||||
|
||||
> - The automatic creation of the **To Do** and **Doing** lists [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/202144) in GitLab 13.5.
|
||||
> - [Deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/270583) in GitLab 13.7. In GitLab 13.7 and later, the **To Do** and **Doing** columns are not automatically created.
|
||||
|
||||
WARNING:
|
||||
This feature was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/270583) in GitLab 13.7.
|
||||
The **To Do** and **Doing** columns are no longer automatically created.
|
||||
|
||||
In GitLab 13.5 and 13.6, the first time you open an issue board, you are presented with the default lists
|
||||
(**Open**, **To Do**, **Doing**, and **Closed**).
|
||||
|
||||
If the **To Do** and **Doing** labels don't exist in the project or group, they are created, and
|
||||
their lists appear as empty. If either of them already exists, the list is filled with the issues that
|
||||
have that label.
|
||||
|
||||
## Tips
|
||||
|
||||
A few things to remember:
|
||||
|
|
|
@ -74,6 +74,33 @@ dropdown list in the "Already protected" area.
|
|||
If you don't choose any of those options while creating a protected branch,
|
||||
they are set to "Maintainers" by default.
|
||||
|
||||
### Allow Deploy Keys to push to a protected branch
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/30769) in GitLab 13.7.
|
||||
> - This feature is being selectively deployed in GitLab.com 13.7, and may not be available for all users.
|
||||
|
||||
You can allow specific machines to access protected branches in your repository with
|
||||
[Deploy Keys](deploy_keys/index.md). This can be useful for your CI/CD workflow,
|
||||
for example.
|
||||
|
||||
Deploy keys can be selected in the **Allowed to push** dropdown when:
|
||||
|
||||
- Defining a protected branch.
|
||||
- Updating an existing branch.
|
||||
|
||||
Select a deploy key to allow the owner of the key to push to the chosen protected branch,
|
||||
even if they aren't a member of the related project. The owner of the selected deploy
|
||||
key must have at least read access to the given project.
|
||||
|
||||
For a deploy key to be selectable:
|
||||
|
||||
- It must be [enabled for your project](deploy_keys/index.md#how-to-enable-deploy-keys).
|
||||
- It must have [write access](deploy_keys/index.md#deploy-keys-permissions) to your project repository.
|
||||
|
||||
Deploy Keys are not available in the **Allowed to merge** dropdown.
|
||||
|
||||
![Deploy Keys on protected branches](img/protected_branches_deploy_keys_v13_5.png)
|
||||
|
||||
## Restricting push and merge access to certain users **(STARTER)**
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/5081) in [GitLab Starter](https://about.gitlab.com/pricing/) 8.11.
|
||||
|
@ -197,6 +224,10 @@ for details about the pipelines security model.
|
|||
|
||||
## Changelog
|
||||
|
||||
**13.5**
|
||||
|
||||
- [Allow Deploy keys to push to protected branches once more](https://gitlab.com/gitlab-org/gitlab/-/issues/30769).
|
||||
|
||||
**11.9**
|
||||
|
||||
- [Allow protected branches to be created](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/53361) by Developers (and users with higher permission levels) through the API and the user interface.
|
||||
|
|
|
@ -12,32 +12,69 @@ module Atlassian
|
|||
@shared_secret = shared_secret
|
||||
end
|
||||
|
||||
def store_dev_info(project:, commits: nil, branches: nil, merge_requests: nil, update_sequence_id: nil)
|
||||
dev_info_json = {
|
||||
repositories: [
|
||||
Serializers::RepositoryEntity.represent(
|
||||
project,
|
||||
commits: commits,
|
||||
branches: branches,
|
||||
merge_requests: merge_requests,
|
||||
user_notes_count: user_notes_count(merge_requests),
|
||||
update_sequence_id: update_sequence_id
|
||||
)
|
||||
]
|
||||
}.to_json
|
||||
def send_info(project:, update_sequence_id: nil, **args)
|
||||
common = { project: project, update_sequence_id: update_sequence_id }
|
||||
dev_info = args.slice(:commits, :branches, :merge_requests)
|
||||
build_info = args.slice(:pipelines)
|
||||
|
||||
uri = URI.join(@base_uri, '/rest/devinfo/0.10/bulk')
|
||||
responses = []
|
||||
|
||||
headers = {
|
||||
'Authorization' => "JWT #{jwt_token('POST', uri)}",
|
||||
'Content-Type' => 'application/json'
|
||||
}
|
||||
responses << store_dev_info(**common, **dev_info) if dev_info.present?
|
||||
responses << store_build_info(**common, **build_info) if build_info.present?
|
||||
raise ArgumentError, 'Invalid arguments' if responses.empty?
|
||||
|
||||
self.class.post(uri, headers: headers, body: dev_info_json)
|
||||
responses.compact
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def store_build_info(project:, pipelines:, update_sequence_id: nil)
|
||||
return unless Feature.enabled?(:jira_sync_builds, project)
|
||||
|
||||
builds = pipelines.map do |pipeline|
|
||||
build = Serializers::BuildEntity.represent(
|
||||
pipeline,
|
||||
update_sequence_id: update_sequence_id
|
||||
)
|
||||
next if build.issue_keys.empty?
|
||||
|
||||
build
|
||||
end.compact
|
||||
return if builds.empty?
|
||||
|
||||
post('/rest/builds/0.1/bulk', { builds: builds })
|
||||
end
|
||||
|
||||
def store_dev_info(project:, commits: nil, branches: nil, merge_requests: nil, update_sequence_id: nil)
|
||||
repo = Serializers::RepositoryEntity.represent(
|
||||
project,
|
||||
commits: commits,
|
||||
branches: branches,
|
||||
merge_requests: merge_requests,
|
||||
user_notes_count: user_notes_count(merge_requests),
|
||||
update_sequence_id: update_sequence_id
|
||||
)
|
||||
|
||||
post('/rest/devinfo/0.10/bulk', { repositories: [repo] })
|
||||
end
|
||||
|
||||
def post(path, payload)
|
||||
uri = URI.join(@base_uri, path)
|
||||
|
||||
self.class.post(uri, headers: headers(uri), body: metadata.merge(payload).to_json)
|
||||
end
|
||||
|
||||
def headers(uri)
|
||||
{
|
||||
'Authorization' => "JWT #{jwt_token('POST', uri)}",
|
||||
'Content-Type' => 'application/json'
|
||||
}
|
||||
end
|
||||
|
||||
def metadata
|
||||
{ providerMetadata: { product: "GitLab #{Gitlab::VERSION}" } }
|
||||
end
|
||||
|
||||
def user_notes_count(merge_requests)
|
||||
return unless merge_requests
|
||||
|
||||
|
|
|
@ -11,6 +11,12 @@ module Atlassian
|
|||
|
||||
expose :update_sequence_id, as: :updateSequenceId
|
||||
|
||||
def eql(other)
|
||||
other.is_a?(self.class) && to_json == other.to_json
|
||||
end
|
||||
|
||||
alias_method :==, :eql
|
||||
|
||||
private
|
||||
|
||||
def update_sequence_id
|
||||
|
|
94
lib/atlassian/jira_connect/serializers/build_entity.rb
Normal file
|
@ -0,0 +1,94 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Atlassian
|
||||
module JiraConnect
|
||||
module Serializers
|
||||
# A Jira 'build' represents what we call a 'pipeline'
|
||||
class BuildEntity < Grape::Entity
|
||||
include Gitlab::Routing
|
||||
|
||||
format_with(:iso8601, &:iso8601)
|
||||
|
||||
expose :schema_version, as: :schemaVersion
|
||||
expose :pipeline_id, as: :pipelineId
|
||||
expose :iid, as: :buildNumber
|
||||
expose :update_sequence_id, as: :updateSequenceNumber
|
||||
expose :source_ref, as: :displayName
|
||||
expose :url
|
||||
expose :state
|
||||
expose :updated_at, as: :lastUpdated, format_with: :iso8601
|
||||
expose :issue_keys, as: :issueKeys
|
||||
expose :test_info, as: :testInfo
|
||||
expose :references
|
||||
|
||||
def issue_keys
|
||||
# extract Jira issue keys from either the source branch/ref or the
|
||||
# merge request title.
|
||||
@issue_keys ||= begin
|
||||
src = "#{pipeline.source_ref} #{pipeline.merge_request&.title}"
|
||||
JiraIssueKeyExtractor.new(src).issue_keys
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
alias_method :pipeline, :object
|
||||
delegate :project, to: :object
|
||||
|
||||
def url
|
||||
project_pipeline_url(project, pipeline)
|
||||
end
|
||||
|
||||
# translate to Jira status
|
||||
def state
|
||||
case pipeline.status
|
||||
when 'scheduled', 'created', 'pending', 'preparing', 'waiting_for_resource' then 'pending'
|
||||
when 'running' then 'in_progress'
|
||||
when 'success' then 'successful'
|
||||
when 'failed' then 'failed'
|
||||
when 'canceled', 'skipped' then 'cancelled'
|
||||
else
|
||||
'unknown'
|
||||
end
|
||||
end
|
||||
|
||||
def pipeline_id
|
||||
pipeline.ensure_ci_ref!
|
||||
|
||||
pipeline.ci_ref.id.to_s
|
||||
end
|
||||
|
||||
def schema_version
|
||||
'1.0'
|
||||
end
|
||||
|
||||
def test_info
|
||||
builds = pipeline.builds.pluck(:status) # rubocop: disable CodeReuse/ActiveRecord
|
||||
n = builds.size
|
||||
passed = builds.count { |s| s == 'success' }
|
||||
failed = builds.count { |s| s == 'failed' }
|
||||
|
||||
{
|
||||
totalNumber: n,
|
||||
numberPassed: passed,
|
||||
numberFailed: failed,
|
||||
numberSkipped: n - (passed + failed)
|
||||
}
|
||||
end
|
||||
|
||||
def references
|
||||
ref = pipeline.source_ref
|
||||
|
||||
[{
|
||||
commit: { id: pipeline.sha, repositoryUri: project_url(project) },
|
||||
ref: { name: ref, uri: project_commits_url(project, ref) }
|
||||
}]
|
||||
end
|
||||
|
||||
def update_sequence_id
|
||||
options[:update_sequence_id] || Client.generate_update_sequence_id
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -20104,6 +20104,15 @@ msgstr ""
|
|||
msgid "Pipeline: %{status}"
|
||||
msgstr ""
|
||||
|
||||
msgid "PipelineCharts|An error has ocurred when retrieving the analytics data"
|
||||
msgstr ""
|
||||
|
||||
msgid "PipelineCharts|An error has ocurred when retrieving the pipelines data"
|
||||
msgstr ""
|
||||
|
||||
msgid "PipelineCharts|An unknown error occurred while processing CI/CD analytics."
|
||||
msgstr ""
|
||||
|
||||
msgid "PipelineCharts|CI / CD Analytics"
|
||||
msgstr ""
|
||||
|
||||
|
@ -20119,6 +20128,9 @@ msgstr ""
|
|||
msgid "PipelineCharts|Successful:"
|
||||
msgstr ""
|
||||
|
||||
msgid "PipelineCharts|There was an error parsing the data for the charts."
|
||||
msgstr ""
|
||||
|
||||
msgid "PipelineCharts|Total duration:"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
@ -24,6 +24,14 @@ FactoryBot.define do
|
|||
trait :with_diffs do
|
||||
end
|
||||
|
||||
trait :jira_title do
|
||||
title { generate(:jira_title) }
|
||||
end
|
||||
|
||||
trait :jira_branch do
|
||||
source_branch { generate(:jira_branch) }
|
||||
end
|
||||
|
||||
trait :with_image_diffs do
|
||||
source_branch { "add_images_and_changes" }
|
||||
target_branch { "master" }
|
||||
|
|
|
@ -15,4 +15,6 @@ FactoryBot.define do
|
|||
sequence(:sha) { |n| Digest::SHA1.hexdigest("commit-like-#{n}") }
|
||||
sequence(:oid) { |n| Digest::SHA2.hexdigest("oid-like-#{n}") }
|
||||
sequence(:variable) { |n| "var#{n}" }
|
||||
sequence(:jira_title) { |n| "[PROJ-#{n}]: fix bug" }
|
||||
sequence(:jira_branch) { |n| "feature/PROJ-#{n}" }
|
||||
end
|
||||
|
|
|
@ -27,11 +27,11 @@ RSpec.describe 'Issue Boards', :js do
|
|||
end
|
||||
|
||||
it 'creates default lists' do
|
||||
lists = ['Open', 'To Do', 'Doing', 'Closed']
|
||||
lists = %w[Open Closed]
|
||||
|
||||
wait_for_requests
|
||||
|
||||
expect(page).to have_selector('.board', count: 4)
|
||||
expect(page).to have_selector('.board', count: 2)
|
||||
|
||||
page.all('.board').each_with_index do |list, i|
|
||||
expect(list.find('.board-title')).to have_content(lists[i])
|
||||
|
|
37
spec/frontend/admin/users/components/app_spec.js
Normal file
|
@ -0,0 +1,37 @@
|
|||
import { shallowMount } from '@vue/test-utils';
|
||||
|
||||
import AdminUsersApp from '~/admin/users/components/app.vue';
|
||||
import AdminUsersTable from '~/admin/users/components/users_table.vue';
|
||||
import { users, paths } from '../mock_data';
|
||||
|
||||
describe('AdminUsersApp component', () => {
|
||||
let wrapper;
|
||||
|
||||
const initComponent = (props = {}) => {
|
||||
wrapper = shallowMount(AdminUsersApp, {
|
||||
propsData: {
|
||||
users,
|
||||
paths,
|
||||
...props,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
afterEach(() => {
|
||||
wrapper.destroy();
|
||||
wrapper = null;
|
||||
});
|
||||
|
||||
describe('when initialized', () => {
|
||||
beforeEach(() => {
|
||||
initComponent();
|
||||
});
|
||||
|
||||
it('renders the admin users table with props', () => {
|
||||
expect(wrapper.find(AdminUsersTable).props()).toEqual({
|
||||
users,
|
||||
paths,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
61
spec/frontend/admin/users/components/users_table_spec.js
Normal file
|
@ -0,0 +1,61 @@
|
|||
import { GlTable } from '@gitlab/ui';
|
||||
import { mount } from '@vue/test-utils';
|
||||
|
||||
import AdminUsersTable from '~/admin/users/components/users_table.vue';
|
||||
import { users, paths } from '../mock_data';
|
||||
|
||||
describe('AdminUsersTable component', () => {
|
||||
let wrapper;
|
||||
|
||||
const getCellByLabel = (trIdx, label) => {
|
||||
return wrapper
|
||||
.find(GlTable)
|
||||
.find('tbody')
|
||||
.findAll('tr')
|
||||
.at(trIdx)
|
||||
.find(`[data-label="${label}"][role="cell"]`);
|
||||
};
|
||||
|
||||
const initComponent = (props = {}) => {
|
||||
wrapper = mount(AdminUsersTable, {
|
||||
propsData: {
|
||||
users,
|
||||
paths,
|
||||
...props,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
afterEach(() => {
|
||||
wrapper.destroy();
|
||||
wrapper = null;
|
||||
});
|
||||
|
||||
describe('when there are users', () => {
|
||||
const user = users[0];
|
||||
|
||||
beforeEach(() => {
|
||||
initComponent();
|
||||
});
|
||||
|
||||
it.each`
|
||||
key | label
|
||||
${'name'} | ${'Name'}
|
||||
${'projectsCount'} | ${'Projects'}
|
||||
${'createdAt'} | ${'Created on'}
|
||||
${'lastActivityOn'} | ${'Last activity'}
|
||||
`('renders users.$key for $label', ({ key, label }) => {
|
||||
expect(getCellByLabel(0, label).text()).toBe(`${user[key]}`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when users is an empty array', () => {
|
||||
beforeEach(() => {
|
||||
initComponent({ users: [] });
|
||||
});
|
||||
|
||||
it('renders a "No users found" message', () => {
|
||||
expect(wrapper.text()).toContain('No users found');
|
||||
});
|
||||
});
|
||||
});
|
|
@ -5,7 +5,7 @@ export const users = [
|
|||
createdAt: '2020-11-13T12:26:54.177Z',
|
||||
email: 'nikki@example.com',
|
||||
username: 'nikki',
|
||||
lastActivityOn: null,
|
||||
lastActivityOn: '2020-12-09',
|
||||
avatarUrl:
|
||||
'https://secure.gravatar.com/avatar/054f062d8b1a42b123f17e13a173cda8?s=80\\u0026d=identicon',
|
||||
badges: [],
|
||||
|
|
|
@ -66,23 +66,6 @@ describe('boardsStore', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('generateDefaultLists', () => {
|
||||
const listsEndpointGenerate = `${endpoints.listsEndpoint}/generate.json`;
|
||||
|
||||
it('makes a request to generate default lists', () => {
|
||||
axiosMock.onPost(listsEndpointGenerate).replyOnce(200, dummyResponse);
|
||||
const expectedResponse = expect.objectContaining({ data: dummyResponse });
|
||||
|
||||
return expect(boardsStore.generateDefaultLists()).resolves.toEqual(expectedResponse);
|
||||
});
|
||||
|
||||
it('fails for error response', () => {
|
||||
axiosMock.onPost(listsEndpointGenerate).replyOnce(500);
|
||||
|
||||
return expect(boardsStore.generateDefaultLists()).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('createList', () => {
|
||||
const entityType = 'moorhen';
|
||||
const entityId = 'quack';
|
||||
|
@ -727,24 +710,6 @@ describe('boardsStore', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('check for blank state adding', () => {
|
||||
expect(boardsStore.shouldAddBlankState()).toBe(true);
|
||||
});
|
||||
|
||||
it('check for blank state not adding', () => {
|
||||
boardsStore.addList(listObj);
|
||||
|
||||
expect(boardsStore.shouldAddBlankState()).toBe(false);
|
||||
});
|
||||
|
||||
it('check for blank state adding when closed list exist', () => {
|
||||
boardsStore.addList({
|
||||
list_type: 'closed',
|
||||
});
|
||||
|
||||
expect(boardsStore.shouldAddBlankState()).toBe(true);
|
||||
});
|
||||
|
||||
it('removes list from state', () => {
|
||||
boardsStore.addList(listObj);
|
||||
|
||||
|
|
|
@ -79,7 +79,7 @@ describe('Board List Header Component', () => {
|
|||
const findCaret = () => wrapper.find('.board-title-caret');
|
||||
|
||||
describe('Add issue button', () => {
|
||||
const hasNoAddButton = [ListType.blank, ListType.closed];
|
||||
const hasNoAddButton = [ListType.closed];
|
||||
const hasAddButton = [ListType.backlog, ListType.label, ListType.milestone, ListType.assignee];
|
||||
|
||||
it.each(hasNoAddButton)('does not render when List Type is `%s`', listType => {
|
||||
|
|
|
@ -73,7 +73,7 @@ describe('Board List Header Component', () => {
|
|||
const findCaret = () => wrapper.find('.board-title-caret');
|
||||
|
||||
describe('Add issue button', () => {
|
||||
const hasNoAddButton = [ListType.blank, ListType.closed];
|
||||
const hasNoAddButton = [ListType.closed];
|
||||
const hasAddButton = [ListType.backlog, ListType.label, ListType.milestone, ListType.assignee];
|
||||
|
||||
it.each(hasNoAddButton)('does not render when List Type is `%s`', listType => {
|
||||
|
|
|
@ -123,7 +123,7 @@ describe('fetchLists', () => {
|
|||
payload: formattedLists,
|
||||
},
|
||||
],
|
||||
[{ type: 'generateDefaultLists' }],
|
||||
[],
|
||||
done,
|
||||
);
|
||||
});
|
||||
|
@ -153,37 +153,12 @@ describe('fetchLists', () => {
|
|||
payload: formattedLists,
|
||||
},
|
||||
],
|
||||
[{ type: 'createList', payload: { backlog: true } }, { type: 'generateDefaultLists' }],
|
||||
[{ type: 'createList', payload: { backlog: true } }],
|
||||
done,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateDefaultLists', () => {
|
||||
let store;
|
||||
beforeEach(() => {
|
||||
const state = {
|
||||
endpoints: { fullPath: 'gitlab-org', boardId: '1' },
|
||||
boardType: 'group',
|
||||
disabled: false,
|
||||
boardLists: [{ type: 'backlog' }, { type: 'closed' }],
|
||||
};
|
||||
|
||||
store = {
|
||||
commit: jest.fn(),
|
||||
dispatch: jest.fn(() => Promise.resolve()),
|
||||
state,
|
||||
};
|
||||
});
|
||||
|
||||
it('should dispatch fetchLabels', () => {
|
||||
return actions.generateDefaultLists(store).then(() => {
|
||||
expect(store.dispatch.mock.calls[0]).toEqual(['fetchLabels', 'to do']);
|
||||
expect(store.dispatch.mock.calls[1]).toEqual(['fetchLabels', 'doing']);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('createList', () => {
|
||||
it('should dispatch addList action when creating backlog list', done => {
|
||||
const backlogList = {
|
||||
|
|
|
@ -33,19 +33,16 @@ describe('Board Store Mutations', () => {
|
|||
};
|
||||
const boardType = 'group';
|
||||
const disabled = false;
|
||||
const showPromotion = false;
|
||||
|
||||
mutations[types.SET_INITIAL_BOARD_DATA](state, {
|
||||
...endpoints,
|
||||
boardType,
|
||||
disabled,
|
||||
showPromotion,
|
||||
});
|
||||
|
||||
expect(state.endpoints).toEqual(endpoints);
|
||||
expect(state.boardType).toEqual(boardType);
|
||||
expect(state.disabled).toEqual(disabled);
|
||||
expect(state.showPromotion).toEqual(showPromotion);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`StatisticsList matches the snapshot 1`] = `
|
||||
exports[`StatisticsList displays the counts data with labels 1`] = `
|
||||
<ul>
|
||||
<li>
|
||||
<span>
|
||||
|
@ -35,7 +35,7 @@ exports[`StatisticsList matches the snapshot 1`] = `
|
|||
</span>
|
||||
|
||||
<strong>
|
||||
50%
|
||||
50.00%
|
||||
</strong>
|
||||
</li>
|
||||
<li>
|
||||
|
|
|
@ -0,0 +1,72 @@
|
|||
import { shallowMount } from '@vue/test-utils';
|
||||
import { GlColumnChart } from '@gitlab/ui/dist/charts';
|
||||
import Component from '~/projects/pipelines/charts/components/app_legacy.vue';
|
||||
import StatisticsList from '~/projects/pipelines/charts/components/statistics_list.vue';
|
||||
import PipelinesAreaChart from '~/projects/pipelines/charts/components/pipelines_area_chart.vue';
|
||||
import {
|
||||
counts,
|
||||
timesChartData,
|
||||
areaChartData as lastWeekChartData,
|
||||
areaChartData as lastMonthChartData,
|
||||
lastYearChartData,
|
||||
} from '../mock_data';
|
||||
|
||||
describe('ProjectsPipelinesChartsApp', () => {
|
||||
let wrapper;
|
||||
|
||||
beforeEach(() => {
|
||||
wrapper = shallowMount(Component, {
|
||||
propsData: {
|
||||
counts,
|
||||
timesChartData,
|
||||
lastWeekChartData,
|
||||
lastMonthChartData,
|
||||
lastYearChartData,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
wrapper.destroy();
|
||||
wrapper = null;
|
||||
});
|
||||
|
||||
describe('overall statistics', () => {
|
||||
it('displays the statistics list', () => {
|
||||
const list = wrapper.find(StatisticsList);
|
||||
|
||||
expect(list.exists()).toBeTruthy();
|
||||
expect(list.props('counts')).toBe(counts);
|
||||
});
|
||||
|
||||
it('displays the commit duration chart', () => {
|
||||
const chart = wrapper.find(GlColumnChart);
|
||||
|
||||
expect(chart.exists()).toBeTruthy();
|
||||
expect(chart.props('yAxisTitle')).toBe('Minutes');
|
||||
expect(chart.props('xAxisTitle')).toBe('Commit');
|
||||
expect(chart.props('bars')).toBe(wrapper.vm.timesChartTransformedData);
|
||||
expect(chart.props('option')).toBe(wrapper.vm.$options.timesChartOptions);
|
||||
});
|
||||
});
|
||||
|
||||
describe('pipelines charts', () => {
|
||||
it('displays 3 area charts', () => {
|
||||
expect(wrapper.findAll(PipelinesAreaChart).length).toBe(3);
|
||||
});
|
||||
|
||||
describe('displays individual correctly', () => {
|
||||
it('renders with the correct data', () => {
|
||||
const charts = wrapper.findAll(PipelinesAreaChart);
|
||||
|
||||
for (let i = 0; i < charts.length; i += 1) {
|
||||
const chart = charts.at(i);
|
||||
|
||||
expect(chart.exists()).toBeTruthy();
|
||||
expect(chart.props('chartData')).toBe(wrapper.vm.areaCharts[i].data);
|
||||
expect(chart.text()).toBe(wrapper.vm.areaCharts[i].title);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,29 +1,45 @@
|
|||
import { shallowMount } from '@vue/test-utils';
|
||||
import { createLocalVue, shallowMount } from '@vue/test-utils';
|
||||
import VueApollo from 'vue-apollo';
|
||||
import createMockApollo from 'jest/helpers/mock_apollo_helper';
|
||||
import { GlColumnChart } from '@gitlab/ui/dist/charts';
|
||||
import Component from '~/projects/pipelines/charts/components/app.vue';
|
||||
import StatisticsList from '~/projects/pipelines/charts/components/statistics_list.vue';
|
||||
import PipelinesAreaChart from '~/projects/pipelines/charts/components/pipelines_area_chart.vue';
|
||||
import {
|
||||
counts,
|
||||
timesChartData,
|
||||
areaChartData as lastWeekChartData,
|
||||
areaChartData as lastMonthChartData,
|
||||
lastYearChartData,
|
||||
} from '../mock_data';
|
||||
import getPipelineCountByStatus from '~/projects/pipelines/charts/graphql/queries/get_pipeline_count_by_status.query.graphql';
|
||||
import getProjectPipelineStatistics from '~/projects/pipelines/charts/graphql/queries/get_project_pipeline_statistics.query.graphql';
|
||||
import { mockPipelineCount, mockPipelineStatistics } from '../mock_data';
|
||||
|
||||
const projectPath = 'gitlab-org/gitlab';
|
||||
const localVue = createLocalVue();
|
||||
localVue.use(VueApollo);
|
||||
|
||||
describe('ProjectsPipelinesChartsApp', () => {
|
||||
let wrapper;
|
||||
|
||||
beforeEach(() => {
|
||||
wrapper = shallowMount(Component, {
|
||||
propsData: {
|
||||
counts,
|
||||
timesChartData,
|
||||
lastWeekChartData,
|
||||
lastMonthChartData,
|
||||
lastYearChartData,
|
||||
function createMockApolloProvider() {
|
||||
const requestHandlers = [
|
||||
[getPipelineCountByStatus, jest.fn().mockResolvedValue(mockPipelineCount)],
|
||||
[getProjectPipelineStatistics, jest.fn().mockResolvedValue(mockPipelineStatistics)],
|
||||
];
|
||||
|
||||
return createMockApollo(requestHandlers);
|
||||
}
|
||||
|
||||
function createComponent(options = {}) {
|
||||
const { fakeApollo } = options;
|
||||
|
||||
return shallowMount(Component, {
|
||||
provide: {
|
||||
projectPath,
|
||||
},
|
||||
localVue,
|
||||
apolloProvider: fakeApollo,
|
||||
});
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
const fakeApollo = createMockApolloProvider();
|
||||
wrapper = createComponent({ fakeApollo });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
|
@ -35,14 +51,20 @@ describe('ProjectsPipelinesChartsApp', () => {
|
|||
it('displays the statistics list', () => {
|
||||
const list = wrapper.find(StatisticsList);
|
||||
|
||||
expect(list.exists()).toBeTruthy();
|
||||
expect(list.props('counts')).toBe(counts);
|
||||
expect(list.exists()).toBe(true);
|
||||
expect(list.props('counts')).toMatchObject({
|
||||
failed: 1,
|
||||
success: 23,
|
||||
total: 34,
|
||||
successRatio: 95.83333333333334,
|
||||
totalDuration: 2471,
|
||||
});
|
||||
});
|
||||
|
||||
it('displays the commit duration chart', () => {
|
||||
const chart = wrapper.find(GlColumnChart);
|
||||
|
||||
expect(chart.exists()).toBeTruthy();
|
||||
expect(chart.exists()).toBe(true);
|
||||
expect(chart.props('yAxisTitle')).toBe('Minutes');
|
||||
expect(chart.props('xAxisTitle')).toBe('Commit');
|
||||
expect(chart.props('bars')).toBe(wrapper.vm.timesChartTransformedData);
|
||||
|
@ -52,7 +74,7 @@ describe('ProjectsPipelinesChartsApp', () => {
|
|||
|
||||
describe('pipelines charts', () => {
|
||||
it('displays 3 area charts', () => {
|
||||
expect(wrapper.findAll(PipelinesAreaChart).length).toBe(3);
|
||||
expect(wrapper.findAll(PipelinesAreaChart)).toHaveLength(3);
|
||||
});
|
||||
|
||||
describe('displays individual correctly', () => {
|
||||
|
@ -62,7 +84,9 @@ describe('ProjectsPipelinesChartsApp', () => {
|
|||
for (let i = 0; i < charts.length; i += 1) {
|
||||
const chart = charts.at(i);
|
||||
|
||||
expect(chart.exists()).toBeTruthy();
|
||||
expect(chart.exists()).toBe(true);
|
||||
// TODO: Refactor this to use the mocked data instead of the vm data
|
||||
// https://gitlab.com/gitlab-org/gitlab/-/issues/292085
|
||||
expect(chart.props('chartData')).toBe(wrapper.vm.areaCharts[i].data);
|
||||
expect(chart.text()).toBe(wrapper.vm.areaCharts[i].title);
|
||||
}
|
||||
|
|
|
@ -18,7 +18,7 @@ describe('StatisticsList', () => {
|
|||
wrapper = null;
|
||||
});
|
||||
|
||||
it('matches the snapshot', () => {
|
||||
it('displays the counts data with labels', () => {
|
||||
expect(wrapper.element).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -32,3 +32,218 @@ export const transformedAreaChartData = [
|
|||
data: [['01 Jan', 3], ['02 Jan', 3], ['03 Jan', 3], ['04 Jan', 3], ['05 Jan', 5]],
|
||||
},
|
||||
];
|
||||
|
||||
export const mockPipelineCount = {
|
||||
data: {
|
||||
project: {
|
||||
totalPipelines: { count: 34, __typename: 'PipelineConnection' },
|
||||
successfulPipelines: { count: 23, __typename: 'PipelineConnection' },
|
||||
failedPipelines: { count: 1, __typename: 'PipelineConnection' },
|
||||
totalPipelineDuration: 2471,
|
||||
__typename: 'Project',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export const mockPipelineStatistics = {
|
||||
data: {
|
||||
project: {
|
||||
pipelineAnalytics: {
|
||||
weekPipelinesTotals: [0, 0, 0, 0, 0, 0, 0, 0],
|
||||
weekPipelinesLabels: [
|
||||
'24 November',
|
||||
'25 November',
|
||||
'26 November',
|
||||
'27 November',
|
||||
'28 November',
|
||||
'29 November',
|
||||
'30 November',
|
||||
'01 December',
|
||||
],
|
||||
weekPipelinesSuccessful: [0, 0, 0, 0, 0, 0, 0, 0],
|
||||
monthPipelinesLabels: [
|
||||
'01 November',
|
||||
'02 November',
|
||||
'03 November',
|
||||
'04 November',
|
||||
'05 November',
|
||||
'06 November',
|
||||
'07 November',
|
||||
'08 November',
|
||||
'09 November',
|
||||
'10 November',
|
||||
'11 November',
|
||||
'12 November',
|
||||
'13 November',
|
||||
'14 November',
|
||||
'15 November',
|
||||
'16 November',
|
||||
'17 November',
|
||||
'18 November',
|
||||
'19 November',
|
||||
'20 November',
|
||||
'21 November',
|
||||
'22 November',
|
||||
'23 November',
|
||||
'24 November',
|
||||
'25 November',
|
||||
'26 November',
|
||||
'27 November',
|
||||
'28 November',
|
||||
'29 November',
|
||||
'30 November',
|
||||
'01 December',
|
||||
],
|
||||
monthPipelinesTotals: [
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
2,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
],
|
||||
monthPipelinesSuccessful: [
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
],
|
||||
yearPipelinesLabels: [
|
||||
'December 2019',
|
||||
'January 2020',
|
||||
'February 2020',
|
||||
'March 2020',
|
||||
'April 2020',
|
||||
'May 2020',
|
||||
'June 2020',
|
||||
'July 2020',
|
||||
'August 2020',
|
||||
'September 2020',
|
||||
'October 2020',
|
||||
'November 2020',
|
||||
'December 2020',
|
||||
],
|
||||
yearPipelinesTotals: [0, 0, 0, 0, 0, 0, 0, 0, 23, 7, 2, 2, 0],
|
||||
yearPipelinesSuccessful: [0, 0, 0, 0, 0, 0, 0, 0, 17, 5, 1, 0, 0],
|
||||
pipelineTimesLabels: [
|
||||
'b3781247',
|
||||
'b3781247',
|
||||
'a50ba059',
|
||||
'8e414f3b',
|
||||
'b2964d50',
|
||||
'7caa525b',
|
||||
'761b164e',
|
||||
'd3eccd18',
|
||||
'e2750f63',
|
||||
'e2750f63',
|
||||
'1dfb4b96',
|
||||
'b49d6f94',
|
||||
'66fa2f80',
|
||||
'e2750f63',
|
||||
'fc82cf15',
|
||||
'19fb20b2',
|
||||
'25f03a24',
|
||||
'e054110f',
|
||||
'0278b7b2',
|
||||
'38478c16',
|
||||
'38478c16',
|
||||
'38478c16',
|
||||
'1fb2103e',
|
||||
'97b99fb5',
|
||||
'8abc6e87',
|
||||
'c94e80e3',
|
||||
'5d349a50',
|
||||
'5d349a50',
|
||||
'9c581037',
|
||||
'02d95fb2',
|
||||
],
|
||||
pipelineTimesValues: [
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
1,
|
||||
1,
|
||||
2,
|
||||
1,
|
||||
0,
|
||||
1,
|
||||
2,
|
||||
2,
|
||||
0,
|
||||
4,
|
||||
2,
|
||||
1,
|
||||
2,
|
||||
1,
|
||||
1,
|
||||
0,
|
||||
1,
|
||||
1,
|
||||
0,
|
||||
1,
|
||||
5,
|
||||
2,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
],
|
||||
__typename: 'Analytics',
|
||||
},
|
||||
__typename: 'Project',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
|
|
@ -7,6 +7,8 @@ RSpec.describe Atlassian::JiraConnect::Client do
|
|||
|
||||
subject { described_class.new('https://gitlab-test.atlassian.net', 'sample_secret') }
|
||||
|
||||
let_it_be(:project) { create_default(:project, :repository) }
|
||||
|
||||
around do |example|
|
||||
freeze_time { example.run }
|
||||
end
|
||||
|
@ -19,41 +21,158 @@ RSpec.describe Atlassian::JiraConnect::Client do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#store_dev_info' do
|
||||
let_it_be(:project) { create_default(:project, :repository) }
|
||||
let_it_be(:merge_requests) { create_list(:merge_request, 2, :unique_branches) }
|
||||
describe '#send_info' do
|
||||
it 'calls store_build_info and store_dev_info as appropriate' do
|
||||
expect(subject).to receive(:store_build_info).with(
|
||||
project: project,
|
||||
update_sequence_id: :x,
|
||||
pipelines: :y
|
||||
).and_return(:build_stored)
|
||||
|
||||
let(:expected_jwt) do
|
||||
Atlassian::Jwt.encode(
|
||||
Atlassian::Jwt.build_claims(
|
||||
Atlassian::JiraConnect.app_key,
|
||||
'/rest/devinfo/0.10/bulk',
|
||||
'POST'
|
||||
),
|
||||
'sample_secret'
|
||||
)
|
||||
expect(subject).to receive(:store_dev_info).with(
|
||||
project: project,
|
||||
update_sequence_id: :x,
|
||||
commits: :a,
|
||||
branches: :b,
|
||||
merge_requests: :c
|
||||
).and_return(:dev_stored)
|
||||
|
||||
args = {
|
||||
project: project,
|
||||
update_sequence_id: :x,
|
||||
commits: :a,
|
||||
branches: :b,
|
||||
merge_requests: :c,
|
||||
pipelines: :y
|
||||
}
|
||||
|
||||
expect(subject.send_info(**args)).to contain_exactly(:dev_stored, :build_stored)
|
||||
end
|
||||
|
||||
it 'only calls methods that we need to call' do
|
||||
expect(subject).to receive(:store_dev_info).with(
|
||||
project: project,
|
||||
update_sequence_id: :x,
|
||||
commits: :a
|
||||
).and_return(:dev_stored)
|
||||
|
||||
args = {
|
||||
project: project,
|
||||
update_sequence_id: :x,
|
||||
commits: :a
|
||||
}
|
||||
|
||||
expect(subject.send_info(**args)).to contain_exactly(:dev_stored)
|
||||
end
|
||||
|
||||
it 'raises an argument error if there is nothing to send (probably a typo?)' do
|
||||
expect { subject.send_info(project: project, builds: :x) }
|
||||
.to raise_error(ArgumentError)
|
||||
end
|
||||
end
|
||||
|
||||
def expected_headers(path)
|
||||
expected_jwt = Atlassian::Jwt.encode(
|
||||
Atlassian::Jwt.build_claims(Atlassian::JiraConnect.app_key, path, 'POST'),
|
||||
'sample_secret'
|
||||
)
|
||||
|
||||
{
|
||||
'Authorization' => "JWT #{expected_jwt}",
|
||||
'Content-Type' => 'application/json'
|
||||
}
|
||||
end
|
||||
|
||||
describe '#store_build_info' do
|
||||
let_it_be(:mrs_by_title) { create_list(:merge_request, 4, :unique_branches, :jira_title) }
|
||||
let_it_be(:mrs_by_branch) { create_list(:merge_request, 2, :jira_branch) }
|
||||
let_it_be(:red_herrings) { create_list(:merge_request, 1, :unique_branches) }
|
||||
|
||||
let_it_be(:pipelines) do
|
||||
(red_herrings + mrs_by_branch + mrs_by_title).map do |mr|
|
||||
create(:ci_pipeline, merge_request: mr)
|
||||
end
|
||||
end
|
||||
|
||||
let(:build_info_payload_schema) do
|
||||
Atlassian::Schemata.build_info_payload
|
||||
end
|
||||
|
||||
let(:body) do
|
||||
matcher = be_valid_json.according_to_schema(build_info_payload_schema)
|
||||
|
||||
->(text) { matcher.matches?(text) }
|
||||
end
|
||||
|
||||
before do
|
||||
stub_full_request('https://gitlab-test.atlassian.net/rest/devinfo/0.10/bulk', method: :post)
|
||||
.with(
|
||||
headers: {
|
||||
'Authorization' => "JWT #{expected_jwt}",
|
||||
'Content-Type' => 'application/json'
|
||||
}
|
||||
)
|
||||
path = '/rest/builds/0.1/bulk'
|
||||
stub_full_request('https://gitlab-test.atlassian.net' + path, method: :post)
|
||||
.with(body: body, headers: expected_headers(path))
|
||||
end
|
||||
|
||||
it "calls the API with auth headers" do
|
||||
subject.store_dev_info(project: project)
|
||||
subject.send(:store_build_info, project: project, pipelines: pipelines)
|
||||
end
|
||||
|
||||
it 'only sends information about relevant MRs' do
|
||||
expect(subject).to receive(:post).with('/rest/builds/0.1/bulk', { builds: have_attributes(size: 6) })
|
||||
|
||||
subject.send(:store_build_info, project: project, pipelines: pipelines)
|
||||
end
|
||||
|
||||
it 'does not call the API if there is nothing to report' do
|
||||
expect(subject).not_to receive(:post)
|
||||
|
||||
subject.send(:store_build_info, project: project, pipelines: pipelines.take(1))
|
||||
end
|
||||
|
||||
it 'does not call the API if the feature flag is not enabled' do
|
||||
stub_feature_flags(jira_sync_builds: false)
|
||||
|
||||
expect(subject).not_to receive(:post)
|
||||
|
||||
subject.send(:store_build_info, project: project, pipelines: pipelines)
|
||||
end
|
||||
|
||||
it 'does call the API if the feature flag enabled for the project' do
|
||||
stub_feature_flags(jira_sync_builds: project)
|
||||
|
||||
expect(subject).to receive(:post).with('/rest/builds/0.1/bulk', { builds: Array })
|
||||
|
||||
subject.send(:store_build_info, project: project, pipelines: pipelines)
|
||||
end
|
||||
|
||||
it 'avoids N+1 database queries' do
|
||||
control_count = ActiveRecord::QueryRecorder.new { subject.store_dev_info(project: project, merge_requests: merge_requests) }.count
|
||||
baseline = ActiveRecord::QueryRecorder.new do
|
||||
subject.send(:store_build_info, project: project, pipelines: pipelines)
|
||||
end
|
||||
|
||||
pipelines << create(:ci_pipeline, head_pipeline_of: create(:merge_request, :jira_branch))
|
||||
|
||||
expect { subject.send(:store_build_info, project: project, pipelines: pipelines) }.not_to exceed_query_limit(baseline)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#store_dev_info' do
|
||||
let_it_be(:merge_requests) { create_list(:merge_request, 2, :unique_branches) }
|
||||
|
||||
before do
|
||||
path = '/rest/devinfo/0.10/bulk'
|
||||
|
||||
stub_full_request('https://gitlab-test.atlassian.net' + path, method: :post)
|
||||
.with(headers: expected_headers(path))
|
||||
end
|
||||
|
||||
it "calls the API with auth headers" do
|
||||
subject.send(:store_dev_info, project: project)
|
||||
end
|
||||
|
||||
it 'avoids N+1 database queries' do
|
||||
control_count = ActiveRecord::QueryRecorder.new { subject.send(:store_dev_info, project: project, merge_requests: merge_requests) }.count
|
||||
|
||||
merge_requests << create(:merge_request, :unique_branches)
|
||||
|
||||
expect { subject.store_dev_info(project: project, merge_requests: merge_requests) }.not_to exceed_query_limit(control_count)
|
||||
expect { subject.send(:store_dev_info, project: project, merge_requests: merge_requests) }.not_to exceed_query_limit(control_count)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -0,0 +1,52 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Atlassian::JiraConnect::Serializers::BuildEntity do
|
||||
let_it_be(:user) { create_default(:user) }
|
||||
let_it_be(:project) { create_default(:project) }
|
||||
|
||||
subject { described_class.represent(pipeline) }
|
||||
|
||||
context 'when the pipeline does not belong to any Jira issue' do
|
||||
let_it_be(:pipeline) { create(:ci_pipeline) }
|
||||
|
||||
describe '#issue_keys' do
|
||||
it 'is empty' do
|
||||
expect(subject.issue_keys).to be_empty
|
||||
end
|
||||
end
|
||||
|
||||
describe '#to_json' do
|
||||
it 'can encode the object' do
|
||||
expect(subject.to_json).to be_valid_json
|
||||
end
|
||||
|
||||
it 'is invalid, since it has no issue keys' do
|
||||
expect(subject.to_json).not_to be_valid_json.according_to_schema(Atlassian::Schemata.build_info)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the pipeline does belong to a Jira issue' do
|
||||
let(:pipeline) { create(:ci_pipeline, merge_request: merge_request) }
|
||||
|
||||
%i[jira_branch jira_title].each do |trait|
|
||||
context "because it belongs to an MR with a #{trait}" do
|
||||
let(:merge_request) { create(:merge_request, trait) }
|
||||
|
||||
describe '#issue_keys' do
|
||||
it 'is not empty' do
|
||||
expect(subject.issue_keys).not_to be_empty
|
||||
end
|
||||
end
|
||||
|
||||
describe '#to_json' do
|
||||
it 'is valid according to the build info schema' do
|
||||
expect(subject.to_json).to be_valid_json.according_to_schema(Atlassian::Schemata.build_info)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -259,7 +259,18 @@ RSpec.describe ApplicationSetting do
|
|||
it { is_expected.to allow_value('access-key-id-12').for(:eks_access_key_id) }
|
||||
it { is_expected.not_to allow_value('a' * 129).for(:eks_access_key_id) }
|
||||
it { is_expected.not_to allow_value('short-key').for(:eks_access_key_id) }
|
||||
it { is_expected.not_to allow_value(nil).for(:eks_access_key_id) }
|
||||
it { is_expected.to allow_value(nil).for(:eks_access_key_id) }
|
||||
|
||||
it { is_expected.to allow_value('secret-access-key').for(:eks_secret_access_key) }
|
||||
it { is_expected.to allow_value(nil).for(:eks_secret_access_key) }
|
||||
end
|
||||
|
||||
context 'access key is specified' do
|
||||
let(:eks_enabled) { true }
|
||||
|
||||
before do
|
||||
setting.eks_access_key_id = '123456789012'
|
||||
end
|
||||
|
||||
it { is_expected.to allow_value('secret-access-key').for(:eks_secret_access_key) }
|
||||
it { is_expected.not_to allow_value(nil).for(:eks_secret_access_key) }
|
||||
|
|
|
@ -1206,6 +1206,40 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
|
|||
end
|
||||
end
|
||||
|
||||
describe 'syncing status to Jira' do
|
||||
let(:worker) { ::JiraConnect::SyncBuildsWorker }
|
||||
|
||||
%i[prepare! run! skip! drop! succeed! cancel! block! delay!].each do |event|
|
||||
context "when we call pipeline.#{event}" do
|
||||
it 'triggers a Jira synch worker' do
|
||||
expect(worker).to receive(:perform_async).with(pipeline.id, Integer)
|
||||
|
||||
pipeline.send(event)
|
||||
end
|
||||
|
||||
context 'the feature is disabled' do
|
||||
it 'does not trigger a worker' do
|
||||
stub_feature_flags(jira_sync_builds: false)
|
||||
|
||||
expect(worker).not_to receive(:perform_async)
|
||||
|
||||
pipeline.send(event)
|
||||
end
|
||||
end
|
||||
|
||||
context 'the feature is enabled for this project' do
|
||||
it 'does trigger a worker' do
|
||||
stub_feature_flags(jira_sync_builds: pipeline.project)
|
||||
|
||||
expect(worker).to receive(:perform_async)
|
||||
|
||||
pipeline.send(event)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#duration', :sidekiq_inline do
|
||||
context 'when multiple builds are finished' do
|
||||
before do
|
||||
|
|
|
@ -81,5 +81,59 @@ RSpec.describe Clusters::Aws::FetchCredentialsService do
|
|||
expect { subject }.to raise_error(described_class::MissingRoleError, 'AWS provisioning role not configured')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with an instance profile attached to an IAM role' do
|
||||
let(:sts_client) { Aws::STS::Client.new(region: region, stub_responses: true) }
|
||||
let(:provision_role) { create(:aws_role, user: user, region: 'custom-region') }
|
||||
|
||||
before do
|
||||
stub_application_setting(eks_access_key_id: nil)
|
||||
stub_application_setting(eks_secret_access_key: nil)
|
||||
|
||||
expect(Aws::STS::Client).to receive(:new)
|
||||
.with(region: region)
|
||||
.and_return(sts_client)
|
||||
|
||||
expect(Aws::AssumeRoleCredentials).to receive(:new)
|
||||
.with(
|
||||
client: sts_client,
|
||||
role_arn: provision_role.role_arn,
|
||||
role_session_name: session_name,
|
||||
external_id: provision_role.role_external_id,
|
||||
policy: session_policy
|
||||
).and_call_original
|
||||
end
|
||||
|
||||
context 'provider is specified' do
|
||||
let(:region) { provider.region }
|
||||
let(:session_name) { "gitlab-eks-cluster-#{provider.cluster_id}-user-#{user.id}" }
|
||||
let(:session_policy) { nil }
|
||||
|
||||
it 'returns credentials', :aggregate_failures do
|
||||
expect(subject.access_key_id).to be_present
|
||||
expect(subject.secret_access_key).to be_present
|
||||
expect(subject.session_token).to be_present
|
||||
end
|
||||
end
|
||||
|
||||
context 'provider is not specified' do
|
||||
let(:provider) { nil }
|
||||
let(:region) { provision_role.region }
|
||||
let(:session_name) { "gitlab-eks-autofill-user-#{user.id}" }
|
||||
let(:session_policy) { 'policy-document' }
|
||||
|
||||
before do
|
||||
stub_file_read(Rails.root.join('vendor', 'aws', 'iam', 'eks_cluster_read_only_policy.json'), content: session_policy)
|
||||
end
|
||||
|
||||
subject { described_class.new(provision_role, provider: provider).execute }
|
||||
|
||||
it 'returns credentials', :aggregate_failures do
|
||||
expect(subject.access_key_id).to be_present
|
||||
expect(subject.secret_access_key).to be_present
|
||||
expect(subject.session_token).to be_present
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -3,30 +3,23 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe JiraConnect::SyncService do
|
||||
include AfterNextHelpers
|
||||
|
||||
describe '#execute' do
|
||||
let_it_be(:project) { create(:project, :repository) }
|
||||
let(:branches) { [project.repository.find_branch('master')] }
|
||||
let(:commits) { project.commits_by(oids: %w[b83d6e3 5a62481]) }
|
||||
let(:merge_requests) { [create(:merge_request, source_project: project, target_project: project)] }
|
||||
let(:client) { Atlassian::JiraConnect::Client }
|
||||
let(:info) { { a: 'Some', b: 'Info' } }
|
||||
|
||||
subject do
|
||||
described_class.new(project).execute(commits: commits, branches: branches, merge_requests: merge_requests)
|
||||
described_class.new(project).execute(**info)
|
||||
end
|
||||
|
||||
before do
|
||||
create(:jira_connect_subscription, namespace: project.namespace)
|
||||
end
|
||||
|
||||
def expect_jira_client_call(return_value = { 'status': 'success' })
|
||||
expect_next_instance_of(Atlassian::JiraConnect::Client) do |instance|
|
||||
expect(instance).to receive(:store_dev_info).with(
|
||||
project: project,
|
||||
commits: commits,
|
||||
branches: [instance_of(Gitlab::Git::Branch)],
|
||||
merge_requests: merge_requests,
|
||||
update_sequence_id: anything
|
||||
).and_return(return_value)
|
||||
end
|
||||
def store_info(return_values = [{ 'status': 'success' }])
|
||||
receive(:send_info).with(project: project, **info).and_return(return_values)
|
||||
end
|
||||
|
||||
def expect_log(type, message)
|
||||
|
@ -41,20 +34,22 @@ RSpec.describe JiraConnect::SyncService do
|
|||
end
|
||||
|
||||
it 'calls Atlassian::JiraConnect::Client#store_dev_info and logs the response' do
|
||||
expect_jira_client_call
|
||||
expect_next(client).to store_info
|
||||
|
||||
expect_log(:info, { 'status': 'success' })
|
||||
|
||||
subject
|
||||
end
|
||||
|
||||
context 'when request returns an error' do
|
||||
context 'when a request returns an error' do
|
||||
it 'logs the response as an error' do
|
||||
expect_jira_client_call({
|
||||
'errorMessages' => ['some error message']
|
||||
})
|
||||
expect_next(client).to store_info([
|
||||
{ 'errorMessages' => ['some error message'] },
|
||||
{ 'rejectedBuilds' => ['x'] }
|
||||
])
|
||||
|
||||
expect_log(:error, { 'errorMessages' => ['some error message'] })
|
||||
expect_log(:error, { 'rejectedBuilds' => ['x'] })
|
||||
|
||||
subject
|
||||
end
|
||||
|
|
83
spec/support/atlassian/jira_connect/schemata.rb
Normal file
|
@ -0,0 +1,83 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Atlassian
|
||||
module Schemata
|
||||
def self.build_info
|
||||
{
|
||||
'type' => 'object',
|
||||
'required' => %w(schemaVersion pipelineId buildNumber updateSequenceNumber displayName url state issueKeys testInfo references),
|
||||
'properties' => {
|
||||
'schemaVersion' => { 'type' => 'string', 'pattern' => '1.0' },
|
||||
'pipelineId' => { 'type' => 'string' },
|
||||
'buildNumber' => { 'type' => 'integer' },
|
||||
'updateSequenceNumber' => { 'type' => 'integer' },
|
||||
'displayName' => { 'type' => 'string' },
|
||||
'url' => { 'type' => 'string' },
|
||||
'state' => {
|
||||
'type' => 'string',
|
||||
'pattern' => '(pending|in_progress|successful|failed|cancelled)'
|
||||
},
|
||||
'issueKeys' => {
|
||||
'type' => 'array',
|
||||
'items' => { 'type' => 'string' },
|
||||
'minItems' => 1
|
||||
},
|
||||
'testInfo' => {
|
||||
'type' => 'object',
|
||||
'required' => %w(totalNumber numberPassed numberFailed numberSkipped),
|
||||
'properties' => {
|
||||
'totalNumber' => { 'type' => 'integer' },
|
||||
'numberFailed' => { 'type' => 'integer' },
|
||||
'numberPassed' => { 'type' => 'integer' },
|
||||
'numberSkipped' => { 'type' => 'integer' }
|
||||
}
|
||||
},
|
||||
'references' => {
|
||||
'type' => 'array',
|
||||
'items' => {
|
||||
'type' => 'object',
|
||||
'required' => %w(commit ref),
|
||||
'properties' => {
|
||||
'commit' => {
|
||||
'type' => 'object',
|
||||
'required' => %w(id repositoryUri),
|
||||
'properties' => {
|
||||
'id' => { 'type' => 'string' },
|
||||
'repositoryUri' => { 'type' => 'string' }
|
||||
}
|
||||
},
|
||||
'ref' => {
|
||||
'type' => 'object',
|
||||
'required' => %w(name uri),
|
||||
'properties' => {
|
||||
'name' => { 'type' => 'string' },
|
||||
'uri' => { 'type' => 'string' }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
def self.build_info_payload
|
||||
{
|
||||
'type' => 'object',
|
||||
'required' => %w(providerMetadata builds),
|
||||
'properties' => {
|
||||
'providerMetadata' => provider_metadata,
|
||||
'builds' => { 'type' => 'array', 'items' => build_info }
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
def self.provider_metadata
|
||||
{
|
||||
'type' => 'object',
|
||||
'required' => %w(product),
|
||||
'properties' => { 'product' => { 'type' => 'string' } }
|
||||
}
|
||||
end
|
||||
end
|
||||
end
|
|
@ -30,7 +30,11 @@ module AfterNextHelpers
|
|||
msg = asserted ? :to : :not_to
|
||||
case level
|
||||
when :expect
|
||||
expect_next_instance_of(klass, *args) { |instance| expect(instance).send(msg, condition) }
|
||||
if asserted
|
||||
expect_next_instance_of(klass, *args) { |instance| expect(instance).send(msg, condition) }
|
||||
else
|
||||
allow_next_instance_of(klass, *args) { |instance| expect(instance).send(msg, condition) }
|
||||
end
|
||||
when :allow
|
||||
allow_next_instance_of(klass, *args) { |instance| allow(instance).send(msg, condition) }
|
||||
else
|
||||
|
|
|
@ -2,17 +2,26 @@
|
|||
|
||||
module NextInstanceOf
|
||||
def expect_next_instance_of(klass, *new_args, &blk)
|
||||
stub_new(expect(klass), *new_args, &blk)
|
||||
stub_new(expect(klass), nil, *new_args, &blk)
|
||||
end
|
||||
|
||||
def expect_next_instances_of(klass, number, *new_args, &blk)
|
||||
stub_new(expect(klass), number, *new_args, &blk)
|
||||
end
|
||||
|
||||
def allow_next_instance_of(klass, *new_args, &blk)
|
||||
stub_new(allow(klass), *new_args, &blk)
|
||||
stub_new(allow(klass), nil, *new_args, &blk)
|
||||
end
|
||||
|
||||
def allow_next_instances_of(klass, number, *new_args, &blk)
|
||||
stub_new(allow(klass), number, *new_args, &blk)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def stub_new(target, *new_args, &blk)
|
||||
def stub_new(target, number, *new_args, &blk)
|
||||
receive_new = receive(:new)
|
||||
receive_new.exactly(number).times if number
|
||||
receive_new.with(*new_args) if new_args.any?
|
||||
|
||||
target.to receive_new.and_wrap_original do |method, *original_args|
|
||||
|
|
32
spec/support/matchers/be_valid_json.rb
Normal file
|
@ -0,0 +1,32 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
RSpec::Matchers.define :be_valid_json do
|
||||
def according_to_schema(schema)
|
||||
@schema = schema
|
||||
self
|
||||
end
|
||||
|
||||
match do |actual|
|
||||
data = Gitlab::Json.parse(actual)
|
||||
|
||||
if @schema.present?
|
||||
@validation_errors = JSON::Validator.fully_validate(@schema, data)
|
||||
@validation_errors.empty?
|
||||
else
|
||||
data.present?
|
||||
end
|
||||
rescue JSON::ParserError => e
|
||||
@error = e
|
||||
false
|
||||
end
|
||||
|
||||
def failure_message
|
||||
if @error
|
||||
"Parse failed with error: #{@error}"
|
||||
elsif @validation_errors.present?
|
||||
"Validation failed because #{@validation_errors.join(', and ')}"
|
||||
else
|
||||
"Parsing did not return any data"
|
||||
end
|
||||
end
|
||||
end
|
|
@@ -85,7 +85,7 @@ RSpec.shared_examples 'multiple issue boards' do

      wait_for_requests

      expect(page).to have_selector('.board', count: 5)
      expect(page).to have_selector('.board', count: 3)

      in_boards_switcher_dropdown do
        click_link board.name

@@ -93,7 +93,7 @@ RSpec.shared_examples 'multiple issue boards' do

      wait_for_requests

      expect(page).to have_selector('.board', count: 4)
      expect(page).to have_selector('.board', count: 2)
    end

    it 'maintains sidebar state over board switch' do

@@ -29,6 +29,36 @@ RSpec.describe JsonSchemaValidator do
          expect(build_report_result.errors.full_messages).to eq(["Data must be a valid json schema"])
        end
      end

      context 'when draft is > 4' do
        let(:validator) { described_class.new(attributes: [:data], filename: "build_report_result_data", draft: 6) }

        it 'uses JSONSchemer to perform validations' do
          expect(JSONSchemer).to receive(:schema).with(Pathname.new(Rails.root.join('app', 'validators', 'json_schemas', 'build_report_result_data.json').to_s)).and_call_original

          subject
        end
      end

      context 'when draft is <= 4' do
        let(:validator) { described_class.new(attributes: [:data], filename: "build_report_result_data", draft: 4) }

        it 'uses JSON::Validator to perform validations' do
          expect(JSON::Validator).to receive(:validate).with(Rails.root.join('app', 'validators', 'json_schemas', 'build_report_result_data.json').to_s, build_report_result.data)

          subject
        end
      end

      context 'when draft value is not provided' do
        let(:validator) { described_class.new(attributes: [:data], filename: "build_report_result_data") }

        it 'uses JSON::Validator to perform validations' do
          expect(JSON::Validator).to receive(:validate).with(Rails.root.join('app', 'validators', 'json_schemas', 'build_report_result_data.json').to_s, build_report_result.data)

          subject
        end
      end
    end

    context 'when filename is not set' do

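These specs pin down a new draft: option on JsonSchemaValidator: drafts above 4 go through JSONSchemer, everything else stays on JSON::Validator. A hedged sketch of how a model might opt in; the model and attribute are illustrative, following the filename used above:

    # Hypothetical model declaration using the validator's draft option.
    class BuildReportResult < ApplicationRecord
      validates :data, json_schema: { filename: 'build_report_result_data', draft: 6 }
    end
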
@@ -3,6 +3,8 @@

require 'spec_helper'

RSpec.describe JiraConnect::SyncBranchWorker do
  include AfterNextHelpers

  describe '#perform' do
    let_it_be(:group) { create(:group) }
    let_it_be(:project) { create(:project, :repository, group: group) }

@@ -67,7 +69,7 @@ RSpec.describe JiraConnect::SyncBranchWorker do

    context 'with update_sequence_id' do
      let(:update_sequence_id) { 1 }
      let(:request_url) { 'https://sample.atlassian.net/rest/devinfo/0.10/bulk' }
      let(:request_path) { '/rest/devinfo/0.10/bulk' }
      let(:request_body) do
        {
          repositories: [

@@ -78,14 +80,13 @@ RSpec.describe JiraConnect::SyncBranchWorker do
            update_sequence_id: update_sequence_id
          )
        ]
          }.to_json
        }
      end

      subject { described_class.new.perform(project_id, branch_name, commit_shas, update_sequence_id) }

      it 'sends the request with custom update_sequence_id' do
        expect(Atlassian::JiraConnect::Client).to receive(:post)
          .with(URI(request_url), headers: anything, body: request_body)
        expect_next(Atlassian::JiraConnect::Client).to receive(:post).with(request_path, request_body)

        subject
      end

spec/workers/jira_connect/sync_builds_worker_spec.rb (new file, 60 lines)

@@ -0,0 +1,60 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe ::JiraConnect::SyncBuildsWorker do
  include AfterNextHelpers
  include ServicesHelper

  describe '#perform' do
    let_it_be(:pipeline) { create(:ci_pipeline) }

    let(:sequence_id) { Random.random_number(1..10_000) }
    let(:pipeline_id) { pipeline.id }

    subject { described_class.new.perform(pipeline_id, sequence_id) }

    context 'when pipeline exists' do
      it 'calls the Jira sync service' do
        expect_next(::JiraConnect::SyncService, pipeline.project)
          .to receive(:execute).with(pipelines: contain_exactly(pipeline), update_sequence_id: sequence_id)

        subject
      end
    end

    context 'when pipeline does not exist' do
      let(:pipeline_id) { non_existing_record_id }

      it 'does not call the sync service' do
        expect_next(::JiraConnect::SyncService).not_to receive(:execute)

        subject
      end
    end

    context 'when the feature flag is disabled' do
      before do
        stub_feature_flags(jira_sync_builds: false)
      end

      it 'does not call the sync service' do
        expect_next(::JiraConnect::SyncService).not_to receive(:execute)

        subject
      end
    end

    context 'when the feature flag is enabled for this project' do
      before do
        stub_feature_flags(jira_sync_builds: pipeline.project)
      end

      it 'calls the sync service' do
        expect_next(::JiraConnect::SyncService).to receive(:execute)

        subject
      end
    end
  end
end

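The worker's #perform takes a pipeline id plus an update sequence id, so enqueueing it would look roughly like the following; the call site is an assumption, only the argument order comes from the subject above:

    # Hypothetical enqueue, mirroring described_class.new.perform(pipeline_id, sequence_id).
    ::JiraConnect::SyncBuildsWorker.perform_async(pipeline.id, update_sequence_id)
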
@@ -3,6 +3,8 @@

require 'spec_helper'

RSpec.describe JiraConnect::SyncMergeRequestWorker do
  include AfterNextHelpers

  describe '#perform' do
    let_it_be(:group) { create(:group) }
    let_it_be(:project) { create(:project, :repository, group: group) }

@@ -33,7 +35,7 @@ RSpec.describe JiraConnect::SyncMergeRequestWorker do

    context 'with update_sequence_id' do
      let(:update_sequence_id) { 1 }
      let(:request_url) { 'https://sample.atlassian.net/rest/devinfo/0.10/bulk' }
      let(:request_path) { '/rest/devinfo/0.10/bulk' }
      let(:request_body) do
        {
          repositories: [

@@ -43,14 +45,13 @@ RSpec.describe JiraConnect::SyncMergeRequestWorker do
            update_sequence_id: update_sequence_id
          )
        ]
          }.to_json
        }
      end

      subject { described_class.new.perform(merge_request_id, update_sequence_id) }

      it 'sends the request with custom update_sequence_id' do
        expect(Atlassian::JiraConnect::Client).to receive(:post)
          .with(URI(request_url), headers: anything, body: request_body)
        expect_next(Atlassian::JiraConnect::Client).to receive(:post).with(request_path, request_body)

        subject
      end

@@ -36,7 +36,7 @@ RSpec.describe JiraConnect::SyncProjectWorker, factory_default: :keep do
    end

    it_behaves_like 'an idempotent worker' do
      let(:request_url) { 'https://sample.atlassian.net/rest/devinfo/0.10/bulk' }
      let(:request_path) { '/rest/devinfo/0.10/bulk' }
      let(:request_body) do
        {
          repositories: [

@@ -46,13 +46,13 @@ RSpec.describe JiraConnect::SyncProjectWorker, factory_default: :keep do
            update_sequence_id: update_sequence_id
          )
        ]
          }.to_json
        }
      end

      it 'sends the request with custom update_sequence_id' do
        expect(Atlassian::JiraConnect::Client).to receive(:post)
          .exactly(IdempotentWorkerHelper::WORKER_EXEC_TIMES).times
          .with(URI(request_url), headers: anything, body: request_body)
        allow_next_instances_of(Atlassian::JiraConnect::Client, IdempotentWorkerHelper::WORKER_EXEC_TIMES) do |client|
          expect(client).to receive(:post).with(request_path, request_body)
        end

        subject
      end
