Add latest changes from gitlab-org/gitlab@master

parent c66aadd2ee
commit b3ce1ce452

63 changed files with 1370 additions and 539 deletions

@@ -1 +1 @@
-8.34.0
+8.35.0

app/assets/javascripts/boards/components/board_content.vue (new file, 70 lines)
@@ -0,0 +1,70 @@
<script>
import { mapState } from 'vuex';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import BoardColumn from 'ee_else_ce/boards/components/board_column.vue';

export default {
  components: {
    BoardColumn,
  },
  mixins: [glFeatureFlagMixin()],
  props: {
    lists: {
      type: Array,
      required: true,
    },
    canAdminList: {
      type: Boolean,
      required: true,
    },
    groupId: {
      type: Number,
      required: false,
      default: null,
    },
    disabled: {
      type: Boolean,
      required: true,
    },
    issueLinkBase: {
      type: String,
      required: true,
    },
    rootPath: {
      type: String,
      required: true,
    },
    boardId: {
      type: String,
      required: true,
    },
  },
  computed: {
    ...mapState(['isShowingEpicsSwimlanes']),
    isSwimlanesOn() {
      return this.glFeatures.boardsWithSwimlanes && this.isShowingEpicsSwimlanes;
    },
  },
};
</script>

<template>
  <div
    v-if="!isSwimlanesOn"
    class="boards-list w-100 py-3 px-2 text-nowrap"
    data-qa-selector="boards_list"
  >
    <board-column
      v-for="list in lists"
      :key="list.id"
      ref="board"
      :can-admin-list="canAdminList"
      :group-id="groupId"
      :list="list"
      :disabled="disabled"
      :issue-link-base="issueLinkBase"
      :root-path="rootPath"
      :board-id="boardId"
    />
  </div>
</template>
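The new component renders the classic column list only while `isSwimlanesOn` is false, that is, when the `boardsWithSwimlanes` flag is off or epic swimlanes are not being shown. A minimal test-style sketch of that gating, assuming the feature flag mixin reads flags from `window.gon.features` and using placeholder prop values:

```javascript
import Vuex from 'vuex';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import BoardContent from '~/boards/components/board_content.vue';

const localVue = createLocalVue();
localVue.use(Vuex);

// Assumption: the mixin exposes window.gon.features as this.glFeatures.
window.gon = { features: { boardsWithSwimlanes: true } };

const store = new Vuex.Store({
  state: { isShowingEpicsSwimlanes: true },
});

const wrapper = shallowMount(BoardContent, {
  localVue,
  store,
  propsData: {
    lists: [],
    canAdminList: true,
    disabled: false,
    issueLinkBase: '/issues',
    rootPath: '/',
    boardId: '1',
  },
});

// Both the flag and the swimlanes state are on, so the classic list is hidden.
console.log(wrapper.find('[data-qa-selector="boards_list"]').exists()); // => false
```

With either the flag or the Vuex state turned off, the same query would find the list container again.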
@@ -1,8 +1,10 @@
 import $ from 'jquery';
 import Vue from 'vue';
+import { mapActions } from 'vuex';

 import 'ee_else_ce/boards/models/issue';
 import 'ee_else_ce/boards/models/list';
+import BoardContent from '~/boards/components/board_content.vue';
 import BoardSidebar from 'ee_else_ce/boards/components/board_sidebar';
 import initNewListDropdown from 'ee_else_ce/boards/components/new_list_dropdown';
 import boardConfigToggle from 'ee_else_ce/boards/config_toggle';
@@ -77,6 +79,7 @@ export default () => {
   issueBoardsApp = new Vue({
     el: $boardApp,
     components: {
+      BoardContent,
       Board: () =>
         window?.gon?.features?.sfcIssueBoards
           ? import('ee_else_ce/boards/components/board_column.vue')
@@ -115,14 +118,16 @@ export default () => {
       },
     },
     created() {
-      boardsStore.setEndpoints({
+      const endpoints = {
         boardsEndpoint: this.boardsEndpoint,
         recentBoardsEndpoint: this.recentBoardsEndpoint,
         listsEndpoint: this.listsEndpoint,
         bulkUpdatePath: this.bulkUpdatePath,
         boardId: this.boardId,
         fullPath: $boardApp.dataset.fullPath,
-      });
+      };
+      this.setEndpoints(endpoints);
+      boardsStore.setEndpoints(endpoints);
       boardsStore.rootPath = this.boardsEndpoint;

       eventHub.$on('updateTokens', this.updateTokens);
@@ -193,6 +198,7 @@ export default () => {
       }
     },
     methods: {
+      ...mapActions(['setEndpoints']),
       updateTokens() {
        this.filterManager.updateTokens();
      },
@@ -1,11 +1,13 @@
+import * as types from './mutation_types';
+
 const notImplemented = () => {
   /* eslint-disable-next-line @gitlab/require-i18n-strings */
   throw new Error('Not implemented!');
 };

 export default {
-  setEndpoints: () => {
-    notImplemented();
+  setEndpoints: ({ commit }, endpoints) => {
+    commit(types.SET_ENDPOINTS, endpoints);
   },

   fetchLists: () => {
@@ -6,8 +6,8 @@ const notImplemented = () => {
 };

 export default {
-  [mutationTypes.SET_ENDPOINTS]: () => {
-    notImplemented();
+  [mutationTypes.SET_ENDPOINTS]: (state, endpoints) => {
+    state.endpoints = endpoints;
   },

   [mutationTypes.REQUEST_ADD_LIST]: () => {
@@ -1,6 +1,7 @@
 import { inactiveListId } from '~/boards/constants';

 export default () => ({
+  endpoints: {},
   isShowingLabels: true,
   activeListId: inactiveListId,
 });
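Together these hunks form a conventional Vuex flow: the `setEndpoints` action commits `SET_ENDPOINTS`, and the mutation copies the payload into `state.endpoints`. A rough sketch of exercising the pieces in isolation (the import paths match the spec files later in this commit; the endpoint values are placeholders):

```javascript
import Vue from 'vue';
import Vuex from 'vuex';
import state from '~/boards/stores/state';
import actions from '~/boards/stores/actions';
import mutations from '~/boards/stores/mutations';

Vue.use(Vuex);

// Build a store from the boards state factory plus its actions and mutations.
const store = new Vuex.Store({ state: state(), actions, mutations });

// Placeholder endpoints, mirroring what the boards app reads from its dataset.
store.dispatch('setEndpoints', {
  boardsEndpoint: '/boards/',
  listsEndpoint: '/boards/lists',
  bulkUpdatePath: '/boards/bulkUpdate',
  boardId: '1',
  fullPath: 'gitlab-org',
});

console.log(store.state.endpoints.fullPath); // => 'gitlab-org'
```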
@@ -233,8 +233,8 @@ export default {
       Tracking.event(category, action);
     },
     trackStatusUpdate(status) {
-      const { category, action, label } = trackErrorStatusUpdateOptions;
-      Tracking.event(category, action, { label, property: status });
+      const { category, action } = trackErrorStatusUpdateOptions(status);
+      Tracking.event(category, action);
     },
   },
 };
@@ -218,8 +218,8 @@ export default {
       Tracking.event(category, action);
     },
     trackStatusUpdate(status) {
-      const { category, action, label } = trackErrorStatusUpdateOptions;
-      Tracking.event(category, action, { label, property: status });
+      const { category, action } = trackErrorStatusUpdateOptions(status);
+      Tracking.event(category, action);
     },
   },
 };
@@ -30,8 +30,7 @@ export const trackErrorDetailsViewsOptions = {
 /**
  * Tracks snowplow event when error status is updated
  */
-export const trackErrorStatusUpdateOptions = {
+export const trackErrorStatusUpdateOptions = status => ({
   category: 'Error Tracking',
-  action: 'update_error_status',
-  label: 'Status',
-};
+  action: `update_${status}_status`,
+});
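After this change the tracking options are derived from the status being applied instead of a fixed `label`, so each status update gets its own Snowplow action. A small illustrative sketch of how the factory is consumed (the module path and the `'resolved'`/`'ignored'` status values are assumptions for illustration):

```javascript
import Tracking from '~/tracking';
// Path is illustrative; import from wherever trackErrorStatusUpdateOptions is exported.
import { trackErrorStatusUpdateOptions } from '~/error_tracking/events_tracking';

const trackStatusUpdate = status => {
  const { category, action } = trackErrorStatusUpdateOptions(status);
  // For 'resolved' this sends category 'Error Tracking', action 'update_resolved_status'.
  Tracking.event(category, action);
};

trackStatusUpdate('resolved');
trackStatusUpdate('ignored');
```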
@@ -2,7 +2,6 @@
 /* eslint-disable @gitlab/vue-require-i18n-strings */
 import Icon from '~/vue_shared/components/icon.vue';
 import Timeago from '~/vue_shared/components/time_ago_tooltip.vue';
-import router from '../../ide_router';

 export default {
   components: {
@@ -26,7 +25,7 @@ export default {
   },
   computed: {
     branchHref() {
-      return router.resolve(`/project/${this.projectId}/edit/${this.item.name}`).href;
+      return this.$router.resolve(`/project/${this.projectId}/edit/${this.item.name}`).href;
     },
   },
 };
@@ -1,6 +1,5 @@
 <script>
 import Icon from '../../../vue_shared/components/icon.vue';
-import router from '../../ide_router';

 export default {
   components: {
@@ -33,7 +32,7 @@ export default {
     mergeRequestHref() {
       const path = `/project/${this.item.projectPathWithNamespace}/merge_requests/${this.item.iid}`;

-      return router.resolve(path).href;
+      return this.$router.resolve(path).href;
     },
   },
 };
@@ -1,7 +1,6 @@
 <script>
 import { mapActions } from 'vuex';
 import RepoTab from './repo_tab.vue';
-import router from '../ide_router';

 export default {
   components: {
@@ -28,7 +27,7 @@ export default {

     if (this.activeFile.pending) {
       return this.removePendingTab(this.activeFile).then(() => {
-        router.push(`/project${this.activeFile.url}`);
+        this.$router.push(`/project${this.activeFile.url}`);
       });
     }

@@ -0,0 +1,6 @@
import * as types from './mutation_types';

// eslint-disable-next-line import/prefer-default-export
export const push = ({ commit }, fullPath) => {
  commit(types.PUSH, fullPath);
};

app/assets/javascripts/ide/stores/modules/router/index.js (new file, 10 lines)
@@ -0,0 +1,10 @@
import state from './state';
import mutations from './mutations';
import * as actions from './actions';

export default {
  namespaced: true,
  state,
  mutations,
  actions,
};

@@ -0,0 +1,2 @@
// eslint-disable-next-line import/prefer-default-export
export const PUSH = 'PUSH';

@@ -0,0 +1,7 @@
import * as types from './mutation_types';

export default {
  [types.PUSH](state, fullPath) {
    state.fullPath = fullPath;
  },
};

@@ -0,0 +1,3 @@
export default () => ({
  fullPath: '',
});
app/assets/javascripts/ide/sync_router_and_store.js (new file, 55 lines)
@@ -0,0 +1,55 @@
/* eslint-disable import/prefer-default-export */
/**
 * This method adds listeners to the given router and store and syncs their state with each other
 *
 * ### Why?
 *
 * Previously the IDE had a circular dependency between a singleton router and a singleton store.
 * This caused some integration testing headaches...
 *
 * At the time, the most efficient way to break this circular dependency was to:
 *
 * - Replace the router with a factory function that receives a store reference
 * - Have the store write to a certain state that can be watched by the router
 *
 * Hence... This helper function...
 */
export const syncRouterAndStore = (router, store) => {
  const disposables = [];

  let currentPath = '';

  // sync store to router
  disposables.push(
    store.watch(
      state => state.router.fullPath,
      fullPath => {
        if (currentPath === fullPath) {
          return;
        }

        currentPath = fullPath;

        router.push(fullPath);
      },
    ),
  );

  // sync router to store
  disposables.push(
    router.afterEach(to => {
      if (currentPath === to.fullPath) {
        return;
      }

      currentPath = to.fullPath;
      store.dispatch('router/push', currentPath, { root: true });
    }),
  );

  const unsync = () => {
    disposables.forEach(fn => fn());
  };

  return unsync;
};
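The helper returns an `unsync` disposer so callers can remove both listeners when the app is torn down. A minimal sketch of wiring it up, assuming the router store module from this commit is registered under the `router` namespace (which the `state.router.fullPath` watcher implies) and using an otherwise empty Vue Router:

```javascript
import Vue from 'vue';
import Vuex from 'vuex';
import VueRouter from 'vue-router';
import { syncRouterAndStore } from '~/ide/sync_router_and_store';
import routerModule from '~/ide/stores/modules/router';

Vue.use(Vuex);
Vue.use(VueRouter);

const store = new Vuex.Store({
  modules: { router: routerModule },
});
const router = new VueRouter({ routes: [] });

// Keep state.router.fullPath and the router's current route in sync.
const unsync = syncRouterAndStore(router, store);

// Dispatching the namespaced action updates the store; the store watcher
// then pushes the new path onto the router (and router.afterEach feeds
// navigations back into the store).
store.dispatch('router/push', '/project/some/path');

// Later, e.g. in a beforeDestroy hook, remove both listeners.
unsync();
```

The shared `currentPath` guard is what stops the two listeners from ping-ponging the same path back and forth.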
|
@ -5,7 +5,8 @@ import { getSvgIconPathContent } from '~/lib/utils/icon_utils';
|
|||
import { chartHeight } from '../../constants';
|
||||
import { makeDataSeries } from '~/helpers/monitor_helper';
|
||||
import { graphDataValidatorForValues } from '../../utils';
|
||||
import { getYAxisOptions, getChartGrid } from './options';
|
||||
import { getTimeAxisOptions, getYAxisOptions, getChartGrid } from './options';
|
||||
import { timezones } from '../../format_date';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
|
@ -20,6 +21,11 @@ export default {
|
|||
required: true,
|
||||
validator: graphDataValidatorForValues.bind(null, false),
|
||||
},
|
||||
timezone: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: timezones.LOCAL,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
|
@ -43,6 +49,8 @@ export default {
|
|||
};
|
||||
},
|
||||
chartOptions() {
|
||||
const xAxis = getTimeAxisOptions({ timezone: this.timezone });
|
||||
|
||||
const yAxis = {
|
||||
...getYAxisOptions(this.graphData.yAxis),
|
||||
scale: false,
|
||||
|
@ -50,8 +58,9 @@ export default {
|
|||
|
||||
return {
|
||||
grid: getChartGrid(),
|
||||
xAxis,
|
||||
yAxis,
|
||||
dataZoom: this.dataZoomConfig,
|
||||
dataZoom: [this.dataZoomConfig],
|
||||
};
|
||||
},
|
||||
xAxisTitle() {
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import { SUPPORTED_FORMATS, getFormatter } from '~/lib/utils/unit_format';
|
||||
import { s__ } from '~/locale';
|
||||
import { __, s__ } from '~/locale';
|
||||
import { formatDate, timezones, formats } from '../../format_date';
|
||||
|
||||
const yAxisBoundaryGap = [0.1, 0.1];
|
||||
/**
|
||||
|
@ -58,6 +59,17 @@ export const getYAxisOptions = ({
|
|||
};
|
||||
};
|
||||
|
||||
export const getTimeAxisOptions = ({ timezone = timezones.LOCAL } = {}) => ({
|
||||
name: __('Time'),
|
||||
type: 'time',
|
||||
axisLabel: {
|
||||
formatter: date => formatDate(date, { format: formats.shortTime, timezone }),
|
||||
},
|
||||
axisPointer: {
|
||||
snap: true,
|
||||
},
|
||||
});
|
||||
|
||||
// Chart grid
|
||||
|
||||
/**
|
||||
|
|
|
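`getTimeAxisOptions` centralizes the time axis configuration (the translated axis name, the `'time'` axis type, and a timezone-aware label formatter) so each chart can spread it into its own options, as the column chart above and the time series chart below now do. A small sketch of that merge, with placeholder overrides:

```javascript
import { getTimeAxisOptions } from '~/monitoring/components/charts/options';
import { timezones } from '~/monitoring/format_date';

// Per-chart overrides win over the shared defaults; these values are placeholders.
const customXAxis = { name: 'Deploys over time' };

const xAxis = {
  ...getTimeAxisOptions({ timezone: timezones.UTC }),
  ...customXAxis,
};

// xAxis.type === 'time', and xAxis.axisLabel.formatter renders dates in UTC.
```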
@ -2,15 +2,15 @@
|
|||
import { omit, throttle } from 'lodash';
|
||||
import { GlLink, GlDeprecatedButton, GlTooltip, GlResizeObserverDirective } from '@gitlab/ui';
|
||||
import { GlAreaChart, GlLineChart, GlChartSeriesLabel } from '@gitlab/ui/dist/charts';
|
||||
import { s__, __ } from '~/locale';
|
||||
import { s__ } from '~/locale';
|
||||
import { getSvgIconPathContent } from '~/lib/utils/icon_utils';
|
||||
import Icon from '~/vue_shared/components/icon.vue';
|
||||
import { panelTypes, chartHeight, lineTypes, lineWidths } from '../../constants';
|
||||
import { getYAxisOptions, getChartGrid, getTooltipFormatter } from './options';
|
||||
import { getYAxisOptions, getTimeAxisOptions, getChartGrid, getTooltipFormatter } from './options';
|
||||
import { annotationsYAxis, generateAnnotationsSeries } from './annotations';
|
||||
import { makeDataSeries } from '~/helpers/monitor_helper';
|
||||
import { graphDataValidatorForValues } from '../../utils';
|
||||
import { formatDate, timezones, formats } from '../../format_date';
|
||||
import { formatDate, timezones } from '../../format_date';
|
||||
|
||||
export const timestampToISODate = timestamp => new Date(timestamp).toISOString();
|
||||
|
||||
|
@ -160,24 +160,16 @@ export default {
|
|||
const { yAxis, xAxis } = this.option;
|
||||
const option = omit(this.option, ['series', 'yAxis', 'xAxis']);
|
||||
|
||||
const timeXAxis = {
|
||||
...getTimeAxisOptions({ timezone: this.timezone }),
|
||||
...xAxis,
|
||||
};
|
||||
|
||||
const dataYAxis = {
|
||||
...getYAxisOptions(this.graphData.yAxis),
|
||||
...yAxis,
|
||||
};
|
||||
|
||||
const timeXAxis = {
|
||||
name: __('Time'),
|
||||
type: 'time',
|
||||
axisLabel: {
|
||||
formatter: date =>
|
||||
formatDate(date, { format: formats.shortTime, timezone: this.timezone }),
|
||||
},
|
||||
axisPointer: {
|
||||
snap: true,
|
||||
},
|
||||
...xAxis,
|
||||
};
|
||||
|
||||
return {
|
||||
series: this.chartOptionSeries,
|
||||
xAxis: timeXAxis,
|
||||
|
|
|
@ -26,6 +26,7 @@ import DashboardsDropdown from './dashboards_dropdown.vue';
|
|||
import TrackEventDirective from '~/vue_shared/directives/track_event';
|
||||
import { getAddMetricTrackingOptions, timeRangeToUrl } from '../utils';
|
||||
import { timeRanges } from '~/vue_shared/constants';
|
||||
import { timezones } from '../format_date';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
|
@ -104,6 +105,7 @@ export default {
|
|||
'currentEnvironmentName',
|
||||
'isUpdatingStarredValue',
|
||||
'showEmptyState',
|
||||
'dashboardTimezone',
|
||||
]),
|
||||
...mapGetters('monitoringDashboard', ['selectedDashboard', 'filteredEnvironments']),
|
||||
shouldShowEnvironmentsDropdownNoMatchedMsg() {
|
||||
|
@ -122,6 +124,9 @@ export default {
|
|||
showRearrangePanelsBtn() {
|
||||
return !this.showEmptyState && this.rearrangePanelsAvailable;
|
||||
},
|
||||
displayUtc() {
|
||||
return this.dashboardTimezone === timezones.UTC;
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
...mapActions('monitoringDashboard', [
|
||||
|
@ -240,6 +245,7 @@ export default {
|
|||
data-qa-selector="range_picker_dropdown"
|
||||
:value="selectedTimeRange"
|
||||
:options="$options.timeRanges"
|
||||
:utc="displayUtc"
|
||||
@input="onDateTimePickerInput"
|
||||
@invalid="onDateTimePickerInvalid"
|
||||
/>
|
||||
|
|
|
@ -118,6 +118,9 @@ export default {
|
|||
timeRange(state) {
|
||||
return state[this.namespace].timeRange;
|
||||
},
|
||||
dashboardTimezone(state) {
|
||||
return state[this.namespace].dashboardTimezone;
|
||||
},
|
||||
metricsSavedToDb(state, getters) {
|
||||
return getters[`${this.namespace}/metricsSavedToDb`];
|
||||
},
|
||||
|
@ -398,6 +401,7 @@ export default {
|
|||
:is="basicChartComponent"
|
||||
v-else-if="basicChartComponent"
|
||||
:graph-data="graphData"
|
||||
:timezone="dashboardTimezone"
|
||||
v-bind="$attrs"
|
||||
v-on="$listeners"
|
||||
/>
|
||||
|
@ -411,6 +415,7 @@ export default {
|
|||
:project-path="projectPath"
|
||||
:thresholds="getGraphAlertValues(graphData.metrics)"
|
||||
:group-id="groupId"
|
||||
:timezone="dashboardTimezone"
|
||||
v-bind="$attrs"
|
||||
v-on="$listeners"
|
||||
@datazoom="onDatazoom"
|
||||
|
|
|
@ -20,6 +20,7 @@ export default (props = {}) => {
|
|||
projectPath,
|
||||
logsPath,
|
||||
currentEnvironmentName,
|
||||
dashboardTimezone,
|
||||
...dataProps
|
||||
} = el.dataset;
|
||||
|
||||
|
@ -28,6 +29,7 @@ export default (props = {}) => {
|
|||
deploymentsEndpoint,
|
||||
dashboardEndpoint,
|
||||
dashboardsEndpoint,
|
||||
dashboardTimezone,
|
||||
projectPath,
|
||||
logsPath,
|
||||
currentEnvironmentName,
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import invalidUrl from '~/lib/utils/invalid_url';
|
||||
import { timezones } from '../format_date';
|
||||
|
||||
export default () => ({
|
||||
// API endpoints
|
||||
|
@ -45,6 +46,7 @@ export default () => ({
|
|||
*/
|
||||
links: {},
|
||||
// Other project data
|
||||
dashboardTimezone: timezones.LOCAL,
|
||||
annotations: [],
|
||||
deploymentData: [],
|
||||
environments: [],
|
||||
|
|
|
@ -155,8 +155,9 @@ export default {
|
|||
const errors = baseObj?.errors;
|
||||
if (errors.length) {
|
||||
this.flashAPIFailure(errors[0]);
|
||||
} else {
|
||||
redirectTo(baseObj.snippet.webUrl);
|
||||
}
|
||||
redirectTo(baseObj.snippet.webUrl);
|
||||
})
|
||||
.catch(e => {
|
||||
this.flashAPIFailure(e);
|
||||
|
@ -215,7 +216,7 @@ export default {
|
|||
variant="success"
|
||||
:disabled="updatePrevented"
|
||||
data-qa-selector="submit_button"
|
||||
@click="handleFormSubmit"
|
||||
@click.prevent="handleFormSubmit"
|
||||
>{{ saveButtonLabel }}</gl-button
|
||||
>
|
||||
</template>
|
||||
|
|
|
@ -1,16 +1,15 @@
|
|||
<script>
|
||||
import { __ } from '~/locale';
|
||||
import { GlIcon, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
|
||||
import { GlIcon, GlLink, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
|
||||
import axios from '~/lib/utils/axios_utils';
|
||||
import CiIcon from '../../vue_shared/components/ci_icon.vue';
|
||||
import flash from '~/flash';
|
||||
import Poll from '~/lib/utils/poll';
|
||||
|
||||
export default {
|
||||
name: 'MRWidgetTerraformPlan',
|
||||
components: {
|
||||
CiIcon,
|
||||
GlIcon,
|
||||
GlLink,
|
||||
GlLoadingIcon,
|
||||
GlSprintf,
|
||||
},
|
||||
|
@ -36,12 +35,6 @@ export default {
|
|||
deleteNum() {
|
||||
return Number(this.plan.delete);
|
||||
},
|
||||
iconStatusObj() {
|
||||
return {
|
||||
group: 'warning',
|
||||
icon: 'status_warning',
|
||||
};
|
||||
},
|
||||
logUrl() {
|
||||
return this.plan.job_path;
|
||||
},
|
||||
|
@ -90,7 +83,7 @@ export default {
|
|||
<section class="mr-widget-section">
|
||||
<div class="mr-widget-body media d-flex flex-row">
|
||||
<span class="append-right-default align-self-start align-self-lg-center">
|
||||
<ci-icon :status="iconStatusObj" :size="24" />
|
||||
<gl-icon name="status_warning" :size="24" />
|
||||
</span>
|
||||
|
||||
<div class="d-flex flex-fill flex-column flex-md-row">
|
||||
|
@ -125,7 +118,7 @@ export default {
|
|||
</div>
|
||||
|
||||
<div class="terraform-mr-plan-actions">
|
||||
<a
|
||||
<gl-link
|
||||
v-if="logUrl"
|
||||
:href="logUrl"
|
||||
target="_blank"
|
||||
|
@ -137,7 +130,7 @@ export default {
|
|||
>
|
||||
{{ __('View full log') }}
|
||||
<gl-icon name="external-link" />
|
||||
</a>
|
||||
</gl-link>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@@ -9,6 +9,7 @@ class Groups::BoardsController < Groups::ApplicationController
   before_action do
     push_frontend_feature_flag(:multi_select_board, default_enabled: true)
     push_frontend_feature_flag(:sfc_issue_boards, default_enabled: true)
+    push_frontend_feature_flag(:boards_with_swimlanes, group, default_enabled: false)
   end

   private
@@ -284,8 +284,8 @@ module ProjectsHelper
     "xcode://clone?repo=#{CGI.escape(default_url_to_repo(project))}"
   end

-  def link_to_bfg
-    link_to 'BFG', 'https://rtyley.github.io/bfg-repo-cleaner/', target: '_blank', rel: 'noopener noreferrer'
+  def link_to_filter_repo
+    link_to 'git filter-repo', 'https://github.com/newren/git-filter-repo', target: '_blank', rel: 'noopener noreferrer'
   end

   def explore_projects_tab?
@@ -6,7 +6,7 @@
 %button.btn.js-settings-toggle
   = expanded ? _('Collapse') : _('Expand')
 %p
-  = _("Clean up after running %{bfg} on the repository" % { bfg: link_to_bfg }).html_safe
+  = _("Clean up after running %{filter_repo} on the repository" % { filter_repo: link_to_filter_repo }).html_safe
 = link_to icon('question-circle'),
   help_page_path('user/project/repository/reducing_the_repo_size_using_git.md'),
   target: '_blank', rel: 'noopener noreferrer'
@@ -22,7 +22,7 @@
     = _("Choose a file")
   %span.prepend-left-default.js-filename
     = _("No file selected")
-= f.file_field :bfg_object_map, accept: 'text/plain', class: "hidden js-object-map-input", required: true
+= f.file_field :bfg_object_map, class: "hidden js-object-map-input", required: true
 .form-text.text-muted
   = _("The maximum file size allowed is %{size}.") % { size: number_to_human_size(Gitlab::CurrentSettings.max_attachment_size.megabytes) }
 = f.submit _('Start cleanup'), class: 'btn btn-success'
|
@ -20,20 +20,31 @@
|
|||
#board-app.boards-app.position-relative{ "v-cloak" => "true", data: board_data, ":class" => "{ 'is-compact': detailIssueVisible }" }
|
||||
= render 'shared/issuable/search_bar', type: :boards, board: board
|
||||
|
||||
.boards-list.w-100.py-3.px-2.text-nowrap{ data: { qa_selector: "boards_list" } }
|
||||
.boards-app-loading.w-100.text-center{ "v-if" => "loading" }
|
||||
= icon("spinner spin 2x")
|
||||
%board{ "v-cloak" => "true",
|
||||
"v-for" => "list in state.lists",
|
||||
"ref" => "board",
|
||||
- if Feature.enabled?(:boards_with_swimlanes, current_board_parent)
|
||||
%board-content{ "v-cloak" => "true",
|
||||
"ref" => "board_content",
|
||||
":lists" => "state.lists",
|
||||
":can-admin-list" => can_admin_list,
|
||||
":group-id" => group_id,
|
||||
":list" => "list",
|
||||
":disabled" => "disabled",
|
||||
":issue-link-base" => "issueLinkBase",
|
||||
":root-path" => "rootPath",
|
||||
":board-id" => "boardId",
|
||||
":key" => "list.id" }
|
||||
":board-id" => "boardId" }
|
||||
- else
|
||||
.boards-list.w-100.py-3.px-2.text-nowrap{ data: { qa_selector: "boards_list" } }
|
||||
.boards-app-loading.w-100.text-center{ "v-if" => "loading" }
|
||||
= icon("spinner spin 2x")
|
||||
%board{ "v-cloak" => "true",
|
||||
"v-for" => "list in state.lists",
|
||||
"ref" => "board",
|
||||
":can-admin-list" => can_admin_list,
|
||||
":group-id" => group_id,
|
||||
":list" => "list",
|
||||
":disabled" => "disabled",
|
||||
":issue-link-base" => "issueLinkBase",
|
||||
":root-path" => "rootPath",
|
||||
":board-id" => "boardId",
|
||||
":key" => "list.id" }
|
||||
= render "shared/boards/components/sidebar", group: group
|
||||
= render_if_exists 'shared/boards/components/board_settings_sidebar'
|
||||
- if @project
|
||||
|
|
|
@@ -0,0 +1,5 @@
---
title: Track Sentry error status updates with dedicated actions
merge_request: 33623
author:
type: changed

changelogs/unreleased/219210-column-date-format.yml (new file, 5 lines)
@@ -0,0 +1,5 @@
---
title: Format metrics column chart x axis dates
merge_request: 33681
author:
type: changed

changelogs/unreleased/reduce-repo-size.yml (new file, 5 lines)
@@ -0,0 +1,5 @@
---
title: Add support for `git filter-repo` to repository cleanup
merge_request: 33576
author:
type: added

@@ -0,0 +1,5 @@
---
title: Update Workhorse to v8.35.0
merge_request: 33817
author:
type: other
db/migrate/20200429023324_add_composer_metadata.rb (new file, 12 lines)
@@ -0,0 +1,12 @@
# frozen_string_literal: true

class AddComposerMetadata < ActiveRecord::Migration[6.0]
  DOWNTIME = false

  def change
    create_table :packages_composer_metadata, id: false do |t|
      t.references :package, primary_key: true, index: false, default: nil, foreign_key: { to_table: :packages_packages, on_delete: :cascade }, type: :bigint
      t.binary :target_sha, null: false
    end
  end
end

db/migrate/20200528054112_add_index_to_package_name.rb (new file, 19 lines)
@@ -0,0 +1,19 @@
# frozen_string_literal: true

class AddIndexToPackageName < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  INDEX_NAME = 'package_name_index'.freeze

  def up
    add_concurrent_index(:packages_packages, :name, name: INDEX_NAME)
  end

  def down
    remove_concurrent_index(:packages_packages, :name, name: INDEX_NAME)
  end
end
|
@ -4611,6 +4611,11 @@ CREATE SEQUENCE public.packages_build_infos_id_seq
|
|||
|
||||
ALTER SEQUENCE public.packages_build_infos_id_seq OWNED BY public.packages_build_infos.id;
|
||||
|
||||
CREATE TABLE public.packages_composer_metadata (
|
||||
package_id bigint NOT NULL,
|
||||
target_sha bytea NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE public.packages_conan_file_metadata (
|
||||
id bigint NOT NULL,
|
||||
package_file_id bigint NOT NULL,
|
||||
|
@ -8618,6 +8623,9 @@ ALTER TABLE ONLY public.operations_user_lists
|
|||
ALTER TABLE ONLY public.packages_build_infos
|
||||
ADD CONSTRAINT packages_build_infos_pkey PRIMARY KEY (id);
|
||||
|
||||
ALTER TABLE ONLY public.packages_composer_metadata
|
||||
ADD CONSTRAINT packages_composer_metadata_pkey PRIMARY KEY (package_id);
|
||||
|
||||
ALTER TABLE ONLY public.packages_conan_file_metadata
|
||||
ADD CONSTRAINT packages_conan_file_metadata_pkey PRIMARY KEY (id);
|
||||
|
||||
|
@ -11074,6 +11082,8 @@ CREATE INDEX note_mentions_temp_index ON public.notes USING btree (id, noteable_
|
|||
|
||||
CREATE UNIQUE INDEX one_canonical_wiki_page_slug_per_metadata ON public.wiki_page_slugs USING btree (wiki_page_meta_id) WHERE (canonical = true);
|
||||
|
||||
CREATE INDEX package_name_index ON public.packages_packages USING btree (name);
|
||||
|
||||
CREATE INDEX packages_packages_verification_checksum_partial ON public.packages_package_files USING btree (verification_checksum) WHERE (verification_checksum IS NOT NULL);
|
||||
|
||||
CREATE INDEX packages_packages_verification_failure_partial ON public.packages_package_files USING btree (verification_failure) WHERE (verification_failure IS NOT NULL);
|
||||
|
@ -12449,6 +12459,9 @@ ALTER TABLE ONLY public.ci_build_trace_sections
|
|||
ALTER TABLE ONLY public.clusters
|
||||
ADD CONSTRAINT fk_rails_ac3a663d79 FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE SET NULL;
|
||||
|
||||
ALTER TABLE ONLY public.packages_composer_metadata
|
||||
ADD CONSTRAINT fk_rails_ad48c2e5bb FOREIGN KEY (package_id) REFERENCES public.packages_packages(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY public.analytics_cycle_analytics_group_stages
|
||||
ADD CONSTRAINT fk_rails_ae5da3409b FOREIGN KEY (group_id) REFERENCES public.namespaces(id) ON DELETE CASCADE;
|
||||
|
||||
|
@ -13698,6 +13711,7 @@ COPY "schema_migrations" (version) FROM STDIN;
|
|||
20200429001827
|
||||
20200429002150
|
||||
20200429015603
|
||||
20200429023324
|
||||
20200429181335
|
||||
20200429181955
|
||||
20200429182245
|
||||
|
@ -13775,6 +13789,7 @@ COPY "schema_migrations" (version) FROM STDIN;
|
|||
20200527151413
|
||||
20200527152116
|
||||
20200527152657
|
||||
20200528054112
|
||||
20200528123703
|
||||
20200603073101
|
||||
\.
|
||||
|
|
|
@@ -699,6 +699,12 @@ stop action when the associated branch is deleted. The `stop_review` job must
 be in the same `stage` as the `deploy_review` job in order for the environment
 to automatically stop.

+Additionally, both jobs should have matching [`rules`](../yaml/README.md#onlyexcept-basic)
+or [`only/except`](../yaml/README.md#onlyexcept-basic) configuration. In the example
+above, if the configuration is not identical, the `stop_review` job might not be
+included in all pipelines that include the `deploy_review` job, and it will not be
+possible to trigger the `action: stop` to stop the environment automatically.
+
 You can read more in the [`.gitlab-ci.yml` reference](../yaml/README.md#environmenton_stop).

 #### Environments auto-stop
|
|
@@ -98,12 +98,12 @@ GitLab uses [RE2 syntax](https://github.com/google/re2/wiki/Syntax) for regular
 > [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/385) in [GitLab Starter](https://about.gitlab.com/pricing/) 8.12.

 Secrets such as credential files, SSH private keys, and other files containing secrets should never be committed to source control.
-GitLab allows you to turn on a predefined blacklist of files which won't be allowed to be
+GitLab allows you to turn on a predefined denylist of files which won't be allowed to be
 pushed to a repository, stopping those commits from reaching the remote repository.

 By selecting the checkbox *Prevent committing secrets to Git*, GitLab prevents
 pushes to the repository when a file matches a regular expression as read from
-[`files_blacklist.yml`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/gitlab/checks/files_blacklist.yml) (make sure you are at the right branch
+[`files_denylist.yml`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/gitlab/checks/files_denylist.yml) (make sure you are at the right branch
 as your GitLab version when viewing this file).

 NOTE: **Note:**
|
|
@@ -97,7 +97,7 @@ Read the documentation on how to [migrate an existing Git repo with Git LFS](mig

 To remove objects from LFS:

-1. Use [BFG-Cleaner](../../../user/project/repository/reducing_the_repo_size_using_git.md#using-the-bfg-repo-cleaner) or [filter-branch](../../../user/project/repository/reducing_the_repo_size_using_git.md#using-git-filter-branch) to remove the objects from the repository.
+1. Use [`git filter-repo`](../../../user/project/repository/reducing_the_repo_size_using_git.md) to remove the objects from the repository.
 1. Delete the relevant LFS lines for the objects you have removed from your `.gitattributes` file and commit those changes.

 ## File Locking

Binary file not shown. (Before: 7.9 KiB)
|
@ -7,8 +7,205 @@ type: howto
|
|||
|
||||
# Reducing the repository size using Git
|
||||
|
||||
A GitLab Enterprise Edition administrator can set a [repository size limit](../../admin_area/settings/account_and_limit_settings.md)
|
||||
which will prevent you from exceeding it.
|
||||
When large files are added to a Git repository, fetching the repository becomes
slower, because everyone needs to download them. These files can also take up a
large amount of storage space on the server over time.
|
||||
|
||||
Rewriting a repository can remove unwanted history to make the repository
|
||||
smaller. [`git filter-repo`](https://github.com/newren/git-filter-repo) is a
|
||||
tool for quickly rewriting Git repository history, and is recommended over [`git
|
||||
filter-branch`](https://git-scm.com/docs/git-filter-branch) and
|
||||
[BFG](https://rtyley.github.io/bfg-repo-cleaner/).
|
||||
|
||||
DANGER: **Danger:**
Rewriting repository history is a destructive operation. Make sure to back up
your repository before you begin. The best way is to [export the
project](../settings/import_export.html#exporting-a-project-and-its-data).
|
||||
|
||||
## Purging files from your repository history
|
||||
|
||||
To make cloning your project faster, rewrite branches and tags to remove
|
||||
unwanted files.
|
||||
|
||||
1. [Install `git
|
||||
filter-repo`](https://github.com/newren/git-filter-repo/blob/master/INSTALL.md)
|
||||
using a supported package manager, or from source.
|
||||
|
||||
1. Clone a fresh copy of the repository using `--bare`.
|
||||
|
||||
```shell
|
||||
git clone --bare https://example.gitlab.com/my/project.git
|
||||
```
|
||||
|
||||
1. Using `git filter-repo`, purge any files from the history of your repository.
|
||||
|
||||
To purge all large files, the `--strip-blobs-bigger-than` option can be used:
|
||||
|
||||
```shell
|
||||
git filter-repo --strip-blobs-bigger-than 10M
|
||||
```
|
||||
|
||||
To purge specific large files by path, the `--path` and `--invert-paths`
|
||||
options can be combined.
|
||||
|
||||
```shell
|
||||
git filter-repo --path path/to/big/file.m4v --invert-paths
|
||||
```
|
||||
|
||||
See the [`git filter-repo`
|
||||
documentation](https://htmlpreview.github.io/?https://github.com/newren/git-filter-repo/blob/docs/html/git-filter-repo.html#EXAMPLES)
|
||||
for more examples, and the complete documentation.
|
||||
|
||||
1. Force push your changes to overwrite all branches on GitLab.
|
||||
|
||||
```shell
|
||||
git push origin --force --all
|
||||
```
|
||||
|
||||
[Protected Branches](../protected_branches.md) will cause this to fail. To
|
||||
proceed you will need to remove branch protection, push, and then
|
||||
reconfigure protected branches.
|
||||
|
||||
1. To remove large files from tagged releases, force push your changes to all
|
||||
tags on GitLab.
|
||||
|
||||
```shell
|
||||
git push origin --force --tags
|
||||
```
|
||||
|
||||
[Protected Tags](../protected_tags.md) will cause this to
|
||||
fail. To proceed you will need to remove tag protection, push, and then
|
||||
reconfigure protected tags.
|
||||
|
||||
## Purging files from GitLab storage
|
||||
|
||||
To reduce the size of your repository in GitLab you will need to remove GitLab
internal refs that reference commits containing large files. Before completing
these steps, first [purge files from your repository
history](#purging-files-from-your-repository-history).
|
||||
|
||||
As well as branches and tags, which are a type of Git ref, GitLab automatically
|
||||
creates other refs. These refs prevent dead links to commits, or missing diffs
|
||||
when viewing merge requests. [Repository cleanup](#repository-cleanup) can be
|
||||
used to remove these from GitLab.
|
||||
|
||||
The internal refs for merge requests (`refs/merge-requests/*`),
|
||||
[pipelines](../../../ci/pipelines/index.md#troubleshooting-fatal-reference-is-not-a-tree)
|
||||
(`refs/pipelines/*`), and environments (`refs/environments/*`) are not
|
||||
advertised, which means they are not included when fetching, which makes
|
||||
fetching faster. The hidden refs to prevent commits with discussion from being
|
||||
deleted (`refs/keep-around/*`) cannot be fetched at all. These refs can,
|
||||
however, be accessed from the Git bundle inside the project export.
|
||||
|
||||
1. [Install `git
|
||||
filter-repo`](https://github.com/newren/git-filter-repo/blob/master/INSTALL.md)
|
||||
using a supported package manager, or from source.
|
||||
|
||||
1. Generate a fresh [project
   export](../settings/import_export.html#exporting-a-project-and-its-data) and
   download it to your computer.
|
||||
|
||||
1. Decompress the backup using `tar`
|
||||
|
||||
```shell
|
||||
tar xzf project-backup.tar.gz
|
||||
```
|
||||
|
||||
This will contain a `project.bundle` file, which was created by [`git
|
||||
bundle`](https://git-scm.com/docs/git-bundle)
|
||||
|
||||
1. Clone a fresh copy of the repository from the bundle.
|
||||
|
||||
```shell
|
||||
git clone --bare --mirror /path/to/project.bundle
|
||||
```
|
||||
|
||||
1. Using `git filter-repo`, purge any files from the history of your repository.
|
||||
Because we are trying to remove internal refs, we will rely on the
|
||||
`commit-map` produced by each run to tell us which internal refs to remove.
|
||||
|
||||
NOTE: **Note:**
`git filter-repo` creates a new `commit-map` file every run, and overwrites the
`commit-map` from the previous run. You will need this file from **every**
run. Do the next step every time you run `git filter-repo`.
|
||||
|
||||
To purge all large files, the `--strip-blobs-bigger-than` option can be used:
|
||||
|
||||
```shell
|
||||
git filter-repo --strip-blobs-bigger-than 10M
|
||||
```
|
||||
|
||||
To purge specific large files by path, the `--path` and `--invert-paths`
|
||||
options can be combined.
|
||||
|
||||
```shell
|
||||
git filter-repo --path path/to/big/file.m4v --invert-paths
|
||||
```
|
||||
|
||||
See the [`git filter-repo`
|
||||
documentation](https://htmlpreview.github.io/?https://github.com/newren/git-filter-repo/blob/docs/html/git-filter-repo.html#EXAMPLES)
|
||||
for more examples, and the complete documentation.
|
||||
|
||||
1. After running `git filter-repo`, the header and unchanged commits need to be
|
||||
removed from the `commit-map` before uploading to GitLab.
|
||||
|
||||
```shell
|
||||
tail -n +2 filter-repo/commit-map | grep -E -v '^(\w+) \1$' >> commit-map.txt
|
||||
```
|
||||
|
||||
This command can be run after each run of `git filter-repo` to append the
|
||||
output of the run to `commit-map.txt`
|
||||
|
||||
1. Navigate to **Project > Settings > Repository > Repository Cleanup**.
|
||||
|
||||
Upload the `commit-map.txt` file and press **Start cleanup**. This will
|
||||
remove any internal Git references to the old commits, and run `git gc`
|
||||
against the repository. You will receive an email once it has completed.
|
||||
|
||||
## Repository cleanup
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/19376) in GitLab 11.6.
|
||||
|
||||
Repository cleanup allows you to upload a text file of objects and GitLab will remove
|
||||
internal Git references to these objects.
|
||||
|
||||
To clean up a repository:
|
||||
|
||||
1. Go to the project for the repository.
|
||||
1. Navigate to **{settings}** **Settings > Repository**.
|
||||
1. Upload a list of objects.
|
||||
1. Click **Start cleanup**.
|
||||
|
||||
This will remove any internal Git references to old commits, and run `git gc`
|
||||
against the repository. You will receive an email once it has completed.
|
||||
|
||||
These tools produce suitable output for purging history on the server:
|
||||
|
||||
- [`git filter-repo`](https://github.com/newren/git-filter-repo): use the
|
||||
`commit-map` file.
|
||||
|
||||
- [BFG](https://rtyley.github.io/bfg-repo-cleaner/): use the
|
||||
`object-id-map.old-new.txt` file.
|
||||
|
||||
NOTE: **Note:**
|
||||
Housekeeping prunes loose objects older than 2 weeks. This means objects added
in the last 2 weeks will not be removed immediately. If you have access to the
Gitaly server, you may run `git gc --prune=now` to prune all loose objects
immediately.
|
||||
|
||||
NOTE: **Note:**
|
||||
This process will remove some copies of the rewritten commits from GitLab's
|
||||
cache and database, but there are still numerous gaps in coverage - at present,
|
||||
some of the copies may persist indefinitely. [Clearing the instance
|
||||
cache](../../../administration/raketasks/maintenance.md#clear-redis-cache) may
|
||||
help to remove some of them, but it should not be depended on for security
|
||||
purposes!
|
||||
|
||||
## Exceeding storage limit
|
||||
|
||||
A GitLab Enterprise Edition administrator can set a [repository size
|
||||
limit](../../admin_area/settings/account_and_limit_settings.md) which will
|
||||
prevent you from exceeding it.
|
||||
|
||||
When a project has reached its size limit, you will not be able to push to it,
|
||||
create a new merge request, or merge existing ones. You will still be able to
|
||||
|
@ -23,7 +220,7 @@ a commit doesn't actually reduce the size of the repo since the earlier commits
|
|||
and blobs are still around. What you need to do is rewrite history with Git's
|
||||
[`filter-branch` option](https://git-scm.com/book/en/v2/Git-Tools-Rewriting-History#The-Nuclear-Option:-filter-branch),
|
||||
or an open source community-maintained tool like the
|
||||
[BFG](https://rtyley.github.io/bfg-repo-cleaner/).
|
||||
[`git filter-repo`](https://github.com/newren/git-filter-repo).
|
||||
|
||||
Note that even with that method, until `git gc` runs on the GitLab side, the
|
||||
"removed" commits and blobs will still be around. You also need to be able to
|
||||
|
@ -38,117 +235,12 @@ temporarily increase it for you, your only option is to prune all the unneeded
|
|||
stuff locally, and then create a new project on GitLab and start using that
|
||||
instead.
|
||||
|
||||
If you can continue to use the original project, we recommend [using
|
||||
BFG](#using-the-bfg-repo-cleaner), a tool that's built and
|
||||
maintained by the open source community. It's faster and simpler than
|
||||
`git filter-branch`, and GitLab can use its account of what has changed to clean
|
||||
up its own internal state, maximizing the space saved.
|
||||
|
||||
CAUTION: **Caution:**
|
||||
Make sure to first make a copy of your repository since rewriting history will
|
||||
purge the files and information you are about to delete. Also make sure to
|
||||
inform any collaborators to not use `pull` after your changes, but use `rebase`.
|
||||
|
||||
CAUTION: **Caution:**
|
||||
This process is not suitable for removing sensitive data like password or keys
|
||||
from your repository. Information about commits, including file content, is
|
||||
cached in the database, and will remain visible even after they have been
|
||||
removed from the repository.
|
||||
|
||||
## Using the BFG Repo-Cleaner
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/19376) in GitLab 11.6.
|
||||
|
||||
1. [Install BFG](https://rtyley.github.io/bfg-repo-cleaner/) from its open source community repository.
|
||||
|
||||
1. Navigate to your repository:
|
||||
|
||||
```shell
|
||||
cd my_repository/
|
||||
```
|
||||
|
||||
1. Change to the branch you want to remove the big file from:
|
||||
|
||||
```shell
|
||||
git checkout master
|
||||
```
|
||||
|
||||
1. Create a commit removing the large file from the branch, if it still exists:
|
||||
|
||||
```shell
|
||||
git rm path/to/big_file.mpg
|
||||
git commit -m 'Remove unneeded large file'
|
||||
```
|
||||
|
||||
1. Rewrite history:
|
||||
|
||||
```shell
|
||||
bfg --delete-files path/to/big_file.mpg
|
||||
```
|
||||
|
||||
An object map file will be written to `object-id-map.old-new.txt`. Keep it
|
||||
around - you'll need it for the final step!
|
||||
|
||||
1. Force-push the changes to GitLab:
|
||||
|
||||
```shell
|
||||
git push --force-with-lease origin master
|
||||
```
|
||||
|
||||
If this step fails, someone has changed the `master` branch while you were
|
||||
rewriting history. You could restore the branch and re-run BFG to preserve
|
||||
their changes, or use `git push --force` to overwrite their changes.
|
||||
|
||||
1. Navigate to **Project > Settings > Repository > Repository Cleanup**:
|
||||
|
||||
![Repository settings cleanup form](img/repository_cleanup.png)
|
||||
|
||||
Upload the `object-id-map.old-new.txt` file and press **Start cleanup**.
|
||||
This will remove any internal Git references to the old commits, and run
|
||||
`git gc` against the repository. You will receive an email once it has
|
||||
completed.
|
||||
|
||||
NOTE: **Note:**
|
||||
This process will remove some copies of the rewritten commits from GitLab's
|
||||
cache and database, but there are still numerous gaps in coverage - at present,
|
||||
some of the copies may persist indefinitely. [Clearing the instance cache](../../../administration/raketasks/maintenance.md#clear-redis-cache)
|
||||
may help to remove some of them, but it should not be depended on for security
|
||||
purposes!
|
||||
|
||||
## Using `git filter-branch`
|
||||
|
||||
1. Navigate to your repository:
|
||||
|
||||
```shell
|
||||
cd my_repository/
|
||||
```
|
||||
|
||||
1. Change to the branch you want to remove the big file from:
|
||||
|
||||
```shell
|
||||
git checkout master
|
||||
```
|
||||
|
||||
1. Use `filter-branch` to remove the big file:
|
||||
|
||||
```shell
|
||||
git filter-branch --force --tree-filter 'rm -f path/to/big_file.mpg' HEAD
|
||||
```
|
||||
|
||||
1. Instruct Git to purge the unwanted data:
|
||||
|
||||
```shell
|
||||
git reflog expire --expire=now --all && git gc --prune=now --aggressive
|
||||
```
|
||||
|
||||
1. Lastly, force push to the repository:
|
||||
|
||||
```shell
|
||||
git push --force origin master
|
||||
```
|
||||
|
||||
Your repository should now be below the size limit.
|
||||
|
||||
<!-- ## Troubleshooting
|
||||
|
||||
Include any troubleshooting steps that you can foresee. If you know beforehand what issues
|
||||
|
|
|
@ -3,79 +3,8 @@
|
|||
# Module providing methods for dealing with separating a tree-ish string and a
|
||||
# file path string when combined in a request parameter
|
||||
module ExtractsPath
|
||||
# Raised when given an invalid file path
|
||||
InvalidPathError = Class.new(StandardError)
|
||||
|
||||
# Given a string containing both a Git tree-ish, such as a branch or tag, and
|
||||
# a filesystem path joined by forward slashes, attempts to separate the two.
|
||||
#
|
||||
# Expects a @project instance variable to contain the active project. This is
|
||||
# used to check the input against a list of valid repository refs.
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# # No @project available
|
||||
# extract_ref('master')
|
||||
# # => ['', '']
|
||||
#
|
||||
# extract_ref('master')
|
||||
# # => ['master', '']
|
||||
#
|
||||
# extract_ref("f4b14494ef6abf3d144c28e4af0c20143383e062/CHANGELOG")
|
||||
# # => ['f4b14494ef6abf3d144c28e4af0c20143383e062', 'CHANGELOG']
|
||||
#
|
||||
# extract_ref("v2.0.0/README.md")
|
||||
# # => ['v2.0.0', 'README.md']
|
||||
#
|
||||
# extract_ref('master/app/models/project.rb')
|
||||
# # => ['master', 'app/models/project.rb']
|
||||
#
|
||||
# extract_ref('issues/1234/app/models/project.rb')
|
||||
# # => ['issues/1234', 'app/models/project.rb']
|
||||
#
|
||||
# # Given an invalid branch, we fall back to just splitting on the first slash
|
||||
# extract_ref('non/existent/branch/README.md')
|
||||
# # => ['non', 'existent/branch/README.md']
|
||||
#
|
||||
# Returns an Array where the first value is the tree-ish and the second is the
|
||||
# path
|
||||
def extract_ref(id)
|
||||
pair = ['', '']
|
||||
|
||||
return pair unless @project # rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
|
||||
if id =~ /^(\h{40})(.+)/
|
||||
# If the ref appears to be a SHA, we're done, just split the string
|
||||
pair = $~.captures
|
||||
else
|
||||
# Otherwise, attempt to detect the ref using a list of the project's
|
||||
# branches and tags
|
||||
|
||||
# Append a trailing slash if we only get a ref and no file path
|
||||
unless id.ends_with?('/')
|
||||
id = [id, '/'].join
|
||||
end
|
||||
|
||||
valid_refs = ref_names.select { |v| id.start_with?("#{v}/") }
|
||||
|
||||
if valid_refs.empty?
|
||||
# No exact ref match, so just try our best
|
||||
pair = id.match(%r{([^/]+)(.*)}).captures
|
||||
else
|
||||
# There is a distinct possibility that multiple refs prefix the ID.
|
||||
# Use the longest match to maximize the chance that we have the
|
||||
# right ref.
|
||||
best_match = valid_refs.max_by(&:length)
|
||||
# Partition the string into the ref and the path, ignoring the empty first value
|
||||
pair = id.partition(best_match)[1..-1]
|
||||
end
|
||||
end
|
||||
|
||||
# Remove ending slashes from path
|
||||
pair[1].gsub!(%r{^/|/$}, '')
|
||||
|
||||
pair
|
||||
end
|
||||
extend ::Gitlab::Utils::Override
|
||||
include ExtractsRef
|
||||
|
||||
# If we have an ID of 'foo.atom', and the controller provides Atom and HTML
|
||||
# formats, then we have to check if the request was for the Atom version of
|
||||
|
@ -90,34 +19,17 @@ module ExtractsPath
|
|||
valid_refs.max_by(&:length)
|
||||
end
|
||||
|
||||
# Assigns common instance variables for views working with Git tree-ish objects
|
||||
#
|
||||
# Assignments are:
|
||||
#
|
||||
# - @id - A string representing the joined ref and path
|
||||
# - @ref - A string representing the ref (e.g., the branch, tag, or commit SHA)
|
||||
# - @path - A string representing the filesystem path
|
||||
# - @commit - A Commit representing the commit from the given ref
|
||||
#
|
||||
# If the :id parameter appears to be requesting a specific response format,
|
||||
# that will be handled as well.
|
||||
#
|
||||
# If there is no path and the ref doesn't exist in the repo, try to resolve
|
||||
# the ref without an '.atom' suffix. If _that_ ref is found, set the request's
|
||||
# format to Atom manually.
|
||||
# Extends the method to handle if there is no path and the ref doesn't
|
||||
# exist in the repo, try to resolve the ref without an '.atom' suffix.
|
||||
# If _that_ ref is found, set the request's format to Atom manually.
|
||||
#
|
||||
# Automatically renders `not_found!` if a valid tree path could not be
|
||||
# resolved (e.g., when a user inserts an invalid path or ref).
|
||||
#
|
||||
# rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
override :assign_ref_vars
|
||||
def assign_ref_vars
|
||||
@id = get_id
|
||||
@ref, @path = extract_ref(@id)
|
||||
@repo = @project.repository
|
||||
@ref.strip!
|
||||
|
||||
raise InvalidPathError if @ref.match?(/\s/)
|
||||
|
||||
@commit = @repo.commit(@ref)
|
||||
super
|
||||
|
||||
if @path.empty? && !@commit && @id.ends_with?('.atom')
|
||||
@id = @ref = extract_ref_without_atom(@id)
|
||||
|
@ -135,10 +47,6 @@ module ExtractsPath
|
|||
end
|
||||
# rubocop:enable Gitlab/ModuleWithInstanceVariables
|
||||
|
||||
def tree
|
||||
@tree ||= @repo.tree(@commit.id, @path) # rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
end
|
||||
|
||||
def lfs_blob_ids
|
||||
blob_ids = tree.blobs.map(&:id)
|
||||
|
||||
|
@ -146,21 +54,13 @@ module ExtractsPath
|
|||
# the current Blob in order to determine if it's a LFS object
|
||||
blob_ids = Array.wrap(@repo.blob_at(@commit.id, @path)&.id) if blob_ids.empty? # rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
|
||||
@lfs_blob_ids = Gitlab::Git::Blob.batch_lfs_pointers(@project.repository, blob_ids).map(&:id) # rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
@lfs_blob_ids = Gitlab::Git::Blob.batch_lfs_pointers(repository_container.repository, blob_ids).map(&:id) # rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# overridden in subclasses, do not remove
|
||||
def get_id
|
||||
id = [params[:id] || params[:ref]]
|
||||
id << "/" + params[:path] unless params[:path].blank?
|
||||
id.join
|
||||
end
|
||||
|
||||
def ref_names
|
||||
return [] unless @project # rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
|
||||
@ref_names ||= @project.repository.ref_names # rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
override :repository_container
|
||||
def repository_container
|
||||
@project
|
||||
end
|
||||
end
|
||||
|
|
128
lib/extracts_ref.rb
Normal file
128
lib/extracts_ref.rb
Normal file
|
@ -0,0 +1,128 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
# Module providing methods for dealing with separating a tree-ish string and a
|
||||
# file path string when combined in a request parameter
|
||||
# Can be extended for different types of repository object, e.g. Project or Snippet
|
||||
module ExtractsRef
|
||||
InvalidPathError = Class.new(StandardError)
|
||||
|
||||
# Given a string containing both a Git tree-ish, such as a branch or tag, and
|
||||
# a filesystem path joined by forward slashes, attempts to separate the two.
|
||||
#
|
||||
# Expects a repository_container method that returns the active repository object. This is
|
||||
# used to check the input against a list of valid repository refs.
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# # No repository_container available
|
||||
# extract_ref('master')
|
||||
# # => ['', '']
|
||||
#
|
||||
# extract_ref('master')
|
||||
# # => ['master', '']
|
||||
#
|
||||
# extract_ref("f4b14494ef6abf3d144c28e4af0c20143383e062/CHANGELOG")
|
||||
# # => ['f4b14494ef6abf3d144c28e4af0c20143383e062', 'CHANGELOG']
|
||||
#
|
||||
# extract_ref("v2.0.0/README.md")
|
||||
# # => ['v2.0.0', 'README.md']
|
||||
#
|
||||
# extract_ref('master/app/models/project.rb')
|
||||
# # => ['master', 'app/models/project.rb']
|
||||
#
|
||||
# extract_ref('issues/1234/app/models/project.rb')
|
||||
# # => ['issues/1234', 'app/models/project.rb']
|
||||
#
|
||||
# # Given an invalid branch, we fall back to just splitting on the first slash
|
||||
# extract_ref('non/existent/branch/README.md')
|
||||
# # => ['non', 'existent/branch/README.md']
|
||||
#
|
||||
# Returns an Array where the first value is the tree-ish and the second is the
|
||||
# path
|
||||
def extract_ref(id)
|
||||
pair = ['', '']
|
||||
|
||||
return pair unless repository_container
|
||||
|
||||
if id =~ /^(\h{40})(.+)/
|
||||
# If the ref appears to be a SHA, we're done, just split the string
|
||||
pair = $~.captures
|
||||
else
|
||||
# Otherwise, attempt to detect the ref using a list of the repository_container's
|
||||
# branches and tags
|
||||
|
||||
# Append a trailing slash if we only get a ref and no file path
|
||||
unless id.ends_with?('/')
|
||||
id = [id, '/'].join
|
||||
end
|
||||
|
||||
valid_refs = ref_names.select { |v| id.start_with?("#{v}/") }
|
||||
|
||||
if valid_refs.empty?
|
||||
# No exact ref match, so just try our best
|
||||
pair = id.match(%r{([^/]+)(.*)}).captures
|
||||
else
|
||||
# There is a distinct possibility that multiple refs prefix the ID.
|
||||
# Use the longest match to maximize the chance that we have the
|
||||
# right ref.
|
||||
best_match = valid_refs.max_by(&:length)
|
||||
# Partition the string into the ref and the path, ignoring the empty first value
|
||||
pair = id.partition(best_match)[1..-1]
|
||||
end
|
||||
end
|
||||
|
||||
pair[0] = pair[0].strip
|
||||
|
||||
# Remove ending slashes from path
|
||||
pair[1].gsub!(%r{^/|/$}, '')
|
||||
|
||||
pair
|
||||
end
|
||||
|
||||
# Assigns common instance variables for views working with Git tree-ish objects
|
||||
#
|
||||
# Assignments are:
|
||||
#
|
||||
# - @id - A string representing the joined ref and path
|
||||
# - @ref - A string representing the ref (e.g., the branch, tag, or commit SHA)
|
||||
# - @path - A string representing the filesystem path
|
||||
# - @commit - A Commit representing the commit from the given ref
|
||||
#
|
||||
# If the :id parameter appears to be requesting a specific response format,
|
||||
# that will be handled as well.
|
||||
#
|
||||
# rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
def assign_ref_vars
|
||||
@id = get_id
|
||||
@ref, @path = extract_ref(@id)
|
||||
@repo = repository_container.repository
|
||||
|
||||
raise InvalidPathError if @ref.match?(/\s/)
|
||||
|
||||
@commit = @repo.commit(@ref)
|
||||
end
|
||||
# rubocop:enable Gitlab/ModuleWithInstanceVariables
|
||||
|
||||
def tree
|
||||
@tree ||= @repo.tree(@commit.id, @path) # rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# overridden in subclasses, do not remove
|
||||
def get_id
|
||||
id = [params[:id] || params[:ref]]
|
||||
id << "/" + params[:path] unless params[:path].blank?
|
||||
id.join
|
||||
end
|
||||
|
||||
def ref_names
|
||||
return [] unless repository_container
|
||||
|
||||
@ref_names ||= repository_container.repository.ref_names # rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
end
|
||||
|
||||
def repository_container
|
||||
raise NotImplementedError
|
||||
end
|
||||
end
|
|
@@ -1,4 +1,6 @@
import actions from '~/boards/stores/actions';
import * as types from '~/boards/stores/mutation_types';
import testAction from 'helpers/vuex_action_helper';

const expectNotImplemented = action => {
  it('is not implemented', () => {

@@ -7,7 +9,20 @@ const expectNotImplemented = action => {
};

describe('setEndpoints', () => {
  expectNotImplemented(actions.setEndpoints);
  it('sets endpoints object', () => {
    const mockEndpoints = {
      foo: 'bar',
      bar: 'baz',
    };

    return testAction(
      actions.setEndpoints,
      mockEndpoints,
      {},
      [{ type: types.SET_ENDPOINTS, payload: mockEndpoints }],
      [],
    );
  });
});

describe('fetchLists', () => {
@@ -1,4 +1,6 @@
import mutations from '~/boards/stores/mutations';
import * as types from '~/boards/stores/mutation_types';
import defaultState from '~/boards/stores/state';

const expectNotImplemented = action => {
  it('is not implemented', () => {

@@ -6,86 +8,107 @@ const expectNotImplemented = action => {
  });
};

describe('SET_ENDPOINTS', () => {
  expectNotImplemented(mutations.SET_ENDPOINTS);
});
describe('Board Store Mutations', () => {
  let state;

describe('REQUEST_ADD_LIST', () => {
  expectNotImplemented(mutations.REQUEST_ADD_LIST);
});
  beforeEach(() => {
    state = defaultState();
  });

describe('RECEIVE_ADD_LIST_SUCCESS', () => {
  expectNotImplemented(mutations.RECEIVE_ADD_LIST_SUCCESS);
});
  describe('SET_ENDPOINTS', () => {
    it('Should set initial Boards data to state', () => {
      const endpoints = {
        boardsEndpoint: '/boards/',
        recentBoardsEndpoint: '/boards/',
        listsEndpoint: '/boards/lists',
        bulkUpdatePath: '/boards/bulkUpdate',
        boardId: 1,
        fullPath: 'gitlab-org',
      };

describe('RECEIVE_ADD_LIST_ERROR', () => {
  expectNotImplemented(mutations.RECEIVE_ADD_LIST_ERROR);
});
      mutations[types.SET_ENDPOINTS](state, endpoints);

describe('REQUEST_UPDATE_LIST', () => {
  expectNotImplemented(mutations.REQUEST_UPDATE_LIST);
});
      expect(state.endpoints).toEqual(endpoints);
    });
  });

describe('RECEIVE_UPDATE_LIST_SUCCESS', () => {
  expectNotImplemented(mutations.RECEIVE_UPDATE_LIST_SUCCESS);
});
  describe('REQUEST_ADD_LIST', () => {
    expectNotImplemented(mutations.REQUEST_ADD_LIST);
  });

describe('RECEIVE_UPDATE_LIST_ERROR', () => {
  expectNotImplemented(mutations.RECEIVE_UPDATE_LIST_ERROR);
});
  describe('RECEIVE_ADD_LIST_SUCCESS', () => {
    expectNotImplemented(mutations.RECEIVE_ADD_LIST_SUCCESS);
  });

describe('REQUEST_REMOVE_LIST', () => {
  expectNotImplemented(mutations.REQUEST_REMOVE_LIST);
});
  describe('RECEIVE_ADD_LIST_ERROR', () => {
    expectNotImplemented(mutations.RECEIVE_ADD_LIST_ERROR);
  });

describe('RECEIVE_REMOVE_LIST_SUCCESS', () => {
  expectNotImplemented(mutations.RECEIVE_REMOVE_LIST_SUCCESS);
});
  describe('REQUEST_UPDATE_LIST', () => {
    expectNotImplemented(mutations.REQUEST_UPDATE_LIST);
  });

describe('RECEIVE_REMOVE_LIST_ERROR', () => {
  expectNotImplemented(mutations.RECEIVE_REMOVE_LIST_ERROR);
});
  describe('RECEIVE_UPDATE_LIST_SUCCESS', () => {
    expectNotImplemented(mutations.RECEIVE_UPDATE_LIST_SUCCESS);
  });

describe('REQUEST_ADD_ISSUE', () => {
  expectNotImplemented(mutations.REQUEST_ADD_ISSUE);
});
  describe('RECEIVE_UPDATE_LIST_ERROR', () => {
    expectNotImplemented(mutations.RECEIVE_UPDATE_LIST_ERROR);
  });

describe('RECEIVE_ADD_ISSUE_SUCCESS', () => {
  expectNotImplemented(mutations.RECEIVE_ADD_ISSUE_SUCCESS);
});
  describe('REQUEST_REMOVE_LIST', () => {
    expectNotImplemented(mutations.REQUEST_REMOVE_LIST);
  });

describe('RECEIVE_ADD_ISSUE_ERROR', () => {
  expectNotImplemented(mutations.RECEIVE_ADD_ISSUE_ERROR);
});
  describe('RECEIVE_REMOVE_LIST_SUCCESS', () => {
    expectNotImplemented(mutations.RECEIVE_REMOVE_LIST_SUCCESS);
  });

describe('REQUEST_MOVE_ISSUE', () => {
  expectNotImplemented(mutations.REQUEST_MOVE_ISSUE);
});
  describe('RECEIVE_REMOVE_LIST_ERROR', () => {
    expectNotImplemented(mutations.RECEIVE_REMOVE_LIST_ERROR);
  });

describe('RECEIVE_MOVE_ISSUE_SUCCESS', () => {
  expectNotImplemented(mutations.RECEIVE_MOVE_ISSUE_SUCCESS);
});
  describe('REQUEST_ADD_ISSUE', () => {
    expectNotImplemented(mutations.REQUEST_ADD_ISSUE);
  });

describe('RECEIVE_MOVE_ISSUE_ERROR', () => {
  expectNotImplemented(mutations.RECEIVE_MOVE_ISSUE_ERROR);
});
  describe('RECEIVE_ADD_ISSUE_SUCCESS', () => {
    expectNotImplemented(mutations.RECEIVE_ADD_ISSUE_SUCCESS);
  });

describe('REQUEST_UPDATE_ISSUE', () => {
  expectNotImplemented(mutations.REQUEST_UPDATE_ISSUE);
});
  describe('RECEIVE_ADD_ISSUE_ERROR', () => {
    expectNotImplemented(mutations.RECEIVE_ADD_ISSUE_ERROR);
  });

describe('RECEIVE_UPDATE_ISSUE_SUCCESS', () => {
  expectNotImplemented(mutations.RECEIVE_UPDATE_ISSUE_SUCCESS);
});
  describe('REQUEST_MOVE_ISSUE', () => {
    expectNotImplemented(mutations.REQUEST_MOVE_ISSUE);
  });

describe('RECEIVE_UPDATE_ISSUE_ERROR', () => {
  expectNotImplemented(mutations.RECEIVE_UPDATE_ISSUE_ERROR);
});
  describe('RECEIVE_MOVE_ISSUE_SUCCESS', () => {
    expectNotImplemented(mutations.RECEIVE_MOVE_ISSUE_SUCCESS);
  });

describe('SET_CURRENT_PAGE', () => {
  expectNotImplemented(mutations.SET_CURRENT_PAGE);
});
  describe('RECEIVE_MOVE_ISSUE_ERROR', () => {
    expectNotImplemented(mutations.RECEIVE_MOVE_ISSUE_ERROR);
  });

describe('TOGGLE_EMPTY_STATE', () => {
  expectNotImplemented(mutations.TOGGLE_EMPTY_STATE);
  describe('REQUEST_UPDATE_ISSUE', () => {
    expectNotImplemented(mutations.REQUEST_UPDATE_ISSUE);
  });

  describe('RECEIVE_UPDATE_ISSUE_SUCCESS', () => {
    expectNotImplemented(mutations.RECEIVE_UPDATE_ISSUE_SUCCESS);
  });

  describe('RECEIVE_UPDATE_ISSUE_ERROR', () => {
    expectNotImplemented(mutations.RECEIVE_UPDATE_ISSUE_ERROR);
  });

  describe('SET_CURRENT_PAGE', () => {
    expectNotImplemented(mutations.SET_CURRENT_PAGE);
  });

  describe('TOGGLE_EMPTY_STATE', () => {
    expectNotImplemented(mutations.TOGGLE_EMPTY_STATE);
  });
});
@@ -519,11 +519,8 @@ describe('ErrorDetails', () => {
      Tracking.event.mockClear();
      findUpdateIgnoreStatusButton().vm.$emit('click');
      setImmediate(() => {
        const { category, action, label } = trackErrorStatusUpdateOptions;
        expect(Tracking.event).toHaveBeenCalledWith(category, action, {
          label,
          property: 'ignored',
        });
        const { category, action } = trackErrorStatusUpdateOptions('ignored');
        expect(Tracking.event).toHaveBeenCalledWith(category, action);
      });
    });

@@ -531,11 +528,8 @@ describe('ErrorDetails', () => {
      Tracking.event.mockClear();
      findUpdateResolveStatusButton().vm.$emit('click');
      setImmediate(() => {
        const { category, action, label } = trackErrorStatusUpdateOptions;
        expect(Tracking.event).toHaveBeenCalledWith(category, action, {
          label,
          property: 'resolved',
        });
        const { category, action } = trackErrorStatusUpdateOptions('resolved');
        expect(Tracking.event).toHaveBeenCalledWith(category, action);
      });
    });

@@ -491,11 +491,8 @@ describe('ErrorTrackingList', () => {
      });

      setImmediate(() => {
        const { category, action, label } = trackErrorStatusUpdateOptions;
        expect(Tracking.event).toHaveBeenCalledWith(category, action, {
          label,
          property: status,
        });
        const { category, action } = trackErrorStatusUpdateOptions(status);
        expect(Tracking.event).toHaveBeenCalledWith(category, action);
      });
    });
  });
@@ -22,6 +22,7 @@ describe('IDE branch item', () => {
        isActive: false,
        ...props,
      },
      router,
    });
  }

@@ -1,63 +1,77 @@
import Vue from 'vue';
import { mount } from '@vue/test-utils';
import router from '~/ide/ide_router';
import Item from '~/ide/components/merge_requests/item.vue';
import mountCompontent from '../../../helpers/vue_mount_component_helper';

const TEST_ITEM = {
  iid: 1,
  projectPathWithNamespace: 'gitlab-org/gitlab-ce',
  title: 'Merge request title',
};

describe('IDE merge request item', () => {
  const Component = Vue.extend(Item);
  let vm;
  let wrapper;

  beforeEach(() => {
    vm = mountCompontent(Component, {
      item: {
        iid: 1,
        projectPathWithNamespace: 'gitlab-org/gitlab-ce',
        title: 'Merge request title',
  const createComponent = (props = {}) => {
    wrapper = mount(Item, {
      propsData: {
        item: {
          ...TEST_ITEM,
        },
        currentId: `${TEST_ITEM.iid}`,
        currentProjectId: TEST_ITEM.projectPathWithNamespace,
        ...props,
      },
      currentId: '1',
      currentProjectId: 'gitlab-org/gitlab-ce',
      router,
    });
  });
  };
  const findIcon = () => wrapper.find('.ic-mobile-issue-close');

  afterEach(() => {
    vm.$destroy();
    wrapper.destroy();
    wrapper = null;
  });

  it('renders merge requests data', () => {
    expect(vm.$el.textContent).toContain('Merge request title');
    expect(vm.$el.textContent).toContain('gitlab-org/gitlab-ce!1');
  });
  describe('default', () => {
    beforeEach(() => {
      createComponent();
    });

  it('renders link with href', () => {
    const expectedHref = router.resolve(
      `/project/${vm.item.projectPathWithNamespace}/merge_requests/${vm.item.iid}`,
    ).href;
    it('renders merge requests data', () => {
      expect(wrapper.text()).toContain('Merge request title');
      expect(wrapper.text()).toContain('gitlab-org/gitlab-ce!1');
    });

    expect(vm.$el.tagName.toLowerCase()).toBe('a');
    expect(vm.$el).toHaveAttr('href', expectedHref);
  });
    it('renders link with href', () => {
      const expectedHref = router.resolve(
        `/project/${TEST_ITEM.projectPathWithNamespace}/merge_requests/${TEST_ITEM.iid}`,
      ).href;

  it('renders icon if ID matches currentId', () => {
    expect(vm.$el.querySelector('.ic-mobile-issue-close')).not.toBe(null);
  });
      expect(wrapper.element.tagName.toLowerCase()).toBe('a');
      expect(wrapper.attributes('href')).toBe(expectedHref);
    });

  it('does not render icon if ID does not match currentId', done => {
    vm.currentId = '2';

    vm.$nextTick(() => {
      expect(vm.$el.querySelector('.ic-mobile-issue-close')).toBe(null);

      done();
    it('renders icon if ID matches currentId', () => {
      expect(findIcon().exists()).toBe(true);
    });
  });

  it('does not render icon if project ID does not match', done => {
    vm.currentProjectId = 'test/test';
  describe('with different currentId', () => {
    beforeEach(() => {
      createComponent({ currentId: `${TEST_ITEM.iid + 1}` });
    });

    vm.$nextTick(() => {
      expect(vm.$el.querySelector('.ic-mobile-issue-close')).toBe(null);
    it('does not render icon', () => {
      expect(findIcon().exists()).toBe(false);
    });
  });

      done();
  describe('with different project ID', () => {
    beforeEach(() => {
      createComponent({ currentProjectId: 'test/test' });
    });

    it('does not render icon', () => {
      expect(findIcon().exists()).toBe(false);
    });
  });
});
19
spec/frontend/ide/stores/modules/router/actions_spec.js
Normal file
@@ -0,0 +1,19 @@
import * as actions from '~/ide/stores/modules/router/actions';
import * as types from '~/ide/stores/modules/router/mutation_types';
import testAction from 'helpers/vuex_action_helper';

const TEST_PATH = 'test/path/abc';

describe('ide/stores/modules/router/actions', () => {
  describe('push', () => {
    it('commits mutation', () => {
      return testAction(
        actions.push,
        TEST_PATH,
        {},
        [{ type: types.PUSH, payload: TEST_PATH }],
        [],
      );
    });
  });
});
23
spec/frontend/ide/stores/modules/router/mutations_spec.js
Normal file
@@ -0,0 +1,23 @@
import mutations from '~/ide/stores/modules/router/mutations';
import * as types from '~/ide/stores/modules/router/mutation_types';
import createState from '~/ide/stores/modules/router/state';

const TEST_PATH = 'test/path/abc';

describe('ide/stores/modules/router/mutations', () => {
  let state;

  beforeEach(() => {
    state = createState();
  });

  describe(types.PUSH, () => {
    it('updates state', () => {
      expect(state.fullPath).toBe('');

      mutations[types.PUSH](state, TEST_PATH);

      expect(state.fullPath).toBe(TEST_PATH);
    });
  });
});
158
spec/frontend/ide/sync_router_and_store_spec.js
Normal file
@@ -0,0 +1,158 @@
import Vue from 'vue';
import VueRouter from 'vue-router';
import Vuex from 'vuex';
import routerModule from '~/ide/stores/modules/router';
import { syncRouterAndStore } from '~/ide/sync_router_and_store';
import waitForPromises from 'helpers/wait_for_promises';

const TEST_ROUTE = '/test/lorem/ipsum';

Vue.use(Vuex);

describe('~/ide/sync_router_and_store', () => {
  let unsync;
  let router;
  let store;
  let onRouterChange;

  const createSync = () => {
    unsync = syncRouterAndStore(router, store);
  };

  const getRouterCurrentPath = () => router.currentRoute.fullPath;
  const getStoreCurrentPath = () => store.state.router.fullPath;
  const updateRouter = path => {
    router.push(path);
    return waitForPromises();
  };
  const updateStore = path => {
    store.dispatch('router/push', path);
    return waitForPromises();
  };

  beforeEach(() => {
    router = new VueRouter();
    store = new Vuex.Store({
      modules: {
        router: routerModule,
      },
    });
    jest.spyOn(store, 'dispatch');

    onRouterChange = jest.fn();
    router.beforeEach((to, from, next) => {
      onRouterChange(to, from);
      next();
    });
  });

  afterEach(() => {
    unsync();
    unsync = null;
  });

  it('keeps store and router in sync', async () => {
    createSync();

    await updateRouter('/test/test');
    await updateRouter('/test/test');
    await updateStore('123/abc');
    await updateRouter('def');

    // Even though we pushed relative paths, the store and router kept track of the resulting fullPath
    expect(getRouterCurrentPath()).toBe('/test/123/def');
    expect(getStoreCurrentPath()).toBe('/test/123/def');
  });

  describe('default', () => {
    beforeEach(() => {
      createSync();
    });

    it('store is default', () => {
      expect(store.dispatch).not.toHaveBeenCalled();
      expect(getStoreCurrentPath()).toBe('');
    });

    it('router is default', () => {
      expect(onRouterChange).not.toHaveBeenCalled();
      expect(getRouterCurrentPath()).toBe('/');
    });

    describe('when store changes', () => {
      beforeEach(() => {
        updateStore(TEST_ROUTE);
      });

      it('store is updated', () => {
        // let's make sure the action isn't dispatched more than necessary
        expect(store.dispatch).toHaveBeenCalledTimes(1);
        expect(getStoreCurrentPath()).toBe(TEST_ROUTE);
      });

      it('router is updated', () => {
        expect(onRouterChange).toHaveBeenCalledTimes(1);
        expect(getRouterCurrentPath()).toBe(TEST_ROUTE);
      });

      describe('when store changes again to the same thing', () => {
        beforeEach(() => {
          onRouterChange.mockClear();
          updateStore(TEST_ROUTE);
        });

        it('doesnt change router again', () => {
          expect(onRouterChange).not.toHaveBeenCalled();
        });
      });
    });

    describe('when router changes', () => {
      beforeEach(() => {
        updateRouter(TEST_ROUTE);
      });

      it('store is updated', () => {
        expect(store.dispatch).toHaveBeenCalledTimes(1);
        expect(getStoreCurrentPath()).toBe(TEST_ROUTE);
      });

      it('router is updated', () => {
        // let's make sure the router change isn't triggered more than necessary
        expect(onRouterChange).toHaveBeenCalledTimes(1);
        expect(getRouterCurrentPath()).toBe(TEST_ROUTE);
      });

      describe('when router changes again to the same thing', () => {
        beforeEach(() => {
          store.dispatch.mockClear();
          updateRouter(TEST_ROUTE);
        });

        it('doesnt change store again', () => {
          expect(store.dispatch).not.toHaveBeenCalled();
        });
      });
    });

    describe('when disposed', () => {
      beforeEach(() => {
        unsync();
      });

      it('a store change does not trigger a router change', () => {
        updateStore(TEST_ROUTE);

        expect(getRouterCurrentPath()).toBe('/');
        expect(onRouterChange).not.toHaveBeenCalled();
      });

      it('a router change does not trigger a store change', () => {
        updateRouter(TEST_ROUTE);

        expect(getStoreCurrentPath()).toBe('');
        expect(store.dispatch).not.toHaveBeenCalled();
      });
    });
  });
});
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
import timezoneMock from 'timezone-mock';
import { GlColumnChart } from '@gitlab/ui/dist/charts';
import ColumnChart from '~/monitoring/components/charts/column.vue';

@@ -18,10 +19,7 @@ const dataValues = [
describe('Column component', () => {
  let wrapper;

  const findChart = () => wrapper.find(GlColumnChart);
  const chartProps = prop => findChart().props(prop);

  beforeEach(() => {
  const createWrapper = (props = {}) => {
    wrapper = shallowMount(ColumnChart, {
      propsData: {
        graphData: {

@@ -41,14 +39,60 @@ describe('Column component', () => {
          },
        ],
      },
        ...props,
      },
    });
  };
  const findChart = () => wrapper.find(GlColumnChart);
  const chartProps = prop => findChart().props(prop);

  beforeEach(() => {
    createWrapper();
  });

  afterEach(() => {
    wrapper.destroy();
  });

  describe('xAxisLabel', () => {
    const mockDate = Date.UTC(2020, 4, 26, 20); // 8:00 PM in GMT

    const useXAxisFormatter = date => {
      const { xAxis } = chartProps('option');
      const { formatter } = xAxis.axisLabel;
      return formatter(date);
    };

    it('x-axis is formatted correctly in AM/PM format', () => {
      expect(useXAxisFormatter(mockDate)).toEqual('8:00 PM');
    });

    describe('when in PT timezone', () => {
      beforeAll(() => {
        timezoneMock.register('US/Pacific');
      });

      afterAll(() => {
        timezoneMock.unregister();
      });

      it('by default, values are formatted in PT', () => {
        createWrapper();
        expect(useXAxisFormatter(mockDate)).toEqual('1:00 PM');
      });

      it('when the chart uses local timezone, y-axis is formatted in PT', () => {
        createWrapper({ timezone: 'LOCAL' });
        expect(useXAxisFormatter(mockDate)).toEqual('1:00 PM');
      });

      it('when the chart uses UTC, y-axis is formatted in UTC', () => {
        createWrapper({ timezone: 'UTC' });
        expect(useXAxisFormatter(mockDate)).toEqual('8:00 PM');
      });
    });
  });

  describe('wrapped components', () => {
    describe('GitLab UI column chart', () => {
      it('is a Vue instance', () => {
@@ -513,6 +513,34 @@ describe('Dashboard Panel', () => {
    });
  });

  describe('panel timezone', () => {
    it('displays a time chart in local timezone', () => {
      createWrapper();
      expect(findTimeChart().props('timezone')).toBe('LOCAL');
    });

    it('displays a heatmap in local timezone', () => {
      createWrapper({ graphData: graphDataPrometheusQueryRangeMultiTrack });
      expect(wrapper.find(MonitorHeatmapChart).props('timezone')).toBe('LOCAL');
    });

    describe('when timezone is set to UTC', () => {
      beforeEach(() => {
        store = createStore({ dashboardTimezone: 'UTC' });
      });

      it('displays a time chart with UTC', () => {
        createWrapper();
        expect(findTimeChart().props('timezone')).toBe('UTC');
      });

      it('displays a heatmap with UTC', () => {
        createWrapper({ graphData: graphDataPrometheusQueryRangeMultiTrack });
        expect(wrapper.find(MonitorHeatmapChart).props('timezone')).toBe('UTC');
      });
    });
  });

  describe('Expand to full screen', () => {
    const findExpandBtn = () => wrapper.find({ ref: 'expandBtn' });
@@ -36,8 +36,9 @@ describe('Dashboard', () => {
  let wrapper;
  let mock;

  const findDashboardHeader = () => wrapper.find(DashboardHeader);
  const findEnvironmentsDropdown = () =>
    wrapper.find(DashboardHeader).find({ ref: 'monitorEnvironmentsDropdown' });
    findDashboardHeader().find({ ref: 'monitorEnvironmentsDropdown' });
  const findAllEnvironmentsDropdownItems = () => findEnvironmentsDropdown().findAll(GlDropdownItem);
  const setSearchTerm = searchTerm => {
    store.commit(`monitoringDashboard/${types.SET_ENVIRONMENTS_FILTER}`, searchTerm);

@@ -805,6 +806,57 @@ describe('Dashboard', () => {
    });
  });

  describe('dashboard timezone', () => {
    const setupWithTimezone = value => {
      store = createStore({ dashboardTimezone: value });
      setupStoreWithData(store);
      createShallowWrapper({ hasMetrics: true });
      return wrapper.vm.$nextTick;
    };

    describe('local timezone is enabled by default', () => {
      beforeEach(() => {
        return setupWithTimezone();
      });

      it('shows the data time picker in local timezone', () => {
        expect(
          findDashboardHeader()
            .find(DateTimePicker)
            .props('utc'),
        ).toBe(false);
      });
    });

    describe('when LOCAL timezone is enabled', () => {
      beforeEach(() => {
        return setupWithTimezone('LOCAL');
      });

      it('shows the data time picker in local timezone', () => {
        expect(
          findDashboardHeader()
            .find(DateTimePicker)
            .props('utc'),
        ).toBe(false);
      });
    });

    describe('when UTC timezone is enabled', () => {
      beforeEach(() => {
        return setupWithTimezone('UTC');
      });

      it('shows the data time picker in UTC format', () => {
        expect(
          findDashboardHeader()
            .find(DateTimePicker)
            .props('utc'),
        ).toBe(true);
      });
    });
  });

  describe('cluster health', () => {
    beforeEach(() => {
      createShallowWrapper({ hasMetrics: true, showHeader: false });
@@ -38,6 +38,7 @@ const rawPathMock = '/foo/bar';
const rawProjectPathMock = '/project/path';
const newlyEditedSnippetUrl = 'http://foo.bar';
const apiError = { message: 'Ufff' };
const mutationError = 'Bummer';

const defaultProps = {
  snippetGid: 'gid://gitlab/PersonalSnippet/42',

@@ -60,10 +61,26 @@ describe('Snippet Edit app', () => {
    },
  });

  const resolveMutateWithErrors = jest.fn().mockResolvedValue({
    data: {
      updateSnippet: {
        errors: [mutationError],
        snippet: {
          webUrl: newlyEditedSnippetUrl,
        },
      },
      createSnippet: {
        errors: [mutationError],
        snippet: null,
      },
    },
  });

  const rejectMutation = jest.fn().mockRejectedValue(apiError);

  const mutationTypes = {
    RESOLVE: resolveMutate,
    RESOLVE_WITH_ERRORS: resolveMutateWithErrors,
    REJECT: rejectMutation,
  };

@@ -284,6 +301,35 @@ describe('Snippet Edit app', () => {
    });
  });

  it.each`
    newSnippet | projectPath           | mutationName
    ${true}    | ${rawProjectPathMock} | ${'CreateSnippetMutation with projectPath'}
    ${true}    | ${''}                 | ${'CreateSnippetMutation without projectPath'}
    ${false}   | ${rawProjectPathMock} | ${'UpdateSnippetMutation with projectPath'}
    ${false}   | ${''}                 | ${'UpdateSnippetMutation without projectPath'}
  `(
    'does not redirect to snippet view if the seemingly successful' +
      ' $mutationName response contains errors',
    ({ newSnippet, projectPath }) => {
      createComponent({
        data: {
          newSnippet,
        },
        props: {
          ...defaultProps,
          projectPath,
        },
        mutationRes: mutationTypes.RESOLVE_WITH_ERRORS,
      });

      wrapper.vm.handleFormSubmit();
      return waitForPromises().then(() => {
        expect(redirectTo).not.toHaveBeenCalled();
        expect(flashSpy).toHaveBeenCalledWith(mutationError);
      });
    },
  );

  it('flashes an error if mutation failed', () => {
    createComponent({
      mutationRes: mutationTypes.REJECT,
@@ -1,4 +1,4 @@
import { GlLoadingIcon, GlSprintf } from '@gitlab/ui';
import { GlLink, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter';

@@ -80,7 +80,7 @@ describe('MrWidgetTerraformPlan', () => {
  });

  it('renders button when url is found', () => {
    expect(wrapper.find('a').text()).toContain('View full log');
    expect(wrapper.find(GlLink).exists()).toBe(true);
  });

  it('does not make additional requests after poll is successful', () => {

@@ -101,7 +101,7 @@ describe('MrWidgetTerraformPlan', () => {
    );

    expect(wrapper.find('.js-terraform-report-link').exists()).toBe(false);
    expect(wrapper.text()).not.toContain('View full log');
    expect(wrapper.find(GlLink).exists()).toBe(false);
    });
  });
});
@@ -685,11 +685,11 @@ describe ProjectsHelper do
    end
  end

  describe 'link_to_bfg' do
    subject { helper.link_to_bfg }
  describe 'link_to_filter_repo' do
    subject { helper.link_to_filter_repo }

    it 'generates a hardcoded link to the BFG Repo-Cleaner' do
      result = helper.link_to_bfg
    it 'generates a hardcoded link to git filter-repo' do
      result = helper.link_to_filter_repo
      doc = Nokogiri::HTML.fragment(result)

      expect(doc.children.size).to eq(1)

@@ -702,8 +702,8 @@ describe ProjectsHelper do
      expect(link.name).to eq('a')
      expect(link[:target]).to eq('_blank')
      expect(link[:rel]).to eq('noopener noreferrer')
      expect(link[:href]).to eq('https://rtyley.github.io/bfg-repo-cleaner/')
      expect(link.inner_html).to eq('BFG')
      expect(link[:href]).to eq('https://github.com/newren/git-filter-repo')
      expect(link.inner_html).to eq('git filter-repo')

      expect(result).to be_html_safe
    end
@@ -7,17 +7,15 @@ describe ExtractsPath do
  include RepoHelpers
  include Gitlab::Routing

  let(:project) { double('project') }
  let_it_be(:owner) { create(:user) }
  let_it_be(:container) { create(:project, :repository, creator: owner) }
  let(:request) { double('request') }

  before do
    @project = project
    @project = container
    ref_names = ['master', 'foo/bar/baz', 'v1.0.0', 'v2.0.0', 'release/app', 'release/app/v1.0.0']

    repo = double(ref_names: ['master', 'foo/bar/baz', 'v1.0.0', 'v2.0.0',
                              'release/app', 'release/app/v1.0.0'])
    allow(project).to receive(:repository).and_return(repo)
    allow(project).to receive(:full_path)
      .and_return('gitlab/gitlab-ci')
    allow(container.repository).to receive(:ref_names).and_return(ref_names)
    allow(request).to receive(:format=)
  end

@@ -25,47 +23,14 @@ describe ExtractsPath do
    let(:ref) { sample_commit[:id] }
    let(:params) { { path: sample_commit[:line_code_path], ref: ref } }

    before do
      @project = create(:project, :repository)
    end
    it_behaves_like 'assigns ref vars'

    it "log tree path has no escape sequences" do
    it 'log tree path has no escape sequences' do
      assign_ref_vars

      expect(@logs_path).to eq("/#{@project.full_path}/-/refs/#{ref}/logs_tree/files/ruby/popen.rb")
    end

    context 'ref contains %20' do
      let(:ref) { 'foo%20bar' }

      it 'is not converted to a space in @id' do
        @project.repository.add_branch(@project.owner, 'foo%20bar', 'master')

        assign_ref_vars

        expect(@id).to start_with('foo%20bar/')
      end
    end

    context 'ref contains trailing space' do
      let(:ref) { 'master ' }

      it 'strips surrounding space' do
        assign_ref_vars

        expect(@ref).to eq('master')
      end
    end

    context 'ref contains leading space' do
      let(:ref) { ' master ' }

      it 'strips surrounding space' do
        assign_ref_vars

        expect(@ref).to eq('master')
      end
    end

    context 'ref contains space in the middle' do
      let(:ref) { 'master plan ' }

@@ -76,28 +41,6 @@ describe ExtractsPath do
      end
    end

    context 'path contains space' do
      let(:params) { { path: 'with space', ref: '38008cb17ce1466d8fec2dfa6f6ab8dcfe5cf49e' } }

      it 'is not converted to %20 in @path' do
        assign_ref_vars

        expect(@path).to eq(params[:path])
      end
    end

    context 'subclass overrides get_id' do
      it 'uses ref returned by get_id' do
        allow_next_instance_of(self.class) do |instance|
          allow(instance).to receive(:get_id) { '38008cb17ce1466d8fec2dfa6f6ab8dcfe5cf49e' }
        end

        assign_ref_vars

        expect(@id).to eq(get_id)
      end
    end

    context 'ref only exists without .atom suffix' do
      context 'with a path' do
        let(:params) { { ref: 'v1.0.0.atom', path: 'README.md' } }

@@ -171,58 +114,7 @@ describe ExtractsPath do
    end
  end

  describe '#extract_ref' do
    it "returns an empty pair when no @project is set" do
      @project = nil
      expect(extract_ref('master/CHANGELOG')).to eq(['', ''])
    end

    context "without a path" do
      it "extracts a valid branch" do
        expect(extract_ref('master')).to eq(['master', ''])
      end

      it "extracts a valid tag" do
        expect(extract_ref('v2.0.0')).to eq(['v2.0.0', ''])
      end

      it "extracts a valid commit ref without a path" do
        expect(extract_ref('f4b14494ef6abf3d144c28e4af0c20143383e062')).to eq(
          ['f4b14494ef6abf3d144c28e4af0c20143383e062', '']
        )
      end

      it "falls back to a primitive split for an invalid ref" do
        expect(extract_ref('stable')).to eq(['stable', ''])
      end

      it "extracts the longest matching ref" do
        expect(extract_ref('release/app/v1.0.0/README.md')).to eq(
          ['release/app/v1.0.0', 'README.md'])
      end
    end

    context "with a path" do
      it "extracts a valid branch" do
        expect(extract_ref('foo/bar/baz/CHANGELOG')).to eq(
          ['foo/bar/baz', 'CHANGELOG'])
      end

      it "extracts a valid tag" do
        expect(extract_ref('v2.0.0/CHANGELOG')).to eq(['v2.0.0', 'CHANGELOG'])
      end

      it "extracts a valid commit SHA" do
        expect(extract_ref('f4b14494ef6abf3d144c28e4af0c20143383e062/CHANGELOG')).to eq(
          %w(f4b14494ef6abf3d144c28e4af0c20143383e062 CHANGELOG)
        )
      end

      it "falls back to a primitive split for an invalid ref" do
        expect(extract_ref('stable/CHANGELOG')).to eq(%w(stable CHANGELOG))
      end
    end
  end
  it_behaves_like 'extracts refs'

  describe '#extract_ref_without_atom' do
    it 'ignores any matching refs suffixed with atom' do
23
spec/lib/extracts_ref_spec.rb
Normal file
@@ -0,0 +1,23 @@
# frozen_string_literal: true

require 'spec_helper'

describe ExtractsRef do
  include described_class
  include RepoHelpers

  let_it_be(:owner) { create(:user) }
  let_it_be(:container) { create(:snippet, :repository, author: owner) }
  let(:ref) { sample_commit[:id] }
  let(:params) { { path: sample_commit[:line_code_path], ref: ref } }

  before do
    ref_names = ['master', 'foo/bar/baz', 'v1.0.0', 'v2.0.0', 'release/app', 'release/app/v1.0.0']

    allow(container.repository).to receive(:ref_names).and_return(ref_names)
    allow_any_instance_of(described_class).to receive(:repository_container).and_return(container)
  end

  it_behaves_like 'assigns ref vars'
  it_behaves_like 'extracts refs'
end
118
spec/support/shared_examples/path_extraction_shared_examples.rb
Normal file
@@ -0,0 +1,118 @@
# frozen_string_literal: true

RSpec.shared_examples 'assigns ref vars' do
  it 'assigns the repository var' do
    assign_ref_vars

    expect(@repo).to eq container.repository
  end

  context 'ref contains %20' do
    let(:ref) { 'foo%20bar' }

    it 'is not converted to a space in @id' do
      container.repository.add_branch(owner, 'foo%20bar', 'master')

      assign_ref_vars

      expect(@id).to start_with('foo%20bar/')
    end
  end

  context 'ref contains trailing space' do
    let(:ref) { 'master ' }

    it 'strips surrounding space' do
      assign_ref_vars

      expect(@ref).to eq('master')
    end
  end

  context 'ref contains leading space' do
    let(:ref) { ' master ' }

    it 'strips surrounding space' do
      assign_ref_vars

      expect(@ref).to eq('master')
    end
  end

  context 'path contains space' do
    let(:params) { { path: 'with space', ref: '38008cb17ce1466d8fec2dfa6f6ab8dcfe5cf49e' } }

    it 'is not converted to %20 in @path' do
      assign_ref_vars

      expect(@path).to eq(params[:path])
    end
  end

  context 'subclass overrides get_id' do
    it 'uses ref returned by get_id' do
      allow_next_instance_of(self.class) do |instance|
        allow(instance).to receive(:get_id) { '38008cb17ce1466d8fec2dfa6f6ab8dcfe5cf49e' }
      end

      assign_ref_vars

      expect(@id).to eq(get_id)
    end
  end
end

RSpec.shared_examples 'extracts refs' do
  describe '#extract_ref' do
    it 'returns an empty pair when no repository_container is set' do
      allow_any_instance_of(described_class).to receive(:repository_container).and_return(nil)
      expect(extract_ref('master/CHANGELOG')).to eq(['', ''])
    end

    context 'without a path' do
      it 'extracts a valid branch' do
        expect(extract_ref('master')).to eq(['master', ''])
      end

      it 'extracts a valid tag' do
        expect(extract_ref('v2.0.0')).to eq(['v2.0.0', ''])
      end

      it 'extracts a valid commit ref without a path' do
        expect(extract_ref('f4b14494ef6abf3d144c28e4af0c20143383e062')).to eq(
          ['f4b14494ef6abf3d144c28e4af0c20143383e062', '']
        )
      end

      it 'falls back to a primitive split for an invalid ref' do
        expect(extract_ref('stable')).to eq(['stable', ''])
      end

      it 'extracts the longest matching ref' do
        expect(extract_ref('release/app/v1.0.0/README.md')).to eq(
          ['release/app/v1.0.0', 'README.md'])
      end
    end

    context 'with a path' do
      it 'extracts a valid branch' do
        expect(extract_ref('foo/bar/baz/CHANGELOG')).to eq(
          ['foo/bar/baz', 'CHANGELOG'])
      end

      it 'extracts a valid tag' do
        expect(extract_ref('v2.0.0/CHANGELOG')).to eq(['v2.0.0', 'CHANGELOG'])
      end

      it 'extracts a valid commit SHA' do
        expect(extract_ref('f4b14494ef6abf3d144c28e4af0c20143383e062/CHANGELOG')).to eq(
          %w(f4b14494ef6abf3d144c28e4af0c20143383e062 CHANGELOG)
        )
      end

      it 'falls back to a primitive split for an invalid ref' do
        expect(extract_ref('stable/CHANGELOG')).to eq(%w(stable CHANGELOG))
      end
    end
  end
end