Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent
6c346448fd
commit
b9664970c3
93 changed files with 2082 additions and 2083 deletions
|
@ -5512,7 +5512,6 @@ Layout/LineLength:
|
|||
- 'spec/presenters/alert_management/alert_presenter_spec.rb'
|
||||
- 'spec/presenters/blob_presenter_spec.rb'
|
||||
- 'spec/presenters/blobs/notebook_presenter_spec.rb'
|
||||
- 'spec/presenters/ci/legacy_stage_presenter_spec.rb'
|
||||
- 'spec/presenters/ci/pipeline_artifacts/code_quality_mr_diff_presenter_spec.rb'
|
||||
- 'spec/presenters/ci/pipeline_presenter_spec.rb'
|
||||
- 'spec/presenters/clusters/cluster_presenter_spec.rb'
|
||||
|
|
|
@ -554,7 +554,6 @@ Rails/SkipsModelValidations:
|
|||
- 'spec/models/ci/build_dependencies_spec.rb'
|
||||
- 'spec/models/ci/build_spec.rb'
|
||||
- 'spec/models/ci/group_spec.rb'
|
||||
- 'spec/models/ci/legacy_stage_spec.rb'
|
||||
- 'spec/models/ci/pipeline_schedule_spec.rb'
|
||||
- 'spec/models/ci/pipeline_spec.rb'
|
||||
- 'spec/models/ci/processable_spec.rb'
|
||||
|
|
|
@ -2620,7 +2620,6 @@ RSpec/ContextWording:
|
|||
- 'spec/models/ci/deleted_object_spec.rb'
|
||||
- 'spec/models/ci/job_artifact_spec.rb'
|
||||
- 'spec/models/ci/job_token/project_scope_link_spec.rb'
|
||||
- 'spec/models/ci/legacy_stage_spec.rb'
|
||||
- 'spec/models/ci/namespace_mirror_spec.rb'
|
||||
- 'spec/models/ci/pending_build_spec.rb'
|
||||
- 'spec/models/ci/pipeline_artifact_spec.rb'
|
||||
|
|
|
@ -153,7 +153,6 @@ Style/IfUnlessModifier:
|
|||
- 'app/models/ci/build.rb'
|
||||
- 'app/models/ci/build_trace_chunk.rb'
|
||||
- 'app/models/ci/job_artifact.rb'
|
||||
- 'app/models/ci/legacy_stage.rb'
|
||||
- 'app/models/ci/pipeline.rb'
|
||||
- 'app/models/ci/runner.rb'
|
||||
- 'app/models/ci/running_build.rb'
|
||||
|
|
|
@ -79,7 +79,6 @@ Style/PercentLiteralDelimiters:
|
|||
- 'app/models/bulk_imports/file_transfer/project_config.rb'
|
||||
- 'app/models/ci/build.rb'
|
||||
- 'app/models/ci/build_runner_session.rb'
|
||||
- 'app/models/ci/legacy_stage.rb'
|
||||
- 'app/models/ci/pipeline.rb'
|
||||
- 'app/models/clusters/applications/cert_manager.rb'
|
||||
- 'app/models/clusters/platforms/kubernetes.rb'
|
||||
|
|
|
@ -1 +1 @@
|
|||
a46121713a40b8c30794009eb4c40864a089e5a6
|
||||
43cb85d43809733551d9ad682987d89a2f4afb36
|
||||
|
|
|
@ -15,7 +15,7 @@ PATH
|
|||
specs:
|
||||
ipynbdiff (0.4.7)
|
||||
diffy (~> 3.3)
|
||||
json (~> 2.5, >= 2.5.1)
|
||||
oj (~> 3.13.16)
|
||||
|
||||
PATH
|
||||
remote: vendor/gems/mail-smtp_pool
|
||||
|
|
|
@ -3,7 +3,6 @@ import {
|
|||
GlAlert,
|
||||
GlButton,
|
||||
GlIcon,
|
||||
GlLink,
|
||||
GlLoadingIcon,
|
||||
GlModal,
|
||||
GlModalDirective,
|
||||
|
@ -14,7 +13,6 @@ import {
|
|||
} from '@gitlab/ui';
|
||||
import * as Sentry from '@sentry/browser';
|
||||
import Api, { DEFAULT_PER_PAGE } from '~/api';
|
||||
import { helpPagePath } from '~/helpers/help_page_helper';
|
||||
import httpStatusCodes from '~/lib/utils/http_status';
|
||||
import { __, s__, sprintf } from '~/locale';
|
||||
import Tracking from '~/tracking';
|
||||
|
@ -25,7 +23,6 @@ export default {
|
|||
GlAlert,
|
||||
GlButton,
|
||||
GlIcon,
|
||||
GlLink,
|
||||
GlLoadingIcon,
|
||||
GlModal,
|
||||
GlPagination,
|
||||
|
@ -39,21 +36,16 @@ export default {
|
|||
},
|
||||
mixins: [Tracking.mixin()],
|
||||
inject: ['projectId', 'admin', 'fileSizeLimit'],
|
||||
docsLink: helpPagePath('ci/secure_files/index'),
|
||||
DEFAULT_PER_PAGE,
|
||||
i18n: {
|
||||
deleteLabel: __('Delete File'),
|
||||
uploadLabel: __('Upload File'),
|
||||
uploadingLabel: __('Uploading...'),
|
||||
noFilesMessage: __('There are no secure files yet.'),
|
||||
pagination: {
|
||||
next: __('Next'),
|
||||
prev: __('Prev'),
|
||||
},
|
||||
title: __('Secure Files'),
|
||||
overviewMessage: __(
|
||||
'Use Secure Files to store files used by your pipelines such as Android keystores, or Apple provisioning profiles and signing certificates.',
|
||||
),
|
||||
moreInformation: __('More information'),
|
||||
uploadErrorMessages: {
|
||||
duplicate: __('A file with this name already exists.'),
|
||||
tooLarge: __('File too large. Secure Files must be less than %{limit} MB.'),
|
||||
|
@ -81,12 +73,12 @@ export default {
|
|||
fields: [
|
||||
{
|
||||
key: 'name',
|
||||
label: __('Filename'),
|
||||
label: __('File name'),
|
||||
tdClass: 'gl-vertical-align-middle!',
|
||||
},
|
||||
{
|
||||
key: 'created_at',
|
||||
label: __('Uploaded'),
|
||||
label: __('Uploaded date'),
|
||||
tdClass: 'gl-vertical-align-middle!',
|
||||
},
|
||||
{
|
||||
|
@ -163,7 +155,7 @@ export default {
|
|||
}
|
||||
return message;
|
||||
},
|
||||
loadFileSelctor() {
|
||||
loadFileSelector() {
|
||||
this.$refs.fileUpload.click();
|
||||
},
|
||||
setDeleteModalData(secureFile) {
|
||||
|
@ -183,91 +175,74 @@ export default {
|
|||
|
||||
<template>
|
||||
<div>
|
||||
<gl-alert v-if="error" variant="danger" class="gl-mt-6" @dismiss="error = null">
|
||||
{{ errorMessage }}
|
||||
</gl-alert>
|
||||
<div class="row">
|
||||
<div class="col-md-12 col-lg-6 gl-display-flex">
|
||||
<div class="gl-flex-direction-column gl-flex-wrap">
|
||||
<h1 class="gl-font-size-h1 gl-mt-3 gl-mb-0">
|
||||
{{ $options.i18n.title }}
|
||||
</h1>
|
||||
</div>
|
||||
</div>
|
||||
<div class="ci-secure-files-table">
|
||||
<gl-alert v-if="error" variant="danger" class="gl-mt-6" @dismiss="error = null">
|
||||
{{ errorMessage }}
|
||||
</gl-alert>
|
||||
|
||||
<div class="col-md-12 col-lg-6">
|
||||
<div class="gl-display-flex gl-flex-wrap gl-justify-content-end">
|
||||
<gl-button v-if="admin" class="gl-mt-3" variant="confirm" @click="loadFileSelctor">
|
||||
<span v-if="uploading">
|
||||
<gl-loading-icon size="sm" class="gl-my-5" inline />
|
||||
{{ $options.i18n.uploadingLabel }}
|
||||
</span>
|
||||
<span v-else>
|
||||
<gl-icon name="upload" class="gl-mr-2" /> {{ $options.i18n.uploadLabel }}
|
||||
</span>
|
||||
</gl-button>
|
||||
<input
|
||||
id="file-upload"
|
||||
ref="fileUpload"
|
||||
type="file"
|
||||
class="hidden"
|
||||
data-qa-selector="file_upload_field"
|
||||
@change="uploadSecureFile"
|
||||
<gl-table
|
||||
:busy="loading"
|
||||
:fields="fields"
|
||||
:items="projectSecureFiles"
|
||||
tbody-tr-class="js-ci-secure-files-row"
|
||||
data-qa-selector="ci_secure_files_table_content"
|
||||
sort-by="key"
|
||||
sort-direction="asc"
|
||||
stacked="lg"
|
||||
table-class="text-secondary"
|
||||
show-empty
|
||||
sort-icon-left
|
||||
no-sort-reset
|
||||
:empty-text="$options.i18n.noFilesMessage"
|
||||
>
|
||||
<template #table-busy>
|
||||
<gl-loading-icon size="lg" class="gl-my-5" />
|
||||
</template>
|
||||
|
||||
<template #cell(name)="{ item }">
|
||||
{{ item.name }}
|
||||
</template>
|
||||
|
||||
<template #cell(created_at)="{ item }">
|
||||
<timeago-tooltip :time="item.created_at" />
|
||||
</template>
|
||||
|
||||
<template #cell(actions)="{ item }">
|
||||
<gl-button
|
||||
v-if="admin"
|
||||
v-gl-modal="$options.deleteModalId"
|
||||
v-gl-tooltip.hover.top="$options.i18n.deleteLabel"
|
||||
category="secondary"
|
||||
variant="danger"
|
||||
icon="remove"
|
||||
:aria-label="$options.i18n.deleteLabel"
|
||||
data-testid="delete-button"
|
||||
@click="setDeleteModalData(item)"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
</gl-table>
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<div class="col-md-12 col-lg-12 gl-my-4">
|
||||
<span data-testid="info-message">
|
||||
{{ $options.i18n.overviewMessage }}
|
||||
<gl-link :href="$options.docsLink" target="_blank">{{
|
||||
$options.i18n.moreInformation
|
||||
}}</gl-link>
|
||||
<div class="gl-display-flex gl-mt-5">
|
||||
<gl-button v-if="admin" variant="confirm" @click="loadFileSelector">
|
||||
<span v-if="uploading">
|
||||
<gl-loading-icon class="gl-my-5" inline />
|
||||
{{ $options.i18n.uploadingLabel }}
|
||||
</span>
|
||||
</div>
|
||||
<span v-else>
|
||||
<gl-icon name="upload" class="gl-mr-2" /> {{ $options.i18n.uploadLabel }}
|
||||
</span>
|
||||
</gl-button>
|
||||
<input
|
||||
id="file-upload"
|
||||
ref="fileUpload"
|
||||
type="file"
|
||||
class="hidden"
|
||||
data-qa-selector="file_upload_field"
|
||||
@change="uploadSecureFile"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<gl-table
|
||||
:busy="loading"
|
||||
:fields="fields"
|
||||
:items="projectSecureFiles"
|
||||
tbody-tr-class="js-ci-secure-files-row"
|
||||
data-qa-selector="ci_secure_files_table_content"
|
||||
sort-by="key"
|
||||
sort-direction="asc"
|
||||
stacked="lg"
|
||||
table-class="text-secondary"
|
||||
show-empty
|
||||
sort-icon-left
|
||||
no-sort-reset
|
||||
>
|
||||
<template #table-busy>
|
||||
<gl-loading-icon size="lg" class="gl-my-5" />
|
||||
</template>
|
||||
|
||||
<template #cell(name)="{ item }">
|
||||
{{ item.name }}
|
||||
</template>
|
||||
|
||||
<template #cell(created_at)="{ item }">
|
||||
<timeago-tooltip :time="item.created_at" />
|
||||
</template>
|
||||
|
||||
<template #cell(actions)="{ item }">
|
||||
<gl-button
|
||||
v-if="admin"
|
||||
v-gl-modal="$options.deleteModalId"
|
||||
v-gl-tooltip.hover.top="$options.i18n.deleteLabel"
|
||||
variant="danger"
|
||||
icon="remove"
|
||||
:aria-label="$options.i18n.deleteLabel"
|
||||
@click="setDeleteModalData(item)"
|
||||
/>
|
||||
</template>
|
||||
</gl-table>
|
||||
|
||||
<gl-pagination
|
||||
v-if="!loading"
|
||||
v-model="page"
|
||||
|
|
|
@ -68,6 +68,11 @@ export default {
|
|||
default: null,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
searchResults: [],
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
...mapState([
|
||||
'isLoading',
|
||||
|
@ -184,6 +189,9 @@ export default {
|
|||
|
||||
this.throttled();
|
||||
},
|
||||
setSearchResults(searchResults) {
|
||||
this.searchResults = searchResults;
|
||||
},
|
||||
},
|
||||
};
|
||||
</script>
|
||||
|
@ -279,10 +287,12 @@ export default {
|
|||
:is-scroll-top-disabled="isScrollTopDisabled"
|
||||
:is-job-log-size-visible="isJobLogSizeVisible"
|
||||
:is-scrolling-down="isScrollingDown"
|
||||
:job-log="jobLog"
|
||||
@scrollJobLogTop="scrollTop"
|
||||
@scrollJobLogBottom="scrollBottom"
|
||||
@searchResults="setSearchResults"
|
||||
/>
|
||||
<log :job-log="jobLog" :is-complete="isJobLogComplete" />
|
||||
<log :job-log="jobLog" :is-complete="isJobLogComplete" :search-results="searchResults" />
|
||||
</div>
|
||||
<!-- EO job log -->
|
||||
|
||||
|
|
|
@ -1,21 +1,34 @@
|
|||
<script>
|
||||
import { GlTooltipDirective, GlLink, GlButton } from '@gitlab/ui';
|
||||
import { GlTooltipDirective, GlLink, GlButton, GlSearchBoxByClick } from '@gitlab/ui';
|
||||
import { scrollToElement } from '~/lib/utils/common_utils';
|
||||
import { numberToHumanSize } from '~/lib/utils/number_utils';
|
||||
import { __, s__, sprintf } from '~/locale';
|
||||
import HelpPopover from '~/vue_shared/components/help_popover.vue';
|
||||
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
|
||||
|
||||
export default {
|
||||
i18n: {
|
||||
scrollToBottomButtonLabel: s__('Job|Scroll to bottom'),
|
||||
scrollToTopButtonLabel: s__('Job|Scroll to top'),
|
||||
showRawButtonLabel: s__('Job|Show complete raw'),
|
||||
searchPlaceholder: s__('Job|Search job log'),
|
||||
noResults: s__('Job|No search results found'),
|
||||
searchPopoverTitle: s__('Job|Job log search'),
|
||||
searchPopoverDescription: s__(
|
||||
'Job|Search for substrings in your job log output. Currently search is only supported for the visible job log output, not for any log output that is truncated due to size.',
|
||||
),
|
||||
logLineNumberNotFound: s__('Job|We could not find this element'),
|
||||
},
|
||||
components: {
|
||||
GlLink,
|
||||
GlButton,
|
||||
GlSearchBoxByClick,
|
||||
HelpPopover,
|
||||
},
|
||||
directives: {
|
||||
GlTooltip: GlTooltipDirective,
|
||||
},
|
||||
mixins: [glFeatureFlagMixin()],
|
||||
props: {
|
||||
size: {
|
||||
type: Number,
|
||||
|
@ -42,6 +55,16 @@ export default {
|
|||
type: Boolean,
|
||||
required: true,
|
||||
},
|
||||
jobLog: {
|
||||
type: Array,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
searchTerm: '',
|
||||
searchResults: [],
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
jobLogSize() {
|
||||
|
@ -49,6 +72,9 @@ export default {
|
|||
size: numberToHumanSize(this.size),
|
||||
});
|
||||
},
|
||||
showJobLogSearch() {
|
||||
return this.glFeatures.jobLogSearch;
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
handleScrollToTop() {
|
||||
|
@ -57,6 +83,54 @@ export default {
|
|||
handleScrollToBottom() {
|
||||
this.$emit('scrollJobLogBottom');
|
||||
},
|
||||
searchJobLog() {
|
||||
this.searchResults = [];
|
||||
|
||||
if (!this.searchTerm) return;
|
||||
|
||||
const compactedLog = [];
|
||||
|
||||
this.jobLog.forEach((obj) => {
|
||||
if (obj.lines && obj.lines.length > 0) {
|
||||
compactedLog.push(...obj.lines);
|
||||
}
|
||||
|
||||
if (!obj.lines && obj.content.length > 0) {
|
||||
compactedLog.push(obj);
|
||||
}
|
||||
});
|
||||
|
||||
compactedLog.forEach((line) => {
|
||||
const lineText = line.content[0].text;
|
||||
|
||||
if (lineText.toLocaleLowerCase().includes(this.searchTerm.toLocaleLowerCase())) {
|
||||
this.searchResults.push(line);
|
||||
}
|
||||
});
|
||||
|
||||
if (this.searchResults.length > 0) {
|
||||
this.$emit('searchResults', this.searchResults);
|
||||
|
||||
// BE returns zero based index, we need to add one to match the line numbers in the DOM
|
||||
const firstSearchResult = `#L${this.searchResults[0].lineNumber + 1}`;
|
||||
const logLine = document.querySelector(`.js-line ${firstSearchResult}`);
|
||||
|
||||
if (logLine) {
|
||||
setTimeout(() => scrollToElement(logLine));
|
||||
|
||||
const message = sprintf(s__('Job|%{searchLength} results found for %{searchTerm}'), {
|
||||
searchLength: this.searchResults.length,
|
||||
searchTerm: this.searchTerm,
|
||||
});
|
||||
|
||||
this.$toast.show(message);
|
||||
} else {
|
||||
this.$toast.show(this.$options.i18n.logLineNumberNotFound);
|
||||
}
|
||||
} else {
|
||||
this.$toast.show(this.$options.i18n.noResults);
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
</script>
|
||||
|
@ -81,6 +155,25 @@ export default {
|
|||
<!-- eo truncate information -->
|
||||
|
||||
<div class="controllers gl-float-right">
|
||||
<template v-if="showJobLogSearch">
|
||||
<gl-search-box-by-click
|
||||
v-model="searchTerm"
|
||||
class="gl-mr-3"
|
||||
:placeholder="$options.i18n.searchPlaceholder"
|
||||
data-testid="job-log-search-box"
|
||||
@clear="$emit('searchResults', [])"
|
||||
@submit="searchJobLog"
|
||||
/>
|
||||
|
||||
<help-popover class="gl-mr-3">
|
||||
<template #title>{{ $options.i18n.searchPopoverTitle }}</template>
|
||||
|
||||
<p class="gl-mb-0">
|
||||
{{ $options.i18n.searchPopoverDescription }}
|
||||
</p>
|
||||
</help-popover>
|
||||
</template>
|
||||
|
||||
<!-- links -->
|
||||
<gl-button
|
||||
v-if="rawPath"
|
||||
|
|
|
@ -17,6 +17,11 @@ export default {
|
|||
type: String,
|
||||
required: true,
|
||||
},
|
||||
searchResults: {
|
||||
type: Array,
|
||||
required: false,
|
||||
default: () => [],
|
||||
},
|
||||
},
|
||||
computed: {
|
||||
badgeDuration() {
|
||||
|
@ -45,6 +50,7 @@ export default {
|
|||
:key="line.offset"
|
||||
:line="line"
|
||||
:path="jobLogEndpoint"
|
||||
:search-results="searchResults"
|
||||
/>
|
||||
</template>
|
||||
</div>
|
||||
|
|
|
@ -14,9 +14,14 @@ export default {
|
|||
type: String,
|
||||
required: true,
|
||||
},
|
||||
searchResults: {
|
||||
type: Array,
|
||||
required: false,
|
||||
default: () => [],
|
||||
},
|
||||
},
|
||||
render(h, { props }) {
|
||||
const { line, path } = props;
|
||||
const { line, path, searchResults } = props;
|
||||
|
||||
const chars = line.content.map((content) => {
|
||||
return h(
|
||||
|
@ -46,15 +51,33 @@ export default {
|
|||
);
|
||||
});
|
||||
|
||||
return h('div', { class: 'js-line log-line' }, [
|
||||
h(LineNumber, {
|
||||
props: {
|
||||
lineNumber: line.lineNumber,
|
||||
path,
|
||||
},
|
||||
}),
|
||||
...chars,
|
||||
]);
|
||||
let applyHighlight = false;
|
||||
|
||||
if (searchResults.length > 0) {
|
||||
const linesToHighlight = searchResults.map((searchResultLine) => searchResultLine.lineNumber);
|
||||
|
||||
linesToHighlight.forEach((num) => {
|
||||
if (num === line.lineNumber) {
|
||||
applyHighlight = true;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return h(
|
||||
'div',
|
||||
{
|
||||
class: ['js-line', 'log-line', applyHighlight ? 'gl-bg-gray-500' : ''],
|
||||
},
|
||||
[
|
||||
h(LineNumber, {
|
||||
props: {
|
||||
lineNumber: line.lineNumber,
|
||||
path,
|
||||
},
|
||||
}),
|
||||
...chars,
|
||||
],
|
||||
);
|
||||
},
|
||||
};
|
||||
</script>
|
||||
|
|
|
@ -8,6 +8,13 @@ export default {
|
|||
CollapsibleLogSection,
|
||||
LogLine,
|
||||
},
|
||||
props: {
|
||||
searchResults: {
|
||||
type: Array,
|
||||
required: false,
|
||||
default: () => [],
|
||||
},
|
||||
},
|
||||
computed: {
|
||||
...mapState([
|
||||
'jobLogEndpoint',
|
||||
|
@ -56,9 +63,16 @@ export default {
|
|||
:key="`collapsible-${index}`"
|
||||
:section="section"
|
||||
:job-log-endpoint="jobLogEndpoint"
|
||||
:search-results="searchResults"
|
||||
@onClickCollapsibleLine="handleOnClickCollapsibleLine"
|
||||
/>
|
||||
<log-line v-else :key="section.offset" :line="section" :path="jobLogEndpoint" />
|
||||
<log-line
|
||||
v-else
|
||||
:key="section.offset"
|
||||
:line="section"
|
||||
:path="jobLogEndpoint"
|
||||
:search-results="searchResults"
|
||||
/>
|
||||
</template>
|
||||
|
||||
<div v-if="!isJobLogComplete" class="js-log-animation loader-animation pt-3 pl-3">
|
||||
|
|
|
@ -1,7 +1,10 @@
|
|||
import { GlToast } from '@gitlab/ui';
|
||||
import Vue from 'vue';
|
||||
import JobApp from './components/job_app.vue';
|
||||
import createStore from './store';
|
||||
|
||||
Vue.use(GlToast);
|
||||
|
||||
const initializeJobPage = (element) => {
|
||||
const store = createStore();
|
||||
|
||||
|
|
|
@ -9,6 +9,7 @@ import { initInstallRunner } from '~/pages/shared/mount_runner_instructions';
|
|||
import initSharedRunnersToggle from '~/projects/settings/mount_shared_runners_toggle';
|
||||
import initSettingsPanels from '~/settings_panels';
|
||||
import { initTokenAccess } from '~/token_access';
|
||||
import { initCiSecureFiles } from '~/ci_secure_files';
|
||||
|
||||
// Initialize expandable settings panels
|
||||
initSettingsPanels();
|
||||
|
@ -41,3 +42,4 @@ initSharedRunnersToggle();
|
|||
initInstallRunner();
|
||||
initRunnerAwsDeployments();
|
||||
initTokenAccess();
|
||||
initCiSecureFiles();
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
<script>
|
||||
import { GlBadge, GlLink } from '@gitlab/ui';
|
||||
import { GlLink } from '@gitlab/ui';
|
||||
import { createAlert } from '~/flash';
|
||||
import { updateHistory } from '~/lib/utils/url_utility';
|
||||
import { formatNumber } from '~/locale';
|
||||
import { fetchPolicies } from '~/lib/graphql';
|
||||
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
|
||||
|
||||
|
@ -20,18 +19,8 @@ import RunnerActionsCell from '../components/cells/runner_actions_cell.vue';
|
|||
import { pausedTokenConfig } from '../components/search_tokens/paused_token_config';
|
||||
import { statusTokenConfig } from '../components/search_tokens/status_token_config';
|
||||
import { tagTokenConfig } from '../components/search_tokens/tag_token_config';
|
||||
import {
|
||||
ADMIN_FILTERED_SEARCH_NAMESPACE,
|
||||
INSTANCE_TYPE,
|
||||
GROUP_TYPE,
|
||||
PROJECT_TYPE,
|
||||
STATUS_ONLINE,
|
||||
STATUS_OFFLINE,
|
||||
STATUS_STALE,
|
||||
I18N_FETCH_ERROR,
|
||||
} from '../constants';
|
||||
import { ADMIN_FILTERED_SEARCH_NAMESPACE, INSTANCE_TYPE, I18N_FETCH_ERROR } from '../constants';
|
||||
import runnersAdminQuery from '../graphql/list/admin_runners.query.graphql';
|
||||
import runnersAdminCountQuery from '../graphql/list/admin_runners_count.query.graphql';
|
||||
import {
|
||||
fromUrlQueryToSearch,
|
||||
fromSearchToUrl,
|
||||
|
@ -40,54 +29,9 @@ import {
|
|||
} from '../runner_search_utils';
|
||||
import { captureException } from '../sentry_utils';
|
||||
|
||||
const countSmartQuery = () => ({
|
||||
query: runnersAdminCountQuery,
|
||||
fetchPolicy: fetchPolicies.NETWORK_ONLY,
|
||||
update(data) {
|
||||
return data?.runners?.count;
|
||||
},
|
||||
error(error) {
|
||||
this.reportToSentry(error);
|
||||
},
|
||||
});
|
||||
|
||||
const tabCountSmartQuery = ({ type }) => {
|
||||
return {
|
||||
...countSmartQuery(),
|
||||
variables() {
|
||||
return {
|
||||
...this.countVariables,
|
||||
type,
|
||||
};
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
const statusCountSmartQuery = ({ status, name }) => {
|
||||
return {
|
||||
...countSmartQuery(),
|
||||
skip() {
|
||||
// skip if filtering by status and not using _this_ status as filter
|
||||
if (this.countVariables.status && this.countVariables.status !== status) {
|
||||
// reset count for given status
|
||||
this[name] = null;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
variables() {
|
||||
return {
|
||||
...this.countVariables,
|
||||
status,
|
||||
};
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
export default {
|
||||
name: 'AdminRunnersApp',
|
||||
components: {
|
||||
GlBadge,
|
||||
GlLink,
|
||||
RegistrationDropdown,
|
||||
RunnerFilteredSearchBar,
|
||||
|
@ -137,31 +81,6 @@ export default {
|
|||
this.reportToSentry(error);
|
||||
},
|
||||
},
|
||||
|
||||
// Tabs counts
|
||||
allRunnersCount: {
|
||||
...tabCountSmartQuery({ type: null }),
|
||||
},
|
||||
instanceRunnersCount: {
|
||||
...tabCountSmartQuery({ type: INSTANCE_TYPE }),
|
||||
},
|
||||
groupRunnersCount: {
|
||||
...tabCountSmartQuery({ type: GROUP_TYPE }),
|
||||
},
|
||||
projectRunnersCount: {
|
||||
...tabCountSmartQuery({ type: PROJECT_TYPE }),
|
||||
},
|
||||
|
||||
// Runner stats
|
||||
onlineRunnersTotal: {
|
||||
...statusCountSmartQuery({ status: STATUS_ONLINE, name: 'onlineRunnersTotal' }),
|
||||
},
|
||||
offlineRunnersTotal: {
|
||||
...statusCountSmartQuery({ status: STATUS_OFFLINE, name: 'offlineRunnersTotal' }),
|
||||
},
|
||||
staleRunnersTotal: {
|
||||
...statusCountSmartQuery({ status: STATUS_STALE, name: 'staleRunnersTotal' }),
|
||||
},
|
||||
},
|
||||
computed: {
|
||||
variables() {
|
||||
|
@ -214,39 +133,10 @@ export default {
|
|||
this.reportToSentry(error);
|
||||
},
|
||||
methods: {
|
||||
tabCount({ runnerType }) {
|
||||
let count;
|
||||
switch (runnerType) {
|
||||
case null:
|
||||
count = this.allRunnersCount;
|
||||
break;
|
||||
case INSTANCE_TYPE:
|
||||
count = this.instanceRunnersCount;
|
||||
break;
|
||||
case GROUP_TYPE:
|
||||
count = this.groupRunnersCount;
|
||||
break;
|
||||
case PROJECT_TYPE:
|
||||
count = this.projectRunnersCount;
|
||||
break;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
if (typeof count === 'number') {
|
||||
return formatNumber(count);
|
||||
}
|
||||
return '';
|
||||
},
|
||||
refetchFilteredCounts() {
|
||||
this.$apollo.queries.allRunnersCount.refetch();
|
||||
this.$apollo.queries.instanceRunnersCount.refetch();
|
||||
this.$apollo.queries.groupRunnersCount.refetch();
|
||||
this.$apollo.queries.projectRunnersCount.refetch();
|
||||
},
|
||||
onToggledPaused() {
|
||||
// When a runner is Paused, the tab count can
|
||||
// When a runner becomes Paused, the tab count can
|
||||
// become stale, refetch outdated counts.
|
||||
this.refetchFilteredCounts();
|
||||
this.$refs['runner-type-tabs'].refetch();
|
||||
},
|
||||
onDeleted({ message }) {
|
||||
this.$root.$toast?.show(message);
|
||||
|
@ -271,18 +161,14 @@ export default {
|
|||
class="gl-display-flex gl-align-items-center gl-flex-direction-column-reverse gl-md-flex-direction-row gl-mt-3 gl-md-mt-0"
|
||||
>
|
||||
<runner-type-tabs
|
||||
ref="runner-type-tabs"
|
||||
v-model="search"
|
||||
:count-scope="$options.INSTANCE_TYPE"
|
||||
:count-variables="countVariables"
|
||||
class="gl-w-full"
|
||||
content-class="gl-display-none"
|
||||
nav-class="gl-border-none!"
|
||||
>
|
||||
<template #title="{ tab }">
|
||||
{{ tab.title }}
|
||||
<gl-badge v-if="tabCount(tab)" class="gl-ml-1" size="sm">
|
||||
{{ tabCount(tab) }}
|
||||
</gl-badge>
|
||||
</template>
|
||||
</runner-type-tabs>
|
||||
/>
|
||||
|
||||
<registration-dropdown
|
||||
class="gl-w-full gl-sm-w-auto gl-mr-auto"
|
||||
|
@ -298,11 +184,7 @@ export default {
|
|||
:namespace="$options.filteredSearchNamespace"
|
||||
/>
|
||||
|
||||
<runner-stats
|
||||
:online-runners-count="onlineRunnersTotal"
|
||||
:offline-runners-count="offlineRunnersTotal"
|
||||
:stale-runners-count="staleRunnersTotal"
|
||||
/>
|
||||
<runner-stats :scope="$options.INSTANCE_TYPE" :variables="countVariables" />
|
||||
|
||||
<runner-list-empty-state
|
||||
v-if="noRunnersFound"
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
<script>
|
||||
import { GlTabs, GlTab } from '@gitlab/ui';
|
||||
import { GlBadge, GlTabs, GlTab } from '@gitlab/ui';
|
||||
import { searchValidator } from '~/runner/runner_search_utils';
|
||||
import { formatNumber } from '~/locale';
|
||||
import {
|
||||
INSTANCE_TYPE,
|
||||
GROUP_TYPE,
|
||||
|
@ -10,6 +11,7 @@ import {
|
|||
I18N_GROUP_TYPE,
|
||||
I18N_PROJECT_TYPE,
|
||||
} from '../constants';
|
||||
import RunnerCount from './stat/runner_count.vue';
|
||||
|
||||
const I18N_TAB_TITLES = {
|
||||
[INSTANCE_TYPE]: I18N_INSTANCE_TYPE,
|
||||
|
@ -17,10 +19,14 @@ const I18N_TAB_TITLES = {
|
|||
[PROJECT_TYPE]: I18N_PROJECT_TYPE,
|
||||
};
|
||||
|
||||
const TAB_COUNT_REF = 'tab-count';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
GlBadge,
|
||||
GlTabs,
|
||||
GlTab,
|
||||
RunnerCount,
|
||||
},
|
||||
props: {
|
||||
runnerTypes: {
|
||||
|
@ -33,6 +39,14 @@ export default {
|
|||
required: true,
|
||||
validator: searchValidator,
|
||||
},
|
||||
countScope: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
countVariables: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
computed: {
|
||||
tabs() {
|
||||
|
@ -62,7 +76,25 @@ export default {
|
|||
isTabActive({ runnerType }) {
|
||||
return runnerType === this.value.runnerType;
|
||||
},
|
||||
tabBadgeCountVariables(runnerType) {
|
||||
return { ...this.countVariables, type: runnerType };
|
||||
},
|
||||
tabCount(count) {
|
||||
if (typeof count === 'number') {
|
||||
return formatNumber(count);
|
||||
}
|
||||
return '';
|
||||
},
|
||||
|
||||
// Component API
|
||||
refetch() {
|
||||
// Refresh all of the counts here, can be called by parent component
|
||||
this.$refs[TAB_COUNT_REF].forEach((countComponent) => {
|
||||
countComponent.refetch();
|
||||
});
|
||||
},
|
||||
},
|
||||
TAB_COUNT_REF,
|
||||
};
|
||||
</script>
|
||||
<template>
|
||||
|
@ -74,7 +106,17 @@ export default {
|
|||
@click="onTabSelected(tab)"
|
||||
>
|
||||
<template #title>
|
||||
<slot name="title" :tab="tab">{{ tab.title }}</slot>
|
||||
{{ tab.title }}
|
||||
<runner-count
|
||||
#default="{ count }"
|
||||
:ref="$options.TAB_COUNT_REF"
|
||||
:scope="countScope"
|
||||
:variables="tabBadgeCountVariables(tab.runnerType)"
|
||||
>
|
||||
<gl-badge v-if="tabCount(count)" class="gl-ml-1" size="sm">
|
||||
{{ tabCount(count) }}
|
||||
</gl-badge>
|
||||
</runner-count>
|
||||
</template>
|
||||
</gl-tab>
|
||||
</gl-tabs>
|
||||
|
|
103
app/assets/javascripts/runner/components/stat/runner_count.vue
Normal file
103
app/assets/javascripts/runner/components/stat/runner_count.vue
Normal file
|
@ -0,0 +1,103 @@
|
|||
<script>
|
||||
import { fetchPolicies } from '~/lib/graphql';
|
||||
import { captureException } from '../../sentry_utils';
|
||||
import runnersAdminCountQuery from '../../graphql/list/admin_runners_count.query.graphql';
|
||||
import groupRunnersCountQuery from '../../graphql/list/group_runners_count.query.graphql';
|
||||
import { INSTANCE_TYPE, GROUP_TYPE } from '../../constants';
|
||||
|
||||
/**
|
||||
* Renderless component that wraps a "count" query for the
|
||||
* number of runners that follow a filter criteria.
|
||||
*
|
||||
* Example usage:
|
||||
*
|
||||
* Render the count of "online" runners in the instance in a
|
||||
* <strong/> tag.
|
||||
*
|
||||
* ```vue
|
||||
* <runner-count-stat
|
||||
* #default="{ count }"
|
||||
* :scope="INSTANCE_TYPE"
|
||||
* :variables="{ status: 'ONLINE' }"
|
||||
* >
|
||||
* <strong>{{ count }}</strong>
|
||||
* </runner-count-stat>
|
||||
* ```
|
||||
*
|
||||
* Use `:skip="true"` to prevent data from being fetched and
|
||||
* even rendered.
|
||||
*/
|
||||
export default {
|
||||
name: 'RunnerCount',
|
||||
props: {
|
||||
scope: {
|
||||
type: String,
|
||||
required: true,
|
||||
validator: (val) => [INSTANCE_TYPE, GROUP_TYPE].includes(val),
|
||||
},
|
||||
variables: {
|
||||
type: Object,
|
||||
required: false,
|
||||
default: () => {},
|
||||
},
|
||||
skip: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return { count: null };
|
||||
},
|
||||
apollo: {
|
||||
count: {
|
||||
query() {
|
||||
if (this.scope === INSTANCE_TYPE) {
|
||||
return runnersAdminCountQuery;
|
||||
} else if (this.scope === GROUP_TYPE) {
|
||||
return groupRunnersCountQuery;
|
||||
}
|
||||
return null;
|
||||
},
|
||||
fetchPolicy: fetchPolicies.NETWORK_ONLY,
|
||||
variables() {
|
||||
return this.variables;
|
||||
},
|
||||
skip() {
|
||||
if (this.skip) {
|
||||
// Don't show data for skipped stats
|
||||
this.count = null;
|
||||
}
|
||||
return this.skip;
|
||||
},
|
||||
update(data) {
|
||||
if (this.scope === INSTANCE_TYPE) {
|
||||
return data?.runners?.count;
|
||||
} else if (this.scope === GROUP_TYPE) {
|
||||
return data?.group?.runners?.count;
|
||||
}
|
||||
return null;
|
||||
},
|
||||
error(error) {
|
||||
this.reportToSentry(error);
|
||||
},
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
reportToSentry(error) {
|
||||
captureException({ error, component: this.$options.name });
|
||||
},
|
||||
|
||||
// Component API
|
||||
refetch() {
|
||||
// Parent components can use this method to refresh the count
|
||||
this.$apollo.queries.count.refetch();
|
||||
},
|
||||
},
|
||||
render() {
|
||||
return this.$scopedSlots.default({
|
||||
count: this.count,
|
||||
});
|
||||
},
|
||||
};
|
||||
</script>
|
|
@ -1,49 +1,47 @@
|
|||
<script>
|
||||
import { STATUS_ONLINE, STATUS_OFFLINE, STATUS_STALE } from '../../constants';
|
||||
import RunnerCount from './runner_count.vue';
|
||||
import RunnerStatusStat from './runner_status_stat.vue';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
RunnerCount,
|
||||
RunnerStatusStat,
|
||||
},
|
||||
props: {
|
||||
onlineRunnersCount: {
|
||||
type: Number,
|
||||
required: false,
|
||||
default: null,
|
||||
scope: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
offlineRunnersCount: {
|
||||
type: Number,
|
||||
variables: {
|
||||
type: Object,
|
||||
required: false,
|
||||
default: null,
|
||||
},
|
||||
staleRunnersCount: {
|
||||
type: Number,
|
||||
required: false,
|
||||
default: null,
|
||||
default: () => {},
|
||||
},
|
||||
},
|
||||
STATUS_ONLINE,
|
||||
STATUS_OFFLINE,
|
||||
STATUS_STALE,
|
||||
methods: {
|
||||
countVariables(vars) {
|
||||
return { ...this.variables, ...vars };
|
||||
},
|
||||
statusCountSkip(status) {
|
||||
// Show an empty result when we already filter by another status
|
||||
return this.variables.status && this.variables.status !== status;
|
||||
},
|
||||
},
|
||||
STATUS_LIST: [STATUS_ONLINE, STATUS_OFFLINE, STATUS_STALE],
|
||||
};
|
||||
</script>
|
||||
<template>
|
||||
<div class="gl-display-flex gl-py-6">
|
||||
<runner-status-stat
|
||||
class="gl-px-5"
|
||||
:status="$options.STATUS_ONLINE"
|
||||
:value="onlineRunnersCount"
|
||||
/>
|
||||
<runner-status-stat
|
||||
class="gl-px-5"
|
||||
:status="$options.STATUS_OFFLINE"
|
||||
:value="offlineRunnersCount"
|
||||
/>
|
||||
<runner-status-stat
|
||||
class="gl-px-5"
|
||||
:status="$options.STATUS_STALE"
|
||||
:value="staleRunnersCount"
|
||||
/>
|
||||
<runner-count
|
||||
v-for="status in $options.STATUS_LIST"
|
||||
#default="{ count }"
|
||||
:key="status"
|
||||
:scope="scope"
|
||||
:variables="countVariables({ status })"
|
||||
:skip="statusCountSkip(status)"
|
||||
>
|
||||
<runner-status-stat class="gl-px-5" :status="status" :value="count" />
|
||||
</runner-count>
|
||||
</div>
|
||||
</template>
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
<script>
|
||||
import { GlBadge, GlLink } from '@gitlab/ui';
|
||||
import { GlLink } from '@gitlab/ui';
|
||||
import { createAlert } from '~/flash';
|
||||
import { updateHistory } from '~/lib/utils/url_utility';
|
||||
import { formatNumber } from '~/locale';
|
||||
import { fetchPolicies } from '~/lib/graphql';
|
||||
|
||||
import RegistrationDropdown from '../components/registration/registration_dropdown.vue';
|
||||
|
@ -21,13 +20,9 @@ import {
|
|||
GROUP_FILTERED_SEARCH_NAMESPACE,
|
||||
GROUP_TYPE,
|
||||
PROJECT_TYPE,
|
||||
STATUS_ONLINE,
|
||||
STATUS_OFFLINE,
|
||||
STATUS_STALE,
|
||||
I18N_FETCH_ERROR,
|
||||
} from '../constants';
|
||||
import groupRunnersQuery from '../graphql/list/group_runners.query.graphql';
|
||||
import groupRunnersCountQuery from '../graphql/list/group_runners_count.query.graphql';
|
||||
import {
|
||||
fromUrlQueryToSearch,
|
||||
fromSearchToUrl,
|
||||
|
@ -36,54 +31,9 @@ import {
|
|||
} from '../runner_search_utils';
|
||||
import { captureException } from '../sentry_utils';
|
||||
|
||||
const countSmartQuery = () => ({
|
||||
query: groupRunnersCountQuery,
|
||||
fetchPolicy: fetchPolicies.NETWORK_ONLY,
|
||||
update(data) {
|
||||
return data?.group?.runners?.count;
|
||||
},
|
||||
error(error) {
|
||||
this.reportToSentry(error);
|
||||
},
|
||||
});
|
||||
|
||||
const tabCountSmartQuery = ({ type }) => {
|
||||
return {
|
||||
...countSmartQuery(),
|
||||
variables() {
|
||||
return {
|
||||
...this.countVariables,
|
||||
type,
|
||||
};
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
const statusCountSmartQuery = ({ status, name }) => {
|
||||
return {
|
||||
...countSmartQuery(),
|
||||
skip() {
|
||||
// skip if filtering by status and not using _this_ status as filter
|
||||
if (this.countVariables.status && this.countVariables.status !== status) {
|
||||
// reset count for given status
|
||||
this[name] = null;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
variables() {
|
||||
return {
|
||||
...this.countVariables,
|
||||
status,
|
||||
};
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
export default {
|
||||
name: 'GroupRunnersApp',
|
||||
components: {
|
||||
GlBadge,
|
||||
GlLink,
|
||||
RegistrationDropdown,
|
||||
RunnerFilteredSearchBar,
|
||||
|
@ -153,28 +103,6 @@ export default {
|
|||
this.reportToSentry(error);
|
||||
},
|
||||
},
|
||||
|
||||
// Tabs counts
|
||||
allRunnersCount: {
|
||||
...tabCountSmartQuery({ type: null }),
|
||||
},
|
||||
groupRunnersCount: {
|
||||
...tabCountSmartQuery({ type: GROUP_TYPE }),
|
||||
},
|
||||
projectRunnersCount: {
|
||||
...tabCountSmartQuery({ type: PROJECT_TYPE }),
|
||||
},
|
||||
|
||||
// Runner status summary
|
||||
onlineRunnersTotal: {
|
||||
...statusCountSmartQuery({ status: STATUS_ONLINE, name: 'onlineRunnersTotal' }),
|
||||
},
|
||||
offlineRunnersTotal: {
|
||||
...statusCountSmartQuery({ status: STATUS_OFFLINE, name: 'offlineRunnersTotal' }),
|
||||
},
|
||||
staleRunnersTotal: {
|
||||
...statusCountSmartQuery({ status: STATUS_STALE, name: 'staleRunnersTotal' }),
|
||||
},
|
||||
},
|
||||
computed: {
|
||||
variables() {
|
||||
|
@ -221,41 +149,16 @@ export default {
|
|||
this.reportToSentry(error);
|
||||
},
|
||||
methods: {
|
||||
tabCount({ runnerType }) {
|
||||
let count;
|
||||
switch (runnerType) {
|
||||
case null:
|
||||
count = this.allRunnersCount;
|
||||
break;
|
||||
case GROUP_TYPE:
|
||||
count = this.groupRunnersCount;
|
||||
break;
|
||||
case PROJECT_TYPE:
|
||||
count = this.projectRunnersCount;
|
||||
break;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
if (typeof count === 'number') {
|
||||
return formatNumber(count);
|
||||
}
|
||||
return null;
|
||||
},
|
||||
webUrl(runner) {
|
||||
return this.runners.urlsById[runner.id]?.web;
|
||||
},
|
||||
editUrl(runner) {
|
||||
return this.runners.urlsById[runner.id]?.edit;
|
||||
},
|
||||
refetchFilteredCounts() {
|
||||
this.$apollo.queries.allRunnersCount.refetch();
|
||||
this.$apollo.queries.groupRunnersCount.refetch();
|
||||
this.$apollo.queries.projectRunnersCount.refetch();
|
||||
},
|
||||
onToggledPaused() {
|
||||
// When a runner is Paused, the tab count can
|
||||
// When a runner becomes Paused, the tab count can
|
||||
// become stale, refetch outdated counts.
|
||||
this.refetchFilteredCounts();
|
||||
this.$refs['runner-type-tabs'].refetch();
|
||||
},
|
||||
onDeleted({ message }) {
|
||||
this.$root.$toast?.show(message);
|
||||
|
@ -273,18 +176,15 @@ export default {
|
|||
<div>
|
||||
<div class="gl-display-flex gl-align-items-center">
|
||||
<runner-type-tabs
|
||||
ref="runner-type-tabs"
|
||||
v-model="search"
|
||||
:count-scope="$options.GROUP_TYPE"
|
||||
:count-variables="countVariables"
|
||||
:runner-types="$options.TABS_RUNNER_TYPES"
|
||||
class="gl-w-full"
|
||||
content-class="gl-display-none"
|
||||
nav-class="gl-border-none!"
|
||||
>
|
||||
<template #title="{ tab }">
|
||||
{{ tab.title }}
|
||||
<gl-badge v-if="tabCount(tab)" class="gl-ml-1" size="sm">
|
||||
{{ tabCount(tab) }}
|
||||
</gl-badge>
|
||||
</template>
|
||||
</runner-type-tabs>
|
||||
/>
|
||||
|
||||
<registration-dropdown
|
||||
class="gl-ml-auto"
|
||||
|
@ -300,11 +200,7 @@ export default {
|
|||
:namespace="filteredSearchNamespace"
|
||||
/>
|
||||
|
||||
<runner-stats
|
||||
:online-runners-count="onlineRunnersTotal"
|
||||
:offline-runners-count="offlineRunnersTotal"
|
||||
:stale-runners-count="staleRunnersTotal"
|
||||
/>
|
||||
<runner-stats :scope="$options.GROUP_TYPE" :variables="countVariables" />
|
||||
|
||||
<runner-list-empty-state
|
||||
v-if="noRunnersFound"
|
||||
|
|
|
@ -315,7 +315,8 @@
|
|||
}
|
||||
|
||||
.ci-variable-table,
|
||||
.deploy-freeze-table {
|
||||
.deploy-freeze-table,
|
||||
.ci-secure-files-table {
|
||||
table {
|
||||
thead {
|
||||
border-bottom: 1px solid $white-normal;
|
||||
|
|
|
@ -20,6 +20,7 @@ class Projects::JobsController < Projects::ApplicationController
|
|||
before_action :verify_proxy_request!, only: :proxy_websocket_authorize
|
||||
before_action :push_jobs_table_vue, only: [:index]
|
||||
before_action :push_jobs_table_vue_search, only: [:index]
|
||||
before_action :push_job_log_search, only: [:show]
|
||||
before_action :reject_if_build_artifacts_size_refreshing!, only: [:erase]
|
||||
|
||||
layout 'project'
|
||||
|
@ -257,4 +258,8 @@ class Projects::JobsController < Projects::ApplicationController
|
|||
def push_jobs_table_vue_search
|
||||
push_frontend_feature_flag(:jobs_table_vue_search, @project)
|
||||
end
|
||||
|
||||
def push_job_log_search
|
||||
push_frontend_feature_flag(:job_log_search, @project)
|
||||
end
|
||||
end
|
||||
|
|
|
@ -175,7 +175,7 @@ class Projects::PipelinesController < Projects::ApplicationController
|
|||
end
|
||||
|
||||
def stage
|
||||
@stage = pipeline.legacy_stage(params[:stage])
|
||||
@stage = pipeline.stage(params[:stage])
|
||||
return not_found unless @stage
|
||||
|
||||
render json: StageSerializer
|
||||
|
|
|
@ -1,73 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Ci
|
||||
# Currently this is artificial object, constructed dynamically
|
||||
# We should migrate this object to actual database record in the future
|
||||
class LegacyStage
|
||||
include StaticModel
|
||||
include Presentable
|
||||
|
||||
attr_reader :pipeline, :name
|
||||
|
||||
delegate :project, to: :pipeline
|
||||
|
||||
def initialize(pipeline, name:, status: nil, warnings: nil)
|
||||
@pipeline = pipeline
|
||||
@name = name
|
||||
@status = status
|
||||
# support ints and booleans
|
||||
@has_warnings = ActiveRecord::Type::Boolean.new.cast(warnings)
|
||||
end
|
||||
|
||||
def groups
|
||||
@groups ||= Ci::Group.fabricate(project, self)
|
||||
end
|
||||
|
||||
def to_param
|
||||
name
|
||||
end
|
||||
|
||||
def statuses_count
|
||||
@statuses_count ||= statuses.count
|
||||
end
|
||||
|
||||
def status
|
||||
@status ||= statuses.latest.composite_status(project: project)
|
||||
end
|
||||
|
||||
def detailed_status(current_user)
|
||||
Gitlab::Ci::Status::Stage::Factory
|
||||
.new(self, current_user)
|
||||
.fabricate!
|
||||
end
|
||||
|
||||
def latest_statuses
|
||||
statuses.ordered.latest
|
||||
end
|
||||
|
||||
def statuses
|
||||
@statuses ||= pipeline.statuses.where(stage: name)
|
||||
end
|
||||
|
||||
def builds
|
||||
@builds ||= pipeline.builds.where(stage: name)
|
||||
end
|
||||
|
||||
def success?
|
||||
status.to_s == 'success'
|
||||
end
|
||||
|
||||
def has_warnings?
|
||||
# lazilly calculate the warnings
|
||||
if @has_warnings.nil?
|
||||
@has_warnings = statuses.latest.failed_but_allowed.any?
|
||||
end
|
||||
|
||||
@has_warnings
|
||||
end
|
||||
|
||||
def manual_playable?
|
||||
%[manual scheduled skipped].include?(status.to_s)
|
||||
end
|
||||
end
|
||||
end
|
|
@ -496,40 +496,16 @@ module Ci
|
|||
.pluck(:stage, :stage_idx).map(&:first)
|
||||
end
|
||||
|
||||
def legacy_stage(name)
|
||||
stage = Ci::LegacyStage.new(self, name: name)
|
||||
stage unless stage.statuses_count == 0
|
||||
end
|
||||
|
||||
def ref_exists?
|
||||
project.repository.ref_exists?(git_ref)
|
||||
rescue Gitlab::Git::Repository::NoRepository
|
||||
false
|
||||
end
|
||||
|
||||
def legacy_stages_using_composite_status
|
||||
stages = latest_statuses_ordered_by_stage.group_by(&:stage)
|
||||
|
||||
stages.map do |stage_name, jobs|
|
||||
composite_status = Gitlab::Ci::Status::Composite
|
||||
.new(jobs)
|
||||
|
||||
Ci::LegacyStage.new(self,
|
||||
name: stage_name,
|
||||
status: composite_status.status,
|
||||
warnings: composite_status.warnings?)
|
||||
end
|
||||
end
|
||||
|
||||
def triggered_pipelines_with_preloads
|
||||
triggered_pipelines.preload(:source_job)
|
||||
end
|
||||
|
||||
# TODO: Remove usage of this method in templates
|
||||
def legacy_stages
|
||||
legacy_stages_using_composite_status
|
||||
end
|
||||
|
||||
def valid_commit_sha
|
||||
if self.sha == Gitlab::Git::BLANK_SHA
|
||||
self.errors.add(:sha, " cant be 00000000 (branch removal)")
|
||||
|
@ -1232,6 +1208,10 @@ module Ci
|
|||
Gitlab::Utils.slugify(source_ref.to_s)
|
||||
end
|
||||
|
||||
def stage(name)
|
||||
stages.find_by(name: name)
|
||||
end
|
||||
|
||||
def find_stage_by_name!(name)
|
||||
stages.find_by!(name: name)
|
||||
end
|
||||
|
|
|
@ -1,23 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Ci
|
||||
class LegacyStagePresenter < Gitlab::View::Presenter::Delegated
|
||||
presents ::Ci::LegacyStage, as: :legacy_stage
|
||||
|
||||
def latest_ordered_statuses
|
||||
preload_statuses(legacy_stage.statuses.latest_ordered)
|
||||
end
|
||||
|
||||
def retried_ordered_statuses
|
||||
preload_statuses(legacy_stage.statuses.retried_ordered)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def preload_statuses(statuses)
|
||||
Preloaders::CommitStatusPreloader.new(statuses).execute(Ci::StagePresenter::PRELOADED_RELATIONS)
|
||||
|
||||
statuses
|
||||
end
|
||||
end
|
||||
end
|
|
@ -36,8 +36,6 @@ module Ci
|
|||
private
|
||||
|
||||
def key(base_pipeline, head_pipeline)
|
||||
return super unless Feature.enabled?(:ci_child_pipeline_coverage_reports, head_pipeline.project)
|
||||
|
||||
[
|
||||
base_pipeline&.id, last_update_timestamp(base_pipeline),
|
||||
head_pipeline&.id, last_update_timestamp(head_pipeline)
|
||||
|
|
|
@ -9,11 +9,6 @@ module Ci
|
|||
end
|
||||
|
||||
def execute
|
||||
unless Feature.enabled?(:ci_child_pipeline_coverage_reports, pipeline.project) ||
|
||||
!pipeline.has_coverage_reports?
|
||||
return
|
||||
end
|
||||
|
||||
return if report.empty?
|
||||
|
||||
Ci::PipelineArtifact.create_or_replace_for_pipeline!(**pipeline_artifact_params).tap do |pipeline_artifact|
|
||||
|
|
|
@ -109,13 +109,15 @@
|
|||
= render 'ci/token_access/index'
|
||||
|
||||
- if show_secure_files_setting(@project, current_user)
|
||||
%section.settings
|
||||
%section.settings.no-animate#js-secure-files{ class: ('expanded' if expanded) }
|
||||
.settings-header
|
||||
%h4.settings-title
|
||||
%h4.settings-title.js-settings-toggle.js-settings-toggle-trigger-only
|
||||
= _("Secure Files")
|
||||
= button_to project_ci_secure_files_path(@project), method: :get, class: 'btn gl-button btn-default' do
|
||||
= _('Manage')
|
||||
= render Pajamas::ButtonComponent.new(button_options: { class: 'js-settings-toggle' }) do
|
||||
= expanded ? _('Collapse') : _('Expand')
|
||||
%p
|
||||
= _("Use Secure Files to store files used by your pipelines such as Android keystores, or Apple provisioning profiles and signing certificates.")
|
||||
= link_to _('Learn more'), help_page_path('ci/secure_files/index'), target: '_blank', rel: 'noopener noreferrer'
|
||||
.settings-content
|
||||
#js-ci-secure-files{ data: { project_id: @project.id, admin: can?(current_user, :admin_secure_files, @project).to_s, file_size_limit: Ci::SecureFile::FILE_SIZE_LIMIT.to_mb } }
|
||||
|
||||
|
|
|
@ -19,14 +19,10 @@ module Ci
|
|||
|
||||
return unless pipeline
|
||||
|
||||
if Feature.enabled?(:ci_child_pipeline_coverage_reports, pipeline.project)
|
||||
pipeline.root_ancestor.try do |root_ancestor_pipeline|
|
||||
next unless root_ancestor_pipeline.self_and_descendants_complete?
|
||||
pipeline.root_ancestor.try do |root_ancestor_pipeline|
|
||||
next unless root_ancestor_pipeline.self_and_descendants_complete?
|
||||
|
||||
Ci::PipelineArtifacts::CoverageReportService.new(root_ancestor_pipeline).execute
|
||||
end
|
||||
else
|
||||
Ci::PipelineArtifacts::CoverageReportService.new(pipeline).execute
|
||||
Ci::PipelineArtifacts::CoverageReportService.new(root_ancestor_pipeline).execute
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
---
|
||||
name: ci_child_pipeline_coverage_reports
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/88626
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/363557
|
||||
milestone: '15.1'
|
||||
name: job_log_search
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/91293
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/366455
|
||||
milestone: '15.2'
|
||||
type: development
|
||||
group: group::pipeline insights
|
||||
group: group::pipeline execution
|
||||
default_enabled: false
|
|
@ -0,0 +1,15 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddUniqueIndexOnCiRunnerVersionsOnStatusAndVersion < Gitlab::Database::Migration[2.0]
|
||||
disable_ddl_transaction!
|
||||
|
||||
INDEX_NAME = 'index_ci_runner_versions_on_unique_status_and_version'
|
||||
|
||||
def up
|
||||
add_concurrent_index :ci_runner_versions, [:status, :version], name: INDEX_NAME, unique: true
|
||||
end
|
||||
|
||||
def down
|
||||
remove_concurrent_index_by_name :ci_runner_versions, INDEX_NAME
|
||||
end
|
||||
end
|
|
@ -0,0 +1,15 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class DropIndexOnCiRunnerVersionsOnVersion < Gitlab::Database::Migration[2.0]
|
||||
disable_ddl_transaction!
|
||||
|
||||
INDEX_NAME = 'index_ci_runner_versions_on_version'
|
||||
|
||||
def up
|
||||
remove_concurrent_index_by_name :ci_runner_versions, INDEX_NAME
|
||||
end
|
||||
|
||||
def down
|
||||
add_concurrent_index :ci_runner_versions, :version, name: INDEX_NAME
|
||||
end
|
||||
end
|
|
@ -0,0 +1,15 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class DropIndexOnCiRunnerVersionsOnStatus < Gitlab::Database::Migration[2.0]
|
||||
disable_ddl_transaction!
|
||||
|
||||
INDEX_NAME = 'index_ci_runner_versions_on_status'
|
||||
|
||||
def up
|
||||
remove_concurrent_index_by_name :ci_runner_versions, INDEX_NAME
|
||||
end
|
||||
|
||||
def down
|
||||
add_concurrent_index :ci_runner_versions, :version, name: INDEX_NAME
|
||||
end
|
||||
end
|
1
db/schema_migrations/20220705114635
Normal file
1
db/schema_migrations/20220705114635
Normal file
|
@ -0,0 +1 @@
|
|||
b9d37f6b3f59c4d2a08533fd1e2dc91403081fdf5691c86a1874079cb7937588
|
1
db/schema_migrations/20220708100508
Normal file
1
db/schema_migrations/20220708100508
Normal file
|
@ -0,0 +1 @@
|
|||
041c729542e7bf418ee805d6c1878aa62fd274a97583cc11dfebae9e7bdac896
|
1
db/schema_migrations/20220708100532
Normal file
1
db/schema_migrations/20220708100532
Normal file
|
@ -0,0 +1 @@
|
|||
28cf54895ada6e5d501bd5dcb9e7e161fd44ce51494b984dde7beadd0895c952
|
|
@ -27674,9 +27674,7 @@ CREATE UNIQUE INDEX index_ci_runner_namespaces_on_runner_id_and_namespace_id ON
|
|||
|
||||
CREATE INDEX index_ci_runner_projects_on_project_id ON ci_runner_projects USING btree (project_id);
|
||||
|
||||
CREATE INDEX index_ci_runner_versions_on_status ON ci_runner_versions USING btree (status);
|
||||
|
||||
CREATE INDEX index_ci_runner_versions_on_version ON ci_runner_versions USING btree (version);
|
||||
CREATE UNIQUE INDEX index_ci_runner_versions_on_unique_status_and_version ON ci_runner_versions USING btree (status, version);
|
||||
|
||||
CREATE INDEX index_ci_runners_on_active ON ci_runners USING btree (active, id);
|
||||
|
||||
|
|
|
@ -80,23 +80,17 @@ to draw the visualization on the merge request expires **one week** after creati
|
|||
|
||||
### Coverage report from child pipeline
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/363301) in GitLab 15.1 [with a flag](../../administration/feature_flags.md). Disabled by default.
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/363301) in GitLab 15.1 [with a flag](../../administration/feature_flags.md) named `ci_child_pipeline_coverage_reports`. Disabled by default.
|
||||
> - [Enabled on GitLab.com and self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/363557) and feature flag `ci_child_pipeline_coverage_reports` removed in GitLab 15.2.
|
||||
|
||||
FLAG:
|
||||
On self-managed GitLab, by default this feature is not available. To make it available, ask an administrator to [enable the feature flag](../../administration/feature_flags.md) named `ci_child_pipeline_coverage_reports`.
|
||||
On GitLab.com, this feature is not available.
|
||||
The feature is not ready for production use.
|
||||
|
||||
If the test coverage is created in jobs that are in a child pipeline, the parent pipeline must use
|
||||
`strategy: depend`.
|
||||
If a job in a child pipeline creates a coverage report, the report is included in
|
||||
the parent pipeline's coverage report.
|
||||
|
||||
```yaml
|
||||
child_test_pipeline:
|
||||
trigger:
|
||||
include:
|
||||
- local: path/to/child_pipeline.yml
|
||||
- template: Security/SAST.gitlab-ci.yml
|
||||
strategy: depend
|
||||
- local: path/to/child_pipeline_with_coverage.yml
|
||||
```
|
||||
|
||||
### Automatic class path correction
|
||||
|
|
|
@ -154,8 +154,9 @@ GitLab also provides a [KPT package for the agent](https://gitlab.com/gitlab-org
|
|||
|
||||
To configure your agent, add content to the `config.yaml` file:
|
||||
|
||||
- [View the configuration reference](../gitops.md#gitops-configuration-reference) for a GitOps workflow.
|
||||
- [View the configuration reference](../ci_cd_workflow.md) for a GitLab CI/CD workflow.
|
||||
- For a GitOps workflow, [view the configuration reference](../gitops.md#gitops-configuration-reference).
|
||||
- For a GitLab CI/CD workflow, [authorize the agent to access your projects](../ci_cd_workflow.md#authorize-the-agent). Then
|
||||
[add `kubectl` commands to your `.gitlab-ci.yml` file](../ci_cd_workflow.md#update-your-gitlab-ciyml-file-to-run-kubectl-commands).
|
||||
|
||||
## Install multiple agents in your cluster
|
||||
|
||||
|
|
|
@ -68,7 +68,7 @@ module Gitlab
|
|||
def valid_json?(metadata)
|
||||
Oj.load(metadata)
|
||||
true
|
||||
rescue Oj::ParseError, EncodingError, Json::ParseError, Encoding::UndefinedConversionError
|
||||
rescue Oj::ParseError, EncodingError, JSON::ParserError, Encoding::UndefinedConversionError
|
||||
false
|
||||
end
|
||||
|
||||
|
|
|
@ -35,17 +35,7 @@ module Gitlab
|
|||
private
|
||||
|
||||
def report_builds
|
||||
if child_pipeline_feature_enabled?
|
||||
@pipeline.latest_report_builds_in_self_and_descendants(::Ci::JobArtifact.coverage_reports)
|
||||
else
|
||||
@pipeline.latest_report_builds(::Ci::JobArtifact.coverage_reports)
|
||||
end
|
||||
end
|
||||
|
||||
def child_pipeline_feature_enabled?
|
||||
strong_memoize(:feature_enabled) do
|
||||
Feature.enabled?(:ci_child_pipeline_coverage_reports, @pipeline.project)
|
||||
end
|
||||
@pipeline.latest_report_builds_in_self_and_descendants(::Ci::JobArtifact.coverage_reports)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -79,7 +79,7 @@ module Gitlab
|
|||
rescue Timeout::Error => e
|
||||
rendered_timeout.increment(source: Gitlab::Runtime.sidekiq? ? BACKGROUND_EXECUTION : FOREGROUND_EXECUTION)
|
||||
log_event(LOG_IPYNBDIFF_TIMEOUT, e)
|
||||
rescue IpynbDiff::InvalidNotebookError, IpynbDiff::InvalidTokenError => e
|
||||
rescue IpynbDiff::InvalidNotebookError => e
|
||||
log_event(LOG_IPYNBDIFF_INVALID, e)
|
||||
end
|
||||
end
|
||||
|
|
|
@ -91,7 +91,7 @@ module Gitlab
|
|||
|
||||
return 0 unless line_in_source.present?
|
||||
|
||||
line_in_source + 1
|
||||
line_in_source
|
||||
end
|
||||
|
||||
def image_as_rich_text(line_text)
|
||||
|
|
|
@ -38,6 +38,7 @@ module Gitlab
|
|||
|
||||
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Pages::PageDeployedEvent
|
||||
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Pages::PageDeletedEvent
|
||||
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectDeletedEvent
|
||||
end
|
||||
private_class_method :configure!
|
||||
end
|
||||
|
|
|
@ -95,7 +95,7 @@ module Gitlab
|
|||
opts = standardize_opts(opts)
|
||||
|
||||
Oj.load(string, opts)
|
||||
rescue Oj::ParseError, EncodingError, JSON::ParseError, Encoding::UndefinedConversionError => ex
|
||||
rescue Oj::ParseError, EncodingError, Encoding::UndefinedConversionError => ex
|
||||
raise parser_error, ex
|
||||
end
|
||||
|
||||
|
|
|
@@ -22355,6 +22355,9 @@ msgstr ""
msgid "Job|%{boldStart}Pipeline%{boldEnd} %{id} for %{ref}"
msgstr ""

msgid "Job|%{searchLength} results found for %{searchTerm}"
msgstr ""

msgid "Job|Are you sure you want to erase this job log and artifacts?"
msgstr ""

@@ -22394,12 +22397,18 @@ msgstr ""
msgid "Job|Job has been erased by %{userLink}"
msgstr ""

msgid "Job|Job log search"
msgstr ""

msgid "Job|Keep"
msgstr ""

msgid "Job|Manual"
msgstr ""

msgid "Job|No search results found"
msgstr ""

msgid "Job|Passed"
msgstr ""

@@ -22424,6 +22433,12 @@ msgstr ""
msgid "Job|Scroll to top"
msgstr ""

msgid "Job|Search for substrings in your job log output. Currently search is only supported for the visible job log output, not for any log output that is truncated due to size."
msgstr ""

msgid "Job|Search job log"
msgstr ""

msgid "Job|Show complete raw"
msgstr ""

@@ -22460,6 +22475,9 @@ msgstr ""
msgid "Job|Waiting for resource"
msgstr ""

msgid "Job|We could not find this element"
msgstr ""

msgid "Job|allowed to fail"
msgstr ""

@@ -23643,9 +23661,6 @@ msgstr ""
msgid "Makes this issue confidential."
msgstr ""

msgid "Manage"
msgstr ""

msgid "Manage %{workspace} labels"
msgstr ""

@@ -39027,6 +39042,9 @@ msgstr ""
msgid "There are no projects shared with this group yet"
msgstr ""

msgid "There are no secure files yet."
msgstr ""

msgid "There are no topics to show."
msgstr ""

@@ -41383,7 +41401,7 @@ msgstr ""
msgid "Upload object map"
msgstr ""

msgid "Uploaded"
msgid "Uploaded date"
msgstr ""

msgid "Uploading changes to terminal"

@@ -44115,9 +44133,6 @@ msgstr ""
msgid "You can only add up to %{max_contacts} contacts at one time"
msgstr ""

msgid "You can only approve an indivdual user, member, or all members"
msgstr ""

msgid "You can only edit files when you are on a branch"
msgstr ""

@@ -44157,9 +44172,6 @@ msgstr ""
msgid "You cannot access the raw file. Please wait a minute."
msgstr ""

msgid "You cannot approve all pending members on a free plan"
msgstr ""

msgid "You cannot approve your own deployment."
msgstr ""

@@ -40,8 +40,14 @@ module QA
sign_in(as: Runtime::User.admin, address: address, admin: true)
end

def sign_in_unless_signed_in(as: nil, address: :gitlab)
sign_in(as: as, address: address) unless Page::Main::Menu.perform(&:signed_in?)
def sign_in_unless_signed_in(user: nil, address: :gitlab)
if user
sign_in(as: user, address: address) unless Page::Main::Menu.perform do |menu|
menu.signed_in_as_user?(user)
end
else
sign_in(address: address) unless Page::Main::Menu.perform(&:signed_in?)
end
end
end
end

@@ -113,6 +113,14 @@ module QA
has_personal_area?(wait: 0)
end

def signed_in_as_user?(user)
return false if has_no_personal_area?

within_user_menu do
has_element?(:user_profile_link, text: /#{user.username}/)
end
end

def not_signed_in?
return true if Page::Main::Login.perform(&:on_login_page?)

@@ -202,7 +210,7 @@ module QA

def within_user_menu(&block)
within_top_menu do
click_element :user_avatar
click_element :user_avatar unless has_element?(:user_profile_link, wait: 1)

within_element(:user_menu, &block)
end

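Together with the Flow::Login change above, the new predicate lets a QA test assert which account owns the current session rather than just that a session exists. A usage sketch, assuming user responds to #username as the element match above implies:

    # Sketch only: fail fast when the session belongs to a different account.
    Page::Main::Menu.perform do |menu|
      raise "expected #{user.username} to be signed in" unless menu.signed_in_as_user?(user)
    end
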
@@ -61,7 +61,7 @@ module QA
end

def fabricate!
Flow::Login.sign_in_unless_signed_in(as: user)
Flow::Login.sign_in_unless_signed_in(user: user)

Page::Main::Menu.perform(&:click_edit_profile_link)
Page::Profile::Menu.perform(&:click_access_tokens)

@@ -8,305 +8,294 @@ RSpec.describe Projects::Settings::CiCdController do
|
|||
|
||||
let(:project) { project_auto_devops.project }
|
||||
|
||||
before do
|
||||
project.add_maintainer(user)
|
||||
sign_in(user)
|
||||
end
|
||||
|
||||
describe 'GET show' do
|
||||
let_it_be(:parent_group) { create(:group) }
|
||||
let_it_be(:group) { create(:group, parent: parent_group) }
|
||||
let_it_be(:other_project) { create(:project, group: group) }
|
||||
|
||||
it 'renders show with 200 status code' do
|
||||
get :show, params: { namespace_id: project.namespace, project_id: project }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(response).to render_template(:show)
|
||||
end
|
||||
|
||||
context 'with CI/CD disabled' do
|
||||
before do
|
||||
project.project_feature.update_attribute(:builds_access_level, ProjectFeature::DISABLED)
|
||||
end
|
||||
|
||||
it 'renders show with 404 status code' do
|
||||
get :show, params: { namespace_id: project.namespace, project_id: project }
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with group runners' do
|
||||
let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group]) }
|
||||
let_it_be(:project_runner) { create(:ci_runner, :project, projects: [other_project]) }
|
||||
let_it_be(:shared_runner) { create(:ci_runner, :instance) }
|
||||
|
||||
it 'sets assignable project runners only' do
|
||||
group.add_maintainer(user)
|
||||
|
||||
get :show, params: { namespace_id: project.namespace, project_id: project }
|
||||
|
||||
expect(assigns(:assignable_runners)).to contain_exactly(project_runner)
|
||||
end
|
||||
end
|
||||
|
||||
context 'prevents N+1 queries for tags' do
|
||||
render_views
|
||||
|
||||
def show
|
||||
get :show, params: { namespace_id: project.namespace, project_id: project }
|
||||
end
|
||||
|
||||
it 'has the same number of queries with one tag or with many tags', :request_store do
|
||||
group.add_maintainer(user)
|
||||
|
||||
show # warmup
|
||||
|
||||
# with one tag
|
||||
create(:ci_runner, :instance, tag_list: %w(shared_runner))
|
||||
create(:ci_runner, :project, projects: [other_project], tag_list: %w(project_runner))
|
||||
create(:ci_runner, :group, groups: [group], tag_list: %w(group_runner))
|
||||
control = ActiveRecord::QueryRecorder.new { show }
|
||||
|
||||
# with several tags
|
||||
create(:ci_runner, :instance, tag_list: %w(shared_runner tag2 tag3))
|
||||
create(:ci_runner, :project, projects: [other_project], tag_list: %w(project_runner tag2 tag3))
|
||||
create(:ci_runner, :group, groups: [group], tag_list: %w(group_runner tag2 tag3))
|
||||
|
||||
expect { show }.not_to exceed_query_limit(control)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#reset_cache' do
|
||||
context 'as a maintainer' do
|
||||
before do
|
||||
sign_in(user)
|
||||
|
||||
project.add_maintainer(user)
|
||||
|
||||
allow(ResetProjectCacheService).to receive_message_chain(:new, :execute).and_return(true)
|
||||
sign_in(user)
|
||||
end
|
||||
|
||||
subject { post :reset_cache, params: { namespace_id: project.namespace, project_id: project }, format: :json }
|
||||
describe 'GET show' do
|
||||
let_it_be(:parent_group) { create(:group) }
|
||||
let_it_be(:group) { create(:group, parent: parent_group) }
|
||||
let_it_be(:other_project) { create(:project, group: group) }
|
||||
|
||||
it 'calls reset project cache service' do
|
||||
expect(ResetProjectCacheService).to receive_message_chain(:new, :execute)
|
||||
|
||||
subject
|
||||
end
|
||||
|
||||
context 'when service returns successfully' do
|
||||
it 'returns a success header' do
|
||||
subject
|
||||
it 'renders show with 200 status code' do
|
||||
get :show, params: { namespace_id: project.namespace, project_id: project }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when service does not return successfully' do
|
||||
before do
|
||||
allow(ResetProjectCacheService).to receive_message_chain(:new, :execute).and_return(false)
|
||||
expect(response).to render_template(:show)
|
||||
end
|
||||
|
||||
it 'returns an error header' do
|
||||
subject
|
||||
context 'with CI/CD disabled' do
|
||||
before do
|
||||
project.project_feature.update_attribute(:builds_access_level, ProjectFeature::DISABLED)
|
||||
end
|
||||
|
||||
expect(response).to have_gitlab_http_status(:bad_request)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'PUT #reset_registration_token' do
|
||||
subject { put :reset_registration_token, params: { namespace_id: project.namespace, project_id: project } }
|
||||
|
||||
it 'resets runner registration token' do
|
||||
expect { subject }.to change { project.reload.runners_token }
|
||||
expect(flash[:toast]).to eq('New runners registration token has been generated!')
|
||||
end
|
||||
|
||||
it 'redirects the user to admin runners page' do
|
||||
subject
|
||||
|
||||
expect(response).to redirect_to(namespace_project_settings_ci_cd_path)
|
||||
end
|
||||
end
|
||||
|
||||
describe 'PATCH update' do
|
||||
let(:params) { { ci_config_path: '' } }
|
||||
|
||||
subject do
|
||||
patch :update,
|
||||
params: {
|
||||
namespace_id: project.namespace.to_param,
|
||||
project_id: project,
|
||||
project: params
|
||||
}
|
||||
end
|
||||
|
||||
it 'redirects to the settings page' do
|
||||
subject
|
||||
|
||||
expect(response).to have_gitlab_http_status(:found)
|
||||
expect(flash[:toast]).to eq("Pipelines settings for '#{project.name}' were successfully updated.")
|
||||
end
|
||||
|
||||
context 'when updating the auto_devops settings' do
|
||||
let(:params) { { auto_devops_attributes: { enabled: '' } } }
|
||||
|
||||
context 'following the instance default' do
|
||||
let(:params) { { auto_devops_attributes: { enabled: '' } } }
|
||||
|
||||
it 'allows enabled to be set to nil' do
|
||||
subject
|
||||
project_auto_devops.reload
|
||||
|
||||
expect(project_auto_devops.enabled).to be_nil
|
||||
it 'renders show with 404 status code' do
|
||||
get :show, params: { namespace_id: project.namespace, project_id: project }
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when run_auto_devops_pipeline is true' do
|
||||
context 'with group runners' do
|
||||
let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group]) }
|
||||
let_it_be(:project_runner) { create(:ci_runner, :project, projects: [other_project]) }
|
||||
let_it_be(:shared_runner) { create(:ci_runner, :instance) }
|
||||
|
||||
it 'sets assignable project runners only' do
|
||||
group.add_maintainer(user)
|
||||
|
||||
get :show, params: { namespace_id: project.namespace, project_id: project }
|
||||
|
||||
expect(assigns(:assignable_runners)).to contain_exactly(project_runner)
|
||||
end
|
||||
end
|
||||
|
||||
context 'prevents N+1 queries for tags' do
|
||||
render_views
|
||||
|
||||
def show
|
||||
get :show, params: { namespace_id: project.namespace, project_id: project }
|
||||
end
|
||||
|
||||
it 'has the same number of queries with one tag or with many tags', :request_store do
|
||||
group.add_maintainer(user)
|
||||
|
||||
show # warmup
|
||||
|
||||
# with one tag
|
||||
create(:ci_runner, :instance, tag_list: %w(shared_runner))
|
||||
create(:ci_runner, :project, projects: [other_project], tag_list: %w(project_runner))
|
||||
create(:ci_runner, :group, groups: [group], tag_list: %w(group_runner))
|
||||
control = ActiveRecord::QueryRecorder.new { show }
|
||||
|
||||
# with several tags
|
||||
create(:ci_runner, :instance, tag_list: %w(shared_runner tag2 tag3))
|
||||
create(:ci_runner, :project, projects: [other_project], tag_list: %w(project_runner tag2 tag3))
|
||||
create(:ci_runner, :group, groups: [group], tag_list: %w(group_runner tag2 tag3))
|
||||
|
||||
expect { show }.not_to exceed_query_limit(control)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#reset_cache' do
|
||||
before do
|
||||
sign_in(user)
|
||||
|
||||
project.add_maintainer(user)
|
||||
|
||||
allow(ResetProjectCacheService).to receive_message_chain(:new, :execute).and_return(true)
|
||||
end
|
||||
|
||||
subject { post :reset_cache, params: { namespace_id: project.namespace, project_id: project }, format: :json }
|
||||
|
||||
it 'calls reset project cache service' do
|
||||
expect(ResetProjectCacheService).to receive_message_chain(:new, :execute)
|
||||
|
||||
subject
|
||||
end
|
||||
|
||||
context 'when service returns successfully' do
|
||||
it 'returns a success header' do
|
||||
subject
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when service does not return successfully' do
|
||||
before do
|
||||
expect_next_instance_of(Projects::UpdateService) do |instance|
|
||||
expect(instance).to receive(:run_auto_devops_pipeline?).and_return(true)
|
||||
allow(ResetProjectCacheService).to receive_message_chain(:new, :execute).and_return(false)
|
||||
end
|
||||
|
||||
it 'returns an error header' do
|
||||
subject
|
||||
|
||||
expect(response).to have_gitlab_http_status(:bad_request)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'PUT #reset_registration_token' do
|
||||
subject { put :reset_registration_token, params: { namespace_id: project.namespace, project_id: project } }
|
||||
|
||||
it 'resets runner registration token' do
|
||||
expect { subject }.to change { project.reload.runners_token }
|
||||
expect(flash[:toast]).to eq('New runners registration token has been generated!')
|
||||
end
|
||||
|
||||
it 'redirects the user to admin runners page' do
|
||||
subject
|
||||
|
||||
expect(response).to redirect_to(namespace_project_settings_ci_cd_path)
|
||||
end
|
||||
end
|
||||
|
||||
describe 'PATCH update' do
|
||||
let(:params) { { ci_config_path: '' } }
|
||||
|
||||
subject do
|
||||
patch :update,
|
||||
params: {
|
||||
namespace_id: project.namespace.to_param,
|
||||
project_id: project,
|
||||
project: params
|
||||
}
|
||||
end
|
||||
|
||||
it 'redirects to the settings page' do
|
||||
subject
|
||||
|
||||
expect(response).to have_gitlab_http_status(:found)
|
||||
expect(flash[:toast]).to eq("Pipelines settings for '#{project.name}' were successfully updated.")
|
||||
end
|
||||
|
||||
context 'when updating the auto_devops settings' do
|
||||
let(:params) { { auto_devops_attributes: { enabled: '' } } }
|
||||
|
||||
context 'following the instance default' do
|
||||
let(:params) { { auto_devops_attributes: { enabled: '' } } }
|
||||
|
||||
it 'allows enabled to be set to nil' do
|
||||
subject
|
||||
project_auto_devops.reload
|
||||
|
||||
expect(project_auto_devops.enabled).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the project repository is empty' do
|
||||
it 'sets a notice flash' do
|
||||
subject
|
||||
context 'when run_auto_devops_pipeline is true' do
|
||||
before do
|
||||
expect_next_instance_of(Projects::UpdateService) do |instance|
|
||||
expect(instance).to receive(:run_auto_devops_pipeline?).and_return(true)
|
||||
end
|
||||
end
|
||||
|
||||
expect(controller).to set_flash[:notice]
|
||||
context 'when the project repository is empty' do
|
||||
it 'sets a notice flash' do
|
||||
subject
|
||||
|
||||
expect(controller).to set_flash[:notice]
|
||||
end
|
||||
|
||||
it 'does not queue a CreatePipelineWorker' do
|
||||
expect(CreatePipelineWorker).not_to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args)
|
||||
|
||||
subject
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the project repository is not empty' do
|
||||
let(:project) { create(:project, :repository) }
|
||||
|
||||
it 'displays a toast message' do
|
||||
allow(CreatePipelineWorker).to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args)
|
||||
|
||||
subject
|
||||
|
||||
expect(controller).to set_flash[:toast]
|
||||
end
|
||||
|
||||
it 'queues a CreatePipelineWorker' do
|
||||
expect(CreatePipelineWorker).to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args)
|
||||
|
||||
subject
|
||||
end
|
||||
|
||||
it 'creates a pipeline', :sidekiq_inline do
|
||||
project.repository.create_file(user, 'Gemfile', 'Gemfile contents',
|
||||
message: 'Add Gemfile',
|
||||
branch_name: 'master')
|
||||
|
||||
expect { subject }.to change { Ci::Pipeline.count }.by(1)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when run_auto_devops_pipeline is not true' do
|
||||
before do
|
||||
expect_next_instance_of(Projects::UpdateService) do |instance|
|
||||
expect(instance).to receive(:run_auto_devops_pipeline?).and_return(false)
|
||||
end
|
||||
end
|
||||
|
||||
it 'does not queue a CreatePipelineWorker' do
|
||||
expect(CreatePipelineWorker).not_to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args)
|
||||
expect(CreatePipelineWorker).not_to receive(:perform_async).with(project.id, user.id, :web, any_args)
|
||||
|
||||
subject
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the project repository is not empty' do
|
||||
let(:project) { create(:project, :repository) }
|
||||
|
||||
it 'displays a toast message' do
|
||||
allow(CreatePipelineWorker).to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args)
|
||||
|
||||
subject
|
||||
|
||||
expect(controller).to set_flash[:toast]
|
||||
end
|
||||
|
||||
it 'queues a CreatePipelineWorker' do
|
||||
expect(CreatePipelineWorker).to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args)
|
||||
|
||||
subject
|
||||
end
|
||||
|
||||
it 'creates a pipeline', :sidekiq_inline do
|
||||
project.repository.create_file(user, 'Gemfile', 'Gemfile contents',
|
||||
message: 'Add Gemfile',
|
||||
branch_name: 'master')
|
||||
|
||||
expect { subject }.to change { Ci::Pipeline.count }.by(1)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when run_auto_devops_pipeline is not true' do
|
||||
before do
|
||||
expect_next_instance_of(Projects::UpdateService) do |instance|
|
||||
expect(instance).to receive(:run_auto_devops_pipeline?).and_return(false)
|
||||
end
|
||||
end
|
||||
context 'when updating general settings' do
|
||||
context 'when build_timeout_human_readable is not specified' do
|
||||
let(:params) { { build_timeout_human_readable: '' } }
|
||||
|
||||
it 'does not queue a CreatePipelineWorker' do
|
||||
expect(CreatePipelineWorker).not_to receive(:perform_async).with(project.id, user.id, :web, any_args)
|
||||
|
||||
subject
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when updating general settings' do
|
||||
context 'when build_timeout_human_readable is not specified' do
|
||||
let(:params) { { build_timeout_human_readable: '' } }
|
||||
|
||||
it 'set default timeout' do
|
||||
subject
|
||||
|
||||
project.reload
|
||||
expect(project.build_timeout).to eq(3600)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when build_timeout_human_readable is specified' do
|
||||
let(:params) { { build_timeout_human_readable: '1h 30m' } }
|
||||
|
||||
it 'set specified timeout' do
|
||||
subject
|
||||
|
||||
project.reload
|
||||
expect(project.build_timeout).to eq(5400)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when build_timeout_human_readable is invalid' do
|
||||
let(:params) { { build_timeout_human_readable: '5m' } }
|
||||
|
||||
it 'set specified timeout' do
|
||||
subject
|
||||
|
||||
expect(controller).to set_flash[:alert]
|
||||
expect(response).to redirect_to(namespace_project_settings_ci_cd_path)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when default_git_depth is not specified' do
|
||||
let(:params) { { ci_cd_settings_attributes: { default_git_depth: 10 } } }
|
||||
|
||||
before do
|
||||
project.ci_cd_settings.update!(default_git_depth: nil)
|
||||
end
|
||||
|
||||
it 'set specified git depth' do
|
||||
subject
|
||||
|
||||
project.reload
|
||||
expect(project.ci_default_git_depth).to eq(10)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when forward_deployment_enabled is not specified' do
|
||||
let(:params) { { ci_cd_settings_attributes: { forward_deployment_enabled: false } } }
|
||||
|
||||
before do
|
||||
project.ci_cd_settings.update!(forward_deployment_enabled: nil)
|
||||
end
|
||||
|
||||
it 'sets forward deployment enabled' do
|
||||
subject
|
||||
|
||||
project.reload
|
||||
expect(project.ci_forward_deployment_enabled).to eq(false)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when max_artifacts_size is specified' do
|
||||
let(:params) { { max_artifacts_size: 10 } }
|
||||
|
||||
context 'and user is not an admin' do
|
||||
it 'does not set max_artifacts_size' do
|
||||
it 'set default timeout' do
|
||||
subject
|
||||
|
||||
project.reload
|
||||
expect(project.max_artifacts_size).to be_nil
|
||||
expect(project.build_timeout).to eq(3600)
|
||||
end
|
||||
end
|
||||
|
||||
context 'and user is an admin' do
|
||||
let(:user) { create(:admin) }
|
||||
context 'when build_timeout_human_readable is specified' do
|
||||
let(:params) { { build_timeout_human_readable: '1h 30m' } }
|
||||
|
||||
context 'with admin mode disabled' do
|
||||
it 'set specified timeout' do
|
||||
subject
|
||||
|
||||
project.reload
|
||||
expect(project.build_timeout).to eq(5400)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when build_timeout_human_readable is invalid' do
|
||||
let(:params) { { build_timeout_human_readable: '5m' } }
|
||||
|
||||
it 'set specified timeout' do
|
||||
subject
|
||||
|
||||
expect(controller).to set_flash[:alert]
|
||||
expect(response).to redirect_to(namespace_project_settings_ci_cd_path)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when default_git_depth is not specified' do
|
||||
let(:params) { { ci_cd_settings_attributes: { default_git_depth: 10 } } }
|
||||
|
||||
before do
|
||||
project.ci_cd_settings.update!(default_git_depth: nil)
|
||||
end
|
||||
|
||||
it 'set specified git depth' do
|
||||
subject
|
||||
|
||||
project.reload
|
||||
expect(project.ci_default_git_depth).to eq(10)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when forward_deployment_enabled is not specified' do
|
||||
let(:params) { { ci_cd_settings_attributes: { forward_deployment_enabled: false } } }
|
||||
|
||||
before do
|
||||
project.ci_cd_settings.update!(forward_deployment_enabled: nil)
|
||||
end
|
||||
|
||||
it 'sets forward deployment enabled' do
|
||||
subject
|
||||
|
||||
project.reload
|
||||
expect(project.ci_forward_deployment_enabled).to eq(false)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when max_artifacts_size is specified' do
|
||||
let(:params) { { max_artifacts_size: 10 } }
|
||||
|
||||
context 'and user is not an admin' do
|
||||
it 'does not set max_artifacts_size' do
|
||||
subject
|
||||
|
||||
|
@@ -315,33 +304,81 @@ RSpec.describe Projects::Settings::CiCdController do
|
|||
end
|
||||
end
|
||||
|
||||
context 'with admin mode enabled', :enable_admin_mode do
|
||||
it 'sets max_artifacts_size' do
|
||||
subject
|
||||
context 'and user is an admin' do
|
||||
let(:user) { create(:admin) }
|
||||
|
||||
project.reload
|
||||
expect(project.max_artifacts_size).to eq(10)
|
||||
context 'with admin mode disabled' do
|
||||
it 'does not set max_artifacts_size' do
|
||||
subject
|
||||
|
||||
project.reload
|
||||
expect(project.max_artifacts_size).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'with admin mode enabled', :enable_admin_mode do
|
||||
it 'sets max_artifacts_size' do
|
||||
subject
|
||||
|
||||
project.reload
|
||||
expect(project.max_artifacts_size).to eq(10)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'GET #runner_setup_scripts' do
|
||||
it 'renders the setup scripts' do
|
||||
get :runner_setup_scripts, params: { os: 'linux', arch: 'amd64', namespace_id: project.namespace, project_id: project }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response).to have_key("install")
|
||||
expect(json_response).to have_key("register")
|
||||
end
|
||||
|
||||
it 'renders errors if they occur' do
|
||||
get :runner_setup_scripts, params: { os: 'foo', arch: 'bar', namespace_id: project.namespace, project_id: project }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:bad_request)
|
||||
expect(json_response).to have_key("errors")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'GET #runner_setup_scripts' do
|
||||
it 'renders the setup scripts' do
|
||||
get :runner_setup_scripts, params: { os: 'linux', arch: 'amd64', namespace_id: project.namespace, project_id: project }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response).to have_key("install")
|
||||
expect(json_response).to have_key("register")
|
||||
context 'as a developer' do
|
||||
before do
|
||||
sign_in(user)
|
||||
project.add_developer(user)
|
||||
get :show, params: { namespace_id: project.namespace, project_id: project }
|
||||
end
|
||||
|
||||
it 'renders errors if they occur' do
|
||||
get :runner_setup_scripts, params: { os: 'foo', arch: 'bar', namespace_id: project.namespace, project_id: project }
|
||||
it 'responds with 404' do
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
end
|
||||
|
||||
expect(response).to have_gitlab_http_status(:bad_request)
|
||||
expect(json_response).to have_key("errors")
|
||||
context 'as a reporter' do
|
||||
before do
|
||||
sign_in(user)
|
||||
project.add_reporter(user)
|
||||
get :show, params: { namespace_id: project.namespace, project_id: project }
|
||||
end
|
||||
|
||||
it 'responds with 404' do
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
end
|
||||
|
||||
context 'as an unauthenticated user' do
|
||||
before do
|
||||
get :show, params: { namespace_id: project.namespace, project_id: project }
|
||||
end
|
||||
|
||||
it 'redirects to sign in' do
|
||||
expect(response).to have_gitlab_http_status(:found)
|
||||
expect(response).to redirect_to('/users/sign_in')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@@ -1,23 +1,6 @@
# frozen_string_literal: true

FactoryBot.define do
factory :ci_stage, class: 'Ci::LegacyStage' do
skip_create

transient do
name { 'test' }
status { nil }
warnings { nil }
pipeline factory: :ci_empty_pipeline
end

initialize_with do
Ci::LegacyStage.new(pipeline, name: name,
status: status,
warnings: warnings)
end
end

factory :ci_stage_entity, class: 'Ci::Stage' do
project factory: :project
pipeline factory: :ci_empty_pipeline

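With the :ci_stage factory for Ci::LegacyStage removed, specs build persisted stages through the remaining :ci_stage_entity factory. A usage sketch; only the project and pipeline associations are visible in this hunk, and the name attribute is an assumption:

    # Sketch only: create a persisted stage tied to an empty pipeline.
    pipeline = create(:ci_empty_pipeline)
    stage = create(:ci_stage_entity, pipeline: pipeline, name: 'test') # name assumed
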
@@ -14,7 +14,7 @@ RSpec.describe 'Secure Files', :js do

it 'user sees the Secure Files list component' do
visit project_ci_secure_files_path(project)
expect(page).to have_content('There are no records to show')
expect(page).to have_content('There are no secure files yet.')
end

it 'prompts the user to confirm before deleting a file' do

@@ -37,7 +37,7 @@ RSpec.describe 'Secure Files', :js do

it 'displays an uploaded file in the file list' do
visit project_ci_secure_files_path(project)
expect(page).to have_content('There are no records to show')
expect(page).to have_content('There are no secure files yet.')

page.attach_file('spec/fixtures/ci_secure_files/upload-keystore.jks') do
click_button 'Upload File'

@@ -90,4 +90,27 @@ RSpec.describe 'User browses a job', :js do
end
end
end

context 'job log search' do
before do
visit(project_job_path(project, build))
wait_for_all_requests
end

it 'searches for supplied substring' do
find('[data-testid="job-log-search-box"] input').set('GroupsHelper')

find('[data-testid="search-button"]').click

expect(page).to have_content('26 results found for GroupsHelper')
end

it 'shows no results for supplied substring' do
find('[data-testid="job-log-search-box"] input').set('YouWontFindMe')

find('[data-testid="search-button"]').click

expect(page).to have_content('No search results found')
end
end
end

@@ -1,46 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe 'Secure Files Settings' do
let_it_be(:maintainer) { create(:user) }
let_it_be(:project) { create(:project, creator_id: maintainer.id) }

before_all do
project.add_maintainer(maintainer)
end

context 'when the :ci_secure_files feature flag is enabled' do
before do
stub_feature_flags(ci_secure_files: true)

sign_in(user)
visit project_settings_ci_cd_path(project)
end

context 'authenticated user with admin permissions' do
let(:user) { maintainer }

it 'shows the secure files settings' do
expect(page).to have_content('Secure Files')
end
end
end

context 'when the :ci_secure_files feature flag is disabled' do
before do
stub_feature_flags(ci_secure_files: false)

sign_in(user)
visit project_settings_ci_cd_path(project)
end

context 'authenticated user with admin permissions' do
let(:user) { maintainer }

it 'does not shows the secure files settings' do
expect(page).not_to have_content('Secure Files')
end
end
end
end

spec/features/projects/settings/secure_files_spec.rb (new file, 101 lines)

@@ -0,0 +1,101 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe 'Secure Files', :js do
let(:project) { create(:project) }
let(:user) { create(:user) }

before do
stub_feature_flags(ci_secure_files_read_only: false)
project.add_maintainer(user)
sign_in(user)
end

context 'when the :ci_secure_files feature flag is enabled' do
before do
stub_feature_flags(ci_secure_files: true)

visit project_settings_ci_cd_path(project)
end

context 'authenticated user with admin permissions' do
it 'shows the secure files settings' do
expect(page).to have_content('Secure Files')
end
end
end

context 'when the :ci_secure_files feature flag is disabled' do
before do
stub_feature_flags(ci_secure_files: false)

visit project_settings_ci_cd_path(project)
end

context 'authenticated user with admin permissions' do
it 'does not shows the secure files settings' do
expect(page).not_to have_content('Secure Files')
end
end
end

it 'user sees the Secure Files list component' do
visit project_settings_ci_cd_path(project)

within '#js-secure-files' do
expect(page).to have_content('There are no secure files yet.')
end
end

it 'prompts the user to confirm before deleting a file' do
file = create(:ci_secure_file, project: project)

visit project_settings_ci_cd_path(project)

within '#js-secure-files' do
expect(page).to have_content(file.name)

find('button.btn-danger-secondary').click
end

expect(page).to have_content("Delete #{file.name}?")

click_on('Delete secure file')

visit project_settings_ci_cd_path(project)

within '#js-secure-files' do
expect(page).not_to have_content(file.name)
end
end

it 'displays an uploaded file in the file list' do
visit project_settings_ci_cd_path(project)

within '#js-secure-files' do
expect(page).to have_content('There are no secure files yet.')

page.attach_file('spec/fixtures/ci_secure_files/upload-keystore.jks') do
click_button 'Upload File'
end

expect(page).to have_content('upload-keystore.jks')
end
end

it 'displays an error when a duplicate file upload is attempted' do
create(:ci_secure_file, project: project, name: 'upload-keystore.jks')
visit project_settings_ci_cd_path(project)

within '#js-secure-files' do
expect(page).to have_content('upload-keystore.jks')

page.attach_file('spec/fixtures/ci_secure_files/upload-keystore.jks') do
click_button 'Upload File'
end

expect(page).to have_content('A file with this name already exists.')
end
end
end

@@ -59,7 +59,7 @@ describe('SecureFilesList', () => {
const findUploadButton = () => wrapper.findAll('span.gl-button-text');
const findDeleteModal = () => wrapper.findComponent(GlModal);
const findUploadInput = () => wrapper.findAll('input[type="file"]').at(0);
const findDeleteButton = () => wrapper.findAll('tbody tr td button.btn-danger');
const findDeleteButton = () => wrapper.findAll('[data-testid="delete-button"]');

describe('when secure files exist in a project', () => {
beforeEach(async () => {

@@ -71,7 +71,7 @@ describe('SecureFilesList', () => {
});

it('displays a table with expected headers', () => {
const headers = ['Filename', 'Uploaded'];
const headers = ['File name', 'Uploaded date'];
headers.forEach((header, i) => {
expect(findHeaderAt(i).text()).toBe(header);
});

@@ -121,14 +121,14 @@ describe('SecureFilesList', () => {
});

it('displays a table with expected headers', () => {
const headers = ['Filename', 'Uploaded'];
const headers = ['File name', 'Uploaded date'];
headers.forEach((header, i) => {
expect(findHeaderAt(i).text()).toBe(header);
});
});

it('displays a table with a no records message', () => {
expect(findCell(0, 0).text()).toBe('There are no records to show');
expect(findCell(0, 0).text()).toBe('There are no secure files yet.');
});
});

@@ -1,6 +1,11 @@
import { GlSearchBoxByClick } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import JobLogControllers from '~/jobs/components/job_log_controllers.vue';
import HelpPopover from '~/vue_shared/components/help_popover.vue';
import { mockJobLog } from '../mock_data';

const mockToastShow = jest.fn();

describe('Job log controllers', () => {
let wrapper;

@@ -19,14 +24,30 @@ describe('Job log controllers', () => {
isScrollBottomDisabled: false,
isScrollingDown: true,
isJobLogSizeVisible: true,
jobLog: mockJobLog,
};

const createWrapper = (props) => {
const createWrapper = (props, jobLogSearch = false) => {
wrapper = mount(JobLogControllers, {
propsData: {
...defaultProps,
...props,
},
provide: {
glFeatures: {
jobLogSearch,
},
},
data() {
return {
searchTerm: '82',
};
},
mocks: {
$toast: {
show: mockToastShow,
},
},
});
};

@@ -35,6 +56,8 @@ describe('Job log controllers', () => {
const findRawLinkController = () => wrapper.find('[data-testid="job-raw-link-controller"]');
const findScrollTop = () => wrapper.find('[data-testid="job-controller-scroll-top"]');
const findScrollBottom = () => wrapper.find('[data-testid="job-controller-scroll-bottom"]');
const findJobLogSearch = () => wrapper.findComponent(GlSearchBoxByClick);
const findSearchHelp = () => wrapper.findComponent(HelpPopover);

describe('Truncate information', () => {
describe('with isJobLogSizeVisible', () => {

@@ -179,4 +202,40 @@ describe('Job log controllers', () => {
});
});
});

describe('Job log search', () => {
describe('with feature flag off', () => {
it('does not display job log search', () => {
createWrapper();

expect(findJobLogSearch().exists()).toBe(false);
expect(findSearchHelp().exists()).toBe(false);
});
});

describe('with feature flag on', () => {
beforeEach(() => {
createWrapper({}, { jobLogSearch: true });
});

it('displays job log search', () => {
expect(findJobLogSearch().exists()).toBe(true);
expect(findSearchHelp().exists()).toBe(true);
});

it('emits search results', () => {
const expectedSearchResults = [[[mockJobLog[6].lines[1], mockJobLog[6].lines[2]]]];

findJobLogSearch().vm.$emit('submit');

expect(wrapper.emitted('searchResults')).toEqual(expectedSearchResults);
});

it('clears search results', () => {
findJobLogSearch().vm.$emit('clear');

expect(wrapper.emitted('searchResults')).toEqual([[[]]]);
});
});
});
});

@@ -179,4 +179,46 @@ describe('Job Log Line', () => {
expect(findLink().exists()).toBe(false);
});
});

describe('job log search', () => {
const mockSearchResults = [
{
offset: 1533,
content: [{ text: '$ echo "82.71"', style: 'term-fg-l-green term-bold' }],
section: 'step-script',
lineNumber: 20,
},
{ offset: 1560, content: [{ text: '82.71' }], section: 'step-script', lineNumber: 21 },
];

it('applies highlight class to search result elements', () => {
createComponent({
line: {
offset: 1560,
content: [{ text: '82.71' }],
section: 'step-script',
lineNumber: 21,
},
path: '/root/ci-project/-/jobs/1089',
searchResults: mockSearchResults,
});

expect(wrapper.classes()).toContain('gl-bg-gray-500');
});

it('does not apply highlight class to search result elements', () => {
createComponent({
line: {
offset: 1560,
content: [{ text: 'docker' }],
section: 'step-script',
lineNumber: 29,
},
path: '/root/ci-project/-/jobs/1089',
searchResults: mockSearchResults,
});

expect(wrapper.classes()).not.toContain('gl-bg-gray-500');
});
});
});

@@ -1414,3 +1414,167 @@ export const unscheduleMutationResponse = {
|
|||
},
|
||||
},
|
||||
};
|
||||
|
||||
export const mockJobLog = [
|
||||
{ offset: 0, content: [{ text: 'Running with gitlab-runner 15.0.0 (febb2a09)' }], lineNumber: 0 },
|
||||
{ offset: 54, content: [{ text: ' on colima-docker EwM9WzgD' }], lineNumber: 1 },
|
||||
{
|
||||
isClosed: false,
|
||||
isHeader: true,
|
||||
line: {
|
||||
offset: 91,
|
||||
content: [{ text: 'Resolving secrets', style: 'term-fg-l-cyan term-bold' }],
|
||||
section: 'resolve-secrets',
|
||||
section_header: true,
|
||||
lineNumber: 2,
|
||||
section_duration: '00:00',
|
||||
},
|
||||
lines: [],
|
||||
},
|
||||
{
|
||||
isClosed: false,
|
||||
isHeader: true,
|
||||
line: {
|
||||
offset: 218,
|
||||
content: [{ text: 'Preparing the "docker" executor', style: 'term-fg-l-cyan term-bold' }],
|
||||
section: 'prepare-executor',
|
||||
section_header: true,
|
||||
lineNumber: 4,
|
||||
section_duration: '00:01',
|
||||
},
|
||||
lines: [
|
||||
{
|
||||
offset: 317,
|
||||
content: [{ text: 'Using Docker executor with image ruby:2.7 ...' }],
|
||||
section: 'prepare-executor',
|
||||
lineNumber: 5,
|
||||
},
|
||||
{
|
||||
offset: 372,
|
||||
content: [{ text: 'Pulling docker image ruby:2.7 ...' }],
|
||||
section: 'prepare-executor',
|
||||
lineNumber: 6,
|
||||
},
|
||||
{
|
||||
offset: 415,
|
||||
content: [
|
||||
{
|
||||
text:
|
||||
'Using docker image sha256:55106bf6ba7f452c38d01ea760affc6ceb67d4b60068ffadab98d1b7b007668c for ruby:2.7 with digest ruby@sha256:23d08a4bae1a12ee3fce017f83204fcf9a02243443e4a516e65e5ff73810a449 ...',
|
||||
},
|
||||
],
|
||||
section: 'prepare-executor',
|
||||
lineNumber: 7,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
isClosed: false,
|
||||
isHeader: true,
|
||||
line: {
|
||||
offset: 665,
|
||||
content: [{ text: 'Preparing environment', style: 'term-fg-l-cyan term-bold' }],
|
||||
section: 'prepare-script',
|
||||
section_header: true,
|
||||
lineNumber: 9,
|
||||
section_duration: '00:01',
|
||||
},
|
||||
lines: [
|
||||
{
|
||||
offset: 752,
|
||||
content: [
|
||||
{ text: 'Running on runner-ewm9wzgd-project-20-concurrent-0 via 8ea689ec6969...' },
|
||||
],
|
||||
section: 'prepare-script',
|
||||
lineNumber: 10,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
isClosed: false,
|
||||
isHeader: true,
|
||||
line: {
|
||||
offset: 865,
|
||||
content: [{ text: 'Getting source from Git repository', style: 'term-fg-l-cyan term-bold' }],
|
||||
section: 'get-sources',
|
||||
section_header: true,
|
||||
lineNumber: 12,
|
||||
section_duration: '00:01',
|
||||
},
|
||||
lines: [
|
||||
{
|
||||
offset: 962,
|
||||
content: [
|
||||
{
|
||||
text: 'Fetching changes with git depth set to 20...',
|
||||
style: 'term-fg-l-green term-bold',
|
||||
},
|
||||
],
|
||||
section: 'get-sources',
|
||||
lineNumber: 13,
|
||||
},
|
||||
{
|
||||
offset: 1019,
|
||||
content: [
|
||||
{ text: 'Reinitialized existing Git repository in /builds/root/ci-project/.git/' },
|
||||
],
|
||||
section: 'get-sources',
|
||||
lineNumber: 14,
|
||||
},
|
||||
{
|
||||
offset: 1090,
|
||||
content: [{ text: 'Checking out e0f63d76 as main...', style: 'term-fg-l-green term-bold' }],
|
||||
section: 'get-sources',
|
||||
lineNumber: 15,
|
||||
},
|
||||
{
|
||||
offset: 1136,
|
||||
content: [{ text: 'Skipping Git submodules setup', style: 'term-fg-l-green term-bold' }],
|
||||
section: 'get-sources',
|
||||
lineNumber: 16,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
isClosed: false,
|
||||
isHeader: true,
|
||||
line: {
|
||||
offset: 1217,
|
||||
content: [
|
||||
{
|
||||
text: 'Executing "step_script" stage of the job script',
|
||||
style: 'term-fg-l-cyan term-bold',
|
||||
},
|
||||
],
|
||||
section: 'step-script',
|
||||
section_header: true,
|
||||
lineNumber: 18,
|
||||
section_duration: '00:00',
|
||||
},
|
||||
lines: [
|
||||
{
|
||||
offset: 1327,
|
||||
content: [
|
||||
{
|
||||
text:
|
||||
'Using docker image sha256:55106bf6ba7f452c38d01ea760affc6ceb67d4b60068ffadab98d1b7b007668c for ruby:2.7 with digest ruby@sha256:23d08a4bae1a12ee3fce017f83204fcf9a02243443e4a516e65e5ff73810a449 ...',
|
||||
},
|
||||
],
|
||||
section: 'step-script',
|
||||
lineNumber: 19,
|
||||
},
|
||||
{
|
||||
offset: 1533,
|
||||
content: [{ text: '$ echo "82.71"', style: 'term-fg-l-green term-bold' }],
|
||||
section: 'step-script',
|
||||
lineNumber: 20,
|
||||
},
|
||||
{ offset: 1560, content: [{ text: '82.71' }], section: 'step-script', lineNumber: 21 },
|
||||
],
|
||||
},
|
||||
{
|
||||
offset: 1605,
|
||||
content: [{ text: 'Job succeeded', style: 'term-fg-l-green term-bold' }],
|
||||
lineNumber: 23,
|
||||
},
|
||||
];
|
||||
|
|
|
@ -10,6 +10,7 @@ import {
|
|||
} from 'helpers/vue_test_utils_helper';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import { createAlert } from '~/flash';
|
||||
import { s__ } from '~/locale';
|
||||
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
|
||||
import { updateHistory } from '~/lib/utils/url_utility';
|
||||
|
||||
|
@ -20,6 +21,7 @@ import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_
|
|||
import RunnerList from '~/runner/components/runner_list.vue';
|
||||
import RunnerListEmptyState from '~/runner/components/runner_list_empty_state.vue';
|
||||
import RunnerStats from '~/runner/components/stat/runner_stats.vue';
|
||||
import RunnerCount from '~/runner/components/stat/runner_count.vue';
|
||||
import RunnerActionsCell from '~/runner/components/cells/runner_actions_cell.vue';
|
||||
import RegistrationDropdown from '~/runner/components/registration/registration_dropdown.vue';
|
||||
import RunnerPagination from '~/runner/components/runner_pagination.vue';
|
||||
|
@ -30,8 +32,6 @@ import {
|
|||
CREATED_DESC,
|
||||
DEFAULT_SORT,
|
||||
INSTANCE_TYPE,
|
||||
GROUP_TYPE,
|
||||
PROJECT_TYPE,
|
||||
PARAM_KEY_PAUSED,
|
||||
PARAM_KEY_STATUS,
|
||||
PARAM_KEY_TAG,
|
||||
|
@ -59,6 +59,9 @@ const mockRegistrationToken = 'MOCK_REGISTRATION_TOKEN';
|
|||
const mockRunners = runnersData.data.runners.nodes;
|
||||
const mockRunnersCount = runnersCountData.data.runners.count;
|
||||
|
||||
const mockRunnersQuery = jest.fn();
|
||||
const mockRunnersCountQuery = jest.fn();
|
||||
|
||||
jest.mock('~/flash');
|
||||
jest.mock('~/runner/sentry_utils');
|
||||
jest.mock('~/lib/utils/url_utility', () => ({
|
||||
|
@ -71,8 +74,6 @@ Vue.use(GlToast);
|
|||
|
||||
describe('AdminRunnersApp', () => {
|
||||
let wrapper;
|
||||
let mockRunnersQuery;
|
||||
let mockRunnersCountQuery;
|
||||
let cacheConfig;
|
||||
let localMutations;
|
||||
|
||||
|
@ -116,15 +117,13 @@ describe('AdminRunnersApp', () => {
|
|||
},
|
||||
...options,
|
||||
});
|
||||
|
||||
return waitForPromises();
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
setWindowLocation('/admin/runners');
|
||||
|
||||
mockRunnersQuery = jest.fn().mockResolvedValue(runnersData);
|
||||
mockRunnersCountQuery = jest.fn().mockResolvedValue(runnersCountData);
|
||||
createComponent();
|
||||
await waitForPromises();
|
||||
beforeEach(() => {
|
||||
mockRunnersQuery.mockResolvedValue(runnersData);
|
||||
mockRunnersCountQuery.mockResolvedValue(runnersCountData);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
|
@ -134,92 +133,46 @@ describe('AdminRunnersApp', () => {
|
|||
});
|
||||
|
||||
it('shows the runner tabs with a runner count for each type', async () => {
|
||||
mockRunnersCountQuery.mockImplementation(({ type }) => {
|
||||
let count;
|
||||
switch (type) {
|
||||
case INSTANCE_TYPE:
|
||||
count = 3;
|
||||
break;
|
||||
case GROUP_TYPE:
|
||||
count = 2;
|
||||
break;
|
||||
case PROJECT_TYPE:
|
||||
count = 1;
|
||||
break;
|
||||
default:
|
||||
count = 6;
|
||||
break;
|
||||
}
|
||||
return Promise.resolve({ data: { runners: { count } } });
|
||||
});
|
||||
|
||||
createComponent({ mountFn: mountExtended });
|
||||
await waitForPromises();
|
||||
await createComponent({ mountFn: mountExtended });
|
||||
|
||||
expect(findRunnerTypeTabs().text()).toMatchInterpolatedText(
|
||||
`All 6 Instance 3 Group 2 Project 1`,
|
||||
);
|
||||
});
|
||||
|
||||
it('shows the runner tabs with a formatted runner count', async () => {
|
||||
mockRunnersCountQuery.mockImplementation(({ type }) => {
|
||||
let count;
|
||||
switch (type) {
|
||||
case INSTANCE_TYPE:
|
||||
count = 3000;
|
||||
break;
|
||||
case GROUP_TYPE:
|
||||
count = 2000;
|
||||
break;
|
||||
case PROJECT_TYPE:
|
||||
count = 1000;
|
||||
break;
|
||||
default:
|
||||
count = 6000;
|
||||
break;
|
||||
}
|
||||
return Promise.resolve({ data: { runners: { count } } });
|
||||
});
|
||||
|
||||
createComponent({ mountFn: mountExtended });
|
||||
await waitForPromises();
|
||||
|
||||
expect(findRunnerTypeTabs().text()).toMatchInterpolatedText(
|
||||
`All 6,000 Instance 3,000 Group 2,000 Project 1,000`,
|
||||
`All ${mockRunnersCount} Instance ${mockRunnersCount} Group ${mockRunnersCount} Project ${mockRunnersCount}`,
|
||||
);
|
||||
});
|
||||
|
||||
it('shows the runner setup instructions', () => {
|
||||
createComponent();
|
||||
|
||||
expect(findRegistrationDropdown().props('registrationToken')).toBe(mockRegistrationToken);
|
||||
expect(findRegistrationDropdown().props('type')).toBe(INSTANCE_TYPE);
|
||||
});
|
||||
|
||||
it('shows total runner counts', async () => {
|
||||
expect(mockRunnersCountQuery).toHaveBeenCalledWith({
|
||||
status: STATUS_ONLINE,
|
||||
});
|
||||
expect(mockRunnersCountQuery).toHaveBeenCalledWith({
|
||||
status: STATUS_OFFLINE,
|
||||
});
|
||||
expect(mockRunnersCountQuery).toHaveBeenCalledWith({
|
||||
status: STATUS_STALE,
|
||||
});
|
||||
await createComponent({ mountFn: mountExtended });
|
||||
|
||||
expect(findRunnerStats().props()).toMatchObject({
|
||||
onlineRunnersCount: mockRunnersCount,
|
||||
offlineRunnersCount: mockRunnersCount,
|
||||
staleRunnersCount: mockRunnersCount,
|
||||
});
|
||||
expect(mockRunnersCountQuery).toHaveBeenCalledWith({ status: STATUS_ONLINE });
|
||||
expect(mockRunnersCountQuery).toHaveBeenCalledWith({ status: STATUS_OFFLINE });
|
||||
expect(mockRunnersCountQuery).toHaveBeenCalledWith({ status: STATUS_STALE });
|
||||
|
||||
expect(findRunnerStats().text()).toContain(
|
||||
`${s__('Runners|Online runners')} ${mockRunnersCount}`,
|
||||
);
|
||||
expect(findRunnerStats().text()).toContain(
|
||||
`${s__('Runners|Offline runners')} ${mockRunnersCount}`,
|
||||
);
|
||||
expect(findRunnerStats().text()).toContain(
|
||||
`${s__('Runners|Stale runners')} ${mockRunnersCount}`,
|
||||
);
|
||||
});
|
||||
|
||||
it('shows the runners list', () => {
|
||||
it('shows the runners list', async () => {
|
||||
await createComponent();
|
||||
|
||||
expect(findRunnerList().props('runners')).toEqual(mockRunners);
|
||||
});
|
||||
|
||||
it('runner item links to the runner admin page', async () => {
|
||||
createComponent({ mountFn: mountExtended });
|
||||
|
||||
await waitForPromises();
|
||||
await createComponent({ mountFn: mountExtended });
|
||||
|
||||
const { id, shortSha } = mockRunners[0];
|
||||
const numericId = getIdFromGraphQLId(id);
|
||||
|
@ -231,12 +184,9 @@ describe('AdminRunnersApp', () => {
|
|||
});
|
||||
|
||||
it('renders runner actions for each runner', async () => {
|
||||
createComponent({ mountFn: mountExtended });
|
||||
|
||||
await waitForPromises();
|
||||
await createComponent({ mountFn: mountExtended });
|
||||
|
||||
const runnerActions = wrapper.find('tr [data-testid="td-actions"]').find(RunnerActionsCell);
|
||||
|
||||
const runner = mockRunners[0];
|
||||
|
||||
expect(runnerActions.props()).toEqual({
|
||||
|
@ -245,7 +195,9 @@ describe('AdminRunnersApp', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('requests the runners with no filters', () => {
|
||||
it('requests the runners with no filters', async () => {
|
||||
await createComponent();
|
||||
|
||||
expect(mockRunnersQuery).toHaveBeenLastCalledWith({
|
||||
status: undefined,
|
||||
type: undefined,
|
||||
|
@ -284,10 +236,8 @@ describe('AdminRunnersApp', () => {
|
|||
beforeEach(async () => {
|
||||
mockRunnersCountQuery.mockClear();
|
||||
|
||||
createComponent({ mountFn: mountExtended });
|
||||
await createComponent({ mountFn: mountExtended });
|
||||
showToast = jest.spyOn(wrapper.vm.$root.$toast, 'show');
|
||||
|
||||
await waitForPromises();
|
||||
});
|
||||
|
||||
it('Links to the runner page', async () => {
|
||||
|
@ -303,7 +253,6 @@ describe('AdminRunnersApp', () => {
|
|||
findRunnerActionsCell().vm.$emit('toggledPaused');
|
||||
|
||||
expect(mockRunnersCountQuery).toHaveBeenCalledTimes(COUNT_QUERIES + FILTERED_COUNT_QUERIES);
|
||||
|
||||
expect(showToast).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
|
@ -319,8 +268,12 @@ describe('AdminRunnersApp', () => {
|
|||
beforeEach(async () => {
|
||||
setWindowLocation(`?status[]=${STATUS_ONLINE}&runner_type[]=${INSTANCE_TYPE}&tag[]=tag1`);
|
||||
|
||||
createComponent();
|
||||
await waitForPromises();
|
||||
await createComponent({
|
||||
stubs: {
|
||||
RunnerStats,
|
||||
RunnerCount,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('sets the filters in the search bar', () => {
|
||||
|
@ -351,16 +304,17 @@ describe('AdminRunnersApp', () => {
|
|||
status: STATUS_ONLINE,
|
||||
tagList: ['tag1'],
|
||||
});
|
||||
|
||||
expect(findRunnerStats().props()).toMatchObject({
|
||||
onlineRunnersCount: mockRunnersCount,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when a filter is selected by the user', () => {
|
||||
beforeEach(() => {
|
||||
mockRunnersCountQuery.mockClear();
|
||||
createComponent({
|
||||
stubs: {
|
||||
RunnerStats,
|
||||
RunnerCount,
|
||||
},
|
||||
});
|
||||
|
||||
findRunnerFilteredSearchBar().vm.$emit('input', {
|
||||
runnerType: null,
|
||||
|
@ -375,7 +329,7 @@ describe('AdminRunnersApp', () => {
|
|||
it('updates the browser url', () => {
|
||||
expect(updateHistory).toHaveBeenLastCalledWith({
|
||||
title: expect.any(String),
|
||||
url: 'http://test.host/admin/runners?status[]=ONLINE&tag[]=tag1&sort=CREATED_ASC',
|
||||
url: expect.stringContaining('?status[]=ONLINE&tag[]=tag1&sort=CREATED_ASC'),
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -393,26 +347,6 @@ describe('AdminRunnersApp', () => {
|
|||
tagList: ['tag1'],
|
||||
status: STATUS_ONLINE,
|
||||
});
|
||||
|
||||
expect(findRunnerStats().props()).toMatchObject({
|
||||
onlineRunnersCount: mockRunnersCount,
|
||||
});
|
||||
});
|
||||
|
||||
it('skips fetching count results for status that were not in filter', () => {
|
||||
expect(mockRunnersCountQuery).not.toHaveBeenCalledWith({
|
||||
tagList: ['tag1'],
|
||||
status: STATUS_OFFLINE,
|
||||
});
|
||||
expect(mockRunnersCountQuery).not.toHaveBeenCalledWith({
|
||||
tagList: ['tag1'],
|
||||
status: STATUS_STALE,
|
||||
});
|
||||
|
||||
expect(findRunnerStats().props()).toMatchObject({
|
||||
offlineRunnersCount: null,
|
||||
staleRunnersCount: null,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -458,14 +392,13 @@ describe('AdminRunnersApp', () => {
|
|||
|
||||
describe('when no runners are found', () => {
|
||||
beforeEach(async () => {
|
||||
mockRunnersQuery = jest.fn().mockResolvedValue({
|
||||
mockRunnersQuery.mockResolvedValue({
|
||||
data: {
|
||||
runners: { nodes: [] },
|
||||
},
|
||||
});
|
||||
|
||||
createComponent();
|
||||
await waitForPromises();
|
||||
await createComponent();
|
||||
});
|
||||
|
||||
it('shows an empty state', () => {
|
||||
|
@ -490,9 +423,8 @@ describe('AdminRunnersApp', () => {
|
|||
|
||||
describe('when runners query fails', () => {
|
||||
beforeEach(async () => {
|
||||
mockRunnersQuery = jest.fn().mockRejectedValue(new Error('Error!'));
|
||||
createComponent();
|
||||
await waitForPromises();
|
||||
mockRunnersQuery.mockRejectedValue(new Error('Error!'));
|
||||
await createComponent();
|
||||
});
|
||||
|
||||
it('error is shown to the user', async () => {
|
||||
|
@ -509,10 +441,9 @@ describe('AdminRunnersApp', () => {
|
|||
|
||||
describe('Pagination', () => {
|
||||
beforeEach(async () => {
|
||||
mockRunnersQuery = jest.fn().mockResolvedValue(runnersDataPaginated);
|
||||
mockRunnersQuery.mockResolvedValue(runnersDataPaginated);
|
||||
|
||||
createComponent({ mountFn: mountExtended });
|
||||
await waitForPromises();
|
||||
await createComponent({ mountFn: mountExtended });
|
||||
});
|
||||
|
||||
it('navigates to the next page', async () => {
|
||||
|
|
|
@ -1,10 +1,30 @@
|
|||
import { GlTab } from '@gitlab/ui';
|
||||
import { shallowMount } from '@vue/test-utils';
|
||||
import RunnerTypeTabs from '~/runner/components/runner_type_tabs.vue';
|
||||
import RunnerCount from '~/runner/components/stat/runner_count.vue';
|
||||
import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '~/runner/constants';
|
||||
|
||||
const mockSearch = { runnerType: null, filters: [], pagination: { page: 1 }, sort: 'CREATED_DESC' };
|
||||
|
||||
const mockCount = (type, multiplier = 1) => {
|
||||
let count;
|
||||
switch (type) {
|
||||
case INSTANCE_TYPE:
|
||||
count = 3;
|
||||
break;
|
||||
case GROUP_TYPE:
|
||||
count = 2;
|
||||
break;
|
||||
case PROJECT_TYPE:
|
||||
count = 1;
|
||||
break;
|
||||
default:
|
||||
count = 6;
|
||||
break;
|
||||
}
|
||||
return count * multiplier;
|
||||
};
|
||||
|
||||
describe('RunnerTypeTabs', () => {
|
||||
let wrapper;
|
||||
|
||||
|
@ -13,33 +33,94 @@ describe('RunnerTypeTabs', () => {
|
|||
findTabs()
|
||||
.filter((tab) => tab.attributes('active') === 'true')
|
||||
.at(0);
|
||||
const getTabsTitles = () => findTabs().wrappers.map((tab) => tab.text());
|
||||
const getTabsTitles = () => findTabs().wrappers.map((tab) => tab.text().replace(/\s+/g, ' '));
|
||||
|
||||
const createComponent = ({ props, ...options } = {}) => {
|
||||
const createComponent = ({ props, stubs, ...options } = {}) => {
|
||||
wrapper = shallowMount(RunnerTypeTabs, {
|
||||
propsData: {
|
||||
value: mockSearch,
|
||||
countScope: INSTANCE_TYPE,
|
||||
countVariables: {},
|
||||
...props,
|
||||
},
|
||||
stubs: {
|
||||
GlTab,
|
||||
...stubs,
|
||||
},
|
||||
...options,
|
||||
});
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
createComponent();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
wrapper.destroy();
|
||||
});
|
||||
|
||||
it('Renders all options to filter runners by default', () => {
|
||||
createComponent();
|
||||
|
||||
expect(getTabsTitles()).toEqual(['All', 'Instance', 'Group', 'Project']);
|
||||
});
|
||||
|
||||
it('Shows count when receiving a number', () => {
|
||||
createComponent({
|
||||
stubs: {
|
||||
RunnerCount: {
|
||||
props: ['variables'],
|
||||
render() {
|
||||
return this.$scopedSlots.default({
|
||||
count: mockCount(this.variables.type),
|
||||
});
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(getTabsTitles()).toEqual([`All 6`, `Instance 3`, `Group 2`, `Project 1`]);
|
||||
});
|
||||
|
||||
it('Shows formatted count when receiving a large number', () => {
|
||||
createComponent({
|
||||
stubs: {
|
||||
RunnerCount: {
|
||||
props: ['variables'],
|
||||
render() {
|
||||
return this.$scopedSlots.default({
|
||||
count: mockCount(this.variables.type, 1000),
|
||||
});
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(getTabsTitles()).toEqual([
|
||||
`All 6,000`,
|
||||
`Instance 3,000`,
|
||||
`Group 2,000`,
|
||||
`Project 1,000`,
|
||||
]);
|
||||
});
|
||||
|
||||
it('Renders a count next to each tab', () => {
|
||||
const mockVariables = {
|
||||
paused: true,
|
||||
status: 'ONLINE',
|
||||
};
|
||||
|
||||
createComponent({
|
||||
props: {
|
||||
countVariables: mockVariables,
|
||||
},
|
||||
});
|
||||
|
||||
findTabs().wrappers.forEach((tab) => {
|
||||
expect(tab.find(RunnerCount).props()).toEqual({
|
||||
scope: INSTANCE_TYPE,
|
||||
skip: false,
|
||||
variables: expect.objectContaining(mockVariables),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Renders fewer options to filter runners', () => {
|
||||
createComponent({
|
||||
props: {
|
||||
|
@ -51,6 +132,8 @@ describe('RunnerTypeTabs', () => {
|
|||
});
|
||||
|
||||
it('"All" is selected by default', () => {
|
||||
createComponent();
|
||||
|
||||
expect(findActiveTab().text()).toBe('All');
|
||||
});
|
||||
|
||||
|
@ -71,6 +154,7 @@ describe('RunnerTypeTabs', () => {
|
|||
const emittedValue = () => wrapper.emitted('input')[0][0];
|
||||
|
||||
beforeEach(() => {
|
||||
createComponent();
|
||||
findTabs().at(2).vm.$emit('click');
|
||||
});
|
||||
|
||||
|
@ -89,27 +173,30 @@ describe('RunnerTypeTabs', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('When using a custom slot', () => {
|
||||
const mockContent = 'content';
|
||||
describe('Component API', () => {
|
||||
describe('When .refetch() is called', () => {
|
||||
let mockRefetch;
|
||||
|
||||
beforeEach(() => {
|
||||
createComponent({
|
||||
scopedSlots: {
|
||||
title: `
|
||||
<span>
|
||||
{{props.tab.title}} ${mockContent}
|
||||
</span>`,
|
||||
},
|
||||
beforeEach(() => {
|
||||
mockRefetch = jest.fn();
|
||||
|
||||
createComponent({
|
||||
stubs: {
|
||||
RunnerCount: {
|
||||
methods: {
|
||||
refetch: mockRefetch,
|
||||
},
|
||||
render() {},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
wrapper.vm.refetch();
|
||||
});
|
||||
});
|
||||
|
||||
it('Renders tabs with additional information', () => {
|
||||
expect(findTabs().wrappers.map((tab) => tab.text())).toEqual([
|
||||
`All ${mockContent}`,
|
||||
`Instance ${mockContent}`,
|
||||
`Group ${mockContent}`,
|
||||
`Project ${mockContent}`,
|
||||
]);
|
||||
it('refetch is called for each count', () => {
|
||||
expect(mockRefetch).toHaveBeenCalledTimes(4);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
148 spec/frontend/runner/components/stat/runner_count_spec.js (Normal file)
|
@ -0,0 +1,148 @@
|
|||
import Vue, { nextTick } from 'vue';
|
||||
import VueApollo from 'vue-apollo';
|
||||
import { shallowMount } from '@vue/test-utils';
|
||||
import RunnerCount from '~/runner/components/stat/runner_count.vue';
|
||||
import { INSTANCE_TYPE, GROUP_TYPE } from '~/runner/constants';
|
||||
import createMockApollo from 'helpers/mock_apollo_helper';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import { captureException } from '~/runner/sentry_utils';
|
||||
|
||||
import adminRunnersCountQuery from '~/runner/graphql/list/admin_runners_count.query.graphql';
|
||||
import getGroupRunnersCountQuery from '~/runner/graphql/list/group_runners_count.query.graphql';
|
||||
|
||||
import { runnersCountData, groupRunnersCountData } from '../../mock_data';
|
||||
|
||||
jest.mock('~/runner/sentry_utils');
|
||||
|
||||
Vue.use(VueApollo);
|
||||
|
||||
describe('RunnerCount', () => {
|
||||
let wrapper;
|
||||
let mockRunnersCountQuery;
|
||||
let mockGroupRunnersCountQuery;
|
||||
|
||||
const createComponent = ({ props = {}, ...options } = {}) => {
|
||||
const handlers = [
|
||||
[adminRunnersCountQuery, mockRunnersCountQuery],
|
||||
[getGroupRunnersCountQuery, mockGroupRunnersCountQuery],
|
||||
];
|
||||
|
||||
wrapper = shallowMount(RunnerCount, {
|
||||
apolloProvider: createMockApollo(handlers),
|
||||
propsData: {
|
||||
...props,
|
||||
},
|
||||
scopedSlots: {
|
||||
default: '<strong>{{props.count}}</strong>',
|
||||
},
|
||||
...options,
|
||||
});
|
||||
|
||||
return waitForPromises();
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
mockRunnersCountQuery = jest.fn().mockResolvedValue(runnersCountData);
|
||||
mockGroupRunnersCountQuery = jest.fn().mockResolvedValue(groupRunnersCountData);
|
||||
});
|
||||
|
||||
describe('in admin scope', () => {
|
||||
const mockVariables = { status: 'ONLINE' };
|
||||
|
||||
beforeEach(async () => {
|
||||
await createComponent({ props: { scope: INSTANCE_TYPE } });
|
||||
});
|
||||
|
||||
it('fetches data from admin query', () => {
|
||||
expect(mockRunnersCountQuery).toHaveBeenCalledTimes(1);
|
||||
expect(mockRunnersCountQuery).toHaveBeenCalledWith({});
|
||||
});
|
||||
|
||||
it('fetches data with filters', async () => {
|
||||
await createComponent({ props: { scope: INSTANCE_TYPE, variables: mockVariables } });
|
||||
|
||||
expect(mockRunnersCountQuery).toHaveBeenCalledTimes(2);
|
||||
expect(mockRunnersCountQuery).toHaveBeenCalledWith(mockVariables);
|
||||
|
||||
expect(wrapper.html()).toBe(`<strong>${runnersCountData.data.runners.count}</strong>`);
|
||||
});
|
||||
|
||||
it('does not fetch from the group query', async () => {
|
||||
expect(mockGroupRunnersCountQuery).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
describe('when this query is skipped after data was loaded', () => {
|
||||
beforeEach(async () => {
|
||||
wrapper.setProps({ skip: true });
|
||||
|
||||
await nextTick();
|
||||
});
|
||||
|
||||
it('clears current data', () => {
|
||||
expect(wrapper.html()).toBe('<strong></strong>');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when skipping query', () => {
|
||||
beforeEach(async () => {
|
||||
await createComponent({ props: { scope: INSTANCE_TYPE, skip: true } });
|
||||
});
|
||||
|
||||
it('does not fetch data', async () => {
|
||||
expect(mockRunnersCountQuery).not.toHaveBeenCalled();
|
||||
expect(mockGroupRunnersCountQuery).not.toHaveBeenCalled();
|
||||
|
||||
expect(wrapper.html()).toBe('<strong></strong>');
|
||||
});
|
||||
});
|
||||
|
||||
describe('when runners query fails', () => {
|
||||
const mockError = new Error('error!');
|
||||
|
||||
beforeEach(async () => {
|
||||
mockRunnersCountQuery.mockRejectedValue(mockError);
|
||||
|
||||
await createComponent({ props: { scope: INSTANCE_TYPE } });
|
||||
});
|
||||
|
||||
it('data is not shown and error is reported', async () => {
|
||||
expect(wrapper.html()).toBe('<strong></strong>');
|
||||
|
||||
expect(captureException).toHaveBeenCalledWith({
|
||||
component: 'RunnerCount',
|
||||
error: mockError,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('in group scope', () => {
|
||||
beforeEach(async () => {
|
||||
await createComponent({ props: { scope: GROUP_TYPE } });
|
||||
});
|
||||
|
||||
it('fetches data from the group query', async () => {
|
||||
expect(mockGroupRunnersCountQuery).toHaveBeenCalledTimes(1);
|
||||
expect(mockGroupRunnersCountQuery).toHaveBeenCalledWith({});
|
||||
|
||||
expect(wrapper.html()).toBe(
|
||||
`<strong>${groupRunnersCountData.data.group.runners.count}</strong>`,
|
||||
);
|
||||
});
|
||||
|
||||
it('does not fetch from the admin query', () => {
|
||||
expect(mockRunnersCountQuery).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('when .refetch() is called', () => {
|
||||
beforeEach(async () => {
|
||||
await createComponent({ props: { scope: INSTANCE_TYPE } });
|
||||
wrapper.vm.refetch();
|
||||
});
|
||||
|
||||
it('refetches the count', () => {
|
||||
expect(mockRunnersCountQuery).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,21 +1,24 @@
|
|||
import { shallowMount, mount } from '@vue/test-utils';
|
||||
import { s__ } from '~/locale';
|
||||
import RunnerStats from '~/runner/components/stat/runner_stats.vue';
|
||||
import RunnerCount from '~/runner/components/stat/runner_count.vue';
|
||||
import RunnerStatusStat from '~/runner/components/stat/runner_status_stat.vue';
|
||||
import { STATUS_ONLINE, STATUS_OFFLINE, STATUS_STALE } from '~/runner/constants';
|
||||
import { INSTANCE_TYPE, STATUS_ONLINE, STATUS_OFFLINE, STATUS_STALE } from '~/runner/constants';
|
||||
|
||||
describe('RunnerStats', () => {
|
||||
let wrapper;
|
||||
|
||||
const findRunnerCountAt = (i) => wrapper.findAllComponents(RunnerCount).at(i);
|
||||
const findRunnerStatusStatAt = (i) => wrapper.findAllComponents(RunnerStatusStat).at(i);
|
||||
|
||||
const createComponent = ({ props = {}, mountFn = shallowMount } = {}) => {
|
||||
const createComponent = ({ props = {}, mountFn = shallowMount, ...options } = {}) => {
|
||||
wrapper = mountFn(RunnerStats, {
|
||||
propsData: {
|
||||
onlineRunnersCount: 3,
|
||||
offlineRunnersCount: 2,
|
||||
staleRunnersCount: 1,
|
||||
scope: INSTANCE_TYPE,
|
||||
variables: {},
|
||||
...props,
|
||||
},
|
||||
...options,
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -24,13 +27,46 @@ describe('RunnerStats', () => {
|
|||
});
|
||||
|
||||
it('Displays all the stats', () => {
|
||||
createComponent({ mountFn: mount });
|
||||
const mockCounts = {
|
||||
[STATUS_ONLINE]: 3,
|
||||
[STATUS_OFFLINE]: 2,
|
||||
[STATUS_STALE]: 1,
|
||||
};
|
||||
|
||||
const stats = wrapper.text();
|
||||
createComponent({
|
||||
mountFn: mount,
|
||||
stubs: {
|
||||
RunnerCount: {
|
||||
props: ['variables'],
|
||||
render() {
|
||||
return this.$scopedSlots.default({
|
||||
count: mockCounts[this.variables.status],
|
||||
});
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(stats).toMatch('Online runners 3');
|
||||
expect(stats).toMatch('Offline runners 2');
|
||||
expect(stats).toMatch('Stale runners 1');
|
||||
const text = wrapper.text();
|
||||
expect(text).toMatch(`${s__('Runners|Online runners')} 3`);
|
||||
expect(text).toMatch(`${s__('Runners|Offline runners')} 2`);
|
||||
expect(text).toMatch(`${s__('Runners|Stale runners')} 1`);
|
||||
});
|
||||
|
||||
it('Displays counts for filtered searches', () => {
|
||||
createComponent({ props: { variables: { paused: true } } });
|
||||
|
||||
expect(findRunnerCountAt(0).props('variables').paused).toBe(true);
|
||||
expect(findRunnerCountAt(1).props('variables').paused).toBe(true);
|
||||
expect(findRunnerCountAt(2).props('variables').paused).toBe(true);
|
||||
});
|
||||
|
||||
it('Skips overlapping statuses', () => {
|
||||
createComponent({ props: { variables: { status: STATUS_ONLINE } } });
|
||||
|
||||
expect(findRunnerCountAt(0).props('skip')).toBe(false);
|
||||
expect(findRunnerCountAt(1).props('skip')).toBe(true);
|
||||
expect(findRunnerCountAt(2).props('skip')).toBe(true);
|
||||
});
|
||||
|
||||
it.each`
|
||||
|
@ -38,9 +74,10 @@ describe('RunnerStats', () => {
|
|||
${0} | ${STATUS_ONLINE}
|
||||
${1} | ${STATUS_OFFLINE}
|
||||
${2} | ${STATUS_STALE}
|
||||
`('Displays status types at index $i', ({ i, status }) => {
|
||||
createComponent();
|
||||
`('Displays status $status at index $i', ({ i, status }) => {
|
||||
createComponent({ mountFn: mount });
|
||||
|
||||
expect(findRunnerCountAt(i).props('variables').status).toBe(status);
|
||||
expect(findRunnerStatusStatAt(i).props('status')).toBe(status);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -10,6 +10,7 @@ import {
|
|||
} from 'helpers/vue_test_utils_helper';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import { createAlert } from '~/flash';
|
||||
import { s__ } from '~/locale';
|
||||
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
|
||||
import { updateHistory } from '~/lib/utils/url_utility';
|
||||
|
||||
|
@ -18,6 +19,7 @@ import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_
|
|||
import RunnerList from '~/runner/components/runner_list.vue';
|
||||
import RunnerListEmptyState from '~/runner/components/runner_list_empty_state.vue';
|
||||
import RunnerStats from '~/runner/components/stat/runner_stats.vue';
|
||||
import RunnerCount from '~/runner/components/stat/runner_count.vue';
|
||||
import RunnerActionsCell from '~/runner/components/cells/runner_actions_cell.vue';
|
||||
import RegistrationDropdown from '~/runner/components/registration/registration_dropdown.vue';
|
||||
import RunnerPagination from '~/runner/components/runner_pagination.vue';
|
||||
|
@ -28,7 +30,6 @@ import {
|
|||
DEFAULT_SORT,
|
||||
INSTANCE_TYPE,
|
||||
GROUP_TYPE,
|
||||
PROJECT_TYPE,
|
||||
PARAM_KEY_PAUSED,
|
||||
PARAM_KEY_STATUS,
|
||||
PARAM_KEY_TAG,
|
||||
|
@ -61,6 +62,9 @@ const mockRegistrationToken = 'AABBCC';
|
|||
const mockGroupRunnersEdges = groupRunnersData.data.group.runners.edges;
|
||||
const mockGroupRunnersCount = mockGroupRunnersEdges.length;
|
||||
|
||||
const mockGroupRunnersQuery = jest.fn();
|
||||
const mockGroupRunnersCountQuery = jest.fn();
|
||||
|
||||
jest.mock('~/flash');
|
||||
jest.mock('~/runner/sentry_utils');
|
||||
jest.mock('~/lib/utils/url_utility', () => ({
|
||||
|
@ -70,8 +74,6 @@ jest.mock('~/lib/utils/url_utility', () => ({
|
|||
|
||||
describe('GroupRunnersApp', () => {
|
||||
let wrapper;
|
||||
let mockGroupRunnersQuery;
|
||||
let mockGroupRunnersCountQuery;
|
||||
|
||||
const findRunnerStats = () => wrapper.findComponent(RunnerStats);
|
||||
const findRunnerActionsCell = () => wrapper.findComponent(RunnerActionsCell);
|
||||
|
@ -85,12 +87,7 @@ describe('GroupRunnersApp', () => {
|
|||
const findRunnerFilteredSearchBar = () => wrapper.findComponent(RunnerFilteredSearchBar);
|
||||
const findFilteredSearch = () => wrapper.findComponent(FilteredSearch);
|
||||
|
||||
const mockCountQueryResult = (count) =>
|
||||
Promise.resolve({
|
||||
data: { group: { id: groupRunnersCountData.data.group.id, runners: { count } } },
|
||||
});
|
||||
|
||||
const createComponent = ({ props = {}, mountFn = shallowMountExtended } = {}) => {
|
||||
const createComponent = ({ props = {}, mountFn = shallowMountExtended, ...options } = {}) => {
|
||||
const handlers = [
|
||||
[getGroupRunnersQuery, mockGroupRunnersQuery],
|
||||
[getGroupRunnersCountQuery, mockGroupRunnersCountQuery],
|
||||
|
@ -110,89 +107,75 @@ describe('GroupRunnersApp', () => {
|
|||
emptyStateSvgPath,
|
||||
emptyStateFilteredSvgPath,
|
||||
},
|
||||
...options,
|
||||
});
|
||||
|
||||
return waitForPromises();
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
setWindowLocation(`/groups/${mockGroupFullPath}/-/runners`);
|
||||
|
||||
mockGroupRunnersQuery = jest.fn().mockResolvedValue(groupRunnersData);
|
||||
mockGroupRunnersCountQuery = jest.fn().mockResolvedValue(groupRunnersCountData);
|
||||
|
||||
createComponent();
|
||||
await waitForPromises();
|
||||
mockGroupRunnersQuery.mockResolvedValue(groupRunnersData);
|
||||
mockGroupRunnersCountQuery.mockResolvedValue(groupRunnersCountData);
|
||||
});
|
||||
|
||||
it('shows total runner counts', async () => {
|
||||
expect(mockGroupRunnersCountQuery).toHaveBeenCalledWith({
|
||||
groupFullPath: mockGroupFullPath,
|
||||
status: STATUS_ONLINE,
|
||||
});
|
||||
expect(mockGroupRunnersCountQuery).toHaveBeenCalledWith({
|
||||
groupFullPath: mockGroupFullPath,
|
||||
status: STATUS_OFFLINE,
|
||||
});
|
||||
expect(mockGroupRunnersCountQuery).toHaveBeenCalledWith({
|
||||
groupFullPath: mockGroupFullPath,
|
||||
status: STATUS_STALE,
|
||||
});
|
||||
|
||||
expect(findRunnerStats().props()).toMatchObject({
|
||||
onlineRunnersCount: mockGroupRunnersCount,
|
||||
offlineRunnersCount: mockGroupRunnersCount,
|
||||
staleRunnersCount: mockGroupRunnersCount,
|
||||
});
|
||||
afterEach(() => {
|
||||
mockGroupRunnersQuery.mockReset();
|
||||
mockGroupRunnersCountQuery.mockReset();
|
||||
wrapper.destroy();
|
||||
});
|
||||
|
||||
it('shows the runner tabs with a runner count for each type', async () => {
|
||||
mockGroupRunnersCountQuery.mockImplementation(({ type }) => {
|
||||
switch (type) {
|
||||
case GROUP_TYPE:
|
||||
return mockCountQueryResult(2);
|
||||
case PROJECT_TYPE:
|
||||
return mockCountQueryResult(1);
|
||||
default:
|
||||
return mockCountQueryResult(4);
|
||||
}
|
||||
});
|
||||
|
||||
createComponent({ mountFn: mountExtended });
|
||||
await waitForPromises();
|
||||
|
||||
expect(findRunnerTypeTabs().text()).toMatchInterpolatedText('All 4 Group 2 Project 1');
|
||||
});
|
||||
|
||||
it('shows the runner tabs with a formatted runner count', async () => {
|
||||
mockGroupRunnersCountQuery.mockImplementation(({ type }) => {
|
||||
switch (type) {
|
||||
case GROUP_TYPE:
|
||||
return mockCountQueryResult(2000);
|
||||
case PROJECT_TYPE:
|
||||
return mockCountQueryResult(1000);
|
||||
default:
|
||||
return mockCountQueryResult(3000);
|
||||
}
|
||||
});
|
||||
|
||||
createComponent({ mountFn: mountExtended });
|
||||
await waitForPromises();
|
||||
await createComponent({ mountFn: mountExtended });
|
||||
|
||||
expect(findRunnerTypeTabs().text()).toMatchInterpolatedText(
|
||||
'All 3,000 Group 2,000 Project 1,000',
|
||||
`All ${mockGroupRunnersCount} Group ${mockGroupRunnersCount} Project ${mockGroupRunnersCount}`,
|
||||
);
|
||||
});
|
||||
|
||||
it('shows the runner setup instructions', () => {
|
||||
createComponent();
|
||||
|
||||
expect(findRegistrationDropdown().props('registrationToken')).toBe(mockRegistrationToken);
|
||||
expect(findRegistrationDropdown().props('type')).toBe(GROUP_TYPE);
|
||||
});
|
||||
|
||||
it('shows the runners list', () => {
|
||||
it('shows total runner counts', async () => {
|
||||
await createComponent({ mountFn: mountExtended });
|
||||
|
||||
expect(mockGroupRunnersCountQuery).toHaveBeenCalledWith({
|
||||
status: STATUS_ONLINE,
|
||||
groupFullPath: mockGroupFullPath,
|
||||
});
|
||||
expect(mockGroupRunnersCountQuery).toHaveBeenCalledWith({
|
||||
status: STATUS_OFFLINE,
|
||||
groupFullPath: mockGroupFullPath,
|
||||
});
|
||||
expect(mockGroupRunnersCountQuery).toHaveBeenCalledWith({
|
||||
status: STATUS_STALE,
|
||||
groupFullPath: mockGroupFullPath,
|
||||
});
|
||||
|
||||
expect(findRunnerStats().text()).toContain(
|
||||
`${s__('Runners|Online runners')} ${mockGroupRunnersCount}`,
|
||||
);
|
||||
expect(findRunnerStats().text()).toContain(
|
||||
`${s__('Runners|Offline runners')} ${mockGroupRunnersCount}`,
|
||||
);
|
||||
expect(findRunnerStats().text()).toContain(
|
||||
`${s__('Runners|Stale runners')} ${mockGroupRunnersCount}`,
|
||||
);
|
||||
});
|
||||
|
||||
it('shows the runners list', async () => {
|
||||
await createComponent();
|
||||
|
||||
const runners = findRunnerList().props('runners');
|
||||
expect(runners).toEqual(mockGroupRunnersEdges.map(({ node }) => node));
|
||||
});
|
||||
|
||||
it('requests the runners with group path and no other filters', () => {
|
||||
it('requests the runners with group path and no other filters', async () => {
|
||||
await createComponent();
|
||||
|
||||
expect(mockGroupRunnersQuery).toHaveBeenLastCalledWith({
|
||||
groupFullPath: mockGroupFullPath,
|
||||
status: undefined,
|
||||
|
@ -229,12 +212,8 @@ describe('GroupRunnersApp', () => {
|
|||
const FILTERED_COUNT_QUERIES = 3; // Smart queries that display a count of runners in tabs
|
||||
|
||||
beforeEach(async () => {
|
||||
mockGroupRunnersCountQuery.mockClear();
|
||||
|
||||
createComponent({ mountFn: mountExtended });
|
||||
await createComponent({ mountFn: mountExtended });
|
||||
showToast = jest.spyOn(wrapper.vm.$root.$toast, 'show');
|
||||
|
||||
await waitForPromises();
|
||||
});
|
||||
|
||||
it('view link is displayed correctly', () => {
|
||||
|
@ -277,8 +256,12 @@ describe('GroupRunnersApp', () => {
|
|||
beforeEach(async () => {
|
||||
setWindowLocation(`?status[]=${STATUS_ONLINE}&runner_type[]=${INSTANCE_TYPE}`);
|
||||
|
||||
createComponent();
|
||||
await waitForPromises();
|
||||
await createComponent({
|
||||
stubs: {
|
||||
RunnerStats,
|
||||
RunnerCount,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('sets the filters in the search bar', () => {
|
||||
|
@ -306,15 +289,18 @@ describe('GroupRunnersApp', () => {
|
|||
type: INSTANCE_TYPE,
|
||||
status: STATUS_ONLINE,
|
||||
});
|
||||
|
||||
expect(findRunnerStats().props()).toMatchObject({
|
||||
onlineRunnersCount: mockGroupRunnersCount,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when a filter is selected by the user', () => {
|
||||
beforeEach(async () => {
|
||||
createComponent({
|
||||
stubs: {
|
||||
RunnerStats,
|
||||
RunnerCount,
|
||||
},
|
||||
});
|
||||
|
||||
findRunnerFilteredSearchBar().vm.$emit('input', {
|
||||
runnerType: null,
|
||||
filters: [
|
||||
|
@ -330,7 +316,7 @@ describe('GroupRunnersApp', () => {
|
|||
it('updates the browser url', () => {
|
||||
expect(updateHistory).toHaveBeenLastCalledWith({
|
||||
title: expect.any(String),
|
||||
url: 'http://test.host/groups/group1/-/runners?status[]=ONLINE&tag[]=tag1&sort=CREATED_ASC',
|
||||
url: expect.stringContaining('?status[]=ONLINE&tag[]=tag1&sort=CREATED_ASC'),
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -350,28 +336,6 @@ describe('GroupRunnersApp', () => {
|
|||
tagList: ['tag1'],
|
||||
status: STATUS_ONLINE,
|
||||
});
|
||||
|
||||
expect(findRunnerStats().props()).toMatchObject({
|
||||
onlineRunnersCount: mockGroupRunnersCount,
|
||||
});
|
||||
});
|
||||
|
||||
it('skips fetching count results for status that were not in filter', () => {
|
||||
expect(mockGroupRunnersCountQuery).not.toHaveBeenCalledWith({
|
||||
groupFullPath: mockGroupFullPath,
|
||||
tagList: ['tag1'],
|
||||
status: STATUS_OFFLINE,
|
||||
});
|
||||
expect(mockGroupRunnersCountQuery).not.toHaveBeenCalledWith({
|
||||
groupFullPath: mockGroupFullPath,
|
||||
tagList: ['tag1'],
|
||||
status: STATUS_STALE,
|
||||
});
|
||||
|
||||
expect(findRunnerStats().props()).toMatchObject({
|
||||
offlineRunnersCount: null,
|
||||
staleRunnersCount: null,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -382,7 +346,7 @@ describe('GroupRunnersApp', () => {
|
|||
|
||||
describe('when no runners are found', () => {
|
||||
beforeEach(async () => {
|
||||
mockGroupRunnersQuery = jest.fn().mockResolvedValue({
|
||||
mockGroupRunnersQuery.mockResolvedValue({
|
||||
data: {
|
||||
group: {
|
||||
id: '1',
|
||||
|
@ -390,8 +354,7 @@ describe('GroupRunnersApp', () => {
|
|||
},
|
||||
},
|
||||
});
|
||||
createComponent();
|
||||
await waitForPromises();
|
||||
await createComponent();
|
||||
});
|
||||
|
||||
it('shows an empty state', async () => {
|
||||
|
@ -401,9 +364,8 @@ describe('GroupRunnersApp', () => {
|
|||
|
||||
describe('when runners query fails', () => {
|
||||
beforeEach(async () => {
|
||||
mockGroupRunnersQuery = jest.fn().mockRejectedValue(new Error('Error!'));
|
||||
createComponent();
|
||||
await waitForPromises();
|
||||
mockGroupRunnersQuery.mockRejectedValue(new Error('Error!'));
|
||||
await createComponent();
|
||||
});
|
||||
|
||||
it('error is shown to the user', async () => {
|
||||
|
@ -420,10 +382,9 @@ describe('GroupRunnersApp', () => {
|
|||
|
||||
describe('Pagination', () => {
|
||||
beforeEach(async () => {
|
||||
mockGroupRunnersQuery = jest.fn().mockResolvedValue(groupRunnersDataPaginated);
|
||||
mockGroupRunnersQuery.mockResolvedValue(groupRunnersDataPaginated);
|
||||
|
||||
createComponent({ mountFn: mountExtended });
|
||||
await waitForPromises();
|
||||
await createComponent({ mountFn: mountExtended });
|
||||
});
|
||||
|
||||
it('navigates to the next page', async () => {
|
||||
|
|
|
@ -75,16 +75,6 @@ RSpec.describe Gitlab::Ci::Reports::CoverageReportGenerator, factory_default: :k
|
|||
end
|
||||
|
||||
it_behaves_like 'having a coverage report'
|
||||
|
||||
context 'when feature flag ci_child_pipeline_coverage_reports is disabled' do
|
||||
before do
|
||||
stub_feature_flags(ci_child_pipeline_coverage_reports: false)
|
||||
end
|
||||
|
||||
it 'returns empty coverage reports' do
|
||||
expect(subject).to be_empty
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when both parent and child pipeline have builds with coverage reports' do
|
||||
|
|
|
@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Status::Stage::Common do
|
|||
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
|
||||
|
||||
let(:stage) do
|
||||
build(:ci_stage, pipeline: pipeline, name: 'test')
|
||||
build(:ci_stage_entity, pipeline: pipeline, name: 'test')
|
||||
end
|
||||
|
||||
subject do
|
||||
|
|
|
@ -7,9 +7,7 @@ RSpec.describe Gitlab::Ci::Status::Stage::Factory do
|
|||
let(:project) { create(:project) }
|
||||
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
|
||||
|
||||
let(:stage) do
|
||||
build(:ci_stage, pipeline: pipeline, name: 'test')
|
||||
end
|
||||
let(:stage) { create(:ci_stage_entity, pipeline: pipeline) }
|
||||
|
||||
subject do
|
||||
described_class.new(stage, user)
|
||||
|
@ -26,11 +24,7 @@ RSpec.describe Gitlab::Ci::Status::Stage::Factory do
|
|||
context 'when stage has a core status' do
|
||||
(Ci::HasStatus::AVAILABLE_STATUSES - %w(manual skipped scheduled)).each do |core_status|
|
||||
context "when core status is #{core_status}" do
|
||||
before do
|
||||
create(:ci_build, pipeline: pipeline, stage: 'test', status: core_status)
|
||||
create(:commit_status, pipeline: pipeline, stage: 'test', status: core_status)
|
||||
create(:ci_build, pipeline: pipeline, stage: 'build', status: :failed)
|
||||
end
|
||||
let(:stage) { create(:ci_stage_entity, pipeline: pipeline, status: core_status) }
|
||||
|
||||
it "fabricates a core status #{core_status}" do
|
||||
expect(status).to be_a(
|
||||
|
@ -48,12 +42,12 @@ RSpec.describe Gitlab::Ci::Status::Stage::Factory do
|
|||
|
||||
context 'when stage has warnings' do
|
||||
let(:stage) do
|
||||
build(:ci_stage, name: 'test', status: :success, pipeline: pipeline)
|
||||
create(:ci_stage_entity, status: :success, pipeline: pipeline)
|
||||
end
|
||||
|
||||
before do
|
||||
create(:ci_build, :allowed_to_fail, :failed,
|
||||
stage: 'test', pipeline: stage.pipeline)
|
||||
stage_id: stage.id, pipeline: stage.pipeline)
|
||||
end
|
||||
|
||||
it 'fabricates extended "success with warnings" status' do
|
||||
|
@ -70,11 +64,7 @@ RSpec.describe Gitlab::Ci::Status::Stage::Factory do
|
|||
context 'when stage has manual builds' do
|
||||
(Ci::HasStatus::BLOCKED_STATUS + ['skipped']).each do |core_status|
|
||||
context "when status is #{core_status}" do
|
||||
before do
|
||||
create(:ci_build, pipeline: pipeline, stage: 'test', status: core_status)
|
||||
create(:commit_status, pipeline: pipeline, stage: 'test', status: core_status)
|
||||
create(:ci_build, pipeline: pipeline, stage: 'build', status: :manual)
|
||||
end
|
||||
let(:stage) { create(:ci_stage_entity, pipeline: pipeline, status: core_status) }
|
||||
|
||||
it 'fabricates a play manual status' do
|
||||
expect(status).to be_a(Gitlab::Ci::Status::Stage::PlayManual)
|
||||
|
|
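The factory specs above now set the stage status directly through the :ci_stage_entity factory instead of inferring it from ad-hoc commit statuses. A minimal sketch of the pattern under test, assuming the spec's existing pipeline/user bindings and the status factory's usual fabricate! entry point (not shown in this hunk):

# Sketch only; factory and class names come from the hunks above, fabricate! is assumed.
stage  = create(:ci_stage_entity, pipeline: pipeline, status: :manual)
status = Gitlab::Ci::Status::Stage::Factory.new(stage, user).fabricate!
status # => a Gitlab::Ci::Status::Stage::PlayManual for blocked statuses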
|
@ -39,7 +39,7 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFileHelper do
|
|||
where(:case, :transformed_blocks, :result) do
|
||||
'if transformed diff is empty' | [] | 0
|
||||
'if the transformed line does not map to any in the original file' | [{ source_line: nil }] | 0
|
||||
'if the transformed line maps to a line in the source file' | [{ source_line: 2 }] | 3
|
||||
'if the transformed line maps to a line in the source file' | [{ source_line: 3 }] | 3
|
||||
end
|
||||
|
||||
with_them do
|
||||
|
@ -81,8 +81,8 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFileHelper do
|
|||
|
||||
let(:blocks) do
|
||||
{
|
||||
from: [0, 2, 1, nil, nil, 3].map { |i| { source_line: i } },
|
||||
to: [0, 1, nil, 2, nil, 3].map { |i| { source_line: i } }
|
||||
from: [1, 3, 2, nil, nil, 4].map { |i| { source_line: i } },
|
||||
to: [1, 2, nil, 3, nil, 4].map { |i| { source_line: i } }
|
||||
}
|
||||
end
|
||||
|
||||
|
|
|
@ -1,268 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Ci::LegacyStage do
|
||||
let(:stage) { build(:ci_stage) }
|
||||
let(:pipeline) { stage.pipeline }
|
||||
let(:stage_name) { stage.name }
|
||||
|
||||
describe '#expectations' do
|
||||
subject { stage }
|
||||
|
||||
it { is_expected.to include_module(StaticModel) }
|
||||
|
||||
it { is_expected.to respond_to(:pipeline) }
|
||||
it { is_expected.to respond_to(:name) }
|
||||
|
||||
it { is_expected.to delegate_method(:project).to(:pipeline) }
|
||||
end
|
||||
|
||||
describe '#statuses' do
|
||||
let!(:stage_build) { create_job(:ci_build) }
|
||||
let!(:commit_status) { create_job(:commit_status) }
|
||||
let!(:other_build) { create_job(:ci_build, stage: 'other stage') }
|
||||
|
||||
subject { stage.statuses }
|
||||
|
||||
it "returns only matching statuses" do
|
||||
is_expected.to contain_exactly(stage_build, commit_status)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#groups' do
|
||||
before do
|
||||
create_job(:ci_build, name: 'rspec 0 2')
|
||||
create_job(:ci_build, name: 'rspec 0 1')
|
||||
create_job(:ci_build, name: 'spinach 0 1')
|
||||
create_job(:commit_status, name: 'aaaaa')
|
||||
end
|
||||
|
||||
it 'returns an array of three groups' do
|
||||
expect(stage.groups).to be_a Array
|
||||
expect(stage.groups).to all(be_a Ci::Group)
|
||||
expect(stage.groups.size).to eq 3
|
||||
end
|
||||
|
||||
it 'returns groups with correctly ordered statuses' do
|
||||
expect(stage.groups.first.jobs.map(&:name))
|
||||
.to eq ['aaaaa']
|
||||
expect(stage.groups.second.jobs.map(&:name))
|
||||
.to eq ['rspec 0 1', 'rspec 0 2']
|
||||
expect(stage.groups.third.jobs.map(&:name))
|
||||
.to eq ['spinach 0 1']
|
||||
end
|
||||
|
||||
it 'returns groups with correct names' do
|
||||
expect(stage.groups.map(&:name))
|
||||
.to eq %w[aaaaa rspec spinach]
|
||||
end
|
||||
|
||||
context 'when a name is nil on legacy pipelines' do
|
||||
before do
|
||||
pipeline.builds.first.update_attribute(:name, nil)
|
||||
end
|
||||
|
||||
it 'returns an array of three groups' do
|
||||
expect(stage.groups.map(&:name))
|
||||
.to eq ['', 'aaaaa', 'rspec', 'spinach']
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#statuses_count' do
|
||||
before do
|
||||
create_job(:ci_build)
|
||||
create_job(:ci_build, stage: 'other stage')
|
||||
end
|
||||
|
||||
subject { stage.statuses_count }
|
||||
|
||||
it "counts statuses only from current stage" do
|
||||
is_expected.to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#builds' do
|
||||
let!(:stage_build) { create_job(:ci_build) }
|
||||
let!(:commit_status) { create_job(:commit_status) }
|
||||
|
||||
subject { stage.builds }
|
||||
|
||||
it "returns only builds" do
|
||||
is_expected.to contain_exactly(stage_build)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#status' do
|
||||
subject { stage.status }
|
||||
|
||||
context 'if status is already defined' do
|
||||
let(:stage) { build(:ci_stage, status: 'success') }
|
||||
|
||||
it "returns defined status" do
|
||||
is_expected.to eq('success')
|
||||
end
|
||||
end
|
||||
|
||||
context 'if status has to be calculated' do
|
||||
let!(:stage_build) { create_job(:ci_build, status: :failed) }
|
||||
|
||||
it "returns status of a build" do
|
||||
is_expected.to eq('failed')
|
||||
end
|
||||
|
||||
context 'and builds are retried' do
|
||||
let!(:new_build) { create_job(:ci_build, status: :success) }
|
||||
|
||||
before do
|
||||
stage_build.update!(retried: true)
|
||||
end
|
||||
|
||||
it "returns status of latest build" do
|
||||
is_expected.to eq('success')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#detailed_status' do
|
||||
let(:user) { create(:user) }
|
||||
|
||||
subject { stage.detailed_status(user) }
|
||||
|
||||
context 'when build is created' do
|
||||
let!(:stage_build) { create_job(:ci_build, status: :created) }
|
||||
|
||||
it 'returns detailed status for created stage' do
|
||||
expect(subject.text).to eq s_('CiStatusText|created')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when build is pending' do
|
||||
let!(:stage_build) { create_job(:ci_build, status: :pending) }
|
||||
|
||||
it 'returns detailed status for pending stage' do
|
||||
expect(subject.text).to eq s_('CiStatusText|pending')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when build is running' do
|
||||
let!(:stage_build) { create_job(:ci_build, status: :running) }
|
||||
|
||||
it 'returns detailed status for running stage' do
|
||||
expect(subject.text).to eq s_('CiStatus|running')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when build is successful' do
|
||||
let!(:stage_build) { create_job(:ci_build, status: :success) }
|
||||
|
||||
it 'returns detailed status for successful stage' do
|
||||
expect(subject.text).to eq s_('CiStatusText|passed')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when build is failed' do
|
||||
let!(:stage_build) { create_job(:ci_build, status: :failed) }
|
||||
|
||||
it 'returns detailed status for failed stage' do
|
||||
expect(subject.text).to eq s_('CiStatusText|failed')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when build is canceled' do
|
||||
let!(:stage_build) { create_job(:ci_build, status: :canceled) }
|
||||
|
||||
it 'returns detailed status for canceled stage' do
|
||||
expect(subject.text).to eq s_('CiStatusText|canceled')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when build is skipped' do
|
||||
let!(:stage_build) { create_job(:ci_build, status: :skipped) }
|
||||
|
||||
it 'returns detailed status for skipped stage' do
|
||||
expect(subject.text).to eq s_('CiStatusText|skipped')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#success?' do
|
||||
context 'when stage is successful' do
|
||||
before do
|
||||
create_job(:ci_build, status: :success)
|
||||
create_job(:generic_commit_status, status: :success)
|
||||
end
|
||||
|
||||
it 'is successful' do
|
||||
expect(stage).to be_success
|
||||
end
|
||||
end
|
||||
|
||||
context 'when stage is not successful' do
|
||||
before do
|
||||
create_job(:ci_build, status: :failed)
|
||||
create_job(:generic_commit_status, status: :success)
|
||||
end
|
||||
|
||||
it 'is not successful' do
|
||||
expect(stage).not_to be_success
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#has_warnings?' do
|
||||
context 'when stage has warnings' do
|
||||
context 'when using memoized warnings flag' do
|
||||
context 'when there are warnings' do
|
||||
let(:stage) { build(:ci_stage, warnings: true) }
|
||||
|
||||
it 'returns true using memoized value' do
|
||||
expect(stage).not_to receive(:statuses)
|
||||
expect(stage).to have_warnings
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there are no warnings' do
|
||||
let(:stage) { build(:ci_stage, warnings: false) }
|
||||
|
||||
it 'returns false using memoized value' do
|
||||
expect(stage).not_to receive(:statuses)
|
||||
expect(stage).not_to have_warnings
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when calculating warnings from statuses' do
|
||||
before do
|
||||
create(:ci_build, :failed, :allowed_to_fail,
|
||||
stage: stage_name, pipeline: pipeline)
|
||||
end
|
||||
|
||||
it 'has warnings calculated from statuses' do
|
||||
expect(stage).to receive(:statuses).and_call_original
|
||||
expect(stage).to have_warnings
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when stage does not have warnings' do
|
||||
before do
|
||||
create(:ci_build, :success, stage: stage_name,
|
||||
pipeline: pipeline)
|
||||
end
|
||||
|
||||
it 'does not have warnings calculated from statuses' do
|
||||
expect(stage).to receive(:statuses).and_call_original
|
||||
expect(stage).not_to have_warnings
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def create_job(type, status: 'success', stage: stage_name, **opts)
|
||||
create(type, pipeline: pipeline, stage: stage, status: status, **opts)
|
||||
end
|
||||
|
||||
it_behaves_like 'manual playable stage', :ci_stage
|
||||
end
|
|
@ -1333,48 +1333,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
|
|||
status: 'success')
|
||||
end
|
||||
|
||||
describe '#legacy_stages' do
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
|
||||
subject { pipeline.legacy_stages }
|
||||
|
||||
context 'stages list' do
|
||||
it 'returns ordered list of stages' do
|
||||
expect(subject.map(&:name)).to eq(%w[build test deploy])
|
||||
end
|
||||
end
|
||||
|
||||
context 'stages with statuses' do
|
||||
let(:statuses) do
|
||||
subject.map { |stage| [stage.name, stage.status] }
|
||||
end
|
||||
|
||||
it 'returns list of stages with correct statuses' do
|
||||
expect(statuses).to eq([%w(build failed),
|
||||
%w(test success),
|
||||
%w(deploy running)])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there is a stage with warnings' do
|
||||
before do
|
||||
create(:commit_status, pipeline: pipeline,
|
||||
stage: 'deploy',
|
||||
name: 'prod:2',
|
||||
stage_idx: 2,
|
||||
status: 'failed',
|
||||
allow_failure: true)
|
||||
end
|
||||
|
||||
it 'populates stage with correct number of warnings' do
|
||||
deploy_stage = pipeline.legacy_stages.third
|
||||
|
||||
expect(deploy_stage).not_to receive(:statuses)
|
||||
expect(deploy_stage).to have_warnings
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#stages_count' do
|
||||
it 'returns a valid number of stages' do
|
||||
expect(pipeline.stages_count).to eq(3)
|
||||
|
@ -1388,32 +1346,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#legacy_stage' do
|
||||
subject { pipeline.legacy_stage('test') }
|
||||
|
||||
let(:pipeline) { build(:ci_empty_pipeline, :created) }
|
||||
|
||||
context 'with status in stage' do
|
||||
before do
|
||||
create(:commit_status, pipeline: pipeline, stage: 'test')
|
||||
end
|
||||
|
||||
it { expect(subject).to be_a Ci::LegacyStage }
|
||||
it { expect(subject.name).to eq 'test' }
|
||||
it { expect(subject.statuses).not_to be_empty }
|
||||
end
|
||||
|
||||
context 'without status in stage' do
|
||||
before do
|
||||
create(:commit_status, pipeline: pipeline, stage: 'build')
|
||||
end
|
||||
|
||||
it 'return stage object' do
|
||||
is_expected.to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#stages' do
|
||||
let(:pipeline) { build(:ci_empty_pipeline, :created) }
|
||||
|
||||
|
@ -4320,7 +4252,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#find_stage_by_name' do
|
||||
describe 'fetching a stage by name' do
|
||||
let_it_be(:pipeline) { create(:ci_pipeline) }
|
||||
|
||||
let(:stage_name) { 'test' }
|
||||
|
@ -4336,19 +4268,37 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
|
|||
create_list(:ci_build, 2, pipeline: pipeline, stage: stage.name)
|
||||
end
|
||||
|
||||
subject { pipeline.find_stage_by_name!(stage_name) }
|
||||
describe '#stage' do
|
||||
subject { pipeline.stage(stage_name) }
|
||||
|
||||
context 'when stage exists' do
|
||||
it { is_expected.to eq(stage) }
|
||||
context 'when stage exists' do
|
||||
it { is_expected.to eq(stage) }
|
||||
end
|
||||
|
||||
context 'when stage does not exist' do
|
||||
let(:stage_name) { 'build' }
|
||||
|
||||
it 'returns nil' do
|
||||
is_expected.to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when stage does not exist' do
|
||||
let(:stage_name) { 'build' }
|
||||
describe '#find_stage_by_name' do
|
||||
subject { pipeline.find_stage_by_name!(stage_name) }
|
||||
|
||||
it 'raises an ActiveRecord exception' do
|
||||
expect do
|
||||
subject
|
||||
end.to raise_exception(ActiveRecord::RecordNotFound)
|
||||
context 'when stage exists' do
|
||||
it { is_expected.to eq(stage) }
|
||||
end
|
||||
|
||||
context 'when stage does not exist' do
|
||||
let(:stage_name) { 'build' }
|
||||
|
||||
it 'raises an ActiveRecord exception' do
|
||||
expect do
|
||||
subject
|
||||
end.to raise_exception(ActiveRecord::RecordNotFound)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
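The reorganized pipeline spec now documents both stage lookups side by side. Roughly, as exercised above:

# Illustration of the two lookups covered by the specs above (no new API surface):
pipeline.stage('test')                 # => the Ci::Stage named 'test', or nil when it does not exist
pipeline.find_stage_by_name!('build')  # => raises ActiveRecord::RecordNotFound when it does not exist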
|
|
@ -1,47 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Ci::LegacyStagePresenter do
|
||||
let(:legacy_stage) { create(:ci_stage) }
|
||||
let(:presenter) { described_class.new(legacy_stage) }
|
||||
|
||||
let!(:build) { create(:ci_build, :tags, :artifacts, pipeline: legacy_stage.pipeline, stage: legacy_stage.name) }
|
||||
let!(:retried_build) { create(:ci_build, :tags, :artifacts, :retried, pipeline: legacy_stage.pipeline, stage: legacy_stage.name) }
|
||||
|
||||
before do
|
||||
create(:generic_commit_status, pipeline: legacy_stage.pipeline, stage: legacy_stage.name)
|
||||
end
|
||||
|
||||
describe '#latest_ordered_statuses' do
|
||||
subject(:latest_ordered_statuses) { presenter.latest_ordered_statuses }
|
||||
|
||||
it 'preloads build tags' do
|
||||
expect(latest_ordered_statuses.second.association(:tags)).to be_loaded
|
||||
end
|
||||
|
||||
it 'preloads build artifacts archive' do
|
||||
expect(latest_ordered_statuses.second.association(:job_artifacts_archive)).to be_loaded
|
||||
end
|
||||
|
||||
it 'preloads build artifacts metadata' do
|
||||
expect(latest_ordered_statuses.second.association(:metadata)).to be_loaded
|
||||
end
|
||||
end
|
||||
|
||||
describe '#retried_ordered_statuses' do
|
||||
subject(:retried_ordered_statuses) { presenter.retried_ordered_statuses }
|
||||
|
||||
it 'preloads build tags' do
|
||||
expect(retried_ordered_statuses.first.association(:tags)).to be_loaded
|
||||
end
|
||||
|
||||
it 'preloads build artifacts archive' do
|
||||
expect(retried_ordered_statuses.first.association(:job_artifacts_archive)).to be_loaded
|
||||
end
|
||||
|
||||
it 'preloads build artifacts metadata' do
|
||||
expect(retried_ordered_statuses.first.association(:metadata)).to be_loaded
|
||||
end
|
||||
end
|
||||
end
|
|
@ -3,7 +3,7 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Ci::StagePresenter do
|
||||
let(:stage) { create(:ci_stage) }
|
||||
let(:stage) { create(:ci_stage_entity) }
|
||||
let(:presenter) { described_class.new(stage) }
|
||||
|
||||
let!(:build) { create(:ci_build, :tags, :artifacts, pipeline: stage.pipeline, stage: stage.name) }
|
||||
|
|
|
@ -5,7 +5,7 @@ require 'spec_helper'
|
|||
RSpec.describe Ci::DagJobGroupEntity do
|
||||
let_it_be(:request) { double(:request) }
|
||||
let_it_be(:pipeline) { create(:ci_pipeline) }
|
||||
let_it_be(:stage) { create(:ci_stage, pipeline: pipeline) }
|
||||
let_it_be(:stage) { create(:ci_stage_entity, pipeline: pipeline) }
|
||||
|
||||
let(:group) { Ci::Group.new(pipeline.project, stage, name: 'test', jobs: jobs) }
|
||||
let(:entity) { described_class.new(group, request: request) }
|
||||
|
@ -14,7 +14,7 @@ RSpec.describe Ci::DagJobGroupEntity do
|
|||
subject { entity.as_json }
|
||||
|
||||
context 'when group contains 1 job' do
|
||||
let(:job) { create(:ci_build, stage: stage, pipeline: pipeline, name: 'test') }
|
||||
let(:job) { create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'test') }
|
||||
let(:jobs) { [job] }
|
||||
|
||||
it 'exposes a name' do
|
||||
|
@ -38,8 +38,8 @@ RSpec.describe Ci::DagJobGroupEntity do
|
|||
end
|
||||
|
||||
context 'when group contains multiple parallel jobs' do
|
||||
let(:job_1) { create(:ci_build, stage: stage, pipeline: pipeline, name: 'test 1/2') }
|
||||
let(:job_2) { create(:ci_build, stage: stage, pipeline: pipeline, name: 'test 2/2') }
|
||||
let(:job_1) { create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'test 1/2') }
|
||||
let(:job_2) { create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'test 2/2') }
|
||||
let(:jobs) { [job_1, job_2] }
|
||||
|
||||
it 'exposes a name' do
|
||||
|
|
|
@ -6,10 +6,10 @@ RSpec.describe Ci::DagStageEntity do
|
|||
let_it_be(:pipeline) { create(:ci_pipeline) }
|
||||
let_it_be(:request) { double(:request) }
|
||||
|
||||
let(:stage) { build(:ci_stage, pipeline: pipeline, name: 'test') }
|
||||
let(:stage) { create(:ci_stage_entity, pipeline: pipeline, name: 'test') }
|
||||
let(:entity) { described_class.new(stage, request: request) }
|
||||
|
||||
let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
|
||||
let!(:job) { create(:ci_build, :success, pipeline: pipeline, stage_id: stage.id) }
|
||||
|
||||
describe '#as_json' do
|
||||
subject { entity.as_json }
|
||||
|
|
|
@ -12,12 +12,12 @@ RSpec.describe StageEntity do
|
|||
end
|
||||
|
||||
let(:stage) do
|
||||
build(:ci_stage, pipeline: pipeline, name: 'test')
|
||||
create(:ci_stage_entity, pipeline: pipeline, status: :success)
|
||||
end
|
||||
|
||||
before do
|
||||
allow(request).to receive(:current_user).and_return(user)
|
||||
create(:ci_build, :success, pipeline: pipeline)
|
||||
create(:ci_build, :success, pipeline: pipeline, stage_id: stage.id)
|
||||
end
|
||||
|
||||
describe '#as_json' do
|
||||
|
|
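Across these serializer specs the setup follows one pattern: create a persisted stage via the :ci_stage_entity factory and attach builds through stage_id rather than the stage name string. Condensed from the hunks above:

# Attribute and factory names are taken directly from the diff.
stage = create(:ci_stage_entity, pipeline: pipeline, name: 'test', status: :success)
create(:ci_build, :success, pipeline: pipeline, stage_id: stage.id)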
|
@ -88,20 +88,6 @@ RSpec.describe Ci::GenerateCoverageReportsService do
|
|||
end
|
||||
|
||||
it { is_expected.to be_falsy }
|
||||
|
||||
context 'when feature flag ci_child_pipeline_coverage_reports is disabled' do
|
||||
let!(:key) do
|
||||
# `let!` is executed before `before` block. If the feature flag
|
||||
# is stubbed in `before`, the first call to `#key` uses the
|
||||
# default feature flag value (enabled).
|
||||
# The feature flag needs to be stubbed before the first call to `#key`
|
||||
# so that the first and second key are calculated using the same method.
|
||||
stub_feature_flags(ci_child_pipeline_coverage_reports: false)
|
||||
service.send(:key, base_pipeline, head_pipeline)
|
||||
end
|
||||
|
||||
it { is_expected.to be_truthy }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -69,16 +69,6 @@ RSpec.describe Ci::PipelineArtifacts::CoverageReportService do
|
|||
end
|
||||
|
||||
it_behaves_like 'creating or updating a pipeline coverage report'
|
||||
|
||||
context 'when the feature flag is disabled' do
|
||||
before do
|
||||
stub_feature_flags(ci_child_pipeline_coverage_reports: false)
|
||||
end
|
||||
|
||||
it 'does not change existing pipeline artifact' do
|
||||
expect { subject }.not_to change { pipeline_artifact.reload.updated_at }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when pipeline is running and coverage report does not exist' do
|
||||
|
|
|
@ -46,20 +46,6 @@ RSpec.describe Ci::PipelineArtifacts::CoverageReportWorker do
|
|||
subject
|
||||
end
|
||||
end
|
||||
|
||||
context 'when feature flag is disabled' do
|
||||
before do
|
||||
stub_feature_flags(ci_child_pipeline_coverage_reports: false)
|
||||
end
|
||||
|
||||
it 'calls the pipeline coverage report service on the pipeline' do
|
||||
expect_next_instance_of(::Ci::PipelineArtifacts::CoverageReportService, pipeline) do |service|
|
||||
expect(service).to receive(:execute)
|
||||
end
|
||||
|
||||
subject
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when pipeline does not exist' do
|
||||
|
|
|
@ -3,27 +3,38 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Pages::InvalidateDomainCacheWorker do
|
||||
let(:event) do
|
||||
Pages::PageDeployedEvent.new(data: {
|
||||
project_id: 1,
|
||||
namespace_id: 2,
|
||||
root_namespace_id: 3
|
||||
})
|
||||
end
|
||||
|
||||
subject { consume_event(subscriber: described_class, event: event) }
|
||||
|
||||
it_behaves_like 'subscribes to event'
|
||||
|
||||
it 'enqueues ScheduleAggregationWorker' do
|
||||
expect_next_instance_of(Gitlab::Pages::CacheControl, type: :project, id: 1) do |cache_control|
|
||||
expect(cache_control).to receive(:clear_cache)
|
||||
shared_examples 'clears caches with' do |event_class:, event_data:, caches:|
|
||||
let(:event) do
|
||||
event_class.new(data: event_data)
|
||||
end
|
||||
|
||||
expect_next_instance_of(Gitlab::Pages::CacheControl, type: :namespace, id: 3) do |cache_control|
|
||||
expect(cache_control).to receive(:clear_cache)
|
||||
end
|
||||
subject { consume_event(subscriber: described_class, event: event) }
|
||||
|
||||
subject
|
||||
it_behaves_like 'subscribes to event'
|
||||
|
||||
it 'clears the cache with Gitlab::Pages::CacheControl' do
|
||||
caches.each do |cache_type, cache_id|
|
||||
expect_next_instance_of(Gitlab::Pages::CacheControl, type: cache_type, id: cache_id) do |cache_control|
|
||||
expect(cache_control).to receive(:clear_cache)
|
||||
end
|
||||
end
|
||||
|
||||
subject
|
||||
end
|
||||
end
|
||||
|
||||
it_behaves_like 'clears caches with',
|
||||
event_class: Pages::PageDeployedEvent,
|
||||
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
|
||||
caches: { namespace: 3, project: 1 }
|
||||
|
||||
it_behaves_like 'clears caches with',
|
||||
event_class: Pages::PageDeletedEvent,
|
||||
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
|
||||
caches: { namespace: 3, project: 1 }
|
||||
|
||||
it_behaves_like 'clears caches with',
|
||||
event_class: Projects::ProjectDeletedEvent,
|
||||
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
|
||||
caches: { namespace: 3, project: 1 }
|
||||
end
|
||||
|
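With the worker spec reorganized around a parameterized shared example, covering an additional event is a single extra it_behaves_like call. A hypothetical extension (the event class below is illustrative only and not part of this change):

# Hypothetical reuse of the shared example for another Pages event.
it_behaves_like 'clears caches with',
  event_class: Pages::PageUpdatedEvent, # illustrative name only
  event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
  caches: { namespace: 3, project: 1 }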
|
10 vendor/gems/ipynbdiff/Gemfile.lock (vendored)
|
@ -3,18 +3,21 @@ PATH
|
|||
specs:
|
||||
ipynbdiff (0.4.7)
|
||||
diffy (~> 3.3)
|
||||
json (~> 2.5, >= 2.5.1)
|
||||
oj (~> 3.13.16)
|
||||
|
||||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
ast (2.4.2)
|
||||
benchmark-memory (0.2.0)
|
||||
memory_profiler (~> 1)
|
||||
binding_ninja (0.2.3)
|
||||
coderay (1.1.3)
|
||||
diff-lcs (1.5.0)
|
||||
diffy (3.4.2)
|
||||
json (2.6.2)
|
||||
memory_profiler (1.0.0)
|
||||
method_source (1.0.0)
|
||||
oj (3.13.16)
|
||||
parser (3.1.2.0)
|
||||
ast (~> 2.4.1)
|
||||
proc_to_ast (0.1.0)
|
||||
|
@ -37,7 +40,7 @@ GEM
|
|||
rspec-mocks (3.11.1)
|
||||
diff-lcs (>= 1.2.0, < 2.0)
|
||||
rspec-support (~> 3.11.0)
|
||||
rspec-parameterized (0.5.1)
|
||||
rspec-parameterized (0.5.2)
|
||||
binding_ninja (>= 0.2.3)
|
||||
parser
|
||||
proc_to_ast
|
||||
|
@ -53,6 +56,7 @@ PLATFORMS
|
|||
x86_64-linux
|
||||
|
||||
DEPENDENCIES
|
||||
benchmark-memory (~> 0.2.0)
|
||||
bundler (~> 2.2)
|
||||
ipynbdiff!
|
||||
pry (~> 0.14)
|
||||
|
|
3 vendor/gems/ipynbdiff/ipynbdiff.gemspec (vendored)
|
@ -23,11 +23,12 @@ Gem::Specification.new do |s|
|
|||
s.require_paths = ['lib']
|
||||
|
||||
s.add_runtime_dependency 'diffy', '~> 3.3'
|
||||
s.add_runtime_dependency 'json', '~> 2.5', '>= 2.5.1'
|
||||
s.add_runtime_dependency 'oj', '~> 3.13.16'
|
||||
|
||||
s.add_development_dependency 'bundler', '~> 2.2'
|
||||
s.add_development_dependency 'pry', '~> 0.14'
|
||||
s.add_development_dependency 'rake', '~> 13.0'
|
||||
s.add_development_dependency 'rspec', '~> 3.10'
|
||||
s.add_development_dependency 'rspec-parameterized', '~> 0.5.1'
|
||||
s.add_development_dependency 'benchmark-memory', '~>0.2.0'
|
||||
end
|
||||
|
|
218 vendor/gems/ipynbdiff/lib/ipynb_symbol_map.rb (vendored)
|
@ -1,218 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module IpynbDiff
|
||||
class InvalidTokenError < StandardError
|
||||
end
|
||||
|
||||
# Creates a symbol map for a ipynb file (JSON format)
|
||||
class IpynbSymbolMap
|
||||
class << self
|
||||
def parse(notebook, objects_to_ignore = [])
|
||||
IpynbSymbolMap.new(notebook, objects_to_ignore).parse('')
|
||||
end
|
||||
end
|
||||
|
||||
attr_reader :current_line, :char_idx, :results
|
||||
|
||||
WHITESPACE_CHARS = ["\t", "\r", ' ', "\n"].freeze
|
||||
|
||||
VALUE_STOPPERS = [',', '[', ']', '{', '}', *WHITESPACE_CHARS].freeze
|
||||
|
||||
def initialize(notebook, objects_to_ignore = [])
|
||||
@chars = notebook.chars
|
||||
@current_line = 0
|
||||
@char_idx = 0
|
||||
@results = {}
|
||||
@objects_to_ignore = objects_to_ignore
|
||||
end
|
||||
|
||||
def parse(prefix = '.')
|
||||
raise_if_file_ended
|
||||
|
||||
skip_whitespaces
|
||||
|
||||
if (c = current_char) == '"'
|
||||
parse_string
|
||||
elsif c == '['
|
||||
parse_array(prefix)
|
||||
elsif c == '{'
|
||||
parse_object(prefix)
|
||||
else
|
||||
parse_value
|
||||
end
|
||||
|
||||
results
|
||||
end
|
||||
|
||||
def parse_array(prefix)
|
||||
# [1, 2, {"some": "object"}, [1]]
|
||||
|
||||
i = 0
|
||||
|
||||
current_should_be '['
|
||||
|
||||
loop do
|
||||
raise_if_file_ended
|
||||
|
||||
break if skip_beginning(']')
|
||||
|
||||
new_prefix = "#{prefix}.#{i}"
|
||||
|
||||
add_result(new_prefix, current_line)
|
||||
|
||||
parse(new_prefix)
|
||||
|
||||
i += 1
|
||||
end
|
||||
end
|
||||
|
||||
def parse_object(prefix)
|
||||
# {"name":"value", "another_name": [1, 2, 3]}
|
||||
|
||||
current_should_be '{'
|
||||
|
||||
loop do
|
||||
raise_if_file_ended
|
||||
|
||||
break if skip_beginning('}')
|
||||
|
||||
prop_name = parse_string(return_value: true)
|
||||
|
||||
next_and_skip_whitespaces
|
||||
|
||||
current_should_be ':'
|
||||
|
||||
next_and_skip_whitespaces
|
||||
|
||||
if @objects_to_ignore.include? prop_name
|
||||
skip
|
||||
else
|
||||
new_prefix = "#{prefix}.#{prop_name}"
|
||||
|
||||
add_result(new_prefix, current_line)
|
||||
|
||||
parse(new_prefix)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def parse_string(return_value: false)
|
||||
current_should_be '"'
|
||||
init_idx = @char_idx
|
||||
|
||||
loop do
|
||||
increment_char_index
|
||||
|
||||
raise_if_file_ended
|
||||
|
||||
if current_char == '"' && !prev_backslash?
|
||||
init_idx += 1
|
||||
break
|
||||
end
|
||||
end
|
||||
|
||||
@chars[init_idx...@char_idx].join if return_value
|
||||
end
|
||||
|
||||
def add_result(key, line_number)
|
||||
@results[key] = line_number
|
||||
end
|
||||
|
||||
def parse_value
|
||||
increment_char_index until raise_if_file_ended || VALUE_STOPPERS.include?(current_char)
|
||||
end
|
||||
|
||||
def skip_whitespaces
|
||||
while WHITESPACE_CHARS.include?(current_char)
|
||||
raise_if_file_ended
|
||||
check_for_new_line
|
||||
increment_char_index
|
||||
end
|
||||
end
|
||||
|
||||
def increment_char_index
|
||||
@char_idx += 1
|
||||
end
|
||||
|
||||
def next_and_skip_whitespaces
|
||||
increment_char_index
|
||||
skip_whitespaces
|
||||
end
|
||||
|
||||
def current_char
|
||||
raise_if_file_ended
|
||||
|
||||
@chars[@char_idx]
|
||||
end
|
||||
|
||||
def prev_backslash?
|
||||
@chars[@char_idx - 1] == '\\' && @chars[@char_idx - 2] != '\\'
|
||||
end
|
||||
|
||||
def current_should_be(another_char)
|
||||
raise InvalidTokenError unless current_char == another_char
|
||||
end
|
||||
|
||||
def check_for_new_line
|
||||
@current_line += 1 if current_char == "\n"
|
||||
end
|
||||
|
||||
def raise_if_file_ended
|
||||
@char_idx >= @chars.size && raise(InvalidTokenError)
|
||||
end
|
||||
|
||||
def skip
|
||||
raise_if_file_ended
|
||||
|
||||
skip_whitespaces
|
||||
|
||||
if (c = current_char) == '"'
|
||||
parse_string
|
||||
elsif c == '['
|
||||
skip_array
|
||||
elsif c == '{'
|
||||
skip_object
|
||||
else
|
||||
parse_value
|
||||
end
|
||||
end
|
||||
|
||||
def skip_array
|
||||
loop do
|
||||
raise_if_file_ended
|
||||
|
||||
break if skip_beginning(']')
|
||||
|
||||
skip
|
||||
end
|
||||
end
|
||||
|
||||
def skip_object
|
||||
loop do
|
||||
raise_if_file_ended
|
||||
|
||||
break if skip_beginning('}')
|
||||
|
||||
parse_string
|
||||
|
||||
next_and_skip_whitespaces
|
||||
|
||||
current_should_be ':'
|
||||
|
||||
next_and_skip_whitespaces
|
||||
|
||||
skip
|
||||
end
|
||||
end
|
||||
|
||||
def skip_beginning(closing_char)
|
||||
check_for_new_line
|
||||
|
||||
next_and_skip_whitespaces
|
||||
|
||||
return true if current_char == closing_char
|
||||
|
||||
next_and_skip_whitespaces if current_char == ','
|
||||
end
|
||||
end
|
||||
end
|
|
@ -14,7 +14,7 @@ module IpynbDiff
|
|||
'stream' => %w[text]
|
||||
}.freeze
|
||||
|
||||
def initialize(hide_images: false)
|
||||
def initialize(hide_images = false)
|
||||
@hide_images = hide_images
|
||||
end
|
||||
|
||||
|
|
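OutputTransformer#initialize switches from a keyword to a positional argument here, and the call site in transformer.rb later in this changeset is updated to match:

OutputTransformer.new(hide_images: hide_images)  # before this change
OutputTransformer.new(hide_images)               # after this change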
107 vendor/gems/ipynbdiff/lib/symbol_map.rb (vendored, Normal file)
|
@ -0,0 +1,107 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module IpynbDiff
|
||||
require 'oj'
|
||||
|
||||
# Creates a map from a symbol to the line number it appears in a Json file
|
||||
#
|
||||
# Example:
|
||||
#
|
||||
# Input:
|
||||
#
|
||||
# 1. {
|
||||
# 2. 'obj1': [
|
||||
# 3. {
|
||||
# 4. 'obj2': 5
|
||||
# 5. },
|
||||
# 6. 3,
|
||||
# 7. {
|
||||
# 8. 'obj3': {
|
||||
# 9. 'obj4': 'b'
|
||||
# 10. }
|
||||
# 11. }
|
||||
# 12. ]
|
||||
# 13.}
|
||||
#
|
||||
# Output:
|
||||
#
|
||||
# Symbol Line Number
|
||||
# .obj1 -> 2
|
||||
# .obj1.0 -> 3
|
||||
# .obj1.0 -> 3
|
||||
# .obj1.0.obj2 -> 4
|
||||
# .obj1.1 -> 6
|
||||
# .obj1.2 -> 7
|
||||
# .obj1.2.obj3 -> 8
|
||||
# .obj1.2.obj3.obj4 -> 9
|
||||
#
|
||||
class SymbolMap
|
||||
class << self
|
||||
def handler
|
||||
@handler ||= SymbolMap.new
|
||||
end
|
||||
|
||||
def parser
|
||||
@parser ||= Oj::Parser.new(:saj).tap { |p| p.handler = handler }
|
||||
end
|
||||
|
||||
def parse(notebook, *args)
|
||||
handler.reset
|
||||
parser.parse(notebook)
|
||||
handler.symbols
|
||||
end
|
||||
end
|
||||
|
||||
attr_accessor :symbols
|
||||
|
||||
def hash_start(key, line, column)
|
||||
add_symbol(key_or_index(key), line)
|
||||
end
|
||||
|
||||
def hash_end(key, line, column)
|
||||
@current_path.pop
|
||||
end
|
||||
|
||||
def array_start(key, line, column)
|
||||
@current_array_index << 0
|
||||
|
||||
add_symbol(key, line)
|
||||
end
|
||||
|
||||
def array_end(key, line, column)
|
||||
@current_path.pop
|
||||
@current_array_index.pop
|
||||
end
|
||||
|
||||
def add_value(value, key, line, column)
|
||||
add_symbol(key_or_index(key), line)
|
||||
|
||||
@current_path.pop
|
||||
end
|
||||
|
||||
def add_symbol(symbol, line)
|
||||
@symbols[@current_path.append(symbol).join('.')] = line if symbol
|
||||
end
|
||||
|
||||
def key_or_index(key)
|
||||
if key.nil? # value in an array
|
||||
if @current_path.empty?
|
||||
@current_path = ['']
|
||||
return nil
|
||||
end
|
||||
|
||||
symbol = @current_array_index.last
|
||||
@current_array_index[-1] += 1
|
||||
symbol
|
||||
else
|
||||
key
|
||||
end
|
||||
end
|
||||
|
||||
def reset
|
||||
@current_path = []
|
||||
@symbols = {}
|
||||
@current_array_index = []
|
||||
end
|
||||
end
|
||||
end
|
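As a minimal usage sketch of the new SymbolMap class (illustrative only; it assumes the gem's lib directory is on the load path, as the specs arrange):

  require 'json'
  require 'symbol_map'

  doc = JSON.pretty_generate({ obj1: { obj2: [1, 2] } })
  IpynbDiff::SymbolMap.parse(doc)
  # => { '.obj1' => 2, '.obj1.obj2' => 3, '.obj1.obj2.0' => 4, '.obj1.obj2.1' => 5 }

The line numbers come from Oj's SAJ callbacks, so each symbol points at the line it occupies in the pretty-printed JSON, matching the behaviour exercised in symbol_map_spec.rb below.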
13 vendor/gems/ipynbdiff/lib/transformer.rb vendored
@@ -1,6 +1,8 @@
# frozen_string_literal: true

module IpynbDiff
  require 'oj'

  class InvalidNotebookError < StandardError
  end

@@ -10,26 +12,25 @@ module IpynbDiff
    require 'yaml'
    require 'output_transformer'
    require 'symbolized_markdown_helper'
    require 'ipynb_symbol_map'
    require 'symbol_map'
    require 'transformed_notebook'
    include SymbolizedMarkdownHelper

    @include_frontmatter = true
    @objects_to_ignore = ['application/javascript', 'application/vnd.holoviews_load.v0+json']

    def initialize(include_frontmatter: true, hide_images: false)
      @include_frontmatter = include_frontmatter
      @hide_images = hide_images
      @out_transformer = OutputTransformer.new(hide_images: hide_images)
      @out_transformer = OutputTransformer.new(hide_images)
    end

    def validate_notebook(notebook)
      notebook_json = JSON.parse(notebook)
      notebook_json = Oj::Parser.usual.parse(notebook)

      return notebook_json if notebook_json.key?('cells')

      raise InvalidNotebookError
    rescue JSON::ParserError
    rescue EncodingError, Oj::ParseError, JSON::ParserError
      raise InvalidNotebookError
    end

@@ -38,7 +39,7 @@ module IpynbDiff

      notebook_json = validate_notebook(notebook)
      transformed = transform_document(notebook_json)
      symbol_map = IpynbSymbolMap.parse(notebook)
      symbol_map = SymbolMap.parse(notebook)

      TransformedNotebook.new(transformed, symbol_map)
    end

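Taken together, these hunks move parsing from the json gem to Oj without changing the public API. A hedged sketch of how the transformer is driven, mirroring the specs below (the fixture path is the one the specs use):

  require 'ipynbdiff'

  notebook = File.read('spec/testdata/text_png_output/input.ipynb')
  transformer = IpynbDiff::Transformer.new(include_frontmatter: false, hide_images: true)

  transformed = transformer.transform(notebook)
  transformed.blocks.map { |b| b[:source_line] }    # each markdown block keeps its .ipynb line

  IpynbDiff::Transformer.new.transform('not json')  # expected to raise IpynbDiff::InvalidNotebookError

Validation failures (bad encoding, malformed JSON, or a notebook without a 'cells' key) still surface as InvalidNotebookError; only the parser underneath changed.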
64 vendor/gems/ipynbdiff/spec/benchmark.rb vendored Normal file
File diff suppressed because one or more lines are too long
165 vendor/gems/ipynbdiff/spec/ipynb_symbol_map_spec.rb vendored
@@ -1,165 +0,0 @@
# frozen_string_literal: true

require 'rspec'
require 'json'
require 'rspec-parameterized'
require 'ipynb_symbol_map'

describe IpynbDiff::IpynbSymbolMap do
  def res(*cases)
    cases&.to_h || []
  end

  describe '#parse_string' do
    using RSpec::Parameterized::TableSyntax

    let(:mapper) { IpynbDiff::IpynbSymbolMap.new(input) }

    where(:input, :result) do
      # Empty string
      '""' | ''
      # Some string with quotes
      '"he\nll\"o"' | 'he\nll\"o'
    end

    with_them do
      it { expect(mapper.parse_string(return_value: true)).to eq(result) }
      it { expect(mapper.parse_string).to be_nil }
      it { expect(mapper.results).to be_empty }
    end

    it 'raises if invalid string' do
      mapper = IpynbDiff::IpynbSymbolMap.new('"')

      expect { mapper.parse_string }.to raise_error(IpynbDiff::InvalidTokenError)
    end

  end

  describe '#parse_object' do
    using RSpec::Parameterized::TableSyntax

    let(:mapper) { IpynbDiff::IpynbSymbolMap.new(notebook, objects_to_ignore) }

    before do
      mapper.parse_object('')
    end

    where(:notebook, :objects_to_ignore, :result) do
      # Empty object
      '{ }' | [] | res
      # Object with string
      '{ "hello" : "world" }' | [] | res(['.hello', 0])
      # Object with boolean
      '{ "hello" : true }' | [] | res(['.hello', 0])
      # Object with integer
      '{ "hello" : 1 }' | [] | res(['.hello', 0])
      # Object with 2 properties in the same line
      '{ "hello" : "world" , "my" : "bad" }' | [] | res(['.hello', 0], ['.my', 0])
      # Object with 2 properties in the different lines line
      "{ \"hello\" : \"world\" , \n \n \"my\" : \"bad\" }" | [] | res(['.hello', 0], ['.my', 2])
      # Object with 2 properties, but one is ignored
      "{ \"hello\" : \"world\" , \n \n \"my\" : \"bad\" }" | ['hello'] | res(['.my', 2])
    end

    with_them do
      it { expect(mapper.results).to include(result) }
    end
  end

  describe '#parse_array' do
    using RSpec::Parameterized::TableSyntax

    where(:notebook, :result) do
      # Empty Array
      '[]' | res
      # Array with string value
      '["a"]' | res(['.0', 0])
      # Array with boolean
      '[ true ]' | res(['.0', 0])
      # Array with integer
      '[ 1 ]' | res(['.0', 0])
      # Two values on the same line
      '["a", "b"]' | res(['.0', 0], ['.1', 0])
      # With line breaks'
      "[\n \"a\" \n , \n \"b\" ]" | res(['.0', 1], ['.1', 3])
    end

    let(:mapper) { IpynbDiff::IpynbSymbolMap.new(notebook) }

    before do
      mapper.parse_array('')
    end

    with_them do
      it { expect(mapper.results).to match_array(result) }
    end
  end

  describe '#skip_object' do
    subject { IpynbDiff::IpynbSymbolMap.parse(JSON.pretty_generate(source)) }
  end

  describe '#parse' do

    let(:objects_to_ignore) { [] }

    subject { IpynbDiff::IpynbSymbolMap.parse(JSON.pretty_generate(source), objects_to_ignore) }

    context 'Empty object' do
      let(:source) { {} }

      it { is_expected.to be_empty }
    end

    context 'Object with inner object and number' do
      let(:source) { { obj1: { obj2: 1 } } }

      it { is_expected.to match_array(res(['.obj1', 1], ['.obj1.obj2', 2])) }
    end

    context 'Object with inner object and number, string and array with object' do
      let(:source) { { obj1: { obj2: [123, 2, true], obj3: "hel\nlo", obj4: true, obj5: 123, obj6: 'a' } } }

      it do
        is_expected.to match_array(
          res(['.obj1', 1],
              ['.obj1.obj2', 2],
              ['.obj1.obj2.0', 3],
              ['.obj1.obj2.1', 4],
              ['.obj1.obj2.2', 5],
              ['.obj1.obj3', 7],
              ['.obj1.obj4', 8],
              ['.obj1.obj5', 9],
              ['.obj1.obj6', 10])
        )
      end
    end

    context 'When index is exceeded because of failure' do
      it 'raises an exception' do
        source = '{"\\a": "a\""}'

        mapper = IpynbDiff::IpynbSymbolMap.new(source)

        expect(mapper).to receive(:prev_backslash?).at_least(1).time.and_return(false)

        expect { mapper.parse('') }.to raise_error(IpynbDiff::InvalidTokenError)
      end
    end

    context 'Object with inner object and number, string and array with object' do
      let(:source) { { obj1: { obj2: [123, 2, true], obj3: "hel\nlo", obj4: true, obj5: 123, obj6: { obj7: 'a' } } } }
      let(:objects_to_ignore) { %w(obj2 obj6) }
      it do
        is_expected.to match_array(
          res(['.obj1', 1],
              ['.obj1.obj3', 7],
              ['.obj1.obj4', 8],
              ['.obj1.obj5', 9],
          )
        )
      end
    end
  end
end
58 vendor/gems/ipynbdiff/spec/symbol_map_spec.rb vendored Normal file
@@ -0,0 +1,58 @@
# frozen_string_literal: true

require 'rspec'
require 'json'
require 'rspec-parameterized'
require 'symbol_map'

describe IpynbDiff::SymbolMap do
  def res(*cases)
    cases&.to_h || []
  end

  describe '#parse' do
    subject { IpynbDiff::SymbolMap.parse(JSON.pretty_generate(source)) }

    context 'Object with blank key' do
      let(:source) { { "": { "": 5 } }}

      it { is_expected.to match_array(res([".", 2], ["..", 3])) }
    end

    context 'Empty object' do
      let(:source) { {} }

      it { is_expected.to be_empty }
    end

    context 'Empty array' do
      let(:source) { [] }

      it { is_expected.to be_empty }
    end

    context 'Object with inner object and number' do
      let(:source) { { obj1: { obj2: 1 } } }

      it { is_expected.to match_array(res( ['.obj1', 2], ['.obj1.obj2', 3])) }
    end

    context 'Object with inner object and number, string and array with object' do
      let(:source) { { obj1: { obj2: [123, 2, true], obj3: "hel\nlo", obj4: true, obj5: 123, obj6: 'a' } } }

      it do
        is_expected.to match_array(
          res(['.obj1', 2],
              ['.obj1.obj2', 3],
              ['.obj1.obj2.0', 4],
              ['.obj1.obj2.1', 5],
              ['.obj1.obj2.2', 6],
              ['.obj1.obj3', 8],
              ['.obj1.obj4', 9],
              ['.obj1.obj5', 10],
              ['.obj1.obj6', 11])
        )
      end
    end
  end
end
23 vendor/gems/ipynbdiff/spec/test_helper.rb vendored Normal file
@@ -0,0 +1,23 @@
BASE_PATH = File.join(File.expand_path(File.dirname(__FILE__)), 'testdata')

FROM_PATH = File.join(BASE_PATH, 'from.ipynb')
TO_PATH = File.join(BASE_PATH, 'to.ipynb')

FROM_IPYNB = File.read(FROM_PATH)
TO_IPYNB = File.read(TO_PATH)

def input_for_test(test_case)
  File.join(BASE_PATH, test_case, 'input.ipynb')
end

def expected_symbols(test_case)
  File.join(BASE_PATH, test_case, 'expected_symbols.txt')
end

def expected_md(test_case)
  File.join(BASE_PATH, test_case, 'expected.md')
end

def expected_line_numbers(test_case)
  File.join(BASE_PATH, test_case, 'expected_line_numbers.txt')
end
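These helpers only build fixture paths; a spec still reads the files itself. A hypothetical usage (not in this commit), using the text_png_output case that the transformer spec below exercises:

  require_relative 'test_helper'   # assumes the spec loads the helper directly

  notebook = File.read(input_for_test('text_png_output'))
  expected = File.read(expected_line_numbers('text_png_output'))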
@@ -57,8 +57,7 @@
"tags": [
"senoid"
]
},
"outputs": [
}, "outputs": [
{
"data": {
"text/plain": [
14 vendor/gems/ipynbdiff/spec/testdata/text_png_output/expected_line_numbers.txt vendored Normal file
@@ -0,0 +1,14 @@
3

36
37
38
39
40


12

16

25
23 vendor/gems/ipynbdiff/spec/transformer_spec.rb vendored
@@ -5,10 +5,10 @@ require 'ipynbdiff'
require 'json'
require 'rspec-parameterized'

BASE_PATH = File.join(File.expand_path(File.dirname(__FILE__)), 'testdata')
TRANSFORMER_BASE_PATH = File.join(File.expand_path(File.dirname(__FILE__)), 'testdata')

def read_file(*paths)
  File.read(File.join(BASE_PATH, *paths))
  File.read(File.join(TRANSFORMER_BASE_PATH, *paths))
end

def default_config
@@ -68,12 +68,27 @@ describe IpynbDiff::Transformer do
        expect(transformed.as_text).to eq expected_md
      end

      it 'generates the expected symbol map' do
        expect(transformed.blocks.map { |b| b[:source_symbol] }.join("\n")).to eq expected_symbols
      it 'marks the lines correctly' do
        blocks = transformed.blocks.map { |b| b[:source_symbol] }.join("\n")
        result = expected_symbols

        expect(blocks).to eq result
      end
    end
  end

  it 'generates the correct transformed to source line map' do
    input = read_file('text_png_output', 'input.ipynb' )
    expected_line_numbers = read_file('text_png_output', 'expected_line_numbers.txt' )

    transformed = IpynbDiff::Transformer.new(**{ include_frontmatter: false }).transform(input)

    line_numbers = transformed.blocks.map { |b| b[:source_line] }.join("\n")

    expect(line_numbers).to eq(expected_line_numbers)

  end

  context 'When the notebook is invalid' do
    [
      ['because the json is invalid', 'a'],