Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-10-20 15:10:58 +00:00
parent bf18f3295b
commit 049d16d168
122 changed files with 2797 additions and 640 deletions

View File

@ -955,6 +955,7 @@ lib/gitlab/checks/** @proglottis @toon @zj-gitlab
/doc/user/project/pages/getting_started/ @ashrafkhamis
/doc/user/project/quick_actions.md @msedlakjakubowski
/doc/user/project/releases/ @rdickenson
/doc/user/project/remote_development/ @ashrafkhamis
/doc/user/project/repository/ @aqualls
/doc/user/project/repository/branches/ @aqualls
/doc/user/project/repository/file_finder.md @ashrafkhamis

View File

@ -2,13 +2,34 @@ include:
- template: DAST-API.gitlab-ci.yml
dast_api:
variables:
DAST_API_PROFILE: Passive
DAST_API_GRAPHQL: /api/graphql
DAST_API_TARGET_URL: ${CI_ENVIRONMENT_URL}
DAST_API_OVERRIDES_ENV: "{\"headers\":{\"Authorization\":\"Bearer $REVIEW_APPS_ROOT_TOKEN\"}}"
needs: ["review-deploy"]
# Uncomment resource_group if DAST_API_PROFILE is changed to an active scan
# resource_group: dast_api_scan
rules:
- when: never
dast_api_graphql:
extends: dast_api
variables:
DAST_API_GRAPHQL: /api/graphql
DAST_API_PROFILE: Passive
DAST_API_TARGET_URL: ${CI_ENVIRONMENT_URL}
DAST_API_OVERRIDES_ENV: "{\"headers\":{\"Authorization\":\"Bearer $REVIEW_APPS_ROOT_TOKEN\"}}"
rules:
- !reference [".reports:rules:schedule-dast", rules]
#
# To run this job in an MR pipeline, use this rule:
# - !reference [".reports:rules:test-dast", rules]
dast_api_rest:
extends: dast_api
variables:
DAST_API_OPENAPI: doc/api/openapi/openapi_v2.yaml
DAST_API_PROFILE: Passive
DAST_API_TARGET_URL: ${CI_ENVIRONMENT_URL}
DAST_API_OVERRIDES_ENV: "{\"headers\":{\"Authorization\":\"Bearer $REVIEW_APPS_ROOT_TOKEN\"}}"
rules:
- !reference [".reports:rules:schedule-dast", rules]
#
# To run this job in an MR pipeline, use this rule:
# - !reference [".reports:rules:test-dast", rules]

View File

@ -1690,6 +1690,12 @@
when: never
- <<: *if-dot-com-ee-schedule-nightly-child-pipeline
.reports:rules:test-dast:
rules:
- if: '$DAST_DISABLED || $GITLAB_FEATURES !~ /\bdast\b/'
when: never
- <<: *if-merge-request
.reports:rules:package_hunter-yarn:
rules:
- if: "$PACKAGE_HUNTER_USER == null || $PACKAGE_HUNTER_USER == ''"

View File

@ -122,6 +122,8 @@ rubocop:
needs:
- job: detect-tests
optional: true
variables:
RUBOCOP_TARGET_FILES: "tmp/rubocop_target_files.txt"
script:
- |
# For non-merge request, or when RUN_ALL_RUBOCOP is 'true', run all RuboCop rules
@ -132,8 +134,13 @@ rubocop:
unset CI_SLACK_WEBHOOK_URL
run_timed_command "bundle exec rake rubocop:check:graceful"
else
cat ${RSPEC_CHANGED_FILES_PATH} | ruby -e 'puts $stdin.read.split(" ").select { |f| File.exist?(f) }.join(" ")' > tmp/rubocop_target_files.txt
run_timed_command "bundle exec rubocop --parallel --force-exclusion $(cat tmp/rubocop_target_files.txt)"
cat "${RSPEC_CHANGED_FILES_PATH}" | ruby -e 'print $stdin.read.split(" ").select { |f| File.exist?(f) }.join(" ")' > "$RUBOCOP_TARGET_FILES"
# Skip running RuboCop if there's no target files
if [ -s "${RUBOCOP_TARGET_FILES}" ]; then
run_timed_command "bundle exec rubocop --parallel --force-exclusion $(cat ${RUBOCOP_TARGET_FILES})"
else
echoinfo "Nothing interesting changed for RuboCop. Skipping."
fi
fi
qa:metadata-lint:

View File

@ -0,0 +1,88 @@
<script>
import { GlButtonGroup, GlButton, GlBadge } from '@gitlab/ui';
import { numberToHumanSize } from '~/lib/utils/number_utils';
import { I18N_EXPIRED, I18N_DOWNLOAD, I18N_DELETE } from '../constants';
export default {
name: 'ArtifactRow',
components: {
GlButtonGroup,
GlButton,
GlBadge,
},
props: {
artifact: {
type: Object,
required: true,
},
isLoading: {
type: Boolean,
required: true,
},
isLastRow: {
type: Boolean,
required: true,
},
},
computed: {
isExpired() {
if (!this.artifact.expireAt) {
return false;
}
return Date.now() > new Date(this.artifact.expireAt).getTime();
},
artifactSize() {
return numberToHumanSize(this.artifact.size);
},
},
i18n: {
expired: I18N_EXPIRED,
download: I18N_DOWNLOAD,
delete: I18N_DELETE,
},
};
</script>
<template>
<div
class="gl-py-4"
:class="{ 'gl-border-b-solid gl-border-b-1 gl-border-gray-100': !isLastRow }"
>
<div class="gl-display-inline-flex gl-align-items-center gl-w-full">
<span class="gl-w-half gl-pl-8 gl-display-flex" data-testid="job-artifact-row-name">
{{ artifact.name }}
<gl-badge size="sm" variant="neutral" class="gl-ml-2">
{{ artifact.fileType.toLowerCase() }}
</gl-badge>
<gl-badge v-if="isExpired" size="sm" variant="warning" icon="expire" class="gl-ml-2">
{{ $options.i18n.expired }}
</gl-badge>
</span>
<span class="gl-w-quarter gl-text-right gl-pr-5" data-testid="job-artifact-row-size">
{{ artifactSize }}
</span>
<span class="gl-w-quarter gl-text-right gl-pr-5">
<gl-button-group>
<gl-button
category="tertiary"
icon="download"
:title="$options.i18n.download"
:aria-label="$options.i18n.download"
:href="artifact.downloadPath"
data-testid="job-artifact-row-download-button"
/>
<gl-button
category="tertiary"
icon="remove"
:title="$options.i18n.delete"
:aria-label="$options.i18n.delete"
:loading="isLoading"
data-testid="job-artifact-row-delete-button"
@click="$emit('delete')"
/>
</gl-button-group>
</span>
</div>
</div>
</template>

View File

@ -0,0 +1,89 @@
<script>
import { createAlert } from '~/flash';
import { DynamicScroller, DynamicScrollerItem } from 'vendor/vue-virtual-scroller';
import getJobArtifactsQuery from '../graphql/queries/get_job_artifacts.query.graphql';
import destroyArtifactMutation from '../graphql/mutations/destroy_artifact.mutation.graphql';
import { removeArtifactFromStore } from '../graphql/cache_update';
import {
I18N_DESTROY_ERROR,
ARTIFACT_ROW_HEIGHT,
ARTIFACTS_SHOWN_WITHOUT_SCROLLING,
} from '../constants';
import ArtifactRow from './artifact_row.vue';
export default {
name: 'ArtifactsTableRowDetails',
components: {
DynamicScroller,
DynamicScrollerItem,
ArtifactRow,
},
props: {
artifacts: {
type: Object,
required: true,
},
queryVariables: {
type: Object,
required: true,
},
},
data() {
return {
deletingArtifactId: null,
};
},
computed: {
scrollContainerStyle() {
/*
limit the height of the expanded artifacts container to a number of artifacts
if a job has more artifacts than ARTIFACTS_SHOWN_WITHOUT_SCROLLING, scroll to see the rest
add one pixel to row height to account for borders
*/
return { maxHeight: `${ARTIFACTS_SHOWN_WITHOUT_SCROLLING * (ARTIFACT_ROW_HEIGHT + 1)}px` };
},
},
methods: {
isLastRow(index) {
return index === this.artifacts.nodes.length - 1;
},
destroyArtifact(id) {
this.deletingArtifactId = id;
this.$apollo
.mutate({
mutation: destroyArtifactMutation,
variables: { id },
update: (store) => {
removeArtifactFromStore(store, id, getJobArtifactsQuery, this.queryVariables);
},
})
.catch(() => {
createAlert({
message: I18N_DESTROY_ERROR,
});
this.$emit('refetch');
})
.finally(() => {
this.deletingArtifactId = null;
});
},
},
ARTIFACT_ROW_HEIGHT,
};
</script>
<template>
<div :style="scrollContainerStyle">
<dynamic-scroller :items="artifacts.nodes" :min-item-size="$options.ARTIFACT_ROW_HEIGHT">
<template #default="{ item, index, active }">
<dynamic-scroller-item :item="item" :active="active" :class="{ active }">
<artifact-row
:artifact="item"
:is-last-row="isLastRow(index)"
:is-loading="item.id === deletingArtifactId"
@delete="destroyArtifact(item.id)"
/>
</dynamic-scroller-item>
</template>
</dynamic-scroller>
</div>
</template>

View File

@ -0,0 +1,314 @@
<script>
import {
GlLoadingIcon,
GlTable,
GlLink,
GlButtonGroup,
GlButton,
GlBadge,
GlIcon,
GlPagination,
} from '@gitlab/ui';
import { createAlert } from '~/flash';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import getJobArtifactsQuery from '../graphql/queries/get_job_artifacts.query.graphql';
import { totalArtifactsSizeForJob, mapArchivesToJobNodes, mapBooleansToJobNodes } from '../utils';
import {
STATUS_BADGE_VARIANTS,
I18N_DOWNLOAD,
I18N_BROWSE,
I18N_DELETE,
I18N_EXPIRED,
I18N_DESTROY_ERROR,
I18N_FETCH_ERROR,
I18N_ARTIFACTS,
I18N_JOB,
I18N_SIZE,
I18N_CREATED,
I18N_ARTIFACTS_COUNT,
INITIAL_CURRENT_PAGE,
INITIAL_PREVIOUS_PAGE_CURSOR,
INITIAL_NEXT_PAGE_CURSOR,
JOBS_PER_PAGE,
INITIAL_LAST_PAGE_SIZE,
} from '../constants';
import ArtifactsTableRowDetails from './artifacts_table_row_details.vue';
const INITIAL_PAGINATION_STATE = {
currentPage: INITIAL_CURRENT_PAGE,
prevPageCursor: INITIAL_PREVIOUS_PAGE_CURSOR,
nextPageCursor: INITIAL_NEXT_PAGE_CURSOR,
firstPageSize: JOBS_PER_PAGE,
lastPageSize: INITIAL_LAST_PAGE_SIZE,
};
export default {
name: 'JobArtifactsTable',
components: {
GlLoadingIcon,
GlTable,
GlLink,
GlButtonGroup,
GlButton,
GlBadge,
GlIcon,
GlPagination,
CiIcon,
TimeAgo,
ArtifactsTableRowDetails,
},
inject: ['projectPath'],
apollo: {
jobArtifacts: {
query: getJobArtifactsQuery,
variables() {
return this.queryVariables;
},
update({ project: { jobs: { nodes = [], pageInfo = {}, count = 0 } = {} } }) {
return {
nodes: nodes.map(mapArchivesToJobNodes).map(mapBooleansToJobNodes),
count,
pageInfo,
};
},
error() {
createAlert({
message: I18N_FETCH_ERROR,
});
},
},
},
data() {
return {
jobArtifacts: {
nodes: [],
count: 0,
pageInfo: {},
},
pagination: INITIAL_PAGINATION_STATE,
};
},
computed: {
queryVariables() {
return {
projectPath: this.projectPath,
firstPageSize: this.pagination.firstPageSize,
lastPageSize: this.pagination.lastPageSize,
prevPageCursor: this.pagination.prevPageCursor,
nextPageCursor: this.pagination.nextPageCursor,
};
},
showPagination() {
return this.jobArtifacts.count > JOBS_PER_PAGE;
},
prevPage() {
return Number(this.jobArtifacts.pageInfo.hasPreviousPage);
},
nextPage() {
return Number(this.jobArtifacts.pageInfo.hasNextPage);
},
},
methods: {
refetchArtifacts() {
this.$apollo.queries.jobArtifacts.refetch();
},
artifactsSize(item) {
return totalArtifactsSizeForJob(item);
},
pipelineId(item) {
const id = getIdFromGraphQLId(item.pipeline.id);
return `#${id}`;
},
handlePageChange(page) {
const { startCursor, endCursor } = this.jobArtifacts.pageInfo;
if (page > this.pagination.currentPage) {
this.pagination = {
...INITIAL_PAGINATION_STATE,
nextPageCursor: endCursor,
currentPage: page,
};
} else {
this.pagination = {
lastPageSize: JOBS_PER_PAGE,
firstPageSize: null,
prevPageCursor: startCursor,
currentPage: page,
};
}
},
handleRowToggle(toggleDetails, hasArtifacts) {
if (!hasArtifacts) return;
toggleDetails();
},
},
fields: [
{
key: 'artifacts',
label: I18N_ARTIFACTS,
thClass: 'gl-w-quarter',
},
{
key: 'job',
label: I18N_JOB,
thClass: 'gl-w-35p',
},
{
key: 'size',
label: I18N_SIZE,
thClass: 'gl-w-15p gl-text-right',
tdClass: 'gl-text-right',
},
{
key: 'created',
label: I18N_CREATED,
thClass: 'gl-w-eighth gl-text-center',
tdClass: 'gl-text-center',
},
{
key: 'actions',
label: '',
thClass: 'gl-w-eighth',
tdClass: 'gl-text-right',
},
],
STATUS_BADGE_VARIANTS,
i18n: {
download: I18N_DOWNLOAD,
browse: I18N_BROWSE,
delete: I18N_DELETE,
expired: I18N_EXPIRED,
destroyArtifactError: I18N_DESTROY_ERROR,
fetchArtifactsError: I18N_FETCH_ERROR,
artifactsLabel: I18N_ARTIFACTS,
jobLabel: I18N_JOB,
sizeLabel: I18N_SIZE,
createdLabel: I18N_CREATED,
artifactsCount: I18N_ARTIFACTS_COUNT,
},
};
</script>
<template>
<div>
<gl-table
:items="jobArtifacts.nodes"
:fields="$options.fields"
:busy="$apollo.queries.jobArtifacts.loading"
stacked="sm"
details-td-class="gl-bg-gray-10! gl-p-0! gl-overflow-auto"
>
<template #table-busy>
<gl-loading-icon size="lg" />
</template>
<template
#cell(artifacts)="{ item: { artifacts, hasArtifacts }, toggleDetails, detailsShowing }"
>
<span
:class="{ 'gl-cursor-pointer': hasArtifacts }"
data-testid="job-artifacts-count"
@click="handleRowToggle(toggleDetails, hasArtifacts)"
>
<gl-icon
v-if="hasArtifacts"
:name="detailsShowing ? 'chevron-down' : 'chevron-right'"
class="gl-mr-2"
/>
<strong>
{{ $options.i18n.artifactsCount(artifacts.nodes.length) }}
</strong>
</span>
</template>
<template #cell(job)="{ item }">
<span class="gl-display-inline-flex gl-align-items-center gl-w-full gl-mb-4">
<span data-testid="job-artifacts-job-status">
<ci-icon v-if="item.succeeded" :status="item.detailedStatus" class="gl-mr-3" />
<gl-badge
v-else
:icon="item.detailedStatus.icon"
:variant="$options.STATUS_BADGE_VARIANTS[item.detailedStatus.group]"
class="gl-mr-3"
>
{{ item.detailedStatus.label }}
</gl-badge>
</span>
<gl-link :href="item.webPath" class="gl-font-weight-bold">
{{ item.name }}
</gl-link>
</span>
<span class="gl-display-inline-flex">
<gl-icon name="pipeline" class="gl-mr-2" />
<gl-link
:href="item.pipeline.path"
class="gl-text-black-normal gl-text-decoration-underline gl-mr-4"
>
{{ pipelineId(item) }}
</gl-link>
<gl-icon name="branch" class="gl-mr-2" />
<gl-link
:href="item.refPath"
class="gl-text-black-normal gl-text-decoration-underline gl-mr-4"
>
{{ item.refName }}
</gl-link>
<gl-icon name="commit" class="gl-mr-2" />
<gl-link
:href="item.commitPath"
class="gl-text-black-normal gl-text-decoration-underline gl-mr-4"
>
{{ item.shortSha }}
</gl-link>
</span>
</template>
<template #cell(size)="{ item }">
<span data-testid="job-artifacts-size">{{ artifactsSize(item) }}</span>
</template>
<template #cell(created)="{ item }">
<time-ago data-testid="job-artifacts-created" :time="item.finishedAt" />
</template>
<template #cell(actions)="{ item }">
<gl-button-group>
<gl-button
icon="download"
:disabled="!item.archive.downloadPath"
:href="item.archive.downloadPath"
:title="$options.i18n.download"
:aria-label="$options.i18n.download"
data-testid="job-artifacts-download-button"
/>
<gl-button
icon="folder-open"
:title="$options.i18n.browse"
:aria-label="$options.i18n.browse"
data-testid="job-artifacts-browse-button"
disabled
/>
<gl-button
icon="remove"
:title="$options.i18n.delete"
:aria-label="$options.i18n.delete"
data-testid="job-artifacts-delete-button"
disabled
/>
</gl-button-group>
</template>
<template #row-details="{ item: { artifacts } }">
<artifacts-table-row-details
:artifacts="artifacts"
:query-variables="queryVariables"
@refetch="refetchArtifacts"
/>
</template>
</gl-table>
<gl-pagination
v-if="showPagination"
:value="pagination.currentPage"
:prev-page="prevPage"
:next-page="nextPage"
align="center"
class="gl-mt-3"
@input="handlePageChange"
/>
</div>
</template>

View File

@ -0,0 +1,47 @@
import { __, s__, n__ } from '~/locale';
export const JOB_STATUS_GROUP_SUCCESS = 'success';
export const STATUS_BADGE_VARIANTS = {
success: 'success',
passed: 'success',
error: 'danger',
failed: 'danger',
pending: 'warning',
'waiting-for-resource': 'warning',
'failed-with-warnings': 'warning',
'success-with-warnings': 'warning',
running: 'info',
canceled: 'neutral',
disabled: 'neutral',
scheduled: 'neutral',
manual: 'neutral',
notification: 'muted',
preparing: 'muted',
created: 'muted',
skipped: 'muted',
notfound: 'muted',
};
export const I18N_DOWNLOAD = __('Download');
export const I18N_BROWSE = s__('Artifacts|Browse');
export const I18N_DELETE = __('Delete');
export const I18N_EXPIRED = __('Expired');
export const I18N_DESTROY_ERROR = s__('Artifacts|An error occurred while deleting the artifact');
export const I18N_FETCH_ERROR = s__('Artifacts|An error occurred while retrieving job artifacts');
export const I18N_ARTIFACTS = __('Artifacts');
export const I18N_JOB = __('Job');
export const I18N_SIZE = __('Size');
export const I18N_CREATED = __('Created');
export const I18N_ARTIFACTS_COUNT = (count) => n__('%d file', '%d files', count);
export const INITIAL_CURRENT_PAGE = 1;
export const INITIAL_PREVIOUS_PAGE_CURSOR = '';
export const INITIAL_NEXT_PAGE_CURSOR = '';
export const JOBS_PER_PAGE = 20;
export const INITIAL_LAST_PAGE_SIZE = null;
export const ARCHIVE_FILE_TYPE = 'ARCHIVE';
export const ARTIFACT_ROW_HEIGHT = 56;
export const ARTIFACTS_SHOWN_WITHOUT_SCROLLING = 4;

View File

@ -0,0 +1,30 @@
import produce from 'immer';
export const hasErrors = ({ errors = [] }) => errors?.length;
export function removeArtifactFromStore(store, deletedArtifactId, query, variables) {
if (!hasErrors(deletedArtifactId)) {
const sourceData = store.readQuery({
query,
variables,
});
const data = produce(sourceData, (draftData) => {
draftData.project.jobs.nodes = draftData.project.jobs.nodes.map((jobNode) => {
return {
...jobNode,
artifacts: {
...jobNode.artifacts,
nodes: jobNode.artifacts.nodes.filter(({ id }) => id !== deletedArtifactId),
},
};
});
});
store.writeQuery({
query,
variables,
data,
});
}
}

View File

@ -0,0 +1,7 @@
mutation destroyArtifact($id: CiJobArtifactID!) {
artifactDestroy(input: { id: $id }) {
artifact {
id
}
}
}

View File

@ -0,0 +1,56 @@
#import "~/graphql_shared/fragments/page_info.fragment.graphql"
query getJobArtifacts(
$projectPath: ID!
$firstPageSize: Int
$lastPageSize: Int
$prevPageCursor: String = ""
$nextPageCursor: String = ""
) {
project(fullPath: $projectPath) {
id
jobs(
statuses: [SUCCESS, FAILED]
first: $firstPageSize
last: $lastPageSize
after: $nextPageCursor
before: $prevPageCursor
) {
count
nodes {
id
name
webPath
detailedStatus {
id
group
icon
label
}
pipeline {
id
iid
path
}
refName
refPath
shortSha
commitPath
finishedAt
artifacts {
nodes {
id
name
fileType
downloadPath
size
expireAt
}
}
}
pageInfo {
...PageInfo
}
}
}
}

View File

@ -0,0 +1,29 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import JobArtifactsTable from './components/job_artifacts_table.vue';
Vue.use(VueApollo);
const apolloProvider = new VueApollo({
defaultClient: createDefaultClient(),
});
export const initArtifactsTable = () => {
const el = document.querySelector('#js-artifact-management');
if (!el) {
return false;
}
const { projectPath } = el.dataset;
return new Vue({
el,
apolloProvider,
provide: {
projectPath,
},
render: (createElement) => createElement(JobArtifactsTable),
});
};

View File

@ -0,0 +1,26 @@
import { numberToHumanSize } from '~/lib/utils/number_utils';
import { ARCHIVE_FILE_TYPE, JOB_STATUS_GROUP_SUCCESS } from './constants';
export const totalArtifactsSizeForJob = (job) =>
numberToHumanSize(
job.artifacts.nodes
.map((artifact) => artifact.size)
.reduce((total, artifact) => total + artifact, 0),
);
export const mapArchivesToJobNodes = (jobNode) => {
return {
archive: {
...jobNode.artifacts.nodes.find((artifact) => artifact.fileType === ARCHIVE_FILE_TYPE),
},
...jobNode,
};
};
export const mapBooleansToJobNodes = (jobNode) => {
return {
succeeded: jobNode.detailedStatus.group === JOB_STATUS_GROUP_SUCCESS,
hasArtifacts: jobNode.artifacts.nodes.length > 0,
...jobNode,
};
};

View File

@ -57,21 +57,32 @@ export const classNameMapCell = ({ line, hll, isLoggedIn, isHover }) => {
export const addCommentTooltip = (line) => {
let tooltip;
if (!line) return tooltip;
if (!line) {
return tooltip;
}
tooltip = __('Add a comment to this line or drag for multiple lines');
const brokenSymlinks = line.commentsDisabled;
if (brokenSymlinks) {
if (brokenSymlinks.wasSymbolic || brokenSymlinks.isSymbolic) {
if (!line.problems) {
return tooltip;
}
const { brokenSymlink, brokenLineCode, fileOnlyMoved } = line.problems;
if (brokenSymlink) {
if (brokenSymlink.wasSymbolic || brokenSymlink.isSymbolic) {
tooltip = __(
'Commenting on symbolic links that replace or are replaced by files is currently not supported.',
);
} else if (brokenSymlinks.wasReal || brokenSymlinks.isReal) {
} else if (brokenSymlink.wasReal || brokenSymlink.isReal) {
tooltip = __(
'Commenting on files that replace or are replaced by symbolic links is currently not supported.',
);
}
} else if (fileOnlyMoved) {
tooltip = __('Commenting on files that are only moved or renamed is currently not supported');
} else if (brokenLineCode) {
tooltip = __('Commenting on this line is currently not supported');
}
return tooltip;

View File

@ -324,15 +324,24 @@ function cleanRichText(text) {
}
function prepareLine(line, file) {
const problems = {
brokenSymlink: file.brokenSymlink,
brokenLineCode: !line.line_code,
fileOnlyMoved: file.renamed_file && file.added_lines === 0 && file.removed_lines === 0,
};
if (!line.alreadyPrepared) {
Object.assign(line, {
commentsDisabled: file.brokenSymlink,
commentsDisabled: Boolean(
problems.brokenSymlink || problems.fileOnlyMoved || problems.brokenLineCode,
),
rich_text: cleanRichText(line.rich_text),
discussionsExpanded: true,
discussions: [],
hasForm: false,
text: undefined,
alreadyPrepared: true,
problems,
});
}
}

View File

@ -0,0 +1,3 @@
import { initArtifactsTable } from '~/artifacts/index';
initArtifactsTable();

View File

@ -9,6 +9,8 @@ import eventHub from '~/sidebar/event_hub';
import Store from '~/sidebar/stores/sidebar_store';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import getMergeRequestReviewersQuery from '~/vue_shared/components/sidebar/queries/get_merge_request_reviewers.query.graphql';
import mergeRequestReviewersUpdatedSubscription from '~/vue_shared/components/sidebar/queries/merge_request_reviewers.subscription.graphql';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import ReviewerTitle from './reviewer_title.vue';
import Reviewers from './reviewers.vue';
@ -66,6 +68,36 @@ export default {
error() {
createAlert({ message: __('An error occurred while fetching reviewers.') });
},
subscribeToMore: {
document() {
return mergeRequestReviewersUpdatedSubscription;
},
variables() {
return {
issuableId: this.issuable?.id,
};
},
skip() {
return !this.issuable?.id || !this.isRealtimeEnabled;
},
updateQuery(
_,
{
subscriptionData: {
data: { mergeRequestReviewersUpdated },
},
},
) {
if (mergeRequestReviewersUpdated) {
this.store.setReviewersFromRealtime(
mergeRequestReviewersUpdated.reviewers.nodes.map((r) => ({
...r,
id: getIdFromGraphQLId(r.id),
})),
);
}
},
},
},
},
data() {
@ -87,6 +119,9 @@ export default {
canUpdate() {
return this.issuable.userPermissions?.adminMergeRequest || false;
},
isRealtimeEnabled() {
return this.glFeatures.realtimeReviewers;
},
},
created() {
this.store = new Store();

View File

@ -0,0 +1,34 @@
<script>
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { state } from './sidebar_reviewers.vue';
export default {
data() {
return state;
},
computed: {
reviewers() {
return this.issuable?.reviewers?.nodes || [];
},
},
methods: {
getIdFromGraphQLId,
},
};
</script>
<template>
<div>
<input
v-for="reviewer in reviewers"
:key="reviewer.id"
type="hidden"
name="merge_request[reviewer_ids][]"
:value="getIdFromGraphQLId(reviewer.id)"
:data-avatar-url="reviewer.avatarUrl"
:data-name="reviewer.name"
:data-username="reviewer.username"
:data-can-merge="reviewer.mergeRequestInteraction.canMerge"
/>
</div>
</template>

View File

@ -33,6 +33,7 @@ import CopyEmailToClipboard from './components/copy_email_to_clipboard.vue';
import SidebarEscalationStatus from './components/incidents/sidebar_escalation_status.vue';
import IssuableLockForm from './components/lock/issuable_lock_form.vue';
import SidebarReviewers from './components/reviewers/sidebar_reviewers.vue';
import SidebarReviewersInputs from './components/reviewers/sidebar_reviewers_inputs.vue';
import SidebarSeverity from './components/severity/sidebar_severity.vue';
import SidebarSubscriptionsWidget from './components/subscriptions/sidebar_subscriptions_widget.vue';
import SidebarTimeTracking from './components/time_tracking/sidebar_time_tracking.vue';
@ -210,6 +211,18 @@ function mountReviewersComponent(mediator) {
}),
});
const reviewersInputEl = document.querySelector('.js-reviewers-inputs');
if (reviewersInputEl) {
// eslint-disable-next-line no-new
new Vue({
el: reviewersInputEl,
render(createElement) {
return createElement(SidebarReviewersInputs);
},
});
}
const reviewerDropdown = document.querySelector('.js-sidebar-reviewer-dropdown');
if (reviewerDropdown) {

View File

@ -138,6 +138,10 @@ export default class SidebarStore {
this.assignees = data;
}
setReviewersFromRealtime(data) {
this.reviewers = data;
}
setAutocompleteProjects(projects) {
this.autocompleteProjects = projects;
}

View File

@ -431,6 +431,10 @@ function UsersSelect(currentUser, els, options = {}) {
hidden() {
if ($dropdown.hasClass('js-multiselect')) {
if ($dropdown.hasClass(elsClassName)) {
if (window.gon?.features?.realtimeReviewers) {
$dropdown.data('deprecatedJQueryDropdown').clearMenu();
$dropdown.closest('.selectbox').children('input[type="hidden"]').remove();
}
emitSidebarEvent('sidebar.saveReviewers');
} else {
emitSidebarEvent('sidebar.saveAssignees');

View File

@ -0,0 +1,22 @@
#import "~/graphql_shared/fragments/user.fragment.graphql"
#import "~/graphql_shared/fragments/user_availability.fragment.graphql"
subscription mergeRequestReviewersUpdated($issuableId: IssuableID!) {
mergeRequestReviewersUpdated(issuableId: $issuableId) {
... on MergeRequest {
id
reviewers {
nodes {
...User
...UserAvailability
mergeRequestInteraction {
canMerge
canUpdate
approved
reviewed
}
}
}
}
}
}

View File

@ -968,7 +968,7 @@ $system-note-svg-size: 1rem;
height: 12px;
}
&:hover,
&:hover:not([disabled]),
&.inverted {
&::before {
background-color: $white;

View File

@ -66,11 +66,10 @@ class Dashboard::ProjectsController < Dashboard::ApplicationController
end
def load_projects(finder_params)
@total_user_projects_count = ProjectsFinder.new(params: { non_public: true, without_deleted: true, not_aimed_for_deletion: true }, current_user: current_user).execute
@total_starred_projects_count = ProjectsFinder.new(params: { starred: true, without_deleted: true, not_aimed_for_deletion: true }, current_user: current_user).execute
@total_user_projects_count = ProjectsFinder.new(params: { non_public: true, not_aimed_for_deletion: true }, current_user: current_user).execute
@total_starred_projects_count = ProjectsFinder.new(params: { starred: true, not_aimed_for_deletion: true }, current_user: current_user).execute
finder_params[:use_cte] = true if use_cte_for_finder?
finder_params[:without_deleted] = true
projects = ProjectsFinder.new(params: finder_params, current_user: current_user).execute
@ -93,7 +92,7 @@ class Dashboard::ProjectsController < Dashboard::ApplicationController
def load_events
projects = ProjectsFinder
.new(params: params.merge(non_public: true, without_deleted: true), current_user: current_user)
.new(params: params.merge(non_public: true, not_aimed_for_deletion: true), current_user: current_user)
.execute
@events = EventCollection

View File

@ -44,6 +44,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
push_frontend_feature_flag(:paginated_mr_discussions, project)
push_frontend_feature_flag(:mr_review_submit_comment, project)
push_frontend_feature_flag(:mr_experience_survey, project)
push_frontend_feature_flag(:realtime_reviewers, project)
end
before_action do

View File

@ -274,8 +274,6 @@ class UsersController < ApplicationController
def finder_params
{
# don't display projects pending deletion
without_deleted: true,
# don't display projects marked for deletion
not_aimed_for_deletion: true
}

View File

@ -27,7 +27,6 @@
# last_activity_after: datetime
# last_activity_before: datetime
# repository_storage: string
# without_deleted: boolean
# not_aimed_for_deletion: boolean
#
class ProjectsFinder < UnionFinder
@ -76,6 +75,7 @@ class ProjectsFinder < UnionFinder
# EE would override this to add more filters
def filter_projects(collection)
collection = collection.without_deleted
collection = by_ids(collection)
collection = by_personal(collection)
collection = by_starred(collection)
@ -86,7 +86,6 @@ class ProjectsFinder < UnionFinder
collection = by_search(collection)
collection = by_archived(collection)
collection = by_custom_attributes(collection)
collection = by_deleted_status(collection)
collection = by_not_aimed_for_deletion(collection)
collection = by_last_activity_after(collection)
collection = by_last_activity_before(collection)
@ -212,10 +211,6 @@ class ProjectsFinder < UnionFinder
items.optionally_search(params[:search], include_namespace: params[:search_namespaces].present?)
end
def by_deleted_status(items)
params[:without_deleted].present? ? items.without_deleted : items
end
def by_not_aimed_for_deletion(items)
params[:not_aimed_for_deletion].present? ? items.not_aimed_for_deletion : items
end

View File

@ -25,7 +25,6 @@ module ProjectSearchArguments
def project_finder_params(params)
{
without_deleted: true,
non_public: params[:membership],
search: params[:search],
search_namespaces: params[:search_namespaces],

View File

@ -7,7 +7,6 @@ module ProtectedRef
belongs_to :project, touch: true
validates :name, presence: true
validates :project, presence: true
delegate :matching, :matches?, :wildcard?, to: :ref_matcher

View File

@ -119,6 +119,8 @@ class Group < Namespace
has_many :group_callouts, class_name: 'Users::GroupCallout', foreign_key: :group_id
has_many :protected_branches, inverse_of: :group
has_one :group_feature, inverse_of: :group, class_name: 'Groups::FeatureSetting'
delegate :prevent_sharing_groups_outside_hierarchy, :new_user_signups_cap, :setup_for_company, :jobs_to_be_done, to: :namespace_settings

View File

@ -31,6 +31,7 @@ class ProjectAuthorization < ApplicationRecord
def self.insert_all_in_batches(attributes, per_batch = BATCH_SIZE)
add_delay = add_delay_between_batches?(entire_size: attributes.size, batch_size: per_batch)
log_details(entire_size: attributes.size) if add_delay
attributes.each_slice(per_batch) do |attributes_batch|
insert_all(attributes_batch)
@ -40,6 +41,7 @@ class ProjectAuthorization < ApplicationRecord
def self.delete_all_in_batches_for_project(project:, user_ids:, per_batch: BATCH_SIZE)
add_delay = add_delay_between_batches?(entire_size: user_ids.size, batch_size: per_batch)
log_details(entire_size: user_ids.size) if add_delay
user_ids.each_slice(per_batch) do |user_ids_batch|
project.project_authorizations.where(user_id: user_ids_batch).delete_all
@ -49,6 +51,7 @@ class ProjectAuthorization < ApplicationRecord
def self.delete_all_in_batches_for_user(user:, project_ids:, per_batch: BATCH_SIZE)
add_delay = add_delay_between_batches?(entire_size: project_ids.size, batch_size: per_batch)
log_details(entire_size: project_ids.size) if add_delay
project_ids.each_slice(per_batch) do |project_ids_batch|
user.project_authorizations.where(project_id: project_ids_batch).delete_all
@ -65,6 +68,13 @@ class ProjectAuthorization < ApplicationRecord
Feature.enabled?(:enable_minor_delay_during_project_authorizations_refresh)
end
private_class_method def self.log_details(entire_size:)
Gitlab::AppLogger.info(
entire_size: entire_size,
message: 'Project authorizations refresh performed with delay'
)
end
private_class_method def self.perform_delay
sleep(SLEEP_DELAY)
end

View File

@ -4,6 +4,10 @@ class ProtectedBranch < ApplicationRecord
include ProtectedRef
include Gitlab::SQL::Pattern
belongs_to :group, foreign_key: :namespace_id, touch: true, inverse_of: :protected_branches
validate :validate_either_project_or_top_group
scope :requiring_code_owner_approval,
-> { where(code_owner_approval_required: true) }
@ -99,6 +103,18 @@ class ProtectedBranch < ApplicationRecord
def default_branch?
name == project.default_branch
end
private
def validate_either_project_or_top_group
if !project && !group
errors.add(:base, _('must be associated with a Group or a Project'))
elsif project && group
errors.add(:base, _('cannot be associated with both a Group and a Project'))
elsif group && group.root_ancestor != group
errors.add(:base, _('cannot be associated with a subgroup'))
end
end
end
ProtectedBranch.prepend_mod_with('ProtectedBranch')

View File

@ -4,6 +4,7 @@ class ProtectedTag < ApplicationRecord
include ProtectedRef
validates :name, uniqueness: { scope: :project_id }
validates :project, presence: true
protected_ref_access_levels :create

View File

@ -69,4 +69,6 @@
= render 'admin/application_settings/invitation_flow_enforcement', form: f
= render 'admin/application_settings/user_restrictions', form: f
= render_if_exists 'admin/application_settings/availability_on_namespace_setting', form: f
-# This is added for Jihu edition which should not be deleted without notifying Jihu
= render_if_exists 'admin/application_settings/password_expiration_setting', form: f
= f.submit _('Save changes'), pajamas_button: true, data: { qa_selector: 'save_changes_button' }

View File

@ -1,61 +0,0 @@
.gl-responsive-table-row.px-md-3
.table-section.section-25.section-wrap.commit
.table-mobile-header{ role: 'rowheader' }= _('Job')
.table-mobile-content
.branch-commit.cgray
- if can?(current_user, :read_build, @project)
= link_to project_job_path(@project, artifact.job) do
%span.build-link ##{artifact.job_id}
- else
%span.build-link ##{artifact.job_id}
- if artifact.job.ref
.icon-container.gl-display-inline-block{ "aria-label" => artifact.job.tag? ? _('Tag') : _('Branch') }
= artifact.job.tag? ? sprite_icon('tag', css_class: 'sprite') : sprite_icon('branch', css_class: 'sprite')
= link_to artifact.job.ref, project_ref_path(@project, artifact.job.ref), class: 'ref-name'
- else
.light= _('none')
.icon-container.commit-icon{ "aria-label" => _('Commit') }
= sprite_icon('commit')
- if artifact.job.sha
= link_to artifact.job.short_sha, project_commit_path(@project, artifact.job.sha), class: 'commit-sha mr-0'
.table-section.section-15.section-wrap
.table-mobile-header{ role: 'rowheader' }= _('Name')
.table-mobile-content
= artifact.job.name
.table-section.section-20
.table-mobile-header{ role: 'rowheader' }= _('Creation date')
.table-mobile-content
%p.finished-at
= sprite_icon("calendar")
%span= time_ago_with_tooltip(artifact.created_at)
.table-section.section-20
.table-mobile-header{ role: 'rowheader' }= _('Expiration date')
.table-mobile-content
- if artifact.expire_at
%p.finished-at
= sprite_icon("calendar")
%span= time_ago_with_tooltip(artifact.expire_at)
.table-section.section-10
.table-mobile-header{ role: 'rowheader' }= _('Size')
.table-mobile-content
= number_to_human_size(artifact.size, precision: 2)
.table-section.table-button-footer.section-10
.table-action-buttons
.btn-group
- if can?(current_user, :read_build, @project)
= link_to download_project_job_artifacts_path(@project, artifact.job), rel: 'nofollow', download: '', title: _('Download artifacts'), data: { placement: 'top', container: 'body' }, ref: 'tooltip', aria: { label: _('Download artifacts') }, class: 'gl-button btn btn-default btn-icon has-tooltip' do
= sprite_icon('download', css_class: 'gl-icon')
= link_to browse_project_job_artifacts_path(@project, artifact.job), rel: 'nofollow', title: _('Browse artifacts'), data: { placement: 'top', container: 'body' }, ref: 'tooltip', aria: { label: _('Browse artifacts') }, class: 'gl-button btn btn-default btn-icon has-tooltip' do
= sprite_icon('folder-open', css_class: 'gl-icon')
- if can?(current_user, :destroy_artifacts, @project)
= link_to project_artifact_path(@project, artifact), data: { placement: 'top', container: 'body', confirm: _('Are you sure you want to delete these artifacts?'), confirm_btn_variant: "danger" }, method: :delete, title: _('Delete artifacts'), ref: 'tooltip', aria: { label: _('Delete artifacts') }, class: 'gl-button btn btn-danger btn-icon has-tooltip' do
= sprite_icon('remove', css_class: 'gl-icon')

View File

@ -1,16 +0,0 @@
- if artifacts.blank?
.nothing-here-block= _('No jobs to show')
- else
.table-holder
.ci-table
.gl-responsive-table-row.table-row-header.px-md-3{ role: 'row' }
.table-section.section-25{ role: 'rowheader' }= _('Job')
.table-section.section-15{ role: 'rowheader' }= _('Name')
.table-section.section-20{ role: 'rowheader' }= _('Creation date')
.table-section.section-20{ role: 'rowheader' }= _('Expiration date')
.table-section.section-10{ role: 'rowheader' }= _('Size')
.table-section.section-10{ role: 'rowheader' }
= render partial: 'artifact', collection: artifacts, as: :artifact
= paginate artifacts, theme: "gitlab", total_pages: @total_pages

View File

@ -1,10 +1,9 @@
- @no_container = true
- page_title _('Artifacts')
%div{ class: container_class }
.top-area.py-3
.align-self-center
= _('Total artifacts size: %{total_size}') % { total_size: number_to_human_size(@total_size, precicion: 2) }
.content-list.builds-content-list
= render "table", artifacts: @artifacts, project: @project
%h1.page-title.gl-font-size-h-display.gl-mb-0
= s_('Artifacts|Artifacts')
.gl-mb-6
%strong= s_('Artifacts|Total artifacts size')
= number_to_human_size(@total_size, precicion: 2)
#js-artifact-management{ data: { "project-path" => @project.full_path } }

View File

@ -6,11 +6,7 @@
= gl_loading_icon(inline: true)
.selectbox.hide-collapsed
- if reviewers.none?
= hidden_field_tag "#{issuable_type}[reviewer_ids][]", 0, id: nil
- else
- reviewers.each do |reviewer|
= hidden_field_tag "#{issuable_type}[reviewer_ids][]", reviewer.id, id: nil, data: reviewer_sidebar_data(reviewer, merge_request: @merge_request)
.js-reviewers-inputs
- options = { toggle_class: 'js-reviewer-search js-author-search',
title: _('Request review from'),
@ -32,8 +28,7 @@
- dropdown_options = reviewers_dropdown_options(issuable_type)
- title = dropdown_options[:title]
- options[:toggle_class] += ' js-multiselect js-save-user-data'
- data = { field_name: "#{issuable_type}[reviewer_ids][]" }
- data[:multi_select] = true
- data = { multi_select: true }
- data['dropdown-title'] = title
- data['dropdown-header'] = dropdown_options[:data][:'dropdown-header']
- data[:suggested_reviewers_header] = dropdown_options[:data][:suggested_reviewers_header]

View File

@ -0,0 +1,8 @@
---
# Development feature flag, disabled by default. Presumably gates
# real-time updates of merge request reviewers (see the introducing MR
# below for the exact scope — confirm before relying on this note).
name: realtime_reviewers
introduced_by_url: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/99137'
rollout_issue_url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/378299'
milestone: '15.5'
type: development
group: group::code review
default_enabled: false

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true

# Adds a nullable bigint `namespace_id` column to `protected_branches`,
# the first step towards letting a protected branch be owned by a group
# (namespace) instead of a project.
class AddNamespaceIdToProtectedBranches < Gitlab::Database::Migration[2.0]
  # A plain nullable column addition is a fast metadata change; lock
  # retries guard against transient lock contention on the table.
  enable_lock_retries!

  def change
    add_column :protected_branches, :namespace_id, :bigint
  end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true

# Backs the new `protected_branches.namespace_id` column with a partial
# index and a cascading foreign key to `namespaces`.
class AddNamespaceIdIndexesForeignKeyToProtectedBranches < Gitlab::Database::Migration[2.0]
  # Concurrent index / FK creation cannot run inside a transaction.
  disable_ddl_transaction!

  INDEX_NAME = 'index_protected_branches_namespace_id'

  def up
    # Partial index: rows with a NULL namespace_id are excluded.
    add_concurrent_index :protected_branches, :namespace_id, name: INDEX_NAME, where: 'namespace_id IS NOT NULL'
    # Deleting a namespace cascades to its group-level protected branches.
    add_concurrent_foreign_key :protected_branches, :namespaces, column: :namespace_id, on_delete: :cascade
  end

  def down
    with_lock_retries do
      remove_foreign_key :protected_branches, column: :namespace_id
    end

    remove_concurrent_index :protected_branches, :namespace_id, name: INDEX_NAME
  end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true

# Adds a check constraint guaranteeing that a protected branch references
# exactly one of `project_id` or `namespace_id`.
class AddGroupOrProjectConstraintInProtectedBranches < Gitlab::Database::Migration[2.0]
  disable_ddl_transaction!

  CONSTRAINT_NAME = 'protected_branches_project_id_namespace_id_any_not_null'

  def up
    # `(a IS NULL) <> (b IS NULL)` is true only when exactly one of the
    # two columns is NULL — an XOR on NULL-ness.
    constraint = <<~CONSTRAINT
      (project_id IS NULL) <> (namespace_id IS NULL)
    CONSTRAINT

    add_check_constraint :protected_branches, constraint, CONSTRAINT_NAME
  end

  def down
    remove_check_constraint :protected_branches, CONSTRAINT_NAME
  end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true

# Relaxes the NOT NULL constraint on `protected_branches.project_id` so
# that rows owned by a namespace (group) instead of a project can exist.
class ChangeProjectIdNullInProtectedBranches < Gitlab::Database::Migration[2.0]
  enable_lock_retries!

  def up
    change_column_null :protected_branches, :project_id, true
  end

  def down
    # NOTE(review): re-adding NOT NULL will fail if any rows with a NULL
    # project_id exist at rollback time.
    change_column_null :protected_branches, :project_id, false
  end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true

# Drops a temporary partial index on `approval_merge_request_rules`; the
# `tmp_` prefix marks it as an index added for a one-off task that is
# presumably complete (confirm against the index's introducing migration).
class RemoveTmpIndexApprovalMergeRequestRulesOnReportType < Gitlab::Database::Migration[2.0]
  INDEX_NAME = 'tmp_index_approval_merge_request_rules_on_report_type_equal_one'

  disable_ddl_transaction!

  def up
    remove_concurrent_index_by_name :approval_merge_request_rules, INDEX_NAME
  end

  def down
    # Recreate the index exactly as it was so rollback restores the schema.
    add_concurrent_index :approval_merge_request_rules,
      [:id, :report_type],
      name: INDEX_NAME,
      where: "report_type = 1"
  end
end

View File

@ -0,0 +1 @@
04a04a34de63b17f02a6b1333854638ae3b44d284e5ce2fcbee6fb3ec06b7757

View File

@ -0,0 +1 @@
3d1b1394aa1b5db83867b284f119ec711255d2a01b78720d42c0a1acfe93c94f

View File

@ -0,0 +1 @@
30d48cf8219cb4bcfeac454d7baf70d05f0285bdac519e4a1fb9f1c412267a9d

View File

@ -0,0 +1 @@
c8c26dad8d11b3715fce07ee9bedc9c4f66d2454646d58994e1568758f240299

View File

@ -0,0 +1 @@
6990eb33313f6c0a82409fde69c74a88d0a9db2cd144322bcff4428261bbf1e4

View File

@ -20471,12 +20471,14 @@ ALTER SEQUENCE protected_branch_unprotect_access_levels_id_seq OWNED BY protecte
CREATE TABLE protected_branches (
id integer NOT NULL,
project_id integer NOT NULL,
project_id integer,
name character varying NOT NULL,
created_at timestamp without time zone,
updated_at timestamp without time zone,
code_owner_approval_required boolean DEFAULT false NOT NULL,
allow_force_push boolean DEFAULT false NOT NULL
allow_force_push boolean DEFAULT false NOT NULL,
namespace_id bigint,
CONSTRAINT protected_branches_project_id_namespace_id_any_not_null CHECK (((project_id IS NULL) <> (namespace_id IS NULL)))
);
CREATE SEQUENCE protected_branches_id_seq
@ -30225,6 +30227,8 @@ CREATE INDEX index_protected_branch_unprotect_access_levels_on_group_id ON prote
CREATE INDEX index_protected_branch_unprotect_access_levels_on_user_id ON protected_branch_unprotect_access_levels USING btree (user_id);
CREATE INDEX index_protected_branches_namespace_id ON protected_branches USING btree (namespace_id) WHERE (namespace_id IS NOT NULL);
CREATE INDEX index_protected_branches_on_project_id ON protected_branches USING btree (project_id);
CREATE INDEX index_protected_environment_approval_rules_on_group_id ON protected_environment_approval_rules USING btree (group_id);
@ -31099,8 +31103,6 @@ CREATE INDEX tmp_idx_project_features_on_releases_al_and_repo_al_partial ON proj
CREATE INDEX tmp_idx_vulnerabilities_on_id_where_report_type_7_99 ON vulnerabilities USING btree (id) WHERE (report_type = ANY (ARRAY[7, 99]));
CREATE INDEX tmp_index_approval_merge_request_rules_on_report_type_equal_one ON approval_merge_request_rules USING btree (id, report_type) WHERE (report_type = 1);
CREATE INDEX tmp_index_ci_job_artifacts_on_expire_at_where_locked_unknown ON ci_job_artifacts USING btree (expire_at, job_id) WHERE ((locked = 2) AND (expire_at IS NOT NULL));
CREATE INDEX tmp_index_ci_job_artifacts_on_id_expire_at_file_type_trace ON ci_job_artifacts USING btree (id) WHERE (((date_part('day'::text, timezone('UTC'::text, expire_at)) = ANY (ARRAY[(21)::double precision, (22)::double precision, (23)::double precision])) AND (date_part('minute'::text, timezone('UTC'::text, expire_at)) = ANY (ARRAY[(0)::double precision, (30)::double precision, (45)::double precision])) AND (date_part('second'::text, timezone('UTC'::text, expire_at)) = (0)::double precision)) OR (file_type = 3));
@ -33332,6 +33334,9 @@ ALTER TABLE ONLY security_scans
ALTER TABLE ONLY epics
ADD CONSTRAINT fk_dccd3f98fc FOREIGN KEY (assignee_id) REFERENCES users(id) ON DELETE SET NULL;
ALTER TABLE ONLY protected_branches
ADD CONSTRAINT fk_de9216e774 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY issues
ADD CONSTRAINT fk_df75a7c8b8 FOREIGN KEY (promoted_to_epic_id) REFERENCES epics(id) ON DELETE SET NULL;

View File

@ -494,7 +494,7 @@ Upload a recipe file to the package registry. You must use an upload URL that th
returned.
```plaintext
GET packages/conan/v1/files/:package_name/:package_version/:package_username/:package_channel/:recipe_revision/export/:file_name
PUT packages/conan/v1/files/:package_name/:package_version/:package_username/:package_channel/:recipe_revision/export/:file_name
```
| Attribute | Type | Required | Description |
@ -509,7 +509,8 @@ GET packages/conan/v1/files/:package_name/:package_version/:package_username/:pa
Provide the file contents in the request body:
```shell
curl --header "Authorization: Bearer <authenticate_token>" \
curl --request PUT \
--user <username>:<personal_access_token> \
--upload-file path/to/conanfile.py \
"https://gitlab.example.com/api/v4/packages/conan/v1/files/my-package/1.0/my-group+my-project/stable/0/export/conanfile.py"
```
@ -558,7 +559,7 @@ Upload a package file to the package registry. You must use an upload URL that t
returned.
```plaintext
GET packages/conan/v1/files/:package_name/:package_version/:package_username/:package_channel/:recipe_revision/package/:conan_package_reference/:package_revision/:file_name
PUT packages/conan/v1/files/:package_name/:package_version/:package_username/:package_channel/:recipe_revision/package/:conan_package_reference/:package_revision/:file_name
```
| Attribute | Type | Required | Description |
@ -575,9 +576,10 @@ GET packages/conan/v1/files/:package_name/:package_version/:package_username/:pa
Provide the file contents in the request body:
```shell
curl --header "Authorization: Bearer <authenticate_token>" \
curl --request PUT \
--user <username>:<personal_access_token> \
--upload-file path/to/conaninfo.txt \
"https://gitlab.example.com/api/v4/packages/conan/v1/files/my-package/1.0/my-group+my-project/stable/packages/103f6067a947f366ef91fc1b7da351c588d1827f/0/conaninfo.txt"
"https://gitlab.example.com/api/v4/packages/conan/v1/files/my-package/1.0/my-group+my-project/stable/0/package/103f6067a947f366ef91fc1b7da351c588d1827f/0/conaninfo.txt"
```
## Delete a Package (delete a Conan recipe)

View File

@ -61,8 +61,8 @@ PUT projects/:id/packages/debian/:file_name
```shell
curl --request PUT \
--user <username>:<personal_access_token> \
--upload-file path/to/mypkg.deb \
--header "Private-Token: <personal_access_token>" \
"https://gitlab.example.com/api/v4/projects/1/packages/debian/mypkg.deb"
```

View File

@ -65,7 +65,7 @@ curl --request PUT
--header "Content-Type: application/json"
--data @./path/to/metadata/file.json
--header "Authorization: Bearer <personal_access_token>" \
"https://gitlab.example.com/api/v4/projects/1/packages/npm/@myscope/my-pkg"
"https://gitlab.example.com/api/v4/projects/1/packages/npm/@myscope%2fmy-pkg"
```
The metadata file content is generated by npm, but looks something like this:

View File

@ -264,8 +264,10 @@ PUT projects/:id/packages/pypi
| `requires_python` | string | no | The PyPI required version. |
```shell
curl --request PUT \
--upload-file path/to/my.pypi.package-0.0.1.tar.gz \
curl --request POST \
--form 'content=@path/to/my.pypi.package-0.0.1.tar.gz' \
--form 'name=my.pypi.package' \
--form 'version=1.3.7' \
--user <username>:<personal_access_token> \
"https://gitlab.example.com/api/v4/projects/1/packages/pypi"
```

View File

@ -251,7 +251,14 @@ Based on user research, we may want to change certain features to work across or
- Specific features allow for cross-organization interactions, for example forking, search.
### Links
## Technical Proposals
The Pods architecture does have long-lasting implications for data processing, location, scalability and the GitLab architecture.
This section links all different technical proposals that are being evaluated.
- [Stateless Router That Uses a Cache to Pick Pods and Is Redirected When Wrong Pod Is Reached](proposal-stateless-router.md)
## Links
- [Internal Pods presentation](https://docs.google.com/presentation/d/1x1uIiN8FR9fhL7pzFh9juHOVcSxEY7d2_q4uiKKGD44/edit#slide=id.ge7acbdc97a_0_155)
- [Pods Epic](https://gitlab.com/groups/gitlab-org/-/epics/7582)
@ -259,7 +266,7 @@ Based on user research, we may want to change certain features to work across or
- [Shopify Pods architecture](https://shopify.engineering/a-pods-architecture-to-allow-shopify-to-scale)
- [Opstrace architecture](https://gitlab.com/gitlab-org/opstrace/opstrace/-/blob/main/docs/architecture/overview.md)
### Who
## Who
| Role | Who
|------------------------------|-------------------------|

View File

@ -0,0 +1,642 @@
---
stage: enablement
group: pods
comments: false
description: 'Pods Stateless Router Proposal'
---
DISCLAIMER:
This page may contain information related to upcoming products, features and
functionality. It is important to note that the information presented is for
informational purposes only, so please do not rely on the information for
purchasing or planning purposes. Just like with all projects, the items
mentioned on the page are subject to change or delay, and the development,
release, and timing of any products, features, or functionality remain at the
sole discretion of GitLab Inc.
This document is a work-in-progress and represents a very early state of the
Pods design. Significant aspects are not documented, though we expect to add
them in the future. This is one possible architecture for Pods, and we intend to
contrast this with alternatives before deciding which approach to implement.
This documentation will be kept even if we decide not to implement this so that
we can document the reasons for not choosing this approach.
# Proposal: Stateless Router
We will decompose `gitlab_users`, `gitlab_routes` and `gitlab_admin` related
tables so that they can be shared between all pods and allow any pod to
authenticate a user and route requests to the correct pod. Pods may receive
requests for the resources they don't own, but they know how to redirect back
to the correct pod.
The router is stateless and does not read from the `routes` database which
means that all interactions with the database still happen from the Rails
monolith. This architecture also supports regions by allowing for low traffic
databases to be replicated across regions.
Users are not directly exposed to the concept of Pods but instead they see
different data dependent on their currently chosen "organization".
[Organizations](index.md#organizations) will be a new model introduced to enforce isolation in the
application and allow us to decide which request route to which pod, since an
organization can only be on a single pod.
## Summary in diagrams
This shows how a user request routes via DNS to the nearest router and the router chooses a pod to send the request to.
```mermaid
graph TD;
user((User));
dns[DNS];
router_us(Router);
router_eu(Router);
pod_us0{Pod US0};
pod_us1{Pod US1};
pod_eu0{Pod EU0};
pod_eu1{Pod EU1};
user-->dns;
dns-->router_us;
dns-->router_eu;
subgraph Europe
router_eu-->pod_eu0;
router_eu-->pod_eu1;
end
subgraph United States
router_us-->pod_us0;
router_us-->pod_us1;
end
```
<details><summary>More detail</summary>
This shows that the router can actually send requests to any pod. The user will
get the closest router to them geographically.
```mermaid
graph TD;
user((User));
dns[DNS];
router_us(Router);
router_eu(Router);
pod_us0{Pod US0};
pod_us1{Pod US1};
pod_eu0{Pod EU0};
pod_eu1{Pod EU1};
user-->dns;
dns-->router_us;
dns-->router_eu;
subgraph Europe
router_eu-->pod_eu0;
router_eu-->pod_eu1;
end
subgraph United States
router_us-->pod_us0;
router_us-->pod_us1;
end
router_eu-.->pod_us0;
router_eu-.->pod_us1;
router_us-.->pod_eu0;
router_us-.->pod_eu1;
```
</details>
<details><summary>Even more detail</summary>
This shows the databases. `gitlab_users` and `gitlab_routes` exist only in the
US region but are replicated to other regions. Replication does not have an
arrow because it's too hard to read the diagram.
```mermaid
graph TD;
user((User));
dns[DNS];
router_us(Router);
router_eu(Router);
pod_us0{Pod US0};
pod_us1{Pod US1};
pod_eu0{Pod EU0};
pod_eu1{Pod EU1};
db_gitlab_users[(gitlab_users Primary)];
db_gitlab_routes[(gitlab_routes Primary)];
db_gitlab_users_replica[(gitlab_users Replica)];
db_gitlab_routes_replica[(gitlab_routes Replica)];
db_pod_us0[(gitlab_main/gitlab_ci Pod US0)];
db_pod_us1[(gitlab_main/gitlab_ci Pod US1)];
db_pod_eu0[(gitlab_main/gitlab_ci Pod EU0)];
db_pod_eu1[(gitlab_main/gitlab_ci Pod EU1)];
user-->dns;
dns-->router_us;
dns-->router_eu;
subgraph Europe
router_eu-->pod_eu0;
router_eu-->pod_eu1;
pod_eu0-->db_pod_eu0;
pod_eu0-->db_gitlab_users_replica;
pod_eu0-->db_gitlab_routes_replica;
pod_eu1-->db_gitlab_users_replica;
pod_eu1-->db_gitlab_routes_replica;
pod_eu1-->db_pod_eu1;
end
subgraph United States
router_us-->pod_us0;
router_us-->pod_us1;
pod_us0-->db_pod_us0;
pod_us0-->db_gitlab_users;
pod_us0-->db_gitlab_routes;
pod_us1-->db_gitlab_users;
pod_us1-->db_gitlab_routes;
pod_us1-->db_pod_us1;
end
router_eu-.->pod_us0;
router_eu-.->pod_us1;
router_us-.->pod_eu0;
router_us-.->pod_eu1;
```
</details>
## Summary of changes
1. Tables related to User data (including profile settings, authentication credentials, personal access tokens) are decomposed into a `gitlab_users` schema
1. The `routes` table is decomposed into `gitlab_routes` schema
1. The `application_settings` (and probably a few other instance level tables) are decomposed into `gitlab_admin` schema
1. A new column `routes.pod_id` is added to `routes` table
1. A new Router service exists to choose which pod to route a request to.
1. A new concept will be introduced in GitLab called an organization and a user can select a "default organization" and this will be a user level setting. The default organization is used to redirect users away from ambiguous routes like `/dashboard` to organization scoped routes like `/organizations/my-organization/-/dashboard`. Legacy users will have a special default organization that allows them to keep using global resources on `Pod US0`. All existing namespaces will initially move to this public organization.
1. If a pod receives a request for a `routes.pod_id` that it does not own, it returns a `302` with `X-Gitlab-Pod-Redirect` header so that the router can send the request to the correct pod. The correct pod can also set a header `X-Gitlab-Pod-Cache` which contains information about how this request should be cached to remember the pod. For example if the request was `/gitlab-org/gitlab` then the header would encode `/gitlab-org/* => Pod US0` (ie. any requests starting with `/gitlab-org/` can always be routed to `Pod US0`)
1. When the pod does not know (from the cache) which pod to send a request to, it just picks a random pod within its region
1. Writes to `gitlab_users` and `gitlab_routes` are sent to a primary PostgreSQL server in our `US` region but reads can come from replicas in the same region. This will add latency for these writes but we expect they are infrequent relative to the rest of GitLab.
## Detailed explanation of default organization in the first iteration
All users will get a new column `users.default_organization` which they can
control in user settings. We will introduce a concept of the
`GitLab.com Public` organization. This will be set as the default organization for all existing
users. This organization will allow the user to see data from all namespaces in
`Pod US0` (ie. our original GitLab.com instance). This behavior can be invisible to
existing users such that they don't even get told when they are viewing a
global page like `/dashboard` that it's even scoped to an organization.
Any new users with a default organization other than `GitLab.com Public` will have
a distinct user experience and will be fully aware that every page they load is
only ever scoped to a single organization. These users can never
load any global pages like `/dashboard` and will end up being redirected to
`/organizations/<DEFAULT_ORGANIZATION>/-/dashboard`. This may also be the case
for legacy APIs and such users may only ever be able to use APIs scoped to a
organization.
## Detailed explanation of admin settings
We believe that maintaining and synchronizing admin settings will be
frustrating and painful so to avoid this we will decompose and share all admin
settings in the `gitlab_admin` schema. This should be safe (similar to other
shared schemas) because these receive very little write traffic.
In cases where different pods need different settings (eg. the
Elasticsearch URL), we will either decide to use a templated
format in the relevant `application_settings` row which allows it to be dynamic
per pod. Alternatively if that proves difficult we'll introduce a new table
called `per_pod_application_settings` and this will have 1 row per pod to allow
setting different settings per pod. It will still be part of the `gitlab_admin`
schema and shared which will allow us to centrally manage it and simplify
keeping settings in sync for all pods.
## Pros
1. Router is stateless and can live in many regions. We use Anycast DNS to resolve to nearest region for the user.
1. Pods can receive requests for namespaces in the wrong pod and the user
still gets the right response as well as caching at the router that
ensures the next request is sent to the correct pod so the next request
will go to the correct pod
1. The majority of the code still lives in `gitlab` rails codebase. The Router doesn't actually need to understand how GitLab URLs are composed.
1. Since the responsibility to read and write `gitlab_users`,
`gitlab_routes` and `gitlab_admin` still lives in Rails it means minimal
changes will be needed to the Rails application compared to extracting
services that need to isolate the domain models and build new interfaces.
1. Compared to a separate routing service this allows the Rails application
to encode more complex rules around how to map URLs to the correct pod
and may work for some existing API endpoints.
1. All the new infrastructure (just a router) is optional and a single-pod
self-managed installation does not even need to run the Router and there are
no other new services.
## Cons
1. `gitlab_users`, `gitlab_routes` and `gitlab_admin` databases may need to be
replicated across regions and writes need to go across regions. We need to
do an analysis on write TPS for the relevant tables to determine if this is
feasible.
1. Sharing access to the database from many different Pods means that they are
all coupled at the Postgres schema level and this means changes to the
database schema need to be done carefully in sync with the deployment of all
Pods. This limits us to ensure that Pods are kept in closely similar
versions compared to an architecture with shared services that have an API
we control.
1. Although most data is stored in the right region there can be requests
proxied from another region which may be an issue for certain types
of compliance.
1. Data in `gitlab_users` and `gitlab_routes` databases must be replicated in
all regions which may be an issue for certain types of compliance.
1. The router cache may need to be very large if we get a wide variety of URLs
(ie. long tail). In such a case we may need to implement a 2nd level of
caching in user cookies so their frequently accessed pages always go to the
right pod the first time.
1. Having shared database access for `gitlab_users` and `gitlab_routes`
from multiple pods is an unusual architecture decision compared to
extracting services that are called from multiple pods.
1. It is very likely we won't be able to find cacheable elements of a
GraphQL URL and often existing GraphQL endpoints are heavily dependent on
ids that won't be in the `routes` table so pods won't necessarily know
what pod has the data. As such we'll probably have to update our GraphQL
calls to include an organization context in the path like
`/api/organizations/<organization>/graphql`.
1. This architecture implies that implemented endpoints can only access data
that are readily accessible on a given Pod, but are unlikely
to aggregate information from many Pods.
## Example database configuration
Handling shared `gitlab_users`, `gitlab_routes` and `gitlab_admin` databases, while having dedicated `gitlab_main` and `gitlab_ci` databases should already be handled by the way we use `config/database.yml`. We should also, already be able to handle the dedicated EU replicas while having a single US primary for `gitlab_users` and `gitlab_routes`. Below is a snippet of part of the database configuration for the Pod architecture described above.
<details><summary>Pod US0</summary>
```yaml
# config/database.yml
production:
main:
host: postgres-main.pod-us0.primary.consul
load_balancing:
discovery: postgres-main.pod-us0.replicas.consul
ci:
host: postgres-ci.pod-us0.primary.consul
load_balancing:
discovery: postgres-ci.pod-us0.replicas.consul
users:
host: postgres-users-primary.consul
load_balancing:
discovery: postgres-users-replicas.us.consul
routes:
host: postgres-routes-primary.consul
load_balancing:
discovery: postgres-routes-replicas.us.consul
admin:
host: postgres-admin-primary.consul
load_balancing:
discovery: postgres-admin-replicas.us.consul
```
</details>
<details><summary>Pod EU0</summary>
```yaml
# config/database.yml
production:
main:
host: postgres-main.pod-eu0.primary.consul
load_balancing:
discovery: postgres-main.pod-eu0.replicas.consul
ci:
host: postgres-ci.pod-eu0.primary.consul
load_balancing:
discovery: postgres-ci.pod-eu0.replicas.consul
users:
host: postgres-users-primary.consul
load_balancing:
discovery: postgres-users-replicas.eu.consul
routes:
host: postgres-routes-primary.consul
load_balancing:
discovery: postgres-routes-replicas.eu.consul
admin:
host: postgres-admin-primary.consul
load_balancing:
discovery: postgres-admin-replicas.eu.consul
```
</details>
## Request flows
1. `gitlab-org` is a top level namespace and lives in `Pod US0` in the `GitLab.com Public` organization
1. `my-company` is a top level namespace and lives in `Pod EU0` in the `my-organization` organization
### Experience for paying user that is part of `my-organization`
Such a user will have a default organization set to `/my-organization` and will be
unable to load any global routes outside of this organization. They may load other
projects/namespaces but their MR/Todo/Issue counts at the top of the page will
not be correctly populated in the first iteration. The user will be aware of
this limitation.
#### Navigates to `/my-company/my-project` while logged in
1. User is in Europe so DNS resolves to the router in Europe
1. They request `/my-company/my-project` without the router cache, so the router chooses randomly `Pod EU1`
1. `Pod EU1` does not have `/my-company`, but it knows that it lives in `Pod EU0` so it redirects the router to `Pod EU0`
1. `Pod EU0` returns the correct response as well as setting the cache headers for the router `/my-company/* => Pod EU0`
1. The router now caches and remembers any request paths matching `/my-company/*` should go to `Pod EU0`
```mermaid
sequenceDiagram
participant user as User
participant router_eu as Router EU
participant pod_eu0 as Pod EU0
participant pod_eu1 as Pod EU1
user->>router_eu: GET /my-company/my-project
router_eu->>pod_eu1: GET /my-company/my-project
pod_eu1->>router_eu: 302 /my-company/my-project X-Gitlab-Pod-Redirect={pod:Pod EU0}
router_eu->>pod_eu0: GET /my-company/my-project
pod_eu0->>user: <h1>My Project... X-Gitlab-Pod-Cache={path_prefix:/my-company/}
```
#### Navigates to `/my-company/my-project` while not logged in
1. User is in Europe so DNS resolves to the router in Europe
1. The router does not have `/my-company/*` cached yet so it chooses randomly `Pod EU1`
1. `Pod EU1` redirects them through a login flow
1. Still, they request `/my-company/my-project` without the router cache, so the router chooses a random pod `Pod EU1`
1. `Pod EU1` does not have `/my-company`, but it knows that it lives in `Pod EU0` so it redirects the router to `Pod EU0`
1. `Pod EU0` returns the correct response as well as setting the cache headers for the router `/my-company/* => Pod EU0`
1. The router now caches and remembers any request paths matching `/my-company/*` should go to `Pod EU0`
```mermaid
sequenceDiagram
participant user as User
participant router_eu as Router EU
participant pod_eu0 as Pod EU0
participant pod_eu1 as Pod EU1
user->>router_eu: GET /my-company/my-project
router_eu->>pod_eu1: GET /my-company/my-project
pod_eu1->>user: 302 /users/sign_in?redirect=/my-company/my-project
user->>router_eu: GET /users/sign_in?redirect=/my-company/my-project
router_eu->>pod_eu1: GET /users/sign_in?redirect=/my-company/my-project
pod_eu1->>user: <h1>Sign in...
user->>router_eu: POST /users/sign_in?redirect=/my-company/my-project
router_eu->>pod_eu1: POST /users/sign_in?redirect=/my-company/my-project
pod_eu1->>user: 302 /my-company/my-project
user->>router_eu: GET /my-company/my-project
router_eu->>pod_eu1: GET /my-company/my-project
pod_eu1->>router_eu: 302 /my-company/my-project X-Gitlab-Pod-Redirect={pod:Pod EU0}
router_eu->>pod_eu0: GET /my-company/my-project
pod_eu0->>user: <h1>My Project... X-Gitlab-Pod-Cache={path_prefix:/my-company/}
```
#### Navigates to `/my-company/my-other-project` after last step
1. User is in Europe so DNS resolves to the router in Europe
1. The router cache now has `/my-company/* => Pod EU0`, so the router chooses `Pod EU0`
1. `Pod EU0` returns the correct response as well as the cache header again
```mermaid
sequenceDiagram
participant user as User
participant router_eu as Router EU
participant pod_eu0 as Pod EU0
participant pod_eu1 as Pod EU1
user->>router_eu: GET /my-company/my-project
router_eu->>pod_eu0: GET /my-company/my-project
pod_eu0->>user: <h1>My Project... X-Gitlab-Pod-Cache={path_prefix:/my-company/}
```
#### Navigates to `/gitlab-org/gitlab` after last step
1. User is in Europe so DNS resolves to the router in Europe
1. The router has no cached value for this URL so randomly chooses `Pod EU0`
1. `Pod EU0` redirects the router to `Pod US0`
1. `Pod US0` returns the correct response as well as the cache header again
```mermaid
sequenceDiagram
participant user as User
participant router_eu as Router EU
participant pod_eu0 as Pod EU0
participant pod_us0 as Pod US0
user->>router_eu: GET /gitlab-org/gitlab
router_eu->>pod_eu0: GET /gitlab-org/gitlab
pod_eu0->>router_eu: 302 /gitlab-org/gitlab X-Gitlab-Pod-Redirect={pod:Pod US0}
router_eu->>pod_us0: GET /gitlab-org/gitlab
pod_us0->>user: <h1>GitLab.org... X-Gitlab-Pod-Cache={path_prefix:/gitlab-org/}
```
In this case the user is not on their "default organization" so their TODO
counter will not include their normal todos. We may choose to highlight this in
the UI somewhere. A future iteration may be able to fetch that for them from
their default organization.
#### Navigates to `/`
1. User is in Europe so DNS resolves to the router in Europe
1. Router does not have a cache for `/` route (specifically rails never tells it to cache this route)
1. The Router choose `Pod EU0` randomly
1. The Rails application knows the user's default organization is `/my-organization`, so
it redirects the user to `/organizations/my-organization/-/dashboard`
1. The Router has a cached value for `/organizations/my-organization/*` so it then sends the
request to `POD EU0`
1. `Pod EU0` serves up a new page `/organizations/my-organization/-/dashboard` which is the same
dashboard view we have today but scoped to an organization clearly in the UI
1. The user is (optionally) presented with a message saying that data on this page is only
from their default organization and that they can change their default
organization if it's not right.
```mermaid
sequenceDiagram
participant user as User
participant router_eu as Router EU
participant pod_eu0 as Pod EU0
user->>router_eu: GET /
router_eu->>pod_eu0: GET /
pod_eu0->>user: 302 /organizations/my-organization/-/dashboard
user->>router_eu: GET /organizations/my-organization/-/dashboard
router_eu->>pod_eu0: GET /organizations/my-organization/-/dashboard
pod_eu0->>user: <h1>My Company Dashboard... X-Gitlab-Pod-Cache={path_prefix:/organizations/my-organization/}
```
#### Navigates to `/dashboard`
As above, they will end up on `/organizations/my-organization/-/dashboard` as
the rails application will already redirect `/` to the dashboard page.
### Navigates to `/not-my-company/not-my-project` while logged in (but they don't have access since this project/group is private)
1. User is in Europe so DNS resolves to the router in Europe
1. The router knows that `/not-my-company` lives in `Pod US1` so sends the request to this
1. The user does not have access so `Pod US1` returns 404
```mermaid
sequenceDiagram
participant user as User
participant router_eu as Router EU
participant pod_us1 as Pod US1
user->>router_eu: GET /not-my-company/not-my-project
router_eu->>pod_us1: GET /not-my-company/not-my-project
pod_us1->>user: 404
```
#### Creates a new top level namespace
The user will be asked which organization they want the namespace to belong to.
If they select `my-organization` then it will end up on the same pod as all
other namespaces in `my-organization`. If they select nothing we default to
`GitLab.com Public` and it is clear to the user that this is isolated from
their existing organization such that they won't be able to see data from both
on a single page.
### Experience for GitLab team member that is part of `/gitlab-org`
Such a user is considered a legacy user and has their default organization set to
`GitLab.com Public`. This is a "meta" organization that does not really exist but
the Rails application knows to interpret this organization to mean that they are
allowed to use legacy global functionality like `/dashboard` to see data across
namespaces located on `Pod US0`. The rails backend also knows that the default pod to render any ambiguous
routes like `/dashboard` is `Pod US0`. Lastly the user will be allowed to
navigate to organizations on another pod like `/my-organization` but when they do the
user will see a message indicating that some data may be missing (e.g. the
MRs/Issues/Todos counts).
#### Navigates to `/gitlab-org/gitlab` while not logged in
1. User is in the US so DNS resolves to the US router
1. The router knows that `/gitlab-org` lives in `Pod US0` so sends the request
to this pod
1. `Pod US0` serves up the response
```mermaid
sequenceDiagram
participant user as User
participant router_us as Router US
participant pod_us0 as Pod US0
user->>router_us: GET /gitlab-org/gitlab
router_us->>pod_us0: GET /gitlab-org/gitlab
pod_us0->>user: <h1>GitLab.org... X-Gitlab-Pod-Cache={path_prefix:/gitlab-org/}
```
#### Navigates to `/`
1. User is in US so DNS resolves to the router in US
1. Router does not have a cache for `/` route (specifically rails never tells it to cache this route)
1. The Router chooses `Pod US1` randomly
1. The Rails application knows the users default organization is `GitLab.com Public`, so
it redirects the user to `/dashboard` (only legacy users can see
`/dashboard` global view)
1. Router does not have a cache for `/dashboard` route (specifically rails never tells it to cache this route)
1. The Router chooses `Pod US1` randomly
1. The Rails application knows the users default organization is `GitLab.com Public`, so
it allows the user to load `/dashboard` (only legacy users can see
`/dashboard` global view) and redirects the router to the legacy pod, which is `Pod US0`
1. `Pod US0` serves up the global view dashboard page `/dashboard` which is the same
dashboard view we have today
```mermaid
sequenceDiagram
participant user as User
participant router_us as Router US
participant pod_us0 as Pod US0
participant pod_us1 as Pod US1
user->>router_us: GET /
router_us->>pod_us1: GET /
pod_us1->>user: 302 /dashboard
user->>router_us: GET /dashboard
router_us->>pod_us1: GET /dashboard
pod_us1->>router_us: 302 /dashboard X-Gitlab-Pod-Redirect={pod:Pod US0}
router_us->>pod_us0: GET /dashboard
pod_us0->>user: <h1>Dashboard...
```
#### Navigates to `/my-company/my-other-project` while logged in (but they don't have access since this project is private)
They get a 404.
### Experience for non-logged in users
Flow is similar to logged in users except global routes like `/dashboard` will
redirect to the login page as there is no default organization to choose from.
### A new customer signs up
They will be asked if they are already part of an organization or if they'd
like to create one. If they choose neither they end up on the default
`GitLab.com Public` organization.
### An organization is moved from 1 pod to another
TODO
### GraphQL/API requests which don't include the namespace in the URL
TODO
### The autocomplete suggestion functionality in the search bar which remembers recent issues/MRs
TODO
### Global search
TODO
## Administrator
### Loads `/admin` page
1. Router picks a random pod `Pod US0`
1. Pod US0 redirects the user to `/admin/pods/podus0`
1. Pod US0 renders an admin page and also returns a cache header to cache `/admin/pods/podus0/* => Pod US0`. The admin page contains a dropdown showing other pods they could select and it changes the query parameter.
Admin settings in Postgres are shared across all pods to avoid
divergence but we still make it clear in the URL and UI which pod is serving
the admin page as there is dynamic data being generated from these pages and
the operator may want to view a specific pod.
## More Technical Problems To Solve
### Replicating User Sessions Between All Pods
Today user sessions live in Redis but each pod will have their own Redis instance. We already use a dedicated Redis instance for sessions so we could consider sharing this with all pods like we do with `gitlab_users` PostgreSQL database. But an important consideration will be latency as we would still want to mostly fetch sessions from the same region.
An alternative might be that user sessions get moved to a JWT payload that encodes all the session data but this has downsides. For example, it is difficult to expire a user session, when their password changes or for other reasons, if the session lives in a JWT controlled by the user.
### How do we migrate between Pods
Migrating data between pods will need to factor all data stores:
1. PostgreSQL
1. Redis Shared State
1. Gitaly
1. Elasticsearch
### Is it still possible to leak the existence of private groups via a timing attack?
If you have a router in EU, and you know that the EU router by default redirects
to EU-located Pods, you know their latency (let's assume 10ms). Now, if your
request is bounced back and redirected to US which has different latency
(let's assume the roundtrip will be around 60ms) you can deduce that the 404 was
returned by US Pod and know that your 404 is in fact 403.
We may defer this until we actually implement a pod in a different region. Such timing attacks are already theoretically possible with the way we do permission checks today but the timing difference is probably too small to be able to detect.
One technique to mitigate this risk might be to have the router add a random
delay to any request that returns 404 from a pod.
## Should runners be shared across all pods?
We have 2 options and we should decide which is easier:
1. Decompose runner registration and queuing tables and share them across all
pods. This may have implications for scalability, and we'd need to consider
if this would include group/project runners as this may have scalability
concerns as these are high traffic tables that would need to be shared.
1. Runners are registered per-pod and, we probably have a separate fleet of
runners for every pod or just register the same runners to many pods which
may have implications for queueing
## How do we guarantee unique ids across all pods for things that cannot conflict?
This project assumes at least namespaces and projects have unique ids across
all pods as many requests need to be routed based on their ID. Since those
tables are across different databases then guaranteeing a unique ID will
require a new solution. There are likely other tables where unique IDs are
necessary and depending on how we resolve routing for GraphQL and other APIs
and other design goals it may be determined that we want the primary key to be
unique for all tables.

View File

@ -470,3 +470,14 @@ class ConvertTableToListPartitioning < Gitlab::Database::Migration[2.0]
end
end
```
NOTE:
Do not forget to set the sequence name explicitly in your model because it will
be owned by the routing table and `ActiveRecord` can't determine it. This can
be cleaned up after the `table_name` is changed to the routing table.
```ruby
class Model < ApplicationRecord
self.sequence_name = 'model_id_seq'
end
```

View File

@ -351,6 +351,12 @@ RSpec.describe MergeRequests::UpdateHeadPipelineWorker do
let(:event) { pipeline_created_event }
end
# This shared example ensures that a published event is ignored. This might be useful for
# conditional dispatch testing.
it_behaves_like 'ignores the published event' do
let(:event) { pipeline_created_event }
end
it 'does something' do
# This helper directly executes `perform` ensuring that `handle_event` is called correctly.
consume_event(subscriber: described_class, event: pipeline_created_event)

View File

@ -1,6 +1,6 @@
---
stage: none
group: unassigned
group: Tutorials
info: For assistance with this tutorials page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments-to-other-projects-and-subjects.
---

View File

@ -1,6 +1,6 @@
---
stage: none
group: unassigned
group: Tutorials
info: For assistance with this tutorial, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments-to-other-projects-and-subjects.
---

View File

@ -1,6 +1,6 @@
---
stage: none
group: unassigned
group: Tutorials
info: For assistance with this tutorial, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments-to-other-projects-and-subjects.
---

View File

@ -342,7 +342,7 @@ To move an issue:
#### From the issues list
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/15991) in GitLab 15.5.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/15991) in GitLab 15.6.
You can move multiple issues at the same time when you're in a project.
You can't move tasks or test cases.
@ -712,8 +712,11 @@ Up to five similar issues, sorted by most recently updated, are displayed below
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/213567) in GitLab 13.7.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/218618) in GitLab 15.4: health status is visible on issue cards in issue boards.
To help you track issue statuses, you can assign a status to each issue.
This status marks issues as progressing as planned or needing attention to keep on schedule.
To better track the risk in meeting your plans, you can assign a health status to each issue.
You can use health status to signal to others in your organization whether issues are progressing
as planned or need attention to stay on schedule.
Incorporate a review of issue health status into your daily stand-up, project status reports, or weekly meetings to address risks to timely delivery of your planned work.
Prerequisites:

View File

@ -23,8 +23,20 @@ module API
content_type :json, DOCKER_DISTRIBUTION_EVENTS_V1_JSON
format :json
desc 'Receives notifications from the container registry when an operation occurs' do
detail 'This feature was introduced in GitLab 12.10'
consumes [:json, DOCKER_DISTRIBUTION_EVENTS_V1_JSON]
end
params do
requires :events, type: Array
requires :events, type: Array, desc: 'Event notifications' do
requires :action, type: String, desc: 'The action to perform, `push`, `delete`',
values: %w[push delete].freeze
optional :target, type: Hash, desc: 'The target of the action' do
optional :tag, type: String, desc: 'The target tag'
optional :repository, type: String, desc: 'The target repository'
optional :digest, type: String, desc: 'Unique identifier for target image manifest'
end
end
end
# This endpoint is used by Docker Registry to push a set of event

View File

@ -673,7 +673,6 @@ module API
finder_params[:with_issues_enabled] = true if params[:with_issues_enabled].present?
finder_params[:with_merge_requests_enabled] = true if params[:with_merge_requests_enabled].present?
finder_params[:without_deleted] = true
finder_params[:search_namespaces] = true if params[:search_namespaces].present?
finder_params[:user] = params.delete(:user) if params[:user]
finder_params[:id_after] = sanitize_id_param(params[:id_after]) if params[:id_after]

View File

@ -104,7 +104,7 @@ module API
end
params do
requires :access_level, type: Integer, desc: 'A valid access level (defaults: `30`, developer access level)'
requires :user_id, types: [Integer, String], desc: 'The user ID of the new member or multiple IDs separated by commas.'
requires :user_id, types: Array[Integer], coerce_with: Validations::Types::CommaSeparatedToIntegerArray.coerce, desc: 'The user ID of the new member or multiple IDs separated by commas.'
optional :expires_at, type: DateTime, desc: 'Date string in the format YEAR-MONTH-DAY'
optional :invite_source, type: String, desc: 'Source that triggered the member creation process', default: 'members-api'
optional :tasks_to_be_done, type: Array[String], coerce_with: Validations::Types::CommaSeparatedToArray.coerce, desc: 'Tasks the inviter wants the member to do'

View File

@ -173,7 +173,7 @@ module API
params do
requires :from, type: String, desc: 'The commit, branch name, or tag name to start comparison'
requires :to, type: String, desc: 'The commit, branch name, or tag name to stop comparison'
optional :from_project_id, type: String, desc: 'The project to compare from'
optional :from_project_id, type: Integer, desc: 'The project to compare from'
optional :straight, type: Boolean, desc: 'Comparison method, `true` for direct comparison between `from` and `to` (`from`..`to`), `false` to compare using merge base (`from`...`to`)', default: false
end
get ':id/repository/compare', urgency: :low do
@ -215,7 +215,7 @@ module API
success Entities::Commit
end
params do
requires :refs, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce
requires :refs, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, desc: 'The refs to find the common ancestor of, multiple refs can be passed'
end
get ':id/repository/merge_base' do
refs = params[:refs]

View File

@ -6,6 +6,7 @@ module Gitlab
include Migrations::ReestablishedConnectionStack
include Migrations::BackgroundMigrationHelpers
include Migrations::BatchedBackgroundMigrationHelpers
include Migrations::LockRetriesHelpers
include DynamicModelHelpers
include RenameTableHelpers
include AsyncIndexes::MigrationHelpers
@ -405,52 +406,6 @@ module Gitlab
end
end
# Executes the block with a retry mechanism that alters the +lock_timeout+ and +sleep_time+ between attempts.
# The timings can be controlled via the +timing_configuration+ parameter.
# If the lock was not acquired within the retry period, a last attempt is made without using +lock_timeout+.
#
# Note this helper uses subtransactions when run inside an already open transaction.
#
# ==== Examples
# # Invoking without parameters
# with_lock_retries do
# drop_table :my_table
# end
#
# # Invoking with custom +timing_configuration+
# t = [
# [1.second, 1.second],
# [2.seconds, 2.seconds]
# ]
#
# with_lock_retries(timing_configuration: t) do
# drop_table :my_table # this will be retried twice
# end
#
# # Disabling the retries using an environment variable
# > export DISABLE_LOCK_RETRIES=true
#
# with_lock_retries do
# drop_table :my_table # one invocation, it will not retry at all
# end
#
# ==== Parameters
# * +timing_configuration+ - [[ActiveSupport::Duration, ActiveSupport::Duration], ...] lock timeout for the block, sleep time before the next iteration, defaults to `Gitlab::Database::WithLockRetries::DEFAULT_TIMING_CONFIGURATION`
# * +logger+ - [Gitlab::JsonLogger]
# * +env+ - [Hash] custom environment hash, see the example with `DISABLE_LOCK_RETRIES`
def with_lock_retries(*args, **kwargs, &block)
raise_on_exhaustion = !!kwargs.delete(:raise_on_exhaustion)
merged_args = {
connection: connection,
klass: self.class,
logger: Gitlab::BackgroundMigration::Logger,
allow_savepoints: true
}.merge(kwargs)
Gitlab::Database::WithLockRetries.new(**merged_args)
.run(raise_on_exhaustion: raise_on_exhaustion, &block)
end
def true_value
Database.true_value
end

View File

@ -0,0 +1,57 @@
# frozen_string_literal: true
module Gitlab
  module Database
    module Migrations
      module LockRetriesHelpers
        # Runs the given block under a retry mechanism that varies the
        # +lock_timeout+ and the sleep interval between attempts. The timings
        # are driven by the +timing_configuration+ keyword. When the lock is
        # still not acquired after all configured attempts, one final attempt
        # is made without a +lock_timeout+.
        #
        # Note: when invoked inside an already open transaction, this helper
        # relies on subtransactions.
        #
        # ==== Examples
        #   # Default timings
        #   with_lock_retries do
        #     drop_table :my_table
        #   end
        #
        #   # Custom timing_configuration
        #   t = [
        #     [1.second, 1.second],
        #     [2.seconds, 2.seconds]
        #   ]
        #
        #   with_lock_retries(timing_configuration: t) do
        #     drop_table :my_table # this will be retried twice
        #   end
        #
        #   # Disabling the retries through the environment
        #   # > export DISABLE_LOCK_RETRIES=true
        #
        #   with_lock_retries do
        #     drop_table :my_table # one invocation, it will not retry at all
        #   end
        #
        # ==== Parameters
        # * +timing_configuration+ - [[ActiveSupport::Duration, ActiveSupport::Duration], ...]
        #   lock timeout for the block and sleep time before the next iteration;
        #   defaults to `Gitlab::Database::WithLockRetries::DEFAULT_TIMING_CONFIGURATION`
        # * +logger+ - [Gitlab::JsonLogger]
        # * +env+ - [Hash] custom environment hash, see the example with `DISABLE_LOCK_RETRIES`
        # * +raise_on_exhaustion+ - [Boolean] raise when every attempt failed
        def with_lock_retries(*args, **kwargs, &block)
          # Pull the flag out of kwargs (it is not a WithLockRetries
          # constructor argument) and normalize it to a strict boolean.
          exhaustion_flag = kwargs.delete(:raise_on_exhaustion) ? true : false

          retry_arguments = {
            connection: connection,
            klass: self.class,
            logger: Gitlab::BackgroundMigration::Logger,
            allow_savepoints: true
          }.merge(kwargs)

          runner = Gitlab::Database::WithLockRetries.new(**retry_arguments)
          runner.run(raise_on_exhaustion: exhaustion_flag, &block)
        end
      end
    end
  end
end

View File

@ -87,7 +87,6 @@ module Gitlab
def validate_archive_path
Gitlab::Utils.check_path_traversal!(@archive_path)
raise(ServiceError, 'Archive path is not a string') unless @archive_path.is_a?(String)
raise(ServiceError, 'Archive path is a symlink') if File.lstat(@archive_path).symlink?
raise(ServiceError, 'Archive path is not a file') unless File.file?(@archive_path)
end

View File

@ -14,7 +14,10 @@ module Gitlab
# Also see https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24223#note_284122580
# It also checks for ALT_SEPARATOR aka '\' (forward slash)
def check_path_traversal!(path)
return unless path.is_a?(String)
return unless path
path = path.to_s if path.is_a?(Gitlab::HashedPath)
raise PathTraversalAttackError, 'Invalid path' unless path.is_a?(String)
path = decode_path(path)
path_regex = %r{(\A(\.{1,2})\z|\A\.\.[/\\]|[/\\]\.\.\z|[/\\]\.\.[/\\]|\n)}

View File

@ -74,6 +74,7 @@ namespace :tw do
CodeOwnerRule.new('Style Guide', '@sselhorn'),
CodeOwnerRule.new('Testing', '@eread'),
CodeOwnerRule.new('Threat Insights', '@claytoncornell'),
CodeOwnerRule.new('Tutorials', '@kpaizee'),
CodeOwnerRule.new('Utilization', '@fneill'),
CodeOwnerRule.new('Vulnerability Research', '@claytoncornell'),
CodeOwnerRule.new('Workspace', '@lciutacu')

View File

@ -5114,9 +5114,6 @@ msgstr ""
msgid "Are you sure you want to delete %{name}?"
msgstr ""
msgid "Are you sure you want to delete these artifacts?"
msgstr ""
msgid "Are you sure you want to delete this %{commentType}?"
msgstr ""
@ -5269,6 +5266,21 @@ msgstr ""
msgid "Artifacts"
msgstr ""
msgid "Artifacts|An error occurred while deleting the artifact"
msgstr ""
msgid "Artifacts|An error occurred while retrieving job artifacts"
msgstr ""
msgid "Artifacts|Artifacts"
msgstr ""
msgid "Artifacts|Browse"
msgstr ""
msgid "Artifacts|Total artifacts size"
msgstr ""
msgid "As we continue to build more features for SAST, we'd love your feedback on the SAST configuration feature in %{linkStart}this issue%{linkEnd}."
msgstr ""
@ -7092,9 +7104,6 @@ msgstr ""
msgid "Browse Files"
msgstr ""
msgid "Browse artifacts"
msgstr ""
msgid "Browse files"
msgstr ""
@ -9700,12 +9709,18 @@ msgstr ""
msgid "Comment/Reply (quoting selected text)"
msgstr ""
msgid "Commenting on files that are only moved or renamed is currently not supported"
msgstr ""
msgid "Commenting on files that replace or are replaced by symbolic links is currently not supported."
msgstr ""
msgid "Commenting on symbolic links that replace or are replaced by files is currently not supported."
msgstr ""
msgid "Commenting on this line is currently not supported"
msgstr ""
msgid "Comments"
msgstr ""
@ -11508,9 +11523,6 @@ msgstr ""
msgid "Creating graphs uses the data from the Prometheus server. If this takes a long time, ensure that data is available."
msgstr ""
msgid "Creation date"
msgstr ""
msgid "Creator"
msgstr ""
@ -12757,9 +12769,6 @@ msgstr ""
msgid "Delete account"
msgstr ""
msgid "Delete artifacts"
msgstr ""
msgid "Delete asset"
msgstr ""
@ -27060,9 +27069,6 @@ msgstr ""
msgid "No job log"
msgstr ""
msgid "No jobs to show"
msgstr ""
msgid "No label"
msgstr ""
@ -42543,9 +42549,6 @@ msgstr ""
msgid "Total Score"
msgstr ""
msgid "Total artifacts size: %{total_size}"
msgstr ""
msgid "Total cores (CPUs)"
msgstr ""
@ -47341,6 +47344,12 @@ msgstr ""
msgid "cannot be added since you've reached your %{free_limit} member limit for %{namespace_name}"
msgstr ""
msgid "cannot be associated with a subgroup"
msgstr ""
msgid "cannot be associated with both a Group and a Project"
msgstr ""
msgid "cannot be changed"
msgstr ""
@ -48617,6 +48626,9 @@ msgstr ""
msgid "must be an email you have verified"
msgstr ""
msgid "must be associated with a Group or a Project"
msgstr ""
msgid "must be greater than start date"
msgstr ""

View File

@ -1,5 +1,7 @@
# frozen_string_literal: true
require_relative '../../../lib/gitlab/json'
module RuboCop
module Cop
module Gitlab

View File

@ -8,10 +8,6 @@ RSpec.describe GraphqlController do
# two days is enough to make timezones irrelevant
let_it_be(:last_activity_on) { 2.days.ago.to_date }
before do
stub_feature_flags(graphql: true)
end
describe 'rescue_from' do
let_it_be(:message) { 'green ideas sleep furiously' }

View File

@ -13,7 +13,8 @@ FactoryBot.define do
end
after(:create) do |protected_branch, evaluator|
break unless protected_branch.project&.persisted?
# Do not use `break` because it will cause `LocalJumpError`
next unless protected_branch.project&.persisted?
ProtectedBranches::CacheService.new(protected_branch.project).refresh
end

View File

@ -350,43 +350,6 @@ RSpec.describe ProjectsFinder do
end
end
describe 'filter by without_deleted' do
let_it_be(:pending_delete_project) { create(:project, :public, pending_delete: true) }
let(:params) { { without_deleted: without_deleted } }
shared_examples 'returns all projects' do
it { expect(subject).to include(public_project, internal_project, pending_delete_project) }
end
context 'when without_deleted is true' do
let(:without_deleted) { true }
it 'returns projects that are not pending_delete' do
expect(subject).not_to include(pending_delete_project)
expect(subject).to include(public_project, internal_project)
end
end
context 'when without_deleted is false' do
let(:without_deleted) { false }
it_behaves_like 'returns all projects'
end
context 'when without_deleted is nil' do
let(:without_deleted) { nil }
it_behaves_like 'returns all projects'
end
context 'when without_deleted is not present' do
let(:params) { {} }
it_behaves_like 'returns all projects'
end
end
describe 'filter by last_activity_after' do
let(:params) { { last_activity_after: 60.minutes.ago } }
@ -398,6 +361,15 @@ RSpec.describe ProjectsFinder do
it { is_expected.to match_array([internal_project]) }
end
describe 'always filters by without_deleted' do
let_it_be(:pending_delete_project) { create(:project, :public, pending_delete: true) }
it 'returns projects that are not pending_delete' do
expect(subject).not_to include(pending_delete_project)
expect(subject).to include(public_project, internal_project)
end
end
describe 'filter by last_activity_before' do
let(:params) { { last_activity_before: 60.minutes.ago } }

View File

@ -0,0 +1,67 @@
import { GlBadge, GlButton } from '@gitlab/ui';
import mockGetJobArtifactsResponse from 'test_fixtures/graphql/artifacts/graphql/queries/get_job_artifacts.query.graphql.json';
import { numberToHumanSize } from '~/lib/utils/number_utils';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import ArtifactRow from '~/artifacts/components/artifact_row.vue';

describe('ArtifactRow component', () => {
  // First artifact of the first job from the GraphQL fixture, passed as the row's prop.
  const artifact = mockGetJobArtifactsResponse.data.project.jobs.nodes[0].artifacts.nodes[0];

  let wrapper;

  // Finders scoped to the mounted wrapper.
  function findName() {
    return wrapper.findByTestId('job-artifact-row-name');
  }
  function findBadge() {
    return wrapper.findComponent(GlBadge);
  }
  function findSize() {
    return wrapper.findByTestId('job-artifact-row-size');
  }
  function findDownloadButton() {
    return wrapper.findByTestId('job-artifact-row-download-button');
  }
  function findDeleteButton() {
    return wrapper.findByTestId('job-artifact-row-delete-button');
  }

  // Mounts the component under test; shallow mount by default, with the
  // badge/button stubbed so their rendered text is available.
  function createComponent(mountFn = shallowMountExtended) {
    wrapper = mountFn(ArtifactRow, {
      propsData: {
        artifact,
        isLoading: false,
        isLastRow: false,
      },
      stubs: { GlBadge, GlButton },
    });
  }

  afterEach(() => {
    wrapper.destroy();
  });

  describe('artifact details', () => {
    beforeEach(async () => {
      createComponent();
      await waitForPromises();
    });

    it('displays the artifact name and type', () => {
      expect(findName().text()).toContain(artifact.name);
      expect(findBadge().text()).toBe(artifact.fileType.toLowerCase());
    });

    it('displays the artifact size', () => {
      expect(findSize().text()).toBe(numberToHumanSize(artifact.size));
    });

    it('displays the download button as a link to the download path', () => {
      expect(findDownloadButton().attributes('href')).toBe(artifact.downloadPath);
    });

    it('displays the delete button', () => {
      expect(findDeleteButton().exists()).toBe(true);
    });

    it('emits the delete event when the delete button is clicked', async () => {
      expect(wrapper.emitted('delete')).toBeUndefined();

      findDeleteButton().trigger('click');
      await waitForPromises();

      expect(wrapper.emitted('delete')).toBeDefined();
    });
  });
});

View File

@ -0,0 +1,107 @@
// Specs for ArtifactsTableRowDetails: the expanded row that lists a job's
// individual artifacts and lets the user delete them via a GraphQL mutation.
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import getJobArtifactsResponse from 'test_fixtures/graphql/artifacts/graphql/queries/get_job_artifacts.query.graphql.json';
import waitForPromises from 'helpers/wait_for_promises';
import ArtifactsTableRowDetails from '~/artifacts/components/artifacts_table_row_details.vue';
import ArtifactRow from '~/artifacts/components/artifact_row.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import destroyArtifactMutation from '~/artifacts/graphql/mutations/destroy_artifact.mutation.graphql';
import { I18N_DESTROY_ERROR } from '~/artifacts/constants';
import { createAlert } from '~/flash';
// Mock the flash module so createAlert calls can be asserted without rendering alerts.
jest.mock('~/flash');
// Artifacts of the first job in the GraphQL fixture; passed as the component's prop.
const { artifacts } = getJobArtifactsResponse.data.project.jobs.nodes[0];
const refetchArtifacts = jest.fn();
Vue.use(VueApollo);
describe('ArtifactsTableRowDetails component', () => {
  let wrapper;
  // Holds the Apollo request handlers for the current mount, so individual
  // specs can assert on (or override) the destroy mutation handler.
  let requestHandlers;
  // Mounts the component with a mock Apollo provider. Callers may pass their
  // own handlers (e.g. a rejecting destroy mutation) to exercise error paths.
  const createComponent = (
    handlers = {
      destroyArtifactMutation: jest.fn(),
    },
  ) => {
    requestHandlers = handlers;
    wrapper = mountExtended(ArtifactsTableRowDetails, {
      apolloProvider: createMockApollo([
        [destroyArtifactMutation, requestHandlers.destroyArtifactMutation],
      ]),
      propsData: {
        artifacts,
        refetchArtifacts,
        queryVariables: {},
      },
      data() {
        // Start with no deletion in flight.
        return { deletingArtifactId: null };
      },
    });
  };
  afterEach(() => {
    wrapper.destroy();
  });
  describe('passes correct props', () => {
    beforeEach(() => {
      createComponent();
    });
    it('to the artifact rows', () => {
      // One ArtifactRow per artifact node, each idle (isLoading: false).
      [0, 1, 2].forEach((index) => {
        expect(wrapper.findAllComponents(ArtifactRow).at(index).props()).toMatchObject({
          artifact: artifacts.nodes[index],
          isLoading: false,
        });
      });
    });
  });
  describe('when an artifact row emits the delete event', () => {
    it('sets isLoading to true for that row', async () => {
      createComponent();
      await waitForPromises();
      // Emit delete from the FIRST row only; only that row should show loading.
      wrapper.findComponent(ArtifactRow).vm.$emit('delete');
      await nextTick();
      [
        { index: 0, expectedLoading: true },
        { index: 1, expectedLoading: false },
      ].forEach(({ index, expectedLoading }) => {
        expect(wrapper.findAllComponents(ArtifactRow).at(index).props('isLoading')).toBe(
          expectedLoading,
        );
      });
    });
    it('triggers the destroyArtifact GraphQL mutation', async () => {
      createComponent();
      await waitForPromises();
      wrapper.findComponent(ArtifactRow).vm.$emit('delete');
      expect(requestHandlers.destroyArtifactMutation).toHaveBeenCalled();
    });
    it('displays a flash message and refetches artifacts when the mutation fails', async () => {
      // Override the handler so the mutation rejects.
      createComponent({
        destroyArtifactMutation: jest.fn().mockRejectedValue(new Error('Error!')),
      });
      await waitForPromises();
      expect(wrapper.emitted('refetch')).toBeUndefined();
      wrapper.findComponent(ArtifactRow).vm.$emit('delete');
      await waitForPromises();
      // Failure path: alert the user, then ask the parent to refetch.
      expect(createAlert).toHaveBeenCalledWith({ message: I18N_DESTROY_ERROR });
      expect(wrapper.emitted('refetch')).toBeDefined();
    });
  });
});

View File

@ -0,0 +1,222 @@
import { GlLoadingIcon, GlTable, GlLink, GlBadge, GlPagination } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import getJobArtifactsResponse from 'test_fixtures/graphql/artifacts/graphql/queries/get_job_artifacts.query.graphql.json';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import waitForPromises from 'helpers/wait_for_promises';
import JobArtifactsTable from '~/artifacts/components/job_artifacts_table.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import getJobArtifactsQuery from '~/artifacts/graphql/queries/get_job_artifacts.query.graphql';
import destroyArtifactMutation from '~/artifacts/graphql/mutations/destroy_artifact.mutation.graphql';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { ARCHIVE_FILE_TYPE, JOBS_PER_PAGE, I18N_FETCH_ERROR } from '~/artifacts/constants';
import { totalArtifactsSizeForJob } from '~/artifacts/utils';
import { createAlert } from '~/flash';
jest.mock('~/flash');
Vue.use(VueApollo);
// Unit tests for the JobArtifactsTable component: renders a project's job
// artifacts fetched over GraphQL, with loading/error states, per-job details,
// download/browse/delete actions, and cursor-based pagination.
describe('JobArtifactsTable component', () => {
  let wrapper;
  // Holds the mocked Apollo handlers so tests can swap or inspect them.
  let requestHandlers;

  // Finder helpers for the rendered pieces under test.
  const findLoadingState = () => wrapper.findComponent(GlLoadingIcon);
  const findTable = () => wrapper.findComponent(GlTable);
  const findCount = () => wrapper.findByTestId('job-artifacts-count');
  const findStatuses = () => wrapper.findAllByTestId('job-artifacts-job-status');
  // The fixture's first job is successful, the second failed (see the
  // fixture generator spec that creates one :success and one :failed build).
  const findSuccessfulJobStatus = () => findStatuses().at(0);
  const findFailedJobStatus = () => findStatuses().at(1);
  const findLinks = () => wrapper.findAllComponents(GlLink);
  // Link order within a row: job, pipeline, ref, commit.
  const findJobLink = () => findLinks().at(0);
  const findPipelineLink = () => findLinks().at(1);
  const findRefLink = () => findLinks().at(2);
  const findCommitLink = () => findLinks().at(3);
  const findSize = () => wrapper.findByTestId('job-artifacts-size');
  const findCreated = () => wrapper.findByTestId('job-artifacts-created');
  const findDownloadButton = () => wrapper.findByTestId('job-artifacts-download-button');
  const findBrowseButton = () => wrapper.findByTestId('job-artifacts-browse-button');
  const findDeleteButton = () => wrapper.findByTestId('job-artifacts-delete-button');
  const findPagination = () => wrapper.findComponent(GlPagination);
  // Emits a page change on the pagination control and waits for the refetch.
  const setPage = async (page) => {
    findPagination().vm.$emit('input', page);
    await waitForPromises();
  };

  // Duplicate the fixture's jobs until there are more than JOBS_PER_PAGE,
  // so the pagination control is rendered.
  let enoughJobsToPaginate = [...getJobArtifactsResponse.data.project.jobs.nodes];
  while (enoughJobsToPaginate.length <= JOBS_PER_PAGE) {
    enoughJobsToPaginate = [
      ...enoughJobsToPaginate,
      ...getJobArtifactsResponse.data.project.jobs.nodes,
    ];
  }
  const getJobArtifactsResponseThatPaginates = {
    data: { project: { jobs: { nodes: enoughJobsToPaginate } } },
  };

  // Mounts the component with mocked Apollo handlers and optional local data
  // overrides (used by the pagination tests to pre-seed page info).
  const createComponent = (
    handlers = {
      getJobArtifactsQuery: jest.fn().mockResolvedValue(getJobArtifactsResponse),
      destroyArtifactMutation: jest.fn(),
    },
    data = {},
  ) => {
    requestHandlers = handlers;
    wrapper = mountExtended(JobArtifactsTable, {
      apolloProvider: createMockApollo([
        [getJobArtifactsQuery, requestHandlers.getJobArtifactsQuery],
        [destroyArtifactMutation, requestHandlers.destroyArtifactMutation],
      ]),
      provide: { projectPath: 'project/path' },
      data() {
        return data;
      },
    });
  };

  afterEach(() => {
    wrapper.destroy();
  });

  it('when loading, shows a loading state', () => {
    createComponent();
    // Asserted synchronously, before the mocked query resolves.
    expect(findLoadingState().exists()).toBe(true);
  });

  it('on error, shows an alert', async () => {
    createComponent({
      getJobArtifactsQuery: jest.fn().mockRejectedValue(new Error('Error!')),
    });
    await waitForPromises();
    expect(createAlert).toHaveBeenCalledWith({ message: I18N_FETCH_ERROR });
  });

  it('with data, renders the table', async () => {
    createComponent();
    await waitForPromises();
    expect(findTable().exists()).toBe(true);
  });

  describe('job details', () => {
    const job = getJobArtifactsResponse.data.project.jobs.nodes[0];
    // The archive artifact is the one the download button links to.
    const archiveArtifact = job.artifacts.nodes.find(
      (artifact) => artifact.fileType === ARCHIVE_FILE_TYPE,
    );

    beforeEach(async () => {
      createComponent();
      await waitForPromises();
    });

    it('shows the artifact count', () => {
      expect(findCount().text()).toBe(`${job.artifacts.nodes.length} files`);
    });

    // NOTE(review): declared async but contains no await — presumably the
    // row-toggle handler fires synchronously on click; confirm or drop async.
    it('expands to show the list of artifacts', async () => {
      jest.spyOn(wrapper.vm, 'handleRowToggle');
      findCount().trigger('click');
      expect(wrapper.vm.handleRowToggle).toHaveBeenCalled();
    });

    it('shows the job status as an icon for a successful job', () => {
      expect(findSuccessfulJobStatus().findComponent(CiIcon).exists()).toBe(true);
      expect(findSuccessfulJobStatus().findComponent(GlBadge).exists()).toBe(false);
    });

    it('shows the job status as a badge for other job statuses', () => {
      expect(findFailedJobStatus().findComponent(GlBadge).exists()).toBe(true);
      expect(findFailedJobStatus().findComponent(CiIcon).exists()).toBe(false);
    });

    it('shows links to the job, pipeline, ref, and commit', () => {
      expect(findJobLink().text()).toBe(job.name);
      expect(findJobLink().attributes('href')).toBe(job.webPath);
      expect(findPipelineLink().text()).toBe(`#${getIdFromGraphQLId(job.pipeline.id)}`);
      expect(findPipelineLink().attributes('href')).toBe(job.pipeline.path);
      expect(findRefLink().text()).toBe(job.refName);
      expect(findRefLink().attributes('href')).toBe(job.refPath);
      expect(findCommitLink().text()).toBe(job.shortSha);
      expect(findCommitLink().attributes('href')).toBe(job.commitPath);
    });

    it('shows the total size of artifacts', () => {
      expect(findSize().text()).toBe(totalArtifactsSizeForJob(job));
    });

    it('shows the created time', () => {
      expect(findCreated().text()).toBe('5 years ago');
    });

    // Browse and delete are rendered disabled here; only download is active.
    it('shows the download, browse, and delete buttons', () => {
      expect(findDownloadButton().attributes('href')).toBe(archiveArtifact.downloadPath);
      expect(findBrowseButton().attributes('disabled')).toBe('disabled');
      expect(findDeleteButton().attributes('disabled')).toBe('disabled');
    });
  });

  describe('pagination', () => {
    const { pageInfo } = getJobArtifactsResponse.data.project.jobs;

    beforeEach(async () => {
      createComponent(
        {
          getJobArtifactsQuery: jest.fn().mockResolvedValue(getJobArtifactsResponseThatPaginates),
        },
        {
          // Pre-seed local data so the pagination control has page info.
          jobArtifacts: {
            count: enoughJobsToPaginate.length,
            pageInfo,
          },
        },
      );
      await waitForPromises();
    });

    it('renders pagination and passes page props', () => {
      expect(findPagination().exists()).toBe(true);
      expect(findPagination().props()).toMatchObject({
        value: wrapper.vm.pagination.currentPage,
        prevPage: wrapper.vm.prevPage,
        nextPage: wrapper.vm.nextPage,
      });
    });

    it('updates query variables when going to previous page', () => {
      return setPage(1).then(() => {
        expect(wrapper.vm.queryVariables).toMatchObject({
          projectPath: 'project/path',
          nextPageCursor: undefined,
          prevPageCursor: pageInfo.startCursor,
        });
      });
    });

    it('updates query variables when going to next page', () => {
      return setPage(2).then(() => {
        expect(wrapper.vm.queryVariables).toMatchObject({
          lastPageSize: null,
          nextPageCursor: pageInfo.endCursor,
          prevPageCursor: '',
        });
      });
    });
  });
});

View File

@ -0,0 +1,67 @@
import getJobArtifactsQuery from '~/artifacts/graphql/queries/get_job_artifacts.query.graphql';
import { removeArtifactFromStore } from '~/artifacts/graphql/cache_update';
// Tests for removeArtifactFromStore: the Apollo cache updater that removes a
// deleted artifact from the cached get_job_artifacts query result.
describe('Artifact table cache updates', () => {
  let store;

  // Cached query result: two jobs with one artifact each, so a targeted
  // removal from the first job can be distinguished from a blanket rewrite.
  const cacheMock = {
    project: {
      jobs: {
        nodes: [
          { artifacts: { nodes: [{ id: 'foo' }] } },
          { artifacts: { nodes: [{ id: 'bar' }] } },
        ],
      },
    },
  };

  const query = getJobArtifactsQuery;
  const variables = { fullPath: 'path/to/project' };

  beforeEach(() => {
    // Minimal stand-in for the Apollo cache: read returns the fixture above,
    // write is a spy so tests can assert the exact payload written back.
    store = {
      readQuery: jest.fn().mockReturnValue(cacheMock),
      writeQuery: jest.fn(),
    };
  });

  describe('removeArtifactFromStore', () => {
    it('calls readQuery', () => {
      removeArtifactFromStore(store, 'foo', query, variables);
      expect(store.readQuery).toHaveBeenCalledWith({ query, variables });
    });

    it('writes the correct result in the cache', () => {
      removeArtifactFromStore(store, 'foo', query, variables);
      // 'foo' is gone from the first job; the second job is untouched.
      expect(store.writeQuery).toHaveBeenCalledWith({
        query,
        variables,
        data: {
          project: {
            jobs: {
              nodes: [{ artifacts: { nodes: [] } }, { artifacts: { nodes: [{ id: 'bar' }] } }],
            },
          },
        },
      });
    });

    it('does not remove an unknown artifact', () => {
      removeArtifactFromStore(store, 'baz', query, variables);
      // 'baz' matches nothing, so the written data equals the original cache.
      expect(store.writeQuery).toHaveBeenCalledWith({
        query,
        variables,
        data: {
          project: {
            jobs: {
              nodes: [
                { artifacts: { nodes: [{ id: 'foo' }] } },
                { artifacts: { nodes: [{ id: 'bar' }] } },
              ],
            },
          },
        },
      });
    });
  });
});

View File

@ -9,6 +9,18 @@ import {
const LINE_CODE = 'abc123';
// Builds a fresh line-"problems" object, defaulting every flag to false.
// Callers override individual flags (e.g. { brokenSymlink: { isSymbolic: true } })
// to simulate specific commenting restrictions on a diff line.
function problemsClone(options = {}) {
  const {
    brokenSymlink = false,
    brokenLineCode = false,
    fileOnlyMoved = false,
  } = options;

  return {
    brokenSymlink,
    brokenLineCode,
    fileOnlyMoved,
  };
}
describe('isHighlighted', () => {
it('should return true if line is highlighted', () => {
const line = { line_code: LINE_CODE };
@ -140,6 +152,9 @@ describe('addCommentTooltip', () => {
'Commenting on symbolic links that replace or are replaced by files is currently not supported.';
const brokenRealTooltip =
'Commenting on files that replace or are replaced by symbolic links is currently not supported.';
const lineMovedOrRenamedFileTooltip =
'Commenting on files that are only moved or renamed is currently not supported';
const lineWithNoLineCodeTooltip = 'Commenting on this line is currently not supported';
const dragTooltip = 'Add a comment to this line or drag for multiple lines';
it('should return default tooltip', () => {
@ -147,24 +162,38 @@ describe('addCommentTooltip', () => {
});
it('should return drag comment tooltip when dragging is enabled', () => {
expect(utils.addCommentTooltip({})).toEqual(dragTooltip);
expect(utils.addCommentTooltip({ problems: problemsClone() })).toEqual(dragTooltip);
});
it('should return broken symlink tooltip', () => {
expect(utils.addCommentTooltip({ commentsDisabled: { wasSymbolic: true } })).toEqual(
brokenSymLinkTooltip,
);
expect(utils.addCommentTooltip({ commentsDisabled: { isSymbolic: true } })).toEqual(
brokenSymLinkTooltip,
);
expect(
utils.addCommentTooltip({
problems: problemsClone({ brokenSymlink: { wasSymbolic: true } }),
}),
).toEqual(brokenSymLinkTooltip);
expect(
utils.addCommentTooltip({ problems: problemsClone({ brokenSymlink: { isSymbolic: true } }) }),
).toEqual(brokenSymLinkTooltip);
});
it('should return broken real tooltip', () => {
expect(utils.addCommentTooltip({ commentsDisabled: { wasReal: true } })).toEqual(
brokenRealTooltip,
expect(
utils.addCommentTooltip({ problems: problemsClone({ brokenSymlink: { wasReal: true } }) }),
).toEqual(brokenRealTooltip);
expect(
utils.addCommentTooltip({ problems: problemsClone({ brokenSymlink: { isReal: true } }) }),
).toEqual(brokenRealTooltip);
});
it('reports a tooltip when the line is in a file that has only been moved or renamed', () => {
expect(utils.addCommentTooltip({ problems: problemsClone({ fileOnlyMoved: true }) })).toEqual(
lineMovedOrRenamedFileTooltip,
);
expect(utils.addCommentTooltip({ commentsDisabled: { isReal: true } })).toEqual(
brokenRealTooltip,
});
it("reports a tooltip when the line doesn't have a line code to leave a comment on", () => {
expect(utils.addCommentTooltip({ problems: problemsClone({ brokenLineCode: true }) })).toEqual(
lineWithNoLineCodeTooltip,
);
});
});
@ -211,6 +240,7 @@ describe('mapParallel', () => {
discussions: [{}],
discussionsExpanded: true,
hasForm: true,
problems: problemsClone(),
};
const content = {
diffFile: {},

View File

@ -1,3 +1,11 @@
// Returns a fresh default "problems" object (all flags false) so every mock
// diff line gets its own copy instead of sharing one mutable object.
function problemsClone() {
  const problems = {};

  problems.brokenSymlink = false;
  problems.brokenLineCode = false;
  problems.fileOnlyMoved = false;

  return problems;
}
export const getDiffFileMock = () => ({
submodule: false,
submodule_link: null,
@ -61,6 +69,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
rich_text: '<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
meta_data: null,
problems: problemsClone(),
},
{
line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_2',
@ -71,6 +80,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC2" class="line" lang="plaintext"></span>\n',
rich_text: '<span id="LC2" class="line" lang="plaintext"></span>\n',
meta_data: null,
problems: problemsClone(),
},
{
line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3',
@ -81,6 +91,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
rich_text: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
meta_data: null,
problems: problemsClone(),
},
{
line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4',
@ -91,6 +102,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC4" class="line" lang="plaintext"></span>\n',
rich_text: '<span id="LC4" class="line" lang="plaintext"></span>\n',
meta_data: null,
problems: problemsClone(),
},
{
line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5',
@ -101,6 +113,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
rich_text: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
meta_data: null,
problems: problemsClone(),
},
{
line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_6',
@ -111,6 +124,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC6" class="line" lang="plaintext"></span>\n',
rich_text: '<span id="LC6" class="line" lang="plaintext"></span>\n',
meta_data: null,
problems: problemsClone(),
},
{
line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_7',
@ -121,6 +135,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC7" class="line" lang="plaintext"></span>\n',
rich_text: '<span id="LC7" class="line" lang="plaintext"></span>\n',
meta_data: null,
problems: problemsClone(),
},
{
line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_9',
@ -131,6 +146,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC7" class="line" lang="plaintext"></span>\n',
rich_text: '<span id="LC7" class="line" lang="plaintext"></span>\n',
meta_data: null,
problems: problemsClone(),
},
{
line_code: null,
@ -144,6 +160,7 @@ export const getDiffFileMock = () => ({
old_pos: 3,
new_pos: 5,
},
problems: problemsClone(),
},
],
parallel_diff_lines: [
@ -158,6 +175,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
rich_text: '<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
meta_data: null,
problems: problemsClone(),
},
},
{
@ -171,6 +189,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC2" class="line" lang="plaintext"></span>\n',
rich_text: '<span id="LC2" class="line" lang="plaintext"></span>\n',
meta_data: null,
problems: problemsClone(),
},
},
{
@ -183,6 +202,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
rich_text: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
meta_data: null,
problems: problemsClone(),
},
right: {
line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3',
@ -193,6 +213,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
rich_text: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
meta_data: null,
problems: problemsClone(),
},
},
{
@ -205,6 +226,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC4" class="line" lang="plaintext"></span>\n',
rich_text: '<span id="LC4" class="line" lang="plaintext"></span>\n',
meta_data: null,
problems: problemsClone(),
},
right: {
line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4',
@ -215,6 +237,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC4" class="line" lang="plaintext"></span>\n',
rich_text: '<span id="LC4" class="line" lang="plaintext"></span>\n',
meta_data: null,
problems: problemsClone(),
},
},
{
@ -227,6 +250,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
rich_text: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
meta_data: null,
problems: problemsClone(),
},
right: {
line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5',
@ -237,6 +261,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
rich_text: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
meta_data: null,
problems: problemsClone(),
},
},
{
@ -249,6 +274,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC6" class="line" lang="plaintext"></span>\n',
rich_text: '<span id="LC6" class="line" lang="plaintext"></span>\n',
meta_data: null,
problems: problemsClone(),
},
right: {
line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_7',
@ -259,6 +285,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC7" class="line" lang="plaintext"></span>\n',
rich_text: '<span id="LC7" class="line" lang="plaintext"></span>\n',
meta_data: null,
problems: problemsClone(),
},
},
{
@ -272,6 +299,7 @@ export const getDiffFileMock = () => ({
text: '<span id="LC7" class="line" lang="plaintext"></span>\n',
rich_text: '<span id="LC7" class="line" lang="plaintext"></span>\n',
meta_data: null,
problems: problemsClone(),
},
},
{
@ -287,6 +315,7 @@ export const getDiffFileMock = () => ({
old_pos: 3,
new_pos: 5,
},
problems: problemsClone(),
},
right: {
line_code: null,
@ -300,6 +329,7 @@ export const getDiffFileMock = () => ({
old_pos: 3,
new_pos: 5,
},
problems: problemsClone(),
},
},
],

View File

@ -311,9 +311,14 @@ describe('DiffsStoreUtils', () => {
describe('prepareLineForRenamedFile', () => {
const diffFile = {
file_hash: 'file-hash',
brokenSymlink: false,
renamed_file: false,
added_lines: 1,
removed_lines: 1,
};
const lineIndex = 4;
const sourceLine = {
line_code: 'abc',
foo: 'test',
rich_text: ' <p>rich</p>', // Note the leading space
};
@ -328,6 +333,12 @@ describe('DiffsStoreUtils', () => {
hasForm: false,
text: undefined,
alreadyPrepared: true,
commentsDisabled: false,
problems: {
brokenLineCode: false,
brokenSymlink: false,
fileOnlyMoved: false,
},
};
let preppedLine;
@ -360,24 +371,35 @@ describe('DiffsStoreUtils', () => {
});
it.each`
brokenSymlink
${false}
${{}}
${'anything except `false`'}
brokenSymlink | renamed | added | removed | lineCode | commentsDisabled
${false} | ${false} | ${0} | ${0} | ${'a'} | ${false}
${{}} | ${false} | ${1} | ${1} | ${'a'} | ${true}
${'truthy'} | ${false} | ${1} | ${1} | ${'a'} | ${true}
${false} | ${true} | ${1} | ${1} | ${'a'} | ${false}
${false} | ${true} | ${1} | ${0} | ${'a'} | ${false}
${false} | ${true} | ${0} | ${1} | ${'a'} | ${false}
${false} | ${true} | ${0} | ${0} | ${'a'} | ${true}
`(
"properly assigns each line's `commentsDisabled` as the same value as the parent file's `brokenSymlink` value (`$brokenSymlink`)",
({ brokenSymlink }) => {
preppedLine = utils.prepareLineForRenamedFile({
diffViewType: INLINE_DIFF_VIEW_TYPE,
line: sourceLine,
index: lineIndex,
diffFile: {
"properly sets a line's `commentsDisabled` to '$commentsDisabled' for file and line settings { brokenSymlink: $brokenSymlink, renamed: $renamed, added: $added, removed: $removed, line_code: $lineCode }",
({ brokenSymlink, renamed, added, removed, lineCode, commentsDisabled }) => {
const line = {
...sourceLine,
line_code: lineCode,
};
const file = {
...diffFile,
brokenSymlink,
},
renamed_file: renamed,
added_lines: added,
removed_lines: removed,
};
const preparedLine = utils.prepareLineForRenamedFile({
index: lineIndex,
diffFile: file,
line,
});
expect(preppedLine.commentsDisabled).toStrictEqual(brokenSymlink);
expect(preparedLine.commentsDisabled).toBe(commentsDisabled);
},
);
});
@ -477,7 +499,7 @@ describe('DiffsStoreUtils', () => {
it('adds the `.brokenSymlink` property to each diff file', () => {
preparedDiff.diff_files.forEach((file) => {
expect(file).toEqual(expect.objectContaining({ brokenSymlink: false }));
expect(file).toHaveProperty('brokenSymlink', false);
});
});
@ -490,7 +512,7 @@ describe('DiffsStoreUtils', () => {
].flatMap((file) => [...file[INLINE_DIFF_LINES_KEY]]);
lines.forEach((line) => {
expect(line.commentsDisabled).toBe(false);
expect(line.problems.brokenSymlink).toBe(false);
});
});
});

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true
require 'spec_helper'
# Generates the frontend JSON fixture for the get_job_artifacts GraphQL query
# (consumed by the JobArtifactsTable Jest specs as
# test_fixtures/graphql/artifacts/graphql/queries/get_job_artifacts.query.graphql.json).
RSpec.describe 'Job Artifacts (GraphQL fixtures)' do
  describe GraphQL::Query, type: :request do
    include ApiHelpers
    include GraphqlHelpers
    include JavaScriptFixturesHelpers

    let_it_be(:project) { create(:project, :repository, :public) }
    let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
    let_it_be(:user) { create(:user) }

    job_artifacts_query_path = 'artifacts/graphql/queries/get_job_artifacts.query.graphql'

    # The example name doubles as the fixture's output path.
    it "graphql/#{job_artifacts_query_path}.json" do
      # One failed and one successful build, so the fixture exercises both
      # job-status renderings in the frontend specs.
      create(:ci_build, :failed, :artifacts, :trace_artifact, pipeline: pipeline)
      create(:ci_build, :success, :artifacts, :trace_artifact, pipeline: pipeline)

      query = get_graphql_query_as_string(job_artifacts_query_path)

      post_graphql(query, current_user: user, variables: { projectPath: project.full_path })

      expect_graphql_errors_to_be_empty
    end
  end
end

View File

@ -0,0 +1,36 @@
import { shallowMount } from '@vue/test-utils';
import SidebarReviewersInputs from '~/sidebar/components/reviewers/sidebar_reviewers_inputs.vue';
import { state } from '~/sidebar/components/reviewers/sidebar_reviewers.vue';
let wrapper;

// Shallow-mounts the component; it reads its reviewers from the imported
// shared `state` object rather than from props.
function factory() {
  wrapper = shallowMount(SidebarReviewersInputs);
}

describe('Sidebar reviewers inputs component', () => {
  it('renders hidden input', () => {
    // Seed the shared state with two reviewers before mounting; the component
    // is expected to emit one hidden form input per reviewer.
    state.issuable.reviewers = {
      nodes: [
        {
          id: 1,
          avatarUrl: '',
          name: 'root',
          username: 'root',
          mergeRequestInteraction: { canMerge: true },
        },
        {
          id: 2,
          avatarUrl: '',
          name: 'root',
          username: 'root',
          mergeRequestInteraction: { canMerge: true },
        },
      ],
    };

    factory();

    expect(wrapper.findAll('input[type="hidden"]').length).toBe(2);
  });
});

View File

@ -2526,48 +2526,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
describe '#with_lock_retries' do
let(:buffer) { StringIO.new }
let(:in_memory_logger) { Gitlab::JsonLogger.new(buffer) }
let(:env) { { 'DISABLE_LOCK_RETRIES' => 'true' } }
it 'sets the migration class name in the logs' do
model.with_lock_retries(env: env, logger: in_memory_logger) {}
buffer.rewind
expect(buffer.read).to include("\"class\":\"#{model.class}\"")
end
where(raise_on_exhaustion: [true, false])
with_them do
it 'sets raise_on_exhaustion as requested' do
with_lock_retries = double
expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: raise_on_exhaustion)
model.with_lock_retries(env: env, logger: in_memory_logger, raise_on_exhaustion: raise_on_exhaustion) {}
end
end
it 'does not raise on exhaustion by default' do
with_lock_retries = double
expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)
model.with_lock_retries(env: env, logger: in_memory_logger) {}
end
it 'defaults to allowing subtransactions' do
with_lock_retries = double
expect(Gitlab::Database::WithLockRetries).to receive(:new).with(hash_including(allow_savepoints: true)).and_return(with_lock_retries)
expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)
model.with_lock_retries(env: env, logger: in_memory_logger) {}
end
end
describe '#backfill_iids' do
include MigrationsHelpers

View File

@ -0,0 +1,52 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for the LockRetriesHelpers mixin (extracted from MigrationHelpers):
# verifies logging metadata, exhaustion defaults, and savepoint behavior of
# #with_lock_retries. DISABLE_LOCK_RETRIES is set so the helper takes its
# non-retrying path under test.
RSpec.describe Gitlab::Database::Migrations::LockRetriesHelpers do
  # A bare migration instance extended with the module under test.
  let(:model) do
    ActiveRecord::Migration.new.extend(described_class)
  end

  describe '#with_lock_retries' do
    let(:buffer) { StringIO.new }
    let(:in_memory_logger) { Gitlab::JsonLogger.new(buffer) }
    let(:env) { { 'DISABLE_LOCK_RETRIES' => 'true' } }

    it 'sets the migration class name in the logs' do
      model.with_lock_retries(env: env, logger: in_memory_logger) {}

      buffer.rewind
      expect(buffer.read).to include("\"class\":\"#{model.class}\"")
    end

    # Parameterized over both exhaustion modes (rspec-parameterized).
    where(raise_on_exhaustion: [true, false])

    with_them do
      it 'sets raise_on_exhaustion as requested' do
        with_lock_retries = double
        expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
        expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: raise_on_exhaustion)

        model.with_lock_retries(env: env, logger: in_memory_logger, raise_on_exhaustion: raise_on_exhaustion) {}
      end
    end

    it 'does not raise on exhaustion by default' do
      with_lock_retries = double
      expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
      expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)

      model.with_lock_retries(env: env, logger: in_memory_logger) {}
    end

    it 'defaults to allowing subtransactions' do
      with_lock_retries = double

      expect(Gitlab::Database::WithLockRetries)
        .to receive(:new).with(hash_including(allow_savepoints: true)).and_return(with_lock_retries)
      expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)

      model.with_lock_retries(env: env, logger: in_memory_logger) {}
    end
  end
end

View File

@ -361,6 +361,7 @@ hooks:
- web_hook_logs
protected_branches:
- project
- group
- merge_access_levels
- push_access_levels
- unprotect_access_levels

View File

@ -112,7 +112,7 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator do
context 'when archive path is not a string' do
let(:filepath) { 123 }
let(:error_message) { 'Archive path is not a string' }
let(:error_message) { 'Invalid path' }
it 'returns false' do
expect(subject.valid?).to eq(false)

View File

@ -63,9 +63,21 @@ RSpec.describe Gitlab::Utils do
expect(check_path_traversal!('dir/.foo.rb')).to eq('dir/.foo.rb')
end
it 'does nothing for a non-string' do
it 'does nothing for nil' do
expect(check_path_traversal!(nil)).to be_nil
end
it 'does nothing for safe HashedPath' do
expect(check_path_traversal!(Gitlab::HashedPath.new('tmp', root_hash: 1))).to eq '6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b/tmp'
end
it 'raises for unsafe HashedPath' do
expect { check_path_traversal!(Gitlab::HashedPath.new('tmp', '..', 'etc', 'passwd', root_hash: 1)) }.to raise_error(/Invalid path/)
end
it 'raises for other non-strings' do
expect { check_path_traversal!(%w[/tmp /tmp/../etc/passwd]) }.to raise_error(/Invalid path/)
end
end
describe '.check_allowed_absolute_path_and_path_traversal!' do

View File

@ -20,14 +20,14 @@ RSpec.describe Ci::Sources::Pipeline do
context 'loose foreign key on ci_sources_pipelines.source_project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
let!(:parent) { create(:project) }
let!(:parent) { create(:project, namespace: create(:group)) }
let!(:model) { create(:ci_sources_pipeline, source_project: parent) }
end
end
context 'loose foreign key on ci_sources_pipelines.project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
let!(:parent) { create(:project) }
let!(:parent) { create(:project, namespace: create(:group)) }
let!(:model) { create(:ci_sources_pipeline, project: parent) }
end
end

View File

@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::UnitTest do
it_behaves_like 'cleanup by a loose foreign key' do
let!(:parent) { create(:project) }
let!(:parent) { create(:project, namespace: create(:group)) }
let!(:model) { create(:ci_unit_test, project: parent) }
end

View File

@ -40,6 +40,7 @@ RSpec.describe Group do
it { is_expected.to have_many(:bulk_import_exports).class_name('BulkImports::Export') }
it { is_expected.to have_many(:contacts).class_name('CustomerRelations::Contact') }
it { is_expected.to have_many(:organizations).class_name('CustomerRelations::Organization') }
it { is_expected.to have_many(:protected_branches) }
it { is_expected.to have_one(:crm_settings) }
it { is_expected.to have_one(:group_feature) }
it { is_expected.to have_one(:harbor_integration) }

View File

@ -60,10 +60,10 @@ RSpec.describe Integration do
describe 'Scopes' do
describe '.third_party_wikis' do
let!(:integration1) { create(:jira_integration) }
let!(:integration2) { create(:redmine_integration) }
let!(:integration3) { create(:confluence_integration) }
let!(:integration4) { create(:shimo_integration) }
let!(:integration1) { create(:jira_integration, project: project) }
let!(:integration2) { create(:redmine_integration, project: project) }
let!(:integration3) { create(:confluence_integration, project: project) }
let!(:integration4) { create(:shimo_integration, project: project) }
it 'returns the right group integration' do
expect(described_class.third_party_wikis).to contain_exactly(integration3, integration4)
@ -89,7 +89,7 @@ RSpec.describe Integration do
end
describe '.by_type' do
let!(:integration1) { create(:jira_integration) }
let!(:integration1) { create(:jira_integration, project: project) }
let!(:integration2) { create(:jira_integration) }
let!(:integration3) { create(:redmine_integration) }
@ -110,7 +110,7 @@ RSpec.describe Integration do
describe '.for_group' do
let!(:integration1) { create(:jira_integration, project_id: nil, group_id: group.id) }
let!(:integration2) { create(:jira_integration) }
let!(:integration2) { create(:jira_integration, project: project) }
it 'returns the right group integration' do
expect(described_class.for_group(group)).to contain_exactly(integration1)
@ -219,7 +219,7 @@ RSpec.describe Integration do
describe '.find_or_initialize_non_project_specific_integration' do
let!(:integration_1) { create(:jira_integration, project_id: nil, group_id: group.id) }
let!(:integration_2) { create(:jira_integration) }
let!(:integration_2) { create(:jira_integration, project: project) }
it 'returns the right integration' do
expect(Integration.find_or_initialize_non_project_specific_integration('jira', group_id: group))
@ -374,7 +374,7 @@ RSpec.describe Integration do
context 'when data is stored in properties' do
let(:properties) { data_params }
let!(:integration) do
create(:jira_integration, :without_properties_callback, properties: properties.merge(additional: 'something'))
create(:jira_integration, :without_properties_callback, project: project, properties: properties.merge(additional: 'something'))
end
it_behaves_like 'integration creation from an integration'
@ -382,7 +382,7 @@ RSpec.describe Integration do
context 'when data are stored in separated fields' do
let(:integration) do
create(:jira_integration, data_params.merge(properties: {}))
create(:jira_integration, data_params.merge(properties: {}, project: project))
end
it_behaves_like 'integration creation from an integration'
@ -391,7 +391,7 @@ RSpec.describe Integration do
context 'when data are stored in both properties and separated fields' do
let(:properties) { data_params }
let(:integration) do
create(:jira_integration, :without_properties_callback, active: true, properties: properties).tap do |integration|
create(:jira_integration, :without_properties_callback, project: project, active: true, properties: properties).tap do |integration|
create(:jira_tracker_data, data_params.merge(integration: integration))
end
end
@ -1233,11 +1233,11 @@ RSpec.describe Integration do
describe '#attributes' do
it 'does not include properties' do
expect(create(:integration).attributes).not_to have_key('properties')
expect(build(:integration, project: project).attributes).not_to have_key('properties')
end
it 'can be used in assign_attributes without nullifying properties' do
record = create(:integration, :instance, properties: { url: generate(:url) })
record = build(:integration, :instance, properties: { url: generate(:url) })
attrs = record.attributes
@ -1246,7 +1246,7 @@ RSpec.describe Integration do
end
describe '#dup' do
let(:original) { create(:integration, properties: { one: 1, two: 2, three: 3 }) }
let(:original) { build(:integration, project: project, properties: { one: 1, two: 2, three: 3 }) }
it 'results in distinct ciphertexts, but identical properties' do
copy = original.dup
@ -1259,7 +1259,7 @@ RSpec.describe Integration do
end
context 'when the model supports data-fields' do
let(:original) { create(:jira_integration, username: generate(:username), url: generate(:url)) }
let(:original) { build(:jira_integration, project: project, username: generate(:username), url: generate(:url)) }
it 'creates distinct but identical data-fields' do
copy = original.dup

View File

@ -19,7 +19,7 @@ RSpec.describe Integrations::BaseChatNotification do
let_it_be(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:user) { build_stubbed(:user) }
let(:webhook_url) { 'https://example.gitlab.com/' }
let(:data) { Gitlab::DataBuilder::Push.build_sample(subject.project, user) }
@ -44,7 +44,7 @@ RSpec.describe Integrations::BaseChatNotification do
context 'with an empty repository' do
it 'returns true' do
subject.project = create(:project, :empty_repo)
subject.project = build_stubbed(:project, :empty_repo)
expect(chat_integration).to receive(:notify).and_return(true)
expect(chat_integration.execute(data)).to be true
@ -61,9 +61,9 @@ RSpec.describe Integrations::BaseChatNotification do
end
context 'when the data object has a label' do
let_it_be(:label) { create(:label, name: 'Bug') }
let_it_be(:label_2) { create(:label, name: 'Community contribution') }
let_it_be(:label_3) { create(:label, name: 'Backend') }
let_it_be(:label) { create(:label, project: project, name: 'Bug') }
let_it_be(:label_2) { create(:label, project: project, name: 'Community contribution') }
let_it_be(:label_3) { create(:label, project: project, name: 'Backend') }
let_it_be(:issue) { create(:labeled_issue, project: project, labels: [label, label_2, label_3]) }
let_it_be(:note) { create(:note, noteable: issue, project: project) }
@ -93,7 +93,7 @@ RSpec.describe Integrations::BaseChatNotification do
it_behaves_like 'notifies the chat integration'
context 'MergeRequest events' do
let(:data) { create(:merge_request, labels: [label]).to_hook_data(user) }
let(:data) { build_stubbed(:merge_request, source_project: project, labels: [label]).to_hook_data(user) }
it_behaves_like 'notifies the chat integration'
end

View File

@ -6,7 +6,7 @@ RSpec.describe Integrations::Buildkite, :use_clean_rails_memory_store_caching do
include ReactiveCachingHelpers
include StubRequests
let(:project) { create(:project) }
let_it_be(:project) { create(:project) }
subject(:integration) do
described_class.create!(

View File

@ -34,8 +34,8 @@ RSpec.describe Integrations::Campfire do
end
describe "#execute" do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:user) { build_stubbed(:user) }
let(:project) { build_stubbed(:project, :repository) }
before do
@campfire_integration = described_class.new

View File

@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Integrations::Confluence do
let_it_be(:project) { create(:project) }
describe 'Validations' do
before do
subject.active = active
@ -40,7 +42,6 @@ RSpec.describe Integrations::Confluence do
describe '#help' do
it 'can correctly return a link to the project wiki when active' do
project = create(:project)
subject.project = project
subject.active = true
@ -62,8 +63,6 @@ RSpec.describe Integrations::Confluence do
end
describe 'Caching has_confluence on project_settings' do
let(:project) { create(:project) }
subject { project.project_setting.has_confluence? }
it 'sets the property to true when integration is active' do

View File

@ -25,7 +25,7 @@ RSpec.describe Integrations::Discord do
let_it_be(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:user) { build_stubbed(:user) }
let(:webhook_url) { "https://example.gitlab.com/" }
let(:sample_data) do
Gitlab::DataBuilder::Push.build_sample(project, user)

View File

@ -7,6 +7,8 @@ RSpec.describe Integrations::DroneCi, :use_clean_rails_memory_store_caching do
subject(:integration) { described_class.new }
let_it_be(:project) { create(:project, :repository, name: 'project') }
it_behaves_like Integrations::ResetSecretFields do
let(:integration) { subject }
end
@ -43,7 +45,6 @@ RSpec.describe Integrations::DroneCi, :use_clean_rails_memory_store_caching do
)
end
let(:project) { create(:project, :repository, name: 'project') }
let(:path) { project.full_path }
let(:drone_url) { 'http://drone.example.com' }
let(:sha) { '2ab7834c' }
@ -192,7 +193,7 @@ RSpec.describe Integrations::DroneCi, :use_clean_rails_memory_store_caching do
describe "execute" do
include_context :drone_ci_integration
let(:user) { create(:user, username: 'username') }
let(:user) { build(:user, username: 'username') }
let(:push_sample_data) do
Gitlab::DataBuilder::Push.build_sample(project, user)
end

Some files were not shown because too many files have changed in this diff Show More