Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-08-11 15:11:19 +00:00
parent 30f9120ba6
commit c79523e395
93 changed files with 1168 additions and 338 deletions

View File

@ -0,0 +1,29 @@
---
Gitlab/DeprecateTrackRedisHLLEvent:
Exclude:
- 'app/controllers/admin/dev_ops_report_controller.rb'
- 'app/controllers/admin/usage_trends_controller.rb'
- 'app/controllers/concerns/snippets_actions.rb'
- 'app/controllers/concerns/wiki_actions.rb'
- 'app/controllers/projects/blob_controller.rb'
- 'app/controllers/projects/cycle_analytics_controller.rb'
- 'app/controllers/projects/graphs_controller.rb'
- 'app/controllers/projects/pipelines_controller.rb'
- 'ee/app/controllers/admin/audit_logs_controller.rb'
- 'ee/app/controllers/admin/credentials_controller.rb'
- 'ee/app/controllers/ee/admin/dev_ops_report_controller.rb'
- 'ee/app/controllers/groups/analytics/ci_cd_analytics_controller.rb'
- 'ee/app/controllers/groups/analytics/devops_adoption_controller.rb'
- 'ee/app/controllers/groups/analytics/productivity_analytics_controller.rb'
- 'ee/app/controllers/groups/audit_events_controller.rb'
- 'ee/app/controllers/groups/contribution_analytics_controller.rb'
- 'ee/app/controllers/groups/epic_boards_controller.rb'
- 'ee/app/controllers/groups/insights_controller.rb'
- 'ee/app/controllers/groups/issues_analytics_controller.rb'
- 'ee/app/controllers/groups/security/compliance_dashboards_controller.rb'
- 'ee/app/controllers/projects/analytics/code_reviews_controller.rb'
- 'ee/app/controllers/projects/analytics/issues_analytics_controller.rb'
- 'ee/app/controllers/projects/analytics/merge_request_analytics_controller.rb'
- 'ee/app/controllers/projects/insights_controller.rb'
- 'ee/app/controllers/projects/integrations/jira/issues_controller.rb'
- 'spec/controllers/concerns/redis_tracking_spec.rb'

View File

@ -1 +1 @@
f3a6e61b3ca421866ee029c3f29c23fcea08a783
f20b8259797f3a1f3b59142c9789af97fcb0f69f

View File

@ -208,7 +208,7 @@ export default {
<p v-if="hasError" class="gl-field-error">
{{ addRelatedErrorMessage }}
</p>
<div class="gl-mt-5 gl-clearfix">
<div class="gl-mt-5">
<gl-button
ref="addButton"
category="primary"
@ -216,12 +216,13 @@ export default {
:disabled="isSubmitButtonDisabled"
:loading="isSubmitting"
type="submit"
class="gl-float-left"
size="small"
class="gl-mr-2"
data-qa-selector="add_issue_button"
>
{{ __('Add') }}
</gl-button>
<gl-button class="gl-float-right" @click="onFormCancel">
<gl-button size="small" @click="onFormCancel">
{{ __('Cancel') }}
</gl-button>
</div>

View File

@ -152,6 +152,9 @@ export default {
isChecked,
});
},
onPaginationInput(value) {
this.search.pagination = value;
},
},
filteredSearchNamespace: ADMIN_FILTERED_SEARCH_NAMESPACE,
INSTANCE_TYPE,
@ -217,11 +220,13 @@ export default {
/>
</template>
</runner-list>
<runner-pagination
v-model="search.pagination"
class="gl-mt-3"
:page-info="runners.pageInfo"
/>
</template>
<runner-pagination
class="gl-mt-3"
:disabled="runnersLoading"
:page-info="runners.pageInfo"
@input="onPaginationInput"
/>
</div>
</template>

View File

@ -64,19 +64,19 @@ export default {
},
methods: {
onFilter(filters) {
// Apply new filters, from page 1
// Apply new filters, resetting pagination
this.$emit('input', {
...this.value,
filters,
pagination: { page: 1 },
pagination: {},
});
},
onSort(sort) {
// Apply new sort, from page 1
// Apply new sort, resetting pagination
this.$emit('input', {
...this.value,
sort,
pagination: { page: 1 },
pagination: {},
});
},
},

View File

@ -27,9 +27,7 @@ export default {
items: [],
pageInfo: {},
},
pagination: {
page: 1,
},
pagination: {},
};
},
apollo: {
@ -62,6 +60,11 @@ export default {
return this.$apollo.queries.jobs.loading;
},
},
methods: {
onPaginationInput(value) {
this.pagination = value;
},
},
I18N_NO_JOBS_FOUND,
};
</script>
@ -74,6 +77,6 @@ export default {
<runner-jobs-table v-else-if="jobs.items.length" :jobs="jobs.items" />
<p v-else>{{ $options.I18N_NO_JOBS_FOUND }}</p>
<runner-pagination v-model="pagination" :disabled="loading" :page-info="jobs.pageInfo" />
<runner-pagination :disabled="loading" :page-info="jobs.pageInfo" @input="onPaginationInput" />
</div>
</template>

View File

@ -1,18 +1,12 @@
<script>
import { GlPagination } from '@gitlab/ui';
import { GlKeysetPagination } from '@gitlab/ui';
export default {
components: {
GlPagination,
GlKeysetPagination,
},
inheritAttrs: false,
props: {
value: {
required: false,
type: Object,
default: () => ({
page: 1,
}),
},
pageInfo: {
required: false,
type: Object,
@ -20,46 +14,37 @@ export default {
},
},
computed: {
prevPage() {
return this.pageInfo?.hasPreviousPage ? this.value.page - 1 : null;
paginationProps() {
return { ...this.pageInfo, ...this.$attrs };
},
nextPage() {
return this.pageInfo?.hasNextPage ? this.value.page + 1 : null;
isShown() {
const { hasPreviousPage, hasNextPage } = this.pageInfo;
return hasPreviousPage || hasNextPage;
},
},
methods: {
handlePageChange(page) {
if (page === 1) {
// Small optimization for first page
// If we have loaded using "first",
// page is already cached.
this.$emit('input', {
page,
});
} else if (page > this.value.page) {
this.$emit('input', {
page,
after: this.pageInfo.endCursor,
});
} else {
this.$emit('input', {
page,
before: this.pageInfo.startCursor,
});
}
prevPage() {
this.$emit('input', {
before: this.pageInfo.startCursor,
});
},
nextPage() {
this.$emit('input', {
after: this.pageInfo.endCursor,
});
},
},
};
</script>
<template>
<gl-pagination
v-bind="$attrs"
:value="value.page"
:prev-page="prevPage"
:next-page="nextPage"
align="center"
class="gl-pagination"
@input="handlePageChange"
/>
<div v-if="isShown" class="gl-text-center">
<gl-keyset-pagination
v-bind="paginationProps"
:prev-text="s__('Pagination|Prev')"
:next-text="s__('Pagination|Next')"
@prev="prevPage"
@next="nextPage"
/>
</div>
</template>

View File

@ -35,9 +35,7 @@ export default {
pageInfo: {},
count: 0,
},
pagination: {
page: 1,
},
pagination: {},
};
},
apollo: {
@ -82,6 +80,9 @@ export default {
isOwner(projectId) {
return projectId === this.projects.ownerProjectId;
},
onPaginationInput(value) {
this.pagination = value;
},
},
I18N_NONE,
};
@ -111,6 +112,10 @@ export default {
</template>
<span v-else class="gl-text-gray-500">{{ $options.I18N_NONE }}</span>
<runner-pagination v-model="pagination" :disabled="loading" :page-info="projects.pageInfo" />
<runner-pagination
:disabled="loading"
:page-info="projects.pageInfo"
@input="onPaginationInput"
/>
</div>
</template>

View File

@ -102,7 +102,6 @@ export const PARAM_KEY_TAG = 'tag';
export const PARAM_KEY_SEARCH = 'search';
export const PARAM_KEY_SORT = 'sort';
export const PARAM_KEY_PAGE = 'page';
export const PARAM_KEY_AFTER = 'after';
export const PARAM_KEY_BEFORE = 'before';

View File

@ -11,7 +11,6 @@ fragment GroupRunnerConnection on CiRunnerConnection {
}
}
pageInfo {
__typename
...PageInfo
}
}

View File

@ -167,6 +167,9 @@ export default {
reportToSentry(error) {
captureException({ error, component: this.$options.name });
},
onPaginationInput(value) {
this.search.pagination = value;
},
},
TABS_RUNNER_TYPES: [GROUP_TYPE, PROJECT_TYPE],
GROUP_TYPE,
@ -226,11 +229,13 @@ export default {
/>
</template>
</runner-list>
<runner-pagination
v-model="search.pagination"
class="gl-mt-3"
:page-info="runners.pageInfo"
/>
</template>
<runner-pagination
class="gl-mt-3"
:disabled="runnersLoading"
:page-info="runners.pageInfo"
@input="onPaginationInput"
/>
</div>
</template>

View File

@ -1,3 +1,4 @@
import { isEmpty } from 'lodash';
import { queryToObject, setUrlParams } from '~/lib/utils/url_utility';
import {
filterToQueryObject,
@ -13,7 +14,6 @@ import {
PARAM_KEY_TAG,
PARAM_KEY_SEARCH,
PARAM_KEY_SORT,
PARAM_KEY_PAGE,
PARAM_KEY_AFTER,
PARAM_KEY_BEFORE,
DEFAULT_SORT,
@ -41,7 +41,7 @@ import { getPaginationVariables } from './utils';
* sort: 'CREATED_DESC',
*
* // Pagination information
* pagination: { page: 1 },
* pagination: { "after": "..." },
* };
* ```
*
@ -66,25 +66,16 @@ export const searchValidator = ({ runnerType, filters, sort }) => {
};
const getPaginationFromParams = (params) => {
const page = parseInt(params[PARAM_KEY_PAGE], 10);
const after = params[PARAM_KEY_AFTER];
const before = params[PARAM_KEY_BEFORE];
if (page && (before || after)) {
return {
page,
before,
after,
};
}
return {
page: 1,
after: params[PARAM_KEY_AFTER],
before: params[PARAM_KEY_BEFORE],
};
};
// Outdated URL parameters
const STATUS_ACTIVE = 'ACTIVE';
const STATUS_PAUSED = 'PAUSED';
const PARAM_KEY_PAGE = 'page';
/**
* Replaces params into a URL
@ -97,6 +88,21 @@ const updateUrlParams = (url, params = {}) => {
return setUrlParams(params, url, false, true, true);
};
const outdatedStatusParams = (status) => {
if (status === STATUS_ACTIVE) {
return {
[PARAM_KEY_PAUSED]: ['false'],
[PARAM_KEY_STATUS]: [], // Important! clear PARAM_KEY_STATUS to avoid a redirection loop!
};
} else if (status === STATUS_PAUSED) {
return {
[PARAM_KEY_PAUSED]: ['true'],
[PARAM_KEY_STATUS]: [], // Important! clear PARAM_KEY_STATUS to avoid a redirection loop!
};
}
return {};
};
/**
* Returns an updated URL for old (or deprecated) admin runner URLs.
*
@ -108,25 +114,22 @@ const updateUrlParams = (url, params = {}) => {
export const updateOutdatedUrl = (url = window.location.href) => {
const urlObj = new URL(url);
const query = urlObj.search;
const params = queryToObject(query, { gatherArrays: true });
const status = params[PARAM_KEY_STATUS]?.[0] || null;
// Remove `page` completely, not needed for keyset pagination
const pageParams = PARAM_KEY_PAGE in params ? { [PARAM_KEY_PAGE]: null } : {};
switch (status) {
case STATUS_ACTIVE:
return updateUrlParams(url, {
[PARAM_KEY_PAUSED]: ['false'],
[PARAM_KEY_STATUS]: [], // Important! clear PARAM_KEY_STATUS to avoid a redirection loop!
});
case STATUS_PAUSED:
return updateUrlParams(url, {
[PARAM_KEY_PAUSED]: ['true'],
[PARAM_KEY_STATUS]: [], // Important! clear PARAM_KEY_STATUS to avoid a redirection loop!
});
default:
return null;
const status = params[PARAM_KEY_STATUS]?.[0];
const redirectParams = {
// Replace paused status (active, paused) with a paused flag
...outdatedStatusParams(status),
...pageParams,
};
if (!isEmpty(redirectParams)) {
return updateUrlParams(url, redirectParams);
}
return null;
};
/**
@ -182,13 +185,11 @@ export const fromSearchToUrl = (
}
const isDefaultSort = sort !== DEFAULT_SORT;
const isFirstPage = pagination?.page === 1;
const otherParams = {
// Sorting & Pagination
[PARAM_KEY_SORT]: isDefaultSort ? sort : null,
[PARAM_KEY_PAGE]: isFirstPage ? null : pagination.page,
[PARAM_KEY_BEFORE]: isFirstPage ? null : pagination.before,
[PARAM_KEY_AFTER]: isFirstPage ? null : pagination.after,
[PARAM_KEY_BEFORE]: pagination?.before || null,
[PARAM_KEY_AFTER]: pagination?.after || null,
};
return setUrlParams({ ...filterParams, ...otherParams }, url, false, true, true);
@ -247,6 +248,6 @@ export const fromSearchToVariables = ({
*/
export const isSearchFiltered = ({ runnerType = null, filters = [], pagination = {} } = {}) => {
return Boolean(
runnerType !== null || filters?.length !== 0 || (pagination && pagination?.page !== 1),
runnerType !== null || filters?.length !== 0 || pagination?.before || pagination?.after,
);
};

View File

@ -215,6 +215,7 @@ export default {
type="submit"
:disabled="search.length === 0"
data-testid="add-child-button"
class="gl-mr-2"
>
{{ $options.i18n.createChildOptionLabel }}
</gl-button>

View File

@ -31,7 +31,8 @@
width: 100%;
padding-left: 10px;
padding-right: 10px;
white-space: pre;
white-space: break-spaces;
word-break: break-word;
&:empty::before {
content: '\200b';

View File

@ -342,10 +342,10 @@ $comparison-empty-state-height: 62px;
.mr-compare {
.diff-file .file-title-flex-parent {
top: calc(#{$header-height} + #{$mr-tabs-height} + 36px);
top: calc(#{$header-height} + #{$mr-tabs-height});
.with-performance-bar & {
top: calc(#{$performance-bar-height} + #{$header-height} + #{$mr-tabs-height} + 36px);
top: calc(#{$performance-bar-height} + #{$header-height} + #{$mr-tabs-height});
}
}
}

View File

@ -19,6 +19,7 @@ module Ci
before_create :set_build_project
validates :build, presence: true
validates :id_tokens, json_schema: { filename: 'build_metadata_id_tokens' }
validates :secrets, json_schema: { filename: 'build_metadata_secrets' }
serialize :config_options, Serializers::SymbolizedJson # rubocop:disable Cop/ActiveRecordSerialize

View File

@ -11,6 +11,7 @@ module Ci
NotSupportedAdapterError = Class.new(StandardError)
FILE_FORMAT_ADAPTERS = {
gzip: Gitlab::Ci::Build::Artifacts::Adapters::GzipStream,
zip: Gitlab::Ci::Build::Artifacts::Adapters::ZipStream,
raw: Gitlab::Ci::Build::Artifacts::Adapters::RawStream
}.freeze

View File

@ -20,6 +20,8 @@ module Ci
delegate :interruptible, to: :metadata, prefix: false, allow_nil: true
delegate :environment_auto_stop_in, to: :metadata, prefix: false, allow_nil: true
delegate :set_cancel_gracefully, to: :metadata, prefix: false, allow_nil: false
delegate :id_tokens, to: :metadata, allow_nil: true
before_create :ensure_metadata
end
@ -77,6 +79,14 @@ module Ci
ensure_metadata.interruptible = value
end
def id_tokens?
!!metadata&.id_tokens?
end
def id_tokens=(value)
ensure_metadata.id_tokens = value
end
private
def read_metadata_attribute(legacy_key, metadata_key, default_value = nil)

View File

@ -32,7 +32,7 @@ module Projects
attr_reader :project, :payload, :integration
def valid_payload_size?
Gitlab::Utils::DeepSize.new(payload).valid?
Gitlab::Utils::DeepSize.new(payload.to_h).valid?
end
override :alert_source

View File

@ -56,7 +56,7 @@ module Projects
attr_reader :project, :payload
def valid_payload_size?
Gitlab::Utils::DeepSize.new(payload).valid?
Gitlab::Utils::DeepSize.new(payload.to_h).valid?
end
def max_alerts_exceeded?

View File

@ -0,0 +1,22 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "CI builds metadata ID tokens",
"type": "object",
"patternProperties": {
".*": {
"type": "object",
"patternProperties": {
"^id_token$": {
"type": "object",
"required": ["aud"],
"properties": {
"aud": { "type": "string" },
"field": { "type": "string" }
},
"additionalProperties": false
}
},
"additionalProperties": false
}
}
}

View File

@ -1,8 +0,0 @@
---
name: hard_failure_for_mirrors_without_license
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/92422
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/367851
milestone: '15.2'
type: development
group: group::source code
default_enabled: false

View File

@ -104,6 +104,12 @@
- 'i_code_review_merge_request_widget_terraform_expand_success'
- 'i_code_review_merge_request_widget_terraform_expand_warning'
- 'i_code_review_merge_request_widget_terraform_expand_failed'
- 'i_code_review_merge_request_widget_metrics_view'
- 'i_code_review_merge_request_widget_metrics_full_report_clicked'
- 'i_code_review_merge_request_widget_metrics_expand'
- 'i_code_review_merge_request_widget_metrics_expand_success'
- 'i_code_review_merge_request_widget_metrics_expand_warning'
- 'i_code_review_merge_request_widget_metrics_expand_failed'
- name: code_review_category_monthly_active_users
operator: OR
source: redis
@ -196,6 +202,12 @@
- 'i_code_review_merge_request_widget_terraform_expand_success'
- 'i_code_review_merge_request_widget_terraform_expand_warning'
- 'i_code_review_merge_request_widget_terraform_expand_failed'
- 'i_code_review_merge_request_widget_metrics_view'
- 'i_code_review_merge_request_widget_metrics_full_report_clicked'
- 'i_code_review_merge_request_widget_metrics_expand'
- 'i_code_review_merge_request_widget_metrics_expand_success'
- 'i_code_review_merge_request_widget_metrics_expand_warning'
- 'i_code_review_merge_request_widget_metrics_expand_failed'
- name: code_review_extension_category_monthly_active_users
operator: OR
source: redis

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
# Adds a JSONB `id_tokens` column to `ci_builds_metadata` to store
# per-build ID token configuration (validated elsewhere against the
# `build_metadata_id_tokens` JSON schema).
class AddIdTokenToCiBuildsMetadata < Gitlab::Database::Migration[2.0]
# Retry on lock timeouts instead of failing the whole migration.
enable_lock_retries!
def change
# NOT NULL with an empty-object default so readers never see nil.
add_column :ci_builds_metadata, :id_tokens, :jsonb, null: false, default: {}
end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
# Enqueues the `BackfillProjectImportLevel` batched background migration
# over the `namespaces` table (the job itself copies each group's
# `project_creation_level` into `namespace_settings.project_import_level`).
class BackfillProjectImportLevel < Gitlab::Database::Migration[2.0]
disable_ddl_transaction!
# This batched migration only touches tables in the main database schema.
restrict_gitlab_migration gitlab_schema: :gitlab_main
MIGRATION = 'BackfillProjectImportLevel'
# Delay between batches.
INTERVAL = 120.seconds
def up
queue_batched_background_migration(
MIGRATION,
:namespaces,
:id,
job_interval: INTERVAL
)
end
def down
# Remove any queued/pending batches of this migration on rollback.
delete_batched_background_migration(MIGRATION, :namespaces, :id, [])
end
end

View File

@ -0,0 +1 @@
76f4adebfb71dcd51f861097ba441ae5ee3f62eeb2060f147730d4e6c6006402

View File

@ -0,0 +1 @@
ab8dfd7549b2b61a5cf9d5b46935ec534ea77ec2025fdb58d03f654d81c8f6ee

View File

@ -12602,7 +12602,8 @@ CREATE TABLE ci_builds_metadata (
secrets jsonb DEFAULT '{}'::jsonb NOT NULL,
build_id bigint NOT NULL,
id bigint NOT NULL,
runtime_runner_features jsonb DEFAULT '{}'::jsonb NOT NULL
runtime_runner_features jsonb DEFAULT '{}'::jsonb NOT NULL,
id_tokens jsonb DEFAULT '{}'::jsonb NOT NULL
);
CREATE SEQUENCE ci_builds_metadata_id_seq

View File

@ -1418,6 +1418,51 @@ DELETE /groups/:id/ldap_group_links
NOTE:
To delete the LDAP group link, provide either a `cn` or a `filter`, but not both.
## SAML Group Links **(PREMIUM)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/290367) in GitLab 15.3.
List, add, and delete SAML group links.
### List SAML group links
Lists SAML group links.
```plaintext
GET /groups/:id/saml_group_links
```
| Attribute | Type | Required | Description |
| --------- | -------------- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](index.md#namespaced-path-encoding) |
### Add SAML group link
Adds a SAML group link for a group.
```plaintext
POST /groups/:id/saml_group_links
```
| Attribute | Type | Required | Description |
| --------- | -------------- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](index.md#namespaced-path-encoding) |
| `saml_group_name` | string | yes | The name of a SAML group |
| `access_level` | string | yes | Minimum [access level](members.md#valid-access-levels) for members of the SAML group |
### Delete SAML group link
Deletes a SAML group link for the group.
```plaintext
DELETE /groups/:id/saml_group_links/:saml_group_name
```
| Attribute | Type | Required | Description |
| --------- | -------------- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](index.md#namespaced-path-encoding) |
| `saml_group_name` | string | yes | The name of a SAML group |
## Namespaces in groups
By default, groups only get 20 namespaces at a time because the API results are paginated.

View File

@ -825,6 +825,66 @@ Parameters:
]
```
## Get single MR reviewers
Get a list of merge request reviewers.
```plaintext
GET /projects/:id/merge_requests/:merge_request_iid/reviewers
```
Parameters:
| Attribute | Type | Required | Description |
|---------------------|----------------|----------|-----------------------------------------------------------------------------------------------------------------|
| `id` | integer or string | yes | The ID or [URL-encoded path of the project](index.md#namespaced-path-encoding) owned by the authenticated user. |
| `merge_request_iid` | integer | yes | The internal ID of the merge request. |
```json
[
{
"user": {
"id": 1,
"name": "John Doe1",
"username": "user1",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/c922747a93b40d1ea88262bf1aebee62?s=80&d=identicon",
"web_url": "http://localhost/user1"
},
"updated_state_by": {
"id": 1,
"name": "John Doe1",
"username": "user1",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/c922747a93b40d1ea88262bf1aebee62?s=80&d=identicon",
"web_url": "http://localhost/user1"
},
"state": "unreviewed",
"created_at": "2022-07-27T17:03:27.684Z"
},
{
"user": {
"id": 2,
"name": "John Doe2",
"username": "user2",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/10fc7f102be8de7657fb4d80898bbfe3?s=80&d=identicon",
"web_url": "http://localhost/user2"
},
"updated_state_by": {
"id": 1,
"name": "John Doe1",
"username": "user1",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/c922747a93b40d1ea88262bf1aebee62?s=80&d=identicon",
"web_url": "http://localhost/user1"
},
"state": "reviewed",
"created_at": "2022-07-27T17:03:27.684Z"
}
]
```
## Get single MR commits
Get a list of merge request commits.

View File

@ -791,45 +791,15 @@ section of GitLab.
### Links to external documentation
When describing interactions with external software, it's often helpful to
include links to external documentation. When possible, make sure that you're
linking to an [**authoritative** source](#authoritative-sources). For example,
if you're describing a feature in Microsoft's Active Directory, include a link
to official Microsoft documentation.
When possible, avoid links to external documentation. These links can easily become outdated, and are difficult to maintain.
### Authoritative sources
- [They lead to link rot](https://en.wikipedia.org/wiki/Link_rot).
- [They create issues with maintenance](https://gitlab.com/gitlab-org/gitlab/-/issues/368300).
When citing external information, use sources that are written by the people who
created the item or product in question. These sources are the most likely to be
accurate and remain up to date.
Sometimes links are required. They might clarify troubleshooting steps or help prevent duplication of content.
Sometimes they are more precise and will be maintained more actively.
Examples of authoritative sources include:
- Specifications, such as a [Request for Comments](https://www.ietf.org/standards/rfcs/)
document from the Internet Engineering Task Force.
- Official documentation for a product. For example, if you're setting up an
interface with the Google OAuth 2 authorization server, include a link to
Google's documentation.
- Official documentation for a project. For example, if you're citing NodeJS
functionality, refer directly to [NodeJS documentation](https://nodejs.org/en/docs/).
- Books from an authoritative publisher.
Examples of sources to avoid include:
- Personal blog posts.
- Wikipedia.
- Non-trustworthy articles.
- Discussions on forums such as Stack Overflow.
- Documentation from a company that describes another company's product.
While many of these sources to avoid can help you learn skills and or features,
they can become obsolete quickly. Nobody is obliged to maintain any of these
sites. Therefore, we should avoid using them as reference literature.
NOTE:
Non-authoritative sources are acceptable only if there is no equivalent
authoritative source. Even then, focus on non-authoritative sources that are
extensively cited or peer-reviewed.
For each external link you add, weigh the customer benefit with the maintenance difficulties.
### Links requiring permissions

View File

@ -371,14 +371,15 @@ Implemented using Redis methods [PFADD](https://redis.io/commands/pfadd/) and [P
- In the controller using the `RedisTracking` module and the following format:
```ruby
track_redis_hll_event(*controller_actions, name:, if: nil, &block)
track_event(*controller_actions, name:, conditions: nil, destinations: [:redis_hll], &block)
```
Arguments:
- `controller_actions`: the controller actions to track.
- `name`: the event name.
- `if`: optional custom conditions. Uses the same format as Rails callbacks.
- `conditions`: optional custom conditions. Uses the same format as Rails callbacks.
- `destinations`: optional list of destinations. Currently supports `:redis_hll` and `:snowplow`. Default: [:redis_hll].
- `&block`: optional block that computes and returns the `custom_id` that we want to track. This overrides the `visitor_id`.
Example:
@ -389,7 +390,7 @@ Implemented using Redis methods [PFADD](https://redis.io/commands/pfadd/) and [P
include RedisTracking
skip_before_action :authenticate_user!, only: :show
track_redis_hll_event :index, :show, name: 'users_visiting_projects'
track_event :index, :show, name: 'users_visiting_projects'
def index
render html: 'index'

View File

@ -161,3 +161,9 @@ graph TB
GitLabGroupD --> |Member|GitLabUserC
GitLabGroupD --> |Member|GitLabUserD
```
### Use the API
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/290367) in GitLab 15.3.
You can use the GitLab API to [list, add, and delete](../../../api/groups.md#saml-group-links) SAML group links.

View File

@ -555,6 +555,12 @@ this setting. However, disabling the Container Registry disables all Container R
## Troubleshooting the GitLab Container Registry
## Migrating OCI container images to GitLab Container Registry
Migrating built container images to the GitLab registry is not a current feature. However, an [epic](https://gitlab.com/groups/gitlab-org/-/epics/5210) is open to track the work on this feature.
Some third-party tools can help migrate container images, for example, [skopeo](https://github.com/containers/skopeo), which can [copy container images](https://github.com/containers/skopeo#copying-images) between various storage mechanisms. You can use skopeo to copy from container registries, container storage backends, local directories, and local OCI-layout directories to the GitLab Container Registry.
### Docker connection error
A Docker connection error can occur when there are special characters in either the group,

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true
module API
module Entities
# Serializes a merge request reviewer for the REST API endpoint
# `GET /projects/:id/merge_requests/:merge_request_iid/reviewers`.
class MergeRequestReviewer < Grape::Entity
# The reviewer, exposed under the `user` key.
expose :reviewer, as: :user, using: Entities::UserBasic
# The user who last updated the review state (may be absent).
expose :updated_state_by, using: Entities::UserBasic
# Review state, e.g. "unreviewed" or "reviewed".
expose :state
expose :created_at
end
end
end

View File

@ -289,6 +289,17 @@ module API
present paginate(participants), with: Entities::UserBasic
end
desc 'Get the reviewers of a merge request' do
success Entities::MergeRequestReviewer
end
get ':id/merge_requests/:merge_request_iid/reviewers', feature_category: :code_review, urgency: :low do
merge_request = find_merge_request_with_access(params[:merge_request_iid])
reviewers = ::Kaminari.paginate_array(merge_request.merge_request_reviewers)
present paginate(reviewers), with: Entities::MergeRequestReviewer
end
desc 'Get the commits of a merge request' do
success Entities::Commit
end

View File

@ -0,0 +1,35 @@
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
  module BackgroundMigration
    # Backfills `namespace_settings.project_import_level` for group
    # namespaces based on each group's `project_creation_level` value.
    class BackfillProjectImportLevel < BatchedMigrationJob
      # Maps each target import level to the `project_creation_level`
      # values (as stored on `namespaces`) that it replaces.
      LEVEL = {
        Gitlab::Access::NO_ACCESS => [0],
        Gitlab::Access::DEVELOPER => [2],
        Gitlab::Access::MAINTAINER => [1],
        Gitlab::Access::OWNER => [nil]
      }.freeze

      def perform
        each_sub_batch(operation_name: :update_import_level) do |sub_batch|
          update_import_level(sub_batch)
        end
      end

      private

      # For every level mapping, bulk-updates the settings of the group
      # namespaces in the sub-batch whose creation level matches.
      def update_import_level(sub_batch)
        LEVEL.each_pair do |import_level, creation_levels|
          group_ids = sub_batch.where(type: 'Group', project_creation_level: creation_levels)

          NamespaceSetting
            .where(namespace_id: group_ids)
            .update_all(project_import_level: import_level)
        end
      end
    end
  end
end
# rubocop:enable Style/Documentation

View File

@ -0,0 +1,61 @@
# frozen_string_literal: true
module Gitlab
  module Ci
    module Build
      module Artifacts
        module Adapters
          # Streams decompressed file contents out of a zip artifact.
          # Safety limits cap both the number of entries yielded and the
          # maximum decompressed size of any single entry.
          class ZipStream
            MAX_DECOMPRESSED_SIZE = 100.megabytes
            MAX_FILES_PROCESSED = 50

            attr_reader :stream

            InvalidStreamError = Class.new(StandardError)

            # @param stream [IO] readable stream positioned at zip data
            # @raise [InvalidStreamError] when no stream is given
            def initialize(stream)
              raise InvalidStreamError, "Stream is required" unless stream

              @stream = stream
              @files_processed = 0
            end

            # Yields the decompressed content of each eligible file entry,
            # stopping once MAX_FILES_PROCESSED entries have been yielded.
            def each_blob
              Zip::InputStream.open(stream) do |input|
                loop do
                  zip_entry = input.get_next_entry
                  break if zip_entry.nil? || at_files_processed_limit?
                  next unless should_process?(zip_entry)

                  @files_processed += 1
                  yield zip_entry.get_input_stream.read
                end
              end
            end

            private

            # Only regular files under the size limit are yielded.
            def should_process?(zip_entry)
              file?(zip_entry) && !too_large?(zip_entry)
            end

            def file?(zip_entry)
              # Check the file name as a workaround for incorrect
              # file type detection when using InputStream
              # https://github.com/rubyzip/rubyzip/issues/533
              zip_entry.file? && !zip_entry.name.end_with?('/')
            end

            def too_large?(zip_entry)
              zip_entry.size > MAX_DECOMPRESSED_SIZE
            end

            def at_files_processed_limit?
              @files_processed >= MAX_FILES_PROCESSED
            end
          end
        end
      end
    end
  end
end

View File

@ -43,6 +43,7 @@ module Gitlab
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectPathChangedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectArchivedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectTransferedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Groups::GroupTransferedEvent
store.subscribe ::MergeRequests::CreateApprovalEventWorker, to: ::MergeRequests::ApprovedEvent
store.subscribe ::MergeRequests::CreateApprovalNoteWorker, to: ::MergeRequests::ApprovedEvent

View File

@ -8,7 +8,7 @@ module Gitlab
@template.render Pajamas::ButtonComponent.new(
variant: :confirm,
type: :submit,
button_options: options.except!(:pajamas_button)
button_options: options.except(:pajamas_button)
) do
value
end

View File

@ -126,12 +126,16 @@ module Gitlab
end
end
# When an assignee did not exist in the members mapper, the importer is
# assigned. We only need to assign each user once.
def remove_duplicate_assignees
return unless @relation_hash['issue_assignees']
if @relation_hash['issue_assignees']
@relation_hash['issue_assignees'].uniq!(&:user_id)
end
# When an assignee did not exist in the members mapper, the importer is
# assigned. We only need to assign each user once.
@relation_hash['issue_assignees'].uniq!(&:user_id)
if @relation_hash['merge_request_assignees']
@relation_hash['merge_request_assignees'].uniq!(&:user_id)
end
end
def generate_imported_object

View File

@ -15,7 +15,7 @@ module Gitlab
include Gitlab::Utils::StrongMemoize
BATCH_SIZE = 100
MIN_RECORDS_SIZE = 5
MIN_RECORDS_SIZE = 1
# @param relation_object [Object] Object of a project/group, e.g. an issue
# @param relation_key [String] Name of the object association to group/project, e.g. :issues

View File

@ -55,6 +55,7 @@ tree:
- merge_requests:
- :metrics
- :award_emoji
- :merge_request_assignees
- notes:
- :author
- :award_emoji
@ -329,7 +330,6 @@ included_attributes:
- :source_branch
- :source_project_id
- :author_id
- :assignee_id
- :title
- :created_at
- :updated_at
@ -588,6 +588,10 @@ included_attributes:
- :author_id
issue_assignees:
- :user_id
merge_request_assignees:
- :user_id
- :created_at
- :state
sentry_issue:
- :sentry_issue_identifier
zoom_meetings:

View File

@ -400,3 +400,28 @@
redis_slot: code_review
category: code_review
aggregation: weekly
## Metrics
- name: i_code_review_merge_request_widget_metrics_view
redis_slot: code_review
category: code_review
aggregation: weekly
- name: i_code_review_merge_request_widget_metrics_full_report_clicked
redis_slot: code_review
category: code_review
aggregation: weekly
- name: i_code_review_merge_request_widget_metrics_expand
redis_slot: code_review
category: code_review
aggregation: weekly
- name: i_code_review_merge_request_widget_metrics_expand_success
redis_slot: code_review
category: code_review
aggregation: weekly
- name: i_code_review_merge_request_widget_metrics_expand_warning
redis_slot: code_review
category: code_review
aggregation: weekly
- name: i_code_review_merge_request_widget_metrics_expand_failed
redis_slot: code_review
category: code_review
aggregation: weekly

View File

@ -5,7 +5,7 @@ module Gitlab
class MergeRequestWidgetExtensionCounter < BaseCounter
KNOWN_EVENTS = %w[view full_report_clicked expand expand_success expand_warning expand_failed].freeze
PREFIX = 'i_code_review_merge_request_widget'
WIDGETS = %w[accessibility code_quality terraform test_summary].freeze
WIDGETS = %w[accessibility code_quality terraform test_summary metrics].freeze
class << self
private

View File

@ -31741,18 +31741,36 @@ msgstr ""
msgid "ProtectedEnvironment|%{environment_name} will be writable for developers. Are you sure?"
msgstr ""
msgid "ProtectedEnvironment|All environments specified with the deployment tiers below are protected by a parent group. %{link_start}Learn More%{link_end}."
msgstr ""
msgid "ProtectedEnvironment|Allowed to deploy"
msgstr ""
msgid "ProtectedEnvironment|Allowed to deploy to %{project} / %{environment}"
msgstr ""
msgid "ProtectedEnvironment|Environment"
msgstr ""
msgid "ProtectedEnvironment|Environments protected upstream"
msgstr ""
msgid "ProtectedEnvironment|Failed to load details for this group."
msgstr ""
msgid "ProtectedEnvironment|No environments in this project are protected."
msgstr ""
msgid "ProtectedEnvironment|Only specified groups can execute deployments in protected environments."
msgstr ""
msgid "ProtectedEnvironment|Only specified users can execute deployments in a protected environment."
msgstr ""
msgid "ProtectedEnvironment|Parent group"
msgstr ""
msgid "ProtectedEnvironment|Protect"
msgstr ""
@ -34931,6 +34949,9 @@ msgstr ""
msgid "SecurityOrchestration|Failed to load cluster agents."
msgstr ""
msgid "SecurityOrchestration|Failed to load images."
msgstr ""
msgid "SecurityOrchestration|Failed to load vulnerability scanners."
msgstr ""
@ -35276,6 +35297,9 @@ msgstr ""
msgid "SecurityReports|Hide dismissed"
msgstr ""
msgid "SecurityReports|Image"
msgstr ""
msgid "SecurityReports|Issue Created"
msgstr ""
@ -45780,6 +45804,9 @@ msgstr ""
msgid "ciReport|All clusters"
msgstr ""
msgid "ciReport|All images"
msgstr ""
msgid "ciReport|All projects"
msgstr ""

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true

# NOTE(review): `rack/utils` does not appear to be used anywhere in this cop —
# confirm it is required elsewhere before removing.
require 'rack/utils'

module RuboCop
  module Cop
    module Gitlab
      # This cop prevents from using deprecated `track_redis_hll_event` method.
      #
      # @example
      #
      #   # bad
      #   track_redis_hll_event :show, name: 'p_analytics_valuestream'
      #
      #   # good
      #   track_event :show, name: 'g_analytics_valuestream', destinations: [:redis_hll]
      class DeprecateTrackRedisHLLEvent < RuboCop::Cop::Cop
        MSG = '`track_redis_hll_event` is deprecated. Use `track_event` helper instead. ' \
          'See https://docs.gitlab.com/ee/development/service_ping/implement.html#add-new-events'

        # Matches a `track_redis_hll_event` call on any receiver, with any arguments.
        def_node_matcher :track_redis_hll_event_used?, <<~PATTERN
          (send _ :track_redis_hll_event ...)
        PATTERN

        # Flags the method-name selector of every matched call site.
        def on_send(node)
          return unless track_redis_hll_event_used?(node)

          add_offense(node, location: :selector)
        end
      end
    end
  end
end

View File

@ -102,6 +102,28 @@ FactoryBot.define do
end
end
# Archive artifact backed by a fixture zip containing exactly one file.
# The unused `evaluator` block parameter was dropped (FactoryBot yields it
# only when the callback asks for it).
trait :zip_with_single_file do
  file_type { :archive }
  file_format { :zip }

  after(:build) do |artifact|
    artifact.file = fixture_file_upload(
      Rails.root.join('spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/single_file.zip'),
      'application/zip')
  end
end
# Archive artifact backed by a fixture zip containing several files.
# The unused `evaluator` block parameter was dropped (FactoryBot yields it
# only when the callback asks for it).
trait :zip_with_multiple_files do
  file_type { :archive }
  file_format { :zip }

  after(:build) do |artifact|
    artifact.file = fixture_file_upload(
      Rails.root.join('spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/multiple_files.zip'),
      'application/zip')
  end
end
trait :junit do
file_type { :junit }
file_format { :gzip }

View File

@ -3149,6 +3149,28 @@
"created_at": "2020-01-07T11:21:21.235Z",
"updated_at": "2020-01-07T11:21:21.235Z"
}
],
"merge_request_assignees": [
{
"user_id": 1,
"created_at": "2020-01-07T11:21:21.235Z",
"state": "unreviewed"
},
{
"user_id": 15,
"created_at": "2020-01-08T11:21:21.235Z",
"state": "reviewed"
},
{
"user_id": 16,
"created_at": "2020-01-09T11:21:21.235Z",
"state": "attention_requested"
},
{
"user_id": 6,
"created_at": "2020-01-10T11:21:21.235Z",
"state": "unreviewed"
}
]
},
{
@ -3416,7 +3438,8 @@
"action": 1,
"author_id": 1
}
]
],
"merge_request_assignees": []
},
{
"id": 15,

File diff suppressed because one or more lines are too long

View File

@ -129,7 +129,7 @@ describe('DevopsScore', () => {
});
it('displays the correct badge', () => {
const badge = findUsageCol().find(GlBadge);
const badge = findUsageCol().findComponent(GlBadge);
expect(badge.exists()).toBe(true);
expect(badge.props('variant')).toBe('muted');

View File

@ -24,7 +24,7 @@ describe('Signup Form', () => {
const findByTestId = (id) => wrapper.find(`[data-testid="${id}"]`);
const findHiddenInput = () => findByTestId('input');
const findCheckbox = () => wrapper.find(GlFormCheckbox);
const findCheckbox = () => wrapper.findComponent(GlFormCheckbox);
const findCheckboxLabel = () => findByTestId('label');
const findHelpText = () => findByTestId('helpText');

View File

@ -28,7 +28,7 @@ describe('Signup Form', () => {
const findForm = () => wrapper.findByTestId('form');
const findInputCsrf = () => findForm().find('[name="authenticity_token"]');
const findFormSubmitButton = () => findForm().find(GlButton);
const findFormSubmitButton = () => findForm().findComponent(GlButton);
const findDenyListRawRadio = () => queryByLabelText('Enter denylist manually');
const findDenyListFileRadio = () => queryByLabelText('Upload denylist file');
@ -36,7 +36,7 @@ describe('Signup Form', () => {
const findDenyListRawInputGroup = () => wrapper.findByTestId('domain-denylist-raw-input-group');
const findDenyListFileInputGroup = () => wrapper.findByTestId('domain-denylist-file-input-group');
const findUserCapInput = () => wrapper.findByTestId('user-cap-input');
const findModal = () => wrapper.find(GlModal);
const findModal = () => wrapper.findComponent(GlModal);
afterEach(() => {
wrapper.destroy();

View File

@ -41,7 +41,7 @@ describe('Admin statistics app', () => {
store.dispatch('requestStatistics');
createComponent();
expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
});

View File

@ -12,7 +12,7 @@ import { paths } from '../../mock_data';
describe('Action components', () => {
let wrapper;
const findDropdownItem = () => wrapper.find(GlDropdownItem);
const findDropdownItem = () => wrapper.findComponent(GlDropdownItem);
const initComponent = ({ component, props } = {}) => {
wrapper = shallowMount(component, {

View File

@ -28,7 +28,7 @@ describe('AdminUsersApp component', () => {
});
it('renders the admin users table with props', () => {
expect(wrapper.find(AdminUsersTable).props()).toEqual({
expect(wrapper.findComponent(AdminUsersTable).props()).toEqual({
users,
paths,
});

View File

@ -17,7 +17,7 @@ describe('Delete user modal', () => {
const findButton = (variant, category) =>
wrapper
.findAll(GlButton)
.findAllComponents(GlButton)
.filter((w) => w.attributes('variant') === variant && w.attributes('category') === category)
.at(0);
const findForm = () => wrapper.find('form');

View File

@ -83,7 +83,7 @@ describe('AdminUserActions component', () => {
});
it.each(CONFIRMATION_ACTIONS)('renders an action component item for "%s"', (action) => {
const component = wrapper.find(Actions[capitalizeFirstCharacter(action)]);
const component = wrapper.findComponent(Actions[capitalizeFirstCharacter(action)]);
expect(component.props('username')).toBe(user.name);
expect(component.props('path')).toBe(userPaths[action]);
@ -119,7 +119,7 @@ describe('AdminUserActions component', () => {
});
it.each(DELETE_ACTIONS)('renders a delete action component item for "%s"', (action) => {
const component = wrapper.find(Actions[capitalizeFirstCharacter(action)]);
const component = wrapper.findComponent(Actions[capitalizeFirstCharacter(action)]);
expect(component.props('username')).toBe(user.name);
expect(component.props('paths')).toEqual(userPaths);

View File

@ -12,10 +12,10 @@ describe('AdminUserAvatar component', () => {
const user = users[0];
const adminUserPath = paths.adminUser;
const findNote = () => wrapper.find(GlIcon);
const findAvatar = () => wrapper.find(GlAvatarLabeled);
const findNote = () => wrapper.findComponent(GlIcon);
const findAvatar = () => wrapper.findComponent(GlAvatarLabeled);
const findUserLink = () => wrapper.find('.js-user-link');
const findAllBadges = () => wrapper.findAll(GlBadge);
const findAllBadges = () => wrapper.findAllComponents(GlBadge);
const findTooltip = () => getBinding(findNote().element, 'gl-tooltip');
const initComponent = (props = {}) => {

View File

@ -30,10 +30,10 @@ describe('AdminUsersTable component', () => {
const fetchGroupCountsResponse = createFetchGroupCount([{ id: user.id, groupCount: 5 }]);
const findUserGroupCount = (id) => wrapper.findByTestId(`user-group-count-${id}`);
const findUserGroupCountLoader = (id) => findUserGroupCount(id).find(GlSkeletonLoader);
const findUserGroupCountLoader = (id) => findUserGroupCount(id).findComponent(GlSkeletonLoader);
const getCellByLabel = (trIdx, label) => {
return wrapper
.find(GlTable)
.findComponent(GlTable)
.find('tbody')
.findAll('tr')
.at(trIdx)
@ -72,7 +72,7 @@ describe('AdminUsersTable component', () => {
});
it('renders the user actions', () => {
expect(wrapper.find(AdminUserActions).exists()).toBe(true);
expect(wrapper.findComponent(AdminUserActions).exists()).toBe(true);
});
it.each`
@ -81,7 +81,7 @@ describe('AdminUsersTable component', () => {
${AdminUserDate} | ${'Created on'}
${AdminUserDate} | ${'Last activity'}
`('renders the component for column $label', ({ component, label }) => {
expect(getCellByLabel(0, label).find(component).exists()).toBe(true);
expect(getCellByLabel(0, label).findComponent(component).exists()).toBe(true);
});
});

View File

@ -8,7 +8,7 @@ describe('initAdminUsersApp', () => {
let wrapper;
let el;
const findApp = () => wrapper.find(AdminUsersApp);
const findApp = () => wrapper.findComponent(AdminUsersApp);
beforeEach(() => {
el = document.createElement('div');
@ -36,7 +36,7 @@ describe('initAdminUserActions', () => {
let wrapper;
let el;
const findUserActions = () => wrapper.find(UserActions);
const findUserActions = () => wrapper.findComponent(UserActions);
beforeEach(() => {
el = document.createElement('div');

View File

@ -426,7 +426,7 @@ describe('Linked pipeline', () => {
jest.spyOn(wrapper.vm, '$emit');
findButton().trigger('click');
expect(wrapper.emitted().pipelineClicked).toBeTruthy();
expect(wrapper.emitted().pipelineClicked).toHaveLength(1);
});
it(`should emit ${BV_HIDE_TOOLTIP} to close the tooltip`, () => {

View File

@ -84,7 +84,7 @@ describe('AdminRunnersApp', () => {
const findRunnerList = () => wrapper.findComponent(RunnerList);
const findRunnerListEmptyState = () => wrapper.findComponent(RunnerListEmptyState);
const findRunnerPagination = () => extendedWrapper(wrapper.findComponent(RunnerPagination));
const findRunnerPaginationNext = () => findRunnerPagination().findByLabelText('Go to next page');
const findRunnerPaginationNext = () => findRunnerPagination().findByText(s__('Pagination|Next'));
const findRunnerFilteredSearchBar = () => wrapper.findComponent(RunnerFilteredSearchBar);
const createComponent = ({
@ -279,7 +279,7 @@ describe('AdminRunnersApp', () => {
{ type: PARAM_KEY_PAUSED, value: { data: 'true', operator: '=' } },
],
sort: 'CREATED_DESC',
pagination: { page: 1 },
pagination: {},
});
});
@ -340,6 +340,7 @@ describe('AdminRunnersApp', () => {
it('when runners have not loaded, shows a loading state', () => {
createComponent();
expect(findRunnerList().props('loading')).toBe(true);
expect(findRunnerPagination().attributes('disabled')).toBe('true');
});
describe('when bulk delete is enabled', () => {
@ -434,19 +435,25 @@ describe('AdminRunnersApp', () => {
});
describe('Pagination', () => {
const { pageInfo } = allRunnersDataPaginated.data.runners;
beforeEach(async () => {
mockRunnersHandler.mockResolvedValue(allRunnersDataPaginated);
await createComponent({ mountFn: mountExtended });
});
it('passes the page info', () => {
expect(findRunnerPagination().props('pageInfo')).toEqualGraphqlFixture(pageInfo);
});
it('navigates to the next page', async () => {
await findRunnerPaginationNext().trigger('click');
expect(mockRunnersHandler).toHaveBeenLastCalledWith({
sort: CREATED_DESC,
first: RUNNER_PAGE_SIZE,
after: allRunnersDataPaginated.data.runners.pageInfo.endCursor,
after: pageInfo.endCursor,
});
});
});

View File

@ -143,7 +143,7 @@ describe('RunnerList', () => {
runnerType: INSTANCE_TYPE,
filters: mockFilters,
sort: mockOtherSort,
pagination: { page: 1 },
pagination: {},
});
});
});
@ -156,7 +156,7 @@ describe('RunnerList', () => {
runnerType: null,
filters: mockFilters,
sort: mockDefaultSort,
pagination: { page: 1 },
pagination: {},
});
});
@ -167,7 +167,7 @@ describe('RunnerList', () => {
runnerType: null,
filters: [],
sort: mockOtherSort,
pagination: { page: 1 },
pagination: {},
});
});
});

View File

@ -1,5 +1,5 @@
import { GlPagination } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { GlKeysetPagination } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import RunnerPagination from '~/runner/components/runner_pagination.vue';
const mockStartCursor = 'START_CURSOR';
@ -8,21 +8,11 @@ const mockEndCursor = 'END_CURSOR';
describe('RunnerPagination', () => {
let wrapper;
const findPagination = () => wrapper.findComponent(GlPagination);
const findPagination = () => wrapper.findComponent(GlKeysetPagination);
const createComponent = ({ page = 1, hasPreviousPage = false, hasNextPage = true } = {}) => {
wrapper = mount(RunnerPagination, {
propsData: {
value: {
page,
},
pageInfo: {
hasPreviousPage,
hasNextPage,
startCursor: mockStartCursor,
endCursor: mockEndCursor,
},
},
const createComponent = (propsData = {}) => {
wrapper = shallowMount(RunnerPagination, {
propsData,
});
};
@ -30,114 +20,96 @@ describe('RunnerPagination', () => {
wrapper.destroy();
});
describe('When on the first page', () => {
beforeEach(() => {
createComponent({
page: 1,
hasPreviousPage: false,
hasNextPage: true,
});
});
it('Contains the current page information', () => {
expect(findPagination().props('value')).toBe(1);
expect(findPagination().props('prevPage')).toBe(null);
expect(findPagination().props('nextPage')).toBe(2);
});
it('Goes to the second page', () => {
findPagination().vm.$emit('input', 2);
expect(wrapper.emitted('input')[0]).toEqual([
{
after: mockEndCursor,
page: 2,
},
]);
});
});
describe('When in between pages', () => {
const mockPageInfo = {
startCursor: mockStartCursor,
endCursor: mockEndCursor,
hasPreviousPage: true,
hasNextPage: true,
};
beforeEach(() => {
createComponent({
page: 2,
hasPreviousPage: true,
hasNextPage: true,
pageInfo: mockPageInfo,
});
});
it('Contains the current page information', () => {
expect(findPagination().props('value')).toBe(2);
expect(findPagination().props('prevPage')).toBe(1);
expect(findPagination().props('nextPage')).toBe(3);
expect(findPagination().props()).toMatchObject(mockPageInfo);
});
it('Shows the next and previous pages', () => {
const links = findPagination().findAll('a');
it('Goes to the prev page', () => {
findPagination().vm.$emit('prev');
expect(links).toHaveLength(2);
expect(links.at(0).text()).toBe('Previous');
expect(links.at(1).text()).toBe('Next');
expect(wrapper.emitted('input')[0]).toEqual([
{
before: mockStartCursor,
},
]);
});
it('Goes to the last page', () => {
findPagination().vm.$emit('input', 3);
it('Goes to the next page', () => {
findPagination().vm.$emit('next');
expect(wrapper.emitted('input')[0]).toEqual([
{
after: mockEndCursor,
page: 3,
},
]);
});
it('Goes to the first page', () => {
findPagination().vm.$emit('input', 1);
expect(wrapper.emitted('input')[0]).toEqual([
{
page: 1,
},
]);
});
});
describe('When in the last page', () => {
describe.each`
page | hasPreviousPage | hasNextPage
${'first'} | ${false} | ${true}
${'last'} | ${true} | ${false}
`('When on the $page page', ({ page, hasPreviousPage, hasNextPage }) => {
const mockPageInfo = {
startCursor: mockStartCursor,
endCursor: mockEndCursor,
hasPreviousPage,
hasNextPage,
};
beforeEach(() => {
createComponent({
page: 3,
hasPreviousPage: true,
hasNextPage: false,
pageInfo: mockPageInfo,
});
});
it('Contains the current page', () => {
expect(findPagination().props('value')).toBe(3);
expect(findPagination().props('prevPage')).toBe(2);
expect(findPagination().props('nextPage')).toBe(null);
it(`Contains the ${page} page information`, () => {
expect(findPagination().props()).toMatchObject(mockPageInfo);
});
});
describe('When only one page', () => {
describe('When no other pages', () => {
beforeEach(() => {
createComponent({
page: 1,
hasPreviousPage: false,
hasNextPage: false,
pageInfo: {
hasPreviousPage: false,
hasNextPage: false,
},
});
});
it('does not display pagination', () => {
expect(wrapper.html()).toBe('');
it('is not shown', () => {
expect(findPagination().exists()).toBe(false);
});
});
describe('When adding more attributes', () => {
beforeEach(() => {
createComponent({
pageInfo: {
hasPreviousPage: true,
hasNextPage: false,
},
disabled: true,
});
});
it('Contains the current page', () => {
expect(findPagination().props('value')).toBe(1);
});
it('Shows no more page buttons', () => {
expect(findPagination().props('prevPage')).toBe(null);
expect(findPagination().props('nextPage')).toBe(null);
it('attributes are passed', () => {
expect(findPagination().props('disabled')).toBe(true);
});
});
});

View File

@ -82,7 +82,7 @@ describe('GroupRunnersApp', () => {
const findRunnerListEmptyState = () => wrapper.findComponent(RunnerListEmptyState);
const findRunnerRow = (id) => extendedWrapper(wrapper.findByTestId(`runner-row-${id}`));
const findRunnerPagination = () => extendedWrapper(wrapper.findComponent(RunnerPagination));
const findRunnerPaginationNext = () => findRunnerPagination().findByLabelText('Go to next page');
const findRunnerPaginationNext = () => findRunnerPagination().findByText(s__('Pagination|Next'));
const findRunnerFilteredSearchBar = () => wrapper.findComponent(RunnerFilteredSearchBar);
const createComponent = ({ props = {}, mountFn = shallowMountExtended, ...options } = {}) => {
@ -263,7 +263,7 @@ describe('GroupRunnersApp', () => {
runnerType: INSTANCE_TYPE,
filters: [{ type: 'status', value: { data: STATUS_ONLINE, operator: '=' } }],
sort: 'CREATED_DESC',
pagination: { page: 1 },
pagination: {},
});
});
@ -326,6 +326,7 @@ describe('GroupRunnersApp', () => {
it('when runners have not loaded, shows a loading state', () => {
createComponent();
expect(findRunnerList().props('loading')).toBe(true);
expect(findRunnerPagination().attributes('disabled')).toBe('true');
});
describe('when no runners are found', () => {
@ -372,12 +373,18 @@ describe('GroupRunnersApp', () => {
});
describe('Pagination', () => {
const { pageInfo } = groupRunnersDataPaginated.data.group.runners;
beforeEach(async () => {
mockGroupRunnersHandler.mockResolvedValue(groupRunnersDataPaginated);
await createComponent({ mountFn: mountExtended });
});
it('passes the page info', () => {
expect(findRunnerPagination().props('pageInfo')).toEqualGraphqlFixture(pageInfo);
});
it('navigates to the next page', async () => {
await findRunnerPaginationNext().trigger('click');
@ -385,7 +392,7 @@ describe('GroupRunnersApp', () => {
groupFullPath: mockGroupFullPath,
sort: CREATED_DESC,
first: RUNNER_PAGE_SIZE,
after: groupRunnersDataPaginated.data.group.runners.pageInfo.endCursor,
after: pageInfo.endCursor,
});
});
});

View File

@ -34,7 +34,7 @@ export const mockSearchExamples = [
{
name: 'a default query',
urlQuery: '',
search: { runnerType: null, filters: [], pagination: { page: 1 }, sort: 'CREATED_DESC' },
search: { runnerType: null, filters: [], pagination: {}, sort: 'CREATED_DESC' },
graphqlVariables: { sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
isDefault: true,
},
@ -44,7 +44,7 @@ export const mockSearchExamples = [
search: {
runnerType: null,
filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
pagination: { page: 1 },
pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
@ -60,7 +60,7 @@ export const mockSearchExamples = [
value: { data: 'something' },
},
],
pagination: { page: 1 },
pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: { search: 'something', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
@ -80,7 +80,7 @@ export const mockSearchExamples = [
value: { data: 'else' },
},
],
pagination: { page: 1 },
pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: { search: 'something else', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
@ -91,7 +91,7 @@ export const mockSearchExamples = [
search: {
runnerType: 'INSTANCE_TYPE',
filters: [],
pagination: { page: 1 },
pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: { type: 'INSTANCE_TYPE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
@ -105,7 +105,7 @@ export const mockSearchExamples = [
{ type: 'status', value: { data: 'ACTIVE', operator: '=' } },
{ type: 'status', value: { data: 'PAUSED', operator: '=' } },
],
pagination: { page: 1 },
pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
@ -116,7 +116,7 @@ export const mockSearchExamples = [
search: {
runnerType: 'INSTANCE_TYPE',
filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
pagination: { page: 1 },
pagination: {},
sort: 'CREATED_ASC',
},
graphqlVariables: {
@ -132,7 +132,7 @@ export const mockSearchExamples = [
search: {
runnerType: null,
filters: [{ type: 'tag', value: { data: 'tag-1', operator: '=' } }],
pagination: { page: 1 },
pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: {
@ -150,7 +150,7 @@ export const mockSearchExamples = [
{ type: 'tag', value: { data: 'tag-1', operator: '=' } },
{ type: 'tag', value: { data: 'tag-2', operator: '=' } },
],
pagination: { page: 1 },
pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: {
@ -161,22 +161,22 @@ export const mockSearchExamples = [
},
{
name: 'the next page',
urlQuery: '?page=2&after=AFTER_CURSOR',
urlQuery: '?after=AFTER_CURSOR',
search: {
runnerType: null,
filters: [],
pagination: { page: 2, after: 'AFTER_CURSOR' },
pagination: { after: 'AFTER_CURSOR' },
sort: 'CREATED_DESC',
},
graphqlVariables: { sort: 'CREATED_DESC', after: 'AFTER_CURSOR', first: RUNNER_PAGE_SIZE },
},
{
name: 'the previous page',
urlQuery: '?page=2&before=BEFORE_CURSOR',
urlQuery: '?before=BEFORE_CURSOR',
search: {
runnerType: null,
filters: [],
pagination: { page: 2, before: 'BEFORE_CURSOR' },
pagination: { before: 'BEFORE_CURSOR' },
sort: 'CREATED_DESC',
},
graphqlVariables: { sort: 'CREATED_DESC', before: 'BEFORE_CURSOR', last: RUNNER_PAGE_SIZE },
@ -184,7 +184,7 @@ export const mockSearchExamples = [
{
name: 'the next page filtered by a status, an instance type, tags and a non default sort',
urlQuery:
'?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&tag[]=tag-1&tag[]=tag-2&sort=CREATED_ASC&page=2&after=AFTER_CURSOR',
'?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&tag[]=tag-1&tag[]=tag-2&sort=CREATED_ASC&after=AFTER_CURSOR',
search: {
runnerType: 'INSTANCE_TYPE',
filters: [
@ -192,7 +192,7 @@ export const mockSearchExamples = [
{ type: 'tag', value: { data: 'tag-1', operator: '=' } },
{ type: 'tag', value: { data: 'tag-2', operator: '=' } },
],
pagination: { page: 2, after: 'AFTER_CURSOR' },
pagination: { after: 'AFTER_CURSOR' },
sort: 'CREATED_ASC',
},
graphqlVariables: {
@ -210,7 +210,7 @@ export const mockSearchExamples = [
search: {
runnerType: null,
filters: [{ type: 'paused', value: { data: 'true', operator: '=' } }],
pagination: { page: 1 },
pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: { paused: true, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
@ -221,7 +221,7 @@ export const mockSearchExamples = [
search: {
runnerType: null,
filters: [{ type: 'paused', value: { data: 'false', operator: '=' } }],
pagination: { page: 1 },
pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: { paused: false, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },

View File

@ -24,11 +24,14 @@ describe('search_params.js', () => {
});
it.each`
query | updatedQuery
${'status[]=ACTIVE'} | ${'paused[]=false'}
${'status[]=ACTIVE&a=b'} | ${'a=b&paused[]=false'}
${'status[]=ACTIVE'} | ${'paused[]=false'}
${'status[]=PAUSED'} | ${'paused[]=true'}
query | updatedQuery
${'status[]=ACTIVE'} | ${'paused[]=false'}
${'status[]=ACTIVE&a=b'} | ${'a=b&paused[]=false'}
${'status[]=ACTIVE'} | ${'paused[]=false'}
${'status[]=PAUSED'} | ${'paused[]=true'}
${'page=2&after=AFTER'} | ${'after=AFTER'}
${'page=2&before=BEFORE'} | ${'before=BEFORE'}
${'status[]=PAUSED&page=2&after=AFTER'} | ${'after=AFTER&paused[]=true'}
`('updates "$query" to "$updatedQuery"', ({ query, updatedQuery }) => {
const mockUrl = 'http://test.host/admin/runners?';
@ -49,24 +52,6 @@ describe('search_params.js', () => {
{ type: 'filtered-search-term', value: { data: 'text' } },
]);
});
it('When a page cannot be parsed as a number, it defaults to `1`', () => {
expect(fromUrlQueryToSearch('?page=NONSENSE&after=AFTER_CURSOR').pagination).toEqual({
page: 1,
});
});
it('When a page is less than 1, it defaults to `1`', () => {
expect(fromUrlQueryToSearch('?page=0&after=AFTER_CURSOR').pagination).toEqual({
page: 1,
});
});
it('When a page with no cursor is given, it defaults to `1`', () => {
expect(fromUrlQueryToSearch('?page=2').pagination).toEqual({
page: 1,
});
});
});
describe('fromSearchToUrl', () => {
@ -143,8 +128,11 @@ describe('search_params.js', () => {
});
});
it('given a missing pagination, evaluates as not filtered', () => {
expect(isSearchFiltered({ pagination: null })).toBe(false);
});
it.each([null, undefined, {}])(
'given a missing pagination, evaluates as not filtered',
(mockPagination) => {
expect(isSearchFiltered({ pagination: mockPagination })).toBe(false);
},
);
});
});

View File

@ -0,0 +1,123 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

# Spec for the batched background migration that backfills
# `namespace_settings.project_import_level` from each Group namespace's
# `project_creation_level`.
# rubocop:disable Layout/HashAlignment
RSpec.describe Gitlab::BackgroundMigration::BackfillProjectImportLevel do
  # One batch spanning every namespace created below.
  let(:migration) do
    described_class.new(
      start_id: table(:namespaces).minimum(:id),
      end_id: table(:namespaces).maximum(:id),
      batch_table: :namespaces,
      batch_column: :id,
      sub_batch_size: 2,
      pause_ms: 0,
      connection: ApplicationRecord.connection
    )
  end
  # rubocop:enable Layout/HashAlignment

  let(:namespaces_table) { table(:namespaces) }
  let(:namespace_settings_table) { table(:namespace_settings) }

  # `User` namespace — must be skipped by the migration even though it
  # carries a project_creation_level value.
  let!(:user_namespace) do
    namespaces_table.create!(
      name: 'user_namespace',
      path: 'user_namespace',
      type: 'User',
      project_creation_level: 100
    )
  end

  # Group with no creation level — import level should keep its default.
  let!(:group_namespace_nil) do
    namespaces_table.create!(
      name: 'group_namespace_nil',
      path: 'group_namespace_nil',
      type: 'Group',
      project_creation_level: nil
    )
  end

  let!(:group_namespace_0) do
    namespaces_table.create!(
      name: 'group_namespace_0',
      path: 'group_namespace_0',
      type: 'Group',
      project_creation_level: 0
    )
  end

  let!(:group_namespace_1) do
    namespaces_table.create!(
      name: 'group_namespace_1',
      path: 'group_namespace_1',
      type: 'Group',
      project_creation_level: 1
    )
  end

  let!(:group_namespace_2) do
    namespaces_table.create!(
      name: 'group_namespace_2',
      path: 'group_namespace_2',
      type: 'Group',
      project_creation_level: 2
    )
  end

  # Creation level outside the known 0..2 range — must be left untouched.
  let!(:group_namespace_9999) do
    namespaces_table.create!(
      name: 'group_namespace_9999',
      path: 'group_namespace_9999',
      type: 'Group',
      project_creation_level: 9999
    )
  end

  subject(:perform_migration) { migration.perform }

  before do
    namespace_settings_table.create!(namespace_id: user_namespace.id)
    namespace_settings_table.create!(namespace_id: group_namespace_nil.id)
    namespace_settings_table.create!(namespace_id: group_namespace_0.id)
    namespace_settings_table.create!(namespace_id: group_namespace_1.id)
    namespace_settings_table.create!(namespace_id: group_namespace_2.id)
    namespace_settings_table.create!(namespace_id: group_namespace_9999.id)
  end

  describe 'Groups' do
    using RSpec::Parameterized::TableSyntax

    # Expected mapping of project_creation_level (0/1/2) to the backfilled
    # project_import_level access constant.
    where(:namespace_id, :prev_level, :new_level) do
      lazy { group_namespace_0.id } | ::Gitlab::Access::OWNER | ::Gitlab::Access::NO_ACCESS
      lazy { group_namespace_1.id } | ::Gitlab::Access::OWNER | ::Gitlab::Access::MAINTAINER
      lazy { group_namespace_2.id } | ::Gitlab::Access::OWNER | ::Gitlab::Access::DEVELOPER
    end

    with_them do
      it 'backfills the correct project_import_level of Group namespaces' do
        expect { perform_migration }
          .to change { namespace_settings_table.find_by(namespace_id: namespace_id).project_import_level }
          .from(prev_level).to(new_level)
      end
    end

    it 'does not update `User` namespaces or values outside range' do
      expect { perform_migration }
        .not_to change { namespace_settings_table.find_by(namespace_id: user_namespace.id).project_import_level }

      expect { perform_migration }
        .not_to change { namespace_settings_table.find_by(namespace_id: group_namespace_9999.id).project_import_level }
    end

    it 'maintains default import_level if creation_level is nil' do
      project_import_level = namespace_settings_table.find_by(namespace_id: group_namespace_nil.id).project_import_level

      expect { perform_migration }
        .not_to change { project_import_level }

      # Default column value stays at OWNER when no creation level exists.
      expect(project_import_level).to eq(::Gitlab::Access::OWNER)
    end
  end
end

View File

@ -0,0 +1,86 @@
# frozen_string_literal: true

require 'spec_helper'

# Specs for the zip-stream artifact adapter, which iterates decompressed
# zip entry contents from an IO stream with protections against
# decompression bombs and excessive file counts.
RSpec.describe Gitlab::Ci::Build::Artifacts::Adapters::ZipStream do
  let(:file_name) { 'single_file.zip' }
  let(:fixture_path) { "lib/gitlab/ci/build/artifacts/adapters/zip_stream/#{file_name}" }
  let(:stream) { File.open(expand_fixture_path(fixture_path), 'rb') }

  describe '#initialize' do
    it 'initializes when stream is passed' do
      expect { described_class.new(stream) }.not_to raise_error
    end

    context 'when stream is not passed' do
      let(:stream) { nil }

      it 'raises an error' do
        expect { described_class.new(stream) }.to raise_error(described_class::InvalidStreamError)
      end
    end
  end

  describe '#each_blob' do
    let(:adapter) { described_class.new(stream) }

    context 'when stream is a zip file' do
      it 'iterates file content when zip file contains one file' do
        expect { |b| adapter.each_blob(&b) }
          .to yield_with_args("file 1 content\n")
      end

      context 'when zip file contains multiple files' do
        let(:file_name) { 'multiple_files.zip' }

        it 'iterates content of all files' do
          expect { |b| adapter.each_blob(&b) }
            .to yield_successive_args("file 1 content\n", "file 2 content\n")
        end
      end

      context 'when zip file includes files in a directory' do
        let(:file_name) { 'with_directory.zip' }

        # Directory entries themselves must not be yielded.
        it 'iterates contents from files only' do
          expect { |b| adapter.each_blob(&b) }
            .to yield_successive_args("file 1 content\n", "file 2 content\n")
        end
      end

      # Guard against entries that inflate past the decompressed-size limit.
      context 'when zip contains a file which decompresses beyond the size limit' do
        let(:file_name) { '200_mb_decompressed.zip' }

        it 'does not read the file' do
          expect { |b| adapter.each_blob(&b) }.not_to yield_control
        end
      end

      # Guard against archives with an excessive number of entries.
      context 'when the zip contains too many files' do
        let(:file_name) { '100_files.zip' }

        it 'stops processing when the limit is reached' do
          expect { |b| adapter.each_blob(&b) }
            .to yield_control.exactly(described_class::MAX_FILES_PROCESSED).times
        end
      end

      context 'when stream is a zipbomb' do
        let(:file_name) { 'zipbomb.zip' }

        it 'does not read the file' do
          expect { |b| adapter.each_blob(&b) }.not_to yield_control
        end
      end
    end

    context 'when stream is not a zip file' do
      let(:stream) { File.open(expand_fixture_path('junit/junit.xml.gz'), 'rb') }

      # Non-zip input is ignored silently — no yields, no exception.
      it 'does not yield any data' do
        expect { |b| adapter.each_blob(&b) }.not_to yield_control
        expect { adapter.each_blob { |b| b } }.not_to raise_error
      end
    end
  end
end

View File

@ -653,6 +653,7 @@ search_data:
merge_request_assignees:
- merge_request
- assignee
- updated_state_by
lfs_file_locks:
- user
project_badges:

View File

@ -58,8 +58,8 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver do
end
context 'when subrelation collection count is small' do
let(:notes) { build_list(:note, 2, project: project, importing: true) }
let(:relation_object) { build(:issue, project: project, notes: notes) }
let(:note) { build(:note, project: project, importing: true) }
let(:relation_object) { build(:issue, project: project, notes: [note]) }
let(:relation_definition) { { 'notes' => {} } }
it 'saves subrelation as part of the relation object itself' do
@ -68,7 +68,7 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver do
saver.execute
issue = project.issues.last
expect(issue.notes.count).to eq(2)
expect(issue.notes.count).to eq(1)
end
end

View File

@ -254,6 +254,11 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
end
end
it 'has multiple merge request assignees' do
expect(MergeRequest.find_by(title: 'MR1').assignees).to contain_exactly(@user, *@existing_members)
expect(MergeRequest.find_by(title: 'MR2').assignees).to be_empty
end
it 'has labels associated to label links, associated to issues' do
expect(Label.first.label_links.first.target).not_to be_nil
end

View File

@ -103,6 +103,13 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver do
it 'has merge request resource label events' do
expect(subject.first['resource_label_events']).not_to be_empty
end
it 'has merge request assignees' do
reviewer = subject.first['merge_request_assignees'].first
expect(reviewer).not_to be_nil
expect(reviewer['user_id']).to eq(user.id)
end
end
context 'with snippets' do
@ -468,7 +475,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver do
create(:label_link, label: group_label, target: issue)
create(:label_priority, label: group_label, priority: 1)
milestone = create(:milestone, project: project)
merge_request = create(:merge_request, source_project: project, milestone: milestone)
merge_request = create(:merge_request, source_project: project, milestone: milestone, assignees: [user])
ci_build = create(:ci_build, project: project, when: nil)
ci_build.pipeline.update!(project: project)

View File

@ -741,6 +741,8 @@ MergeRequestAssignee:
- id
- user_id
- merge_request_id
- created_at
- state
ProjectMetricsSetting:
- project_id
- external_dashboard_url

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
# Spec for the BackfillProjectImportLevel post-deployment migration.
# Verifies only the *scheduling* side: that `#up` enqueues a batched
# background migration over `namespaces` and that `#down` removes the
# scheduled record again. The backfill logic itself is tested elsewhere
# (in the batched migration job's own spec).
RSpec.describe BackfillProjectImportLevel do
  # The batched migration job name this migration schedules
  # (a String constant on the migration class).
  let_it_be(:batched_migration) { described_class::MIGRATION }

  describe '#up' do
    it 'schedules background jobs for each batch of namespaces' do
      migrate!

      # `have_scheduled_batched_migration` checks the
      # batched_background_migrations table for a matching row.
      expect(batched_migration).to have_scheduled_batched_migration(
        table_name: :namespaces,
        column_name: :id,
        interval: described_class::INTERVAL
      )
    end
  end

  describe '#down' do
    it 'deletes all batched migration records' do
      # Run up first so there is a scheduled record to roll back.
      migrate!
      schema_migrate_down!

      expect(batched_migration).not_to have_scheduled_batched_migration
    end
  end
end

View File

@ -25,6 +25,8 @@ RSpec.describe Ci::Bridge do
expect(bridge).to have_many(:sourced_pipelines)
end
it_behaves_like 'has ID tokens', :ci_bridge
it 'has one downstream pipeline' do
expect(bridge).to have_one(:sourced_pipeline)
expect(bridge).to have_one(:downstream_pipeline)

View File

@ -105,6 +105,13 @@ RSpec.describe Ci::BuildMetadata do
}
}
}
metadata.id_tokens = {
TEST_JWT_TOKEN: {
id_token: {
aud: 'https://gitlab.test'
}
}
}
expect(metadata).to be_valid
end
@ -113,10 +120,14 @@ RSpec.describe Ci::BuildMetadata do
context 'when data is invalid' do
it 'returns errors' do
metadata.secrets = { DATABASE_PASSWORD: { vault: {} } }
metadata.id_tokens = { TEST_JWT_TOKEN: { id_token: { aud: nil } } }
aggregate_failures do
expect(metadata).to be_invalid
expect(metadata.errors.full_messages).to eq(["Secrets must be a valid json schema"])
expect(metadata.errors.full_messages).to contain_exactly(
'Secrets must be a valid json schema',
'Id tokens must be a valid json schema'
)
end
end
end

View File

@ -82,6 +82,8 @@ RSpec.describe Ci::Build do
end
end
it_behaves_like 'has ID tokens', :ci_build
describe '.manual_actions' do
let!(:manual_but_created) { create(:ci_build, :manual, status: :created, pipeline: pipeline) }
let!(:manual_but_succeeded) { create(:ci_build, :manual, status: :success, pipeline: pipeline) }

View File

@ -46,8 +46,30 @@ RSpec.describe Ci::Artifactable do
end
end
context 'when file format is zip' do
context 'when artifact contains one file' do
let(:artifact) { build(:ci_job_artifact, :zip_with_single_file) }
it 'iterates blob once' do
expect { |b| artifact.each_blob(&b) }.to yield_control.once
end
end
context 'when artifact contains two files' do
let(:artifact) { build(:ci_job_artifact, :zip_with_multiple_files) }
it 'iterates blob two times' do
expect { |b| artifact.each_blob(&b) }.to yield_control.exactly(2).times
end
end
end
context 'when there are no adapters for the file format' do
let(:artifact) { build(:ci_job_artifact, :junit, file_format: :zip) }
let(:artifact) { build(:ci_job_artifact, :junit) }
before do
allow(artifact).to receive(:file_format).and_return(:unknown)
end
it 'raises an error' do
expect { |b| artifact.each_blob(&b) }.to raise_error(described_class::NotSupportedAdapterError)

View File

@ -1512,6 +1512,45 @@ RSpec.describe API::MergeRequests do
end
end
describe 'GET /projects/:id/merge_requests/:merge_request_iid/reviewers' do
it 'returns reviewers' do
reviewer = create(:user)
merge_request.merge_request_reviewers.create!(reviewer: reviewer)
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/reviewers", user)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(merge_request.merge_request_reviewers.size)
expect(json_response.last['user']['id']).to eq(reviewer.id)
expect(json_response.last['user']['name']).to eq(reviewer.name)
expect(json_response.last['user']['username']).to eq(reviewer.username)
expect(json_response.last['state']).to eq('unreviewed')
expect(json_response.last['updated_state_by']).to be_nil
expect(json_response.last['created_at']).to be_present
end
it 'returns a 404 when iid does not exist' do
get api("/projects/#{project.id}/merge_requests/#{non_existing_record_iid}/reviewers", user)
expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 when id is used instead of iid' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.id}/reviewers", user)
expect(response).to have_gitlab_http_status(:not_found)
end
context 'when merge request author has only guest access' do
it_behaves_like 'rejects user from accessing merge request info' do
let(:url) { "/projects/#{project.id}/merge_requests/#{merge_request.iid}/reviewers" }
end
end
end
describe 'GET /projects/:id/merge_requests/:merge_request_iid/commits' do
include_context 'with merge requests'

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
require 'fast_spec_helper'
require_relative '../../../../rubocop/cop/gitlab/deprecate_track_redis_hll_event'
# Spec for the custom RuboCop cop that deprecates the controller macro
# `track_redis_hll_event` in favor of `track_event`.
RSpec.describe RuboCop::Cop::Gitlab::DeprecateTrackRedisHLLEvent do
  subject(:cop) { described_class.new }

  it 'does not flag the use of track_event' do
    # The replacement API must stay offense-free.
    expect_no_offenses('track_event :show, name: "p_analytics_insights"')
  end

  it 'flags the use of track_redis_hll_event' do
    # The `^^^` annotation marks the expected offense range; `[...]`
    # is expect_offense's built-in abbreviation for the rest of the
    # cop's message, so only the message prefix is asserted here.
    expect_offense(<<~SOURCE)
      track_redis_hll_event :show, name: 'p_analytics_valuestream'
      ^^^^^^^^^^^^^^^^^^^^^ `track_redis_hll_event` is deprecated[...]
    SOURCE
  end
end

View File

@ -119,10 +119,10 @@ RSpec.describe Projects::Alerting::NotifyService do
end
context 'with overlong payload' do
let(:deep_size_object) { instance_double(Gitlab::Utils::DeepSize, valid?: false) }
let(:payload_raw) { { 'the-payload-is-too-big' => true } }
before do
allow(Gitlab::Utils::DeepSize).to receive(:new).and_return(deep_size_object)
stub_const('::Gitlab::Utils::DeepSize::DEFAULT_MAX_DEPTH', 0)
end
it_behaves_like 'alerts service responds with an error and takes no actions', :bad_request

View File

@ -313,11 +313,11 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
end
context 'when the payload is too big' do
let(:payload) { { 'the-payload-is-too-big' => true } }
let(:deep_size_object) { instance_double(Gitlab::Utils::DeepSize, valid?: false) }
let(:payload_raw) { { 'the-payload-is-too-big' => true } }
let(:payload) { ActionController::Parameters.new(payload_raw).permit! }
before do
allow(Gitlab::Utils::DeepSize).to receive(:new).and_return(deep_size_object)
stub_const('::Gitlab::Utils::DeepSize::DEFAULT_MAX_DEPTH', 0)
end
it_behaves_like 'alerts service responds with an error and takes no actions', :bad_request

View File

@ -0,0 +1,44 @@
# frozen_string_literal: true
# Shared examples for CI models (e.g. :ci_build, :ci_bridge) that expose
# ID tokens via their associated metadata record. `ci_type` is the
# FactoryBot factory name of the model under test.
RSpec.shared_examples_for 'has ID tokens' do |ci_type|
  subject(:ci) { FactoryBot.build(ci_type) }

  describe 'delegations' do
    # Reads are delegated to the metadata association; `allow_nil`
    # because a record may have no metadata row yet.
    it { is_expected.to delegate_method(:id_tokens).to(:metadata).allow_nil }
  end

  describe '#id_tokens?' do
    subject { ci.id_tokens? }

    context 'without metadata' do
      let(:ci) { FactoryBot.build(ci_type) }

      it { is_expected.to be_falsy }
    end

    context 'with metadata' do
      let(:ci) { FactoryBot.build(ci_type, metadata: FactoryBot.build(:ci_build_metadata, id_tokens: id_tokens)) }

      context 'when ID tokens exist' do
        # NOTE(review): trailing space in 'developers ' looks like a
        # typo in the fixture; harmless to the predicate, but confirm.
        let(:id_tokens) { { TEST_JOB_JWT: { id_token: { aud: 'developers ' } } } }

        it { is_expected.to be_truthy }
      end

      context 'when ID tokens do not exist' do
        let(:id_tokens) { {} }

        it { is_expected.to be_falsy }
      end
    end
  end

  describe '#id_tokens=' do
    it 'assigns the ID tokens to the CI job' do
      # Writer should round-trip the structure unchanged (the trailing
      # space in the aud URL is fixture data, not significant).
      id_tokens = [{ 'JOB_ID_TOKEN' => { 'id_token' => { 'aud' => 'https://gitlab.test ' } } }]

      ci.id_tokens = id_tokens

      expect(ci.id_tokens).to match_array(id_tokens)
    end
  end
end

View File

@ -92,6 +92,18 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
{ type: :namespace, id: 5 }
]
it_behaves_like 'clears caches with',
event_class: Groups::GroupTransferedEvent,
event_data: {
group_id: 1,
old_root_namespace_id: 3,
new_root_namespace_id: 5
},
caches: [
{ type: :namespace, id: 3 },
{ type: :namespace, id: 5 }
]
context 'when namespace based cache keys are duplicated' do
# de-dups namespace cache keys
it_behaves_like 'clears caches with',