Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-07-07 09:10:00 +00:00
parent 28b137b094
commit f4a9d976cf
38 changed files with 678 additions and 301 deletions

View File

@@ -1 +1 @@
9deccce765e2437e87563378f878b8604fc73a9a
9ff756d21305e63a256ba74c6f75b5c867d9fc22

View File

@@ -57,10 +57,7 @@ export default {
id: this.namespaceId,
fullPath: this.namespaceFullPath,
}
: {
id: undefined,
fullPath: s__('ProjectsNew|Pick a group or namespace'),
},
: this.$options.emptyNameSpace,
shouldSkipQuery: true,
userNamespaceId: this.userNamespaceId,
};
@@ -120,12 +117,18 @@ export default {
this.setNamespace({ id, fullPath });
},
setNamespace({ id, fullPath }) {
this.selectedNamespace = {
id: getIdFromGraphQLId(id),
fullPath,
};
this.selectedNamespace = id
? {
id: getIdFromGraphQLId(id),
fullPath,
}
: this.$options.emptyNameSpace;
},
},
emptyNameSpace: {
id: undefined,
fullPath: s__('ProjectsNew|Pick a group or namespace'),
},
};
</script>
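For context on the `emptyNameSpace` refactor above: the Apollo `data` default and `setNamespace` now share a single placeholder object defined once on `$options`, so clearing the selection shows "Pick a group or namespace" instead of a half-populated namespace. A minimal standalone sketch of the fallback, assuming the shapes shown in the diff (the `getIdFromGraphQLId` stub below is illustrative, not GitLab's implementation):

// Shared placeholder, mirroring `$options.emptyNameSpace` in the diff.
const emptyNameSpace = {
  id: undefined,
  fullPath: 'Pick a group or namespace',
};

// Illustrative stub: the real helper extracts the numeric tail of a
// GraphQL global ID such as "gid://gitlab/Group/123".
const getIdFromGraphQLId = (gid) => Number(String(gid).split('/').pop());

function setNamespace({ id, fullPath } = {}) {
  // Fall back to the shared placeholder when no id was selected.
  return id ? { id: getIdFromGraphQLId(id), fullPath } : emptyNameSpace;
}

console.log(setNamespace({ id: 'gid://gitlab/Group/123', fullPath: 'my-group' }));
// => { id: 123, fullPath: 'my-group' }
console.log(setNamespace()); // => the emptyNameSpace placeholder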

View File

@@ -342,6 +342,7 @@ const bindEvents = () => {
export default {
bindEvents,
validateGroupNamespaceDropdown,
deriveProjectPathFromUrl,
onProjectNameChange,
onProjectPathChange,
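The newly exported `deriveProjectPathFromUrl` is exercised elsewhere in the form logic; its exact behavior is not shown in this diff. As a hypothetical illustration only (not GitLab's implementation), a helper with this name would typically reduce an import URL to a project path slug:

// Hypothetical sketch — assumes the helper keeps the last path segment
// and strips a trailing ".git".
function deriveProjectPathFromUrl(url) {
  const lastSegment = url.split('/').filter(Boolean).pop() || '';
  return lastSegment.replace(/\.git$/, '');
}

console.log(deriveProjectPathFromUrl('https://gitlab.example.com/my-group/my-project.git'));
// => "my-project"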

View File

@@ -45,7 +45,7 @@ export default {
},
},
data() {
// filtered_search_bar_root.vue may mutate the inital
// filtered_search_bar_root.vue may mutate the initial
// filters. Use `cloneDeep` to prevent those mutations
// from affecting this component
const { filters, sort } = cloneDeep(this.value);
@@ -54,6 +54,14 @@ export default {
initialSortBy: sort,
};
},
computed: {
validTokens() {
// Some filters are only available in EE
// EE-only tokens are represented by `null` or `undefined`
// values when in CE
return this.tokens.filter(Boolean);
},
},
methods: {
onFilter(filters) {
// Apply new filters, from page 1
@@ -83,7 +91,7 @@ export default {
recent-searches-storage-key="runners-search"
:sort-options="$options.sortOptions"
:initial-filter-value="initialFilterValue"
:tokens="tokens"
:tokens="validTokens"
:initial-sort-by="initialSortBy"
:search-input-placeholder="__('Search or filter results...')"
data-testid="runners-filtered-search"
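The `validTokens` computed property added above leans on `Array.prototype.filter(Boolean)`: `Boolean` serves as the predicate, so the `null`/`undefined` placeholders that CE passes for EE-only tokens are dropped before they reach the filtered-search component. A minimal sketch (token objects simplified):

// EE-only token slots arrive as null/undefined in CE builds.
const tokens = [{ type: 'status' }, null, undefined, { type: 'tag' }];

// Falsy entries are removed; object tokens pass through unchanged.
const validTokens = tokens.filter(Boolean);

console.log(validTokens); // => [{ type: 'status' }, { type: 'tag' }]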

View File

@@ -42,4 +42,41 @@ module VulnerabilityFindingHelpers
)
end
end
def build_vulnerability_finding(security_finding)
report_finding = report_finding_for(security_finding)
return Vulnerabilities::Finding.new unless report_finding
finding_data = report_finding.to_hash.except(:compare_key, :identifiers, :location, :scanner, :links, :signatures,
:flags, :evidence)
identifiers = report_finding.identifiers.map do |identifier|
Vulnerabilities::Identifier.new(identifier.to_hash)
end
signatures = report_finding.signatures.map do |signature|
Vulnerabilities::FindingSignature.new(signature.to_hash)
end
evidence = Vulnerabilities::Finding::Evidence.new(data: report_finding.evidence.data) if report_finding.evidence
Vulnerabilities::Finding.new(finding_data).tap do |finding|
finding.location_fingerprint = report_finding.location.fingerprint
finding.vulnerability = vulnerability_for(security_finding.uuid)
finding.project = project
finding.sha = pipeline.sha
finding.scanner = security_finding.scanner
finding.finding_evidence = evidence
if calculate_false_positive?
finding.vulnerability_flags = report_finding.flags.map do |flag|
Vulnerabilities::Flag.new(flag)
end
end
finding.identifiers = identifiers
finding.signatures = signatures
end
end
def calculate_false_positive?
project.licensed_feature_available?(:sast_fp_reduction)
end
end

View File

@@ -46,7 +46,6 @@ class Issue < ApplicationRecord
TYPES_FOR_LIST = %w(issue incident).freeze
belongs_to :project
has_one :namespace, through: :project
belongs_to :duplicated_to, class_name: 'Issue'
belongs_to :closed_by, class_name: 'User'

View File

@@ -74,6 +74,7 @@ class Namespace < ApplicationRecord
has_many :sync_events, class_name: 'Namespaces::SyncEvent'
has_one :cluster_enabled_grant, inverse_of: :namespace, class_name: 'Clusters::ClusterEnabledGrant'
has_many :work_items, inverse_of: :namespace, class_name: 'WorkItem'
validates :owner, presence: true, if: ->(n) { n.owner_required? }
validates :name,

View File

@@ -4,6 +4,7 @@ class WorkItem < Issue
self.table_name = 'issues'
self.inheritance_column = :_type_disabled
belongs_to :namespace, class_name: 'Namespace', foreign_key: :namespace_id, inverse_of: :work_items
has_one :parent_link, class_name: '::WorkItems::ParentLink', foreign_key: :work_item_id
has_one :work_item_parent, through: :parent_link, class_name: 'WorkItem'

View File

@@ -47,7 +47,7 @@ module Issues
issue.run_after_commit do
NewIssueWorker.perform_async(issue.id, user.id)
Issues::PlacementWorker.perform_async(nil, issue.project_id)
Namespaces::OnboardingIssueCreatedWorker.perform_async(issue.namespace.id)
Namespaces::OnboardingIssueCreatedWorker.perform_async(issue.project.namespace_id)
end
end

View File

@@ -0,0 +1,13 @@
# frozen_string_literal: true
class AddNamespaceIdColumnToIssuesTable < Gitlab::Database::Migration[2.0]
enable_lock_retries!
def up
add_column :issues, :namespace_id, :bigint
end
def down
remove_column :issues, :namespace_id
end
end

View File

@@ -0,0 +1,22 @@
# frozen_string_literal: true
class AddTempIndexForContainerRegistrySizeMigration < Gitlab::Database::Migration[2.0]
INDEX_CONTAINER_REGISTRY_SIZE = 'tmp_index_migrated_container_registries'
INDEX_PROJECT_STATS_CONT_REG_SIZE = 'tmp_index_project_statistics_cont_registry_size'
disable_ddl_transaction!
def up
# Temporary index used in 20220622080547_backfill_project_statistics_with_container_registry_size
# Temporary index to be removed via https://gitlab.com/gitlab-org/gitlab/-/issues/366392
add_concurrent_index :container_repositories, [:project_id], name: INDEX_CONTAINER_REGISTRY_SIZE,
where: "migration_state = 'import_done' OR created_at >= '2022-01-23'"
add_concurrent_index :project_statistics, [:project_id], name: INDEX_PROJECT_STATS_CONT_REG_SIZE,
where: "container_registry_size = 0"
end
def down
remove_concurrent_index_by_name :container_repositories, INDEX_CONTAINER_REGISTRY_SIZE
remove_concurrent_index_by_name :project_statistics, INDEX_PROJECT_STATS_CONT_REG_SIZE
end
end

View File

@@ -0,0 +1,33 @@
# frozen_string_literal: true
class BackfillProjectStatisticsWithContainerRegistrySize < Gitlab::Database::Migration[2.0]
restrict_gitlab_migration gitlab_schema: :gitlab_main
DELAY_INTERVAL = 2.minutes.to_i
BATCH_SIZE = 500
MIGRATION_CLASS = 'BackfillProjectStatisticsContainerRepositorySize'
BATCH_CLASS_NAME = 'BackfillProjectStatisticsWithContainerRegistrySizeBatchingStrategy'
SUB_BATCH_SIZE = 100
disable_ddl_transaction!
def up
return unless Gitlab.dev_or_test_env? || Gitlab.com?
queue_batched_background_migration(
MIGRATION_CLASS,
:container_repositories,
:project_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
batch_class_name: BATCH_CLASS_NAME,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
return unless Gitlab.dev_or_test_env? || Gitlab.com?
delete_batched_background_migration(MIGRATION_CLASS, :container_repositories, :project_id, [])
end
end

View File

@@ -0,0 +1,22 @@
# frozen_string_literal: true
class AddIssuesNamespaceIdFkAndIndex < Gitlab::Database::Migration[2.0]
disable_ddl_transaction!
INDEX_NAME = 'index_issues_on_namespace_id'
def up
add_concurrent_index :issues, :namespace_id, name: INDEX_NAME
add_concurrent_foreign_key :issues, :namespaces,
column: :namespace_id,
on_delete: :nullify,
reverse_lock_order: true
end
def down
with_lock_retries do
remove_foreign_key_if_exists :issues, column: :namespace_id
end
remove_concurrent_index_by_name :issues, INDEX_NAME
end
end

View File

@@ -0,0 +1 @@
e259a91d467b3ec3e09c4514de0e798cffa697a8bc492edd6ad0dcab7f9a9623

View File

@@ -0,0 +1 @@
366f0819ce42bc84fc88871872d4b5870e63894fa2e32fbd7808cce2afe4815b

View File

@@ -0,0 +1 @@
f95de3ed746d6f661358a3826587da37009f20ba3cd0e8a332e57f9276fb856c

View File

@@ -0,0 +1 @@
50d788ced675b3773bbb84122040c775c24c0993c95542f5130a6456fcd4ee69

View File

@@ -16575,6 +16575,7 @@ CREATE TABLE issues (
blocking_issues_count integer DEFAULT 0 NOT NULL,
upvotes_count integer DEFAULT 0 NOT NULL,
work_item_type_id bigint,
namespace_id bigint,
CONSTRAINT check_fba63f706d CHECK ((lock_version IS NOT NULL))
);
@@ -28513,6 +28514,8 @@ CREATE INDEX index_issues_on_milestone_id ON issues USING btree (milestone_id);
CREATE INDEX index_issues_on_moved_to_id ON issues USING btree (moved_to_id) WHERE (moved_to_id IS NOT NULL);
CREATE INDEX index_issues_on_namespace_id ON issues USING btree (namespace_id);
CREATE INDEX index_issues_on_project_id_and_created_at_issue_type_incident ON issues USING btree (project_id, created_at) WHERE (issue_type = 1);
CREATE UNIQUE INDEX index_issues_on_project_id_and_external_key ON issues USING btree (project_id, external_key) WHERE (external_key IS NOT NULL);
@@ -30247,10 +30250,14 @@ CREATE INDEX tmp_index_members_on_state ON members USING btree (state) WHERE (st
CREATE INDEX tmp_index_merge_requests_draft_and_status ON merge_requests USING btree (id) WHERE ((draft = false) AND (state_id = 1) AND ((title)::text ~* '^(\[draft\]|\(draft\)|draft:|draft|\[WIP\]|WIP:|WIP)'::text));
CREATE INDEX tmp_index_migrated_container_registries ON container_repositories USING btree (project_id) WHERE ((migration_state = 'import_done'::text) OR (created_at >= '2022-01-23 00:00:00'::timestamp without time zone));
CREATE UNIQUE INDEX tmp_index_on_tmp_project_id_on_namespaces ON namespaces USING btree (tmp_project_id);
CREATE INDEX tmp_index_on_vulnerabilities_non_dismissed ON vulnerabilities USING btree (id) WHERE (state <> 2);
CREATE INDEX tmp_index_project_statistics_cont_registry_size ON project_statistics USING btree (project_id) WHERE (container_registry_size = 0);
CREATE UNIQUE INDEX uniq_pkgs_deb_grp_architectures_on_distribution_id_and_name ON packages_debian_group_architectures USING btree (distribution_id, name);
CREATE UNIQUE INDEX uniq_pkgs_deb_grp_components_on_distribution_id_and_name ON packages_debian_group_components USING btree (distribution_id, name);
@@ -31933,6 +31940,9 @@ ALTER TABLE ONLY projects
ALTER TABLE ONLY dast_profile_schedules
ADD CONSTRAINT fk_6cca0d8800 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY issues
ADD CONSTRAINT fk_6e10d4d38a FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE SET NULL;
ALTER TABLE ONLY projects
ADD CONSTRAINT fk_6e5c14658a FOREIGN KEY (pool_repository_id) REFERENCES pool_repositories(id) ON DELETE SET NULL;

View File

@@ -61,14 +61,6 @@ You can work with sample queries that pull data from public projects on GitLab.c
The [get started](getting_started.md) page includes different methods to customize GraphQL queries.
### Update the GraphQL API reference
If you change the GraphQL schema, create a merge request to get your changes approved.
To generate the required documentation and schema, see
[Rake tasks for developers](../../development/rake_tasks.md#update-graphql-documentation-and-schema-definitions).
Run the commands using the [GitLab Development Kit](https://gitlab.com/gitlab-org/gitlab-development-kit/).
## Breaking changes
The GitLab GraphQL API is [versionless](https://graphql.org/learn/best-practices/#versioning) and changes to the API are primarily backward-compatible.

View File

@@ -169,7 +169,7 @@ If [Group SAML](index.md) has been configured and you have an existing GitLab.co
We recommend users do this prior to turning on sync, because while synchronization is active, there may be provisioning errors for existing users.
New users and existing users on subsequent visits can access the group through the identify provider's dashboard or by visiting links directly.
New users and existing users on subsequent visits can access the group through the identity provider's dashboard or by visiting links directly.
[In GitLab 14.0 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/325712), GitLab users created by [SAML SSO](index.md#user-access-and-management) or SCIM provisioning display with an **Enterprise** badge in the **Members** view.
@@ -257,7 +257,7 @@ Changing the SAML or SCIM configuration or provider can cause the following prob
| Problem | Solution |
| ------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| SAML and SCIM identity mismatch. | First [verify that the user's SAML NameId matches the SCIM externalId](#how-do-i-verify-users-saml-nameid-matches-the-scim-externalid) and then [update or fix the mismatched SCIM externalId and SAML NameId](#update-or-fix-mismatched-scim-externalid-and-saml-nameid). |
| SCIM identity mismatch between GitLab and the Identify Provider SCIM app. | You can confirm whether you're hitting the error because of your SCIM identity mismatch between your SCIM app and GitLab.com by using [SCIM API](../../../api/scim.md#update-a-single-scim-provisioned-user) which shows up in the `id` key and compares it with the user `externalId` in the SCIM app. You can use the same [SCIM API](../../../api/scim.md#update-a-single-scim-provisioned-user) to update the SCIM `id` for the user on GitLab.com. |
| SCIM identity mismatch between GitLab and the identity provider SCIM app. | You can confirm whether you're hitting the error because of your SCIM identity mismatch between your SCIM app and GitLab.com by using [SCIM API](../../../api/scim.md#update-a-single-scim-provisioned-user) which shows up in the `id` key and compares it with the user `externalId` in the SCIM app. You can use the same [SCIM API](../../../api/scim.md#update-a-single-scim-provisioned-user) to update the SCIM `id` for the user on GitLab.com. |
### Azure

View File

@@ -0,0 +1,14 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Back-fill container_registry_size for project_statistics
class BackfillProjectStatisticsContainerRepositorySize < Gitlab::BackgroundMigration::BatchedMigrationJob
def perform
# no-op
end
end
end
end
Gitlab::BackgroundMigration::BackfillProjectStatisticsContainerRepositorySize.prepend_mod_with('Gitlab::BackgroundMigration::BackfillProjectStatisticsContainerRepositorySize') # rubocop:disable Layout/LineLength

View File

@@ -0,0 +1,21 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
module BatchingStrategies
# Batching class to use for back-filling project_statistic's container_registry_size.
# Batches will be scoped to records where the project_ids are migrated
#
# If no more batches exist in the table, returns nil.
class BackfillProjectStatisticsWithContainerRegistrySizeBatchingStrategy < PrimaryKeyBatchingStrategy
MIGRATION_PHASE_1_ENDED_AT = Date.new(2022, 01, 23).freeze
def apply_additional_filters(relation, job_arguments: [], job_class: nil)
relation.where(created_at: MIGRATION_PHASE_1_ENDED_AT..).or(
relation.where(migration_state: 'import_done')
).select(:project_id).distinct
end
end
end
end
end

View File

@@ -163,7 +163,7 @@ module QA
ssh
end
env_vars << %(GIT_SSH_COMMAND="ssh -i #{ssh.private_key_file.path} -o UserKnownHostsFile=#{ssh.known_hosts_file.path}")
env_vars << %(GIT_SSH_COMMAND="ssh -i #{ssh.private_key_file.path} -o UserKnownHostsFile=#{ssh.known_hosts_file.path} -o IdentitiesOnly=yes")
end
def delete_ssh_key

View File

@@ -89,6 +89,16 @@ describe('RunnerList', () => {
]);
});
it('can be configured with null or undefined tokens, which are ignored', () => {
createComponent({
props: {
tokens: [statusTokenConfig, null, undefined],
},
});
expect(findFilteredSearch().props('tokens')).toEqual([statusTokenConfig]);
});
it('fails validation for v-model with the wrong shape', () => {
expect(() => {
createComponent({ props: { value: { filters: 'wrong_filters', sort: 'sort' } } });

View File

@@ -17,7 +17,209 @@ import groupRunnersData from 'test_fixtures/graphql/runner/list/group_runners.qu
import groupRunnersDataPaginated from 'test_fixtures/graphql/runner/list/group_runners.query.graphql.paginated.json';
import groupRunnersCountData from 'test_fixtures/graphql/runner/list/group_runners_count.query.graphql.json';
import { RUNNER_PAGE_SIZE } from '~/runner/constants';
// Other mock data
// Mock searches and their corresponding urls
export const mockSearchExamples = [
{
name: 'a default query',
urlQuery: '',
search: { runnerType: null, filters: [], pagination: { page: 1 }, sort: 'CREATED_DESC' },
graphqlVariables: { sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
isDefault: true,
},
{
name: 'a single status',
urlQuery: '?status[]=ACTIVE',
search: {
runnerType: null,
filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
},
{
name: 'a single term text search',
urlQuery: '?search=something',
search: {
runnerType: null,
filters: [
{
type: 'filtered-search-term',
value: { data: 'something' },
},
],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: { search: 'something', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
},
{
name: 'a two terms text search',
urlQuery: '?search=something+else',
search: {
runnerType: null,
filters: [
{
type: 'filtered-search-term',
value: { data: 'something' },
},
{
type: 'filtered-search-term',
value: { data: 'else' },
},
],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: { search: 'something else', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
},
{
name: 'single instance type',
urlQuery: '?runner_type[]=INSTANCE_TYPE',
search: {
runnerType: 'INSTANCE_TYPE',
filters: [],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: { type: 'INSTANCE_TYPE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
},
{
name: 'multiple runner status',
urlQuery: '?status[]=ACTIVE&status[]=PAUSED',
search: {
runnerType: null,
filters: [
{ type: 'status', value: { data: 'ACTIVE', operator: '=' } },
{ type: 'status', value: { data: 'PAUSED', operator: '=' } },
],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
},
{
name: 'multiple status, a single instance type and a non default sort',
urlQuery: '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&sort=CREATED_ASC',
search: {
runnerType: 'INSTANCE_TYPE',
filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
pagination: { page: 1 },
sort: 'CREATED_ASC',
},
graphqlVariables: {
status: 'ACTIVE',
type: 'INSTANCE_TYPE',
sort: 'CREATED_ASC',
first: RUNNER_PAGE_SIZE,
},
},
{
name: 'a tag',
urlQuery: '?tag[]=tag-1',
search: {
runnerType: null,
filters: [{ type: 'tag', value: { data: 'tag-1', operator: '=' } }],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: {
tagList: ['tag-1'],
first: 20,
sort: 'CREATED_DESC',
},
},
{
name: 'two tags',
urlQuery: '?tag[]=tag-1&tag[]=tag-2',
search: {
runnerType: null,
filters: [
{ type: 'tag', value: { data: 'tag-1', operator: '=' } },
{ type: 'tag', value: { data: 'tag-2', operator: '=' } },
],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: {
tagList: ['tag-1', 'tag-2'],
first: 20,
sort: 'CREATED_DESC',
},
},
{
name: 'the next page',
urlQuery: '?page=2&after=AFTER_CURSOR',
search: {
runnerType: null,
filters: [],
pagination: { page: 2, after: 'AFTER_CURSOR' },
sort: 'CREATED_DESC',
},
graphqlVariables: { sort: 'CREATED_DESC', after: 'AFTER_CURSOR', first: RUNNER_PAGE_SIZE },
},
{
name: 'the previous page',
urlQuery: '?page=2&before=BEFORE_CURSOR',
search: {
runnerType: null,
filters: [],
pagination: { page: 2, before: 'BEFORE_CURSOR' },
sort: 'CREATED_DESC',
},
graphqlVariables: { sort: 'CREATED_DESC', before: 'BEFORE_CURSOR', last: RUNNER_PAGE_SIZE },
},
{
name: 'the next page filtered by a status, an instance type, tags and a non default sort',
urlQuery:
'?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&tag[]=tag-1&tag[]=tag-2&sort=CREATED_ASC&page=2&after=AFTER_CURSOR',
search: {
runnerType: 'INSTANCE_TYPE',
filters: [
{ type: 'status', value: { data: 'ACTIVE', operator: '=' } },
{ type: 'tag', value: { data: 'tag-1', operator: '=' } },
{ type: 'tag', value: { data: 'tag-2', operator: '=' } },
],
pagination: { page: 2, after: 'AFTER_CURSOR' },
sort: 'CREATED_ASC',
},
graphqlVariables: {
status: 'ACTIVE',
type: 'INSTANCE_TYPE',
tagList: ['tag-1', 'tag-2'],
sort: 'CREATED_ASC',
after: 'AFTER_CURSOR',
first: RUNNER_PAGE_SIZE,
},
},
{
name: 'paused runners',
urlQuery: '?paused[]=true',
search: {
runnerType: null,
filters: [{ type: 'paused', value: { data: 'true', operator: '=' } }],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: { paused: true, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
},
{
name: 'active runners',
urlQuery: '?paused[]=false',
search: {
runnerType: null,
filters: [{ type: 'paused', value: { data: 'false', operator: '=' } }],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: { paused: false, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
},
];
export const onlineContactTimeoutSecs = 2 * 60 * 60;
export const staleTimeoutSecs = 7889238; // Ruby's `3.months`

View File

@@ -1,4 +1,3 @@
import { RUNNER_PAGE_SIZE } from '~/runner/constants';
import {
searchValidator,
updateOutdatedUrl,
@@ -7,208 +6,11 @@
fromSearchToVariables,
isSearchFiltered,
} from '~/runner/runner_search_utils';
import { mockSearchExamples } from './mock_data';
describe('search_params.js', () => {
const examples = [
{
name: 'a default query',
urlQuery: '',
search: { runnerType: null, filters: [], pagination: { page: 1 }, sort: 'CREATED_DESC' },
graphqlVariables: { sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
isDefault: true,
},
{
name: 'a single status',
urlQuery: '?status[]=ACTIVE',
search: {
runnerType: null,
filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
},
{
name: 'a single term text search',
urlQuery: '?search=something',
search: {
runnerType: null,
filters: [
{
type: 'filtered-search-term',
value: { data: 'something' },
},
],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: { search: 'something', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
},
{
name: 'a two terms text search',
urlQuery: '?search=something+else',
search: {
runnerType: null,
filters: [
{
type: 'filtered-search-term',
value: { data: 'something' },
},
{
type: 'filtered-search-term',
value: { data: 'else' },
},
],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: { search: 'something else', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
},
{
name: 'single instance type',
urlQuery: '?runner_type[]=INSTANCE_TYPE',
search: {
runnerType: 'INSTANCE_TYPE',
filters: [],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: { type: 'INSTANCE_TYPE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
},
{
name: 'multiple runner status',
urlQuery: '?status[]=ACTIVE&status[]=PAUSED',
search: {
runnerType: null,
filters: [
{ type: 'status', value: { data: 'ACTIVE', operator: '=' } },
{ type: 'status', value: { data: 'PAUSED', operator: '=' } },
],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
},
{
name: 'multiple status, a single instance type and a non default sort',
urlQuery: '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&sort=CREATED_ASC',
search: {
runnerType: 'INSTANCE_TYPE',
filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
pagination: { page: 1 },
sort: 'CREATED_ASC',
},
graphqlVariables: {
status: 'ACTIVE',
type: 'INSTANCE_TYPE',
sort: 'CREATED_ASC',
first: RUNNER_PAGE_SIZE,
},
},
{
name: 'a tag',
urlQuery: '?tag[]=tag-1',
search: {
runnerType: null,
filters: [{ type: 'tag', value: { data: 'tag-1', operator: '=' } }],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: {
tagList: ['tag-1'],
first: 20,
sort: 'CREATED_DESC',
},
},
{
name: 'two tags',
urlQuery: '?tag[]=tag-1&tag[]=tag-2',
search: {
runnerType: null,
filters: [
{ type: 'tag', value: { data: 'tag-1', operator: '=' } },
{ type: 'tag', value: { data: 'tag-2', operator: '=' } },
],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: {
tagList: ['tag-1', 'tag-2'],
first: 20,
sort: 'CREATED_DESC',
},
},
{
name: 'the next page',
urlQuery: '?page=2&after=AFTER_CURSOR',
search: {
runnerType: null,
filters: [],
pagination: { page: 2, after: 'AFTER_CURSOR' },
sort: 'CREATED_DESC',
},
graphqlVariables: { sort: 'CREATED_DESC', after: 'AFTER_CURSOR', first: RUNNER_PAGE_SIZE },
},
{
name: 'the previous page',
urlQuery: '?page=2&before=BEFORE_CURSOR',
search: {
runnerType: null,
filters: [],
pagination: { page: 2, before: 'BEFORE_CURSOR' },
sort: 'CREATED_DESC',
},
graphqlVariables: { sort: 'CREATED_DESC', before: 'BEFORE_CURSOR', last: RUNNER_PAGE_SIZE },
},
{
name: 'the next page filtered by a status, an instance type, tags and a non default sort',
urlQuery:
'?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&tag[]=tag-1&tag[]=tag-2&sort=CREATED_ASC&page=2&after=AFTER_CURSOR',
search: {
runnerType: 'INSTANCE_TYPE',
filters: [
{ type: 'status', value: { data: 'ACTIVE', operator: '=' } },
{ type: 'tag', value: { data: 'tag-1', operator: '=' } },
{ type: 'tag', value: { data: 'tag-2', operator: '=' } },
],
pagination: { page: 2, after: 'AFTER_CURSOR' },
sort: 'CREATED_ASC',
},
graphqlVariables: {
status: 'ACTIVE',
type: 'INSTANCE_TYPE',
tagList: ['tag-1', 'tag-2'],
sort: 'CREATED_ASC',
after: 'AFTER_CURSOR',
first: RUNNER_PAGE_SIZE,
},
},
{
name: 'paused runners',
urlQuery: '?paused[]=true',
search: {
runnerType: null,
filters: [{ type: 'paused', value: { data: 'true', operator: '=' } }],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: { paused: true, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
},
{
name: 'active runners',
urlQuery: '?paused[]=false',
search: {
runnerType: null,
filters: [{ type: 'paused', value: { data: 'false', operator: '=' } }],
pagination: { page: 1 },
sort: 'CREATED_DESC',
},
graphqlVariables: { paused: false, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
},
];
describe('searchValidator', () => {
examples.forEach(({ name, search }) => {
mockSearchExamples.forEach(({ name, search }) => {
it(`Validates ${name} as a search object`, () => {
expect(searchValidator(search)).toBe(true);
});
@@ -235,7 +37,7 @@ });
});
describe('fromUrlQueryToSearch', () => {
examples.forEach(({ name, urlQuery, search }) => {
mockSearchExamples.forEach(({ name, urlQuery, search }) => {
it(`Converts ${name} to a search object`, () => {
expect(fromUrlQueryToSearch(urlQuery)).toEqual(search);
});
@@ -268,7 +70,7 @@ });
});
describe('fromSearchToUrl', () => {
examples.forEach(({ name, urlQuery, search }) => {
mockSearchExamples.forEach(({ name, urlQuery, search }) => {
it(`Converts ${name} to a url`, () => {
expect(fromSearchToUrl(search)).toBe(`http://test.host/${urlQuery}`);
});
@@ -295,7 +97,7 @@ });
});
describe('fromSearchToVariables', () => {
examples.forEach(({ name, graphqlVariables, search }) => {
mockSearchExamples.forEach(({ name, graphqlVariables, search }) => {
it(`Converts ${name} to a GraphQL query variables object`, () => {
expect(fromSearchToVariables(search)).toEqual(graphqlVariables);
});
@@ -335,7 +137,7 @@ });
});
describe('isSearchFiltered', () => {
examples.forEach(({ name, search, isDefault }) => {
mockSearchExamples.forEach(({ name, search, isDefault }) => {
it(`Given ${name}, evaluates to ${isDefault ? 'not ' : ''}filtered`, () => {
expect(isSearchFiltered(search)).toBe(!isDefault);
});

View File

@@ -0,0 +1,138 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::BackfillProjectStatisticsWithContainerRegistrySizeBatchingStrategy, '#next_batch' do # rubocop:disable Layout/LineLength
let(:batching_strategy) { described_class.new(connection: ActiveRecord::Base.connection) }
let(:namespace) { table(:namespaces) }
let(:project) { table(:projects) }
let(:container_repositories) { table(:container_repositories) }
let!(:group) do
namespace.create!(
name: 'namespace1', type: 'Group', path: 'space1'
)
end
let!(:proj_namespace1) do
namespace.create!(
name: 'proj1', path: 'proj1', type: 'Project', parent_id: group.id
)
end
let!(:proj_namespace2) do
namespace.create!(
name: 'proj2', path: 'proj2', type: 'Project', parent_id: group.id
)
end
let!(:proj_namespace3) do
namespace.create!(
name: 'proj3', path: 'proj3', type: 'Project', parent_id: group.id
)
end
let!(:proj1) do
project.create!(
name: 'proj1', path: 'proj1', namespace_id: group.id, project_namespace_id: proj_namespace1.id
)
end
let!(:proj2) do
project.create!(
name: 'proj2', path: 'proj2', namespace_id: group.id, project_namespace_id: proj_namespace2.id
)
end
let!(:proj3) do
project.create!(
name: 'proj3', path: 'proj3', namespace_id: group.id, project_namespace_id: proj_namespace3.id
)
end
let!(:con1) do
container_repositories.create!(
project_id: proj1.id,
name: "ContReg_#{proj1.id}:1",
migration_state: 'import_done',
created_at: Date.new(2022, 01, 20)
)
end
let!(:con2) do
container_repositories.create!(
project_id: proj1.id,
name: "ContReg_#{proj1.id}:2",
migration_state: 'import_done',
created_at: Date.new(2022, 01, 20)
)
end
let!(:con3) do
container_repositories.create!(
project_id: proj2.id,
name: "ContReg_#{proj2.id}:1",
migration_state: 'import_done',
created_at: Date.new(2022, 01, 20)
)
end
let!(:con4) do
container_repositories.create!(
project_id: proj3.id,
name: "ContReg_#{proj3.id}:1",
migration_state: 'default',
created_at: Date.new(2022, 02, 20)
)
end
let!(:con5) do
container_repositories.create!(
project_id: proj3.id,
name: "ContReg_#{proj3.id}:2",
migration_state: 'default',
created_at: Date.new(2022, 02, 20)
)
end
it { expect(described_class).to be < Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy }
context 'when starting on the first batch' do
it 'returns the bounds of the next batch' do
batch_bounds = batching_strategy.next_batch(
:container_repositories,
:project_id,
batch_min_value: con1.project_id,
batch_size: 3,
job_arguments: []
)
expect(batch_bounds).to eq([con1.project_id, con4.project_id])
end
end
context 'when additional batches remain' do
it 'returns the bounds of the next batch' do
batch_bounds = batching_strategy.next_batch(
:container_repositories,
:project_id,
batch_min_value: con3.project_id,
batch_size: 3,
job_arguments: []
)
expect(batch_bounds).to eq([con3.project_id, con5.project_id])
end
end
context 'when no additional batches remain' do
it 'returns nil' do
batch_bounds = batching_strategy.next_batch(:container_repositories,
:project_id,
batch_min_value: con5.project_id + 1,
batch_size: 1, job_arguments: []
)
expect(batch_bounds).to be_nil
end
end
end

View File

@@ -0,0 +1,41 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe BackfillProjectStatisticsWithContainerRegistrySize do
let_it_be(:batched_migration) { described_class::MIGRATION_CLASS }
it 'does not schedule background jobs when Gitlab.com is false' do
allow(Gitlab).to receive(:com?).and_return(false)
allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
end
end
it 'schedules background jobs for each batch of container_repository' do
allow(Gitlab).to receive(:com?).and_return(true)
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :container_repositories,
column_name: :project_id,
interval: described_class::DELAY_INTERVAL
)
}
end
end
end

View File

@@ -14,7 +14,6 @@ RSpec.describe Issue do
it { is_expected.to belong_to(:milestone) }
it { is_expected.to belong_to(:iteration) }
it { is_expected.to belong_to(:project) }
it { is_expected.to have_one(:namespace).through(:project) }
it { is_expected.to belong_to(:work_item_type).class_name('WorkItems::Type') }
it { is_expected.to belong_to(:moved_to).class_name('Issue') }
it { is_expected.to have_one(:moved_from).class_name('Issue') }

View File

@@ -32,6 +32,7 @@ RSpec.describe Namespace do
it { is_expected.to have_one :namespace_route }
it { is_expected.to have_many :namespace_members }
it { is_expected.to have_one :cluster_enabled_grant }
it { is_expected.to have_many(:work_items) }
it do
is_expected.to have_one(:ci_cd_settings).class_name('NamespaceCiCdSetting').inverse_of(:namespace).autosave(true)

View File

@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe WorkItem do
describe 'associations' do
it { is_expected.to belong_to(:namespace) }
it { is_expected.to have_one(:work_item_parent).class_name('WorkItem') }
it 'has one `parent_link`' do

View File

@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Namespaces::OnboardingIssueCreatedWorker, '#perform' do
let_it_be(:issue) { create(:issue) }
let(:namespace) { issue.namespace }
let(:namespace) { issue.project.namespace }
it_behaves_like 'records an onboarding progress action', :issue_created do
subject { described_class.new.perform(namespace.id) }

View File

@@ -306,6 +306,7 @@ func (api *API) PreAuthorizeFixedPath(r *http.Request, method string, path strin
return nil, fmt.Errorf("construct auth request: %w", err)
}
authReq.Header = helper.HeaderClone(r.Header)
authReq.URL.RawQuery = r.URL.RawQuery
failureResponse, apiResponse, err := api.PreAuthorize(path, authReq)
if err != nil {

View File

@@ -2,15 +2,15 @@ package api
import (
"fmt"
"io"
"net/http"
"net/http/httptest"
"net/url"
"regexp"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/labkit/log"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
@@ -73,16 +73,39 @@ func testRailsServer(url *regexp.Regexp, code int, body string) *httptest.Server
w.Header().Set("Content-Type", ResponseContentType)
logEntry := log.WithFields(log.Fields{
"method": r.Method,
"url": r.URL,
})
logEntryWithCode := logEntry.WithField("code", code)
// Write pure string
logEntryWithCode.Info("UPSTREAM")
w.WriteHeader(code)
fmt.Fprint(w, body)
})
}
func TestPreAuthorizeFixedPath(t *testing.T) {
var (
upstreamHeaders http.Header
upstreamQuery url.Values
)
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path != "/my/api/path" {
return
}
upstreamHeaders = r.Header
upstreamQuery = r.URL.Query()
w.Header().Set("Content-Type", ResponseContentType)
io.WriteString(w, `{"TempPath":"HELLO!!"}`)
}))
defer ts.Close()
req, err := http.NewRequest("GET", "/original/request/path?q1=Q1&q2=Q2", nil)
require.NoError(t, err)
req.Header.Set("key1", "value1")
api := NewAPI(helper.URLMustParse(ts.URL), "123", http.DefaultTransport)
resp, err := api.PreAuthorizeFixedPath(req, "POST", "/my/api/path")
require.NoError(t, err)
require.Equal(t, "value1", upstreamHeaders.Get("key1"), "original headers must propagate")
require.Equal(t, url.Values{"q1": []string{"Q1"}, "q2": []string{"Q2"}}, upstreamQuery,
"original query must propagate")
require.Equal(t, "HELLO!!", resp.TempPath, "sanity check: successful API call")
}

View File

@@ -19,23 +19,15 @@ func Multipart(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler {
}, "/authorize")
}
// SkipRailsPreAuthMultipart behaves like Multipart except it does not
// pre-authorize with Rails. It is intended for use on catch-all routes
// where we cannot pre-authorize both because we don't know which Rails
// endpoint to call, and because eagerly pre-authorizing would add too
// much overhead.
func SkipRailsPreAuthMultipart(tempPath string, myAPI *api.API, h http.Handler, p Preparer) http.Handler {
// FixedPreAuthMultipart behaves like Multipart except it makes lazy
// preauthorization requests when it encounters a multipart upload. The
// preauthorization requests go to a fixed internal GitLab Rails API
// endpoint. This endpoint currently does not support direct upload, so
// using FixedPreAuthMultipart implies disk buffering.
func FixedPreAuthMultipart(myAPI *api.API, h http.Handler, p Preparer) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
s := &SavedFileTracker{Request: r}
// We use testAuthorizer as a temporary measure. When
// https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/742 is done, we
// should only be using apiAuthorizer.
fa := &testAuthorizer{
test: &apiAuthorizer{myAPI},
actual: &eagerAuthorizer{&api.Response{TempPath: tempPath}},
}
fa := &apiAuthorizer{myAPI}
interceptMultipartFiles(w, r, h, s, fa, p)
})
}

View File

@@ -15,8 +15,6 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/log"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/exif"
@@ -222,21 +220,3 @@ func (aa *apiAuthorizer) AuthorizeFile(r *http.Request) (*api.Response, error) {
}
var _ fileAuthorizer = &apiAuthorizer{}
type testAuthorizer struct {
test fileAuthorizer
actual fileAuthorizer
}
func (ta *testAuthorizer) AuthorizeFile(r *http.Request) (*api.Response, error) {
logger := log.WithRequest(r)
if response, err := ta.test.AuthorizeFile(r); err != nil {
logger.WithError(err).Error("test api preauthorize request failed")
} else {
logger.WithFields(log.Fields{
"temp_path": response.TempPath,
}).Info("test api preauthorize request")
}
return ta.actual.AuthorizeFile(r)
}

View File

@@ -3,7 +3,6 @@ package upstream
import (
"net/http"
"net/url"
"path"
"regexp"
"github.com/gorilla/websocket"
@@ -222,8 +221,7 @@ func configureRoutes(u *upstream) {
requestBodyUploader := upload.RequestBody(api, signingProxy, preparer)
mimeMultipartUploader := upload.Multipart(api, signingProxy, preparer)
uploadPath := path.Join(u.DocumentRoot, "uploads/tmp")
tempfileMultipartProxy := upload.SkipRailsPreAuthMultipart(uploadPath, api, proxy, preparer)
tempfileMultipartProxy := upload.FixedPreAuthMultipart(api, proxy, preparer)
ciAPIProxyQueue := queueing.QueueRequests("ci_api_job_requests", tempfileMultipartProxy, u.APILimit, u.APIQueueLimit, u.APIQueueTimeout)
ciAPILongPolling := builds.RegisterHandler(ciAPIProxyQueue, redis.WatchKey, u.APICILongPollingDuration)

View File

@@ -287,30 +287,38 @@ func TestBlockingRewrittenFieldsHeader(t *testing.T) {
}
for _, tc := range testCases {
ts := testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) {
key := upload.RewrittenFieldsHeader
if tc.present && r.URL.Path != "/api/v4/internal/workhorse/authorize_upload" {
require.Contains(t, r.Header, key)
} else {
require.NotContains(t, r.Header, key)
}
t.Run(tc.desc, func(t *testing.T) {
ts := testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) {
switch r.URL.Path {
case "/api/v4/internal/workhorse/authorize_upload":
w.Header().Set("Content-Type", api.ResponseContentType)
io.WriteString(w, `{"TempPath":"`+os.TempDir()+`"}`)
default:
if tc.present {
require.Contains(t, r.Header, upload.RewrittenFieldsHeader)
} else {
require.NotContains(t, r.Header, upload.RewrittenFieldsHeader)
require.NotEqual(t, canary, r.Header.Get(key), "Found canary %q in header %q", canary, key)
}
}
require.NotEqual(t, canary, r.Header.Get(upload.RewrittenFieldsHeader), "Found canary %q in header", canary)
})
defer ts.Close()
ws := startWorkhorseServer(ts.URL)
defer ws.Close()
req, err := http.NewRequest("POST", ws.URL+"/something", tc.body)
require.NoError(t, err)
req.Header.Set("Content-Type", tc.contentType)
req.Header.Set(upload.RewrittenFieldsHeader, canary)
resp, err := http.DefaultClient.Do(req)
require.NoError(t, err)
defer resp.Body.Close()
require.Equal(t, 200, resp.StatusCode, "status code")
})
defer ts.Close()
ws := startWorkhorseServer(ts.URL)
defer ws.Close()
req, err := http.NewRequest("POST", ws.URL+"/something", tc.body)
require.NoError(t, err)
req.Header.Set("Content-Type", tc.contentType)
req.Header.Set(upload.RewrittenFieldsHeader, canary)
resp, err := http.DefaultClient.Do(req)
require.NoError(t, err)
defer resp.Body.Close()
require.Equal(t, 200, resp.StatusCode, "status code")
}
}