Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-12-07 18:10:32 +00:00
parent 3a0f6ebaa9
commit 551b3bfd7e
45 changed files with 620 additions and 338 deletions

View File

@ -26,6 +26,9 @@
- export CI_ENVIRONMENT_URL="$(cat environment_url.txt)"
- echo "${CI_ENVIRONMENT_URL}"
- cd qa
- if [ -n "$KNAPSACK_REPORT_PATH" ]; then
bundle exec rake knapsack:download;
fi
artifacts:
paths:
- qa/tmp
@ -34,15 +37,22 @@
.parallel-qa-base:
parallel: 5
variables:
KNAPSACK_TEST_FILE_PATTERN: "qa/specs/features/**/*_spec.rb"
script:
- export KNAPSACK_REPORT_PATH=knapsack/master_report.json
- export KNAPSACK_TEST_FILE_PATTERN=qa/specs/features/**/*_spec.rb
- |
bin/test "${QA_SCENARIO}" "${CI_ENVIRONMENT_URL}" \
-- \
--color --format documentation \
--format RspecJunitFormatter --out tmp/rspec.xml
after_script:
- if [ -n "$KNAPSACK_GENERATE_REPORT" ]; then
mv qa/${KNAPSACK_REPORT_PATH} qa/knapsack/gcs/regenerated-${CI_NODE_INDEX}.json;
fi
artifacts:
paths:
- qa/tmp # we can't merge lists, so we need to include this explicitly once more
- qa/knapsack/gcs/regenerated-*.json
reports:
junit: qa/tmp/rspec.xml
@ -69,6 +79,16 @@
--ignore-missing-results \
--color
.knapsack-upload-base:
image:
name: ${QA_IMAGE}
entrypoint: [""]
stage: post-qa
before_script:
- cd qa
script:
- bundle exec rake 'knapsack:upload[knapsack/gcs/regenerated-*.json]'
review-qa-smoke:
extends:
- .review-qa-base
@ -88,6 +108,7 @@ review-qa-reliable:
variables:
QA_RUN_TYPE: review-qa-reliable
QA_SCENARIO: Test::Instance::Reliable
KNAPSACK_REPORT_PATH: knapsack/gcs/review-qa-reliable.json
review-qa-all:
extends:
@ -97,6 +118,7 @@ review-qa-all:
variables:
QA_RUN_TYPE: review-qa-all
QA_SCENARIO: Test::Instance::All
KNAPSACK_REPORT_PATH: knapsack/gcs/review-qa-all.json
review-performance:
extends:
@ -141,3 +163,19 @@ allure-report-qa-all:
variables:
ALLURE_REPORT_PATH_PREFIX: gitlab-review-all
ALLURE_JOB_NAME: review-qa-all
knapsack-report-qa-all:
extends:
- .knapsack-upload-base
- .review:rules:knapsack-report-qa-all
needs: ["review-qa-all"]
variables:
KNAPSACK_REPORT_PATH: knapsack/gcs/review-qa-all.json
knapsack-report-qa-reliable:
extends:
- .knapsack-upload-base
- .review:rules:knapsack-report-qa-reliable
needs: ["review-qa-reliable"]
variables:
KNAPSACK_REPORT_PATH: knapsack/gcs/review-qa-reliable.json

View File

@ -648,6 +648,8 @@
changes: *ci-patterns
- <<: *if-automated-merge-request
changes: *code-backstage-patterns
- <<: *if-security-merge-request
changes: *code-backstage-patterns
- <<: *if-default-refs
changes: *backend-patterns
- <<: *if-merge-request-not-approved
@ -661,6 +663,8 @@
when: never
- <<: *if-automated-merge-request
when: never
- <<: *if-security-merge-request
when: never
- <<: *if-merge-request-labels-run-all-jest
when: never
- <<: *if-default-refs
@ -815,6 +819,8 @@
changes: *db-patterns
- <<: *if-automated-merge-request
changes: *db-patterns
- <<: *if-security-merge-request
changes: *db-patterns
- <<: *if-merge-request-not-approved
when: never
- changes: *db-patterns
@ -825,6 +831,8 @@
when: never
- <<: *if-automated-merge-request
when: never
- <<: *if-security-merge-request
when: never
- <<: *if-merge-request-labels-run-all-rspec
when: never
- <<: *if-merge-request
@ -860,6 +868,8 @@
changes: *ci-patterns
- <<: *if-automated-merge-request
changes: *backend-patterns
- <<: *if-security-merge-request
changes: *backend-patterns
- <<: *if-merge-request-not-approved
when: never
- changes: *backend-patterns
@ -870,6 +880,8 @@
when: never
- <<: *if-automated-merge-request
when: never
- <<: *if-security-merge-request
when: never
- <<: *if-merge-request-labels-run-all-rspec
when: never
- <<: *if-merge-request
@ -890,6 +902,8 @@
changes: *ci-patterns
- <<: *if-automated-merge-request
changes: *backend-patterns
- <<: *if-security-merge-request
changes: *backend-patterns
- <<: *if-merge-request-not-approved
when: never
- changes: *backend-patterns
@ -900,6 +914,8 @@
when: never
- <<: *if-automated-merge-request
when: never
- <<: *if-security-merge-request
when: never
- <<: *if-merge-request-labels-run-all-rspec
when: never
- <<: *if-merge-request
@ -920,6 +936,8 @@
changes: *ci-patterns
- <<: *if-automated-merge-request
changes: *code-backstage-patterns
- <<: *if-security-merge-request
changes: *code-backstage-patterns
- <<: *if-merge-request-not-approved
when: never
- changes: *code-backstage-patterns
@ -930,6 +948,8 @@
when: never
- <<: *if-automated-merge-request
when: never
- <<: *if-security-merge-request
when: never
- <<: *if-merge-request-labels-run-all-rspec
when: never
- <<: *if-merge-request
@ -950,6 +970,8 @@
changes: *ci-patterns
- <<: *if-automated-merge-request
changes: ["config/**/*"]
- <<: *if-security-merge-request
changes: ["config/**/*"]
- <<: *if-merge-request-not-approved
when: never
- changes: ["config/**/*"]
@ -960,6 +982,8 @@
when: never
- <<: *if-automated-merge-request
when: never
- <<: *if-security-merge-request
when: never
- <<: *if-merge-request-labels-run-all-rspec
when: never
- <<: *if-merge-request
@ -989,6 +1013,8 @@
changes: *db-patterns
- <<: *if-automated-merge-request
changes: *db-patterns
- <<: *if-security-merge-request
changes: *db-patterns
- <<: *if-merge-request-not-approved
when: never
- changes: *db-patterns
@ -1001,6 +1027,8 @@
when: never
- <<: *if-automated-merge-request
when: never
- <<: *if-security-merge-request
when: never
- <<: *if-merge-request-labels-run-all-rspec
when: never
- <<: *if-merge-request
@ -1024,6 +1052,8 @@
changes: *ci-patterns
- <<: *if-automated-merge-request
changes: *backend-patterns
- <<: *if-security-merge-request
changes: *backend-patterns
- <<: *if-merge-request-not-approved
when: never
- changes: *backend-patterns
@ -1036,6 +1066,8 @@
when: never
- <<: *if-automated-merge-request
when: never
- <<: *if-security-merge-request
when: never
- <<: *if-merge-request-labels-run-all-rspec
when: never
- <<: *if-merge-request
@ -1058,6 +1090,8 @@
changes: *ci-patterns
- <<: *if-automated-merge-request
changes: *backend-patterns
- <<: *if-security-merge-request
changes: *backend-patterns
- <<: *if-merge-request-not-approved
when: never
- changes: *backend-patterns
@ -1070,6 +1104,8 @@
when: never
- <<: *if-automated-merge-request
when: never
- <<: *if-security-merge-request
when: never
- <<: *if-merge-request-labels-run-all-rspec
when: never
- <<: *if-merge-request
@ -1092,6 +1128,8 @@
changes: *ci-patterns
- <<: *if-automated-merge-request
changes: *code-backstage-patterns
- <<: *if-security-merge-request
changes: *code-backstage-patterns
- <<: *if-merge-request-not-approved
when: never
- changes: *code-backstage-patterns
@ -1104,6 +1142,8 @@
when: never
- <<: *if-automated-merge-request
when: never
- <<: *if-security-merge-request
when: never
- <<: *if-merge-request-labels-run-all-rspec
when: never
- <<: *if-merge-request
@ -1130,6 +1170,8 @@
changes: *db-patterns
- <<: *if-automated-merge-request
changes: *db-patterns
- <<: *if-security-merge-request
changes: *db-patterns
- <<: *if-merge-request-not-approved
when: never
@ -1141,15 +1183,14 @@
when: never
- <<: *if-automated-merge-request
when: never
- <<: *if-security-merge-request
when: never
- <<: *if-merge-request
changes: *core-backend-patterns
when: never
- <<: *if-merge-request
changes: *ci-patterns
when: never
- <<: *if-security-merge-request
changes: *db-patterns
when: never
- <<: *if-merge-request-labels-as-if-foss
changes: *db-patterns
when: never
@ -1165,10 +1206,10 @@
changes: *ci-patterns
- <<: *if-automated-merge-request
changes: *backend-patterns
- <<: *if-merge-request-not-approved
when: never
- <<: *if-security-merge-request
changes: *backend-patterns
- <<: *if-merge-request-not-approved
when: never
- <<: *if-merge-request-labels-as-if-foss
changes: *backend-patterns
@ -1180,14 +1221,14 @@
when: never
- <<: *if-automated-merge-request
when: never
- <<: *if-security-merge-request
when: never
- <<: *if-merge-request
changes: *core-backend-patterns
when: never
- <<: *if-merge-request
changes: *ci-patterns
when: never
- <<: *if-security-merge-request
changes: *backend-patterns
- <<: *if-merge-request-labels-as-if-foss
changes: *backend-patterns
@ -1202,10 +1243,10 @@
changes: *ci-patterns
- <<: *if-automated-merge-request
changes: *backend-patterns
- <<: *if-merge-request-not-approved
when: never
- <<: *if-security-merge-request
changes: *backend-patterns
- <<: *if-merge-request-not-approved
when: never
- <<: *if-merge-request-labels-as-if-foss
changes: *backend-patterns
@ -1217,14 +1258,14 @@
when: never
- <<: *if-automated-merge-request
when: never
- <<: *if-security-merge-request
when: never
- <<: *if-merge-request
changes: *core-backend-patterns
when: never
- <<: *if-merge-request
changes: *ci-patterns
when: never
- <<: *if-security-merge-request
changes: *backend-patterns
- <<: *if-merge-request-labels-as-if-foss
changes: *backend-patterns
@ -1239,10 +1280,10 @@
changes: *ci-patterns
- <<: *if-automated-merge-request
changes: *code-backstage-patterns
- <<: *if-merge-request-not-approved
when: never
- <<: *if-security-merge-request
changes: *code-backstage-patterns
- <<: *if-merge-request-not-approved
when: never
- <<: *if-merge-request-labels-as-if-foss
changes: *code-backstage-patterns
@ -1254,14 +1295,14 @@
when: never
- <<: *if-automated-merge-request
when: never
- <<: *if-security-merge-request
when: never
- <<: *if-merge-request
changes: *core-backend-patterns
when: never
- <<: *if-merge-request
changes: *ci-patterns
when: never
- <<: *if-security-merge-request
changes: *code-backstage-patterns
- <<: *if-merge-request-labels-as-if-foss
changes: *code-backstage-patterns
@ -1621,6 +1662,10 @@
.review:rules:review-qa-reliable:
rules:
- <<: *if-dot-com-ee-2-hourly-schedule
allow_failure: true
variables:
KNAPSACK_GENERATE_REPORT: "true"
- when: on_success
allow_failure: true
@ -1631,6 +1676,10 @@
changes: *code-patterns
when: manual
allow_failure: true # manual jobs need to be allowed to fail, otherwise they block the pipeline
- <<: *if-dot-com-ee-2-hourly-schedule
allow_failure: true
variables:
KNAPSACK_GENERATE_REPORT: "true"
- when: on_success
allow_failure: true
@ -1646,6 +1695,20 @@
- when: on_failure
allow_failure: true
# Generate knapsack report on successful runs only
# The reliable suite passes most of the time, so this should yield the best distribution
.review:rules:knapsack-report-qa-reliable:
rules:
- <<: *if-dot-com-ee-2-hourly-schedule
when: on_success
allow_failure: true
.review:rules:knapsack-report-qa-all:
rules:
- <<: *if-dot-com-ee-2-hourly-schedule
when: always
allow_failure: true
.review:rules:review-cleanup:
rules:
- <<: *if-not-ee

View File

@ -185,7 +185,7 @@ gem 'rack', '~> 2.2.3'
gem 'rack-timeout', '~> 0.5.1', require: 'rack/timeout/base'
group :puma do
gem 'puma', '~> 5.3.1', require: false
gem 'puma', '~> 5.5.2', require: false
gem 'puma_worker_killer', '~> 0.3.1', require: false
gem 'sd_notify', '~> 0.1.0', require: false
end

View File

@ -945,7 +945,7 @@ GEM
tty-markdown
tty-prompt
public_suffix (4.0.6)
puma (5.3.2)
puma (5.5.2)
nio4r (~> 2.0)
puma_worker_killer (0.3.1)
get_process_mem (~> 0.2)
@ -1581,7 +1581,7 @@ DEPENDENCIES
pry-byebug
pry-rails (~> 0.3.9)
pry-shell (~> 0.5.0)
puma (~> 5.3.1)
puma (~> 5.5.2)
puma_worker_killer (~> 0.3.1)
rack (~> 2.2.3)
rack-attack (~> 6.3.0)

View File

@ -11,3 +11,10 @@ export const FILTER_TYPE = {
};
export const MAX_HISTORY_SIZE = 5;
export const FILTERED_SEARCH = {
MERGE_REQUESTS: 'merge_requests',
ISSUES: 'issues',
ADMIN_RUNNERS: 'admin/runners',
GROUP_RUNNERS_ANCHOR: 'runners-settings',
};

View File

@ -4,3 +4,8 @@ export const ISSUABLE_TYPE = {
issues: 'issues',
mergeRequests: 'merge-requests',
};
export const ISSUABLE_INDEX = {
ISSUE: 'issue_',
MERGE_REQUEST: 'merge_request_',
};

View File

@ -1,7 +1,8 @@
<script>
import { GlButton, GlKeysetPagination } from '@gitlab/ui';
import createFlash from '~/flash';
import { n__ } from '~/locale';
import { joinPaths } from '~/lib/utils/url_utility';
import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
import {
REMOVE_TAGS_BUTTON_TITLE,
TAGS_LIST_TITLE,
@ -16,11 +17,10 @@ import TagsLoader from './tags_loader.vue';
export default {
name: 'TagsList',
components: {
GlButton,
GlKeysetPagination,
TagsListRow,
EmptyState,
TagsLoader,
RegistryList,
},
inject: ['config'],
props: {
@ -61,11 +61,13 @@ export default {
},
data() {
return {
selectedItems: {},
containerRepository: {},
};
},
computed: {
listTitle() {
return n__('%d tag', '%d tags', this.tags.length);
},
tags() {
return this.containerRepository?.tags?.nodes || [];
},
@ -78,18 +80,9 @@ export default {
first: GRAPHQL_PAGE_SIZE,
};
},
hasSelectedItems() {
return this.tags.some((tag) => this.selectedItems[tag.name]);
},
showMultiDeleteButton() {
return this.tags.some((tag) => tag.canDelete) && !this.isMobile;
},
multiDeleteButtonIsDisabled() {
return !this.hasSelectedItems || this.disabled;
},
showPagination() {
return this.tagsPageInfo.hasPreviousPage || this.tagsPageInfo.hasNextPage;
},
hasNoTags() {
return this.tags.length === 0;
},
@ -98,19 +91,13 @@ export default {
},
},
methods: {
updateSelectedItems(name) {
this.$set(this.selectedItems, name, !this.selectedItems[name]);
},
mapTagsToBeDleeted(items) {
return this.tags.filter((tag) => items[tag.name]);
},
fetchNextPage() {
this.$apollo.queries.containerRepository.fetchMore({
variables: {
after: this.tagsPageInfo?.endCursor,
first: GRAPHQL_PAGE_SIZE,
},
updateQuery(previousResult, { fetchMoreResult }) {
updateQuery(_, { fetchMoreResult }) {
return fetchMoreResult;
},
});
@ -122,7 +109,7 @@ export default {
before: this.tagsPageInfo?.startCursor,
last: GRAPHQL_PAGE_SIZE,
},
updateQuery(previousResult, { fetchMoreResult }) {
updateQuery(_, { fetchMoreResult }) {
return fetchMoreResult;
},
});
@ -137,42 +124,27 @@ export default {
<template v-else>
<empty-state v-if="hasNoTags" :no-containers-image="config.noContainersImage" />
<template v-else>
<div class="gl-display-flex gl-justify-content-space-between gl-mb-3">
<h5 data-testid="list-title">
{{ $options.i18n.TAGS_LIST_TITLE }}
</h5>
<gl-button
v-if="showMultiDeleteButton"
:disabled="multiDeleteButtonIsDisabled"
category="secondary"
variant="danger"
@click="$emit('delete', mapTagsToBeDleeted(selectedItems))"
>
{{ $options.i18n.REMOVE_TAGS_BUTTON_TITLE }}
</gl-button>
</div>
<tags-list-row
v-for="(tag, index) in tags"
:key="tag.path"
:tag="tag"
:first="index === 0"
:selected="selectedItems[tag.name]"
:is-mobile="isMobile"
:disabled="disabled"
@select="updateSelectedItems(tag.name)"
@delete="$emit('delete', mapTagsToBeDleeted({ [tag.name]: true }))"
/>
<div class="gl-display-flex gl-justify-content-center">
<gl-keyset-pagination
v-if="showPagination"
:has-next-page="tagsPageInfo.hasNextPage"
:has-previous-page="tagsPageInfo.hasPreviousPage"
class="gl-mt-3"
@prev="fetchPreviousPage"
@next="fetchNextPage"
/>
</div>
<registry-list
:title="listTitle"
:pagination="tagsPageInfo"
:items="tags"
id-property="name"
@prev-page="fetchPreviousPage"
@next-page="fetchNextPage"
@delete="$emit('delete', $event)"
>
<template #default="{ selectItem, isSelected, item, first }">
<tags-list-row
:tag="item"
:first="first"
:selected="isSelected(item)"
:is-mobile="isMobile"
:disabled="disabled"
@select="selectItem(item)"
@delete="$emit('delete', [item])"
/>
</template>
</registry-list>
</template>
</template>
</div>

View File

@ -1,6 +0,0 @@
export const FILTERED_SEARCH = {
MERGE_REQUESTS: 'merge_requests',
ISSUES: 'issues',
ADMIN_RUNNERS: 'admin/runners',
GROUP_RUNNERS_ANCHOR: 'runners-settings',
};

View File

@ -1,6 +1,6 @@
import IssuableFilteredSearchTokenKeys from '~/filtered_search/issuable_filtered_search_token_keys';
import initManualOrdering from '~/manual_ordering';
import { FILTERED_SEARCH } from '~/pages/constants';
import { FILTERED_SEARCH } from '~/filtered_search/constants';
import initFilteredSearch from '~/pages/search/init_filtered_search';
import projectSelect from '~/project_select';

View File

@ -1,6 +1,6 @@
import addExtraTokensForMergeRequests from 'ee_else_ce/filtered_search/add_extra_tokens_for_merge_requests';
import IssuableFilteredSearchTokenKeys from '~/filtered_search/issuable_filtered_search_token_keys';
import { FILTERED_SEARCH } from '~/pages/constants';
import { FILTERED_SEARCH } from '~/filtered_search/constants';
import initFilteredSearch from '~/pages/search/init_filtered_search';
import projectSelect from '~/project_select';

View File

@ -2,7 +2,7 @@ import IssuableFilteredSearchTokenKeys from 'ee_else_ce/filtered_search/issuable
import issuableInitBulkUpdateSidebar from '~/issuable/bulk_update_sidebar/issuable_init_bulk_update_sidebar';
import { mountIssuablesListApp, mountIssuesListApp } from '~/issues_list';
import initManualOrdering from '~/manual_ordering';
import { FILTERED_SEARCH } from '~/pages/constants';
import { FILTERED_SEARCH } from '~/filtered_search/constants';
import initFilteredSearch from '~/pages/search/init_filtered_search';
import projectSelect from '~/project_select';

View File

@ -1,7 +1,7 @@
import addExtraTokensForMergeRequests from 'ee_else_ce/filtered_search/add_extra_tokens_for_merge_requests';
import IssuableFilteredSearchTokenKeys from '~/filtered_search/issuable_filtered_search_token_keys';
import issuableInitBulkUpdateSidebar from '~/issuable/bulk_update_sidebar/issuable_init_bulk_update_sidebar';
import { FILTERED_SEARCH } from '~/pages/constants';
import { FILTERED_SEARCH } from '~/filtered_search/constants';
import initFilteredSearch from '~/pages/search/init_filtered_search';
import projectSelect from '~/project_select';

View File

@ -1,7 +1,7 @@
import initVariableList from '~/ci_variable_list';
import GroupRunnersFilteredSearchTokenKeys from '~/filtered_search/group_runners_filtered_search_token_keys';
import initSharedRunnersForm from '~/group_settings/mount_shared_runners';
import { FILTERED_SEARCH } from '~/pages/constants';
import { FILTERED_SEARCH } from '~/filtered_search/constants';
import initFilteredSearch from '~/pages/search/init_filtered_search';
import { initRunnerAwsDeployments } from '~/pages/shared/mount_runner_aws_deployments';
import { initInstallRunner } from '~/pages/shared/mount_runner_instructions';

View File

@ -1,4 +0,0 @@
export const ISSUABLE_INDEX = {
MERGE_REQUEST: 'merge_request_',
ISSUE: 'issue_',
};

View File

@ -5,8 +5,8 @@ import initIssuableByEmail from '~/issuable/init_issuable_by_email';
import IssuableIndex from '~/issuable/issuable_index';
import { mountIssuablesListApp, mountIssuesListApp, mountJiraIssuesListApp } from '~/issues_list';
import initManualOrdering from '~/manual_ordering';
import { FILTERED_SEARCH } from '~/pages/constants';
import { ISSUABLE_INDEX } from '~/pages/projects/constants';
import { FILTERED_SEARCH } from '~/filtered_search/constants';
import { ISSUABLE_INDEX } from '~/issuable/constants';
import initFilteredSearch from '~/pages/search/init_filtered_search';
import UsersSelect from '~/users_select';

View File

@ -4,8 +4,8 @@ import IssuableFilteredSearchTokenKeys from '~/filtered_search/issuable_filtered
import initCsvImportExportButtons from '~/issuable/init_csv_import_export_buttons';
import initIssuableByEmail from '~/issuable/init_issuable_by_email';
import IssuableIndex from '~/issuable/issuable_index';
import { FILTERED_SEARCH } from '~/pages/constants';
import { ISSUABLE_INDEX } from '~/pages/projects/constants';
import { FILTERED_SEARCH } from '~/filtered_search/constants';
import { ISSUABLE_INDEX } from '~/issuable/constants';
import initFilteredSearch from '~/pages/search/init_filtered_search';
import UsersSelect from '~/users_select';

View File

@ -235,6 +235,12 @@ module Ci
pipeline.run_after_commit do
PipelineHooksWorker.perform_async(pipeline.id)
if pipeline.project.jira_subscription_exists?
# Passing the seq-id ensures this is idempotent
seq_id = ::Atlassian::JiraConnect::Client.generate_update_sequence_id
::JiraConnect::SyncBuildsWorker.perform_async(pipeline.id, seq_id)
end
if Feature.enabled?(:expire_job_and_pipeline_cache_synchronously, pipeline.project, default_enabled: :yaml)
Ci::ExpirePipelineCacheService.new.execute(pipeline) # rubocop: disable CodeReuse/ServiceClass
else
@ -274,14 +280,6 @@ module Ci
end
end
after_transition any => any do |pipeline|
pipeline.run_after_commit do
# Passing the seq-id ensures this is idempotent
seq_id = ::Atlassian::JiraConnect::Client.generate_update_sequence_id
::JiraConnect::SyncBuildsWorker.perform_async(pipeline.id, seq_id)
end
end
after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
pipeline.run_after_commit do
::Ci::TestFailureHistoryService.new(pipeline).async.perform_if_needed # rubocop: disable CodeReuse/ServiceClass

View File

@ -119,6 +119,8 @@ class Deployment < ApplicationRecord
next if transition.loopback?
deployment.run_after_commit do
next unless deployment.project.jira_subscription_exists?
::JiraConnect::SyncDeploymentsWorker.perform_async(id)
end
end
@ -126,6 +128,8 @@ class Deployment < ApplicationRecord
after_create unless: :importing? do |deployment|
run_after_commit do
next unless deployment.project.jira_subscription_exists?
::JiraConnect::SyncDeploymentsWorker.perform_async(deployment.id)
end
end

View File

@ -43,6 +43,7 @@ module FeatureFlags
def sync_to_jira(feature_flag)
return unless feature_flag.present?
return unless project.jira_subscription_exists?
seq_id = ::Atlassian::JiraConnect::Client.generate_update_sequence_id
feature_flag.run_after_commit do

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
class AddIndexSnippetsOnProjectIdAndTitle < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
INDEX_NAME = 'index_snippets_on_project_id_and_title'
def up
add_concurrent_index :snippets, [:project_id, :title], name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :snippets, name: INDEX_NAME
end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
class CleanupFirstMentionedInCommitJobs < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
MIGRATION = 'FixFirstMentionedInCommitAt'
INDEX_NAME = 'index_issue_metrics_first_mentioned_in_commit'
def up
finalize_background_migration(MIGRATION)
remove_concurrent_index_by_name :issue_metrics, name: INDEX_NAME
end
def down
# Handles reported schema inconsistencies (column with or without timezone)
# We did the same in db/post_migrate/20211004110500_add_temporary_index_to_issue_metrics.rb
condition = Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt::TmpIssueMetrics
.first_mentioned_in_commit_at_condition
add_concurrent_index :issue_metrics, :issue_id, where: condition, name: INDEX_NAME
end
end

View File

@ -0,0 +1 @@
6a3591e70ddd6573ad68360c1a8774ef61e7812ce831c75066baec5754e2bd76

View File

@ -0,0 +1 @@
c30656c3f079e789f386b5b607710a7d4df6d2eb20bd457bab3a2e8d9eeb051b

View File

@ -26346,8 +26346,6 @@ CREATE UNIQUE INDEX index_issue_links_on_source_id_and_target_id ON issue_links
CREATE INDEX index_issue_links_on_target_id ON issue_links USING btree (target_id);
CREATE INDEX index_issue_metrics_first_mentioned_in_commit ON issue_metrics USING btree (issue_id) WHERE (date_part('year'::text, first_mentioned_in_commit_at) > (2019)::double precision);
CREATE INDEX index_issue_metrics_on_issue_id_and_timestamps ON issue_metrics USING btree (issue_id, first_mentioned_in_commit_at, first_associated_with_milestone_at, first_added_to_board_at);
CREATE INDEX index_issue_on_project_id_state_id_and_blocking_issues_count ON issues USING btree (project_id, state_id, blocking_issues_count);
@ -27448,6 +27446,8 @@ CREATE INDEX index_snippets_on_id_and_created_at ON snippets USING btree (id, cr
CREATE INDEX index_snippets_on_id_and_type ON snippets USING btree (id, type);
CREATE INDEX index_snippets_on_project_id_and_title ON snippets USING btree (project_id, title);
CREATE INDEX index_snippets_on_project_id_and_visibility_level ON snippets USING btree (project_id, visibility_level);
CREATE INDEX index_snippets_on_title_trigram ON snippets USING gin (title gin_trgm_ops);

View File

@ -68,6 +68,7 @@ In addition, there are a few circumstances where we would always run the full RS
- when the `pipeline:run-all-rspec` label is set on the merge request
- when the merge request is created by an automation (e.g. Gitaly update or MR targeting a stable branch)
- when the merge request is created in a security mirror
- when any CI config file is changed (i.e. `.gitlab-ci.yml` or `.gitlab/ci/**/*`)
### Jest minimal jobs
@ -83,6 +84,7 @@ In addition, there are a few circumstances where we would always run the full Je
- when the `pipeline:run-all-jest` label is set on the merge request
- when the merge request is created by an automation (e.g. Gitaly update or MR targeting a stable branch)
- when the merge request is created in a security mirror
- when any CI config file is changed (i.e. `.gitlab-ci.yml` or `.gitlab/ci/**/*`)
- when any frontend "core" file is changed (i.e. `package.json`, `yarn.lock`, `babel.config.js`, `jest.config.*.js`, `config/helpers/**/*.js`)
- when any vendored JavaScript file is changed (i.e. `vendor/assets/javascripts/**/*`)

View File

@ -113,7 +113,7 @@ CREATE ROLE slony WITH SUPERUSER LOGIN REPLICATION ENCRYPTED PASSWORD 'password
ALTER ROLE slony SET statement_timeout TO 0;
```
Make sure you replace "password string here" with the actual password for the
Make sure you replace "password string here" with an actual password for the
user. A password is required. This user must be created on both the old and
new database server using the same password.
@ -230,7 +230,7 @@ Now run the following commands:
\i /tmp/migrations.sql
```
To verify if the structure is in place close the session, start it again, then
To verify if the structure is in place close the session (`\q`), start it again, then
run `\d`. If all went well you should see output along the lines of the
following:
@ -459,7 +459,7 @@ main
Upload this script to the _target_ server and execute it as follows:
```shell
bash path/to/the/script/above.sh
sudo bash path/to/the/script/above.sh
```
This corrects the ownership of sequences and resets the next value for the

Binary image file added (not shown): 20 KiB

View File

@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Vulnerability Report **(ULTIMATE)**
The Vulnerability Report provides information about vulnerabilities from scans of the default branch. It is available for groups, projects, and the Security Center.
The Vulnerability Report provides information about vulnerabilities from scans of the default branch. It is available for projects, groups, and the Security Center.
At all levels, the Vulnerability Report contains:
@ -214,3 +214,12 @@ You can dismiss a vulnerability for the entire project:
1. Optional. Add a reason for the dismissal and select **Save comment**.
To undo this action, select a different status from the same menu.
## Operational vulnerabilities
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/6345) in GitLab 14.6.
The **Operational vulnerabilities** tab lists vulnerabilities found by the `cluster_image_scanner`.
This tab appears on the project, group, and Security Center vulnerability reports.
![Operational Vulnerability Tab](img/operational_vulnerability_tab_v14_6.png)

View File

@ -26,7 +26,7 @@ To manage linked issues through our API, visit the [issue links API documentatio
1. Link one issue to another by selecting the add linked issue button (**{plus}**) in the
**Linked issues** section of an issue.
1. Select the relationship the between the two issues. Either:
1. Select the relationship between the two issues. Either:
- **relates to**
- **blocks** **(PREMIUM)**
- **is blocked by** **(PREMIUM)**

View File

@ -38,16 +38,19 @@ module Gitlab
end
def host_stats
return [] unless ActiveRecord::Base.connected?
Gitlab::Database.database_base_models.each_value.with_object([]) do |base_model, stats|
next unless base_model.connected?
[{ labels: labels_for_class(ActiveRecord::Base), stats: ActiveRecord::Base.connection_pool.stat }]
stats << { labels: labels_for_class(base_model), stats: base_model.connection_pool.stat }
end
end
def labels_for_class(klass)
{
host: klass.connection_db_config.host,
port: klass.connection_db_config.configuration_hash[:port],
class: klass.to_s
class: klass.to_s,
db_config_name: klass.connection_db_config.name
}
end
end
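With this change, `host_stats` reports one entry per configured database instead of a single `ActiveRecord::Base` entry, and each label set now carries `db_config_name`. As a rough illustration (not part of the commit: host names, ports, and pool numbers are invented, and the `stats` keys follow Rails' `ConnectionPool#stat`), the return value under a two-database setup might look like:

```ruby
# Illustrative only -- approximate shape of host_stats with both the main and
# ci connections established. Every value below is made up.
[
  {
    labels: { host: "pg-main.internal", port: 5432, class: "ActiveRecord::Base", db_config_name: "main" },
    stats: { size: 10, connections: 5, busy: 2, dead: 0, idle: 3, waiting: 0, checkout_timeout: 5.0 }
  },
  {
    labels: { host: "pg-ci.internal", port: 5432, class: "Ci::ApplicationRecord", db_config_name: "ci" },
    stats: { size: 10, connections: 3, busy: 1, dead: 0, idle: 2, waiting: 0, checkout_timeout: 5.0 }
  }
]
```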

View File

@ -4446,7 +4446,7 @@ msgstr ""
msgid "ApprovalSettings|This setting is configured at the instance level and can only be changed by an administrator."
msgstr ""
msgid "ApprovalSettings|This setting is configured in %{groupName} and can only be changed by an administrator or group owner."
msgid "ApprovalSettings|This setting is configured in %{groupName} and can only be changed in the group settings by an administrator or group owner."
msgstr ""
msgid "ApprovalStatusTooltip|Adheres to separation of duties"

View File

@ -15,7 +15,7 @@ gem 'rest-client', '~> 2.1.0'
gem 'rspec-retry', '~> 0.6.1', require: 'rspec/retry'
gem 'rspec_junit_formatter', '~> 0.4.1'
gem 'faker', '~> 2.19', '>= 2.19.0'
gem 'knapsack', '~> 1.17'
gem 'knapsack', '~> 4.0'
gem 'parallel_tests', '~> 2.29'
gem 'rotp', '~> 3.1.0'
gem 'timecop', '~> 0.9.1'
@ -27,6 +27,7 @@ gem 'zeitwerk', '~> 2.4'
gem 'influxdb-client', '~> 1.17'
gem 'terminal-table', '~> 1.8', require: false
gem 'slack-notifier', '~> 2.4', require: false
gem 'fog-google', '~> 1.17', require: false
gem 'chemlab', '~> 0.9'
gem 'chemlab-library-www-gitlab-com', '~> 0.1'

View File

@ -29,6 +29,7 @@ GEM
uuid (>= 2.3, < 3)
ast (2.4.2)
binding_ninja (0.2.3)
builder (3.2.4)
byebug (9.1.0)
capybara (3.35.3)
addressable
@ -56,12 +57,14 @@ GEM
adamantium (~> 0.2.0)
equalizer (~> 0.0.9)
concurrent-ruby (1.1.9)
declarative (0.0.20)
deprecation_toolkit (1.5.1)
activesupport (>= 4.2)
diff-lcs (1.3)
domain_name (0.5.20190701)
unf (>= 0.0.5, < 1.0.0)
equalizer (0.0.11)
excon (0.88.0)
faker (2.19.0)
i18n (>= 1.6, < 2)
faraday (1.5.1)
@ -85,6 +88,30 @@ GEM
ffi-compiler (1.0.1)
ffi (>= 1.0.0)
rake
fog-core (2.1.0)
builder
excon (~> 0.58)
formatador (~> 0.2)
mime-types
fog-google (1.17.0)
fog-core (<= 2.1.0)
fog-json (~> 1.2)
fog-xml (~> 0.1.0)
google-apis-compute_v1 (~> 0.14)
google-apis-dns_v1 (~> 0.12)
google-apis-iamcredentials_v1 (~> 0.6)
google-apis-monitoring_v3 (~> 0.12)
google-apis-pubsub_v1 (~> 0.7)
google-apis-sqladmin_v1beta4 (~> 0.13)
google-apis-storage_v1 (~> 0.6)
google-cloud-env (~> 1.2)
fog-json (1.2.0)
fog-core
multi_json (~> 1.10)
fog-xml (0.1.4)
fog-core
nokogiri (>= 1.5.11, < 2.0.0)
formatador (0.3.0)
gitlab (4.16.1)
httparty (~> 0.14, >= 0.14.0)
terminal-table (~> 1.5, >= 1.5.1)
@ -94,6 +121,38 @@ GEM
http (~> 5.0)
nokogiri (~> 1.10)
table_print (= 1.5.7)
google-apis-compute_v1 (0.21.0)
google-apis-core (>= 0.4, < 2.a)
google-apis-core (0.4.1)
addressable (~> 2.5, >= 2.5.1)
googleauth (>= 0.16.2, < 2.a)
httpclient (>= 2.8.1, < 3.a)
mini_mime (~> 1.0)
representable (~> 3.0)
retriable (>= 2.0, < 4.a)
rexml
webrick
google-apis-dns_v1 (0.16.0)
google-apis-core (>= 0.4, < 2.a)
google-apis-iamcredentials_v1 (0.8.0)
google-apis-core (>= 0.4, < 2.a)
google-apis-monitoring_v3 (0.18.0)
google-apis-core (>= 0.4, < 2.a)
google-apis-pubsub_v1 (0.10.0)
google-apis-core (>= 0.4, < 2.a)
google-apis-sqladmin_v1beta4 (0.21.0)
google-apis-core (>= 0.4, < 2.a)
google-apis-storage_v1 (0.9.0)
google-apis-core (>= 0.4, < 2.a)
google-cloud-env (1.5.0)
faraday (>= 0.17.3, < 2.0)
googleauth (1.1.0)
faraday (>= 0.17.3, < 2.0)
jwt (>= 1.4, < 3.0)
memoist (~> 0.16)
multi_json (~> 1.11)
os (>= 0.9, < 2.0)
signet (>= 0.16, < 2.a)
http (5.0.4)
addressable (~> 2.8)
http-cookie (~> 1.0)
@ -106,11 +165,13 @@ GEM
httparty (0.20.0)
mime-types (~> 3.0)
multi_xml (>= 0.5.2)
httpclient (2.8.3)
i18n (1.8.11)
concurrent-ruby (~> 1.0)
ice_nine (0.11.2)
influxdb-client (1.17.0)
knapsack (1.17.1)
jwt (2.3.0)
knapsack (4.0.0)
rake
launchy (2.4.3)
addressable (~> 2.3)
@ -119,6 +180,7 @@ GEM
rake (~> 13.0)
macaddr (1.7.2)
systemu (~> 2.6.5)
memoist (0.16.2)
memoizable (0.4.2)
thread_safe (~> 0.3, >= 0.3.1)
method_source (0.9.0)
@ -128,6 +190,7 @@ GEM
mini_mime (1.1.0)
mini_portile2 (2.6.1)
minitest (5.14.4)
multi_json (1.15.0)
multi_xml (0.6.0)
multipart-post (2.1.1)
netrc (0.11.0)
@ -138,6 +201,7 @@ GEM
faraday (>= 0.9)
sawyer (~> 0.8.0, >= 0.5.3)
oj (3.13.8)
os (1.1.4)
parallel (1.19.2)
parallel_tests (2.29.0)
parallel
@ -161,12 +225,17 @@ GEM
rack (>= 1.0, < 3)
rake (13.0.6)
regexp_parser (2.1.1)
representable (3.1.1)
declarative (< 0.1.0)
trailblazer-option (>= 0.1.1, < 0.2.0)
uber (< 0.2.0)
require_all (3.0.0)
rest-client (2.1.0)
http-accept (>= 1.7.0, < 2.0)
http-cookie (>= 1.0.2, < 2.0)
mime-types (>= 1.16, < 4.0)
netrc (~> 0.8)
retriable (3.1.2)
rexml (3.2.5)
rotp (3.1.0)
rspec (3.10.0)
@ -203,6 +272,11 @@ GEM
childprocess (>= 0.5, < 5.0)
rexml (~> 3.2, >= 3.2.5)
rubyzip (>= 1.2.2)
signet (0.16.0)
addressable (~> 2.8)
faraday (>= 0.17.3, < 2.0)
jwt (>= 1.5, < 3.0)
multi_json (~> 1.10)
slack-notifier (2.4.0)
systemu (2.6.5)
table_print (1.5.7)
@ -210,8 +284,10 @@ GEM
unicode-display_width (~> 1.1, >= 1.1.1)
thread_safe (0.3.6)
timecop (0.9.1)
trailblazer-option (0.1.2)
tzinfo (2.0.4)
concurrent-ruby (~> 1.0)
uber (0.1.0)
unf (0.1.4)
unf_ext
unf_ext (0.0.8)
@ -233,6 +309,7 @@ GEM
nokogiri (~> 1.6)
rubyzip (>= 1.3.0)
selenium-webdriver (~> 4.0)
webrick (1.7.0)
xpath (3.2.0)
nokogiri (~> 1.8)
zeitwerk (2.5.1)
@ -250,9 +327,10 @@ DEPENDENCIES
chemlab-library-www-gitlab-com (~> 0.1)
deprecation_toolkit (~> 1.5.1)
faker (~> 2.19, >= 2.19.0)
fog-google (~> 1.17)
gitlab-qa
influxdb-client (~> 1.17)
knapsack (~> 1.17)
knapsack (~> 4.0)
octokit (~> 4.21)
parallel (~> 1.19)
parallel_tests (~> 2.29)

View File

@ -1,8 +1,7 @@
# frozen_string_literal: true
# rubocop:disable Rails/RakeEnvironment
load 'tasks/webdrivers.rake'
load 'tasks/reliable_report.rake'
Dir['tasks/*.rake'].each { |file| load file }
require_relative 'qa/tools/revoke_all_personal_access_tokens'
require_relative 'qa/tools/delete_subgroups'

View File: qa/knapsack/gcs/.gitignore (new vendored file, 3 lines)

@ -0,0 +1,3 @@
**
!.gitignore

View File

@ -0,0 +1,88 @@
# frozen_string_literal: true
require "fog/google"
module QA
module Tools
class KnapsackReport
PROJECT = "gitlab-qa-resources"
BUCKET = "knapsack-reports"
class << self
def download
new.download_report
end
def upload(glob)
new.upload_report(glob)
end
end
def initialize
ENV["KNAPSACK_REPORT_PATH"] || raise("KNAPSACK_REPORT_PATH env var is required!")
ENV["QA_KNAPSACK_REPORT_GCS_CREDENTIALS"] || raise("QA_KNAPSACK_REPORT_GCS_CREDENTIALS env var is required!")
end
# Download knapsack report from gcs bucket
#
# @return [void]
def download_report
logger.info("Downloading latest knapsack report '#{report_file}'")
file = client.get_object(BUCKET, report_file)
logger.info("Saving latest knapsack report to '#{report_path}'")
File.write(report_path, file[:body])
end
# Merge and upload knapsack report to gcs bucket
#
# @param [String] glob
# @return [void]
def upload_report(glob)
reports = Dir[glob]
return logger.error("Pattern '#{glob}' did not match any files!") if reports.empty?
report = reports
.map { |path| JSON.parse(File.read(path)) }
.reduce({}, :merge)
return logger.error("Knapsack generated empty report, skipping upload!") if report.empty?
logger.info("Uploading latest knapsack report '#{report_file}'")
client.put_object(BUCKET, report_file, JSON.pretty_generate(report))
end
private
# Logger instance
#
# @return [Logger]
def logger
@logger ||= Logger.new($stdout)
end
# GCS client
#
# @return [Fog::Storage::GoogleJSON]
def client
@client ||= Fog::Storage::Google.new(
google_project: PROJECT,
google_json_key_location: ENV["QA_KNAPSACK_REPORT_GCS_CREDENTIALS"]
)
end
# Knapsack report path
#
# @return [String]
def report_path
@report_path ||= ENV["KNAPSACK_REPORT_PATH"]
end
# Knapsack report name
#
# @return [String]
def report_file
@report_name ||= report_path.split("/").last
end
end
end
end
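A minimal usage sketch for the new tool (not part of the commit): both environment variables are required by the initializer, and the credential path below is a placeholder. The CI jobs above exercise the same code through the `knapsack:download` and `knapsack:upload[glob]` rake tasks.

```ruby
# Sketch only -- assumes it is run from the qa/ directory with the fog-google gem installed.
ENV["KNAPSACK_REPORT_PATH"] = "knapsack/master_report.json"
ENV["QA_KNAPSACK_REPORT_GCS_CREDENTIALS"] = "/path/to/gcs-service-account.json" # placeholder

require_relative "qa/tools/knapsack_report"

# Fetch the latest merged report from the GCS bucket before a parallel run ...
QA::Tools::KnapsackReport.download

# ... then merge the per-node reports and upload the combined result afterwards.
QA::Tools::KnapsackReport.upload("knapsack/gcs/regenerated-*.json")
```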

View File: qa/tasks/knapsack.rake (new file, 17 lines)

@ -0,0 +1,17 @@
# frozen_string_literal: true
# rubocop:disable Rails/RakeEnvironment
require_relative "../qa/tools/knapsack_report"
namespace :knapsack do
desc "Download latest knapsack report"
task :download do
QA::Tools::KnapsackReport.download
end
desc "Merge and upload knapsack report"
task :upload, [:glob_pattern] do |_task, args|
QA::Tools::KnapsackReport.upload(args[:glob_pattern])
end
end
# rubocop:enable Rails/RakeEnvironment

View File

@ -82,7 +82,7 @@ RSpec.describe 'Container Registry', :js do
end
it 'shows the image tags' do
expect(page).to have_content 'Image tags'
expect(page).to have_content '1 tag'
first_tag = first('[data-testid="name"]')
expect(first_tag).to have_content 'latest'
end

View File

@ -87,7 +87,7 @@ RSpec.describe 'Container Registry', :js do
end
it 'shows the image tags' do
expect(page).to have_content 'Image tags'
expect(page).to have_content '20 tags'
first_tag = first('[data-testid="name"]')
expect(first_tag).to have_content '1'
end

View File

@ -1,18 +1,16 @@
import { GlButton, GlKeysetPagination } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { stripTypenames } from 'helpers/graphql_helpers';
import EmptyTagsState from '~/packages_and_registries/container_registry/explorer/components/details_page/empty_state.vue';
import component from '~/packages_and_registries/container_registry/explorer/components/details_page/tags_list.vue';
import TagsListRow from '~/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row.vue';
import TagsLoader from '~/packages_and_registries/container_registry/explorer/components/details_page/tags_loader.vue';
import {
TAGS_LIST_TITLE,
REMOVE_TAGS_BUTTON_TITLE,
} from '~/packages_and_registries/container_registry/explorer/constants/index';
import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
import getContainerRepositoryTagsQuery from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_tags.query.graphql';
import { GRAPHQL_PAGE_SIZE } from '~/packages_and_registries/container_registry/explorer/constants/index';
import { tagsMock, imageTagsMock, tagsPageInfo } from '../../mock_data';
const localVue = createLocalVue();
@ -20,25 +18,20 @@ const localVue = createLocalVue();
describe('Tags List', () => {
let wrapper;
let apolloProvider;
let resolver;
const tags = [...tagsMock];
const readOnlyTags = tags.map((t) => ({ ...t, canDelete: false }));
const findTagsListRow = () => wrapper.findAll(TagsListRow);
const findDeleteButton = () => wrapper.find(GlButton);
const findListTitle = () => wrapper.find('[data-testid="list-title"]');
const findPagination = () => wrapper.find(GlKeysetPagination);
const findEmptyState = () => wrapper.find(EmptyTagsState);
const findTagsLoader = () => wrapper.find(TagsLoader);
const findTagsListRow = () => wrapper.findAllComponents(TagsListRow);
const findRegistryList = () => wrapper.findComponent(RegistryList);
const findEmptyState = () => wrapper.findComponent(EmptyTagsState);
const findTagsLoader = () => wrapper.findComponent(TagsLoader);
const waitForApolloRequestRender = async () => {
await waitForPromises();
await nextTick();
};
const mountComponent = ({
propsData = { isMobile: false, id: 1 },
resolver = jest.fn().mockResolvedValue(imageTagsMock()),
} = {}) => {
const mountComponent = ({ propsData = { isMobile: false, id: 1 } } = {}) => {
localVue.use(VueApollo);
const requestHandlers = [[getContainerRepositoryTagsQuery, resolver]];
@ -48,6 +41,7 @@ describe('Tags List', () => {
localVue,
apolloProvider,
propsData,
stubs: { RegistryList },
provide() {
return {
config: {},
@ -56,99 +50,58 @@ describe('Tags List', () => {
});
};
beforeEach(() => {
resolver = jest.fn().mockResolvedValue(imageTagsMock());
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
describe('List title', () => {
it('exists', async () => {
describe('registry list', () => {
beforeEach(() => {
mountComponent();
await waitForApolloRequestRender();
expect(findListTitle().exists()).toBe(true);
return waitForApolloRequestRender();
});
it('has the correct text', async () => {
mountComponent();
await waitForApolloRequestRender();
expect(findListTitle().text()).toBe(TAGS_LIST_TITLE);
});
});
describe('delete button', () => {
it.each`
inputTags | isMobile | isVisible
${tags} | ${false} | ${true}
${tags} | ${true} | ${false}
${readOnlyTags} | ${false} | ${false}
${readOnlyTags} | ${true} | ${false}
`(
'is $isVisible that delete button exists when tags is $inputTags and isMobile is $isMobile',
async ({ inputTags, isMobile, isVisible }) => {
mountComponent({
propsData: { tags: inputTags, isMobile, id: 1 },
resolver: jest.fn().mockResolvedValue(imageTagsMock(inputTags)),
});
await waitForApolloRequestRender();
expect(findDeleteButton().exists()).toBe(isVisible);
},
);
it('has the correct text', async () => {
mountComponent();
await waitForApolloRequestRender();
expect(findDeleteButton().text()).toBe(REMOVE_TAGS_BUTTON_TITLE);
});
it('has the correct props', async () => {
mountComponent();
await waitForApolloRequestRender();
expect(findDeleteButton().attributes()).toMatchObject({
category: 'secondary',
variant: 'danger',
it('binds the correct props', () => {
expect(findRegistryList().props()).toMatchObject({
title: '2 tags',
pagination: stripTypenames(tagsPageInfo),
items: stripTypenames(tags),
idProperty: 'name',
});
});
it.each`
disabled | doSelect | buttonDisabled
${true} | ${false} | ${'true'}
${true} | ${true} | ${'true'}
${false} | ${false} | ${'true'}
${false} | ${true} | ${undefined}
`(
'is $buttonDisabled that the button is disabled when the component disabled state is $disabled and is $doSelect that the user selected a tag',
async ({ disabled, buttonDisabled, doSelect }) => {
mountComponent({ propsData: { tags, disabled, isMobile: false, id: 1 } });
describe('events', () => {
it('prev-page fetches the previous page', () => {
findRegistryList().vm.$emit('prev-page');
await waitForApolloRequestRender();
expect(resolver).toHaveBeenCalledWith({
first: null,
before: tagsPageInfo.startCursor,
last: GRAPHQL_PAGE_SIZE,
id: '1',
});
});
if (doSelect) {
findTagsListRow().at(0).vm.$emit('select');
await nextTick();
}
it('next-page fetches the next page', () => {
findRegistryList().vm.$emit('next-page');
expect(findDeleteButton().attributes('disabled')).toBe(buttonDisabled);
},
);
expect(resolver).toHaveBeenCalledWith({
after: tagsPageInfo.endCursor,
first: GRAPHQL_PAGE_SIZE,
id: '1',
});
});
it('click event emits a deleted event with selected items', async () => {
mountComponent();
it('emits a delete event when list emits delete', () => {
const eventPayload = 'foo';
findRegistryList().vm.$emit('delete', eventPayload);
await waitForApolloRequestRender();
findTagsListRow().at(0).vm.$emit('select');
findDeleteButton().vm.$emit('click');
expect(wrapper.emitted('delete')[0][0][0].name).toBe(tags[0].name);
expect(wrapper.emitted('delete')).toEqual([[eventPayload]]);
});
});
});
@ -199,10 +152,12 @@ describe('Tags List', () => {
});
describe('when the list of tags is empty', () => {
const resolver = jest.fn().mockResolvedValue(imageTagsMock([]));
beforeEach(() => {
resolver = jest.fn().mockResolvedValue(imageTagsMock([]));
});
it('has the empty state', async () => {
mountComponent({ resolver });
mountComponent();
await waitForApolloRequestRender();
@ -210,7 +165,7 @@ describe('Tags List', () => {
});
it('does not show the loader', async () => {
mountComponent({ resolver });
mountComponent();
await waitForApolloRequestRender();
@ -218,76 +173,13 @@ describe('Tags List', () => {
});
it('does not show the list', async () => {
mountComponent({ resolver });
mountComponent();
await waitForApolloRequestRender();
expect(findTagsListRow().exists()).toBe(false);
expect(findListTitle().exists()).toBe(false);
expect(findRegistryList().exists()).toBe(false);
});
});
describe('pagination', () => {
it('exists', async () => {
mountComponent();
await waitForApolloRequestRender();
expect(findPagination().exists()).toBe(true);
});
it('is hidden when loading', () => {
mountComponent();
expect(findPagination().exists()).toBe(false);
});
it('is hidden when there are no more pages', async () => {
mountComponent({ resolver: jest.fn().mockResolvedValue(imageTagsMock([])) });
await waitForApolloRequestRender();
expect(findPagination().exists()).toBe(false);
});
it('is wired to the correct pagination props', async () => {
mountComponent();
await waitForApolloRequestRender();
expect(findPagination().props()).toMatchObject({
hasNextPage: tagsPageInfo.hasNextPage,
hasPreviousPage: tagsPageInfo.hasPreviousPage,
});
});
it('fetch next page when user clicks next', async () => {
const resolver = jest.fn().mockResolvedValue(imageTagsMock());
mountComponent({ resolver });
await waitForApolloRequestRender();
findPagination().vm.$emit('next');
expect(resolver).toHaveBeenCalledWith(
expect.objectContaining({ after: tagsPageInfo.endCursor }),
);
});
it('fetch previous page when user clicks prev', async () => {
const resolver = jest.fn().mockResolvedValue(imageTagsMock());
mountComponent({ resolver });
await waitForApolloRequestRender();
findPagination().vm.$emit('prev');
expect(resolver).toHaveBeenCalledWith(
expect.objectContaining({ first: null, before: tagsPageInfo.startCursor }),
);
});
});
describe('loading state', () => {
it.each`
isImageLoading | queryExecuting | loadingVisible
@ -306,8 +198,6 @@ describe('Tags List', () => {
expect(findTagsLoader().exists()).toBe(loadingVisible);
expect(findTagsListRow().exists()).toBe(!loadingVisible);
expect(findListTitle().exists()).toBe(!loadingVisible);
expect(findPagination().exists()).toBe(!loadingVisible);
},
);
});

View File

@ -8,43 +8,61 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do
it_behaves_like 'metrics sampler', 'DATABASE_SAMPLER'
describe '#sample' do
let(:active_record_labels) do
{
class: 'ActiveRecord::Base',
host: ApplicationRecord.database.config['host'],
port: ApplicationRecord.database.config['port'],
db_config_name: 'main'
}
end
let(:ci_application_record_labels) do
{
class: 'Ci::ApplicationRecord',
host: Ci::ApplicationRecord.database.config['host'],
port: Ci::ApplicationRecord.database.config['port'],
db_config_name: 'ci'
}
end
before do
described_class::METRIC_DESCRIPTIONS.each_key do |metric|
allow(subject.metrics[metric]).to receive(:set)
end
allow(Gitlab::Database).to receive(:database_base_models)
.and_return({ main: ActiveRecord::Base, ci: Ci::ApplicationRecord })
end
context 'for ActiveRecord::Base' do
let(:labels) do
{
class: 'ActiveRecord::Base',
host: ApplicationRecord.database.config['host'],
port: ApplicationRecord.database.config['port']
}
context 'when the database is connected', :add_ci_connection do
it 'samples connection pool statistics' do
expect(subject.metrics[:size]).to receive(:set).with(active_record_labels, a_value >= 1)
expect(subject.metrics[:connections]).to receive(:set).with(active_record_labels, a_value >= 1)
expect(subject.metrics[:busy]).to receive(:set).with(active_record_labels, a_value >= 1)
expect(subject.metrics[:dead]).to receive(:set).with(active_record_labels, a_value >= 0)
expect(subject.metrics[:waiting]).to receive(:set).with(active_record_labels, a_value >= 0)
expect(subject.metrics[:size]).to receive(:set).with(ci_application_record_labels, a_value >= 1)
expect(subject.metrics[:connections]).to receive(:set).with(ci_application_record_labels, a_value >= 1)
expect(subject.metrics[:busy]).to receive(:set).with(ci_application_record_labels, a_value >= 1)
expect(subject.metrics[:dead]).to receive(:set).with(ci_application_record_labels, a_value >= 0)
expect(subject.metrics[:waiting]).to receive(:set).with(ci_application_record_labels, a_value >= 0)
subject.sample
end
end
context 'when a database is not connected', :add_ci_connection do
before do
allow(Ci::ApplicationRecord).to receive(:connected?).and_return(false)
end
context 'when the database is connected' do
it 'samples connection pool statistics' do
expect(subject.metrics[:size]).to receive(:set).with(labels, a_value >= 1)
expect(subject.metrics[:connections]).to receive(:set).with(labels, a_value >= 1)
expect(subject.metrics[:busy]).to receive(:set).with(labels, a_value >= 1)
expect(subject.metrics[:dead]).to receive(:set).with(labels, a_value >= 0)
expect(subject.metrics[:waiting]).to receive(:set).with(labels, a_value >= 0)
it 'records no samples for that database' do
expect(subject.metrics[:size]).to receive(:set).with(active_record_labels, anything)
expect(subject.metrics[:size]).not_to receive(:set).with(ci_application_record_labels, anything)
subject.sample
end
end
context 'when the database is not connected' do
before do
allow(ActiveRecord::Base).to receive(:connected?).and_return(false)
end
it 'records no samples' do
expect(subject.metrics[:size]).not_to receive(:set).with(labels, anything)
subject.sample
end
subject.sample
end
end
end

View File

@ -1356,12 +1356,26 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
describe 'synching status to Jira' do
let(:worker) { ::JiraConnect::SyncBuildsWorker }
%i[prepare! run! skip! drop! succeed! cancel! block! delay!].each do |event|
context "when we call pipeline.#{event}" do
it 'triggers a Jira synch worker' do
expect(worker).to receive(:perform_async).with(pipeline.id, Integer)
context 'when Jira Connect subscription does not exist' do
it 'does not trigger a Jira synch worker' do
expect(worker).not_to receive(:perform_async)
pipeline.send(event)
pipeline.prepare!
end
end
context 'when Jira Connect subscription exists' do
before_all do
create(:jira_connect_subscription, namespace: project.namespace)
end
%i[prepare! run! skip! drop! succeed! cancel! block! delay!].each do |event|
context "when we call pipeline.#{event}" do
it 'triggers a Jira synch worker' do
expect(worker).to receive(:perform_async).with(pipeline.id, Integer)
pipeline.send(event)
end
end
end
end

View File

@ -269,30 +269,45 @@ RSpec.describe Deployment do
end
describe 'synching status to Jira' do
let(:deployment) { create(:deployment) }
let_it_be(:project) { create(:project, :repository) }
let(:deployment) { create(:deployment, project: project) }
let(:worker) { ::JiraConnect::SyncDeploymentsWorker }
it 'calls the worker on creation' do
expect(worker).to receive(:perform_async).with(Integer)
context 'when Jira Connect subscription does not exist' do
it 'does not call the worker' do
expect(worker).not_to receive(:perform_async)
deployment
deployment
end
end
it 'does not call the worker for skipped deployments' do
expect(deployment).to be_present # warm-up, ignore the creation trigger
context 'when Jira Connect subscription exists' do
before_all do
create(:jira_connect_subscription, namespace: project.namespace)
end
expect(worker).not_to receive(:perform_async)
it 'calls the worker on creation' do
expect(worker).to receive(:perform_async).with(Integer)
deployment.skip!
end
deployment
end
%i[run! succeed! drop! cancel!].each do |event|
context "when we call pipeline.#{event}" do
it 'triggers a Jira synch worker' do
expect(worker).to receive(:perform_async).with(deployment.id)
it 'does not call the worker for skipped deployments' do
expect(deployment).to be_present # warm-up, ignore the creation trigger
deployment.send(event)
expect(worker).not_to receive(:perform_async)
deployment.skip!
end
%i[run! succeed! drop! cancel!].each do |event|
context "when we call pipeline.#{event}" do
it 'triggers a Jira synch worker' do
expect(worker).to receive(:perform_async).with(deployment.id)
deployment.send(event)
end
end
end
end

View File

@ -62,10 +62,24 @@ RSpec.describe FeatureFlags::CreateService do
expect { subject }.to change { Operations::FeatureFlag.count }.by(1)
end
it 'syncs the feature flag to Jira' do
expect(::JiraConnect::SyncFeatureFlagsWorker).to receive(:perform_async).with(Integer, Integer)
context 'when Jira Connect subscription does not exist' do
it 'does not sync the feature flag to Jira' do
expect(::JiraConnect::SyncFeatureFlagsWorker).not_to receive(:perform_async)
subject
subject
end
end
context 'when Jira Connect subscription exists' do
before do
create(:jira_connect_subscription, namespace: project.namespace)
end
it 'syncs the feature flag to Jira' do
expect(::JiraConnect::SyncFeatureFlagsWorker).to receive(:perform_async).with(Integer, Integer)
subject
end
end
it 'creates audit event' do

View File

@ -27,10 +27,24 @@ RSpec.describe FeatureFlags::UpdateService do
expect(subject[:status]).to eq(:success)
end
it 'syncs the feature flag to Jira' do
expect(::JiraConnect::SyncFeatureFlagsWorker).to receive(:perform_async).with(Integer, Integer)
context 'when Jira Connect subscription does not exist' do
it 'does not sync the feature flag to Jira' do
expect(::JiraConnect::SyncFeatureFlagsWorker).not_to receive(:perform_async)
subject
subject
end
end
context 'when Jira Connect subscription exists' do
before do
create(:jira_connect_subscription, namespace: project.namespace)
end
it 'syncs the feature flag to Jira' do
expect(::JiraConnect::SyncFeatureFlagsWorker).to receive(:perform_async).with(Integer, Integer)
subject
end
end
it 'creates audit event with correct message' do