Add latest changes from gitlab-org/gitlab@master
Parent: 5c5d24f032
Commit: cc1066db64
38 changed files with 612 additions and 241 deletions
@@ -21,7 +21,6 @@

import { Mark } from 'prosemirror-model';
import { visitParents, SKIP } from 'unist-util-visit-parents';
import { toString } from 'hast-util-to-string';
import { isFunction, isString, noop } from 'lodash';

const NO_ATTRIBUTES = {};

@@ -357,15 +356,6 @@ const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, markdow

state.closeUntil(parent);
state.openNode(nodeType, hastNode, getAttrs(factory, hastNode, parent, markdown), factory);

/**
* If a getContent function is provided, we immediately close
* the node to delegate content processing to this function.
* */
if (isFunction(factory.getContent)) {
state.addText(schema, factory.getContent({ hastNode, hastNodeText: toString(hastNode) }));
state.closeNode();
}
};
} else if (factory.type === 'inline') {
const nodeType = schema.nodeType(proseMirrorName);

@@ -571,19 +561,6 @@ const wrapInlineElements = (nodes, wrappableTags) =>
* it allows applying a processing function to that text. This is useful when
* you can transform the text node, i.e trim(), substring(), etc.
*
* **skipChildren**
*
* Skips a hast node’s children while traversing the tree.
*
* **getContent**
*
* Allows to pass a custom function that returns the content of a block node. The
* Content is limited to a single text node therefore the function should return
* a String value.
*
* Use this property along skipChildren to provide custom processing of child nodes
* for a block node.
*
* **parent**
*
* Specifies what is the node’s parent. This is useful when the node’s parent is not

@@ -634,7 +611,7 @@ export const createProseMirrorDocFromMdastTree = ({

factory.handle(state, hastNode, parent);

return factory.skipChildren === true ? SKIP : true;
return true;
});

return state.buildDoc();

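For orientation, the factory options documented in this file (type, selector, skipChildren, getContent, getAttrs) control how hast nodes are turned into ProseMirror nodes. A minimal illustrative spec, using the hypothetical names myBlock/myblock and mirroring the codeBlock spec changed in the next file, might look like this (a sketch, not part of the commit):

// Hypothetical factory spec: skip the hast node's children and derive the
// node content and attributes directly from the matched hast node.
const factorySpecs = {
  myBlock: {
    type: 'block',
    selector: 'myblock',
    skipChildren: true,
    getContent: ({ hastNodeText }) => hastNodeText.trim(),
    getAttrs: (hastNode) => ({ ...hastNode.properties }),
  },
};
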
@@ -1,4 +1,3 @@
import { isString } from 'lodash';
import { render } from '~/lib/gfm';
import { createProseMirrorDocFromMdastTree } from './hast_to_prosemirror_converter';

@@ -45,15 +44,8 @@ const factorySpecs = {
},
codeBlock: {
type: 'block',
skipChildren: true,
selector: 'pre',
getContent: ({ hastNodeText }) => hastNodeText.replace(/\n$/, ''),
getAttrs: (hastNode) => {
const languageClass = hastNode.children[0]?.properties.className?.[0];
const language = isString(languageClass) ? languageClass.replace('language-', '') : null;

return { language };
},
selector: 'codeblock',
getAttrs: (hastNode) => ({ ...hastNode.properties }),
},
horizontalRule: {
type: 'block',

@@ -123,6 +115,11 @@ const factorySpecs = {
selector: 'footnotedefinition',
getAttrs: (hastNode) => hastNode.properties,
},
pre: {
type: 'block',
selector: 'pre',
wrapInParagraph: true,
},
image: {
type: 'inline',
selector: 'img',

@@ -188,6 +185,7 @@ export default () => {
wrappableTags,
markdown,
}),
skipRendering: ['footnoteReference', 'footnoteDefinition', 'code'],
});

return { document };

@@ -1,30 +1,34 @@
import { pick } from 'lodash';
import { unified } from 'unified';
import remarkParse from 'remark-parse';
import remarkGfm from 'remark-gfm';
import remarkRehype, { all } from 'remark-rehype';
import rehypeRaw from 'rehype-raw';

const createParser = () => {
const skipRenderingHandlers = {
footnoteReference: (h, node) =>
h(node.position, 'footnoteReference', { identifier: node.identifier, label: node.label }, []),
footnoteDefinition: (h, node) =>
h(
node.position,
'footnoteDefinition',
{ identifier: node.identifier, label: node.label },
all(h, node),
),
code: (h, node) =>
h(node.position, 'codeBlock', { language: node.lang, meta: node.meta }, [
{ type: 'text', value: node.value },
]),
};

const createParser = ({ skipRendering = [] }) => {
return unified()
.use(remarkParse)
.use(remarkGfm)
.use(remarkRehype, {
allowDangerousHtml: true,
handlers: {
footnoteReference: (h, node) =>
h(
node.position,
'footnoteReference',
{ identifier: node.identifier, label: node.label },
[],
),
footnoteDefinition: (h, node) =>
h(
node.position,
'footnoteDefinition',
{ identifier: node.identifier, label: node.label },
all(h, node),
),
...pick(skipRenderingHandlers, skipRendering),
},
})
.use(rehypeRaw);

@@ -54,8 +58,10 @@ const compilerFactory = (renderer) =>
* @returns {Promise<any>} Returns a promise with the result of rendering
* the MDast tree
*/
export const render = async ({ markdown, renderer }) => {
const { result } = await createParser().use(compilerFactory(renderer)).process(markdown);
export const render = async ({ markdown, renderer, skipRendering = [] }) => {
const { result } = await createParser({ skipRendering })
.use(compilerFactory(renderer))
.process(markdown);

return result;
};

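The new skipRendering option is threaded from render() into createParser(), where pick(skipRenderingHandlers, skipRendering) swaps in handlers that emit raw footnotereference, footnotedefinition, or codeblock elements instead of rendered HTML. A usage sketch along the lines of the markdownToAST helper in the frontend spec later in this commit (illustrative only):

import { render } from '~/lib/gfm';

// Render markdown to a hast tree, keeping `code` nodes as raw `codeblock`
// elements so a later stage (for example the content editor) can handle them.
const markdownToAST = async (markdown, skipRendering = []) => {
  let result;
  await render({
    markdown,
    skipRendering, // e.g. ['code', 'footnoteReference', 'footnoteDefinition']
    renderer: (tree) => {
      result = tree;
    },
  });
  return result;
};
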
@@ -404,6 +404,7 @@ module ApplicationSettingsHelper
:wiki_page_max_content_bytes,
:container_registry_delete_tags_service_timeout,
:rate_limiting_response_text,
:package_registry_cleanup_policies_worker_capacity,
:container_registry_expiration_policies_worker_capacity,
:container_registry_cleanup_tags_service_max_list_size,
:container_registry_import_max_tags_count,

@@ -399,6 +399,7 @@ class ApplicationSetting < ApplicationRecord
numericality: { only_integer: true, greater_than_or_equal_to: 0 }

validates :packages_cleanup_package_file_worker_capacity,
:package_registry_cleanup_policies_worker_capacity,
allow_nil: false,
numericality: { only_integer: true, greater_than_or_equal_to: 0 }

@@ -217,6 +217,7 @@ module ApplicationSettingImplementation
user_show_add_ssh_key_message: true,
valid_runner_registrars: VALID_RUNNER_REGISTRAR_TYPES,
wiki_page_max_content_bytes: 50.megabytes,
package_registry_cleanup_policies_worker_capacity: 2,
container_registry_delete_tags_service_timeout: 250,
container_registry_expiration_policies_worker_capacity: 4,
container_registry_cleanup_tags_service_max_list_size: 200,

@@ -23,6 +23,17 @@ module Packages
where.not(keep_n_duplicated_package_files: 'all')
end

def self.with_packages
exists_select = ::Packages::Package.installable
.where('packages_packages.project_id = packages_cleanup_policies.project_id')
.select(1)
where('EXISTS (?)', exists_select)
end

def self.runnable
runnable_schedules.with_packages.order(next_run_at: :asc)
end

def set_next_run_at
# fixed cadence of 12 hours
self.next_run_at = Time.zone.now + 12.hours

@@ -4,16 +4,6 @@ module Ci
class BuildPresenter < ProcessablePresenter
presents ::Ci::Build, as: :build

def erased_by_user?
# Build can be erased through API, therefore it does not have
# `erased_by` user assigned in that case.
erased? && erased_by
end

def erased_by_name
erased_by.name if erased_by_user?
end

def status_title(status = detailed_status)
if auto_canceled?
"Job is redundant and is auto-canceled by Pipeline ##{auto_canceled_by_id}"

@@ -33,10 +23,6 @@ module Ci
end
end

def tooltip_message
"#{build.name} - #{detailed_status.status_tooltip}"
end

def execute_in
scheduled? && scheduled_at && [0, scheduled_at - Time.now].max
end

@@ -1,3 +1,12 @@
- can_push_code = can?(current_user, :push_code, @project)

- if !can_push_code && selected_branch.present?
- branch_collaboration = @project.branch_allows_collaboration?(current_user, selected_branch)
- existing_branch = ERB::Util.html_escape(selected_branch)
- else
- branch_collaboration = false
- existing_branch = ''

- case type.to_s
- when 'revert'
- revert_merge_request = _('Revert this merge request')

@@ -7,9 +16,9 @@
.js-revert-commit-modal{ data: { title: title,
endpoint: revert_namespace_project_commit_path(commit, namespace_id: @project.namespace.full_path, project_id: @project),
branch: @project.default_branch,
push_code: can?(current_user, :push_code, @project).to_s,
branch_collaboration: @project.branch_allows_collaboration?(current_user, selected_branch).to_s,
existing_branch: ERB::Util.html_escape(selected_branch),
push_code: can_push_code.to_s,
branch_collaboration: branch_collaboration.to_s,
existing_branch: existing_branch,
branches_endpoint: project_branches_path(@project) } }

- when 'cherry-pick'

@@ -20,8 +29,8 @@
branch: @project.default_branch,
target_project_id: @project.id,
target_project_name: @project.full_path,
push_code: can?(current_user, :push_code, @project).to_s,
branch_collaboration: @project.branch_allows_collaboration?(current_user, selected_branch).to_s,
existing_branch: ERB::Util.html_escape(selected_branch),
push_code: can_push_code.to_s,
branch_collaboration: branch_collaboration.to_s,
existing_branch: existing_branch,
branches_endpoint: refs_project_path(@project),
projects: cherry_pick_projects_data(@project).to_json } }

@@ -1461,6 +1461,15 @@
:weight: 1
:idempotent: false
:tags: []
- :name: package_cleanup:packages_cleanup_execute_policy
  :worker_name: Packages::Cleanup::ExecutePolicyWorker
  :feature_category: :package_registry
  :has_external_dependencies: false
  :urgency: :low
  :resource_boundary: :unknown
  :weight: 1
  :idempotent: true
  :tags: []
- :name: package_cleanup:packages_cleanup_package_file
  :worker_name: Packages::CleanupPackageFileWorker
  :feature_category: :package_registry

app/workers/packages/cleanup/execute_policy_worker.rb (new file, 72 lines)

@@ -0,0 +1,72 @@
# frozen_string_literal: true

module Packages
module Cleanup
class ExecutePolicyWorker
include ApplicationWorker
include LimitedCapacity::Worker
include Gitlab::Utils::StrongMemoize

data_consistency :always
queue_namespace :package_cleanup
feature_category :package_registry
urgency :low
worker_resource_boundary :unknown
idempotent!

COUNTS_KEYS = %i[
marked_package_files_total_count
unique_package_id_and_file_name_total_count
].freeze

def perform_work
return unless next_policy

log_extra_metadata_on_done(:project_id, next_policy.project_id)
result = ::Packages::Cleanup::ExecutePolicyService.new(next_policy).execute

if result.success?
timeout = !!result.payload[:timeout]
counts = result.payload[:counts]
log_extra_metadata_on_done(:execution_timeout, timeout)
COUNTS_KEYS.each do |count_key|
log_extra_metadata_on_done(count_key, counts[count_key])
end
end
end

def remaining_work_count
::Packages::Cleanup::Policy.runnable
.limit(max_running_jobs + 1)
.count
end

def max_running_jobs
::Gitlab::CurrentSettings.package_registry_cleanup_policies_worker_capacity
end

private

def next_policy
strong_memoize(:next_policy) do
::Packages::Cleanup::Policy.transaction do
# the #lock call is specific to this worker
# rubocop: disable CodeReuse/ActiveRecord
policy = ::Packages::Cleanup::Policy.runnable
.limit(1)
.lock('FOR UPDATE SKIP LOCKED')
.first
# rubocop: enable CodeReuse/ActiveRecord

next nil unless policy

policy.set_next_run_at
policy.save!

policy
end
end
end
end
end
end

@@ -12,6 +12,7 @@ module Packages

def perform
enqueue_package_file_cleanup_job if Packages::PackageFile.pending_destruction.exists?
enqueue_cleanup_policy_jobs if Packages::Cleanup::Policy.runnable.exists?

log_counts
end

@@ -22,6 +23,10 @@ module Packages
Packages::CleanupPackageFileWorker.perform_with_capacity
end

def enqueue_cleanup_policy_jobs
Packages::Cleanup::ExecutePolicyWorker.perform_with_capacity
end

def log_counts
use_replica_if_available do
pending_destruction_package_files_count = Packages::PackageFile.pending_destruction.count

@@ -31,6 +36,9 @@ module Packages
log_extra_metadata_on_done(:pending_destruction_package_files_count, pending_destruction_package_files_count)
log_extra_metadata_on_done(:processing_package_files_count, processing_package_files_count)
log_extra_metadata_on_done(:error_package_files_count, error_package_files_count)

pending_cleanup_policies_count = Packages::Cleanup::Policy.runnable.count
log_extra_metadata_on_done(:pending_cleanup_policies_count, pending_cleanup_policies_count)
end
end

@@ -0,0 +1,8 @@
---
name: legacy_merge_request_state_check_for_merged_result_pipelines
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/91849
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/367019
milestone: '15.2'
type: development
group: group::pipeline execution
default_enabled: false

@@ -1,8 +0,0 @@
---
table_name: post_migration_test_table
classes: []
feature_categories:
- database
description: Test table to verify the behavior of the post-deploy independent pipeline
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/91119
milestone: '15.2'

@@ -0,0 +1,11 @@
# frozen_string_literal: true

class AddPackagesCleanupPoliciesWorkerCapacityToApplicationSettings < Gitlab::Database::Migration[2.0]
def change
add_column :application_settings,
:package_registry_cleanup_policies_worker_capacity,
:integer,
default: 2,
null: false
end
end

@@ -0,0 +1,17 @@
# frozen_string_literal: true

class AddApplicationSettingsPackagesCleanupPoliciesWorkerCapacityConstraint < Gitlab::Database::Migration[2.0]
CONSTRAINT_NAME = 'app_settings_pkg_registry_cleanup_pol_worker_capacity_gte_zero'

disable_ddl_transaction!

def up
add_check_constraint :application_settings,
'package_registry_cleanup_policies_worker_capacity >= 0',
CONSTRAINT_NAME
end

def down
remove_check_constraint :application_settings, CONSTRAINT_NAME
end
end

@@ -0,0 +1,18 @@
# frozen_string_literal: true

class AddEnabledPoliciesIndexToPackagesCleanupPolicies < Gitlab::Database::Migration[2.0]
disable_ddl_transaction!

INDEX_NAME = 'idx_enabled_pkgs_cleanup_policies_on_next_run_at_project_id'

def up
add_concurrent_index :packages_cleanup_policies,
[:next_run_at, :project_id],
where: "keep_n_duplicated_package_files <> 'all'",
name: INDEX_NAME
end

def down
remove_concurrent_index_by_name :packages_cleanup_policies, INDEX_NAME
end
end

@@ -0,0 +1,13 @@
# frozen_string_literal: true

class DropPostMigrationTestTable < Gitlab::Database::Migration[2.0]
def up
drop_table :post_migration_test_table
end

def down
create_table :post_migration_test_table do |t|
t.integer :status, null: false
end
end
end

db/schema_migrations/20220713175658 (new file, 1 line)

@@ -0,0 +1 @@
6bbaa8006a848a65e866c7836d0b0e28e3c303d28b329f5e12f978dd895e868f

db/schema_migrations/20220713175737 (new file, 1 line)

@@ -0,0 +1 @@
95a535d8f97ec96df918547aff7947acacbdf37fd0d3656878c9c60d80f3fd02

db/schema_migrations/20220713175812 (new file, 1 line)

@@ -0,0 +1 @@
41e42a51a0c5b3af8d94edc25e9421a754d6fc517f343bd718b16fd6bfc383f3

db/schema_migrations/20220715160023 (new file, 1 line)

@@ -0,0 +1 @@
3696ff7ea12600702911895c085a85b49e613bc133a580d895fc53cf1f6912a8

@@ -11333,12 +11333,14 @@ CREATE TABLE application_settings (
error_tracking_api_url text,
git_rate_limit_users_allowlist text[] DEFAULT '{}'::text[] NOT NULL,
error_tracking_access_token_encrypted text,
package_registry_cleanup_policies_worker_capacity integer DEFAULT 2 NOT NULL,
CONSTRAINT app_settings_container_reg_cleanup_tags_max_list_size_positive CHECK ((container_registry_cleanup_tags_service_max_list_size >= 0)),
CONSTRAINT app_settings_container_registry_pre_import_tags_rate_positive CHECK ((container_registry_pre_import_tags_rate >= (0)::numeric)),
CONSTRAINT app_settings_dep_proxy_ttl_policies_worker_capacity_positive CHECK ((dependency_proxy_ttl_group_policy_worker_capacity >= 0)),
CONSTRAINT app_settings_ext_pipeline_validation_service_url_text_limit CHECK ((char_length(external_pipeline_validation_service_url) <= 255)),
CONSTRAINT app_settings_git_rate_limit_users_allowlist_max_usernames CHECK ((cardinality(git_rate_limit_users_allowlist) <= 100)),
CONSTRAINT app_settings_p_cleanup_package_file_worker_capacity_positive CHECK ((packages_cleanup_package_file_worker_capacity >= 0)),
CONSTRAINT app_settings_pkg_registry_cleanup_pol_worker_capacity_gte_zero CHECK ((package_registry_cleanup_policies_worker_capacity >= 0)),
CONSTRAINT app_settings_registry_exp_policies_worker_capacity_positive CHECK ((container_registry_expiration_policies_worker_capacity >= 0)),
CONSTRAINT app_settings_yaml_max_depth_positive CHECK ((max_yaml_depth > 0)),
CONSTRAINT app_settings_yaml_max_size_positive CHECK ((max_yaml_size_bytes > 0)),

@@ -18885,20 +18887,6 @@ CREATE SEQUENCE pool_repositories_id_seq

ALTER SEQUENCE pool_repositories_id_seq OWNED BY pool_repositories.id;

CREATE TABLE post_migration_test_table (
id bigint NOT NULL,
status integer NOT NULL
);

CREATE SEQUENCE post_migration_test_table_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;

ALTER SEQUENCE post_migration_test_table_id_seq OWNED BY post_migration_test_table.id;

CREATE TABLE postgres_async_indexes (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,

@@ -23384,8 +23372,6 @@ ALTER TABLE ONLY plans ALTER COLUMN id SET DEFAULT nextval('plans_id_seq'::regcl

ALTER TABLE ONLY pool_repositories ALTER COLUMN id SET DEFAULT nextval('pool_repositories_id_seq'::regclass);

ALTER TABLE ONLY post_migration_test_table ALTER COLUMN id SET DEFAULT nextval('post_migration_test_table_id_seq'::regclass);

ALTER TABLE ONLY postgres_async_indexes ALTER COLUMN id SET DEFAULT nextval('postgres_async_indexes_id_seq'::regclass);

ALTER TABLE ONLY postgres_reindex_actions ALTER COLUMN id SET DEFAULT nextval('postgres_reindex_actions_id_seq'::regclass);

@@ -25488,9 +25474,6 @@ ALTER TABLE ONLY plans
ALTER TABLE ONLY pool_repositories
ADD CONSTRAINT pool_repositories_pkey PRIMARY KEY (id);

ALTER TABLE ONLY post_migration_test_table
ADD CONSTRAINT post_migration_test_table_pkey PRIMARY KEY (id);

ALTER TABLE ONLY postgres_async_indexes
ADD CONSTRAINT postgres_async_indexes_pkey PRIMARY KEY (id);

@@ -27042,6 +27025,8 @@ CREATE INDEX idx_eaprpb_external_approval_rule_id ON external_approval_rules_pro

CREATE INDEX idx_elastic_reindexing_slices_on_elastic_reindexing_subtask_id ON elastic_reindexing_slices USING btree (elastic_reindexing_subtask_id);

CREATE INDEX idx_enabled_pkgs_cleanup_policies_on_next_run_at_project_id ON packages_cleanup_policies USING btree (next_run_at, project_id) WHERE (keep_n_duplicated_package_files <> 'all'::text);

CREATE UNIQUE INDEX idx_environment_merge_requests_unique_index ON deployment_merge_requests USING btree (environment_id, merge_request_id);

CREATE UNIQUE INDEX idx_external_audit_event_destination_id_key_uniq ON audit_events_streaming_headers USING btree (key, external_audit_event_destination_id);

@@ -175,6 +175,7 @@ Example response:
"container_registry_expiration_policies_caching": true,
"container_registry_expiration_policies_worker_capacity": 4,
"container_registry_token_expire_delay": 5,
"package_registry_cleanup_policies_worker_capacity": 2,
"repository_storages": ["default"],
"plantuml_enabled": false,
"plantuml_url": null,

@@ -271,6 +272,7 @@ listed in the descriptions of the relevant settings.
| `container_registry_expiration_policies_caching` | boolean | no | Caching during the execution of [cleanup policies](../user/packages/container_registry/reduce_container_registry_storage.md#set-cleanup-limits-to-conserve-resources). |
| `container_registry_expiration_policies_worker_capacity` | integer | no | Number of workers for [cleanup policies](../user/packages/container_registry/reduce_container_registry_storage.md#set-cleanup-limits-to-conserve-resources). |
| `container_registry_token_expire_delay` | integer | no | Container Registry token duration in minutes. |
| `package_registry_cleanup_policies_worker_capacity` | integer | no | Number of workers assigned to the packages cleanup policies. |
| `deactivate_dormant_users` | boolean | no | Enable [automatic deactivation of dormant users](../user/admin_area/moderate_users.md#automatically-deactivate-dormant-users). |
| `default_artifacts_expire_in` | string | no | Set the default expiration time for each job's artifacts. |
| `default_branch_name` | string | no | [Instance-level custom initial branch name](../user/project/repository/branches/default.md#instance-level-custom-initial-branch-name) ([introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/225258) in GitLab 13.2). |

@@ -1292,8 +1292,7 @@
<copy-code></copy-code>
</div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>
</code></pre>
<pre class="content-editor-code-block undefined code highlight"><code></code></pre>
04_05__leaf_blocks__fenced_code_blocks__012:
canonical: |
<pre><code></code></pre>

@@ -1557,8 +1556,9 @@
</p></pre>
</td></tr></table>
wysiwyg: |-
Error - check implementation:
Cannot read properties of undefined (reading 'className')
<table><tbody><tr><td colspan="1" rowspan="1"><pre><p>**Hello**,
</p><p><em>world</em>.
</p></pre><p></p></td></tr></tbody></table>
04_06__leaf_blocks__html_blocks__002:
canonical: |
<table>

@@ -1810,11 +1810,12 @@
</div>
<p data-sourcepos="7:1-7:4" dir="auto">okay</p>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>
<pre><p><code>
import Text.HTML.TagSoup

main :: IO ()
main = print $ parseTags tags</code></pre>
main = print $ parseTags tags
</code></p></pre>
<p>okay</p>
04_06__leaf_blocks__html_blocks__023:
canonical: |

@@ -4569,9 +4570,7 @@
<li data-sourcepos="7:1-7:3">c</li>
</ul>
wysiwyg: |-
<ul bullet="*"><li><p>a</p></li><li><p></p><pre class="content-editor-code-block undefined code highlight"><code>b

</code></pre></li><li><p>c</p></li></ul>
<ul bullet="*"><li><p>a</p></li><li><p></p><pre class="content-editor-code-block undefined code highlight"><code>b</code></pre></li><li><p>c</p></li></ul>
05_04__container_blocks__lists__019:
canonical: |
<ul>

@@ -7559,11 +7558,11 @@
07_01__gitlab_specific_markdown__footnotes__001:
canonical: ""
static: |-
<p data-sourcepos="1:1-1:27" dir="auto">footnote reference tag <sup class="footnote-ref"><a href="#fn-1-9470" id="fnref-1-9470" data-footnote-ref>1</a></sup></p>
<p data-sourcepos="1:1-1:27" dir="auto">footnote reference tag <sup class="footnote-ref"><a href="#fn-1-9034" id="fnref-1-9034" data-footnote-ref>1</a></sup></p>
<section data-footnotes class="footnotes">
<ol>
<li id="fn-1-9470">
<p data-sourcepos="3:7-3:19">footnote text <a href="#fnref-1-9470" data-footnote-backref aria-label="Back to content" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
<li id="fn-1-9034">
<p data-sourcepos="3:7-3:19">footnote text <a href="#fnref-1-9034" data-footnote-backref aria-label="Back to content" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
</li>
</ol>
</section>

@@ -2502,13 +2502,7 @@
"attrs": {
"language": null,
"class": "code highlight"
},
"content": [
{
"type": "text",
"text": "\n "
}
]
}
}
]
}

@@ -2886,8 +2880,69 @@
]
}
04_06__leaf_blocks__html_blocks__001: |-
Error - check implementation:
Cannot read properties of undefined (reading 'className')
{
"type": "doc",
"content": [
{
"type": "table",
"attrs": {
"isMarkdown": null
},
"content": [
{
"type": "tableRow",
"content": [
{
"type": "tableCell",
"attrs": {
"colspan": 1,
"rowspan": 1,
"colwidth": null
},
"content": [
{
"type": "pre",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"text": "**Hello**,\n"
}
]
},
{
"type": "paragraph",
"content": [
{
"type": "text",
"marks": [
{
"type": "italic"
}
],
"text": "world"
},
{
"type": "text",
"text": ".\n"
}
]
}
]
},
{
"type": "paragraph"
}
]
}
]
}
]
}
]
}
04_06__leaf_blocks__html_blocks__002: |-
{
"type": "doc",

@@ -3304,15 +3359,21 @@
"type": "doc",
"content": [
{
"type": "codeBlock",
"attrs": {
"language": null,
"class": "code highlight"
},
"type": "pre",
"content": [
{
"type": "text",
"text": "\nimport Text.HTML.TagSoup\n\nmain :: IO ()\nmain = print $ parseTags tags"
"type": "paragraph",
"content": [
{
"type": "text",
"marks": [
{
"type": "code"
}
],
"text": "\nimport Text.HTML.TagSoup\n\nmain :: IO ()\nmain = print $ parseTags tags\n"
}
]
}
]
},

@@ -9571,7 +9632,7 @@
"content": [
{
"type": "text",
"text": "b\n\n"
"text": "b"
}
]
}

@@ -391,7 +391,6 @@ personal_access_tokens: :gitlab_main
plan_limits: :gitlab_main
plans: :gitlab_main
pool_repositories: :gitlab_main
post_migration_test_table: :gitlab_main
postgres_async_indexes: :gitlab_shared
postgres_autovacuum_activity: :gitlab_shared
postgres_foreign_keys: :gitlab_shared

@@ -122,7 +122,6 @@
"fuzzaldrin-plus": "^0.6.0",
"graphql": "^15.7.2",
"graphql-tag": "^2.11.0",
"hast-util-to-string": "^2.0.0",
"highlight.js": "^11.5.1",
"immer": "^7.0.7",
"ipaddr.js": "^1.9.1",

@@ -77,6 +77,7 @@ const {
link,
listItem,
orderedList,
pre,
strike,
table,
tableRow,

@@ -1050,6 +1051,34 @@ function matchwo(a,b)

*baz*`,
},
{
markdown: `
<table><tr><td>
<pre>
**Hello**,

_world_.
</pre>
</td></tr></table>
`,
expectedDoc: doc(
table(
source('<table><tr><td>\n<pre>\n**Hello**,\n\n_world_.\n</pre>\n</td></tr></table>'),
tableRow(
source('<tr><td>\n<pre>\n**Hello**,\n\n_world_.\n</pre>\n</td></tr>'),
tableCell(
source('<td>\n<pre>\n**Hello**,\n\n_world_.\n</pre>\n</td>'),
pre(
source('<pre>\n**Hello**,\n\n_world_.\n</pre>'),
paragraph(source('**Hello**,'), '**Hello**,\n'),
paragraph(source('_world_.\n'), italic(source('_world_'), 'world'), '.\n'),
),
paragraph(),
),
),
),
),
},
];

const runOnly = examples.find((example) => example.only === true);

@@ -1,11 +1,12 @@
import { render } from '~/lib/gfm';

describe('gfm', () => {
const markdownToAST = async (markdown) => {
const markdownToAST = async (markdown, skipRendering = []) => {
let result;

await render({
markdown,
skipRendering,
renderer: (tree) => {
result = tree;
},

@@ -58,36 +59,62 @@ describe('gfm', () => {
expect(result).toEqual(rendered);
});

it('transforms footnotes into footnotedefinition and footnotereference tags', async () => {
const result = await markdownToAST(
`footnote reference [^footnote]
describe('when skipping the rendering of footnote reference and definition nodes', () => {
it('transforms footnotes into footnotedefinition and footnotereference tags', async () => {
const result = await markdownToAST(
`footnote reference [^footnote]

[^footnote]: Footnote definition`,
['footnoteReference', 'footnoteDefinition'],
);

expectInRoot(
result,
expect.objectContaining({
children: expect.arrayContaining([
expect.objectContaining({
type: 'element',
tagName: 'footnotereference',
properties: {
identifier: 'footnote',
label: 'footnote',
},
}),
]),
}),
);

expectInRoot(
result,
expect.objectContaining({
tagName: 'footnotedefinition',
properties: {
identifier: 'footnote',
label: 'footnote',
},
}),
);
});
});
});

describe('when skipping the rendering of code blocks', () => {
it('transforms code nodes into codeblock html tags', async () => {
const result = await markdownToAST(
`
\`\`\`javascript
console.log('Hola');
\`\`\`\
`,
['code'],
);

expectInRoot(
result,
expect.objectContaining({
children: expect.arrayContaining([
expect.objectContaining({
type: 'element',
tagName: 'footnotereference',
properties: {
identifier: 'footnote',
label: 'footnote',
},
}),
]),
}),
);

expectInRoot(
result,
expect.objectContaining({
tagName: 'footnotedefinition',
tagName: 'codeblock',
properties: {
identifier: 'footnote',
label: 'footnote',
language: 'javascript',
},
}),
);

@@ -104,6 +104,9 @@ RSpec.describe ApplicationSetting do
it { is_expected.to validate_numericality_of(:packages_cleanup_package_file_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(nil).for(:packages_cleanup_package_file_worker_capacity) }

it { is_expected.to validate_numericality_of(:package_registry_cleanup_policies_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(nil).for(:package_registry_cleanup_policies_worker_capacity) }

it { is_expected.to validate_numericality_of(:snippet_size_limit).only_integer.is_greater_than(0) }
it { is_expected.to validate_numericality_of(:wiki_page_max_content_bytes).only_integer.is_greater_than_or_equal_to(1024) }
it { is_expected.to validate_presence_of(:max_artifacts_size) }

@@ -26,6 +26,30 @@ RSpec.describe Packages::Cleanup::Policy, type: :model do
it { is_expected.to contain_exactly(active_policy) }
end

describe '.with_packages' do
let_it_be(:policy_with_packages) { create(:packages_cleanup_policy) }
let_it_be(:policy_without_packages) { create(:packages_cleanup_policy) }
let_it_be(:package) { create(:package, project: policy_with_packages.project) }

subject { described_class.with_packages }

it { is_expected.to contain_exactly(policy_with_packages) }
end

describe '.runnable' do
let_it_be(:runnable_policy_with_packages) { create(:packages_cleanup_policy, :runnable) }
let_it_be(:runnable_policy_without_packages) { create(:packages_cleanup_policy, :runnable) }
let_it_be(:non_runnable_policy_with_packages) { create(:packages_cleanup_policy) }
let_it_be(:non_runnable_policy_without_packages) { create(:packages_cleanup_policy) }

let_it_be(:package1) { create(:package, project: runnable_policy_with_packages.project) }
let_it_be(:package2) { create(:package, project: non_runnable_policy_with_packages.project) }

subject { described_class.runnable }

it { is_expected.to contain_exactly(runnable_policy_with_packages) }
end

describe '#keep_n_duplicated_package_files_disabled?' do
subject { policy.keep_n_duplicated_package_files_disabled? }

@@ -29,36 +29,6 @@ RSpec.describe Ci::BuildPresenter do
end
end

describe '#erased_by_user?' do
it 'takes a build and optional params' do
expect(presenter).not_to be_erased_by_user
end
end

describe '#erased_by_name' do
context 'when build is not erased' do
before do
expect(presenter).to receive(:erased_by_user?).and_return(false)
end

it 'returns nil' do
expect(presenter.erased_by_name).to be_nil
end
end

context 'when build is erased' do
before do
expect(presenter).to receive(:erased_by_user?).and_return(true)
expect(build).to receive(:erased_by)
.and_return(double(:user, name: 'John Doe'))
end

it 'returns the name of the eraser' do
expect(presenter.erased_by_name).to eq('John Doe')
end
end
end

describe '#status_title' do
context 'when build is auto-canceled' do
before do

@@ -168,58 +138,6 @@ RSpec.describe Ci::BuildPresenter do
end
end

describe '#tooltip_message' do
context 'When build has failed' do
let(:build) { create(:ci_build, :script_failure, pipeline: pipeline) }

it 'returns the reason of failure' do
tooltip = subject.tooltip_message

expect(tooltip).to eq("#{build.name} - failed - (script failure)")
end
end

context 'When build has failed and retried' do
let(:build) { create(:ci_build, :script_failure, :retried, pipeline: pipeline) }

it 'includes the reason of failure and the retried title' do
tooltip = subject.tooltip_message

expect(tooltip).to eq("#{build.name} - failed - (script failure) (retried)")
end
end

context 'When build has failed and is allowed to' do
let(:build) { create(:ci_build, :script_failure, :allowed_to_fail, pipeline: pipeline) }

it 'includes the reason of failure' do
tooltip = subject.tooltip_message

expect(tooltip).to eq("#{build.name} - failed - (script failure) (allowed to fail)")
end
end

context 'For any other build (no retried)' do
let(:build) { create(:ci_build, :success, pipeline: pipeline) }

it 'includes build name and status' do
tooltip = subject.tooltip_message

expect(tooltip).to eq("#{build.name} - passed")
end
end

context 'For any other build (retried)' do
let(:build) { create(:ci_build, :success, :retried, pipeline: pipeline) }

it 'includes build name and status' do
tooltip = subject.tooltip_message

expect(tooltip).to eq("#{build.name} - passed (retried)")
end
end
end

describe '#execute_in' do
subject { presenter.execute_in }

@@ -330,6 +330,10 @@ RSpec.configure do |config|
# most cases. We do test the email verification flow in the appropriate specs.
stub_feature_flags(require_email_verification: false)

# This feature flag is for selectively disabling by actor, therefore we don't enable it by default.
# See https://docs.gitlab.com/ee/development/feature_flags/#selectively-disable-by-actor
stub_feature_flags(legacy_merge_request_state_check_for_merged_result_pipelines: false)

allow(Gitlab::GitalyClient).to receive(:can_use_disk?).and_return(enable_rugged)
else
unstub_all_feature_flags

@@ -357,6 +357,7 @@ RSpec.describe 'Every Sidekiq worker' do
'ObjectStorage::BackgroundMoveWorker' => 5,
'ObjectStorage::MigrateUploadsWorker' => 3,
'Packages::CleanupPackageFileWorker' => 0,
'Packages::Cleanup::ExecutePolicyWorker' => 0,
'Packages::Composer::CacheUpdateWorker' => false,
'Packages::Go::SyncPackagesWorker' => 3,
'Packages::MarkPackageFilesForDestructionWorker' => 3,

spec/workers/packages/cleanup/execute_policy_worker_spec.rb (new file, 160 lines)

@@ -0,0 +1,160 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Packages::Cleanup::ExecutePolicyWorker do
let(:worker) { described_class.new }

describe '#perform_work' do
subject(:perform_work) { worker.perform_work }

shared_examples 'not executing any policy' do
it 'is a no op' do
expect(::Packages::Cleanup::ExecutePolicyService).not_to receive(:new)

expect { perform_work }.not_to change { Packages::PackageFile.installable.count }
end
end

context 'with no policies' do
it_behaves_like 'not executing any policy'
end

context 'with no runnable policies' do
let_it_be(:policy) { create(:packages_cleanup_policy) }

it_behaves_like 'not executing any policy'
end

context 'with runnable policies linked to no packages' do
let_it_be(:policy) { create(:packages_cleanup_policy, :runnable) }

it_behaves_like 'not executing any policy'
end

context 'with runnable policies linked to packages' do
let_it_be(:policy) { create(:packages_cleanup_policy, :runnable, keep_n_duplicated_package_files: '1') }
let_it_be(:package) { create(:package, project: policy.project) }

let_it_be(:package_file1) { create(:package_file, file_name: 'test1', package: package) }
let_it_be(:package_file2) { create(:package_file, file_name: 'test1', package: package) }

include_examples 'an idempotent worker' do
it 'executes the policy' do
expect(::Packages::Cleanup::ExecutePolicyService)
.to receive(:new).with(policy).and_call_original
expect_log_extra_metadata(:project_id, policy.project_id)
expect_log_extra_metadata(:execution_timeout, false)
expect_log_extra_metadata(:marked_package_files_total_count, 1)
expect_log_extra_metadata(:unique_package_id_and_file_name_total_count, 1)

expect { perform_work }
.to change { package.package_files.installable.count }.by(-1)
.and change { policy.reload.next_run_at.future? }.from(false).to(true)
end

context 'with a timeout' do
let(:mark_service_response) do
ServiceResponse.error(
message: 'Timeout',
payload: { marked_package_files_count: 1 }
)
end

it 'executes the policy partially' do
expect_next_instance_of(::Packages::MarkPackageFilesForDestructionService) do |service|
expect(service).to receive(:execute).and_return(mark_service_response)
end

expect_log_extra_metadata(:project_id, policy.project_id)
expect_log_extra_metadata(:execution_timeout, true)
expect_log_extra_metadata(:marked_package_files_total_count, 1)
expect_log_extra_metadata(:unique_package_id_and_file_name_total_count, 1)

expect { perform_work }
.to change { policy.reload.next_run_at.future? }.from(false).to(true)
end
end
end

context 'with several eligible policies' do
let_it_be(:policy2) { create(:packages_cleanup_policy, :runnable) }
let_it_be(:package2) { create(:package, project: policy2.project) }

before do
policy2.update_column(:next_run_at, 100.years.ago)
end

it 'executes the most urgent policy' do
expect(::Packages::Cleanup::ExecutePolicyService)
.to receive(:new).with(policy2).and_call_original
expect_log_extra_metadata(:project_id, policy2.project_id)
expect_log_extra_metadata(:execution_timeout, false)
expect_log_extra_metadata(:marked_package_files_total_count, 0)
expect_log_extra_metadata(:unique_package_id_and_file_name_total_count, 0)

expect { perform_work }
.to change { policy2.reload.next_run_at.future? }.from(false).to(true)
.and not_change { policy.reload.next_run_at }
end
end
end

context 'with runnable policy linked to packages in a disabled state' do
let_it_be(:policy) { create(:packages_cleanup_policy, :runnable, keep_n_duplicated_package_files: 'all') }
let_it_be(:package) { create(:package, project: policy.project) }

it_behaves_like 'not executing any policy'
end

def expect_log_extra_metadata(key, value)
expect(worker).to receive(:log_extra_metadata_on_done).with(key, value)
end
end

describe '#remaining_work_count' do
subject { worker.remaining_work_count}

context 'with no policies' do
it { is_expected.to eq(0) }
end

context 'with no runnable policies' do
let_it_be(:policy) { create(:packages_cleanup_policy) }

it { is_expected.to eq(0) }
end

context 'with runnable policies linked to no packages' do
let_it_be(:policy) { create(:packages_cleanup_policy, :runnable) }

it { is_expected.to eq(0) }
end

context 'with runnable policies linked to packages' do
let_it_be(:policy) { create(:packages_cleanup_policy, :runnable) }
let_it_be(:package) { create(:package, project: policy.project) }

it { is_expected.to eq(1) }
end

context 'with runnable policy linked to packages in a disabled state' do
let_it_be(:policy) { create(:packages_cleanup_policy, :runnable, keep_n_duplicated_package_files: 'all') }
let_it_be(:package) { create(:package, project: policy.project) }

it { is_expected.to eq(0) }
end
end

describe '#max_running_jobs' do
let(:capacity) { 50 }

subject { worker.max_running_jobs }

before do
stub_application_setting(package_registry_cleanup_policies_worker_capacity: capacity)
end

it { is_expected.to eq(capacity) }
end
end

@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe Packages::CleanupPackageRegistryWorker do
describe '#perform' do
let_it_be_with_reload(:package_files) { create_list(:package_file, 2, :pending_destruction) }
let_it_be(:policy) { create(:packages_cleanup_policy, :runnable) }
let_it_be(:package) { create(:package, project: policy.project) }

let(:worker) { described_class.new }

@@ -34,6 +36,28 @@ RSpec.describe Packages::CleanupPackageRegistryWorker do
end
end

context 'with runnable policies' do
it_behaves_like 'an idempotent worker'

it 'queues the cleanup job' do
expect(Packages::Cleanup::ExecutePolicyWorker).to receive(:perform_with_capacity)

perform
end
end

context 'with no runnable policies' do
before do
policy.update_column(:next_run_at, 5.minutes.from_now)
end

it 'does not queue the cleanup job' do
expect(Packages::Cleanup::ExecutePolicyWorker).not_to receive(:perform_with_capacity)

perform
end
end

describe 'counts logging' do
let_it_be(:processing_package_file) { create(:package_file, status: :processing) }

@@ -41,6 +65,7 @@ RSpec.describe Packages::CleanupPackageRegistryWorker do
expect(worker).to receive(:log_extra_metadata_on_done).with(:pending_destruction_package_files_count, 2)
expect(worker).to receive(:log_extra_metadata_on_done).with(:processing_package_files_count, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:error_package_files_count, 0)
expect(worker).to receive(:log_extra_metadata_on_done).with(:pending_cleanup_policies_count, 1)

perform
end

@@ -6708,13 +6708,6 @@ hast-util-to-parse5@^7.0.0:
web-namespaces "^2.0.0"
zwitch "^2.0.0"

hast-util-to-string@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/hast-util-to-string/-/hast-util-to-string-2.0.0.tgz#b008b0a4ea472bf34dd390b7eea1018726ae152a"
integrity sha512-02AQ3vLhuH3FisaMM+i/9sm4OXGSq1UhOOCpTLLQtHdL3tZt7qil69r8M8iDkZYyC0HCFylcYoP+8IO7ddta1A==
dependencies:
"@types/hast" "^2.0.0"

hastscript@^7.0.0:
version "7.0.2"
resolved "https://registry.yarnpkg.com/hastscript/-/hastscript-7.0.2.tgz#d811fc040817d91923448a28156463b2e40d590a"