Add latest changes from gitlab-org/gitlab@master

commit 174560aed8 (parent cc803c04b8)
59 changed files with 769 additions and 181 deletions
@ -2275,7 +2275,6 @@ Gitlab/NamespacedClass:
    - 'app/models/application_setting/term.rb'
    - 'app/models/approval.rb'
    - 'app/models/audit_event.rb'
    - 'app/models/audit_event_archived.rb'
    - 'app/models/authentication_event.rb'
    - 'app/models/award_emoji.rb'
    - 'app/models/badge.rb'
@ -1,10 +0,0 @@
# frozen_string_literal: true

# This model is not intended to be used.
# It is a temporary reference to the pre-partitioned
# audit_events table.
# Please refer to https://gitlab.com/groups/gitlab-org/-/epics/3206
# for details.
class AuditEventArchived < ApplicationRecord
  self.table_name = 'audit_events_archived'
end
@ -109,6 +109,7 @@ class Note < ApplicationRecord
  scope :with_updated_at, ->(time) { where(updated_at: time) }
  scope :inc_author_project, -> { includes(:project, :author) }
  scope :inc_author, -> { includes(:author) }
  scope :with_api_entity_associations, -> { preload(:note_diff_file, :author) }
  scope :inc_relations_for_view, -> do
    includes(:project, { author: :status }, :updated_by, :resolved_by, :award_emoji,
             { system_note_metadata: :description_version }, :note_diff_file, :diff_note_positions, :suggestions)
@ -187,6 +187,14 @@
  :weight: 1
  :idempotent:
  :tags: []
- :name: cronjob:database_batched_background_migration
  :feature_category: :database
  :has_external_dependencies:
  :urgency: :low
  :resource_boundary: :unknown
  :weight: 1
  :idempotent: true
  :tags: []
- :name: cronjob:environments_auto_stop_cron
  :feature_category: :continuous_delivery
  :has_external_dependencies:
48  app/workers/database/batched_background_migration_worker.rb  Normal file
@ -0,0 +1,48 @@
# frozen_string_literal: true

module Database
  class BatchedBackgroundMigrationWorker
    include ApplicationWorker
    include CronjobQueue # rubocop:disable Scalability/CronWorkerContext

    feature_category :database
    idempotent!

    def perform
      return unless Feature.enabled?(:execute_batched_migrations_on_schedule, type: :ops) && active_migration

      with_exclusive_lease(active_migration.interval) do
        # Now that we have the exclusive lease, reload migration in case another process has changed it.
        # This is a temporary solution until we have better concurrency handling around job execution
        #
        # We also have to disable this cop, because ApplicationRecord aliases reset to reload, but our database
        # models don't inherit from ApplicationRecord
        active_migration.reload # rubocop:disable Cop/ActiveRecordAssociationReload

        run_active_migration if active_migration.active? && active_migration.interval_elapsed?
      end
    end

    private

    def active_migration
      @active_migration ||= Gitlab::Database::BackgroundMigration::BatchedMigration.active_migration
    end

    def run_active_migration
      Gitlab::Database::BackgroundMigration::BatchedMigrationRunner.new.run_migration_job(active_migration)
    end

    def with_exclusive_lease(timeout)
      lease = Gitlab::ExclusiveLease.new(lease_key, timeout: timeout * 2)

      yield if lease.try_obtain
    ensure
      lease&.cancel
    end

    def lease_key
      self.class.name.demodulize.underscore
    end
  end
end
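The worker above gates each run on an ops feature flag and an exclusive lease sized to twice the migration interval, so overlapping Sidekiq-cron ticks cannot run the same migration concurrently. A minimal sketch of that locking pattern in isolation, using only the `Gitlab::ExclusiveLease` calls the worker itself makes (`try_obtain`, `cancel`); the interval value here is illustrative:

```ruby
# Illustrative sketch of the worker's lease guard (not part of the commit).
interval  = 2.minutes                             # active_migration.interval
lease_key = 'batched_background_migration_worker' # self.class.name.demodulize.underscore

lease = Gitlab::ExclusiveLease.new(lease_key, timeout: interval * 2)

if lease.try_obtain
  begin
    # reload the migration, then run one batched job if it is still active
  ensure
    lease.cancel
  end
else
  # another process holds the lease; skip this tick
end
```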
@ -0,0 +1,5 @@
---
title: Add foreign key from web_hooks to groups
merge_request: 57735
author:
type: other
@ -0,0 +1,5 @@
---
title: Add Vulnerabilities::FindingEvidence model
merge_request: 56790
author:
type: changed
@ -0,0 +1,5 @@
---
title: Initialize conversion of events.id to bigint, and add execute_batched_migrations_on_schedule feature flag to control scheduled background migrations
merge_request: 51332
author:
type: other
5  changelogs/unreleased/id-n-1-for-commits-notes.yml  Normal file
@ -0,0 +1,5 @@
---
title: Resolve N + 1 for commits notes API
merge_request: 57641
author:
type: performance
@ -0,0 +1,5 @@
---
title: Add geo database changes for pipeline artifact replication
merge_request: 57506
author:
type: added
5  changelogs/unreleased/sh-log-upload-api-exceed-size.yml  Normal file
@ -0,0 +1,5 @@
---
title: Log message when upload via API exceeds limit
merge_request: 57774
author:
type: added
@ -0,0 +1,8 @@
---
name: execute_batched_migrations_on_schedule
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/51332
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/326241
milestone: '13.11'
type: ops
group: group::database
default_enabled: false
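Because the flag is `type: ops` and `default_enabled: false`, the cron worker above is a no-op until an operator turns it on. A sketch of toggling it from a Rails console, assuming the standard GitLab `Feature` interface used in the worker:

```ruby
# Rails console sketch (flag name taken from the YAML above).
Feature.enable(:execute_batched_migrations_on_schedule)
Feature.enabled?(:execute_batched_migrations_on_schedule, type: :ops) # => true

# Turn scheduled execution back off:
Feature.disable(:execute_batched_migrations_on_schedule)
```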
@ -565,6 +565,9 @@ Gitlab.com do
  Settings.cron_jobs['namespaces_in_product_marketing_emails_worker'] ||= Settingslogic.new({})
  Settings.cron_jobs['namespaces_in_product_marketing_emails_worker']['cron'] ||= '0 9 * * *'
  Settings.cron_jobs['namespaces_in_product_marketing_emails_worker']['job_class'] = 'Namespaces::InProductMarketingEmailsWorker'
  Settings.cron_jobs['batched_background_migrations_worker'] ||= Settingslogic.new({})
  Settings.cron_jobs['batched_background_migrations_worker']['cron'] ||= '* * * * *'
  Settings.cron_jobs['batched_background_migrations_worker']['job_class'] = 'Database::BatchedBackgroundMigrationWorker'
end

Gitlab.ee do
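The `'* * * * *'` schedule enqueues `Database::BatchedBackgroundMigrationWorker` every minute, and the `||=` guards mean an operator can override the cadence in `gitlab.yml` without patching this initializer. A small sketch of how the resolved settings read at boot (key names taken from the block above):

```ruby
# Sketch: resolved Sidekiq-cron entry for the new worker (values are the defaults set above).
Settings.cron_jobs['batched_background_migrations_worker']['cron']
# => "* * * * *"   (every minute unless overridden in gitlab.yml)
Settings.cron_jobs['batched_background_migrations_worker']['job_class']
# => "Database::BatchedBackgroundMigrationWorker"
```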
@ -0,0 +1,9 @@
# frozen_string_literal: true

class AddMetricsToBatchedBackgroundMigrationJobs < ActiveRecord::Migration[6.0]
  DOWNTIME = false

  def change
    add_column :batched_background_migration_jobs, :metrics, :jsonb, null: false, default: {}
  end
end
@ -0,0 +1,21 @@
# frozen_string_literal: true

class InitializeConversionOfEventsIdToBigint < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def up
    # Initialize the conversion of events.id to bigint
    # Primary Key of the Events table
    initialize_conversion_of_integer_to_bigint :events, :id
  end

  def down
    trigger_name = rename_trigger_name(:events, :id, :id_convert_to_bigint)

    remove_rename_triggers_for_postgresql :events, trigger_name

    remove_column :events, :id_convert_to_bigint
  end
end
@ -0,0 +1,21 @@
# frozen_string_literal: true

class InitializeConversionOfPushEventPayloadsEventIdToBigint < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def up
    # Foreign key that references events.id
    # Also Primary key of the push_event_payloads table
    initialize_conversion_of_integer_to_bigint :push_event_payloads, :event_id, primary_key: :event_id
  end

  def down
    trigger_name = rename_trigger_name(:push_event_payloads, :event_id, :event_id_convert_to_bigint)

    remove_rename_triggers_for_postgresql :push_event_payloads, trigger_name

    remove_column :push_event_payloads, :event_id_convert_to_bigint
  end
end
@ -0,0 +1,19 @@
# frozen_string_literal: true

class AddNotValidForeignKeyToGroupHooks < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def up
    with_lock_retries do
      add_foreign_key :web_hooks, :namespaces, column: :group_id, on_delete: :cascade, validate: false
    end
  end

  def down
    with_lock_retries do
      remove_foreign_key_if_exists :web_hooks, column: :group_id
    end
  end
end
@ -0,0 +1,18 @@
# frozen_string_literal: true

class AddVerificationStateToCiPipelineArtifact < ActiveRecord::Migration[6.0]
  DOWNTIME = false

  def change
    change_table(:ci_pipeline_artifacts, bulk: true) do |t|
      t.column :verification_started_at, :datetime_with_timezone
      t.column :verification_retry_at, :datetime_with_timezone
      t.column :verified_at, :datetime_with_timezone
      t.integer :verification_state, default: 0, limit: 2, null: false
      t.integer :verification_retry_count, limit: 2
      t.binary :verification_checksum, using: 'verification_checksum::bytea'

      t.text :verification_failure # rubocop:disable Migration/AddLimitToTextColumns
    end
  end
end
@ -0,0 +1,19 @@
# frozen_string_literal: true

class AddVerificationFailureLimitToCiPipelineArtifact < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  CONSTRAINT_NAME = 'ci_pipeline_artifacts_verification_failure_text_limit'

  def up
    add_text_limit :ci_pipeline_artifacts, :verification_failure, 255, constraint_name: CONSTRAINT_NAME
  end

  def down
    remove_check_constraint(:ci_pipeline_artifacts, CONSTRAINT_NAME)
  end
end
@ -0,0 +1,27 @@
# frozen_string_literal: true

class AddVerificationIndexesToCiPipelineArtifacts < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false
  VERIFICATION_STATE_INDEX_NAME = "index_ci_pipeline_artifacts_verification_state"
  PENDING_VERIFICATION_INDEX_NAME = "index_ci_pipeline_artifacts_pending_verification"
  FAILED_VERIFICATION_INDEX_NAME = "index_ci_pipeline_artifacts_failed_verification"
  NEEDS_VERIFICATION_INDEX_NAME = "index_ci_pipeline_artifacts_needs_verification"

  disable_ddl_transaction!

  def up
    add_concurrent_index :ci_pipeline_artifacts, :verification_state, name: VERIFICATION_STATE_INDEX_NAME
    add_concurrent_index :ci_pipeline_artifacts, :verified_at, where: "(verification_state = 0)", order: { verified_at: 'ASC NULLS FIRST' }, name: PENDING_VERIFICATION_INDEX_NAME
    add_concurrent_index :ci_pipeline_artifacts, :verification_retry_at, where: "(verification_state = 3)", order: { verification_retry_at: 'ASC NULLS FIRST' }, name: FAILED_VERIFICATION_INDEX_NAME
    add_concurrent_index :ci_pipeline_artifacts, :verification_state, where: "(verification_state = 0 OR verification_state = 3)", name: NEEDS_VERIFICATION_INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :ci_pipeline_artifacts, VERIFICATION_STATE_INDEX_NAME
    remove_concurrent_index_by_name :ci_pipeline_artifacts, PENDING_VERIFICATION_INDEX_NAME
    remove_concurrent_index_by_name :ci_pipeline_artifacts, FAILED_VERIFICATION_INDEX_NAME
    remove_concurrent_index_by_name :ci_pipeline_artifacts, NEEDS_VERIFICATION_INDEX_NAME
  end
end
@ -0,0 +1,26 @@
# frozen_string_literal: true

class CreateVulnerabilityFindingEvidences < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  def up
    create_table_with_constraints :vulnerability_finding_evidences do |t|
      t.timestamps_with_timezone null: false

      t.references :vulnerability_occurrence, index: { name: 'finding_evidences_on_vulnerability_occurrence_id' }, null: false, foreign_key: { on_delete: :cascade }
      t.text :summary

      t.text_limit :summary, 8_000_000
    end
  end

  def down
    with_lock_retries do
      drop_table :vulnerability_finding_evidences
    end
  end
end
@ -0,0 +1,23 @@
# frozen_string_literal: true

class BackfillEventsIdForBigintConversion < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def up
    return unless Gitlab.dev_env_or_com?

    backfill_conversion_of_integer_to_bigint :events, :id, batch_size: 15000, sub_batch_size: 100
  end

  def down
    return unless Gitlab.dev_env_or_com?

    Gitlab::Database::BackgroundMigration::BatchedMigration
      .where(job_class_name: 'CopyColumnUsingBackgroundMigrationJob')
      .where(table_name: 'events', column_name: 'id')
      .where('job_arguments = ?', %w[id id_convert_to_bigint].to_json)
      .delete_all
  end
end
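The backfill above only registers a batched migration record; the actual copying happens later, one batch at a time, when the cron worker picks it up. A console sketch for checking on that record, assuming the model API used elsewhere in this commit (`active?`, `interval_elapsed?`):

```ruby
# Console sketch (hypothetical inspection; class, table, and column names from this commit).
migration = Gitlab::Database::BackgroundMigration::BatchedMigration
  .where(job_class_name: 'CopyColumnUsingBackgroundMigrationJob')
  .where(table_name: 'events', column_name: 'id')
  .first

migration.active?           # => true while batches remain to be copied
migration.interval_elapsed? # => whether the worker would schedule the next batch on its next tick
```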
@ -0,0 +1,24 @@
# frozen_string_literal: true

class BackfillPushEventPayloadEventIdForBigintConversion < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def up
    return unless Gitlab.dev_env_or_com?

    backfill_conversion_of_integer_to_bigint :push_event_payloads, :event_id, primary_key: :event_id,
      batch_size: 15000, sub_batch_size: 100
  end

  def down
    return unless Gitlab.dev_env_or_com?

    Gitlab::Database::BackgroundMigration::BatchedMigration
      .where(job_class_name: 'CopyColumnUsingBackgroundMigrationJob')
      .where(table_name: 'push_event_payloads', column_name: 'event_id')
      .where('job_arguments = ?', %w[event_id event_id_convert_to_bigint].to_json)
      .delete_all
  end
end
1  db/schema_migrations/20210311120152  Normal file
@ -0,0 +1 @@
2ad45eaf6589600d9aadd225b55451d9213a4d858ef2717b7151062f1db225c8

1  db/schema_migrations/20210311120153  Normal file
@ -0,0 +1 @@
3486452547ffa5da3e12837d2f184e356c90fdd1f016f85144a1ba4865825e87

1  db/schema_migrations/20210311120154  Normal file
@ -0,0 +1 @@
e169ea265b942f636b2386a432e04d9dfccdc95f04113400d44ce59e81537843

1  db/schema_migrations/20210311120155  Normal file
@ -0,0 +1 @@
b7af086a68c530dd528c4ceaf4bca8d04951c0f234f75a09922aa392bb17a796

1  db/schema_migrations/20210311120156  Normal file
@ -0,0 +1 @@
4715c46f5d76c8eb3a206ad3bbcc94a8c13d1d6a66a7824dba400b0aa49c8aa6

1  db/schema_migrations/20210325092215  Normal file
@ -0,0 +1 @@
ea819fd401c5566986fd495ed3b8aa0d296d6c9e3fedf2a10f34cb7fbaeedb20

1  db/schema_migrations/20210325150837  Normal file
@ -0,0 +1 @@
6022464130d7a5697f52b9238837c6a6d3363fd349cbcb14052ff52de6ea2e59

1  db/schema_migrations/20210325151758  Normal file
@ -0,0 +1 @@
de55a114773961e6cae9ebae36ac93e60676555fe4c2973527511bb3a2eae69d

1  db/schema_migrations/20210325152011  Normal file
@ -0,0 +1 @@
379fdb3c52e55b51ebdb4a3b1e67c12f19b15e97cce22eed351e33953e389c85

1  db/schema_migrations/20210326190903  Normal file
@ -0,0 +1 @@
d6181f8806592106305366f5e8ef508286ed447c1fce0de26f242de736b21809
|
@ -150,6 +150,24 @@ $$;
|
|||
|
||||
COMMENT ON FUNCTION table_sync_function_2be879775d() IS 'Partitioning migration: table sync for audit_events table';
|
||||
|
||||
CREATE FUNCTION trigger_07c94931164e() RETURNS trigger
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
BEGIN
|
||||
NEW."event_id_convert_to_bigint" := NEW."event_id";
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$;
|
||||
|
||||
CREATE FUNCTION trigger_69523443cc10() RETURNS trigger
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
BEGIN
|
||||
NEW."id_convert_to_bigint" := NEW."id";
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$;
|
||||
|
||||
CREATE TABLE audit_events (
|
||||
id bigint NOT NULL,
|
||||
author_id integer NOT NULL,
|
||||
|
@ -9854,7 +9872,8 @@ CREATE TABLE batched_background_migration_jobs (
|
|||
batch_size integer NOT NULL,
|
||||
sub_batch_size integer NOT NULL,
|
||||
status smallint DEFAULT 0 NOT NULL,
|
||||
attempts smallint DEFAULT 0 NOT NULL
|
||||
attempts smallint DEFAULT 0 NOT NULL,
|
||||
metrics jsonb DEFAULT '{}'::jsonb NOT NULL
|
||||
);
|
||||
|
||||
CREATE SEQUENCE batched_background_migration_jobs_id_seq
|
||||
|
@ -10672,8 +10691,16 @@ CREATE TABLE ci_pipeline_artifacts (
|
|||
file_format smallint NOT NULL,
|
||||
file text,
|
||||
expire_at timestamp with time zone,
|
||||
verification_started_at timestamp with time zone,
|
||||
verification_retry_at timestamp with time zone,
|
||||
verified_at timestamp with time zone,
|
||||
verification_state smallint DEFAULT 0 NOT NULL,
|
||||
verification_retry_count smallint,
|
||||
verification_checksum bytea,
|
||||
verification_failure text,
|
||||
CONSTRAINT check_191b5850ec CHECK ((char_length(file) <= 255)),
|
||||
CONSTRAINT check_abeeb71caf CHECK ((file IS NOT NULL))
|
||||
CONSTRAINT check_abeeb71caf CHECK ((file IS NOT NULL)),
|
||||
CONSTRAINT ci_pipeline_artifacts_verification_failure_text_limit CHECK ((char_length(verification_failure) <= 255))
|
||||
);
|
||||
|
||||
CREATE SEQUENCE ci_pipeline_artifacts_id_seq
|
||||
|
@ -12507,6 +12534,7 @@ CREATE TABLE events (
|
|||
target_type character varying,
|
||||
group_id bigint,
|
||||
fingerprint bytea,
|
||||
id_convert_to_bigint bigint DEFAULT 0 NOT NULL,
|
||||
CONSTRAINT check_97e06e05ad CHECK ((octet_length(fingerprint) <= 128))
|
||||
);
|
||||
|
||||
|
@ -16914,7 +16942,8 @@ CREATE TABLE push_event_payloads (
|
|||
commit_to bytea,
|
||||
ref text,
|
||||
commit_title character varying(70),
|
||||
ref_count integer
|
||||
ref_count integer,
|
||||
event_id_convert_to_bigint bigint DEFAULT 0 NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE push_rules (
|
||||
|
@ -18553,6 +18582,24 @@ CREATE SEQUENCE vulnerability_feedback_id_seq
|
|||
|
||||
ALTER SEQUENCE vulnerability_feedback_id_seq OWNED BY vulnerability_feedback.id;
|
||||
|
||||
CREATE TABLE vulnerability_finding_evidences (
|
||||
id bigint NOT NULL,
|
||||
created_at timestamp with time zone NOT NULL,
|
||||
updated_at timestamp with time zone NOT NULL,
|
||||
vulnerability_occurrence_id bigint NOT NULL,
|
||||
summary text,
|
||||
CONSTRAINT check_5773b236fb CHECK ((char_length(summary) <= 8000000))
|
||||
);
|
||||
|
||||
CREATE SEQUENCE vulnerability_finding_evidences_id_seq
|
||||
START WITH 1
|
||||
INCREMENT BY 1
|
||||
NO MINVALUE
|
||||
NO MAXVALUE
|
||||
CACHE 1;
|
||||
|
||||
ALTER SEQUENCE vulnerability_finding_evidences_id_seq OWNED BY vulnerability_finding_evidences.id;
|
||||
|
||||
CREATE TABLE vulnerability_finding_fingerprints (
|
||||
id bigint NOT NULL,
|
||||
finding_id bigint NOT NULL,
|
||||
|
@ -19840,6 +19887,8 @@ ALTER TABLE ONLY vulnerability_external_issue_links ALTER COLUMN id SET DEFAULT
|
|||
|
||||
ALTER TABLE ONLY vulnerability_feedback ALTER COLUMN id SET DEFAULT nextval('vulnerability_feedback_id_seq'::regclass);
|
||||
|
||||
ALTER TABLE ONLY vulnerability_finding_evidences ALTER COLUMN id SET DEFAULT nextval('vulnerability_finding_evidences_id_seq'::regclass);
|
||||
|
||||
ALTER TABLE ONLY vulnerability_finding_fingerprints ALTER COLUMN id SET DEFAULT nextval('vulnerability_finding_fingerprints_id_seq'::regclass);
|
||||
|
||||
ALTER TABLE ONLY vulnerability_finding_links ALTER COLUMN id SET DEFAULT nextval('vulnerability_finding_links_id_seq'::regclass);
|
||||
|
@ -21465,6 +21514,9 @@ ALTER TABLE ONLY vulnerability_external_issue_links
|
|||
ALTER TABLE ONLY vulnerability_feedback
|
||||
ADD CONSTRAINT vulnerability_feedback_pkey PRIMARY KEY (id);
|
||||
|
||||
ALTER TABLE ONLY vulnerability_finding_evidences
|
||||
ADD CONSTRAINT vulnerability_finding_evidences_pkey PRIMARY KEY (id);
|
||||
|
||||
ALTER TABLE ONLY vulnerability_finding_fingerprints
|
||||
ADD CONSTRAINT vulnerability_finding_fingerprints_pkey PRIMARY KEY (id);
|
||||
|
||||
|
@ -21700,6 +21752,8 @@ CREATE UNIQUE INDEX epic_user_mentions_on_epic_id_index ON epic_user_mentions US
|
|||
|
||||
CREATE INDEX expired_artifacts_temp_index ON ci_job_artifacts USING btree (id, created_at) WHERE ((expire_at IS NULL) AND (date(timezone('UTC'::text, created_at)) < '2020-06-22'::date));
|
||||
|
||||
CREATE INDEX finding_evidences_on_vulnerability_occurrence_id ON vulnerability_finding_evidences USING btree (vulnerability_occurrence_id);
|
||||
|
||||
CREATE INDEX finding_links_on_vulnerability_occurrence_id ON vulnerability_finding_links USING btree (vulnerability_occurrence_id);
|
||||
|
||||
CREATE INDEX idx_audit_events_on_entity_id_desc_author_id_created_at ON audit_events_archived USING btree (entity_id, entity_type, id DESC, author_id, created_at);
|
||||
|
@ -22158,6 +22212,10 @@ CREATE UNIQUE INDEX index_ci_job_variables_on_key_and_job_id ON ci_job_variables
|
|||
|
||||
CREATE UNIQUE INDEX index_ci_namespace_monthly_usages_on_namespace_id_and_date ON ci_namespace_monthly_usages USING btree (namespace_id, date);
|
||||
|
||||
CREATE INDEX index_ci_pipeline_artifacts_failed_verification ON ci_pipeline_artifacts USING btree (verification_retry_at NULLS FIRST) WHERE (verification_state = 3);
|
||||
|
||||
CREATE INDEX index_ci_pipeline_artifacts_needs_verification ON ci_pipeline_artifacts USING btree (verification_state) WHERE ((verification_state = 0) OR (verification_state = 3));
|
||||
|
||||
CREATE INDEX index_ci_pipeline_artifacts_on_expire_at ON ci_pipeline_artifacts USING btree (expire_at);
|
||||
|
||||
CREATE INDEX index_ci_pipeline_artifacts_on_pipeline_id ON ci_pipeline_artifacts USING btree (pipeline_id);
|
||||
|
@ -22166,6 +22224,10 @@ CREATE UNIQUE INDEX index_ci_pipeline_artifacts_on_pipeline_id_and_file_type ON
|
|||
|
||||
CREATE INDEX index_ci_pipeline_artifacts_on_project_id ON ci_pipeline_artifacts USING btree (project_id);
|
||||
|
||||
CREATE INDEX index_ci_pipeline_artifacts_pending_verification ON ci_pipeline_artifacts USING btree (verified_at NULLS FIRST) WHERE (verification_state = 0);
|
||||
|
||||
CREATE INDEX index_ci_pipeline_artifacts_verification_state ON ci_pipeline_artifacts USING btree (verification_state);
|
||||
|
||||
CREATE INDEX index_ci_pipeline_chat_data_on_chat_name_id ON ci_pipeline_chat_data USING btree (chat_name_id);
|
||||
|
||||
CREATE UNIQUE INDEX index_ci_pipeline_chat_data_on_pipeline_id ON ci_pipeline_chat_data USING btree (pipeline_id);
|
||||
|
@ -24620,6 +24682,10 @@ CREATE TRIGGER table_sync_trigger_b99eb6998c AFTER INSERT OR DELETE OR UPDATE ON
|
|||
|
||||
CREATE TRIGGER table_sync_trigger_ee39a25f9d AFTER INSERT OR DELETE OR UPDATE ON audit_events FOR EACH ROW EXECUTE PROCEDURE table_sync_function_2be879775d();
|
||||
|
||||
CREATE TRIGGER trigger_07c94931164e BEFORE INSERT OR UPDATE ON push_event_payloads FOR EACH ROW EXECUTE PROCEDURE trigger_07c94931164e();
|
||||
|
||||
CREATE TRIGGER trigger_69523443cc10 BEFORE INSERT OR UPDATE ON events FOR EACH ROW EXECUTE PROCEDURE trigger_69523443cc10();
|
||||
|
||||
CREATE TRIGGER trigger_has_external_issue_tracker_on_delete AFTER DELETE ON services FOR EACH ROW WHEN ((((old.category)::text = 'issue_tracker'::text) AND (old.active = true) AND (old.project_id IS NOT NULL))) EXECUTE PROCEDURE set_has_external_issue_tracker();
|
||||
|
||||
CREATE TRIGGER trigger_has_external_issue_tracker_on_insert AFTER INSERT ON services FOR EACH ROW WHEN ((((new.category)::text = 'issue_tracker'::text) AND (new.active = true) AND (new.project_id IS NOT NULL))) EXECUTE PROCEDURE set_has_external_issue_tracker();
|
||||
|
@ -26573,6 +26639,9 @@ ALTER TABLE ONLY requirements_management_test_reports
|
|||
ALTER TABLE ONLY pool_repositories
|
||||
ADD CONSTRAINT fk_rails_d2711daad4 FOREIGN KEY (source_project_id) REFERENCES projects(id) ON DELETE SET NULL;
|
||||
|
||||
ALTER TABLE ONLY web_hooks
|
||||
ADD CONSTRAINT fk_rails_d35697648e FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE NOT VALID;
|
||||
|
||||
ALTER TABLE ONLY group_group_links
|
||||
ADD CONSTRAINT fk_rails_d3a0488427 FOREIGN KEY (shared_group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
|
||||
|
||||
|
@ -26636,6 +26705,9 @@ ALTER TABLE ONLY cluster_platforms_kubernetes
|
|||
ALTER TABLE ONLY ci_builds_metadata
|
||||
ADD CONSTRAINT fk_rails_e20479742e FOREIGN KEY (build_id) REFERENCES ci_builds(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY vulnerability_finding_evidences
|
||||
ADD CONSTRAINT fk_rails_e3205a0c65 FOREIGN KEY (vulnerability_occurrence_id) REFERENCES vulnerability_occurrences(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY vulnerability_occurrence_identifiers
|
||||
ADD CONSTRAINT fk_rails_e4ef6d027c FOREIGN KEY (occurrence_id) REFERENCES vulnerability_occurrences(id) ON DELETE CASCADE;
|
||||
|
||||
|
|
|
@ -49,6 +49,73 @@ Support for syncing past branch and commit data [is planned](https://gitlab.com/
|
|||
|
||||
For more information, see [Usage](index.md#usage).
|
||||
|
||||
## Install the GitLab Jira Cloud application for self-managed instances **(FREE SELF)**
|
||||
|
||||
If your GitLab instance is self-managed, you must follow some
|
||||
extra steps to install the GitLab Jira Cloud application.
|
||||
|
||||
Each Jira Cloud application must be installed from a single location. Jira fetches
|
||||
a [manifest file](https://developer.atlassian.com/cloud/jira/platform/connect-app-descriptor/)
|
||||
from the location you provide. The manifest file describes the application to the system. To support
|
||||
self-managed GitLab instances with Jira Cloud, you can either:
|
||||
|
||||
- [Install the application manually](#install-the-application-manually).
|
||||
- [Create a Marketplace listing](#create-a-marketplace-listing).
|
||||
|
||||
### Install the application manually **(FREE SELF)**
|
||||
|
||||
You can configure your Atlassian Cloud instance to allow you to install applications
|
||||
from outside the Marketplace, which allows you to install the application:
|
||||
|
||||
1. Sign in to your Jira instance as a user with administrator permissions.
|
||||
1. Place your Jira instance into
|
||||
[development mode](https://developer.atlassian.com/cloud/jira/platform/getting-started-with-connect/#step-2--enable-development-mode).
|
||||
1. Sign in to your GitLab application as a user with [Administrator](../../user/permissions.md) permissions.
|
||||
1. Install the GitLab application from your self-managed GitLab instance, as
|
||||
described in the [Atlassian developer guides](https://developer.atlassian.com/cloud/jira/platform/getting-started-with-connect/#step-3--install-and-test-your-app)).
|
||||
1. In your Jira instance, go to **Apps > Manage Apps** and click **Upload app**:
|
||||
|
||||
![Image showing button labeled "upload app"](img/jira-upload-app_v13_11.png)
|
||||
|
||||
1. For **App descriptor URL**, provide full URL to your manifest file, modifying this
|
||||
URL based on your instance configuration: `https://your.domain/your-path/-/jira_connect/app_descriptor.json`
|
||||
1. Click **Upload**, and Jira fetches the content of your `app_descriptor` file and installs
|
||||
it for you.
|
||||
1. If the upload is successful, Jira displays a modal panel: **Installed and ready to go!**
|
||||
Click **Get started** to configure the integration.
|
||||
|
||||
![Image showing success modal](img/jira-upload-app-success_v13_11.png)
|
||||
|
||||
The **GitLab for Jira** app now displays under **Manage apps**. You can also
|
||||
click **Get started** to open the configuration page rendered from your GitLab instance.
|
||||
|
||||
NOTE:
|
||||
If you make changes to the application descriptor, you must uninstall, then reinstall, the
|
||||
application.
|
||||
|
||||
### Create a Marketplace listing **(FREE SELF)**
|
||||
|
||||
If you prefer to not use development mode on your Jira instance, you can create
|
||||
your own Marketplace listing for your instance, which enables your application
|
||||
to be installed from the Atlassian Marketplace.
|
||||
|
||||
For full instructions, review the Atlassian [guide to creating a marketplace listing](https://developer.atlassian.com/platform/marketplace/installing-cloud-apps/#creating-the-marketplace-listing). To create a
|
||||
Marketplace listing, you must:
|
||||
|
||||
1. Register as a Marketplace vendor.
|
||||
1. List your application, using the application descriptor URL.
|
||||
- Your manifest file is located at: `https://your.domain/your-path/-/jira_connect/app_descriptor.json`
|
||||
- GitLab recommends you list your application as `private`, because public
|
||||
applications can be viewed and installed by any user.
|
||||
1. Generate test license tokens for your application.
|
||||
|
||||
Review the
|
||||
[official Atlassian documentation](https://developer.atlassian.com/platform/marketplace/installing-cloud-apps/#creating-the-marketplace-listing)
|
||||
for details.
|
||||
|
||||
NOTE:
|
||||
DVCS means distributed version control system.
|
||||
|
||||
## Troubleshooting GitLab for Jira
|
||||
|
||||
The GitLab for Jira App uses an iframe to add namespaces on the settings page. Some browsers block cross-site cookies. This can lead to a message saying that the user needs to log in on GitLab.com even though the user is already logged in.
|
||||
|
|
Before Width: | Height: | Size: 11 KiB After Width: | Height: | Size: 11 KiB |
Before Width: | Height: | Size: 20 KiB After Width: | Height: | Size: 20 KiB |
|
@ -26,101 +26,21 @@ GitLab activity and Jira issues, with additional features:
|
|||
- [Jira development panel integration](../../../integration/jira/index.md). Connects all
|
||||
GitLab projects under a specified group or personal namespace.
|
||||
|
||||
Jira development panel integration configuration depends on whether you are
|
||||
using Jira on [Atlassian cloud](https://www.atlassian.com/cloud) or on your own server:
|
||||
Jira development panel integration configuration depends on whether:
|
||||
|
||||
- *If your Jira instance is hosted on Atlassian Cloud:*
|
||||
- **GitLab.com (SaaS) customers**: Use the
|
||||
[GitLab.com for Jira Cloud](https://marketplace.atlassian.com/apps/1221011/gitlab-com-for-jira-cloud?hosting=cloud&tab=overview)
|
||||
application installed from the [Atlassian Marketplace](https://marketplace.atlassian.com).
|
||||
- **Self-managed installs**: Use the
|
||||
[GitLab.com for Jira Cloud](https://marketplace.atlassian.com/apps/1221011/gitlab-com-for-jira-cloud?hosting=cloud&tab=overview), with
|
||||
[this workaround process](#install-the-gitlab-jira-cloud-application-for-self-managed-instances). Read the
|
||||
[relevant issue](https://gitlab.com/gitlab-org/gitlab/-/issues/268278) for more information.
|
||||
- *If your Jira instance is hosted on your own server:*
|
||||
Use the [Jira DVCS connector](../../../integration/jira/index.md).
|
||||
- You're using GitLab.com or a self-managed GitLab instance.
|
||||
- You're using Jira on [Atlassian cloud](https://www.atlassian.com/cloud) or on your own server.
|
||||
|
||||
### Install the GitLab Jira Cloud application for self-managed instances **(FREE SELF)**
|
||||
The integration you choose depends on the capabilities you require.
|
||||
You can also install both at the same time.
|
||||
|
||||
If your GitLab instance is self-managed, you must follow some
|
||||
extra steps to install the GitLab Jira Cloud application.
|
||||
|
||||
Each Jira Cloud application must be installed from a single location. Jira fetches
|
||||
a [manifest file](https://developer.atlassian.com/cloud/jira/platform/connect-app-descriptor/)
|
||||
from the location you provide. The manifest file describes the application to the system. To support
|
||||
self-managed GitLab instances with Jira Cloud, you can either:
|
||||
|
||||
- [Install the application manually](#install-the-application-manually).
|
||||
- [Create a Marketplace listing](#create-a-marketplace-listing).
|
||||
|
||||
#### Install the application manually **(FREE SELF)**
|
||||
|
||||
You can configure your Atlassian Cloud instance to allow you to install applications
|
||||
from outside the Marketplace, which allows you to install the application:
|
||||
|
||||
1. Sign in to your Jira instance as a user with administrator permissions.
|
||||
1. Place your Jira instance into
|
||||
[development mode](https://developer.atlassian.com/cloud/jira/platform/getting-started-with-connect/#step-2--enable-development-mode).
|
||||
1. Sign in to your GitLab application as a user with [Administrator](../../permissions.md) permissions.
|
||||
1. Install the GitLab application from your self-managed GitLab instance, as
|
||||
described in the [Atlassian developer guides](https://developer.atlassian.com/cloud/jira/platform/getting-started-with-connect/#step-3--install-and-test-your-app)).
|
||||
1. In your Jira instance, go to **Apps > Manage Apps** and click **Upload app**:
|
||||
|
||||
![Image showing button labeled "upload app"](jira-upload-app_v13_11.png)
|
||||
|
||||
1. For **App descriptor URL**, provide full URL to your manifest file, modifying this
|
||||
URL based on your instance configuration: `https://your.domain/your-path/-/jira_connect/app_descriptor.json`
|
||||
1. Click **Upload**, and Jira fetches the content of your `app_descriptor` file and installs
|
||||
it for you.
|
||||
1. If the upload is successful, Jira displays a modal panel: **Installed and ready to go!**
|
||||
Click **Get started** to configure the integration.
|
||||
|
||||
![Image showing success modal](jira-upload-app-success_v13_11.png)
|
||||
|
||||
The **GitLab for Jira** app now displays under **Manage apps**. You can also
|
||||
click **Get started** to open the configuration page rendered from your GitLab instance.
|
||||
|
||||
NOTE:
|
||||
If you make changes to the application descriptor, you must uninstall, then reinstall, the
|
||||
application.
|
||||
|
||||
#### Create a Marketplace listing **(FREE SELF)**
|
||||
|
||||
If you prefer to not use development mode on your Jira instance, you can create
|
||||
your own Marketplace listing for your instance, which enables your application
|
||||
to be installed from the Atlassian Marketplace.
|
||||
|
||||
For full instructions, review the Atlassian [guide to creating a marketplace listing](https://developer.atlassian.com/platform/marketplace/installing-cloud-apps/#creating-the-marketplace-listing). To create a
|
||||
Marketplace listing, you must:
|
||||
|
||||
1. Register as a Marketplace vendor.
|
||||
1. List your application, using the application descriptor URL.
|
||||
- Your manifest file is located at: `https://your.domain/your-path/-/jira_connect/app_descriptor.json`
|
||||
- GitLab recommends you list your application as `private`, because public
|
||||
applications can be viewed and installed by any user.
|
||||
1. Generate test license tokens for your application.
|
||||
|
||||
Review the
|
||||
[official Atlassian documentation](https://developer.atlassian.com/platform/marketplace/installing-cloud-apps/#creating-the-marketplace-listing)
|
||||
for details.
|
||||
|
||||
NOTE:
|
||||
DVCS means distributed version control system.
|
||||
|
||||
## Feature comparison
|
||||
|
||||
The integration to use depends on the capabilities you require. You can install both at the same
|
||||
time.
|
||||
|
||||
| Capability | Jira integration | Jira Development Panel integration |
|
||||
|:----------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------|
|
||||
| Mention of Jira issue ID in GitLab is automatically linked to that issue | Yes | No |
|
||||
| Mention of Jira issue ID in GitLab issue/MR is reflected in the Jira issue | Yes, as a Jira comment with the GitLab issue/MR title and a link back to it. Its first mention also adds the GitLab page to the Jira issue under “Web links”. | Yes, in the issue's Development panel |
|
||||
| Mention of Jira issue ID in GitLab commit message is reflected in the issue | Yes. The entire commit message is added to the Jira issue as a comment and under “Web links”, each with a link back to the commit in GitLab. | Yes, in the issue's Development panel and optionally with a custom comment on the Jira issue using Jira Smart Commits. |
|
||||
| Mention of Jira issue ID in GitLab branch names is reflected in Jira issue | No | Yes, in the issue's Development panel |
|
||||
| Pipeline status is shown in Jira issue | No | Yes, in the issue's Development panel when using Jira Cloud and the GitLab application. |
|
||||
| Deployment status is shown in Jira issue | No | Yes, in the issue's Development panel when using Jira Cloud and the GitLab application. |
|
||||
| Record Jira time tracking information against an issue | No | Yes. Time can be specified via Jira Smart Commits. |
|
||||
| Transition or close a Jira issue with a Git commit or merge request | Yes. Only a single transition type, typically configured to close the issue by setting it to Done. | Yes. Transition to any state using Jira Smart Commits. |
|
||||
| Display a list of Jira issues | Yes **(PREMIUM)** | No |
|
||||
| Create a Jira issue from a vulnerability or finding **(ULTIMATE)** | Yes | No |
|
||||
| Capability | Jira integration | Jira Development panel integration |
|
||||
|-|-|-|
|
||||
| Mention a Jira issue ID in GitLab and a link to the Jira issue is created. | Yes. | No. |
|
||||
| Mention a Jira issue ID in GitLab and the Jira issue shows the GitLab issue or merge request. | Yes. A Jira comment with the GitLab issue or MR title links to GitLab. The first mention is also added to the Jira issue under **Web links**. | Yes, in the issue's Development panel. |
|
||||
| Mention a Jira issue ID in a GitLab commit message and the Jira issue shows the commit message. | Yes. The entire commit message is displayed in the Jira issue as a comment and under **Web links**. Each message links back to the commit in GitLab. | Yes, in the issue's Development panel and optionally with a custom comment on the Jira issue using Jira Smart Commits. |
|
||||
| Mention a Jira issue ID in a GitLab branch name and the Jira issue shows the branch name. | No. | Yes, in the issue's Development panel. |
|
||||
| Add Jira time tracking to an issue. | No. | Yes. Time can be specified using Jira Smart Commits. |
|
||||
| Use a Git commit or merge request to transition or close a Jira issue. | Yes. Only a single transition type, typically configured to close the issue by setting it to Done. | Yes. Transition to any state using Jira Smart Commits. |
|
||||
| Display a list of Jira issues. | Yes. **(PREMIUM)** | No. |
|
||||
| Create a Jira issue from a vulnerability or finding. **(ULTIMATE)** | Yes. | No. |
|
||||
|
|
|
@ -186,16 +186,14 @@ module API
        use :pagination
        requires :sha, type: String, desc: 'A commit sha, or the name of a branch or tag'
      end
      # rubocop: disable CodeReuse/ActiveRecord
      get ':id/repository/commits/:sha/comments', requirements: API::COMMIT_ENDPOINT_REQUIREMENTS do
        commit = user_project.commit(params[:sha])

        not_found! 'Commit' unless commit
        notes = commit.notes.order(:created_at)
        notes = commit.notes.with_api_entity_associations.fresh

        present paginate(notes), with: Entities::CommitNote
      end
      # rubocop: enable CodeReuse/ActiveRecord

      desc 'Cherry pick commit into a branch' do
        detail 'This feature was introduced in GitLab 8.15'
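The N+1 fix swaps the bare `order(:created_at)` query for the `with_api_entity_associations` scope added to `Note` earlier in this diff, so author and diff-file records are preloaded before the entity renders each comment. A sketch of the resulting query pattern, reusing only names that appear in this diff (`fresh` is treated here as the existing ordering scope on notes):

```ruby
# Sketch of the endpoint's query path after this change (scope names from this diff).
notes = commit.notes.with_api_entity_associations.fresh
# preload(:note_diff_file, :author) means rendering N comments no longer issues
# one extra query per note for its author or diff file.
```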
@ -67,6 +67,16 @@ module API
          PROJECT_ATTACHMENT_SIZE_EXEMPT
        end

        # This is to help determine which projects to use in https://gitlab.com/gitlab-org/gitlab/-/issues/325788
        def log_if_upload_exceed_max_size(user_project, file)
          return if file.size <= user_project.max_attachment_size
          return if exempt_from_global_attachment_size?(user_project)

          if file.size > user_project.max_attachment_size
            Gitlab::AppLogger.info({ message: "File exceeds maximum size", file_bytes: file.size, project_id: user_project.id, project_path: user_project.full_path })
          end
        end
      end

      helpers do

@ -576,6 +586,8 @@ module API
        requires :file, types: [Rack::Multipart::UploadedFile, ::API::Validations::Types::WorkhorseFile], desc: 'The attachment file to be uploaded'
      end
      post ":id/uploads", feature_category: :not_owned do
        log_if_upload_exceed_max_size(user_project, params[:file])

        service = UploadService.new(user_project, params[:file])
        service.override_max_attachment_size = project_attachment_size(user_project)
        upload = service.execute
@ -34,12 +34,18 @@ module Gitlab
        parent_batch_relation = relation_scoped_to_range(batch_table, batch_column, start_id, end_id)

        parent_batch_relation.each_batch(column: batch_column, of: sub_batch_size) do |sub_batch|
          sub_batch.update_all("#{quoted_copy_to}=#{quoted_copy_from}")
          batch_metrics.time_operation(:update_all) do
            sub_batch.update_all("#{quoted_copy_to}=#{quoted_copy_from}")
          end

          sleep(PAUSE_SECONDS)
        end
      end

      def batch_metrics
        @batch_metrics ||= Gitlab::Database::BackgroundMigration::BatchMetrics.new
      end

      private

      def connection
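With this change every `update_all` sub-batch is timed, and the per-label durations accumulate on the job's `batch_metrics` object; the wrapper further down in this diff persists them to the new `metrics` column. A sketch of reading them back after a run, using only names and the argument order shown in this diff (the id and batch-size variables are placeholders):

```ruby
# Sketch: timings collected while copying one batch (names from this diff; ids are illustrative).
job = Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob.new
job.perform(start_id, end_id, 'events', 'id', sub_batch_size, 'id', 'id_convert_to_bigint')

job.batch_metrics.timings[:update_all]
# => [0.42, 0.39, ...]  one duration (seconds, monotonic clock) per sub-batch update
```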
33  lib/gitlab/database/background_migration/batch_metrics.rb  Normal file
@ -0,0 +1,33 @@
# frozen_string_literal: true

module Gitlab
  module Database
    module BackgroundMigration
      class BatchMetrics
        attr_reader :timings

        def initialize
          @timings = {}
        end

        def time_operation(label)
          start_time = monotonic_time

          yield

          timings_for_label(label) << monotonic_time - start_time
        end

        private

        def timings_for_label(label)
          timings[label] ||= []
        end

        def monotonic_time
          Gitlab::Metrics::System.monotonic_time
        end
      end
    end
  end
end
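A short usage sketch of `BatchMetrics` on its own, matching the calls made by `CopyColumnUsingBackgroundMigrationJob` above; labels are arbitrary symbols and each call appends one wall-clock duration to that label's list:

```ruby
metrics = Gitlab::Database::BackgroundMigration::BatchMetrics.new

metrics.time_operation(:update_all) { sleep 0.1 }    # any block to be timed
metrics.time_operation(:update_all) { sleep 0.2 }
metrics.time_operation(:vacuum_hint) { sleep 0.05 }  # :vacuum_hint is an illustrative label

metrics.timings
# => { update_all: [~0.1, ~0.2], vacuum_hint: [~0.05] }  durations in seconds (monotonic clock)
```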
@ -23,6 +23,10 @@ module Gitlab
          finished: 3
        }

        def self.active_migration
          active.queue_order.first
        end

        def interval_elapsed?
          last_job.nil? || last_job.created_at <= Time.current - interval
        end
@ -8,6 +8,16 @@ module Gitlab
          @migration_wrapper = migration_wrapper
        end

        # Runs the next batched_job for a batched_background_migration.
        #
        # The batch bounds of the next job are calculated at runtime, based on the migration
        # configuration and the bounds of the most recently created batched_job. Updating the
        # migration configuration will cause future jobs to use the updated batch sizes.
        #
        # The job instance will automatically receive a set of arguments based on the migration
        # configuration. For more details, see the BatchedMigrationWrapper class.
        #
        # Note that this method is primarily intended to called by a scheduled worker.
        def run_migration_job(active_migration)
          if next_batched_job = create_next_batched_job!(active_migration)
            migration_wrapper.perform(next_batched_job)

@ -16,7 +26,15 @@ module Gitlab
          end
        end

        # Runs all remaining batched_jobs for a batched_background_migration.
        #
        # This method is intended to be used in a test/dev environment to execute the background
        # migration inline. It should NOT be used in a real environment for any non-trivial migrations.
        def run_entire_migration(migration)
          unless Rails.env.development? || Rails.env.test?
            raise 'this method is not intended for use in real environments'
          end

          while migration.active?
            run_migration_job(migration)
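For local verification, the commit also gives developers an inline path that bypasses the cron cadence entirely. A sketch of driving it from a development or test console, using only the classes and methods introduced above (the migration lookup is illustrative):

```ruby
# Dev/test-only sketch: run every remaining batch of one migration inline.
migration = Gitlab::Database::BackgroundMigration::BatchedMigration.active_migration

runner = Gitlab::Database::BackgroundMigration::BatchedMigrationRunner.new
runner.run_entire_migration(migration) # raises outside development/test environments
```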
@ -4,6 +4,13 @@ module Gitlab
  module Database
    module BackgroundMigration
      class BatchedMigrationWrapper
        # Wraps the execution of a batched_background_migration.
        #
        # Updates the job's tracking records with the status of the migration
        # when starting and finishing execution, and optionally saves batch_metrics
        # the migration provides, if any are given.
        #
        # The job's batch_metrics are serialized to JSON for storage.
        def perform(batch_tracking_record)
          start_tracking_execution(batch_tracking_record)

@ -34,6 +41,10 @@ module Gitlab
            tracking_record.migration_column_name,
            tracking_record.sub_batch_size,
            *tracking_record.migration_job_arguments)

          if job_instance.respond_to?(:batch_metrics)
            tracking_record.metrics = job_instance.batch_metrics
          end
        end

        def finish_tracking_execution(tracking_record)
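The wrapper only persists metrics when the job object opts in, so a batched migration job class participates simply by exposing a `batch_metrics` reader. A minimal hypothetical job class sketched from that contract (the class name and the work inside `perform` are illustrative, not part of the commit):

```ruby
# Hypothetical batched migration job that reports metrics (sketch only).
class MyBatchedJob
  def perform(start_id, end_id, batch_table, batch_column, sub_batch_size, *job_arguments)
    batch_metrics.time_operation(:update_all) do
      # ... perform the batched work for [start_id, end_id] here ...
    end
  end

  # Exposing this reader is what lets BatchedMigrationWrapper copy the timings
  # into batched_background_migration_jobs.metrics (jsonb) after the run.
  def batch_metrics
    @batch_metrics ||= Gitlab::Database::BackgroundMigration::BatchMetrics.new
  end
end
```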
@ -265,6 +265,7 @@ excluded_attributes:
    - :issue_id
  push_event_payload:
    - :event_id
    - :event_id_convert_to_bigint
  project_badges:
    - :group_id
  resource_label_events:

@ -287,6 +288,7 @@ excluded_attributes:
    - :label_id
  events:
    - :target_id
    - :id_convert_to_bigint
  timelogs:
    - :issue_id
    - :merge_request_id
@ -24131,7 +24131,7 @@ msgstr ""
msgid "ProjectSettings|Enable \"Delete source branch\" option by default"
msgstr ""

msgid "ProjectSettings|Enable merge trains."
msgid "ProjectSettings|Enable merge trains"
msgstr ""

msgid "ProjectSettings|Enable merged results pipelines"
@ -87,7 +87,6 @@ RSpec.describe 'Database schema' do
    users_star_projects: %w[user_id],
    vulnerability_identifiers: %w[external_id],
    vulnerability_scanners: %w[external_id],
    web_hooks: %w[group_id],
    web_hook_logs_part_0c5294f417: %w[web_hook_id]
  }.with_indifferent_access.freeze
|
@ -64,5 +64,13 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
|
|||
expect(test_table.where('name is NULL and name_convert_to_text is NULL').pluck(:id)).to contain_exactly(15)
|
||||
expect(test_table.where("name_convert_to_text = 'no name'").count).to eq(0)
|
||||
end
|
||||
|
||||
it 'tracks timings of queries' do
|
||||
expect(subject.batch_metrics.timings).to be_empty
|
||||
|
||||
subject.perform(10, 20, table_name, 'id', sub_batch_size, 'name', 'name_convert_to_text')
|
||||
|
||||
expect(subject.batch_metrics.timings[:update_all]).not_to be_empty
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -0,0 +1,31 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'fast_spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Database::BackgroundMigration::BatchMetrics do
|
||||
let(:batch_metrics) { described_class.new }
|
||||
|
||||
describe '#time_operation' do
|
||||
it 'tracks the duration of the operation using monotonic time' do
|
||||
expect(batch_metrics.timings).to be_empty
|
||||
|
||||
expect(Gitlab::Metrics::System).to receive(:monotonic_time)
|
||||
.exactly(6).times
|
||||
.and_return(0.0, 111.0, 200.0, 290.0, 300.0, 410.0)
|
||||
|
||||
batch_metrics.time_operation(:my_label) do
|
||||
# some operation
|
||||
end
|
||||
|
||||
batch_metrics.time_operation(:my_other_label) do
|
||||
# some operation
|
||||
end
|
||||
|
||||
batch_metrics.time_operation(:my_label) do
|
||||
# some operation
|
||||
end
|
||||
|
||||
expect(batch_metrics.timings).to eq(my_label: [111.0, 110.0], my_other_label: [90.0])
|
||||
end
|
||||
end
|
||||
end
|
|
@ -139,6 +139,19 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
|
|||
end
|
||||
|
||||
describe '#run_entire_migration' do
|
||||
context 'when not in a development or test environment' do
|
||||
it 'raises an error' do
|
||||
environment = double('environment', development?: false, test?: false)
|
||||
migration = build(:batched_background_migration, :finished)
|
||||
|
||||
allow(Rails).to receive(:env).and_return(environment)
|
||||
|
||||
expect do
|
||||
runner.run_entire_migration(migration)
|
||||
end.to raise_error('this method is not intended for use in real environments')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the given migration is not active' do
|
||||
it 'does not create and run migration jobs' do
|
||||
migration = build(:batched_background_migration, :finished)
|
||||
|
|
|
@ -29,6 +29,16 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
|
|||
end
|
||||
end
|
||||
|
||||
describe '.active_migration' do
|
||||
let!(:migration1) { create(:batched_background_migration, :finished) }
|
||||
let!(:migration2) { create(:batched_background_migration, :active) }
|
||||
let!(:migration3) { create(:batched_background_migration, :active) }
|
||||
|
||||
it 'returns the first active migration according to queue order' do
|
||||
expect(described_class.active_migration).to eq(migration2)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#interval_elapsed?' do
|
||||
context 'when the migration has no last_job' do
|
||||
let(:batched_migration) { build(:batched_background_migration) }
|
||||
|
|
|
@ -9,16 +9,24 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
|
|||
let_it_be(:active_migration) { create(:batched_background_migration, :active, job_arguments: [:id, :other_id]) }
|
||||
|
||||
let!(:job_record) { create(:batched_background_migration_job, batched_migration: active_migration) }
|
||||
let(:job_instance) { double('job instance', batch_metrics: {}) }
|
||||
|
||||
before do
|
||||
allow(job_class).to receive(:new).and_return(job_instance)
|
||||
end
|
||||
|
||||
it 'runs the migration job' do
|
||||
expect_next_instance_of(job_class) do |job_instance|
|
||||
expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
|
||||
end
|
||||
expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
|
||||
|
||||
migration_wrapper.perform(job_record)
|
||||
end
|
||||
|
||||
it 'updates the the tracking record in the database' do
|
||||
it 'updates the tracking record in the database' do
|
||||
test_metrics = { 'my_metris' => 'some value' }
|
||||
|
||||
expect(job_instance).to receive(:perform)
|
||||
expect(job_instance).to receive(:batch_metrics).and_return(test_metrics)
|
||||
|
||||
expect(job_record).to receive(:update!).with(hash_including(attempts: 1, status: :running)).and_call_original
|
||||
|
||||
freeze_time do
|
||||
|
@ -29,14 +37,13 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
|
|||
expect(reloaded_job_record).not_to be_pending
|
||||
expect(reloaded_job_record.attempts).to eq(1)
|
||||
expect(reloaded_job_record.started_at).to eq(Time.current)
|
||||
expect(reloaded_job_record.metrics).to eq(test_metrics)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the migration job does not raise an error' do
|
||||
it 'marks the tracking record as succeeded' do
|
||||
expect_next_instance_of(job_class) do |job_instance|
|
||||
expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
|
||||
end
|
||||
expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
|
||||
|
||||
freeze_time do
|
||||
migration_wrapper.perform(job_record)
|
||||
|
@ -51,11 +58,9 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
|
|||
|
||||
context 'when the migration job raises an error' do
|
||||
it 'marks the tracking record as failed before raising the error' do
|
||||
expect_next_instance_of(job_class) do |job_instance|
|
||||
expect(job_instance).to receive(:perform)
|
||||
.with(1, 10, 'events', 'id', 1, 'id', 'other_id')
|
||||
.and_raise(RuntimeError, 'Something broke!')
|
||||
end
|
||||
expect(job_instance).to receive(:perform)
|
||||
.with(1, 10, 'events', 'id', 1, 'id', 'other_id')
|
||||
.and_raise(RuntimeError, 'Something broke!')
|
||||
|
||||
freeze_time do
|
||||
expect { migration_wrapper.perform(job_record) }.to raise_error(RuntimeError, 'Something broke!')
|
||||
|
|
|
@ -1,52 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe AuditEventArchived do
|
||||
let(:source_table) { AuditEvent }
|
||||
let(:destination_table) { described_class }
|
||||
|
||||
it 'has the same columns as the source table' do
|
||||
column_names_from_source_table = column_names(source_table)
|
||||
column_names_from_destination_table = column_names(destination_table)
|
||||
|
||||
expect(column_names_from_destination_table).to match_array(column_names_from_source_table)
|
||||
end
|
||||
|
||||
it 'has the same null constraints as the source table' do
|
||||
constraints_from_source_table = null_constraints(source_table)
|
||||
constraints_from_destination_table = null_constraints(destination_table)
|
||||
|
||||
expect(constraints_from_destination_table.to_a).to match_array(constraints_from_source_table.to_a)
|
||||
end
|
||||
|
||||
it 'inserts the same record as the one in the source table', :aggregate_failures do
|
||||
expect { create(:audit_event) }.to change { destination_table.count }.by(1)
|
||||
|
||||
event_from_source_table = source_table.connection.select_one(
|
||||
"SELECT * FROM #{source_table.table_name} ORDER BY created_at desc LIMIT 1"
|
||||
)
|
||||
event_from_destination_table = destination_table.connection.select_one(
|
||||
"SELECT * FROM #{destination_table.table_name} ORDER BY created_at desc LIMIT 1"
|
||||
)
|
||||
|
||||
expect(event_from_destination_table).to eq(event_from_source_table)
|
||||
end
|
||||
|
||||
def column_names(table)
|
||||
table.connection.select_all(<<~SQL)
|
||||
SELECT c.column_name
|
||||
FROM information_schema.columns c
|
||||
WHERE c.table_name = '#{table.table_name}'
|
||||
SQL
|
||||
end
|
||||
|
||||
def null_constraints(table)
|
||||
table.connection.select_all(<<~SQL)
|
||||
SELECT c.column_name, c.is_nullable
|
||||
FROM information_schema.columns c
|
||||
WHERE c.table_name = '#{table.table_name}'
|
||||
AND c.column_name != 'created_at'
|
||||
SQL
|
||||
end
|
||||
end
|
|
@ -1439,6 +1439,22 @@ RSpec.describe API::Commits do
|
|||
it_behaves_like 'ref comments'
|
||||
end
|
||||
end
|
||||
|
||||
context 'multiple notes' do
|
||||
let!(:note) { create(:diff_note_on_commit, project: project) }
|
||||
let(:commit) { note.commit }
|
||||
let(:commit_id) { note.commit_id }
|
||||
|
||||
it 'are returned without N + 1' do
|
||||
get api(route, current_user) # warm up the cache
|
||||
|
||||
control_count = ActiveRecord::QueryRecorder.new { get api(route, current_user) }.count
|
||||
|
||||
create(:diff_note_on_commit, project: project, author: create(:user))
|
||||
|
||||
expect { get api(route, current_user) }.not_to exceed_query_limit(control_count)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the commit is present on two projects' do
|
||||
|
|
|
@ -1519,6 +1519,8 @@ RSpec.describe API::Projects do
|
|||
end
|
||||
|
||||
describe "POST /projects/:id/uploads" do
|
||||
let(:file) { fixture_file_upload("spec/fixtures/dk.png", "image/png") }
|
||||
|
||||
before do
|
||||
project
|
||||
end
|
||||
|
@ -1528,7 +1530,7 @@ RSpec.describe API::Projects do
|
|||
expect(instance).to receive(:override_max_attachment_size=).with(project.max_attachment_size).and_call_original
|
||||
end
|
||||
|
||||
post api("/projects/#{project.id}/uploads", user), params: { file: fixture_file_upload("spec/fixtures/dk.png", "image/png") }
|
||||
post api("/projects/#{project.id}/uploads", user), params: { file: file }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
expect(json_response['alt']).to eq("dk")
|
||||
|
@ -1538,13 +1540,21 @@ RSpec.describe API::Projects do
|
|||
expect(json_response['full_path']).to start_with("/#{project.namespace.path}/#{project.path}/uploads")
|
||||
end
|
||||
|
||||
it "logs a warning if file exceeds attachment size" do
|
||||
allow(Gitlab::CurrentSettings).to receive(:max_attachment_size).and_return(0)
|
||||
|
||||
expect(Gitlab::AppLogger).to receive(:info).with(hash_including(message: 'File exceeds maximum size')).and_call_original
|
||||
|
||||
post api("/projects/#{project.id}/uploads", user), params: { file: file }
|
||||
end
|
||||
|
||||
shared_examples 'capped upload attachments' do
|
||||
it "limits the upload to 1 GB" do
|
||||
expect_next_instance_of(UploadService) do |instance|
|
||||
expect(instance).to receive(:override_max_attachment_size=).with(1.gigabyte).and_call_original
|
||||
end
|
||||
|
||||
post api("/projects/#{project.id}/uploads", user), params: { file: fixture_file_upload("spec/fixtures/dk.png", "image/png") }
|
||||
post api("/projects/#{project.id}/uploads", user), params: { file: file }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
end
|
||||
|
|
|
@ -0,0 +1,101 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Database::BatchedBackgroundMigrationWorker, '#perform', :clean_gitlab_redis_shared_state do
|
||||
include ExclusiveLeaseHelpers
|
||||
|
||||
let(:worker) { described_class.new }
|
||||
|
||||
context 'when the feature flag is disabled' do
|
||||
before do
|
||||
stub_feature_flags(execute_batched_migrations_on_schedule: false)
|
||||
end
|
||||
|
||||
it 'does nothing' do
|
||||
expect(worker).not_to receive(:active_migration)
|
||||
expect(worker).not_to receive(:run_active_migration)
|
||||
|
||||
worker.perform
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the feature flag is enabled' do
|
||||
before do
|
||||
stub_feature_flags(execute_batched_migrations_on_schedule: true)
|
||||
|
||||
allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration).and_return(nil)
|
||||
end
|
||||
|
||||
context 'when no active migrations exist' do
|
||||
it 'does nothing' do
|
||||
expect(worker).not_to receive(:run_active_migration)
|
||||
|
||||
worker.perform
|
||||
end
|
||||
end
|
||||
|
||||
context 'when active migrations exist' do
|
||||
let(:lease_key) { 'batched_background_migration_worker' }
|
||||
let(:migration) { build(:batched_background_migration, :active, interval: 2.minutes) }
|
||||
|
||||
before do
|
||||
allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration)
|
||||
.and_return(migration)
|
||||
|
||||
allow(migration).to receive(:interval_elapsed?).and_return(true)
|
||||
allow(migration).to receive(:reload)
|
||||
end
|
||||
|
||||
context 'when the reloaded migration is no longer active' do
|
||||
it 'does not run the migration' do
|
||||
expect_to_obtain_exclusive_lease(lease_key, timeout: 4.minutes)
|
||||
|
||||
expect(migration).to receive(:reload)
|
||||
expect(migration).to receive(:active?).and_return(false)
|
||||
|
||||
expect(worker).not_to receive(:run_active_migration)
|
||||
|
||||
worker.perform
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the interval has not elapsed' do
|
||||
it 'does not run the migration' do
|
||||
expect_to_obtain_exclusive_lease(lease_key, timeout: 4.minutes)
|
||||
|
||||
expect(migration).to receive(:interval_elapsed?).and_return(false)
|
||||
|
||||
expect(worker).not_to receive(:run_active_migration)
|
||||
|
||||
worker.perform
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the reloaded migration is still active and the interval has elapsed' do
|
||||
it 'runs the migration' do
|
||||
expect_to_obtain_exclusive_lease(lease_key, timeout: 4.minutes)
|
||||
|
||||
expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |instance|
|
||||
expect(instance).to receive(:run_migration_job).with(migration)
|
||||
end
|
||||
|
||||
expect(worker).to receive(:run_active_migration).and_call_original
|
||||
|
||||
worker.perform
|
||||
end
|
||||
end
|
||||
|
||||
it 'always cleans up the exclusive lease' do
|
||||
lease = stub_exclusive_lease_taken(lease_key, timeout: 4.minutes)
|
||||
|
||||
expect(lease).to receive(:try_obtain).and_return(true)
|
||||
|
||||
expect(worker).to receive(:run_active_migration).and_raise(RuntimeError, 'I broke')
|
||||
expect(lease).to receive(:cancel)
|
||||
|
||||
expect { worker.perform }.to raise_error(RuntimeError, 'I broke')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
0  vendor/gitignore/C++.gitignore  vendored  Normal file → Executable file
0  vendor/gitignore/Java.gitignore  vendored  Normal file → Executable file