Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-05-24 00:08:13 +00:00
parent 3cfe75e375
commit 190709c797
64 changed files with 723 additions and 164 deletions

View File

@ -1 +1 @@
14.6.1
14.7.0

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
module CommitSignatures
  # Stores the verification state of commits signed with SSH keys.
  # The signature itself is part of the commit body (stored in Gitaly);
  # this record only tracks verification metadata via the shared
  # CommitSignature concern.
  class SshSignature < ApplicationRecord
    include CommitSignature

    # The SSH key that produced the signature; a key is always required.
    belongs_to :key, optional: false
  end
end

View File

@ -127,24 +127,41 @@ module Namespaces
# Returns a scope of the current namespaces (optionally) and all of their
# descendants, using comparison operators over the materialized
# +traversal_ids+ path column rather than a recursive CTE.
#
# When the :linear_scopes_superset feature flag is enabled, the base rows
# are first reduced to a "superset" — rows whose ancestry is not already
# covered by another base row (see #superset_cte) — which shrinks the
# search space before the range comparison.
#
# @param include_self [Boolean] when true, the base namespaces themselves
#   are included in the result (<=); otherwise only strict descendants (<).
def self_and_descendants_with_comparison_operators(include_self: true)
  base = all.select(:traversal_ids)
  base = base.select(:id) if Feature.enabled?(:linear_scopes_superset)
  base_cte = Gitlab::SQL::CTE.new(:descendants_base_cte, base)
  namespaces = Arel::Table.new(:namespaces)

  withs = [base_cte.to_arel]
  froms = []
  if Feature.enabled?(:linear_scopes_superset)
    superset_cte = self.superset_cte(base_cte.table.name)
    withs += [superset_cte.to_arel]
    froms = [superset_cte.table]
  else
    froms = [base_cte.table]
  end

  # Order is important. namespace should be last to handle future joins.
  froms += [namespaces]

  # The table the range comparison is anchored on: either the superset CTE
  # or the plain base CTE, depending on the feature flag above.
  base_ref = froms.first

  # Bound the search space to ourselves (optional) and descendants.
  #
  # WHERE next_traversal_ids_sibling(base_cte.traversal_ids) > namespaces.traversal_ids
  records = unscoped
    .distinct
    .with(*withs)
    .from(froms)
    .where(next_sibling_func(base_ref[:traversal_ids]).gt(namespaces[:traversal_ids]))

  # AND base_cte.traversal_ids <= namespaces.traversal_ids
  if include_self
    records.where(base_ref[:traversal_ids].lteq(namespaces[:traversal_ids]))
  else
    records.where(base_ref[:traversal_ids].lt(namespaces[:traversal_ids]))
  end
end
@ -166,6 +183,23 @@ module Namespaces
end
end
# Builds a non-materialized CTE named :superset over +base_name+ that keeps
# only rows whose ancestry is NOT already covered by another row in the same
# base set: the lateral probe looks for any other base row appearing in this
# row's traversal_ids, and the WHERE keeps rows where no such cover exists.
def superset_cte(base_name)
  sql = <<~SQL
    SELECT d1.traversal_ids
    FROM #{base_name} d1
    LEFT JOIN LATERAL (
      SELECT d2.id as ancestor_id
      FROM #{base_name} d2
      WHERE d2.id = ANY(d1.traversal_ids)
      AND d2.id <> d1.id
      LIMIT 1
    ) covered ON TRUE
    WHERE covered.ancestor_id IS NULL
  SQL

  Gitlab::SQL::CTE.new(:superset, sql, materialized: false)
end
def ancestor_ctes
base_scope = all.select('namespaces.id', 'namespaces.traversal_ids')
base_cte = Gitlab::SQL::CTE.new(:base_ancestors_cte, base_scope)

View File

@ -0,0 +1,35 @@
# frozen_string_literal: true
module TimeTracking
  # A per-namespace category that timelogs can be labelled with, optionally
  # carrying a billing rate when the category is billable.
  class TimelogCategory < ApplicationRecord
    include StripAttribute
    include CaseSensitivity

    self.table_name = "timelog_categories"

    belongs_to :namespace, foreign_key: 'namespace_id'

    # Leading/trailing whitespace is removed from the name before validation.
    strip_attributes! :name

    validates :namespace, presence: true
    validates :name, presence: true
    # Names are unique within a namespace, ignoring case (backed by a unique
    # index on namespace_id, LOWER(name)).
    validates :name, uniqueness: { case_sensitive: false, scope: [:namespace_id] }
    validates :name, length: { maximum: 255 }
    validates :description, length: { maximum: 1024 }
    # Color is stored as a 7-char hex string (e.g. '#6699cc').
    validates :color, color: true, allow_blank: false, length: { maximum: 7 }
    # A billing rate is only required (and must be positive) for billable
    # categories.
    validates :billing_rate,
      if: :billable?,
      presence: true,
      numericality: { greater_than: 0 }

    DEFAULT_COLOR = ::Gitlab::Color.of('#6699cc')

    attribute :color, ::Gitlab::Database::Type::Color.new
    default_value_for :color, DEFAULT_COLOR

    # Case-insensitive lookup of a category by name within a namespace.
    def self.find_by_name(namespace_id, name)
      where(namespace: namespace_id)
        .iwhere(name: name)
    end
  end
end

View File

@ -0,0 +1,8 @@
---
name: linear_scopes_superset
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87643
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/362687
milestone: '15.1'
type: development
group: group::workspace
default_enabled: false

View File

@ -4,6 +4,8 @@ classes:
- ProjectCiCdSetting
feature_categories:
- continuous_integration
description: TODO
- continuous_delivery
- runner_fleet
description: Project-scoped settings related to the CI/CD domain
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/commit/392c411bdc16386ef42c86afaf8c4d8e4cddb955
milestone: '10.8'

View File

@ -0,0 +1,11 @@
---
table_name: ssh_signatures
classes:
- CommitSignatures::SshSignature
feature_categories:
- source_code_management
description: >
The verification status for commits which are signed by SSH keys. The actual signature
is part of the commit body and is stored in Gitaly.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87962
milestone: '15.1'

View File

@ -0,0 +1,9 @@
---
table_name: timelog_categories
classes:
- TimeTracking::TimelogCategory
feature_categories:
- team_planning
description: Categories that can be associated to a timelog to categorize them
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87529
milestone: '15.1'

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true
# Creates the timelog_categories table: per-namespace categories that
# timelogs can be assigned to, with optional billing information.
class CreateTimelogCategories < Gitlab::Database::Migration[2.0]
  enable_lock_retries!

  def up
    create_table :timelog_categories do |t|
      # Owning namespace; rows are removed when the namespace is deleted.
      # index: false because the composite unique index below already leads
      # on namespace_id.
      t.references :namespace,
        index: false,
        null: false,
        foreign_key: { on_delete: :cascade }
      t.timestamps_with_timezone null: false
      t.decimal :billing_rate, precision: 18, scale: 4, default: 0
      t.boolean :billable, default: false, null: false
      t.text :name, null: false, limit: 255
      t.text :description, limit: 1024
      t.text :color, limit: 7, default: '#6699cc', null: false
      # Category names are unique per namespace, case-insensitively.
      t.index 'namespace_id, LOWER(name)',
        unique: true,
        name: :index_timelog_categories_on_unique_name_per_namespace
    end
  end

  def down
    drop_table :timelog_categories
  end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
# Creates the ssh_signatures table, tracking the verification status of
# commits signed with SSH keys. Foreign keys to projects and keys are added
# concurrently in separate follow-up migrations.
class CreateSshSignatures < Gitlab::Database::Migration[2.0]
  def change
    create_table :ssh_signatures do |t|
      t.timestamps_with_timezone null: false
      t.bigint :project_id, null: false, index: true
      t.bigint :key_id, null: false, index: true
      # Small enum column; 0 is the initial/unverified state.
      t.integer :verification_status, default: 0, null: false, limit: 2
      # One signature row per commit.
      t.binary :commit_sha, null: false, index: { unique: true }
    end
  end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Adds the foreign key from ssh_signatures.project_id to projects.
# The key is created concurrently (hence outside a DDL transaction) to
# avoid long write locks on the referenced table.
class AddProjectsRelationToSshSignatures < Gitlab::Database::Migration[2.0]
  disable_ddl_transaction!

  def up
    add_concurrent_foreign_key :ssh_signatures, :projects, column: :project_id, on_delete: :cascade
  end

  def down
    with_lock_retries do
      remove_foreign_key :ssh_signatures, column: :project_id
    end
  end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Adds the foreign key from ssh_signatures.key_id to keys.
# The key is created concurrently (hence outside a DDL transaction) to
# avoid long write locks on the referenced table.
class AddKeysRelationToSshSignatures < Gitlab::Database::Migration[2.0]
  disable_ddl_transaction!

  def up
    add_concurrent_foreign_key :ssh_signatures, :keys, column: :key_id, on_delete: :cascade
  end

  def down
    with_lock_retries do
      remove_foreign_key :ssh_signatures, column: :key_id
    end
  end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
# Blocks until the 'BackfillNamespaceIdForProjectRoute' batched background
# migration has fully completed, so later code can rely on the backfilled
# routes data.
class FinalizeRoutesBackfillingForProjects < Gitlab::Database::Migration[2.0]
  disable_ddl_transaction!

  # This migration only touches tables in the main database.
  restrict_gitlab_migration gitlab_schema: :gitlab_main

  MIGRATION = 'BackfillNamespaceIdForProjectRoute'

  def up
    ensure_batched_background_migration_is_finished(
      job_class_name: MIGRATION,
      table_name: :routes,
      column_name: :id,
      job_arguments: []
    )
  end

  def down
    # noop: finalization cannot be meaningfully reversed.
  end
end

View File

@ -0,0 +1 @@
0afca448dc191db604d35f1e675f4342390f77542851c2ff716fd9225a9fa5a0

View File

@ -0,0 +1 @@
68327ee0eb01df152d2420fc31e586be0c5d40460c09cc43a1e955e84e5f00fc

View File

@ -0,0 +1 @@
925cd6bbfc67d0f5748c48b960ef1f932370fe078a979440b6bb32d049c2a9a8

View File

@ -0,0 +1 @@
a79526f7eb59fc93d66ff1a58471c9a3de27f8e620b5f3d4a255c88687a5bf2a

View File

@ -0,0 +1 @@
f31157879c1d7e2f08a63b4c68ed0353fd6df1e885cb7f3838aba7e1c782394c

View File

@ -20872,6 +20872,25 @@ CREATE SEQUENCE sprints_id_seq
ALTER SEQUENCE sprints_id_seq OWNED BY sprints.id;
CREATE TABLE ssh_signatures (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
project_id bigint NOT NULL,
key_id bigint NOT NULL,
verification_status smallint DEFAULT 0 NOT NULL,
commit_sha bytea NOT NULL
);
CREATE SEQUENCE ssh_signatures_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE ssh_signatures_id_seq OWNED BY ssh_signatures.id;
CREATE TABLE status_check_responses (
id bigint NOT NULL,
merge_request_id bigint NOT NULL,
@ -21097,6 +21116,30 @@ CREATE SEQUENCE terraform_states_id_seq
ALTER SEQUENCE terraform_states_id_seq OWNED BY terraform_states.id;
CREATE TABLE timelog_categories (
id bigint NOT NULL,
namespace_id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
billing_rate numeric(18,4) DEFAULT 0.0,
billable boolean DEFAULT false NOT NULL,
name text NOT NULL,
description text,
color text DEFAULT '#6699cc'::text NOT NULL,
CONSTRAINT check_37ad5f23d7 CHECK ((char_length(name) <= 255)),
CONSTRAINT check_4ba862ba3e CHECK ((char_length(color) <= 7)),
CONSTRAINT check_c4b8aec13a CHECK ((char_length(description) <= 1024))
);
CREATE SEQUENCE timelog_categories_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE timelog_categories_id_seq OWNED BY timelog_categories.id;
CREATE TABLE timelogs (
id integer NOT NULL,
time_spent integer NOT NULL,
@ -23245,6 +23288,8 @@ ALTER TABLE ONLY spam_logs ALTER COLUMN id SET DEFAULT nextval('spam_logs_id_seq
ALTER TABLE ONLY sprints ALTER COLUMN id SET DEFAULT nextval('sprints_id_seq'::regclass);
ALTER TABLE ONLY ssh_signatures ALTER COLUMN id SET DEFAULT nextval('ssh_signatures_id_seq'::regclass);
ALTER TABLE ONLY status_check_responses ALTER COLUMN id SET DEFAULT nextval('status_check_responses_id_seq'::regclass);
ALTER TABLE ONLY status_page_published_incidents ALTER COLUMN id SET DEFAULT nextval('status_page_published_incidents_id_seq'::regclass);
@ -23267,6 +23312,8 @@ ALTER TABLE ONLY terraform_state_versions ALTER COLUMN id SET DEFAULT nextval('t
ALTER TABLE ONLY terraform_states ALTER COLUMN id SET DEFAULT nextval('terraform_states_id_seq'::regclass);
ALTER TABLE ONLY timelog_categories ALTER COLUMN id SET DEFAULT nextval('timelog_categories_id_seq'::regclass);
ALTER TABLE ONLY timelogs ALTER COLUMN id SET DEFAULT nextval('timelogs_id_seq'::regclass);
ALTER TABLE ONLY todos ALTER COLUMN id SET DEFAULT nextval('todos_id_seq'::regclass);
@ -25454,6 +25501,9 @@ ALTER TABLE ONLY spam_logs
ALTER TABLE ONLY sprints
ADD CONSTRAINT sprints_pkey PRIMARY KEY (id);
ALTER TABLE ONLY ssh_signatures
ADD CONSTRAINT ssh_signatures_pkey PRIMARY KEY (id);
ALTER TABLE ONLY status_check_responses
ADD CONSTRAINT status_check_responses_pkey PRIMARY KEY (id);
@ -25487,6 +25537,9 @@ ALTER TABLE ONLY terraform_state_versions
ALTER TABLE ONLY terraform_states
ADD CONSTRAINT terraform_states_pkey PRIMARY KEY (id);
ALTER TABLE ONLY timelog_categories
ADD CONSTRAINT timelog_categories_pkey PRIMARY KEY (id);
ALTER TABLE ONLY timelogs
ADD CONSTRAINT timelogs_pkey PRIMARY KEY (id);
@ -29319,6 +29372,12 @@ CREATE INDEX index_sprints_on_title ON sprints USING btree (title);
CREATE INDEX index_sprints_on_title_trigram ON sprints USING gin (title gin_trgm_ops);
CREATE UNIQUE INDEX index_ssh_signatures_on_commit_sha ON ssh_signatures USING btree (commit_sha);
CREATE INDEX index_ssh_signatures_on_key_id ON ssh_signatures USING btree (key_id);
CREATE INDEX index_ssh_signatures_on_project_id ON ssh_signatures USING btree (project_id);
CREATE INDEX index_status_check_responses_on_external_approval_rule_id ON status_check_responses USING btree (external_approval_rule_id);
CREATE INDEX index_status_check_responses_on_external_status_check_id ON status_check_responses USING btree (external_status_check_id);
@ -29375,6 +29434,8 @@ CREATE UNIQUE INDEX index_terraform_states_on_project_id_and_name ON terraform_s
CREATE UNIQUE INDEX index_terraform_states_on_uuid ON terraform_states USING btree (uuid);
CREATE UNIQUE INDEX index_timelog_categories_on_unique_name_per_namespace ON timelog_categories USING btree (namespace_id, lower(name));
CREATE INDEX index_timelogs_on_issue_id ON timelogs USING btree (issue_id);
CREATE INDEX index_timelogs_on_merge_request_id ON timelogs USING btree (merge_request_id);
@ -31588,6 +31649,9 @@ ALTER TABLE ONLY issue_customer_relations_contacts
ALTER TABLE ONLY vulnerabilities
ADD CONSTRAINT fk_7c5bb22a22 FOREIGN KEY (due_date_sourcing_milestone_id) REFERENCES milestones(id) ON DELETE SET NULL;
ALTER TABLE ONLY ssh_signatures
ADD CONSTRAINT fk_7d2f93996c FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY labels
ADD CONSTRAINT fk_7de4989a69 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
@ -32020,6 +32084,9 @@ ALTER TABLE ONLY epics
ALTER TABLE ONLY boards
ADD CONSTRAINT fk_f15266b5f9 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY ssh_signatures
ADD CONSTRAINT fk_f177ea6aa5 FOREIGN KEY (key_id) REFERENCES keys(id) ON DELETE CASCADE;
ALTER TABLE ONLY ci_pipeline_variables
ADD CONSTRAINT fk_f29c5f4380 FOREIGN KEY (pipeline_id) REFERENCES ci_pipelines(id) ON DELETE CASCADE;
@ -33049,6 +33116,9 @@ ALTER TABLE ONLY vulnerability_finding_signatures
ALTER TABLE ONLY clusters_applications_cert_managers
ADD CONSTRAINT fk_rails_9e4f2cb4b2 FOREIGN KEY (cluster_id) REFERENCES clusters(id) ON DELETE CASCADE;
ALTER TABLE ONLY timelog_categories
ADD CONSTRAINT fk_rails_9f27b821a8 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY resource_milestone_events
ADD CONSTRAINT fk_rails_a006df5590 FOREIGN KEY (merge_request_id) REFERENCES merge_requests(id) ON DELETE CASCADE;

View File

@ -10,8 +10,8 @@ info: To determine the technical writer assigned to the Stage/Group associated w
> - [Enabled on GitLab.com and by default on self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/338939) in GitLab 14.7.
> - [Feature flag `ff_external_audit_events_namespace`](https://gitlab.com/gitlab-org/gitlab/-/issues/349588) removed in GitLab 14.8.
Event streaming allows owners of top-level groups to set an HTTP endpoint to receive **all** audit events about the group, and its
subgroups and projects as structured JSON.
Users can set an HTTP endpoint for a top-level group to receive all audit events about the group, its subgroups, and
projects as structured JSON. Event streaming is only available for top-level groups.
Top-level group owners can manage their audit logs in third-party systems such as Splunk, using the Splunk
[HTTP Event Collector](https://docs.splunk.com/Documentation/Splunk/8.2.2/Data/UsetheHTTPEventCollector). Any service that can receive

View File

@ -707,7 +707,7 @@ For more information, see the [official `ldapsearch` documentation](https://linu
### Using **AdFind** (Windows)
You can use the [`AdFind`](https://social.technet.microsoft.com/wiki/contents/articles/7535.adfind-command-examples.aspx) utility (on Windows based systems) to test that your LDAP server is accessible and authentication is working correctly. AdFind is a freeware utility built by [Joe Richards](http://www.joeware.net/freetools/tools/adfind/index.htm).
You can use the [`AdFind`](https://social.technet.microsoft.com/wiki/contents/articles/7535.adfind-command-examples.aspx) utility (on Windows based systems) to test that your LDAP server is accessible and authentication is working correctly. AdFind is a freeware utility built by [Joe Richards](https://www.joeware.net/freetools/tools/adfind/index.htm).
**Return all objects**

View File

@ -24,7 +24,7 @@ There are various configuration options to help GitLab server administrators:
- Enabling/disabling Git LFS support.
- Changing the location of LFS object storage.
- Setting up object storage supported by [Fog](http://fog.io/about/provider_documentation.html).
- Setting up object storage supported by [Fog](https://fog.io/about/provider_documentation.html).
### Configuration for Omnibus installations
@ -57,7 +57,7 @@ In `config/gitlab.yml`:
You can store LFS objects in remote object storage. This allows you
to reduce reads and writes to the local disk, and free up disk space significantly.
GitLab is tightly integrated with `Fog`, so you can refer to its [documentation](http://fog.io/about/provider_documentation.html)
GitLab is tightly integrated with `Fog`, so you can refer to its [documentation](https://fog.io/about/provider_documentation.html)
to check which storage services can be integrated with GitLab.
You can also use external object storage in a private local network. For example,
[MinIO](https://min.io/) is a standalone object storage service that works with GitLab instances.

View File

@ -276,7 +276,7 @@ If the source or target directory has many contents, this startup phase of `rsyn
server. You can reduce the workload of `rsync` by dividing its work into smaller pieces, and sync one repository at a
time.
In addition to `rsync` we use [GNU Parallel](http://www.gnu.org/software/parallel/).
In addition to `rsync` we use [GNU Parallel](https://www.gnu.org/software/parallel/).
This utility is not included in GitLab, so you must install it yourself with `apt`
or `yum`.

View File

@ -7,7 +7,7 @@ type: reference
# Working with the bundled PgBouncer service **(PREMIUM SELF)**
[PgBouncer](http://www.pgbouncer.org/) is used to seamlessly migrate database
[PgBouncer](https://www.pgbouncer.org/) is used to seamlessly migrate database
connections between servers in a failover scenario. Additionally, it can be used
in a non-fault-tolerant setup to pool connections, speeding up response time
while reducing resource usage.

View File

@ -2186,7 +2186,7 @@ GitLab has been tested on a number of object storage providers:
- [Amazon S3](https://aws.amazon.com/s3/)
- [Google Cloud Storage](https://cloud.google.com/storage)
- [Digital Ocean Spaces](http://www.digitalocean.com/products/spaces)
- [Digital Ocean Spaces](https://www.digitalocean.com/products/spaces)
- [Oracle Cloud Infrastructure](https://docs.cloud.oracle.com/en-us/iaas/Content/Object/Tasks/s3compatibleapi.htm)
- [OpenStack Swift (S3 compatibility mode)](https://docs.openstack.org/swift/latest/s3_compat.html)
- [Azure Blob storage](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction)

View File

@ -2190,7 +2190,7 @@ GitLab has been tested on a number of object storage providers:
- [Amazon S3](https://aws.amazon.com/s3/)
- [Google Cloud Storage](https://cloud.google.com/storage)
- [Digital Ocean Spaces](http://www.digitalocean.com/products/spaces)
- [Digital Ocean Spaces](https://www.digitalocean.com/products/spaces)
- [Oracle Cloud Infrastructure](https://docs.cloud.oracle.com/en-us/iaas/Content/Object/Tasks/s3compatibleapi.htm)
- [OpenStack Swift (S3 compatibility mode)](https://docs.openstack.org/swift/latest/s3_compat.html)
- [Azure Blob storage](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction)

View File

@ -892,7 +892,7 @@ GitLab has been tested on a number of object storage providers:
- [Amazon S3](https://aws.amazon.com/s3/)
- [Google Cloud Storage](https://cloud.google.com/storage)
- [Digital Ocean Spaces](http://www.digitalocean.com/products/spaces)
- [Digital Ocean Spaces](https://www.digitalocean.com/products/spaces)
- [Oracle Cloud Infrastructure](https://docs.cloud.oracle.com/en-us/iaas/Content/Object/Tasks/s3compatibleapi.htm)
- [OpenStack Swift (S3 compatibility mode)](https://docs.openstack.org/swift/latest/s3_compat.html)
- [Azure Blob storage](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction)

View File

@ -2125,7 +2125,7 @@ GitLab has been tested on a number of object storage providers:
- [Amazon S3](https://aws.amazon.com/s3/)
- [Google Cloud Storage](https://cloud.google.com/storage)
- [Digital Ocean Spaces](http://www.digitalocean.com/products/spaces)
- [Digital Ocean Spaces](https://www.digitalocean.com/products/spaces)
- [Oracle Cloud Infrastructure](https://docs.cloud.oracle.com/en-us/iaas/Content/Object/Tasks/s3compatibleapi.htm)
- [OpenStack Swift (S3 compatibility mode)](https://docs.openstack.org/swift/latest/s3_compat.html)
- [Azure Blob storage](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction)

View File

@ -2206,7 +2206,7 @@ GitLab has been tested on a number of object storage providers:
- [Amazon S3](https://aws.amazon.com/s3/)
- [Google Cloud Storage](https://cloud.google.com/storage)
- [Digital Ocean Spaces](http://www.digitalocean.com/products/spaces)
- [Digital Ocean Spaces](https://www.digitalocean.com/products/spaces)
- [Oracle Cloud Infrastructure](https://docs.cloud.oracle.com/en-us/iaas/Content/Object/Tasks/s3compatibleapi.htm)
- [OpenStack Swift (S3 compatibility mode)](https://docs.openstack.org/swift/latest/s3_compat.html)
- [Azure Blob storage](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction)

View File

@ -2125,7 +2125,7 @@ GitLab has been tested on a number of object storage providers:
- [Amazon S3](https://aws.amazon.com/s3/)
- [Google Cloud Storage](https://cloud.google.com/storage)
- [Digital Ocean Spaces](http://www.digitalocean.com/products/spaces)
- [Digital Ocean Spaces](https://www.digitalocean.com/products/spaces)
- [Oracle Cloud Infrastructure](https://docs.cloud.oracle.com/en-us/iaas/Content/Object/Tasks/s3compatibleapi.htm)
- [OpenStack Swift (S3 compatibility mode)](https://docs.openstack.org/swift/latest/s3_compat.html)
- [Azure Blob storage](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction)

View File

@ -804,7 +804,7 @@ Example response:
"discussion_locked":null,
"should_remove_source_branch":null,
"force_remove_source_branch":false,
"web_url":"http://https://gitlab.example.com/root/test-project/merge_requests/1",
"web_url":"https://gitlab.example.com/root/test-project/merge_requests/1",
"time_stats":{
"time_estimate":0,
"total_time_spent":0,

View File

@ -738,7 +738,7 @@ Content-Type: application/json
## Encoding `+` in ISO 8601 dates
If you need to include a `+` in a query parameter, you may need to use `%2B`
instead, due to a [W3 recommendation](http://www.w3.org/Addressing/URL/4_URI_Recommentations.html)
instead, due to a [W3 recommendation](https://www.w3.org/Addressing/URL/4_URI_Recommentations.html)
that causes a `+` to be interpreted as a space. For example, in an ISO 8601 date,
you may want to include a specific time in ISO 8601 format, such as:

View File

@ -54,7 +54,7 @@ Check visual design properties using your browser's _elements inspector_ ([Chrom
- Use recommended [colors](https://design.gitlab.com/product-foundations/colors/)
and [typography](https://design.gitlab.com/product-foundations/type-fundamentals/).
- Follow [layout guidelines](https://design.gitlab.com/layout/grid/).
- Use existing [icons](http://gitlab-org.gitlab.io/gitlab-svgs/) and [illustrations](http://gitlab-org.gitlab.io/gitlab-svgs/illustrations/)
- Use existing [icons](https://gitlab-org.gitlab.io/gitlab-svgs/) and [illustrations](https://gitlab-org.gitlab.io/gitlab-svgs/illustrations/)
or propose new ones according to [iconography](https://design.gitlab.com/product-foundations/iconography/)
and [illustration](https://design.gitlab.com/product-foundations/illustration/)
guidelines.

View File

@ -243,12 +243,14 @@ background migration.
1. Create a post-deployment migration that queues the migration for existing data:
```ruby
class QueueBackfillRoutesNamespaceId < Gitlab::Database::Migration[1.0]
class QueueBackfillRoutesNamespaceId < Gitlab::Database::Migration[2.0]
disable_ddl_transaction!
MIGRATION = 'BackfillRouteNamespaceId'
DELAY_INTERVAL = 2.minutes
restrict_gitlab_migration gitlab_schema: :gitlab_main
def up
queue_batched_background_migration(
MIGRATION,
@ -264,6 +266,14 @@ background migration.
end
```
NOTE:
When queuing a batched background migration, you need to restrict
the schema to the database where you make the actual changes.
In this case, we are updating `routes` records, so we set
`restrict_gitlab_migration gitlab_schema: :gitlab_main`. If, however,
you need to perform a CI data migration, you would set
`restrict_gitlab_migration gitlab_schema: :gitlab_ci`.
After deployment, our application:
- Continues using the data as before.
- Ensures that both existing and new data are migrated.
@ -272,10 +282,12 @@ background migration.
that checks that the batched background migration is completed. For example:
```ruby
class FinalizeBackfillRouteNamespaceId < Gitlab::Database::Migration[1.0]
class FinalizeBackfillRouteNamespaceId < Gitlab::Database::Migration[2.0]
MIGRATION = 'BackfillRouteNamespaceId'
disable_ddl_transaction!
restrict_gitlab_migration gitlab_schema: :gitlab_main
def up
ensure_batched_background_migration_is_finished(
job_class_name: MIGRATION,

View File

@ -41,7 +41,7 @@ Security Policy headers in the GitLab Rails app.
Some resources on implementing Content Security Policy:
- [MDN Article on CSP](https://developer.mozilla.org/en-US/docs/Web/Security/CSP)
- [GitHub's CSP Journey on the GitHub Engineering Blog](http://githubengineering.com/githubs-csp-journey/)
- [GitHub's CSP Journey on the GitHub Engineering Blog](https://github.blog/2016-04-12-githubs-csp-journey/)
- The Dropbox Engineering Blog's series on CSP: [1](https://blogs.dropbox.com/tech/2015/09/on-csp-reporting-and-filtering/), [2](https://blogs.dropbox.com/tech/2015/09/unsafe-inline-and-nonce-deployment/), [3](https://blogs.dropbox.com/tech/2015/09/csp-the-unexpected-eval/), [4](https://blogs.dropbox.com/tech/2015/09/csp-third-party-integrations-and-privilege-separation/)
### Subresource Integrity (SRI)
@ -59,7 +59,7 @@ All CSS and JavaScript assets should use Subresource Integrity.
Some resources on implementing Subresource Integrity:
- [MDN Article on SRI](https://developer.mozilla.org/en-us/docs/web/security/subresource_integrity)
- [Subresource Integrity on the GitHub Engineering Blog](http://githubengineering.com/subresource-integrity/)
- [Subresource Integrity on the GitHub Engineering Blog](https://github.blog/2015-09-19-subresource-integrity/)
-->

View File

@ -187,7 +187,7 @@ implementations:
It leverages the [`commonmarker`](https://github.com/gjtorikian/commonmarker) gem,
which is a Ruby wrapper for [`libcmark-gfm`](https://github.com/github/cmark),
GitHub's fork of the reference parser for CommonMark. `libcmark-gfm` is an extended
version of the C reference implementation of [CommonMark](http://commonmark.org/)
version of the C reference implementation of [CommonMark](https://commonmark.org/)
1. The frontend parser / renderer supports parsing and _WYSIWYG_ rendering for
the Content Editor. It is implemented in JavaScript. Parsing is based on the
[Remark](https://github.com/remarkjs/remark) Markdown parser, which produces a

View File

@ -72,7 +72,7 @@ HTTP.
GitLab Workhorse can listen on either a TCP or a Unix domain socket. It
can also open a second listening TCP listening socket with the Go
[`net/http/pprof` profiler server](http://golang.org/pkg/net/http/pprof/).
[`net/http/pprof` profiler server](https://pkg.go.dev/net/http/pprof).
GitLab Workhorse can listen on Redis build and runner registration events if you
pass a valid TOML configuration file through the `-config` flag.

View File

@ -354,7 +354,7 @@ When using Kerberos ticket-based authentication in an Active Directory domain,
it may be necessary to increase the maximum header size allowed by NGINX,
as extensions to the Kerberos protocol may result in HTTP authentication headers
larger than the default size of 8kB. Configure `large_client_header_buffers`
to a larger value in [the NGINX configuration](http://nginx.org/en/docs/http/ngx_http_core_module.html#large_client_header_buffers).
to a larger value in [the NGINX configuration](https://nginx.org/en/docs/http/ngx_http_core_module.html#large_client_header_buffers).
## Troubleshooting
@ -412,5 +412,5 @@ See also: [Git v2.11 release notes](https://github.com/git/git/blob/master/Docum
## Helpful links
- <https://help.ubuntu.com/community/Kerberos>
- <http://blog.manula.org/2012/04/setting-up-kerberos-server-with-debian.html>
- <https://blog.manula.org/2012/04/setting-up-kerberos-server-with-debian.html>
- <https://www.roguelynn.com/words/explain-like-im-5-kerberos/>

View File

@ -428,9 +428,9 @@ sudo -u git -H bundle exec rake gitlab:backup:create REPOSITORIES_STORAGES=stora
#### Uploading backups to a remote (cloud) storage
You can let the backup script upload (using the [Fog library](http://fog.io/))
You can let the backup script upload (using the [Fog library](https://fog.io/))
the `.tar` file it creates. In the following example, we use Amazon S3 for
storage, but Fog also lets you use [other storage providers](http://fog.io/storage/).
storage, but Fog also lets you use [other storage providers](https://fog.io/storage/).
GitLab also [imports cloud drivers](https://gitlab.com/gitlab-org/gitlab/-/blob/da46c9655962df7d49caef0e2b9f6bbe88462a02/Gemfile#L113)
for AWS, Google, OpenStack Swift, Rackspace, and Aliyun. A local driver is
[also available](#uploading-to-locally-mounted-shares).

View File

@ -19,7 +19,7 @@ The TLS Protocol CRIME Vulnerability affects systems that use data compression
over HTTPS. Your system might be vulnerable to the CRIME vulnerability if you use
SSL Compression (for example, Gzip) or SPDY (which optionally uses compression).
GitLab supports both Gzip and [SPDY](http://nginx.org/en/docs/http/ngx_http_spdy_module.html) and mitigates the CRIME
GitLab supports both Gzip and [SPDY](https://nginx.org/en/docs/http/ngx_http_spdy_module.html) and mitigates the CRIME
vulnerability by deactivating Gzip when HTTPS is enabled. The sources of the
files are here:
@ -58,7 +58,7 @@ vulnerability.
## References
- NGINX ["Module `ngx_http_spdy_module`"](http://nginx.org/en/docs/http/ngx_http_spdy_module.html)
- NGINX ["Module `ngx_http_spdy_module`"](https://nginx.org/en/docs/http/ngx_http_spdy_module.html)
- Tenable Network Security, Inc. ["Transport Layer Security (TLS) Protocol CRIME Vulnerability"](https://www.tenable.com/plugins/index.php?view=single&id=62565)
- Wikipedia contributors, ["CRIME"](https://en.wikipedia.org/wiki/CRIME) Wikipedia, The Free Encyclopedia

View File

@ -34,7 +34,7 @@ After getting used to these three steps, the next challenge is the branching mod
Because many organizations new to Git have no conventions for how to work with it, their repositories can quickly become messy.
The biggest problem is that many long-running branches emerge that all contain part of the changes.
People have a hard time figuring out which branch has the latest code, or which branch to deploy to production.
Frequently, the reaction to this problem is to adopt a standardized pattern such as [Git flow](https://nvie.com/posts/a-successful-git-branching-model/) and [GitHub flow](http://scottchacon.com/2011/08/31/github-flow.html).
Frequently, the reaction to this problem is to adopt a standardized pattern such as [Git flow](https://nvie.com/posts/a-successful-git-branching-model/) and [GitHub flow](https://scottchacon.com/2011/08/31/github-flow.html).
We think there is still room for improvement. In this document, we describe a set of practices we call GitLab flow.
For a video introduction of how this works in GitLab, see [GitLab Flow](https://youtu.be/InKNIvky2KE).

View File

@ -8,11 +8,16 @@ info: To determine the technical writer assigned to the Stage/Group associated w
## Pipeline success and duration charts
> [Renamed](https://gitlab.com/gitlab-org/gitlab/-/issues/38318) to CI/CD Analytics in GitLab 12.8.
CI/CD analytics shows the history of your pipeline successes and failures, as well as how long each pipeline
ran.
Pipeline statistics are gathered by collecting all available pipelines for the
project, regardless of status. The data available for each individual day is based
on when the pipeline was created.
The total pipeline calculation includes child
pipelines and pipelines that failed with an invalid YAML. To filter pipelines based on other attributes, use the [Pipelines API](../../api/pipelines.md#list-project-pipelines).
View successful pipelines:
![Successful pipelines](img/pipelines_success_chart.png)
@ -21,12 +26,6 @@ View pipeline duration history:
![Pipeline duration](img/pipelines_duration_chart.png)
Pipeline statistics are gathered by collecting all available pipelines for the
project regardless of status. The data available for each individual day is based
on when the pipeline was created. The total pipeline calculation includes child
pipelines and pipelines that failed with invalid YAML. If you are interested in
filtering pipelines based on other attributes, consider using the [Pipelines API](../../api/pipelines.md#list-project-pipelines).
## View CI/CD analytics
To view CI/CD analytics:

View File

@ -1327,7 +1327,7 @@ class DastWebsiteTargetView(View):
##### Node (with Express) example for on-demand scan
Here's how you can add a
[custom header in Node (with Express)](http://expressjs.com/en/5x/api.html#res.append):
[custom header in Node (with Express)](https://expressjs.com/en/5x/api.html#res.append):
```javascript
app.get('/dast-website-target', function(req, res) {

View File

@ -121,6 +121,10 @@ Additionally, if you are a project owner and a security policy project has not b
associated with this project, then a new project is created and associated automatically at the same
time that the first policy merge request is created.
## Managing projects in bulk via a script
You can use the [Vulnerability-Check Migration](https://gitlab.com/gitlab-org/gitlab/-/snippets/2328089) script to bulk create policies or associate security policy projects with development projects. For instructions and a demonstration of how to use the Vulnerability-Check Migration script, see [this video](https://youtu.be/biU1N26DfBc).
## Scan execution policies
See [Scan execution policies](scan-execution-policies.md).

View File

@ -10,4 +10,4 @@ redirect_to: '../../update/removals.md#managed-cluster-applicationsgitlab-ciyml'
This feature was [deprecated](https://gitlab.com/groups/gitlab-org/configure/-/epics/8)
in GitLab 14.5 and [removed](https://gitlab.com/gitlab-org/gitlab/-/issues/333610)
in GitLab 15.0. Use [crossplane](http://crossplane.io/) directly instead.
in GitLab 15.0. Use [crossplane](https://crossplane.io/) directly instead.

View File

@ -424,7 +424,7 @@ setting [disabled on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues/1
### SSH maximum number of connections
GitLab.com defines the maximum number of concurrent, unauthenticated SSH
connections by using the [MaxStartups setting](http://man.openbsd.org/sshd_config.5#MaxStartups).
connections by using the [MaxStartups setting](https://man.openbsd.org/sshd_config.5#MaxStartups).
If more than the maximum number of allowed connections occur concurrently, they
are dropped and users get
[an `ssh_exchange_identification` error](../../topics/git/troubleshooting_git.md#ssh_exchange_identification-error).

View File

@ -55,7 +55,7 @@ Nurtch is the company behind the [Rubix library](https://github.com/Nurtch/rubix
Rubix is an open-source Python library that makes it easy to perform common
DevOps tasks inside Jupyter Notebooks. Tasks such as plotting Cloudwatch metrics
and rolling your ECS/Kubernetes app are simplified down to a couple of lines of
code. See the [Nurtch Documentation](http://docs.nurtch.com/en/latest/) for more
code. See the [Nurtch Documentation](https://docs.nurtch.com/en/latest/) for more
information.
## Configure an executable runbook with GitLab
@ -217,4 +217,4 @@ the components outlined above and the pre-loaded demo runbook.
You can try other operations, such as running shell scripts or interacting with a
Kubernetes cluster. Visit the
[Nurtch Documentation](http://docs.nurtch.com/) for more information.
[Nurtch Documentation](https://docs.nurtch.com/) for more information.

View File

@ -71,6 +71,6 @@ Here's a few links to get you started with the migration:
- [Migrate using the `cvs-fast-export` tool](https://gitlab.com/esr/cvs-fast-export)
- [Stack Overflow post on importing the CVS repository](https://stackoverflow.com/a/11490134/974710)
- [Convert a CVS repository to Git](http://www.techrepublic.com/article/convert-cvs-repositories-to-git/)
- [Convert a CVS repository to Git](https://www.techrepublic.com/article/convert-cvs-repositories-to-git/)
- [Man page of the `git-cvsimport` tool](https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-cvsimport.html)
- [Migrate using `reposurgeon`](http://www.catb.org/~esr/reposurgeon/repository-editing.html#conversion)

View File

@ -167,7 +167,7 @@ for you to check:
- [GitHub](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/githubs-ssh-key-fingerprints)
- [GitLab.com](../../../gitlab_com/index.md#ssh-host-keys-fingerprints)
- [Launchpad](https://help.launchpad.net/SSHFingerprints)
- [Savannah](http://savannah.gnu.org/maintenance/SshAccess/)
- [Savannah](https://savannah.gnu.org/maintenance/SshAccess/)
- [SourceForge](https://sourceforge.net/p/forge/documentation/SSH%20Key%20Fingerprints/)
Other providers vary. You can securely gather key fingerprints with the following

View File

@ -492,6 +492,7 @@ software_license_policies: :gitlab_main
software_licenses: :gitlab_main
spam_logs: :gitlab_main
sprints: :gitlab_main
ssh_signatures: :gitlab_main
status_check_responses: :gitlab_main
status_page_published_incidents: :gitlab_main
status_page_settings: :gitlab_main
@ -504,6 +505,7 @@ term_agreements: :gitlab_main
terraform_states: :gitlab_main
terraform_state_versions: :gitlab_main
timelogs: :gitlab_main
timelog_categories: :gitlab_main
todos: :gitlab_main
token_with_ivs: :gitlab_main
topics: :gitlab_main

View File

@ -23,6 +23,7 @@ module Gitlab
insert
update
update_all
exec_insert_all
).freeze
NON_STICKY_READS = %i(

View File

@ -33,7 +33,7 @@ module Gitlab
# Returns the Arel relation for this CTE.
def to_arel
sql = Arel::Nodes::SqlLiteral.new("(#{query.to_sql})")
sql = Arel::Nodes::SqlLiteral.new("(#{query_as_sql})")
Gitlab::Database::AsWithMaterialized.new(table, sql, materialized: @materialized)
end
@ -54,6 +54,12 @@ module Gitlab
.with(to_arel)
.from(alias_to(relation.model.arel_table))
end
private
def query_as_sql
query.is_a?(String) ? query : query.to_sql
end
end
end
end

View File

@ -32016,13 +32016,16 @@ msgstr ""
msgid "Repositories Analytics"
msgstr ""
msgid "RepositoriesAnalytics|Analyze repositories for projects in %{groupName}. Data doesn't include projects in subgroups. %{learnMoreLink}."
msgstr ""
msgid "RepositoriesAnalytics|Average Coverage by Job"
msgstr ""
msgid "RepositoriesAnalytics|Average coverage"
msgstr ""
msgid "RepositoriesAnalytics|Average test coverage last 30 days"
msgid "RepositoriesAnalytics|Average test coverage"
msgstr ""
msgid "RepositoriesAnalytics|Code Coverage: %{averageCoverage}"
@ -32046,18 +32049,40 @@ msgstr ""
msgid "RepositoriesAnalytics|Historic Test Coverage Data is available in raw format (.csv) for further analysis."
msgstr ""
msgid "RepositoriesAnalytics|In the last day, %{metricValue} job has code coverage."
msgid_plural "RepositoriesAnalytics|In the last day, %{metricValue} jobs have code coverage."
msgstr[0] ""
msgstr[1] ""
msgid "RepositoriesAnalytics|In the last day, %{metricValue} project in %{groupName} has code coverage enabled."
msgid_plural "RepositoriesAnalytics|In the last day, %{metricValue} projects in %{groupName} have code coverage enabled."
msgstr[0] ""
msgstr[1] ""
msgid "RepositoriesAnalytics|In the last day, on average, %{metricValue} of all jobs are covered."
msgstr ""
msgid "RepositoriesAnalytics|Jobs with Coverage"
msgstr ""
msgid "RepositoriesAnalytics|Jobs with Coverage: %{coverageCount}"
msgstr ""
msgid "RepositoriesAnalytics|Last 30 days"
msgstr ""
msgid "RepositoriesAnalytics|Last Update"
msgstr ""
msgid "RepositoriesAnalytics|Last updated %{timeAgo}"
msgstr ""
msgid "RepositoriesAnalytics|Latest test coverage results"
msgstr ""
msgid "RepositoriesAnalytics|Latest test coverage results for all projects in %{groupName} (excluding projects in subgroups)."
msgstr ""
msgid "RepositoriesAnalytics|No test coverage to display"
msgstr ""
@ -32073,6 +32098,9 @@ msgstr ""
msgid "RepositoriesAnalytics|Projects with Coverage: %{projectCount}"
msgstr ""
msgid "RepositoriesAnalytics|Repositories Analytics"
msgstr ""
msgid "RepositoriesAnalytics|Test Code Coverage"
msgstr ""
@ -45990,6 +46018,9 @@ msgstr ""
msgid "time summary"
msgstr ""
msgid "today"
msgstr ""
msgid "toggle collapse"
msgstr ""

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true

# Factory for CommitSignatures::SshSignature — a commit signature made with
# an SSH key and verified against a user's uploaded Key record.
FactoryBot.define do
  factory :ssh_signature, class: 'CommitSignatures::SshSignature' do
    # Random 40-char hex digest so each built signature points at a unique SHA.
    commit_sha { Digest::SHA1.hexdigest(SecureRandom.hex) }
    project
    key
    verification_status { :verified }
  end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true

# Factory for TimeTracking::TimelogCategory — a per-namespace category used
# to classify time tracking entries.
FactoryBot.define do
  factory :timelog_category, class: 'TimeTracking::TimelogCategory' do
    namespace
    # Sequence-generated name so per-namespace uniqueness validation passes
    # when several categories are created in one example.
    name { generate(:name) }
  end
end

View File

@ -79,42 +79,55 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
end
end
describe '.insert_all!' do
describe 'methods using exec_insert_all on the connection', :request_store do
let(:model_class) do
Class.new(ApplicationRecord) do
self.table_name = "_test_connection_proxy_insert_all"
end
end
let(:session) { Gitlab::Database::LoadBalancing::Session.new }
before do
ActiveRecord::Schema.define do
create_table :_test_connection_proxy_bulk_insert, force: true do |t|
t.string :name, null: true
create_table :_test_connection_proxy_insert_all, force: true do |t|
t.string :name, null: false
t.index :name, unique: true
end
end
allow(Gitlab::Database::LoadBalancing::Session).to receive(:current)
.and_return(session)
end
after do
ActiveRecord::Schema.define do
drop_table :_test_connection_proxy_bulk_insert, force: true
drop_table :_test_connection_proxy_insert_all, force: true
end
end
let(:model_class) do
Class.new(ApplicationRecord) do
self.table_name = "_test_connection_proxy_bulk_insert"
describe '#upsert' do
it 'upserts a record and marks the session to stick to the primary' do
expect { 2.times { model_class.upsert({ name: 'test' }, unique_by: :name) } }
.to change { model_class.count }.from(0).to(1)
.and change { session.use_primary? }.from(false).to(true)
end
end
it 'inserts data in bulk' do
expect(model_class).to receive(:connection)
.at_least(:once)
.and_return(proxy)
describe '#insert_all!' do
it 'inserts multiple records and marks the session to stick to the primary' do
expect { model_class.insert_all([{ name: 'one' }, { name: 'two' }]) }
.to change { model_class.count }.from(0).to(2)
.and change { session.use_primary? }.from(false).to(true)
end
end
expect(proxy).to receive(:write_using_load_balancer)
.at_least(:once)
.and_call_original
expect do
model_class.insert_all! [
{ name: "item1" },
{ name: "item2" }
]
end.to change { model_class.count }.by(2)
describe '#insert' do
it 'inserts a single record and marks the session to stick to the primary' do
expect { model_class.insert({ name: 'single' }) }
.to change { model_class.count }.from(0).to(1)
.and change { session.use_primary? }.from(false).to(true)
end
end
end

View File

@ -3,15 +3,14 @@
require 'spec_helper'
RSpec.describe Gitlab::SQL::CTE do
describe '#to_arel' do
shared_examples '#to_arel' do
it 'generates an Arel relation for the CTE body' do
relation = User.where(id: 1)
cte = described_class.new(:cte_name, relation)
sql = cte.to_arel.to_sql
name = ApplicationRecord.connection.quote_table_name(:cte_name)
sql1 = ApplicationRecord.connection.unprepared_statement do
relation.except(:order).to_sql
relation.is_a?(String) ? relation : relation.to_sql
end
expected = [
@ -25,6 +24,20 @@ RSpec.describe Gitlab::SQL::CTE do
end
end
describe '#to_arel' do
context 'when relation is an ActiveRecord::Relation' do
let(:relation) { User.where(id: 1) }
include_examples '#to_arel'
end
context 'when relation is a String' do
let(:relation) { User.where(id: 1).to_sql }
include_examples '#to_arel'
end
end
describe '#alias_to' do
it 'returns an alias for the CTE' do
cte = described_class.new(:cte_name, nil)

View File

@ -0,0 +1,72 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

# Migration spec for FinalizeRoutesBackfillingForProjects: the post-deploy
# migration must finalize the 'BackfillNamespaceIdForProjectRoute' batched
# background migration, or log a warning when no matching record exists.
RSpec.describe FinalizeRoutesBackfillingForProjects, :migration do
  let(:batched_migrations) { table(:batched_background_migrations) }

  let_it_be(:migration) { described_class::MIGRATION }

  describe '#up' do
    shared_examples 'finalizes the migration' do
      it 'finalizes the migration' do
        # NOTE(review): the expectation names :projects while the batched
        # migration record below uses table_name: :routes — confirm the
        # arguments against the migration's finalize call.
        allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
          expect(runner).to receive(:finalize).with('BackfillNamespaceIdForProjectRoute', :projects, :id, [])
        end

        # Bug fix: without running the migration the message expectation above
        # was never exercised, so the example passed vacuously.
        migrate!
      end
    end

    context 'when routes backfilling migration is missing' do
      it 'warns migration not found' do
        expect(Gitlab::AppLogger)
          .to receive(:warn).with(/Could not find batched background migration for the given configuration:/)

        migrate!
      end
    end

    context 'with backfilling migration present' do
      let!(:project_namespace_backfill) do
        batched_migrations.create!(
          job_class_name: 'BackfillNamespaceIdForProjectRoute',
          table_name: :routes,
          column_name: :id,
          job_arguments: [],
          interval: 2.minutes,
          min_value: 1,
          max_value: 2,
          batch_size: 1000,
          sub_batch_size: 200,
          gitlab_schema: :gitlab_main,
          status: 3 # finished
        )
      end

      context 'when backfilling migration finished successfully' do
        it 'does not raise exception' do
          expect { migrate! }.not_to raise_error
        end
      end

      context 'with different backfilling migration statuses' do
        using RSpec::Parameterized::TableSyntax

        # Any unfinished status must route through the runner's finalize step.
        where(:status, :description) do
          0 | 'paused'
          1 | 'active'
          4 | 'failed'
          5 | 'finalizing'
        end

        with_them do
          before do
            project_namespace_backfill.update!(status: status)
          end

          it_behaves_like 'finalizes the migration'
        end
      end
    end
  end
end

View File

@ -3,17 +3,26 @@
require 'spec_helper'
RSpec.describe CommitSignatures::GpgSignature do
# This commit is seeded from https://gitlab.com/gitlab-org/gitlab-test
# For instructions on how to add more seed data, see the project README
let(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
let!(:project) { create(:project, :repository, path: 'sample-project') }
let!(:commit) { create(:commit, project: project, sha: commit_sha) }
let(:gpg_signature) { create(:gpg_signature, commit_sha: commit_sha) }
let(:signature) { create(:gpg_signature, commit_sha: commit_sha) }
let(:gpg_key) { create(:gpg_key) }
let(:gpg_key_subkey) { create(:gpg_key_subkey) }
let(:attributes) do
{
commit_sha: commit_sha,
project: project,
gpg_key_primary_keyid: gpg_key.keyid
}
end
it_behaves_like 'having unique enum values'
it_behaves_like 'commit signature'
describe 'associations' do
it { is_expected.to belong_to(:project).required }
it { is_expected.to belong_to(:gpg_key) }
it { is_expected.to belong_to(:gpg_key_subkey) }
end
@ -22,104 +31,56 @@ RSpec.describe CommitSignatures::GpgSignature do
subject { described_class.new }
it { is_expected.to validate_presence_of(:commit_sha) }
it { is_expected.to validate_presence_of(:project_id) }
it { is_expected.to validate_presence_of(:gpg_key_primary_keyid) }
end
describe '.safe_create!' do
let(:attributes) do
{
commit_sha: commit_sha,
project: project,
gpg_key_primary_keyid: gpg_key.keyid
}
end
it 'finds a signature by commit sha if it existed' do
gpg_signature
expect(described_class.safe_create!(commit_sha: commit_sha)).to eq(gpg_signature)
end
it 'creates a new signature if it was not found' do
expect { described_class.safe_create!(attributes) }.to change { described_class.count }.by(1)
end
it 'assigns the correct attributes when creating' do
signature = described_class.safe_create!(attributes)
expect(signature.project).to eq(project)
expect(signature.commit_sha).to eq(commit_sha)
expect(signature.gpg_key_primary_keyid).to eq(gpg_key.keyid)
end
it 'does not raise an error in case of a race condition' do
expect(described_class).to receive(:find_by).and_return(nil, double(described_class, persisted?: true))
expect(described_class).to receive(:create).and_raise(ActiveRecord::RecordNotUnique)
allow(described_class).to receive(:create).and_call_original
described_class.safe_create!(attributes)
end
end
describe '.by_commit_sha scope' do
let(:gpg_key) { create(:gpg_key, key: GpgHelpers::User2.public_key) }
let!(:another_gpg_signature) { create(:gpg_signature, gpg_key: gpg_key) }
it 'returns all gpg signatures by sha' do
expect(described_class.by_commit_sha(commit_sha)).to eq([gpg_signature])
expect(described_class.by_commit_sha(commit_sha)).to match_array([signature])
expect(
described_class.by_commit_sha([commit_sha, another_gpg_signature.commit_sha])
).to contain_exactly(gpg_signature, another_gpg_signature)
end
end
describe '#commit' do
it 'fetches the commit through the project' do
expect_next_instance_of(Project) do |instance|
expect(instance).to receive(:commit).with(commit_sha).and_return(commit)
end
gpg_signature.commit
).to contain_exactly(signature, another_gpg_signature)
end
end
describe '#gpg_key=' do
it 'supports the assignment of a GpgKey' do
gpg_signature = create(:gpg_signature, gpg_key: gpg_key)
signature = create(:gpg_signature, gpg_key: gpg_key)
expect(gpg_signature.gpg_key).to be_an_instance_of(GpgKey)
expect(signature.gpg_key).to be_an_instance_of(GpgKey)
end
it 'supports the assignment of a GpgKeySubkey' do
gpg_signature = create(:gpg_signature, gpg_key: gpg_key_subkey)
signature = create(:gpg_signature, gpg_key: gpg_key_subkey)
expect(gpg_signature.gpg_key).to be_an_instance_of(GpgKeySubkey)
expect(signature.gpg_key).to be_an_instance_of(GpgKeySubkey)
end
it 'clears gpg_key and gpg_key_subkey_id when passing nil' do
gpg_signature.update_attribute(:gpg_key, nil)
signature.update_attribute(:gpg_key, nil)
expect(gpg_signature.gpg_key_id).to be_nil
expect(gpg_signature.gpg_key_subkey_id).to be_nil
expect(signature.gpg_key_id).to be_nil
expect(signature.gpg_key_subkey_id).to be_nil
end
end
describe '#gpg_commit' do
context 'when commit does not exist' do
it 'returns nil' do
allow(gpg_signature).to receive(:commit).and_return(nil)
allow(signature).to receive(:commit).and_return(nil)
expect(gpg_signature.gpg_commit).to be_nil
expect(signature.gpg_commit).to be_nil
end
end
context 'when commit exists' do
it 'returns an instance of Gitlab::Gpg::Commit' do
allow(gpg_signature).to receive(:commit).and_return(commit)
allow(signature).to receive(:commit).and_return(commit)
expect(gpg_signature.gpg_commit).to be_an_instance_of(Gitlab::Gpg::Commit)
expect(signature.gpg_commit).to be_an_instance_of(Gitlab::Gpg::Commit)
end
end
end

View File

@ -0,0 +1,38 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe CommitSignatures::SshSignature do
  # This commit is seeded from https://gitlab.com/gitlab-org/gitlab-test
  # For instructions on how to add more seed data, see the project README
  let(:commit_sha) { '7b5160f9bb23a3d58a0accdbe89da13b96b1ece9' }
  let!(:project) { create(:project, :repository, path: 'sample-project') }
  let!(:commit) { create(:commit, project: project, sha: commit_sha) }
  # `signature` and `attributes` are the contract expected by the
  # 'commit signature' shared examples included below.
  let(:signature) { create(:ssh_signature, commit_sha: commit_sha) }
  let(:ssh_key) { create(:ed25519_key_256) }

  let(:attributes) do
    {
      commit_sha: commit_sha,
      project: project,
      key: ssh_key
    }
  end

  it_behaves_like 'having unique enum values'
  it_behaves_like 'commit signature'

  describe 'associations' do
    it { is_expected.to belong_to(:key).required }
  end

  describe '.by_commit_sha scope' do
    let!(:another_signature) { create(:ssh_signature, commit_sha: '0000000000000000000000000000000000000001') }

    # The scope accepts either a single SHA or an array of SHAs.
    it 'returns all signatures by sha' do
      expect(described_class.by_commit_sha(commit_sha)).to match_array([signature])
      expect(
        described_class.by_commit_sha([commit_sha, another_signature.commit_sha])
      ).to contain_exactly(signature, another_signature)
    end
  end
end

View File

@ -3,11 +3,13 @@
require 'spec_helper'
RSpec.describe CommitSignatures::X509CommitSignature do
# This commit is seeded from https://gitlab.com/gitlab-org/gitlab-test
# For instructions on how to add more seed data, see the project README
let(:commit_sha) { '189a6c924013fc3fe40d6f1ec1dc20214183bc97' }
let(:project) { create(:project, :public, :repository) }
let!(:commit) { create(:commit, project: project, sha: commit_sha) }
let(:x509_certificate) { create(:x509_certificate) }
let(:x509_signature) { create(:x509_commit_signature, commit_sha: commit_sha) }
let(:signature) { create(:x509_commit_signature, commit_sha: commit_sha) }
let(:attributes) do
{
@ -19,38 +21,16 @@ RSpec.describe CommitSignatures::X509CommitSignature do
end
it_behaves_like 'having unique enum values'
it_behaves_like 'commit signature'
describe 'validation' do
it { is_expected.to validate_presence_of(:commit_sha) }
it { is_expected.to validate_presence_of(:project_id) }
it { is_expected.to validate_presence_of(:x509_certificate_id) }
end
describe 'associations' do
it { is_expected.to belong_to(:project).required }
it { is_expected.to belong_to(:x509_certificate).required }
end
describe '.safe_create!' do
it 'finds a signature by commit sha if it existed' do
x509_signature
expect(described_class.safe_create!(commit_sha: commit_sha)).to eq(x509_signature)
end
it 'creates a new signature if it was not found' do
expect { described_class.safe_create!(attributes) }.to change { described_class.count }.by(1)
end
it 'assigns the correct attributes when creating' do
signature = described_class.safe_create!(attributes)
expect(signature.project).to eq(project)
expect(signature.commit_sha).to eq(commit_sha)
expect(signature.x509_certificate_id).to eq(x509_certificate.id)
end
end
describe '#user' do
context 'if email is assigned to a user' do
let!(:user) { create(:user, email: X509Helpers::User1.certificate_email) }

View File

@ -0,0 +1,59 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe TimeTracking::TimelogCategory, type: :model do
  describe 'associations' do
    it { is_expected.to belong_to(:namespace).with_foreign_key('namespace_id') }
  end

  describe 'validations' do
    subject { create(:timelog_category) }

    it { is_expected.to validate_presence_of(:namespace) }
    it { is_expected.to validate_presence_of(:name) }
    # Name must be unique within a namespace, case-insensitively.
    it { is_expected.to validate_uniqueness_of(:name).case_insensitive.scoped_to([:namespace_id]) }
    it { is_expected.to validate_length_of(:name).is_at_most(255) }
    it { is_expected.to validate_length_of(:description).is_at_most(1024) }
    it { is_expected.to validate_length_of(:color).is_at_most(7) }
  end

  describe 'validations when billable' do
    subject { create(:timelog_category, billable: true, billing_rate: 10.5) }

    # A billing rate is only required (and must be positive) when billable.
    it { is_expected.to validate_presence_of(:billing_rate) }
    it { is_expected.to validate_numericality_of(:billing_rate).is_greater_than(0) }
  end

  describe '#name' do
    it 'strips name' do
      timelog_category = described_class.new(name: '  TimelogCategoryTest  ')
      timelog_category.valid?

      expect(timelog_category.name).to eq('TimelogCategoryTest')
    end
  end

  describe '#color' do
    it 'strips color' do
      timelog_category = described_class.new(name: 'TimelogCategoryTest', color: '  #fafafa  ')
      timelog_category.valid?

      # Colors are coerced into Gitlab::Color value objects.
      expect(timelog_category.color).to eq(::Gitlab::Color.of('#fafafa'))
    end
  end

  describe '#find_by_name' do
    let_it_be(:namespace_a) { create(:namespace) }
    let_it_be(:namespace_b) { create(:namespace) }
    let_it_be(:timelog_category_a) { create(:timelog_category, namespace: namespace_a, name: 'TimelogCategoryTest') }

    # Lookup is case-insensitive and scoped to the given namespace.
    it 'finds the correct timelog category' do
      expect(described_class.find_by_name(namespace_a.id, 'TIMELOGCATEGORYTest')).to match_array([timelog_category_a])
    end

    it 'returns empty if not found' do
      expect(described_class.find_by_name(namespace_b.id, 'TIMELOGCATEGORYTest')).to be_empty
    end
  end
end

View File

@ -0,0 +1,51 @@
# frozen_string_literal: true

# Shared examples for commit signature models (GPG, SSH, X.509 signatures).
# Including specs must define: `signature` (a persisted record), `attributes`
# (a hash valid for safe_create!), `commit_sha`, and `commit`.
RSpec.shared_examples 'commit signature' do
  describe 'associations' do
    it { is_expected.to belong_to(:project).required }
  end

  describe 'validation' do
    subject { described_class.new }

    it { is_expected.to validate_presence_of(:commit_sha) }
    it { is_expected.to validate_presence_of(:project_id) }
  end

  describe '.safe_create!' do
    it 'finds a signature by commit sha if it existed' do
      # Force-create the record before calling safe_create!.
      signature

      expect(described_class.safe_create!(commit_sha: commit_sha)).to eq(signature)
    end

    it 'creates a new signature if it was not found' do
      expect { described_class.safe_create!(attributes) }.to change { described_class.count }.by(1)
    end

    it 'assigns the correct attributes when creating' do
      signature = described_class.safe_create!(attributes)

      expect(signature).to have_attributes(attributes)
    end

    it 'does not raise an error in case of a race condition' do
      # First lookup misses, second (after the failed insert) finds the row
      # created by the concurrent writer.
      expect(described_class).to receive(:find_by).and_return(nil, instance_double(described_class, persisted?: true))

      # Simulate the concurrent insert winning the race on the unique index.
      expect(described_class).to receive(:create).and_raise(ActiveRecord::RecordNotUnique)
      allow(described_class).to receive(:create).and_call_original

      described_class.safe_create!(attributes)
    end
  end

  describe '#commit' do
    it 'fetches the commit through the project' do
      expect_next_instance_of(Project) do |instance|
        expect(instance).to receive(:commit).with(commit_sha).and_return(commit)
      end

      signature.commit
    end
  end
end

View File

@ -238,6 +238,12 @@ RSpec.shared_examples 'namespace traversal scopes' do
subject { described_class.where(id: [nested_group_1, nested_group_2]).self_and_descendants(include_self: false) }
it { is_expected.to contain_exactly(deep_nested_group_1, deep_nested_group_2) }
context 'with duplicate descendants' do
subject { described_class.where(id: [group_1, nested_group_1]).self_and_descendants(include_self: false) }
it { is_expected.to contain_exactly(nested_group_1, deep_nested_group_1) }
end
end
context 'with offset and limit' do
@ -267,6 +273,14 @@ RSpec.shared_examples 'namespace traversal scopes' do
include_examples '.self_and_descendants'
end
context 'with linear_scopes_superset feature flag disabled' do
before do
stub_feature_flags(linear_scopes_superset: false)
end
include_examples '.self_and_descendants'
end
end
shared_examples '.self_and_descendant_ids' do
@ -310,6 +324,14 @@ RSpec.shared_examples 'namespace traversal scopes' do
include_examples '.self_and_descendant_ids'
end
context 'with linear_scopes_superset feature flag disabled' do
before do
stub_feature_flags(linear_scopes_superset: false)
end
include_examples '.self_and_descendant_ids'
end
end
shared_examples '.self_and_hierarchy' do