Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-10-26 18:11:17 +00:00
parent c86ec1d072
commit 51ba1dfa3b
71 changed files with 1187 additions and 1262 deletions

View File

@ -70,11 +70,12 @@ review-build-cng:
FORCE_RAILS_IMAGE_BUILDS: "${FORCE_RAILS_IMAGE_BUILDS}"
CE_PIPELINE: "${CE_PIPELINE}" # Based on https://docs.gitlab.com/ee/ci/jobs/job_control.html#check-if-a-variable-exists, `if: '$CE_PIPELINE'` will evaluate to `false` when this variable is empty
EE_PIPELINE: "${EE_PIPELINE}" # Based on https://docs.gitlab.com/ee/ci/jobs/job_control.html#check-if-a-variable-exists, `if: '$EE_PIPELINE'` will evaluate to `false` when this variable is empty
GITLAB_SHELL_VERSION: "${GITLAB_SHELL_VERSION}"
GITLAB_ELASTICSEARCH_INDEXER_VERSION: "${GITLAB_ELASTICSEARCH_INDEXER_VERSION}"
GITLAB_KAS_VERSION: "${GITLAB_KAS_VERSION}"
GITLAB_WORKHORSE_VERSION: "${GITLAB_WORKHORSE_VERSION}"
GITLAB_METRICS_EXPORTER_VERSION: "${GITLAB_METRICS_EXPORTER_VERSION}"
GITLAB_PAGES_VERSION: "${GITLAB_PAGES_VERSION}"
GITLAB_SHELL_VERSION: "${GITLAB_SHELL_VERSION}"
GITLAB_WORKHORSE_VERSION: "${GITLAB_WORKHORSE_VERSION}"
GITALY_SERVER_VERSION: "${GITALY_SERVER_VERSION}"
trigger:
project: gitlab-org/build/CNG-mirror

View File

@ -143,6 +143,15 @@ export default {
if (!hasArtifacts) return;
toggleDetails();
},
downloadPath(job) {
return job.archive?.downloadPath;
},
downloadButtonDisabled(job) {
return !job.archive?.downloadPath;
},
browseButtonDisabled(job) {
return !job.browseArtifactsPath;
},
},
fields: [
{
@ -271,18 +280,19 @@ export default {
<gl-button-group>
<gl-button
icon="download"
:disabled="!item.archive.downloadPath"
:href="item.archive.downloadPath"
:disabled="downloadButtonDisabled(item)"
:href="downloadPath(item)"
:title="$options.i18n.download"
:aria-label="$options.i18n.download"
data-testid="job-artifacts-download-button"
/>
<gl-button
icon="folder-open"
:disabled="browseButtonDisabled(item)"
:href="item.browseArtifactsPath"
:title="$options.i18n.browse"
:aria-label="$options.i18n.browse"
data-testid="job-artifacts-browse-button"
disabled
/>
<gl-button
icon="remove"

View File

@ -37,6 +37,7 @@ query getJobArtifacts(
shortSha
commitPath
finishedAt
browseArtifactsPath
artifacts {
nodes {
id

View File

@ -0,0 +1,82 @@
# frozen_string_literal: true
# Support for both BCrypt and PBKDF2+SHA512 user passwords
# Meant to be used exclusively with User model but extracted
# to a concern for isolation and clarity.
module EncryptedUserPassword
  extend ActiveSupport::Concern

  # Digest prefixes used to detect which hashing scheme produced
  # `encrypted_password` (see #password_strategy).
  BCRYPT_PREFIX = '$2a$'
  PBKDF2_SHA512_PREFIX = '$pbkdf2-sha512$'

  BCRYPT_STRATEGY = :bcrypt
  PBKDF2_SHA512_STRATEGY = :pbkdf2_sha512

  # Use Devise DatabaseAuthenticatable#authenticatable_salt
  # unless encrypted password is PBKDF2+SHA512, whose salt is embedded
  # in the stored digest and must be split back out.
  def authenticatable_salt
    return super unless pbkdf2_password?

    Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512.split_digest(encrypted_password)[:salt]
  end

  # Called by Devise during database authentication.
  # Also migrates the user password to the configured
  # encryption type (BCrypt or PBKDF2+SHA512), if needed.
  #
  # Returns false when the password does not match; otherwise returns the
  # (truthy) result of the migration step.
  def valid_password?(password)
    return false unless password_matches?(password)

    migrate_password!(password)
  end

  # Devise-compatible writer. Keeps the plaintext in @password (Devise
  # validations read it) and re-hashes into `encrypted_password` with
  # PBKDF2+SHA512 when FIPS mode is enabled, BCrypt otherwise.
  # A blank password leaves `encrypted_password` untouched.
  def password=(new_password)
    @password = new_password # rubocop:disable Gitlab/ModuleWithInstanceVariables
    return unless new_password.present?

    self.encrypted_password = if Gitlab::FIPS.enabled?
                                Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512.digest(
                                  new_password,
                                  Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512::STRETCHES,
                                  Devise.friendly_token[0, 16])
                              else
                                Devise::Encryptor.digest(self.class, new_password)
                              end
  end

  private

  # Detects the hashing scheme of the stored digest from its prefix.
  # Returns :bcrypt, :pbkdf2_sha512, or :unknown.
  def password_strategy
    return BCRYPT_STRATEGY if encrypted_password.starts_with?(BCRYPT_PREFIX)
    return PBKDF2_SHA512_STRATEGY if encrypted_password.starts_with?(PBKDF2_SHA512_PREFIX)

    :unknown
  end

  def pbkdf2_password?
    password_strategy == PBKDF2_SHA512_STRATEGY
  end

  def bcrypt_password?
    password_strategy == BCRYPT_STRATEGY
  end

  # Compares `password` against the stored digest using the encryptor that
  # matches the digest's strategy. Falls through to nil (falsy) when the
  # strategy is :unknown, so an unrecognized digest never authenticates.
  def password_matches?(password)
    if bcrypt_password?
      Devise::Encryptor.compare(self.class, encrypted_password, password)
    elsif pbkdf2_password?
      Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512.compare(encrypted_password, password)
    end
  end

  # Re-hashes the (already verified) plaintext when the stored digest was
  # produced by a different strategy than the currently configured one.
  # Persists via the #password= writer above through update_attribute.
  def migrate_password!(password)
    return true if password_strategy == encryptor

    update_attribute(:password, password)
  end

  # Strategy that newly written digests should use: PBKDF2+SHA512 in FIPS
  # mode, BCrypt otherwise.
  def encryptor
    return BCRYPT_STRATEGY unless Gitlab::FIPS.enabled?

    PBKDF2_SHA512_STRATEGY
  end
end

View File

@ -83,7 +83,10 @@ class User < ApplicationRecord
serialize :otp_backup_codes, JSON # rubocop:disable Cop/ActiveRecordSerialize
devise :lockable, :recoverable, :rememberable, :trackable,
:validatable, :omniauthable, :confirmable, :registerable, :pbkdf2_encryptable
:validatable, :omniauthable, :confirmable, :registerable
# Must be included after `devise`
include EncryptedUserPassword
include AdminChangedPasswordNotifier
@ -937,26 +940,14 @@ class User < ApplicationRecord
reset_password_sent_at.present? && reset_password_sent_at >= 1.minute.ago
end
def authenticatable_salt
return encrypted_password[0, 29] unless Feature.enabled?(:pbkdf2_password_encryption)
return super if password_strategy == :pbkdf2_sha512
encrypted_password[0, 29]
end
# Overwrites valid_password? from Devise::Models::DatabaseAuthenticatable
# In constant-time, check both that the password isn't on a denylist AND
# that the password is the user's password
def valid_password?(password)
return false unless password_allowed?(password)
return false if password_automatically_set?
return super if Feature.enabled?(:pbkdf2_password_encryption)
Devise::Encryptor.compare(self.class, encrypted_password, password)
rescue Devise::Pbkdf2Encryptable::Encryptors::InvalidHash
validate_and_migrate_bcrypt_password(password)
rescue ::BCrypt::Errors::InvalidHash
false
super
end
def generate_otp_backup_codes!
@ -975,27 +966,6 @@ class User < ApplicationRecord
end
end
# This method should be removed once the :pbkdf2_password_encryption feature flag is removed.
def password=(new_password)
if Feature.enabled?(:pbkdf2_password_encryption) && Feature.enabled?(:pbkdf2_password_encryption_write, self)
super
else
# Copied from Devise DatabaseAuthenticatable.
@password = new_password
self.encrypted_password = Devise::Encryptor.digest(self.class, new_password) if new_password.present?
end
end
def password_strategy
super
rescue Devise::Pbkdf2Encryptable::Encryptors::InvalidHash
begin
return :bcrypt if BCrypt::Password.new(encrypted_password)
rescue BCrypt::Errors::InvalidHash
:unknown
end
end
# See https://gitlab.com/gitlab-org/security/gitlab/-/issues/638
DISALLOWED_PASSWORDS = %w[123qweQWE!@#000000000].freeze
@ -2440,15 +2410,6 @@ class User < ApplicationRecord
Ci::NamespaceMirror.contains_traversal_ids(traversal_ids)
end
def validate_and_migrate_bcrypt_password(password)
return false unless Devise::Encryptor.compare(self.class, encrypted_password, password)
return true unless Feature.enabled?(:pbkdf2_password_encryption_write, self)
update_attribute(:password, password)
rescue ::BCrypt::Errors::InvalidHash
false
end
end
User.prepend_mod_with('User')

View File

@ -23,8 +23,6 @@ module Ci
# rubocop: disable CodeReuse/ActiveRecord
def dependent_jobs
return legacy_dependent_jobs unless ::Feature.enabled?(:ci_requeue_with_dag_object_hierarchy, project)
ordered_by_dag(
@processable.pipeline.processables
.from_union(needs_dependent_jobs, stage_dependent_jobs)
@ -50,24 +48,6 @@ module Ci
).descendants
end
def legacy_skipped_jobs
@legacy_skipped_jobs ||= @processable.pipeline.processables.skipped
end
def legacy_dependent_jobs
ordered_by_dag(
legacy_stage_dependent_jobs.or(legacy_needs_dependent_jobs).ordered_by_stage.preload(:needs)
)
end
def legacy_stage_dependent_jobs
legacy_skipped_jobs.after_stage(@processable.stage_idx)
end
def legacy_needs_dependent_jobs
legacy_skipped_jobs.scheduling_type_dag.with_needs([@processable.name])
end
def ordered_by_dag(jobs)
sorted_job_names = sort_jobs(jobs).each_with_index.to_h

View File

@ -29,7 +29,7 @@ module Ci
user: current_user,
sha: sha).execute
result.valid? ? result.root_variables_with_data : {}
result.valid? ? result.root_variables_with_prefill_data : {}
end
# Required for ReactiveCaching, it is also used in `reactive_cache_worker_finder`

View File

@ -11,6 +11,7 @@ module MergeRequests
reset_approvals_cache(merge_request)
merge_request_activity_counter.track_approve_mr_action(user: current_user, merge_request: merge_request)
trigger_merge_request_merge_status_updated(merge_request)
# Approval side effects (things not required to be done immediately but
# should happen after a successful approval) should be done asynchronously

View File

@ -249,6 +249,10 @@ module MergeRequests
def trigger_merge_request_reviewers_updated(merge_request)
GraphqlTriggers.merge_request_reviewers_updated(merge_request)
end
def trigger_merge_request_merge_status_updated(merge_request)
GraphqlTriggers.merge_request_merge_status_updated(merge_request)
end
end
end

View File

@ -17,6 +17,7 @@ module MergeRequests
reset_approvals_cache(merge_request)
create_note(merge_request)
merge_request_activity_counter.track_unapprove_mr_action(user: current_user)
trigger_merge_request_merge_status_updated(merge_request)
end
success

View File

@ -16,6 +16,8 @@ module SystemNotes
def self.issuable_events
{
assigned: s_('IssuableEvents|assigned to'),
unassigned: s_('IssuableEvents|unassigned'),
review_requested: s_('IssuableEvents|requested review from'),
review_request_removed: s_('IssuableEvents|removed review request for')
}.freeze
@ -83,7 +85,7 @@ module SystemNotes
#
# "assigned to @user1 additionally to @user2"
#
# "assigned to @user1, @user2 and @user3 and unassigned from @user4 and @user5"
# "assigned to @user1, @user2 and @user3 and unassigned @user4 and @user5"
#
# "assigned to @user1 and @user2"
#
@ -94,8 +96,8 @@ module SystemNotes
text_parts = []
Gitlab::I18n.with_default_locale do
text_parts << "assigned to #{added_users.map(&:to_reference).to_sentence}" if added_users.any?
text_parts << "unassigned #{unassigned_users.map(&:to_reference).to_sentence}" if unassigned_users.any?
text_parts << "#{self.class.issuable_events[:assigned]} #{added_users.map(&:to_reference).to_sentence}" if added_users.any?
text_parts << "#{self.class.issuable_events[:unassigned]} #{unassigned_users.map(&:to_reference).to_sentence}" if unassigned_users.any?
end
body = text_parts.join(' and ')

View File

@ -1,8 +1,8 @@
---
name: ci_requeue_with_dag_object_hierarchy
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97156
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/373148
milestone: '15.4'
name: ci_raw_variables_in_yaml_config
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/98420
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/375034
milestone: '15.6'
type: development
group: group::pipeline authoring
default_enabled: true
default_enabled: false

View File

@ -1,8 +1,8 @@
---
name: pbkdf2_password_encryption
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/91622
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/367147
milestone: '15.2'
name: index_user_callback
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/101326
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/378364
milestone: '15.6'
type: development
group: group::authentication and authorization
group: group::global search
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: pbkdf2_password_encryption_write
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/91622
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/367147
milestone: '15.2'
type: development
group: group::authentication and authorization
default_enabled: false

View File

@ -178,14 +178,6 @@ Devise.setup do |config|
# reset. Defaults to true, so a user is signed in automatically after a reset.
config.sign_in_after_reset_password = false
# ==> Configuration for :encryptable
# Allow you to use another encryption algorithm besides bcrypt (default). You can use
# :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1,
# :authlogic_sha512 (then you should set stretches above to 20 for default behavior)
# and :restful_authentication_sha1 (then you should set stretches to 10, and copy
# REST_AUTH_SITE_KEY to pepper)
config.encryptor = :pbkdf2_sha512
# Authentication through token does not store user in session and needs
# to be supplied on each request. Useful if you are using the token as API token.
config.skip_session_storage << :token_auth

View File

# Whether this MR carries a feature label (addition or enhancement),
# i.e. is expected to ship with a documentation update.
def feature_mr?
  feature_labels = %w[feature::addition feature::enhancement]
  helper.mr_labels.any? { |label| feature_labels.include?(label) }
end
# Maps a repository docs path (e.g. "doc/ci/pipelines.md") to its published
# URL on docs.gitlab.com (e.g. "https://docs.gitlab.com/ee/ci/pipelines/").
#
# Anchored regexes are used instead of bare String#sub so that paths merely
# *containing* "doc/" or "index.md" are not corrupted — e.g. the old
# unanchored version turned "doc/administration/reindex.md" into ".../re".
def doc_path_to_url(path)
  path
    .sub(%r{\Adoc/}, "https://docs.gitlab.com/ee/")
    .sub(%r{(\A|/)index\.md\z}, '\1')
    .sub(%r{\.md\z}, "/")
end
DOCUMENTATION_UPDATE_MISSING = <<~MSG
~"feature::addition" and ~"feature::enhancement" merge requests normally have a documentation change. Consider adding a documentation update or confirming the documentation plan with the [Technical Writer counterpart](https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments).
@ -31,7 +35,7 @@ markdown(<<~MARKDOWN)
The following files require a review from a technical writer:
* #{docs_paths_to_review.map { |path| "`#{path}`" }.join("\n* ")}
* #{docs_paths_to_review.map { |path| "`#{path}` ([Link to current live version](#{doc_path_to_url(path)}))" }.join("\n* ")}
The review does not need to block merging this merge request. See the:

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
# Finalizes the 'RenameTaskSystemNoteToChecklistItem' batched background
# migration so that later schema changes can rely on it being complete.
class EnsureTaskNoteRenamingBackgroundMigrationFinished < Gitlab::Database::Migration[2.0]
  disable_ddl_transaction!

  restrict_gitlab_migration gitlab_schema: :gitlab_main

  MIGRATION = 'RenameTaskSystemNoteToChecklistItem'

  def up
    # Ensures the batched migration over system_note_metadata.id has
    # finished before this deploy proceeds.
    ensure_batched_background_migration_is_finished(
      job_class_name: MIGRATION,
      table_name: :system_note_metadata,
      column_name: :id,
      job_arguments: []
    )
  end

  def down
    # noop — finalizing a background migration cannot be undone.
  end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Drops the temporary partial index that supported the task-note renaming
# background migration, which is no longer needed once that migration is done.
class DropTmpIndexSystemNoteMetadataOnIdWhereTask < Gitlab::Database::Migration[2.0]
  disable_ddl_transaction!

  INDEX_NAME = 'tmp_index_system_note_metadata_on_id_where_task'

  def up
    remove_concurrent_index_by_name :system_note_metadata, INDEX_NAME
  end

  def down
    # Recreate the partial index so this migration stays reversible.
    add_concurrent_index :system_note_metadata, [:id, :action], where: "action = 'task'", name: INDEX_NAME
  end
end

View File

@ -0,0 +1 @@
de28d291a4a49dcb1743466ce61d95e47c28bdf293731e446b7b43d370d76e36

View File

@ -0,0 +1 @@
fb64884e988fb0f3589fd189780f3ac5358d06b7599243935f1d4c3dd7e794fc

View File

@ -31137,8 +31137,6 @@ CREATE INDEX tmp_index_on_vulnerabilities_non_dismissed ON vulnerabilities USING
CREATE INDEX tmp_index_project_statistics_cont_registry_size ON project_statistics USING btree (project_id) WHERE (container_registry_size = 0);
CREATE INDEX tmp_index_system_note_metadata_on_id_where_task ON system_note_metadata USING btree (id, action) WHERE ((action)::text = 'task'::text);
CREATE INDEX tmp_index_vulnerability_occurrences_on_id_and_scanner_id ON vulnerability_occurrences USING btree (id, scanner_id) WHERE (report_type = ANY (ARRAY[7, 99]));
CREATE UNIQUE INDEX uniq_pkgs_deb_grp_architectures_on_distribution_id_and_name ON packages_debian_group_architectures USING btree (distribution_id, name);

View File

@ -6,9 +6,6 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# IP whitelist **(FREE SELF)**
NOTE:
We intend to [rename IP whitelist as `IP allowlist`](https://gitlab.com/groups/gitlab-org/-/epics/3478).
GitLab provides some [monitoring endpoints](../../user/admin_area/monitoring/health_check.md)
that provide health check information when probed.

View File

@ -215,7 +215,7 @@ For installations from source:
RAILS_ENV=production sudo -u git -H bundle exec rake gitlab:dependency_proxy:migrate
```
You can optionally track progress and verify that all packages migrated successfully using the
You can optionally track progress and verify that all Dependency Proxy blobs and manifests migrated successfully using the
[PostgreSQL console](https://docs.gitlab.com/omnibus/settings/database.html#connecting-to-the-bundled-postgresql-database):
- For Omnibus GitLab instances: `sudo gitlab-rails dbconsole`

View File

@ -135,7 +135,7 @@ For GitLab 13.8 and earlier versions, you can use a workaround for the Rake task
end
```
You can optionally track progress and verify that all packages migrated successfully using the
You can optionally track progress and verify that all Terraform state files migrated successfully using the
[PostgreSQL console](https://docs.gitlab.com/omnibus/settings/database.html#connecting-to-the-bundled-postgresql-database):
- `sudo gitlab-rails dbconsole` for Omnibus GitLab instances.

View File

@ -193,3 +193,11 @@ A response of `{"message":"404 Not Found"}` when triggering a pipeline might be
by using a [personal access token](../../user/profile/personal_access_tokens.md)
instead of a trigger token. [Create a new trigger token](#create-a-trigger-token)
and use it instead of the personal access token.
### `The requested URL returned error: 400` when triggering a pipeline
If you attempt to trigger a pipeline by using a `ref` that is a branch name that
doesn't exist, GitLab returns `The requested URL returned error: 400`.
For example, you might accidentally use `main` for the branch name in a project that
uses a different branch name for its default branch.

View File

@ -57,6 +57,13 @@ FLAG:
<This feature is not ready for production use.>
```
A `FLAG` note renders on the GitLab documentation site as:
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available, ask an administrator to [enable the feature flag](../../administration/feature_flags.md) named `example_flag`.
On GitLab.com, this feature is not available.
This feature is not ready for production use.
### Self-managed GitLab availability information
| If the feature is... | Use this text |

View File

@ -849,105 +849,6 @@ You can then count each user that performed any combination of these actions.
To add data for aggregated metrics to the Service Ping payload,
create metric YAML definition file following [Aggregated metric instrumentation guide](metrics_instrumentation.md#aggregated-metrics).
### (DEPRECATED) Defining aggregated metric via aggregated metric YAML config file
WARNING:
This feature was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/98206) in GitLab 15.5
and is planned for removal in 15.5. Use [metrics definition YAMLs](https://gitlab.com/gitlab-org/gitlab/-/issues/370963) instead.
To add data for aggregated metrics to the Service Ping payload, add a corresponding definition to:
- [`config/metrics/aggregates/*.yaml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/metrics/aggregates/) for metrics available in the Community Edition.
- [`ee/config/metrics/aggregates/*.yaml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/config/metrics/aggregates/) for metrics available in the Enterprise Edition.
Each aggregate definition includes following parts:
- `name`: Unique name under which the aggregate metric is added to the Service Ping payload.
- `operator`: Operator that defines how the aggregated metric data is counted. Available operators are:
- `OR`: Removes duplicates and counts all entries that triggered any of listed events.
- `AND`: Removes duplicates and counts all elements that were observed triggering all of the following events.
- `time_frame`: One or more valid time frames. Use these to limit the data included in aggregated metric to events within a specific date-range. Valid time frames are:
- `7d`: Last seven days of data.
- `28d`: Last twenty eight days of data.
- `all`: All historical data, only available for `database` sourced aggregated metrics.
- `source`: Data source used to collect all events data included in aggregated metric. Valid data sources are:
- [`database`](#database-sourced-aggregated-metrics)
- [`redis`](#redis-sourced-aggregated-metrics)
- `events`: list of events names to aggregate into metric. All events in this list must
rely on the same data source. Additional data source requirements are described in the
[Database sourced aggregated metrics](#database-sourced-aggregated-metrics) and
[Redis sourced aggregated metrics](#redis-sourced-aggregated-metrics) sections.
- `feature_flag`: Name of [development feature flag](../feature_flags/index.md#development-type)
that is checked before metrics aggregation is performed. Corresponding feature flag
should have `default_enabled` attribute set to `false`. The `feature_flag` attribute
is optional and can be omitted. When `feature_flag` is missing, no feature flag is checked.
Example aggregated metric entries:
```yaml
- name: example_metrics_union
operator: OR
events:
- 'users_expanding_secure_security_report'
- 'users_expanding_testing_code_quality_report'
- 'users_expanding_testing_accessibility_report'
source: redis
time_frame:
- 7d
- 28d
- name: example_metrics_intersection
operator: AND
source: database
time_frame:
- 28d
- all
events:
- 'dependency_scanning_pipeline_all_time'
- 'container_scanning_pipeline_all_time'
feature_flag: example_aggregated_metric
```
Aggregated metrics collected in `7d` and `28d` time frames are added into Service Ping payload under the `aggregated_metrics` sub-key in the `counts_weekly` and `counts_monthly` top level keys.
```ruby
{
:counts_monthly => {
:deployments => 1003,
:successful_deployments => 78,
:failed_deployments => 275,
:packages => 155,
:personal_snippets => 2106,
:project_snippets => 407,
:aggregated_metrics => {
:example_metrics_union => 7,
:example_metrics_intersection => 2
},
:snippets => 2513
}
}
```
Aggregated metrics for `all` time frame are present in the `count` top level key, with the `aggregate_` prefix added to their name.
For example:
`example_metrics_intersection`
Becomes:
`counts.aggregate_example_metrics_intersection`
```ruby
{
:counts => {
:deployments => 11003,
:successful_deployments => 178,
:failed_deployments => 1275,
:aggregate_example_metrics_intersection => 12
}
}
```
### Redis sourced aggregated metrics
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/45979) in GitLab 13.6.
@ -963,9 +864,7 @@ you must fulfill the following requirements:
### Database sourced aggregated metrics
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/52784) in GitLab 13.9.
> - It's [deployed behind a feature flag](../../user/feature_flags.md), disabled by default.
> - It's enabled on GitLab.com.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/52784) in GitLab 13.9.
To declare an aggregate of metrics based on events collected from database, follow
these steps:
@ -1018,25 +917,9 @@ end
#### Add new aggregated metric definition
After all metrics are persisted, you can add an aggregated metric definition at
[`aggregated_metrics/`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/metrics/aggregates/).
After all metrics are persisted, you can add an aggregated metric definition following [Aggregated metric instrumentation guide](metrics_instrumentation.md#aggregated-metrics).
To declare the aggregate of metrics collected with [Estimated Batch Counters](#estimated-batch-counters),
you must fulfill the following requirements:
- Metrics names listed in the `events:` attribute, have to use the same names you passed in the `metric_name` argument while persisting metrics in previous step.
- Every metric listed in the `events:` attribute, has to be persisted for **every** selected `time_frame:` value.
Example definition:
```yaml
- name: example_metrics_intersection_database_sourced
operator: AND
source: database
events:
- 'dependency_scanning_pipeline'
- 'container_scanning_pipeline'
time_frame:
- 28d
- all
```

View File

@ -11,7 +11,8 @@ GitLab administrators can configure how passwords and OAuth tokens are stored.
## Password storage
> PBKDF2 and SHA512 [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/360658) in GitLab 15.2 [with flags](../administration/feature_flags.md) named `pbkdf2_password_encryption` and `pbkdf2_password_encryption_write`. Disabled by default.
> - PBKDF2+SHA512 [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/360658) in GitLab 15.2 [with flags](../administration/feature_flags.md) named `pbkdf2_password_encryption` and `pbkdf2_password_encryption_write`. Disabled by default.
> - Feature flags [removed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/101691) in GitLab 15.6 and PBKDF2+SHA512 was made available to all GitLab instances running in [FIPS mode](../development/fips_compliance.md).
GitLab stores user passwords in a hashed format to prevent passwords from being
stored as plain text.
@ -23,17 +24,9 @@ library to hash user passwords. Created password hashes have these attributes:
- **BCrypt**: By default, the [`bcrypt`](https://en.wikipedia.org/wiki/Bcrypt) hashing
function is used to generate the hash of the provided password. This cryptographic hashing function is
strong and industry-standard.
- **PBKDF2 and SHA512**: Starting in GitLab 15.2, PBKDF2 and SHA512 are supported
behind the following feature flags (disabled by default):
- `pbkdf2_password_encryption` - Enables reading and comparison of PBKDF2 + SHA512
hashed passwords and supports fallback for BCrypt hashed passwords.
- `pbkdf2_password_encryption_write` - Enables new passwords to be saved
using PBKDF2 and SHA512, and existing BCrypt passwords to be migrated when users sign in.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available,
ask an administrator to [enable the feature flags](../administration/feature_flags.md) named `pbkdf2_password_encryption` and `pbkdf2_password_encryption_write`.
- **PBKDF2+SHA512**: PBKDF2+SHA512 is supported:
- In GitLab 15.2 to GitLab 15.5 when `pbkdf2_password_encryption` and `pbkdf2_password_encryption_write` [feature flags](../administration/feature_flags.md) are enabled.
- In GitLab 15.6 and later when [FIPS mode](../development/fips_compliance.md) is enabled (feature flags are not required).
- **Stretching**: Password hashes are [stretched](https://en.wikipedia.org/wiki/Key_stretching)
to harden against brute-force attacks. By default, GitLab uses a stretching
factor of 10 for BCrypt and 20,000 for PBKDF2 + SHA512.

View File

@ -299,7 +299,7 @@ hub_docker_quota_check:
## Troubleshooting
## Authentication error: "HTTP Basic: Access Denied"
### Authentication error: "HTTP Basic: Access Denied"
If you receive an `HTTP Basic: Access denied` error when authenticating against the Dependency Proxy, refer to the [two-factor authentication troubleshooting guide](../../profile/account/two_factor_authentication.md#troubleshooting).

View File

@ -73,6 +73,10 @@ module Gitlab
root.variables_entry.value_with_data
end
def variables_with_prefill_data
root.variables_entry.value_with_prefill_data
end
def stages
root.stages_value
end

View File

@ -60,6 +60,7 @@ module Gitlab
entry :variables, ::Gitlab::Ci::Config::Entry::Variables,
description: 'Environment variables available for this job.',
metadata: { allowed_value_data: %i[value expand] },
inherit: false
entry :inherit, ::Gitlab::Ci::Config::Entry::Inherit,

View File

@ -50,7 +50,7 @@ module Gitlab
entry :variables, Entry::Variables,
description: 'Environment variables that will be used.',
metadata: { allowed_value_data: %i[value description], allow_array_value: true },
metadata: { allowed_value_data: %i[value description expand], allow_array_value: true },
reserved: true
entry :stages, Entry::Stages,

View File

@ -33,6 +33,10 @@ module Gitlab
def value_with_data
{ value: @config.to_s }
end
def value_with_prefill_data
value_with_data
end
end
class ComplexVariable < ::Gitlab::Config::Entry::Node
@ -48,6 +52,9 @@ module Gitlab
validates :key, alphanumeric: true
validates :config_value, alphanumeric: true, allow_nil: false, if: :config_value_defined?
validates :config_description, alphanumeric: true, allow_nil: false, if: :config_description_defined?
validates :config_expand, boolean: true,
allow_nil: false,
if: -> { ci_raw_variables_in_yaml_config_enabled? && config_expand_defined? }
validate do
allowed_value_data = Array(opt(:allowed_value_data))
@ -67,7 +74,22 @@ module Gitlab
end
def value_with_data
{ value: value, description: config_description }.compact
if ci_raw_variables_in_yaml_config_enabled?
{
value: value,
raw: (!config_expand if config_expand_defined?)
}.compact
else
{
value: value
}.compact
end
end
def value_with_prefill_data
value_with_data.merge(
description: config_description
).compact
end
def config_value
@ -78,6 +100,10 @@ module Gitlab
@config[:description]
end
def config_expand
@config[:expand]
end
def config_value_defined?
config.key?(:value)
end
@ -85,6 +111,14 @@ module Gitlab
def config_description_defined?
config.key?(:description)
end
def config_expand_defined?
config.key?(:expand)
end
def ci_raw_variables_in_yaml_config_enabled?
YamlProcessor::FeatureFlags.enabled?(:ci_raw_variables_in_yaml_config)
end
end
class ComplexArrayVariable < ComplexVariable
@ -110,8 +144,10 @@ module Gitlab
config_value.first
end
def value_with_data
super.merge(value_options: config_value).compact
def value_with_prefill_data
super.merge(
value_options: config_value
).compact
end
end

View File

@ -29,6 +29,12 @@ module Gitlab
end
end
def value_with_prefill_data
@entries.to_h do |key, entry|
[key.to_s, entry.value_with_prefill_data]
end
end
private
def composable_class(_name, _config)

View File

@ -7,7 +7,7 @@ module Gitlab
class YamlProcessor
class Result
attr_reader :errors, :warnings,
:root_variables, :root_variables_with_data,
:root_variables, :root_variables_with_prefill_data,
:stages, :jobs,
:workflow_rules, :workflow_name
@ -64,8 +64,13 @@ module Gitlab
private
def assign_valid_attributes
@root_variables = transform_to_array(@ci_config.variables)
@root_variables_with_data = @ci_config.variables_with_data
@root_variables = if YamlProcessor::FeatureFlags.enabled?(:ci_raw_variables_in_yaml_config)
transform_to_array(@ci_config.variables_with_data)
else
transform_to_array(@ci_config.variables)
end
@root_variables_with_prefill_data = @ci_config.variables_with_prefill_data
@stages = @ci_config.stages
@jobs = @ci_config.normalized_jobs

View File

@ -39,12 +39,10 @@ module Gitlab
def parse_body(issue_event, assignee_id)
assignee = User.find(assignee_id).to_reference
Gitlab::I18n.with_default_locale do
if issue_event.event == "unassigned"
"unassigned #{assignee}"
else
"assigned to #{assignee}"
end
if issue_event.event == 'unassigned'
"#{SystemNotes::IssuablesService.issuable_events[:unassigned]} #{assignee}"
else
"#{SystemNotes::IssuablesService.issuable_events[:assigned]} #{assignee}"
end
end
end

View File

@ -1,31 +1,52 @@
# frozen_string_literal: true
require 'labkit/logging'
module Gitlab
class JsonLogger < ::Gitlab::Logger
def self.file_name_noext
raise NotImplementedError
end
def format_message(severity, timestamp, progname, message)
data = default_attributes
data[:severity] = severity
data[:time] = timestamp.utc.iso8601(3)
data[Labkit::Correlation::CorrelationId::LOG_KEY] = Labkit::Correlation::CorrelationId.current_id
case message
when String
data[:message] = message
when Hash
data.merge!(message)
class JsonLogger < ::Labkit::Logging::JsonLogger
class << self
def file_name_noext
raise NotImplementedError, "JsonLogger implementations must provide file_name_noext implementation"
end
Gitlab::Json.dump(data) + "\n"
def file_name
file_name_noext + ".log"
end
def debug(message)
build.debug(message)
end
def error(message)
build.error(message)
end
def warn(message)
build.warn(message)
end
def info(message)
build.info(message)
end
def build
Gitlab::SafeRequestStore[cache_key] ||=
new(full_log_path, level: log_level)
end
def cache_key
"logger:" + full_log_path.to_s
end
def full_log_path
Rails.root.join("log", file_name)
end
end
protected
private
def default_attributes
{}
# Override Labkit's default impl, which uses the default Ruby platform json module.
def dump_json(data)
Gitlab::Json.dump(data)
end
end
end

View File

@ -4,9 +4,9 @@ module Gitlab
module Usage
class MetricDefinition
METRIC_SCHEMA_PATH = Rails.root.join('config', 'metrics', 'schema.json')
SKIP_VALIDATION_STATUSES = %w[deprecated removed].to_set.freeze
AVAILABLE_STATUSES = %w[active data_available implemented deprecated broken].to_set.freeze
VALID_SERVICE_PING_STATUSES = %w[active data_available implemented deprecated broken].to_set.freeze
SKIP_VALIDATION_STATUS = 'removed'
AVAILABLE_STATUSES = %w[active broken].to_set.freeze
VALID_SERVICE_PING_STATUSES = %w[active broken].to_set.freeze
InvalidError = Class.new(RuntimeError)
@ -144,7 +144,7 @@ module Gitlab
end
def skip_validation?
!!attributes[:skip_validation] || @skip_validation || SKIP_VALIDATION_STATUSES.include?(attributes[:status])
!!attributes[:skip_validation] || @skip_validation || attributes[:status] == SKIP_VALIDATION_STATUS
end
end
end

View File

@ -7,14 +7,13 @@ module Gitlab
UNION_OF_AGGREGATED_METRICS = 'OR'
INTERSECTION_OF_AGGREGATED_METRICS = 'AND'
ALLOWED_METRICS_AGGREGATIONS = [UNION_OF_AGGREGATED_METRICS, INTERSECTION_OF_AGGREGATED_METRICS].freeze
AGGREGATED_METRICS_PATH = Rails.root.join('config/metrics/aggregates/*.yml')
AggregatedMetricError = Class.new(StandardError)
UnknownAggregationOperator = Class.new(AggregatedMetricError)
UnknownAggregationSource = Class.new(AggregatedMetricError)
DisallowedAggregationTimeFrame = Class.new(AggregatedMetricError)
DATABASE_SOURCE = 'database'
REDIS_SOURCE = 'redis'
REDIS_SOURCE = 'redis_hll'
SOURCES = {
DATABASE_SOURCE => Sources::PostgresHll,

View File

@ -8,22 +8,9 @@ module Gitlab
include Gitlab::Usage::TimeFrame
def initialize(recorded_at)
@aggregated_metrics = load_metrics(AGGREGATED_METRICS_PATH)
@recorded_at = recorded_at
end
def all_time_data
aggregated_metrics_data(Gitlab::Usage::TimeFrame::ALL_TIME_TIME_FRAME_NAME)
end
def monthly_data
aggregated_metrics_data(Gitlab::Usage::TimeFrame::TWENTY_EIGHT_DAYS_TIME_FRAME_NAME)
end
def weekly_data
aggregated_metrics_data(Gitlab::Usage::TimeFrame::SEVEN_DAYS_TIME_FRAME_NAME)
end
def calculate_count_for_aggregation(aggregation:, time_frame:)
with_validate_configuration(aggregation, time_frame) do
source = SOURCES[aggregation[:source]]
@ -40,7 +27,7 @@ module Gitlab
private
attr_accessor :aggregated_metrics, :recorded_at
attr_accessor :recorded_at
def aggregated_metrics_data(time_frame)
aggregated_metrics.each_with_object({}) do |aggregation, data|
@ -83,16 +70,6 @@ module Gitlab
Gitlab::Utils::UsageData::FALLBACK
end
def load_metrics(wildcard)
Dir[wildcard].each_with_object([]) do |path, metrics|
metrics.push(*load_yaml_from_path(path))
end
end
def load_yaml_from_path(path)
YAML.safe_load(File.read(path), aliases: true)&.map(&:with_indifferent_access)
end
def time_constraints(time_frame)
case time_frame
when Gitlab::Usage::TimeFrame::TWENTY_EIGHT_DAYS_TIME_FRAME_NAME
@ -108,5 +85,3 @@ module Gitlab
end
end
end
Gitlab::Usage::Metrics::Aggregates::Aggregate.prepend_mod_with('Gitlab::Usage::Metrics::Aggregates::Aggregate')

View File

@ -25,7 +25,7 @@ module Gitlab
def initialize(metric_definition)
super
@source = parse_data_source_to_legacy_value(metric_definition)
@source = metric_definition[:data_source]
@aggregate = options.fetch(:aggregate, {})
end
@ -48,15 +48,6 @@ module Gitlab
attr_accessor :source, :aggregate
# TODO: This method is a temporary measure that
# handles backwards compatibility until
# point 5 from is resolved https://gitlab.com/gitlab-org/gitlab/-/issues/370963#implementation
def parse_data_source_to_legacy_value(metric_definition)
return 'redis' if metric_definition[:data_source] == 'redis_hll'
metric_definition[:data_source]
end
def aggregate_config
{
source: source,

View File

@ -30,7 +30,6 @@ module Gitlab
deployment_minimum_id
deployment_maximum_id
auth_providers
aggregated_metrics
recorded_at
).freeze
@ -621,16 +620,6 @@ module Gitlab
{ redis_hll_counters: ::Gitlab::UsageDataCounters::HLLRedisCounter.unique_events_data }
end
def aggregated_metrics_data
{
counts_weekly: { aggregated_metrics: aggregated_metrics.weekly_data },
counts_monthly: { aggregated_metrics: aggregated_metrics.monthly_data },
counts: aggregated_metrics
.all_time_data
.to_h { |key, value| ["aggregate_#{key}".to_sym, value.round] }
}
end
def action_monthly_active_users(time_period)
date_range = { date_from: time_period[:created_at].first, date_to: time_period[:created_at].last }
@ -677,7 +666,6 @@ module Gitlab
.merge(usage_activity_by_stage)
.merge(usage_activity_by_stage(:usage_activity_by_stage_monthly, monthly_time_range_db_params))
.merge(redis_hll_counters)
.deep_merge(aggregated_metrics_data)
end
def metric_time_period(time_period)
@ -694,10 +682,6 @@ module Gitlab
end
end
def aggregated_metrics
@aggregated_metrics ||= ::Gitlab::Usage::Metrics::Aggregates::Aggregate.new(recorded_at)
end
def event_monthly_active_users(date_range)
data = {
action_monthly_active_users_project_repo: Gitlab::UsageDataCounters::TrackUniqueEvents::PUSH_ACTION,

View File

@ -22424,12 +22424,18 @@ msgstr ""
msgid "Is using seat"
msgstr ""
msgid "IssuableEvents|assigned to"
msgstr ""
msgid "IssuableEvents|removed review request for"
msgstr ""
msgid "IssuableEvents|requested review from"
msgstr ""
msgid "IssuableEvents|unassigned"
msgstr ""
msgid "IssuableStatus|%{wi_type} created %{created_at} by "
msgstr ""

View File

@ -1,95 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'aggregated metrics' do
RSpec::Matchers.define :be_known_event do
match do |event|
Gitlab::UsageDataCounters::HLLRedisCounter.known_event?(event)
end
failure_message do |event|
"Event with name: `#{event}` can not be found within `#{Gitlab::UsageDataCounters::HLLRedisCounter::KNOWN_EVENTS_PATH}`"
end
end
RSpec::Matchers.define :has_known_source do
match do |aggregate|
Gitlab::Usage::Metrics::Aggregates::SOURCES.include?(aggregate[:source])
end
failure_message do |aggregate|
"Aggregate with name: `#{aggregate[:name]}` uses not allowed source `#{aggregate[:source]}`"
end
end
RSpec::Matchers.define :have_known_time_frame do
allowed_time_frames = [
Gitlab::Usage::TimeFrame::ALL_TIME_TIME_FRAME_NAME,
Gitlab::Usage::TimeFrame::TWENTY_EIGHT_DAYS_TIME_FRAME_NAME,
Gitlab::Usage::TimeFrame::SEVEN_DAYS_TIME_FRAME_NAME
]
match do |aggregate|
(aggregate[:time_frame] - allowed_time_frames).empty?
end
failure_message do |aggregate|
"Aggregate with name: `#{aggregate[:name]}` uses not allowed time_frame`#{aggregate[:time_frame] - allowed_time_frames}`"
end
end
let_it_be(:known_events) do
Gitlab::UsageDataCounters::HLLRedisCounter.known_events
end
Gitlab::Usage::Metrics::Aggregates::Aggregate.new(Time.current).send(:aggregated_metrics).tap do |aggregated_metrics|
it 'all events has unique name' do
event_names = aggregated_metrics&.map { |event| event[:name] }
expect(event_names).to eq(event_names&.uniq)
end
it 'all aggregated metrics has known source' do
expect(aggregated_metrics).to all has_known_source
end
it 'all aggregated metrics has known time frame' do
expect(aggregated_metrics).to all have_known_time_frame
end
aggregated_metrics&.select { |agg| agg[:source] == Gitlab::Usage::Metrics::Aggregates::REDIS_SOURCE }&.each do |aggregate|
context "for #{aggregate[:name]} aggregate of #{aggregate[:events].join(' ')}" do
let_it_be(:events_records) { known_events.select { |event| aggregate[:events].include?(event[:name]) } }
it "does not include 'all' time frame for Redis sourced aggregate" do
expect(aggregate[:time_frame]).not_to include(Gitlab::Usage::TimeFrame::ALL_TIME_TIME_FRAME_NAME)
end
it "only refers to known events", :skip do
expect(aggregate[:events]).to all be_known_event
end
it "has expected structure" do
expect(aggregate.keys).to include(*%w[name operator events])
end
it "uses allowed aggregation operators" do
expect(Gitlab::Usage::Metrics::Aggregates::ALLOWED_METRICS_AGGREGATIONS).to include aggregate[:operator]
end
it "uses events from the same Redis slot" do
event_slots = events_records.map { |event| event[:redis_slot] }.uniq
expect(event_slots).to contain_exactly(be_present)
end
it "uses events with the same aggregation period" do
event_slots = events_records.map { |event| event[:aggregation] }.uniq
expect(event_slots).to contain_exactly(be_present)
end
end
end
end
end

View File

@ -5,5 +5,9 @@ FactoryBot.define do
user
emoji { 'coffee' }
message { 'I crave coffee' }
trait :busy do
availability { 'busy' }
end
end
end

View File

@ -201,18 +201,32 @@ RSpec.describe "Group Runners" do
end
describe "Group runner edit page", :js do
let!(:group_runner) do
create(:ci_runner, :group, groups: [group])
context 'when updating a group runner' do
let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group]) }
before do
visit edit_group_runner_path(group, group_runner)
wait_for_requests
end
it_behaves_like 'submits edit runner form' do
let(:runner) { group_runner }
let(:runner_page_path) { group_runner_path(group, group_runner) }
end
end
before do
visit edit_group_runner_path(group, group_runner)
wait_for_requests
end
context 'when updating a project runner' do
let_it_be(:project_runner) { create(:ci_runner, :project, projects: [project]) }
it_behaves_like 'submits edit runner form' do
let(:runner) { group_runner }
let(:runner_page_path) { group_runner_path(group, group_runner) }
before do
visit edit_group_runner_path(group, project_runner)
wait_for_requests
end
it_behaves_like 'submits edit runner form' do
let(:runner) { project_runner }
let(:runner_page_path) { group_runner_path(group, project_runner) }
end
end
end
end

View File

@ -60,6 +60,11 @@ describe('JobArtifactsTable component', () => {
data: { project: { jobs: { nodes: enoughJobsToPaginate } } },
};
const job = getJobArtifactsResponse.data.project.jobs.nodes[0];
const archiveArtifact = job.artifacts.nodes.find(
(artifact) => artifact.fileType === ARCHIVE_FILE_TYPE,
);
const createComponent = (
handlers = {
getJobArtifactsQuery: jest.fn().mockResolvedValue(getJobArtifactsResponse),
@ -109,11 +114,6 @@ describe('JobArtifactsTable component', () => {
});
describe('job details', () => {
const job = getJobArtifactsResponse.data.project.jobs.nodes[0];
const archiveArtifact = job.artifacts.nodes.find(
(artifact) => artifact.fileType === ARCHIVE_FILE_TYPE,
);
beforeEach(async () => {
createComponent();
@ -163,10 +163,66 @@ describe('JobArtifactsTable component', () => {
it('shows the created time', () => {
expect(findCreated().text()).toBe('5 years ago');
});
});
describe('download button', () => {
it('is a link to the download path for the archive artifact', async () => {
createComponent();
await waitForPromises();
it('shows the download, browse, and delete buttons', () => {
expect(findDownloadButton().attributes('href')).toBe(archiveArtifact.downloadPath);
});
it('is disabled when there is no download path', async () => {
const jobWithoutDownloadPath = {
...job,
archive: { downloadPath: null },
};
createComponent(
{ getJobArtifactsQuery: jest.fn() },
{ jobArtifacts: { nodes: [jobWithoutDownloadPath] } },
);
await waitForPromises();
expect(findDownloadButton().attributes('disabled')).toBe('disabled');
});
});
describe('browse button', () => {
it('is a link to the browse path for the job', async () => {
createComponent();
await waitForPromises();
expect(findBrowseButton().attributes('href')).toBe(job.browseArtifactsPath);
});
it('is disabled when there is no browse path', async () => {
const jobWithoutBrowsePath = {
...job,
browseArtifactsPath: null,
};
createComponent(
{ getJobArtifactsQuery: jest.fn() },
{ jobArtifacts: { nodes: [jobWithoutBrowsePath] } },
);
await waitForPromises();
expect(findBrowseButton().attributes('disabled')).toBe('disabled');
});
});
describe('delete button', () => {
it('shows a disabled delete button for now (coming soon)', async () => {
createComponent();
await waitForPromises();
expect(findDeleteButton().attributes('disabled')).toBe('disabled');
});
});

View File

@ -1,87 +1,8 @@
import { DOMSerializer } from 'prosemirror-model';
// TODO: DRY up duplication with spec/frontend/content_editor/services/markdown_serializer_spec.js
// See https://gitlab.com/groups/gitlab-org/-/epics/7719#plan
import Audio from '~/content_editor/extensions/audio';
import Blockquote from '~/content_editor/extensions/blockquote';
import Bold from '~/content_editor/extensions/bold';
import BulletList from '~/content_editor/extensions/bullet_list';
import Code from '~/content_editor/extensions/code';
import CodeBlockHighlight from '~/content_editor/extensions/code_block_highlight';
import DescriptionItem from '~/content_editor/extensions/description_item';
import DescriptionList from '~/content_editor/extensions/description_list';
import Details from '~/content_editor/extensions/details';
import DetailsContent from '~/content_editor/extensions/details_content';
import Emoji from '~/content_editor/extensions/emoji';
import Figure from '~/content_editor/extensions/figure';
import FigureCaption from '~/content_editor/extensions/figure_caption';
import FootnoteDefinition from '~/content_editor/extensions/footnote_definition';
import FootnoteReference from '~/content_editor/extensions/footnote_reference';
import FootnotesSection from '~/content_editor/extensions/footnotes_section';
import Frontmatter from '~/content_editor/extensions/frontmatter';
import HardBreak from '~/content_editor/extensions/hard_break';
import Heading from '~/content_editor/extensions/heading';
import HorizontalRule from '~/content_editor/extensions/horizontal_rule';
import HTMLNodes from '~/content_editor/extensions/html_nodes';
import Image from '~/content_editor/extensions/image';
import InlineDiff from '~/content_editor/extensions/inline_diff';
import Italic from '~/content_editor/extensions/italic';
import Link from '~/content_editor/extensions/link';
import ListItem from '~/content_editor/extensions/list_item';
import OrderedList from '~/content_editor/extensions/ordered_list';
import ReferenceDefinition from '~/content_editor/extensions/reference_definition';
import Strike from '~/content_editor/extensions/strike';
import Table from '~/content_editor/extensions/table';
import TableCell from '~/content_editor/extensions/table_cell';
import TableHeader from '~/content_editor/extensions/table_header';
import TableRow from '~/content_editor/extensions/table_row';
import TableOfContents from '~/content_editor/extensions/table_of_contents';
import TaskItem from '~/content_editor/extensions/task_item';
import TaskList from '~/content_editor/extensions/task_list';
import Video from '~/content_editor/extensions/video';
import createMarkdownDeserializer from '~/content_editor/services/remark_markdown_deserializer';
import { createTestEditor } from 'jest/content_editor/test_utils';
import { createTiptapEditor } from 'jest/content_editor/test_utils';
const tiptapEditor = createTestEditor({
extensions: [
Audio,
Blockquote,
Bold,
BulletList,
Code,
CodeBlockHighlight,
DescriptionItem,
DescriptionList,
Details,
DetailsContent,
Emoji,
FootnoteDefinition,
FootnoteReference,
FootnotesSection,
Frontmatter,
Figure,
FigureCaption,
HardBreak,
Heading,
HorizontalRule,
...HTMLNodes,
Image,
InlineDiff,
Italic,
Link,
ListItem,
OrderedList,
ReferenceDefinition,
Strike,
Table,
TableCell,
TableHeader,
TableRow,
TableOfContents,
TaskItem,
TaskList,
Video,
],
});
const tiptapEditor = createTiptapEditor();
export const IMPLEMENTATION_ERROR_MSG = 'Error - check implementation';

View File

@ -1,4 +1,3 @@
import Audio from '~/content_editor/extensions/audio';
import Blockquote from '~/content_editor/extensions/blockquote';
import Bold from '~/content_editor/extensions/bold';
import BulletList from '~/content_editor/extensions/bullet_list';
@ -16,7 +15,6 @@ import FootnoteReference from '~/content_editor/extensions/footnote_reference';
import HardBreak from '~/content_editor/extensions/hard_break';
import Heading from '~/content_editor/extensions/heading';
import HorizontalRule from '~/content_editor/extensions/horizontal_rule';
import HTMLMarks from '~/content_editor/extensions/html_marks';
import HTMLNodes from '~/content_editor/extensions/html_nodes';
import Image from '~/content_editor/extensions/image';
import InlineDiff from '~/content_editor/extensions/inline_diff';
@ -34,53 +32,12 @@ import TableHeader from '~/content_editor/extensions/table_header';
import TableRow from '~/content_editor/extensions/table_row';
import TaskItem from '~/content_editor/extensions/task_item';
import TaskList from '~/content_editor/extensions/task_list';
import Video from '~/content_editor/extensions/video';
import markdownSerializer from '~/content_editor/services/markdown_serializer';
import remarkMarkdownDeserializer from '~/content_editor/services/remark_markdown_deserializer';
import { createTestEditor, createDocBuilder } from '../test_utils';
import { createTiptapEditor, createDocBuilder } from '../test_utils';
jest.mock('~/emoji');
const tiptapEditor = createTestEditor({
extensions: [
Audio,
Blockquote,
Bold,
BulletList,
Code,
CodeBlockHighlight,
DescriptionItem,
DescriptionList,
Details,
DetailsContent,
Emoji,
FootnoteDefinition,
FootnoteReference,
Figure,
FigureCaption,
HardBreak,
Heading,
HorizontalRule,
Image,
InlineDiff,
Italic,
Link,
ListItem,
OrderedList,
ReferenceDefinition,
Sourcemap,
Strike,
Table,
TableCell,
TableHeader,
TableRow,
TaskItem,
TaskList,
Video,
...HTMLMarks,
...HTMLNodes,
],
});
const tiptapEditor = createTiptapEditor([Sourcemap]);
const {
builders: {

View File

@ -5,6 +5,44 @@ import { Text } from '@tiptap/extension-text';
import { Editor } from '@tiptap/vue-2';
import { builders, eq } from 'prosemirror-test-builder';
import { nextTick } from 'vue';
import Audio from '~/content_editor/extensions/audio';
import Blockquote from '~/content_editor/extensions/blockquote';
import Bold from '~/content_editor/extensions/bold';
import BulletList from '~/content_editor/extensions/bullet_list';
import Code from '~/content_editor/extensions/code';
import CodeBlockHighlight from '~/content_editor/extensions/code_block_highlight';
import DescriptionItem from '~/content_editor/extensions/description_item';
import DescriptionList from '~/content_editor/extensions/description_list';
import Details from '~/content_editor/extensions/details';
import DetailsContent from '~/content_editor/extensions/details_content';
import Emoji from '~/content_editor/extensions/emoji';
import FootnoteDefinition from '~/content_editor/extensions/footnote_definition';
import FootnoteReference from '~/content_editor/extensions/footnote_reference';
import FootnotesSection from '~/content_editor/extensions/footnotes_section';
import Frontmatter from '~/content_editor/extensions/frontmatter';
import Figure from '~/content_editor/extensions/figure';
import FigureCaption from '~/content_editor/extensions/figure_caption';
import HardBreak from '~/content_editor/extensions/hard_break';
import Heading from '~/content_editor/extensions/heading';
import HorizontalRule from '~/content_editor/extensions/horizontal_rule';
import Image from '~/content_editor/extensions/image';
import InlineDiff from '~/content_editor/extensions/inline_diff';
import Italic from '~/content_editor/extensions/italic';
import Link from '~/content_editor/extensions/link';
import ListItem from '~/content_editor/extensions/list_item';
import OrderedList from '~/content_editor/extensions/ordered_list';
import ReferenceDefinition from '~/content_editor/extensions/reference_definition';
import Strike from '~/content_editor/extensions/strike';
import Table from '~/content_editor/extensions/table';
import TableCell from '~/content_editor/extensions/table_cell';
import TableHeader from '~/content_editor/extensions/table_header';
import TableRow from '~/content_editor/extensions/table_row';
import TableOfContents from '~/content_editor/extensions/table_of_contents';
import TaskItem from '~/content_editor/extensions/task_item';
import TaskList from '~/content_editor/extensions/task_list';
import Video from '~/content_editor/extensions/video';
import HTMLMarks from '~/content_editor/extensions/html_marks';
import HTMLNodes from '~/content_editor/extensions/html_nodes';
export const createDocBuilder = ({ tiptapEditor, names = {} }) => {
const docBuilders = builders(tiptapEditor.schema, {
@ -162,3 +200,48 @@ export const waitUntilNextDocTransaction = ({ tiptapEditor, action = () => {} })
action();
});
};
export const createTiptapEditor = (extensions = []) =>
createTestEditor({
extensions: [
Audio,
Blockquote,
Bold,
BulletList,
Code,
CodeBlockHighlight,
DescriptionItem,
DescriptionList,
Details,
DetailsContent,
Emoji,
FootnoteDefinition,
FootnoteReference,
FootnotesSection,
Frontmatter,
Figure,
FigureCaption,
HardBreak,
Heading,
HorizontalRule,
...HTMLMarks,
...HTMLNodes,
Image,
InlineDiff,
Italic,
Link,
ListItem,
OrderedList,
ReferenceDefinition,
Strike,
Table,
TableCell,
TableHeader,
TableRow,
TableOfContents,
TaskItem,
TaskList,
Video,
...extensions,
],
});

View File

@ -5,10 +5,9 @@ require 'spec_helper'
RSpec.describe Gitlab::AppLogger do
subject { described_class }
it 'builds a Gitlab::Logger object twice' do
expect(Gitlab::Logger).to receive(:new)
.exactly(described_class.loggers.size)
.and_call_original
it 'builds two Logger instances' do
expect(Gitlab::Logger).to receive(:new).and_call_original
expect(Gitlab::JsonLogger).to receive(:new).and_call_original
subject.info('Hello World!')
end

View File

@ -208,7 +208,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
it 'reports error about variable' do
expect(entry.errors)
.to include 'variables:var2 config must be a string'
.to include 'variables:var2 config uses invalid data keys: description'
end
end
end
@ -447,6 +447,29 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
)
end
end
context 'when variables have "expand" data' do
let(:config) do
{
script: 'echo',
variables: { 'VAR1' => 'val 1',
'VAR2' => { value: 'val 2', expand: false },
'VAR3' => { value: 'val 3', expand: true } }
}
end
it 'returns correct value' do
expect(entry.value).to eq(
name: :rspec,
stage: 'test',
only: { refs: %w[branches tags] },
job_variables: { 'VAR1' => { value: 'val 1' },
'VAR2' => { value: 'val 2', raw: true },
'VAR3' => { value: 'val 3', raw: false } },
root_variables_inheritance: true
)
end
end
end
end
end

View File

@ -316,6 +316,35 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
end
end
end
context 'when variables have "expand" data' do
let(:hash) do
{
variables: { 'VAR1' => 'val 1',
'VAR2' => { value: 'val 2', expand: false },
'VAR3' => { value: 'val 3', expand: true } },
rspec: { script: 'rspec' }
}
end
before do
root.compose!
end
it 'returns correct value' do
expect(root.variables_entry.value_with_data).to eq(
'VAR1' => { value: 'val 1' },
'VAR2' => { value: 'val 2', raw: true },
'VAR3' => { value: 'val 3', raw: false }
)
expect(root.variables_value).to eq(
'VAR1' => 'val 1',
'VAR2' => 'val 2',
'VAR3' => 'val 3'
)
end
end
end
context 'when configuration is not valid' do

View File

@ -92,6 +92,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
describe '#value_with_data' do
subject(:value_with_data) { entry.value_with_data }
it { is_expected.to eq(value: 'value') }
end
describe '#value_with_prefill_data' do
subject(:value_with_prefill_data) { entry.value_with_prefill_data }
it { is_expected.to eq(value: 'value', description: 'description') }
end
@ -107,6 +113,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
describe '#value_with_data' do
subject(:value_with_data) { entry.value_with_data }
it { is_expected.to eq(value: 'value') }
end
describe '#value_with_prefill_data' do
subject(:value_with_prefill_data) { entry.value_with_prefill_data }
it { is_expected.to eq(value: 'value', description: 'description') }
end
end
@ -123,6 +135,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
describe '#value_with_data' do
subject(:value_with_data) { entry.value_with_data }
it { is_expected.to eq(value: '123') }
end
describe '#value_with_prefill_data' do
subject(:value_with_prefill_data) { entry.value_with_prefill_data }
it { is_expected.to eq(value: '123', description: 'description') }
end
end
@ -139,6 +157,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
describe '#value_with_data' do
subject(:value_with_data) { entry.value_with_data }
it { is_expected.to eq(value: 'value') }
end
describe '#value_with_prefill_data' do
subject(:value_with_prefill_data) { entry.value_with_prefill_data }
it { is_expected.to eq(value: 'value', description: :description) }
end
end
@ -192,6 +216,94 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
it { is_expected.to eq(value: 'value') }
end
describe '#value_with_prefill_data' do
subject(:value_with_prefill_data) { entry.value_with_prefill_data }
it { is_expected.to eq(value: 'value') }
end
end
end
context 'when config is a hash with expand' do
let(:config) { { value: 'value', expand: false } }
context 'when metadata allowed_value_data is not provided' do
describe '#valid?' do
it { is_expected.not_to be_valid }
end
describe '#errors' do
subject(:errors) { entry.errors }
it { is_expected.to include 'var1 config must be a string' }
end
end
context 'when metadata allowed_value_data is (value, expand)' do
let(:metadata) { { allowed_value_data: %i[value expand] } }
describe '#valid?' do
it { is_expected.to be_valid }
end
describe '#value' do
subject(:value) { entry.value }
it { is_expected.to eq('value') }
end
describe '#value_with_data' do
subject(:value_with_data) { entry.value_with_data }
it { is_expected.to eq(value: 'value', raw: true) }
context 'when the FF ci_raw_variables_in_yaml_config is disabled' do
before do
stub_feature_flags(ci_raw_variables_in_yaml_config: false)
end
it { is_expected.to eq(value: 'value') }
end
end
context 'when config expand is true' do
let(:config) { { value: 'value', expand: true } }
describe '#value_with_data' do
subject(:value_with_data) { entry.value_with_data }
it { is_expected.to eq(value: 'value', raw: false) }
end
end
context 'when config expand is a string' do
let(:config) { { value: 'value', expand: "true" } }
describe '#valid?' do
it { is_expected.not_to be_valid }
end
describe '#errors' do
subject(:errors) { entry.errors }
it { is_expected.to include 'var1 config expand should be a boolean value' }
end
end
end
context 'when metadata allowed_value_data is (value, xyz)' do
let(:metadata) { { allowed_value_data: %i[value xyz] } }
describe '#valid?' do
it { is_expected.not_to be_valid }
end
describe '#errors' do
subject(:errors) { entry.errors }
it { is_expected.to include 'var1 config uses invalid data keys: expand' }
end
end
end
end
@ -229,6 +341,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
describe '#value_with_data' do
subject(:value_with_data) { entry.value_with_data }
it { is_expected.to eq(value: 'value') }
end
describe '#value_with_prefill_data' do
subject(:value_with_prefill_data) { entry.value_with_prefill_data }
it { is_expected.to eq(value: 'value', description: 'description', value_options: %w[value value2]) }
end
end

View File

@ -66,6 +66,15 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variables do
)
end
end
describe '#value_with_prefill_data' do
it 'returns variable with prefill data' do
expect(entry.value_with_prefill_data).to eq(
'VARIABLE_1' => { value: 'value 1' },
'VARIABLE_2' => { value: 'value 2' }
)
end
end
end
context 'with numeric keys and values in the config' do
@ -119,6 +128,14 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variables do
describe '#value_with_data' do
it 'returns variable with data' do
expect(entry.value_with_data).to eq(
'VARIABLE_1' => { value: 'value' }
)
end
end
describe '#value_with_prefill_data' do
it 'returns variable with prefill data' do
expect(entry.value_with_prefill_data).to eq(
'VARIABLE_1' => { value: 'value', description: 'variable 1' }
)
end
@ -147,6 +164,14 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variables do
describe '#value_with_data' do
it 'returns variable with data' do
expect(entry.value_with_data).to eq(
'VARIABLE_1' => { value: 'value1' }
)
end
end
describe '#value_with_prefill_data' do
it 'returns variable with prefill data' do
expect(entry.value_with_prefill_data).to eq(
'VARIABLE_1' => { value: 'value1', value_options: %w[value1 value2], description: 'variable 1' }
)
end
@ -174,6 +199,15 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variables do
describe '#value_with_data' do
it 'returns variable with data' do
expect(entry.value_with_data).to eq(
'VARIABLE_1' => { value: 'value 1' },
'VARIABLE_2' => { value: 'value 2' }
)
end
end
describe '#value_with_prefill_data' do
it 'returns variable with prefill data' do
expect(entry.value_with_prefill_data).to eq(
'VARIABLE_1' => { value: 'value 1', description: 'variable 1' },
'VARIABLE_2' => { value: 'value 2' }
)

View File

@ -1071,6 +1071,7 @@ module Gitlab
let(:build) { execute.builds.first }
let(:job_variables) { build[:job_variables] }
let(:root_variables) { execute.root_variables }
let(:root_variables_inheritance) { build[:root_variables_inheritance] }
context 'when global variables are defined' do
@ -1193,6 +1194,78 @@ module Gitlab
expect(root_variables_inheritance).to eq(true)
end
end
context 'when variables have data other than value' do
let(:config) do
<<~YAML
variables:
VAR1: value1
VAR2:
value: value2
description: description2
VAR3:
value: value3
expand: false
rspec:
script: rspec
variables:
VAR4: value4
VAR5:
value: value5
expand: false
VAR6:
value: value6
expand: true
YAML
end
it 'returns variables' do
expect(job_variables).to contain_exactly(
{ key: 'VAR4', value: 'value4' },
{ key: 'VAR5', value: 'value5', raw: true },
{ key: 'VAR6', value: 'value6', raw: false }
)
expect(execute.root_variables).to contain_exactly(
{ key: 'VAR1', value: 'value1' },
{ key: 'VAR2', value: 'value2' },
{ key: 'VAR3', value: 'value3', raw: true }
)
expect(execute.root_variables_with_prefill_data).to eq(
'VAR1' => { value: 'value1' },
'VAR2' => { value: 'value2', description: 'description2' },
'VAR3' => { value: 'value3', raw: true }
)
end
context 'when the FF ci_raw_variables_in_yaml_config is disabled' do
before do
stub_feature_flags(ci_raw_variables_in_yaml_config: false)
end
it 'returns variables without description and raw' do
expect(job_variables).to contain_exactly(
{ key: 'VAR4', value: 'value4' },
{ key: 'VAR5', value: 'value5' },
{ key: 'VAR6', value: 'value6' }
)
expect(execute.root_variables).to contain_exactly(
{ key: 'VAR1', value: 'value1' },
{ key: 'VAR2', value: 'value2' },
{ key: 'VAR3', value: 'value3' }
)
expect(execute.root_variables_with_prefill_data).to eq(
'VAR1' => { value: 'value1' },
'VAR2' => { value: 'value2', description: 'description2' },
'VAR3' => { value: 'value3' }
)
end
end
end
end
context 'when using `extends`' do

View File

@ -461,11 +461,7 @@ RSpec.describe Gitlab::Git::Repository do
end
it 'raises an error if it failed' do
# TODO: Once https://gitlab.com/gitlab-org/gitaly/-/merge_requests/4921
# is merged, remove the assertion for Gitlab::Git::Repository::GitError
expect { repository.delete_refs('refs\heads\fix') }.to raise_error do |e|
expect(e).to be_a(Gitlab::Git::Repository::GitError).or be_a(Gitlab::Git::InvalidRefFormatError)
end
expect { repository.delete_refs('refs\heads\fix') }.to raise_error(Gitlab::Git::InvalidRefFormatError)
end
end

View File

@ -29,10 +29,8 @@ RSpec.describe Gitlab::ImportExport::DesignRepoRestorer do
after do
FileUtils.rm_rf(export_path)
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
FileUtils.rm_rf(project_with_design_repo.design_repository.path_to_repo)
FileUtils.rm_rf(project.design_repository.path_to_repo)
end
project_with_design_repo.design_repository.remove
project.design_repository.remove
end
it 'restores the repo successfully' do

View File

@ -47,10 +47,8 @@ RSpec.describe 'forked project import' do
after do
FileUtils.rm_rf(export_path)
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
FileUtils.rm_rf(project_with_repo.repository.path_to_repo)
FileUtils.rm_rf(project.repository.path_to_repo)
end
project_with_repo.repository.remove
project.repository.remove
end
it 'can access the MR', :sidekiq_might_not_need_inline do

View File

@ -23,9 +23,7 @@ RSpec.describe Gitlab::ImportExport::MergeRequestParser do
end
after do
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
FileUtils.rm_rf(project.repository.path_to_repo)
end
project.repository.remove
end
it 'has a source branch' do

View File

@ -7,6 +7,26 @@ RSpec.describe Gitlab::JsonLogger do
let(:now) { Time.now }
describe '#file_name' do
let(:subclass) do
Class.new(Gitlab::JsonLogger) do
def self.file_name_noext
'testlogger'
end
end
end
it 'raises error when file_name_noext not implemented' do
expect { described_class.file_name }.to raise_error(
'JsonLogger implementations must provide file_name_noext implementation'
)
end
it 'returns log file name when file_name_noext is implemented' do
expect(subclass.file_name).to eq('testlogger.log')
end
end
describe '#format_message' do
before do
allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('new-correlation-id')

View File

@ -74,13 +74,12 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
end
describe '#with_instrumentation_class' do
let(:metric_status) { 'active' }
let(:all_definitions) do
metrics_definitions = [
{ key_path: 'metric1', instrumentation_class: 'RedisHLLMetric', status: 'data_available' },
{ key_path: 'metric2', instrumentation_class: 'RedisHLLMetric', status: 'implemented' },
{ key_path: 'metric3', instrumentation_class: 'RedisHLLMetric', status: 'deprecated' },
{ key_path: 'metric4', instrumentation_class: 'RedisHLLMetric', status: metric_status },
{ key_path: 'metric1', instrumentation_class: 'RedisHLLMetric', status: 'active' },
{ key_path: 'metric2', instrumentation_class: 'RedisHLLMetric', status: 'broken' },
{ key_path: 'metric3', instrumentation_class: 'RedisHLLMetric', status: 'active' },
{ key_path: 'metric4', instrumentation_class: 'RedisHLLMetric', status: 'removed' },
{ key_path: 'metric5', status: 'active' },
{ key_path: 'metric_missing_status' }
]
@ -92,7 +91,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
end
it 'includes definitions with instrumentation_class' do
expect(described_class.with_instrumentation_class.count).to eq(4)
expect(described_class.with_instrumentation_class.count).to eq(3)
end
context 'with removed metric' do
@ -201,9 +200,9 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
using RSpec::Parameterized::TableSyntax
where(:status, :skip_validation?) do
'deprecated' | true
'removed' | true
'active' | false
'broken' | false
'removed' | true
end
with_them do

View File

@ -3,403 +3,133 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redis_shared_state do
let(:entity1) { 'dfb9d2d2-f56c-4c77-8aeb-6cddc4a1f857' }
let(:entity2) { '1dd9afb2-a3ee-4de1-8ae3-a405579c8584' }
let(:entity3) { '34rfjuuy-ce56-sa35-ds34-dfer567dfrf2' }
let(:entity4) { '8b9a2671-2abf-4bec-a682-22f6a8f7bf31' }
let(:end_date) { Date.current }
let(:sources) { Gitlab::Usage::Metrics::Aggregates::Sources }
let(:namespace) { described_class.to_s.deconstantize.constantize }
let(:sources) { Gitlab::Usage::Metrics::Aggregates::Sources }
let_it_be(:recorded_at) { Time.current.to_i }
def aggregated_metric(name:, time_frame:, source: "redis", events: %w[event1 event2 event3], operator: "OR", feature_flag: nil)
{
name: name,
source: source,
events: events,
operator: operator,
time_frame: time_frame,
feature_flag: feature_flag
}.compact.with_indifferent_access
end
describe '.calculate_count_for_aggregation' do
using RSpec::Parameterized::TableSyntax
context 'aggregated_metrics_data' do
shared_examples 'aggregated_metrics_data' do
context 'no aggregated metric is defined' do
it 'returns empty hash' do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics).and_return([])
end
expect(aggregated_metrics_data).to eq({})
end
context 'with valid configuration' do
where(:number_of_days, :operator, :datasource, :expected_method) do
28 | 'AND' | 'redis_hll' | :calculate_metrics_intersections
7 | 'AND' | 'redis_hll' | :calculate_metrics_intersections
28 | 'AND' | 'database' | :calculate_metrics_intersections
7 | 'AND' | 'database' | :calculate_metrics_intersections
28 | 'OR' | 'redis_hll' | :calculate_metrics_union
7 | 'OR' | 'redis_hll' | :calculate_metrics_union
28 | 'OR' | 'database' | :calculate_metrics_union
7 | 'OR' | 'database' | :calculate_metrics_union
end
context 'there are aggregated metrics defined' do
let(:aggregated_metrics) do
[
aggregated_metric(name: "gmau_1", source: datasource, time_frame: time_frame, operator: operator)
]
end
let(:results) { { 'gmau_1' => 5 } }
with_them do
let(:time_frame) { "#{number_of_days}d" }
let(:start_date) { number_of_days.days.ago.to_date }
let(:params) { { start_date: start_date, end_date: end_date, recorded_at: recorded_at } }
before do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics).and_return(aggregated_metrics)
end
let(:aggregate) do
{
source: datasource,
operator: operator,
events: %w[event1 event2]
}
end
context 'with OR operator' do
let(:operator) { Gitlab::Usage::Metrics::Aggregates::UNION_OF_AGGREGATED_METRICS }
it 'returns the number of unique events occurred for any metric in aggregate', :aggregate_failures do
expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(5)
expect(aggregated_metrics_data).to eq(results)
end
end
context 'with AND operator' do
let(:operator) { Gitlab::Usage::Metrics::Aggregates::INTERSECTION_OF_AGGREGATED_METRICS }
it 'returns the number of unique events that occurred for all of metrics in the aggregate', :aggregate_failures do
expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_intersections).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(5)
expect(aggregated_metrics_data).to eq(results)
end
end
context 'hidden behind feature flag' do
let(:enabled_feature_flag) { 'test_ff_enabled' }
let(:disabled_feature_flag) { 'test_ff_disabled' }
let(:aggregated_metrics) do
params = { source: datasource, time_frame: time_frame }
[
# represents stable aggregated metrics that has been fully released
aggregated_metric(**params.merge(name: "gmau_without_ff")),
# represents new aggregated metric that is under performance testing on gitlab.com
aggregated_metric(**params.merge(name: "gmau_enabled", feature_flag: enabled_feature_flag)),
# represents aggregated metric that is under development and shouldn't be yet collected even on gitlab.com
aggregated_metric(**params.merge(name: "gmau_disabled", feature_flag: disabled_feature_flag))
]
end
it 'does not calculate data for aggregates with ff turned off' do
skip_feature_flags_yaml_validation
skip_default_enabled_yaml_check
stub_feature_flags(enabled_feature_flag => true, disabled_feature_flag => false)
allow(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).and_return(6)
expect(aggregated_metrics_data).to eq('gmau_without_ff' => 6, 'gmau_enabled' => 6)
end
end
end
context 'error handling' do
context 'development and test environment' do
it 'raises error when unknown aggregation operator is used' do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics)
.and_return([aggregated_metric(name: 'gmau_1', source: datasource, operator: "SUM", time_frame: time_frame)])
end
expect { aggregated_metrics_data }.to raise_error namespace::UnknownAggregationOperator
end
it 'raises error when unknown aggregation source is used' do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics)
.and_return([aggregated_metric(name: 'gmau_1', source: 'whoami', time_frame: time_frame)])
end
expect { aggregated_metrics_data }.to raise_error namespace::UnknownAggregationSource
end
it 'raises error when union is missing' do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics)
.and_return([aggregated_metric(name: 'gmau_1', source: datasource, time_frame: time_frame)])
end
allow(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).and_raise(sources::UnionNotAvailable)
expect { aggregated_metrics_data }.to raise_error sources::UnionNotAvailable
end
end
context 'production' do
before do
stub_rails_env('production')
end
it 'rescues unknown aggregation operator error' do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics)
.and_return([aggregated_metric(name: 'gmau_1', source: datasource, operator: "SUM", time_frame: time_frame)])
end
expect(aggregated_metrics_data).to eq('gmau_1' => -1)
end
it 'rescues unknown aggregation source error' do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics)
.and_return([aggregated_metric(name: 'gmau_1', source: 'whoami', time_frame: time_frame)])
end
expect(aggregated_metrics_data).to eq('gmau_1' => -1)
end
it 'rescues error when union is missing' do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics)
.and_return([aggregated_metric(name: 'gmau_1', source: datasource, time_frame: time_frame)])
end
allow(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).and_raise(sources::UnionNotAvailable)
expect(aggregated_metrics_data).to eq('gmau_1' => -1)
end
end
end
end
shared_examples 'database_sourced_aggregated_metrics' do
let(:datasource) { namespace::DATABASE_SOURCE }
it_behaves_like 'aggregated_metrics_data'
end
shared_examples 'redis_sourced_aggregated_metrics' do
let(:datasource) { namespace::REDIS_SOURCE }
it_behaves_like 'aggregated_metrics_data' do
context 'error handling' do
let(:aggregated_metrics) { [aggregated_metric(name: 'gmau_1', source: datasource, time_frame: time_frame)] }
let(:error) { Gitlab::UsageDataCounters::HLLRedisCounter::EventError }
before do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics).and_return(aggregated_metrics)
end
allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:calculate_events_union).and_raise(error)
end
context 'development and test environment' do
it 're raises Gitlab::UsageDataCounters::HLLRedisCounter::EventError' do
expect { aggregated_metrics_data }.to raise_error error
end
end
context 'production' do
it 'rescues Gitlab::UsageDataCounters::HLLRedisCounter::EventError' do
stub_rails_env('production')
expect(aggregated_metrics_data).to eq('gmau_1' => -1)
end
end
end
end
end
describe '.aggregated_metrics_all_time_data' do
subject(:aggregated_metrics_data) { described_class.new(recorded_at).all_time_data }
let(:start_date) { nil }
let(:end_date) { nil }
let(:time_frame) { ['all'] }
it_behaves_like 'database_sourced_aggregated_metrics'
context 'redis sourced aggregated metrics' do
let(:aggregated_metrics) { [aggregated_metric(name: 'gmau_1', time_frame: time_frame)] }
before do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics).and_return(aggregated_metrics)
end
end
context 'development and test environment' do
it 'raises Gitlab::Usage::Metrics::Aggregates::DisallowedAggregationTimeFrame' do
expect { aggregated_metrics_data }.to raise_error namespace::DisallowedAggregationTimeFrame
end
end
context 'production env' do
it 'returns fallback value for unsupported time frame' do
stub_rails_env('production')
expect(aggregated_metrics_data).to eq('gmau_1' => -1)
end
end
end
end
context 'legacy aggregated metrics configuration' do
let(:temp_dir) { Dir.mktmpdir }
let(:temp_file) { Tempfile.new(%w[common .yml], temp_dir) }
before do
stub_const("#{namespace}::AGGREGATED_METRICS_PATH", File.expand_path('*.yml', temp_dir))
File.open(temp_file.path, "w+b") do |file|
file.write [aggregated_metric(name: "gmau_1", time_frame: '7d')].to_yaml
end
end
after do
temp_file.unlink
FileUtils.remove_entry(temp_dir) if Dir.exist?(temp_dir)
end
it 'allows for YAML aliases in aggregated metrics configs' do
expect(YAML).to receive(:safe_load).with(kind_of(String), aliases: true).at_least(:once)
described_class.new(recorded_at)
end
end
describe '.aggregated_metrics_weekly_data' do
subject(:aggregated_metrics_data) { described_class.new(recorded_at).weekly_data }
let(:start_date) { 7.days.ago.to_date }
let(:time_frame) { ['7d'] }
it_behaves_like 'database_sourced_aggregated_metrics'
it_behaves_like 'redis_sourced_aggregated_metrics'
end
describe '.aggregated_metrics_monthly_data' do
subject(:aggregated_metrics_data) { described_class.new(recorded_at).monthly_data }
let(:start_date) { 4.weeks.ago.to_date }
let(:time_frame) { ['28d'] }
it_behaves_like 'database_sourced_aggregated_metrics'
it_behaves_like 'redis_sourced_aggregated_metrics'
end
describe '.calculate_count_for_aggregation' do
using RSpec::Parameterized::TableSyntax
context 'with valid configuration' do
where(:number_of_days, :operator, :datasource, :expected_method) do
28 | 'AND' | 'redis' | :calculate_metrics_intersections
7 | 'AND' | 'redis' | :calculate_metrics_intersections
28 | 'AND' | 'database' | :calculate_metrics_intersections
7 | 'AND' | 'database' | :calculate_metrics_intersections
28 | 'OR' | 'redis' | :calculate_metrics_union
7 | 'OR' | 'redis' | :calculate_metrics_union
28 | 'OR' | 'database' | :calculate_metrics_union
7 | 'OR' | 'database' | :calculate_metrics_union
end
with_them do
let(:time_frame) { "#{number_of_days}d" }
let(:start_date) { number_of_days.days.ago.to_date }
let(:params) { { start_date: start_date, end_date: end_date, recorded_at: recorded_at } }
let(:aggregate) do
{
source: datasource,
operator: operator,
events: %w[event1 event2]
}
end
subject(:calculate_count_for_aggregation) do
described_class
.new(recorded_at)
.calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
end
it 'returns the number of unique events for aggregation', :aggregate_failures do
expect(namespace::SOURCES[datasource])
.to receive(expected_method)
.with(params.merge(metric_names: %w[event1 event2]))
.and_return(5)
expect(calculate_count_for_aggregation).to eq(5)
end
end
end
context 'with invalid configuration' do
where(:time_frame, :operator, :datasource, :expected_error) do
'28d' | 'SUM' | 'redis' | namespace::UnknownAggregationOperator
'7d' | 'AND' | 'mongodb' | namespace::UnknownAggregationSource
'all' | 'AND' | 'redis' | namespace::DisallowedAggregationTimeFrame
end
with_them do
let(:aggregate) do
{
source: datasource,
operator: operator,
events: %w[event1 event2]
}
end
subject(:calculate_count_for_aggregation) do
described_class
.new(recorded_at)
.calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
end
context 'with non prod environment' do
it 'raises error' do
expect { calculate_count_for_aggregation }.to raise_error expected_error
end
end
context 'with prod environment' do
before do
stub_rails_env('production')
end
it 'returns fallback value' do
expect(calculate_count_for_aggregation).to be(-1)
end
end
end
end
context 'when union data is not available' do
subject(:calculate_count_for_aggregation) do
described_class
.new(recorded_at)
.calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
end
where(:time_frame, :operator, :datasource) do
'28d' | 'OR' | 'redis'
'7d' | 'OR' | 'database'
it 'returns the number of unique events for aggregation', :aggregate_failures do
expect(namespace::SOURCES[datasource])
.to receive(expected_method)
.with(params.merge(metric_names: %w[event1 event2]))
.and_return(5)
expect(calculate_count_for_aggregation).to eq(5)
end
end
end
context 'with invalid configuration' do
where(:time_frame, :operator, :datasource, :expected_error) do
'28d' | 'SUM' | 'redis_hll' | namespace::UnknownAggregationOperator
'7d' | 'AND' | 'mongodb' | namespace::UnknownAggregationSource
'all' | 'AND' | 'redis_hll' | namespace::DisallowedAggregationTimeFrame
end
with_them do
let(:aggregate) do
{
source: datasource,
operator: operator,
events: %w[event1 event2]
}
end
with_them do
subject(:calculate_count_for_aggregation) do
described_class
.new(recorded_at)
.calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
end
context 'with non prod environment' do
it 'raises error' do
expect { calculate_count_for_aggregation }.to raise_error expected_error
end
end
context 'with prod environment' do
before do
allow(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).and_raise(sources::UnionNotAvailable)
stub_rails_env('production')
end
let(:aggregate) do
{
source: datasource,
operator: operator,
events: %w[event1 event2]
}
it 'returns fallback value' do
expect(calculate_count_for_aggregation).to be(-1)
end
end
end
end
context 'when union data is not available' do
subject(:calculate_count_for_aggregation) do
described_class
.new(recorded_at)
.calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
end
where(:time_frame, :operator, :datasource) do
'28d' | 'OR' | 'redis_hll'
'7d' | 'OR' | 'database'
end
with_them do
before do
allow(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).and_raise(sources::UnionNotAvailable)
end
let(:aggregate) do
{
source: datasource,
operator: operator,
events: %w[event1 event2]
}
end
context 'with non prod environment' do
it 'raises error' do
expect { calculate_count_for_aggregation }.to raise_error sources::UnionNotAvailable
end
end
context 'with prod environment' do
before do
stub_rails_env('production')
end
context 'with non prod environment' do
it 'raises error' do
expect { calculate_count_for_aggregation }.to raise_error sources::UnionNotAvailable
end
end
context 'with prod environment' do
before do
stub_rails_env('production')
end
it 'returns fallback value' do
expect(calculate_count_for_aggregation).to be(-1)
end
it 'returns fallback value' do
expect(calculate_count_for_aggregation).to be(-1)
end
end
end

View File

@ -33,8 +33,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
.not_to include(:merge_requests_users)
expect(subject[:usage_activity_by_stage_monthly][:create])
.to include(:merge_requests_users)
expect(subject[:counts_weekly]).to include(:aggregated_metrics)
expect(subject[:counts_monthly]).to include(:aggregated_metrics)
end
it 'clears memoized values' do
@ -1207,23 +1205,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
describe '.aggregated_metrics_data' do
it 'uses ::Gitlab::Usage::Metrics::Aggregates::Aggregate methods', :aggregate_failures do
expected_payload = {
counts_weekly: { aggregated_metrics: { global_search_gmau: 123 } },
counts_monthly: { aggregated_metrics: { global_search_gmau: 456 } },
counts: { aggregate_global_search_gmau: 789 }
}
expect_next_instance_of(::Gitlab::Usage::Metrics::Aggregates::Aggregate) do |instance|
expect(instance).to receive(:weekly_data).and_return(global_search_gmau: 123)
expect(instance).to receive(:monthly_data).and_return(global_search_gmau: 456)
expect(instance).to receive(:all_time_data).and_return(global_search_gmau: 789)
end
expect(described_class.aggregated_metrics_data).to eq(expected_payload)
end
end
describe '.service_desk_counts' do
subject { described_class.send(:service_desk_counts) }

View File

@ -0,0 +1,91 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe EnsureTaskNoteRenamingBackgroundMigrationFinished, :migration do
let(:batched_migrations) { table(:batched_background_migrations) }
let(:batch_failed_status) { 2 }
let(:batch_finalized_status) { 3 }
let_it_be(:migration) { described_class::MIGRATION }
describe '#up' do
shared_examples 'finalizes the migration' do
it 'finalizes the migration' do
expect do
migrate!
task_renaming_migration.reload
failed_job.reload
end.to change(task_renaming_migration, :status).from(task_renaming_migration.status).to(3).and(
change(failed_job, :status).from(batch_failed_status).to(batch_finalized_status)
)
end
end
context 'when migration is missing' do
it 'warns migration not found' do
expect(Gitlab::AppLogger)
.to receive(:warn).with(/Could not find batched background migration for the given configuration:/)
migrate!
end
end
context 'with migration present' do
let!(:task_renaming_migration) do
batched_migrations.create!(
job_class_name: 'RenameTaskSystemNoteToChecklistItem',
table_name: :system_note_metadata,
column_name: :id,
job_arguments: [],
interval: 2.minutes,
min_value: 1,
max_value: 2,
batch_size: 1000,
sub_batch_size: 200,
gitlab_schema: :gitlab_main,
status: 3 # finished
)
end
context 'when migration finished successfully' do
it 'does not raise exception' do
expect { migrate! }.not_to raise_error
end
end
context 'with different migration statuses', :redis do
using RSpec::Parameterized::TableSyntax
where(:status, :description) do
0 | 'paused'
1 | 'active'
4 | 'failed'
5 | 'finalizing'
end
with_them do
let!(:failed_job) do
table(:batched_background_migration_jobs).create!(
batched_background_migration_id: task_renaming_migration.id,
status: batch_failed_status,
min_value: 1,
max_value: 10,
attempts: 2,
batch_size: 100,
sub_batch_size: 10
)
end
before do
task_renaming_migration.update!(status: status)
end
it_behaves_like 'finalizes the migration'
end
end
end
end
end

View File

@ -0,0 +1,144 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe User do
describe '#authenticatable_salt' do
let(:user) { build(:user, encrypted_password: encrypted_password) }
subject(:authenticatable_salt) { user.authenticatable_salt }
context 'when password is stored in BCrypt format' do
let(:encrypted_password) { '$2a$10$AvwDCyF/8HnlAv./UkAZx.vAlKRS89yNElP38FzdgOmVaSaiDL7xm' }
it 'returns the first 30 characters of the encrypted_password' do
expect(authenticatable_salt).to eq(user.encrypted_password[0, 29])
end
end
context 'when password is stored in PBKDF2 format' do
let(:encrypted_password) { '$pbkdf2-sha512$20000$rKbYsScsDdk$iwWBewXmrkD2fFfaG1SDcMIvl9gvEo3fBWUAfiqyVceTlw/DYgKBByHzf45pF5Qn59R4R.NQHsFpvZB4qlsYmw' } # rubocop:disable Layout/LineLength
it 'uses the decoded password salt' do
expect(authenticatable_salt).to eq('aca6d8b1272c0dd9')
end
it 'does not use the first 30 characters of the encrypted_password' do
expect(authenticatable_salt).not_to eq(encrypted_password[0, 29])
end
end
context 'when the encrypted_password is an unknown type' do
let(:encrypted_password) { '$argon2i$v=19$m=512,t=4,p=2$eM+ZMyYkpDRGaI3xXmuNcQ$c5DeJg3eb5dskVt1mDdxfw' }
it 'returns the first 30 characters of the encrypted_password' do
expect(authenticatable_salt).to eq(encrypted_password[0, 29])
end
end
end
describe '#valid_password?' do
subject(:validate_password) { user.valid_password?(password) }
let(:user) { build(:user, encrypted_password: encrypted_password) }
let(:password) { described_class.random_password }
shared_examples 'password validation fails when the password is encrypted using an unsupported method' do
let(:encrypted_password) { '$argon2i$v=19$m=512,t=4,p=2$eM+ZMyYkpDRGaI3xXmuNcQ$c5DeJg3eb5dskVt1mDdxfw' }
it { is_expected.to eq(false) }
end
context 'when the default encryption method is BCrypt' do
it_behaves_like 'password validation fails when the password is encrypted using an unsupported method'
context 'when the user password PBKDF2+SHA512' do
let(:encrypted_password) do
Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512.digest(
password, 20_000, Devise.friendly_token[0, 16])
end
it { is_expected.to eq(true) }
it 're-encrypts the password as BCrypt' do
expect(user.encrypted_password).to start_with('$pbkdf2-sha512$')
validate_password
expect(user.encrypted_password).to start_with('$2a$')
end
end
end
context 'when the default encryption method is PBKDF2+SHA512 and the user password is BCrypt', :fips_mode do
it_behaves_like 'password validation fails when the password is encrypted using an unsupported method'
context 'when the user password BCrypt' do
let(:encrypted_password) { Devise::Encryptor.digest(described_class, password) }
it { is_expected.to eq(true) }
it 're-encrypts the password as PBKDF2+SHA512' do
expect(user.encrypted_password).to start_with('$2a$')
validate_password
expect(user.reload.encrypted_password).to start_with('$pbkdf2-sha512$')
end
end
end
end
describe '#password=' do
let(:user) { build(:user) }
let(:password) { described_class.random_password }
def compare_bcrypt_password(user, password)
Devise::Encryptor.compare(described_class, user.encrypted_password, password)
end
def compare_pbkdf2_password(user, password)
Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512.compare(user.encrypted_password, password)
end
context 'when FIPS mode is enabled', :fips_mode do
it 'calls PBKDF2 digest and not the default Devise encryptor' do
expect(Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512)
.to receive(:digest).at_least(:once).and_call_original
expect(Devise::Encryptor).not_to receive(:digest)
user.password = password
end
it 'saves the password in PBKDF2 format' do
user.password = password
user.save!
expect(compare_pbkdf2_password(user, password)).to eq(true)
expect { compare_bcrypt_password(user, password) }.to raise_error(::BCrypt::Errors::InvalidHash)
end
end
context 'when pbkdf2_password_encryption is disabled' do
before do
stub_feature_flags(pbkdf2_password_encryption: false)
end
it 'calls default Devise encryptor and not the PBKDF2 encryptor' do
expect(Devise::Encryptor).to receive(:digest).at_least(:once).and_call_original
expect(Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512).not_to receive(:digest)
user.password = password
end
it 'saves the password in BCrypt format' do
user.password = password
user.save!
expect { compare_pbkdf2_password(user, password) }
.to raise_error Devise::Pbkdf2Encryptable::Encryptors::InvalidHash
expect(compare_bcrypt_password(user, password)).to eq(true)
end
end
end
end

View File

@ -6157,172 +6157,28 @@ RSpec.describe User do
end
end
describe '#authenticatable_salt' do
let(:user) { create(:user) }
subject(:authenticatable_salt) { user.authenticatable_salt }
it 'uses password_salt' do
expect(authenticatable_salt).to eq(user.password_salt)
end
context 'when the encrypted_password is an unknown type' do
let(:encrypted_password) { '$argon2i$v=19$m=512,t=4,p=2$eM+ZMyYkpDRGaI3xXmuNcQ$c5DeJg3eb5dskVt1mDdxfw' }
before do
user.update_attribute(:encrypted_password, encrypted_password)
end
it 'returns the first 30 characters of the encrypted_password' do
expect(authenticatable_salt).to eq(encrypted_password[0, 29])
end
end
context 'when pbkdf2_password_encryption is disabled' do
before do
stub_feature_flags(pbkdf2_password_encryption: false)
end
it 'returns the first 30 characters of the encrypted_password' do
expect(authenticatable_salt).to eq(user.encrypted_password[0, 29])
end
end
end
def compare_pbkdf2_password(user, password)
Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512.compare(user.encrypted_password, password)
end
describe '#valid_password?' do
subject(:validate_password) { user.valid_password?(password) }
context 'user with password not in disallowed list' do
let(:user) { create(:user) }
let(:password) { user.password }
it { is_expected.to be_truthy }
context 'using a wrong password' do
let(:password) { 'WRONG PASSWORD' }
it { is_expected.to be_falsey }
end
context 'when pbkdf2_sha512_encryption is disabled and the user password is pbkdf2+sha512' do
it 'does not validate correctly' do
user # Create the user while the feature is enabled
stub_feature_flags(pbkdf2_password_encryption: false)
expect(validate_password).to be_falsey
end
end
end
context 'user with disallowed password' do
let(:user) { create(:user, :disallowed_password) }
let(:password) { user.password }
it { is_expected.to be_falsey }
context 'using a wrong password' do
let(:password) { 'WRONG PASSWORD' }
it { is_expected.to be_falsey }
end
it { is_expected.to eq(false) }
end
context 'user with a bcrypt password hash' do
# Manually set a 'known' encrypted password
let(:password) { User.random_password }
let(:encrypted_password) { Devise::Encryptor.digest(User, password) }
let(:user) { create(:user, encrypted_password: encrypted_password) }
context 'using a wrong password' do
let(:user) { create(:user) }
let(:password) { 'WRONG PASSWORD' }
shared_examples 'not re-encrypting with PBKDF2' do
it 'does not re-encrypt with PBKDF2' do
validate_password
expect(user.reload.encrypted_password).to eq(encrypted_password)
end
end
context 'using the wrong password' do
# password 'WRONG PASSWORD' will not match the bcrypt hash
let(:password) { 'WRONG PASSWORD' }
let(:encrypted_password) { Devise::Encryptor.digest(User, User.random_password) }
it { is_expected.to be_falsey }
it_behaves_like 'not re-encrypting with PBKDF2'
context 'when pbkdf2_password_encryption is disabled' do
before do
stub_feature_flags(pbkdf2_password_encryption: false)
end
it { is_expected.to be_falsey }
it_behaves_like 'not re-encrypting with PBKDF2'
end
end
context 'using the correct password' do
it { is_expected.to be_truthy }
it 'validates the password and re-encrypts with PBKDF2' do
validate_password
current_encrypted_password = user.reload.encrypted_password
expect(compare_pbkdf2_password(user, password)).to eq(true)
expect { ::BCrypt::Password.new(current_encrypted_password) }
.to raise_error(::BCrypt::Errors::InvalidHash)
end
context 'when pbkdf2_password_encryption is disabled' do
before do
stub_feature_flags(pbkdf2_password_encryption: false)
end
it { is_expected.to be_truthy }
it_behaves_like 'not re-encrypting with PBKDF2'
end
context 'when pbkdf2_password_encryption_write is disabled' do
before do
stub_feature_flags(pbkdf2_password_encryption_write: false)
end
it { is_expected.to be_truthy }
it_behaves_like 'not re-encrypting with PBKDF2'
end
end
end
context 'user with password hash that is neither PBKDF2 nor BCrypt' do
# Manually calculated User.random_password
let(:password) { "gg_w215TmVXGWSt7RJKXwYTVz886f6SDM3zvzztaJf2mX9ttUE8gRkNJSbWyWRLqxz4LFzxBekPe75ydDcGauE9wqg-acKMRT-WpSYjTm1Rdx-tnssE7CQByJcnxwWNH" }
# Created with https://argon2.online/ using 'aaaaaaaa' as the salt
let(:encrypted_password) { "$argon2i$v=19$m=512,t=4,p=2$YWFhYWFhYWE$PvJscKO5XRlevcgRReUg6w" }
let(:user) { create(:user, encrypted_password: encrypted_password) }
it { is_expected.to be_falsey }
context 'when pbkdf2_password_encryption is disabled' do
before do
stub_feature_flags(pbkdf2_password_encryption: false)
end
it { is_expected.to be_falsey }
end
it { is_expected.to eq(false) }
end
context 'user with autogenerated_password' do
let(:user) { build_stubbed(:user, password_automatically_set: true) }
let(:password) { user.password }
it { is_expected.to be_falsey }
it { is_expected.to eq(false) }
end
end
@ -6377,95 +6233,6 @@ RSpec.describe User do
end
end
# These entire test section can be removed once the :pbkdf2_password_encryption feature flag is removed.
# Exercises the dual-write path while migrating password hashing from BCrypt
# (the Devise default) to PBKDF2+SHA512, driven by two feature flags.
describe '#password=' do
  let(:user) { create(:user) }
  let(:password) { User.random_password }

  # Compares `password` against the stored hash using the BCrypt scheme.
  # Raises BCrypt::Errors::InvalidHash when the stored hash is not BCrypt.
  def compare_bcrypt_password(user, password)
    Devise::Encryptor.compare(User, user.encrypted_password, password)
  end

  context 'when pbkdf2_password_encryption is enabled' do
    it 'calls PBKDF2 digest and not the default Devise encryptor' do
      expect(Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512).to receive(:digest).at_least(:once).and_call_original
      expect(Devise::Encryptor).not_to receive(:digest)

      user.password = password
    end

    it 'saves the password in PBKDF2 format' do
      user.password = password
      user.save!

      # PBKDF2 comparison succeeds; BCrypt comparison raises because the
      # persisted hash is no longer in BCrypt format.
      expect(compare_pbkdf2_password(user, password)).to eq(true)
      expect { compare_bcrypt_password(user, password) }.to raise_error(::BCrypt::Errors::InvalidHash)
    end

    context 'when pbkdf2_password_encryption_write is disabled' do
      before do
        stub_feature_flags(pbkdf2_password_encryption_write: false)
      end

      # With writes disabled, new hashes fall back to the BCrypt encryptor.
      it 'calls default Devise encryptor and not the PBKDF2 encryptor' do
        expect(Devise::Encryptor).to receive(:digest).at_least(:once).and_call_original
        expect(Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512).not_to receive(:digest)

        user.password = password
      end
    end
  end

  context 'when pbkdf2_password_encryption is disabled' do
    before do
      stub_feature_flags(pbkdf2_password_encryption: false)
    end

    it 'calls default Devise encryptor and not the PBKDF2 encryptor' do
      expect(Devise::Encryptor).to receive(:digest).at_least(:once).and_call_original
      expect(Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512).not_to receive(:digest)

      user.password = password
    end

    it 'saves the password in BCrypt format' do
      user.password = password
      user.save!

      # `compare_pbkdf2_password` (defined in an enclosing scope) raises
      # InvalidHash when the stored hash is not PBKDF2 — expected here.
      expect { compare_pbkdf2_password(user, password) }.to raise_error Devise::Pbkdf2Encryptable::Encryptors::InvalidHash
      expect(compare_bcrypt_password(user, password)).to eq(true)
    end
  end
end
# Verifies that User#password_strategy classifies the stored
# encrypted_password by its hash format.
describe '#password_strategy' do
  let(:user) { create(:user, encrypted_password: encrypted_password) }

  context 'with a PBKDF2+SHA512 encrypted password' do
    let(:encrypted_password) { '$pbkdf2-sha512$20000$boHGAw0hEyI$DBA67J7zNZebyzLtLk2X9wRDbmj1LNKVGnZLYyz6PGrIDGIl45fl/BPH0y1TPZnV90A20i.fD9C3G9Bp8jzzOA' }

    # NOTE(review): :aggregate_failures is redundant with a single
    # expectation — harmless, kept as-is.
    it 'extracts the correct strategy', :aggregate_failures do
      expect(user.password_strategy).to eq(:pbkdf2_sha512)
    end
  end

  context 'with a BCrypt encrypted password' do
    let(:encrypted_password) { '$2a$10$xLTxCKOa75IU4RQGqqOrTuZOgZdJEzfSzjG6ZSEi/C31TB/yLZYpi' }

    it 'extracts the correct strategy', :aggregate_failures do
      expect(user.password_strategy).to eq(:bcrypt)
    end
  end

  context 'with an unknown encrypted password' do
    # A pbkdf2-sha256 hash: structurally PBKDF2, but not a supported variant,
    # so it must be reported as :unknown.
    let(:encrypted_password) { '$pbkdf2-sha256$6400$.6UI/S.nXIk8jcbdHx3Fhg$98jZicV16ODfEsEZeYPGHU3kbrUrvUEXOPimVSQDD44' }

    it 'returns unknown strategy' do
      expect(user.password_strategy).to eq(:unknown)
    end
  end
end
describe '#password_expired?' do
let(:user) { build(:user, password_expires_at: password_expires_at) }

View File

@ -120,26 +120,6 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
)
end
# Legacy path: with DAG object-hierarchy traversal disabled, requeueing a1
# only resets its direct dependents; transitively skipped jobs stay skipped.
context 'when the FF ci_requeue_with_dag_object_hierarchy is disabled' do
  before do
    stub_feature_flags(ci_requeue_with_dag_object_hierarchy: false)
  end

  # Description fixed: the expectation below leaves a3 'skipped', not 'created'.
  it 'marks subsequent skipped jobs as processable but leaves a3 skipped' do
    execute_after_requeue_service(a1)

    check_jobs_statuses(
      a1: 'pending',
      a2: 'created',
      a3: 'skipped',
      b1: 'success',
      b2: 'created',
      c1: 'created',
      c2: 'created'
    )
  end
end
context 'when executed by a different user than the original owner' do
let(:retryer) { create(:user).tap { |u| project.add_maintainer(u) } }
let(:service) { described_class.new(project, retryer) }
@ -312,22 +292,6 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
c: 'created'
)
end
# Legacy path: with DAG object-hierarchy traversal disabled, only the direct
# dependent (b) is made processable; the transitive one (c) stays skipped.
context 'when the FF ci_requeue_with_dag_object_hierarchy is disabled' do
  before do
    stub_feature_flags(ci_requeue_with_dag_object_hierarchy: false)
  end

  it 'marks the next subsequent skipped job as processable but leaves c skipped' do
    execute_after_requeue_service(a)

    check_jobs_statuses(
      a: 'pending',
      b: 'created',
      c: 'skipped'
    )
  end
end
end
private

View File

@ -33,6 +33,10 @@ RSpec.describe MergeRequests::ApprovalService do
service.execute(merge_request)
end
# No state change here, so no GraphQL merge-status broadcast is expected.
it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
  let(:action) { service.execute(merge_request) }
end

it 'does not publish MergeRequests::ApprovedEvent' do
  expect { service.execute(merge_request) }.not_to publish_event(MergeRequests::ApprovedEvent)
end
@ -46,6 +50,10 @@ RSpec.describe MergeRequests::ApprovalService do
it 'does not create an approval' do
  expect { service.execute(merge_request) }.not_to change { merge_request.approvals.size }
end

# No approval is recorded, so no merge-status subscription update either.
it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
  let(:action) { service.execute(merge_request) }
end
end
context 'with valid approval' do
@ -67,6 +75,10 @@ RSpec.describe MergeRequests::ApprovalService do
.to publish_event(MergeRequests::ApprovedEvent)
.with(current_user_id: user.id, merge_request_id: merge_request.id)
end
# A successful approval must broadcast the updated merge status to subscribers.
it_behaves_like 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
  let(:action) { service.execute(merge_request) }
end
end
context 'user cannot update the merge request' do
@ -77,6 +89,10 @@ RSpec.describe MergeRequests::ApprovalService do
it 'does not update approvals' do
  expect { service.execute(merge_request) }.not_to change { merge_request.approvals.size }
end

# An attempt by a user who cannot update the MR must not produce a broadcast.
it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
  let(:action) { service.execute(merge_request) }
end
end
end
end

View File

@ -45,6 +45,10 @@ RSpec.describe MergeRequests::RemoveApprovalService do
execute!
end
# Removing an approval changes merge status, so a broadcast is required.
it_behaves_like 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
  let(:action) { execute! }
end
end
context 'with a user who has not approved' do
@ -61,6 +65,10 @@ RSpec.describe MergeRequests::RemoveApprovalService do
execute!
end
# Nothing to remove for a non-approver, so no broadcast must be emitted.
it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
  let(:action) { execute! }
end
end
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
# Shared contract: invoking `action` fires the mergeRequestMergeStatusUpdated
# GraphQL trigger for `merge_request`. Hosts supply both via `let`.
RSpec.shared_examples 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
  it do
    expect(GraphqlTriggers)
      .to receive(:merge_request_merge_status_updated)
      .with(merge_request)

    action
  end
end
# Shared contract: invoking `action` must NOT fire the
# mergeRequestMergeStatusUpdated GraphQL trigger. Hosts supply `action` via `let`.
RSpec.shared_examples 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
  it do
    expect(GraphqlTriggers)
      .not_to receive(:merge_request_merge_status_updated)

    action
  end
end