Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-10-05 21:09:08 +00:00
parent 51bc2d8e70
commit 91264a6752
37 changed files with 716 additions and 347 deletions

View File

@ -33,7 +33,7 @@ cache-workhorse:
- export GITLAB_ASSETS_HASH=$(bundle exec rake gitlab:assets:hash_sum)
- source scripts/gitlab_component_helpers.sh
- 'gitlab_assets_archive_doesnt_exist || { echoinfo "INFO: Exiting early as package exists."; exit 0; }'
- run_timed_command "bin/rake gitlab:assets:compile"
- assets_compile_script
- echo -n "${GITLAB_ASSETS_HASH}" > "cached-assets-hash.txt"
- run_timed_command "create_gitlab_assets_package"
- run_timed_command "upload_gitlab_assets_package"

View File

@ -1,7 +1,7 @@
.yarn-install:
script:
- source scripts/utils.sh
- run_timed_command "retry yarn install --frozen-lockfile"
- yarn_install_script
.storybook-yarn-install:
script:
@ -23,7 +23,7 @@
WEBPACK_COMPILE_LOG_PATH: "tmp/webpack-output.log"
stage: prepare
script:
- !reference [.yarn-install, script]
- yarn_install_script
- export GITLAB_ASSETS_HASH=$(bin/rake gitlab:assets:hash_sum)
- 'echo "CACHE_ASSETS_AS_PACKAGE: ${CACHE_ASSETS_AS_PACKAGE}"'
# The new strategy to cache assets as generic packages is experimental and can be disabled by removing the `CACHE_ASSETS_AS_PACKAGE` variable
@ -32,7 +32,7 @@
source scripts/gitlab_component_helpers.sh
gitlab_assets_archive_doesnt_exist || run_timed_command "download_and_extract_gitlab_assets"
fi
- run_timed_command "bin/rake gitlab:assets:compile"
- assets_compile_script
compile-production-assets:
extends:
@ -294,7 +294,7 @@ coverage-frontend:
- .yarn-cache
- .frontend:rules:qa-frontend-node
stage: test
dependencies: []
needs: []
script:
- !reference [.yarn-install, script]
- run_timed_command "retry yarn run webpack-prod"

View File

@ -79,7 +79,7 @@ danger-review:
before_script:
- source scripts/utils.sh
- bundle_install_script "--with danger"
- run_timed_command "retry yarn install --frozen-lockfile"
- yarn_install_script
script:
# ${DANGER_DANGERFILE} is used by Jihulab for customizing danger support: https://jihulab.com/gitlab-cn/gitlab/-/blob/main-jh/jh/.gitlab-ci.yml
- >

View File

@ -31,7 +31,7 @@ static-analysis:
- .static-analysis:rules:static-analysis
parallel: 2
script:
- run_timed_command "retry yarn install --frozen-lockfile"
- yarn_install_script
- scripts/static-analysis
static-analysis as-if-foss:
@ -84,7 +84,7 @@ eslint:
variables:
USE_BUNDLE_INSTALL: "false"
script:
- run_timed_command "retry yarn install --frozen-lockfile"
- yarn_install_script
- run_timed_command "yarn run lint:eslint:all"
eslint as-if-foss:

View File

@ -574,6 +574,7 @@ class ProjectPolicy < BasePolicy
rule { issues_disabled & merge_requests_disabled }.policy do
prevent(*create_read_update_admin_destroy(:label))
prevent(*create_read_update_admin_destroy(:milestone))
prevent(:read_cycle_analytics)
end
rule { snippets_disabled }.policy do

View File

@ -1,8 +0,0 @@
---
name: ci_project_pipeline_config_refactoring
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97240
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/372867
milestone: '15.4'
type: development
group: group::pipeline authoring
default_enabled: false

View File

@ -17,15 +17,6 @@
# Corresponding feature flag should have `default_enabled` attribute set to `false`.
# This attribute is OPTIONAL and can be omitted, when `feature_flag` is missing no feature flag will be checked.
---
- name: incident_management_alerts_total_unique_counts
operator: OR
source: redis
time_frame: [7d, 28d]
events:
- 'incident_management_alert_status_changed'
- 'incident_management_alert_assigned'
- 'incident_management_alert_todo'
- 'incident_management_alert_create_incident'
- name: incident_management_incidents_total_unique_counts
operator: OR
source: redis

View File

@ -9,7 +9,17 @@ product_category: incident_management
value_type: number
status: active
time_frame: 28d
instrumentation_class: AggregatedMetric
data_source: redis_hll
options:
aggregate:
operator: OR
attribute: user_id
events:
- 'incident_management_alert_status_changed'
- 'incident_management_alert_assigned'
- 'incident_management_alert_todo'
- 'incident_management_alert_create_incident'
distribution:
- ce
- ee

View File

@ -9,7 +9,17 @@ product_category: incident_management
value_type: number
status: active
time_frame: 7d
instrumentation_class: AggregatedMetric
data_source: redis_hll
options:
aggregate:
operator: OR
attribute: user_id
events:
- 'incident_management_alert_status_changed'
- 'incident_management_alert_assigned'
- 'incident_management_alert_todo'
- 'incident_management_alert_create_incident'
distribution:
- ce
- ee

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
class ScheduleDestroyInvalidMembers < Gitlab::Database::Migration[2.0]
MIGRATION = 'DestroyInvalidMembers'
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1_000
MAX_BATCH_SIZE = 10_000
SUB_BATCH_SIZE = 200
restrict_gitlab_migration gitlab_schema: :gitlab_main
def up
queue_batched_background_migration(
MIGRATION,
:members,
:id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
max_batch_size: MAX_BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE,
gitlab_schema: :gitlab_main
)
end
def down
delete_batched_background_migration(MIGRATION, :members, :id, [])
end
end

View File

@ -0,0 +1 @@
b274eaa3ef886e5de6fe5dacb11ab72a724a07a5022da6c01b94309fdaebfd0b

View File

@ -839,6 +839,22 @@ However, it has the following limitations:
WARNING:
This feature is intended solely for internal GitLab use.
The aggregated metrics feature provides insight into the data attributes in a collection of Service Ping metrics.
This aggregation allows you to count data attributes in events without counting each occurrence of the same data attribute in multiple events.
For example, you can aggregate the number of users who perform several actions, such as creating a new issue and opening a new merge request.
You can then count each user that performed any combination of these actions.
### Defining aggregated metric via metric YAML definition
To add data for aggregated metrics to the Service Ping payload,
create metric YAML definition file following [Aggregated metric instrumentation guide](metrics_instrumentation.md#aggregated-metrics).
### (DEPRECATED) Defining aggregated metric via aggregated metric YAML config file
WARNING:
This feature was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/98206) in GitLab 15.5
and is planned for removal in 15.5. Use [metrics definition YAMLs](https://gitlab.com/gitlab-org/gitlab/-/issues/370963) instead.
To add data for aggregated metrics to the Service Ping payload, add a corresponding definition to:
- [`config/metrics/aggregates/*.yaml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/metrics/aggregates/) for metrics available in the Community Edition.

View File

@ -254,6 +254,86 @@ options:
- i_quickactions_approve
```
## Aggregated metrics
The aggregated metrics feature provides insight into the number of data attributes, for example `pseudonymized_user_ids`, that occurred in a collection of events. For example, you can aggregate the number of users who perform multiple actions such as creating a new issue and opening
a new merge request.
You can use a YAML file to define your aggregated metrics. The following arguments are required:
- `options.events`: List of event names to aggregate into metric data. All events in this list must
use the same data source. Additional data source requirements are described in
[Database sourced aggregated metrics](implement.md#database-sourced-aggregated-metrics) and
[Redis sourced aggregated metrics](implement.md#redis-sourced-aggregated-metrics).
- `options.aggregate.operator`: Operator that defines how the aggregated metric data is counted. Available operators are:
- `OR`: Removes duplicates and counts all entries that triggered any of the listed events.
- `AND`: Removes duplicates and counts all elements that were observed triggering all of the following events.
- `options.aggregate.attribute`: Information pointing to the attribute that is being aggregated across events.
- `time_frame`: One or more valid time frames. Use these to limit the data included in aggregated metrics to events within a specific date-range. Valid time frames are:
- `7d`: The last 7 days of data.
- `28d`: The last 28 days of data.
- `all`: All historical data, only available for `database` sourced aggregated metrics.
- `data_source`: Data source used to collect all events data included in the aggregated metrics. Valid data sources are:
- [`database`](implement.md#database-sourced-aggregated-metrics)
- [`redis_hll`](implement.md#redis-sourced-aggregated-metrics)
Refer to merge request [98206](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/98206) for an example of a merge request that adds an `AggregatedMetric` metric.
Count unique `user_ids` that occurred in at least one of the events: `incident_management_alert_status_changed`,
`incident_management_alert_assigned`, `incident_management_alert_todo`, `incident_management_alert_create_incident`.
```yaml
time_frame: 28d
instrumentation_class: AggregatedMetric
data_source: redis_hll
options:
aggregate:
operator: OR
attribute: user_id
events:
- `incident_management_alert_status_changed`
- `incident_management_alert_assigned`
- `incident_management_alert_todo`
- `incident_management_alert_create_incident`
```
### Availability-restrained Aggregated metrics
If the Aggregated metric should only be available in the report under specific conditions, then you must specify these conditions in a new class that is a child of the `AggregatedMetric` class.
```ruby
# frozen_string_literal: true
module Gitlab
module Usage
module Metrics
module Instrumentations
class MergeUsageCountAggregatedMetric < AggregatedMetric
available? { Feature.enabled?(:merge_usage_data_missing_key_paths) }
end
end
end
end
end
```
You must also use the class's name in the YAML setup.
```yaml
time_frame: 28d
instrumentation_class: MergeUsageCountAggregatedMetric
data_source: redis_hll
options:
aggregate:
operator: OR
attribute: user_id
events:
- `incident_management_alert_status_changed`
- `incident_management_alert_assigned`
- `incident_management_alert_todo`
- `incident_management_alert_create_incident`
```
## Numbers metrics
- `operation`: Operations for the given `data` block. Currently we only support `add` operation.

View File

@ -95,7 +95,6 @@ Depending on your role, you can also use the following methods to manage or redu
- [Reduce dependency proxy storage](packages/dependency_proxy/reduce_dependency_proxy_storage.md).
- [Reduce repository size](project/repository/reducing_the_repo_size_using_git.md).
- [Reduce container registry storage](packages/container_registry/reduce_container_registry_storage.md).
- [Reduce container registry data transfers](packages/container_registry/reduce_container_registry_data_transfer.md).
- [Reduce wiki repository size](../administration/wikis/index.md#reduce-wiki-repository-size).
## Excess storage usage
@ -141,3 +140,9 @@ available decreases. All projects remain unlocked because 40 GB purchased storag
| Green | 11 GB | 1 GB | 10 GB | Not locked |
| Yellow | 5 GB | 0 GB | 10 GB | Not locked |
| **Totals** | **45 GB** | **10 GB** | - | - |
## Manage your transfer usage
Depending on your role, you can use the following methods to manage or reduce your transfer:
- [Reduce Container Registry data transfers](packages/container_registry/reduce_container_registry_data_transfer.md).

View File

@ -41,6 +41,9 @@ module API
end
desc 'Delete an application'
params do
requires :id, type: Integer, desc: 'The ID of the application (not the application_id)'
end
delete ':id' do
application = ApplicationsFinder.new(params).execute
break not_found!('Application') unless application

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class DestroyInvalidMembers < Gitlab::BackgroundMigration::BatchedMigrationJob # rubocop:disable Style/Documentation
scope_to ->(relation) { relation.where(member_namespace_id: nil) }
def perform
each_sub_batch(operation_name: :delete_all) do |sub_batch|
deleted_members_data = sub_batch.map do |m|
{ id: m.id, source_id: m.source_id, source_type: m.source_type }
end
deleted_count = sub_batch.delete_all
Gitlab::AppLogger.info({ message: 'Removing invalid member records',
deleted_count: deleted_count,
deleted_member_data: deleted_members_data })
end
end
end
end
end

View File

@ -9,15 +9,6 @@ module Gitlab
include Chain::Helpers
include ::Gitlab::Utils::StrongMemoize
SOURCES = [
Gitlab::Ci::Pipeline::Chain::Config::Content::Parameter,
Gitlab::Ci::Pipeline::Chain::Config::Content::Bridge,
Gitlab::Ci::Pipeline::Chain::Config::Content::Repository,
Gitlab::Ci::Pipeline::Chain::Config::Content::ExternalProject,
Gitlab::Ci::Pipeline::Chain::Config::Content::Remote,
Gitlab::Ci::Pipeline::Chain::Config::Content::AutoDevops
].freeze
def perform!
if pipeline_config&.exists?
@pipeline.build_pipeline_config(content: pipeline_config.content)
@ -36,8 +27,6 @@ module Gitlab
def pipeline_config
strong_memoize(:pipeline_config) do
next legacy_find_config if ::Feature.disabled?(:ci_project_pipeline_config_refactoring, project)
::Gitlab::Ci::ProjectConfig.new(
project: project, sha: @pipeline.sha,
custom_content: @command.content,
@ -45,24 +34,9 @@ module Gitlab
)
end
end
def legacy_find_config
sources.each do |source|
config = source.new(@pipeline, @command)
return config if config.exists?
end
nil
end
def sources
SOURCES
end
end
end
end
end
end
end
Gitlab::Ci::Pipeline::Chain::Config::Content.prepend_mod_with('Gitlab::Ci::Pipeline::Chain::Config::Content')

View File

@ -1,34 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class AutoDevops < Source
def content
strong_memoize(:content) do
next unless project&.auto_devops_enabled?
template = Gitlab::Template::GitlabCiYmlTemplate.find(template_name)
YAML.dump('include' => [{ 'template' => template.full_name }])
end
end
def source
:auto_devops_source
end
private
def template_name
'Auto-DevOps'
end
end
end
end
end
end
end
end

View File

@ -1,25 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class Bridge < Source
def content
return unless @command.bridge
@command.bridge.yaml_for_downstream
end
def source
:bridge_source
end
end
end
end
end
end
end
end

View File

@ -1,51 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class ExternalProject < Source
def content
strong_memoize(:content) do
next unless external_project_path?
path_file, path_project, ref = extract_location_tokens
config_location = { 'project' => path_project, 'file' => path_file }
config_location['ref'] = ref if ref.present?
YAML.dump('include' => [config_location])
end
end
def source
:external_project_source
end
private
# Example: path/to/.gitlab-ci.yml@another-group/another-project
def external_project_path?
ci_config_path =~ /\A.+(yml|yaml)@.+\z/
end
# Example: path/to/.gitlab-ci.yml@another-group/another-project:refname
def extract_location_tokens
path_file, path_project = ci_config_path.split('@', 2)
if path_project.include? ":"
project, ref = path_project.split(':', 2)
[path_file, project, ref]
else
[path_file, path_project]
end
end
end
end
end
end
end
end
end

View File

@ -1,29 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class Parameter < Source
UnsupportedSourceError = Class.new(StandardError)
def content
strong_memoize(:content) do
next unless command.content.present?
command.content
end
end
def source
:parameter_source
end
end
end
end
end
end
end
end

View File

@ -1,27 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class Remote < Source
def content
strong_memoize(:content) do
next unless ci_config_path =~ URI::DEFAULT_PARSER.make_regexp(%w[http https])
YAML.dump('include' => [{ 'remote' => ci_config_path }])
end
end
def source
:remote_source
end
end
end
end
end
end
end
end

View File

@ -1,38 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class Repository < Source
def content
strong_memoize(:content) do
next unless file_in_repository?
YAML.dump('include' => [{ 'local' => ci_config_path }])
end
end
def source
:repository_source
end
private
def file_in_repository?
return unless project
return unless @pipeline.sha
project.repository.gitlab_ci_yml_for(@pipeline.sha, ci_config_path).present?
rescue GRPC::NotFound, GRPC::Internal
nil
end
end
end
end
end
end
end
end

View File

@ -1,49 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
# When removing ci_project_pipeline_config_refactoring, this and its subclasses will be removed.
class Source
include Gitlab::Utils::StrongMemoize
DEFAULT_YAML_FILE = '.gitlab-ci.yml'
attr_reader :command
def initialize(pipeline, command)
@pipeline = pipeline
@command = command
end
def exists?
strong_memoize(:exists) do
content.present?
end
end
def content
raise NotImplementedError
end
def source
raise NotImplementedError
end
def project
@project ||= @pipeline.project
end
def ci_config_path
@ci_config_path ||= project.ci_config_path.presence || DEFAULT_YAML_FILE
end
end
end
end
end
end
end
end

View File

@ -24,19 +24,6 @@ module Gitlab
aggregated_metrics_data(Gitlab::Usage::TimeFrame::SEVEN_DAYS_TIME_FRAME_NAME)
end
private
attr_accessor :aggregated_metrics, :recorded_at
def aggregated_metrics_data(time_frame)
aggregated_metrics.each_with_object({}) do |aggregation, data|
next if aggregation[:feature_flag] && Feature.disabled?(aggregation[:feature_flag], type: :development)
next unless aggregation[:time_frame].include?(time_frame)
data[aggregation[:name]] = calculate_count_for_aggregation(aggregation: aggregation, time_frame: time_frame)
end
end
def calculate_count_for_aggregation(aggregation:, time_frame:)
with_validate_configuration(aggregation, time_frame) do
source = SOURCES[aggregation[:source]]
@ -51,6 +38,19 @@ module Gitlab
failure(error)
end
private
attr_accessor :aggregated_metrics, :recorded_at
def aggregated_metrics_data(time_frame)
aggregated_metrics.each_with_object({}) do |aggregation, data|
next if aggregation[:feature_flag] && Feature.disabled?(aggregation[:feature_flag], type: :development)
next unless aggregation[:time_frame].include?(time_frame)
data[aggregation[:name]] = calculate_count_for_aggregation(aggregation: aggregation, time_frame: time_frame)
end
end
def with_validate_configuration(aggregation, time_frame)
source = aggregation[:source]

View File

@ -0,0 +1,71 @@
# frozen_string_literal: true
module Gitlab
module Usage
module Metrics
module Instrumentations
# Usage example
#
# In metric YAML definition:
#
# instrumentation_class: AggregatedMetric
# data_source: redis_hll
# options:
# aggregate:
# operator: OR
# attribute: user_id
# events:
# - 'incident_management_alert_status_changed'
# - 'incident_management_alert_assigned'
# - 'incident_management_alert_todo'
# - 'incident_management_alert_create_incident'
class AggregatedMetric < BaseMetric
FALLBACK = -1
def initialize(metric_definition)
super
@source = parse_data_source_to_legacy_value(metric_definition)
@aggregate = options.fetch(:aggregate, {})
end
def value
alt_usage_data(fallback: FALLBACK) do
Aggregates::Aggregate
.new(Time.current)
.calculate_count_for_aggregation(
aggregation: aggregate_config,
time_frame: time_frame
)
end
end
def suggested_name
Gitlab::Usage::Metrics::NameSuggestion.for(:alt)
end
private
attr_accessor :source, :aggregate
# TODO: This method is a temporary measure that
# handles backwards compatibility until
# point 5 from is resolved https://gitlab.com/gitlab-org/gitlab/-/issues/370963#implementation
def parse_data_source_to_legacy_value(metric_definition)
return 'redis' if metric_definition[:data_source] == 'redis_hll'
metric_definition[:data_source]
end
def aggregate_config
{
source: source,
events: options[:events],
operator: aggregate[:operator]
}
end
end
end
end
end
end

View File

@ -124,3 +124,4 @@ UsageData/InstrumentationSuperclass:
- :RedisHLLMetric
- :RedisMetric
- :NumbersMetric
- :AggregatedMetric

View File

@ -62,6 +62,22 @@ function bundle_install_script() {
echo -e "section_end:`date +%s`:bundle-install\r\e[0K"
}
function yarn_install_script() {
echo -e "section_start:`date +%s`:yarn-install[collapsed=true]\r\e[0KInstalling Yarn packages"
retry yarn install --frozen-lockfile
echo -e "section_end:`date +%s`:yarn-install\r\e[0K"
}
function assets_compile_script() {
echo -e "section_start:`date +%s`:assets-compile[collapsed=true]\r\e[0KCompiling frontend assets"
bin/rake gitlab:assets:compile
echo -e "section_end:`date +%s`:assets-compile\r\e[0K"
}
function setup_db_user_only() {
source scripts/create_postgres_user.sh
}

View File

@ -10,7 +10,7 @@ import { file } from '../../helpers';
describe('Multi-file editor commit sidebar list item', () => {
let wrapper;
let f;
let testFile;
let findPathEl;
let store;
let router;
@ -21,15 +21,15 @@ describe('Multi-file editor commit sidebar list item', () => {
router = createRouter(store);
f = file('test-file');
testFile = file('test-file');
store.state.entries[f.path] = f;
store.state.entries[testFile.path] = testFile;
wrapper = mount(ListItem, {
store,
propsData: {
file: f,
activeFileKey: `staged-${f.key}`,
file: testFile,
activeFileKey: `staged-${testFile.key}`,
},
});
@ -43,21 +43,21 @@ describe('Multi-file editor commit sidebar list item', () => {
const findPathText = () => trimText(findPathEl.text());
it('renders file path', () => {
expect(findPathText()).toContain(f.path);
expect(findPathText()).toContain(testFile.path);
});
it('correctly renders renamed entries', async () => {
Vue.set(f, 'prevName', 'Old name');
Vue.set(testFile, 'prevName', 'Old name');
await nextTick();
expect(findPathText()).toEqual(`Old name → ${f.name}`);
expect(findPathText()).toEqual(`Old name → ${testFile.name}`);
});
it('correctly renders entry, the name of which did not change after rename (as within a folder)', async () => {
Vue.set(f, 'prevName', f.name);
Vue.set(testFile, 'prevName', testFile.name);
await nextTick();
expect(findPathText()).toEqual(f.name);
expect(findPathText()).toEqual(testFile.name);
});
it('opens a closed file in the editor when clicking the file path', async () => {
@ -86,14 +86,14 @@ describe('Multi-file editor commit sidebar list item', () => {
});
it('is addition when is a tempFile', async () => {
f.tempFile = true;
testFile.tempFile = true;
await nextTick();
expect(getIconName()).toBe('file-addition');
});
it('is deletion when is deleted', async () => {
f.deleted = true;
testFile.deleted = true;
await nextTick();
expect(getIconName()).toBe('file-deletion');
@ -108,14 +108,14 @@ describe('Multi-file editor commit sidebar list item', () => {
});
it('is addition when is a tempFile', async () => {
f.tempFile = true;
testFile.tempFile = true;
await nextTick();
expect(getIconClass()).toContain('ide-file-addition');
});
it('returns deletion when is deleted', async () => {
f.deleted = true;
testFile.deleted = true;
await nextTick();
expect(getIconClass()).toContain('ide-file-deletion');

View File

@ -0,0 +1,141 @@
# frozen_string_literal: true
require 'spec_helper'
# rubocop: disable RSpec/MultipleMemoizedHelpers
RSpec.describe Gitlab::BackgroundMigration::DestroyInvalidMembers, :migration, schema: 20221004094814 do
let!(:migration_attrs) do
{
start_id: 1,
end_id: 1000,
batch_table: :members,
batch_column: :id,
sub_batch_size: 100,
pause_ms: 0,
connection: ApplicationRecord.connection
}
end
let(:users_table) { table(:users) }
let(:namespaces_table) { table(:namespaces) }
let(:members_table) { table(:members) }
let(:projects_table) { table(:projects) }
let(:members_table_name) { 'members' }
let(:connection) { ApplicationRecord.connection }
let(:user1) { users_table.create!(name: 'user1', email: 'user1@example.com', projects_limit: 5) }
let(:user2) { users_table.create!(name: 'user2', email: 'user2@example.com', projects_limit: 5) }
let(:user3) { users_table.create!(name: 'user3', email: 'user3@example.com', projects_limit: 5) }
let(:user4) { users_table.create!(name: 'user4', email: 'user4@example.com', projects_limit: 5) }
let(:user5) { users_table.create!(name: 'user5', email: 'user5@example.com', projects_limit: 5) }
let(:user6) { users_table.create!(name: 'user6', email: 'user6@example.com', projects_limit: 5) }
let(:user7) { users_table.create!(name: 'user7', email: 'user7@example.com', projects_limit: 5) }
let(:user8) { users_table.create!(name: 'user8', email: 'user8@example.com', projects_limit: 5) }
let!(:group1) { namespaces_table.create!(name: 'marvellous group 1', path: 'group-path-1', type: 'Group') }
let!(:group2) { namespaces_table.create!(name: 'outstanding group 2', path: 'group-path-2', type: 'Group') }
let!(:project_namespace1) do
namespaces_table.create!(name: 'fabulous project', path: 'project-path-1',
type: 'ProjectNamespace', parent_id: group1.id)
end
let!(:project1) do
projects_table.create!(name: 'fabulous project', path: 'project-path-1',
project_namespace_id: project_namespace1.id, namespace_id: group1.id)
end
let!(:project_namespace2) do
namespaces_table.create!(name: 'splendiferous project', path: 'project-path-2',
type: 'ProjectNamespace', parent_id: group1.id)
end
let!(:project2) do
projects_table.create!(name: 'splendiferous project', path: 'project-path-2',
project_namespace_id: project_namespace2.id, namespace_id: group1.id)
end
# create valid project member records
let!(:project_member1) { create_valid_project_member(id: 1, user_id: user1.id, project: project1) }
let!(:project_member2) { create_valid_project_member(id: 2, user_id: user2.id, project: project2) }
# create valid group member records
let!(:group_member5) { create_valid_group_member(id: 5, user_id: user5.id, group_id: group1.id) }
let!(:group_member6) { create_valid_group_member(id: 6, user_id: user6.id, group_id: group2.id) }
let!(:migration) { described_class.new(**migration_attrs) }
subject(:perform_migration) { migration.perform }
# create invalid project and group member records
def create_members
[
create_invalid_project_member(id: 3, user_id: user3.id),
create_invalid_project_member(id: 4, user_id: user4.id),
create_invalid_group_member(id: 7, user_id: user7.id),
create_invalid_group_member(id: 8, user_id: user8.id)
]
end
it 'removes invalid memberships but keeps valid ones', :aggregate_failures do
without_check_constraint(members_table_name, 'check_508774aac0', connection: connection) do
create_members
expect(members_table.count).to eq 8
queries = ActiveRecord::QueryRecorder.new do
perform_migration
end
expect(queries.count).to eq(4)
expect(members_table.all).to match_array([project_member1, project_member2, group_member5, group_member6])
end
end
it 'tracks timings of queries' do
without_check_constraint(members_table_name, 'check_508774aac0', connection: connection) do
create_members
expect(migration.batch_metrics.timings).to be_empty
expect { perform_migration }.to change { migration.batch_metrics.timings }
end
end
it 'logs IDs of deleted records' do
without_check_constraint(members_table_name, 'check_508774aac0', connection: connection) do
members = create_members
member_data = members.map do |m|
{ id: m.id, source_id: m.source_id, source_type: m.source_type }
end
expect(Gitlab::AppLogger).to receive(:info).with({ message: 'Removing invalid member records',
deleted_count: 4,
deleted_member_data: member_data })
perform_migration
end
end
def create_invalid_project_member(id:, user_id:)
members_table.create!(id: id, user_id: user_id, source_id: non_existing_record_id,
access_level: Gitlab::Access::MAINTAINER, type: "ProjectMember",
source_type: "Project", notification_level: 3, member_namespace_id: nil)
end
def create_valid_project_member(id:, user_id:, project:)
members_table.create!(id: id, user_id: user_id, source_id: project.id,
access_level: Gitlab::Access::MAINTAINER, type: "ProjectMember", source_type: "Project",
member_namespace_id: project.project_namespace_id, notification_level: 3)
end
def create_invalid_group_member(id:, user_id:)
members_table.create!(id: id, user_id: user_id, source_id: non_existing_record_id,
access_level: Gitlab::Access::MAINTAINER, type: "GroupMember",
source_type: "Namespace", notification_level: 3, member_namespace_id: nil)
end
def create_valid_group_member(id:, user_id:, group_id:)
members_table.create!(id: id, user_id: user_id, source_id: group_id,
access_level: Gitlab::Access::MAINTAINER, type: "GroupMember",
source_type: "Namespace", member_namespace_id: group_id, notification_level: 3)
end
end
# rubocop: enable RSpec/MultipleMemoizedHelpers

View File

@ -11,9 +11,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Config::Content do
subject { described_class.new(pipeline, command) }
# TODO: change this to `describe` and remove rubocop-disable
# when removing the FF ci_project_pipeline_config_refactoring
shared_context '#perform!' do # rubocop:disable RSpec/ContextWording
describe '#perform!' do
context 'when bridge job is passed in as parameter' do
let(:ci_config_path) { nil }
let(:bridge) { create(:ci_bridge) }
@ -203,14 +201,4 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Config::Content do
end
end
end
it_behaves_like '#perform!'
context 'when the FF ci_project_pipeline_config_refactoring is disabled' do
before do
stub_feature_flags(ci_project_pipeline_config_refactoring: false)
end
it_behaves_like '#perform!'
end
end

View File

@ -260,5 +260,132 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
it_behaves_like 'database_sourced_aggregated_metrics'
it_behaves_like 'redis_sourced_aggregated_metrics'
end
describe '.calculate_count_for_aggregation' do
using RSpec::Parameterized::TableSyntax
context 'with valid configuration' do
where(:number_of_days, :operator, :datasource, :expected_method) do
28 | 'AND' | 'redis' | :calculate_metrics_intersections
7 | 'AND' | 'redis' | :calculate_metrics_intersections
28 | 'AND' | 'database' | :calculate_metrics_intersections
7 | 'AND' | 'database' | :calculate_metrics_intersections
28 | 'OR' | 'redis' | :calculate_metrics_union
7 | 'OR' | 'redis' | :calculate_metrics_union
28 | 'OR' | 'database' | :calculate_metrics_union
7 | 'OR' | 'database' | :calculate_metrics_union
end
with_them do
let(:time_frame) { "#{number_of_days}d" }
let(:start_date) { number_of_days.days.ago.to_date }
let(:params) { { start_date: start_date, end_date: end_date, recorded_at: recorded_at } }
let(:aggregate) do
{
source: datasource,
operator: operator,
events: %w[event1 event2]
}
end
subject(:calculate_count_for_aggregation) do
described_class
.new(recorded_at)
.calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
end
it 'returns the number of unique events for aggregation', :aggregate_failures do
expect(namespace::SOURCES[datasource])
.to receive(expected_method)
.with(params.merge(metric_names: %w[event1 event2]))
.and_return(5)
expect(calculate_count_for_aggregation).to eq(5)
end
end
end
context 'with invalid configuration' do
where(:time_frame, :operator, :datasource, :expected_error) do
'28d' | 'SUM' | 'redis' | namespace::UnknownAggregationOperator
'7d' | 'AND' | 'mongodb' | namespace::UnknownAggregationSource
'all' | 'AND' | 'redis' | namespace::DisallowedAggregationTimeFrame
end
with_them do
let(:aggregate) do
{
source: datasource,
operator: operator,
events: %w[event1 event2]
}
end
subject(:calculate_count_for_aggregation) do
described_class
.new(recorded_at)
.calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
end
context 'with non prod environment' do
it 'raises error' do
expect { calculate_count_for_aggregation }.to raise_error expected_error
end
end
context 'with prod environment' do
before do
stub_rails_env('production')
end
it 'returns fallback value' do
expect(calculate_count_for_aggregation).to be(-1)
end
end
end
end
context 'when union data is not available' do
subject(:calculate_count_for_aggregation) do
described_class
.new(recorded_at)
.calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
end
where(:time_frame, :operator, :datasource) do
'28d' | 'OR' | 'redis'
'7d' | 'OR' | 'database'
end
with_them do
before do
allow(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).and_raise(sources::UnionNotAvailable)
end
let(:aggregate) do
{
source: datasource,
operator: operator,
events: %w[event1 event2]
}
end
context 'with non prod environment' do
it 'raises error' do
expect { calculate_count_for_aggregation }.to raise_error sources::UnionNotAvailable
end
end
context 'with prod environment' do
before do
stub_rails_env('production')
end
it 'returns fallback value' do
expect(calculate_count_for_aggregation).to be(-1)
end
end
end
end
end
end
end

View File

@ -0,0 +1,72 @@
# frozen_string_literal: true
require 'spec_helper'
# Exercises AggregatedMetric end-to-end against both the Redis HLL and the
# PostgresHll ('database') backing sources, for AND (intersection) and OR
# (union) operators over two quick-action events.
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::AggregatedMetric, :clean_gitlab_redis_shared_state do
using RSpec::Parameterized::TableSyntax
before do
# Events tracked 1 week ago fall inside both the 7d and 28d windows:
# weekly AND (intersection) = 1, weekly OR (union) = 2
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 1, time: 1.week.ago)
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_unapprove, values: 1, time: 1.week.ago)
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_unapprove, values: 2, time: 1.week.ago)
# Events tracked 2 weeks ago count only towards the 28d window:
# monthly AND (intersection) = 2, monthly OR (union) = 3
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 2, time: 2.weeks.ago)
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_unapprove, values: 3, time: 2.weeks.ago)
# Tracked outside every measured time frame; must not affect any count below
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 3, time: 2.months.ago)
# Pre-computed PostgresHll buckets backing the 'database' data-source rows;
# bucket 56 is shared between the two metrics, so AND = 1 and OR = 3.
Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll
.save_aggregated_metrics(
metric_name: :i_quickactions_approve,
time_period: { created_at: (1.week.ago..Date.current) },
recorded_at_timestamp: Time.current,
data: ::Gitlab::Database::PostgresHll::Buckets.new(141 => 1, 56 => 1)
)
Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll
.save_aggregated_metrics(
metric_name: :i_quickactions_unapprove,
time_period: { created_at: (1.week.ago..Date.current) },
recorded_at_timestamp: Time.current,
data: ::Gitlab::Database::PostgresHll::Buckets.new(10 => 1, 56 => 1)
)
end
where(:data_source, :time_frame, :operator, :expected_value) do
'redis_hll' | '28d' | 'AND' | 2
'redis_hll' | '28d' | 'OR' | 3
'redis_hll' | '7d' | 'AND' | 1
'redis_hll' | '7d' | 'OR' | 2
'database' | '7d' | 'OR' | 3
'database' | '7d' | 'AND' | 1
end
with_them do
# Minimal metric definition hash as it would appear in a metric YAML file.
let(:metric_definition) do
{
data_source: data_source,
time_frame: time_frame,
options: {
aggregate: {
operator: operator
},
events: %w[
i_quickactions_approve
i_quickactions_unapprove
]
}
}
end
# Freeze time so the relative windows (7d/28d) stay stable during the example.
around do |example|
freeze_time { example.run }
end
it 'has correct value' do
# database source is providing estimated value that have a lot of decimal places, round it for convenience
expect(described_class.new(metric_definition).value.round).to eq(expected_value)
end
end
end

View File

@ -77,7 +77,7 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::RedisMetric, :clean_git
end
it_behaves_like 'a correct instrumented metric value', {
options: { event: 'all_searches_count', prefix: nil, include_usage_prefix: false }
options: { event: 'all_searches_count', prefix: nil, include_usage_prefix: false }, time_frame: 'all'
}
end
end

View File

@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Sidebars::Projects::Menus::AnalyticsMenu do
let_it_be(:project) { create(:project, :repository) }
let_it_be_with_refind(:project) { create(:project, :repository) }
let_it_be(:guest) do
create(:user).tap { |u| project.add_guest(u) }
end
@ -125,6 +125,34 @@ RSpec.describe Sidebars::Projects::Menus::AnalyticsMenu do
specify { is_expected.to be_nil }
end
# Menu remains visible while merge requests stay enabled.
describe 'when issues are disabled' do
before do
project.issues_enabled = false
project.save!
end
specify { is_expected.not_to be_nil }
end
# Menu remains visible while issues stay enabled.
describe 'when merge requests are disabled' do
before do
project.merge_requests_enabled = false
project.save!
end
specify { is_expected.not_to be_nil }
end
# With both features disabled the menu (subject) is not rendered at all.
describe 'when the issues and merge requests are disabled' do
before do
project.issues_enabled = false
project.merge_requests_enabled = false
project.save!
end
specify { is_expected.to be_nil }
end
end
end
end

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
# Verifies that the migration enqueues the DestroyInvalidMembers batched
# background migration over the members table, and that rolling the schema
# back removes the scheduled record again.
RSpec.describe ScheduleDestroyInvalidMembers, :migration do
let_it_be(:migration) { described_class::MIGRATION }
describe '#up' do
it 'schedules background jobs for each batch of members' do
migrate!
# Asserts a batched migration record exists with the batching parameters
# taken from the constants defined on the migration class.
expect(migration).to have_scheduled_batched_migration(
table_name: :members,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
max_batch_size: described_class::MAX_BATCH_SIZE
)
end
end
describe '#down' do
it 'deletes all batched migration records' do
migrate!
schema_migrate_down!
expect(migration).not_to have_scheduled_batched_migration
end
end
end

View File

@ -103,6 +103,20 @@ RSpec.describe ProjectPolicy do
end
end
# With both issues and merge requests disabled, even the project owner
# loses access to cycle analytics.
context 'when both issues and merge requests are disabled' do
let(:current_user) { owner }
before do
project.issues_enabled = false
project.merge_requests_enabled = false
project.save!
end
it 'does not include the issues permissions' do
expect_disallowed :read_cycle_analytics
end
end
context 'creating_merge_request_in' do
context 'when the current_user can download_code' do
before do