Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-08-19 18:10:34 +00:00
parent 74b5b3ffcb
commit 2f5731cf53
67 changed files with 1034 additions and 478 deletions

View File

@@ -59,6 +59,10 @@ package-and-qa:
extends:
- .package-and-qa-base
- .qa:rules:package-and-qa
# This job often times out, so temporarily use private runners and a long timeout: https://gitlab.com/gitlab-org/gitlab/-/issues/238563
tags:
- prm
timeout: 4h
needs:
- job: build-qa-image
artifacts: false

View File

@@ -1,12 +1,11 @@
<script>
import { GlPopover, GlIcon, GlDeprecatedButton, GlTooltipDirective } from '@gitlab/ui';
import { GlPopover, GlButton, GlTooltipDirective } from '@gitlab/ui';
export default {
maxTextLength: 95,
components: {
GlPopover,
GlIcon,
GlDeprecatedButton,
GlButton,
},
directives: {
GlTooltip: GlTooltipDirective,
@@ -39,16 +38,18 @@ export default {
<template>
<div id="popover-container">
<gl-popover :target="target" triggers="hover" placement="top" container="popover-container">
<div class="d-flex justify-content-between position-relative">
<div class="pr-5 w-100 ci-popover-value">{{ displayValue }}</div>
<gl-deprecated-button
<div class="gl-display-flex gl-justify-content-space-between gl-align-items-center">
<div class="ci-popover-value gl-pr-3">
{{ displayValue }}
</div>
<gl-button
v-gl-tooltip
class="btn-transparent btn-clipboard position-absolute position-top-0 position-right-0"
category="tertiary"
icon="copy-to-clipboard"
:title="tooltipText"
:data-clipboard-text="value"
>
<gl-icon name="copy-to-clipboard" />
</gl-deprecated-button>
:aria-label="__('Copy to clipboard')"
/>
</div>
</gl-popover>
</div>

View File

@@ -71,29 +71,56 @@ export function getParameterValues(sParam, url = window.location) {
*
* @param {Object} params - url keys and value to merge
* @param {String} url
* @param {Object} options
* @param {Boolean} options.spreadArrays - split array values into separate key/value-pairs
*/
export function mergeUrlParams(params, url) {
export function mergeUrlParams(params, url, options = {}) {
const { spreadArrays = false } = options;
const re = /^([^?#]*)(\?[^#]*)?(.*)/;
const merged = {};
let merged = {};
const [, fullpath, query, fragment] = url.match(re);
if (query) {
query
merged = query
.substr(1)
.split('&')
.forEach(part => {
.reduce((memo, part) => {
if (part.length) {
const kv = part.split('=');
merged[decodeUrlParameter(kv[0])] = decodeUrlParameter(kv.slice(1).join('='));
let key = decodeUrlParameter(kv[0]);
const value = decodeUrlParameter(kv.slice(1).join('='));
if (spreadArrays && key.endsWith('[]')) {
key = key.slice(0, -2);
if (!Array.isArray(memo[key])) {
return { ...memo, [key]: [value] };
}
memo[key].push(value);
return memo;
}
return { ...memo, [key]: value };
}
});
return memo;
}, {});
}
Object.assign(merged, params);
const newQuery = Object.keys(merged)
.filter(key => merged[key] !== null)
.map(key => `${encodeURIComponent(key)}=${encodeURIComponent(merged[key])}`)
.map(key => {
let value = merged[key];
const encodedKey = encodeURIComponent(key);
if (spreadArrays && Array.isArray(value)) {
value = merged[key]
.map(arrayValue => encodeURIComponent(arrayValue))
.join(`&${encodedKey}[]=`);
return `${encodedKey}[]=${value}`;
}
return `${encodedKey}=${encodeURIComponent(value)}`;
})
.join('&');
if (newQuery) {

View File

@@ -14,8 +14,7 @@ module Resolvers
def resolve(**args)
return unless environment
::PerformanceMonitoring::PrometheusDashboard
.find_for(project: environment.project, user: context[:current_user], path: args[:path], options: { environment: environment })
::PerformanceMonitoring::PrometheusDashboard.find_for(project: environment.project, user: context[:current_user], path: args[:path], options: { environment: environment })
end
end
end

View File

@@ -16,13 +16,6 @@ module Types
field :annotations, Types::Metrics::Dashboards::AnnotationType.connection_type, null: true,
description: 'Annotations added to the dashboard',
resolver: Resolvers::Metrics::Dashboards::AnnotationResolver
# In order to maintain backward compatibility we need to return NULL when there are no warnings
# and dashboard validation returns an empty array when there are no issues.
def schema_validation_warnings
warnings = object.schema_validation_warnings
warnings unless warnings.empty?
end
end
# rubocop: enable Graphql/AuthorizeTypes
end

View File

@@ -26,10 +26,19 @@ module BlobViewer
def parse_blob_data
yaml = ::Gitlab::Config::Loader::Yaml.new(blob.data).load_raw!
Gitlab::Metrics::Dashboard::Validator
.errors(yaml, dashboard_path: blob.path, project: project)
::PerformanceMonitoring::PrometheusDashboard.from_json(yaml)
nil
rescue Gitlab::Config::Loader::FormatError => error
[error]
wrap_yml_syntax_error(error)
rescue ActiveModel::ValidationError => invalid
invalid.model.errors
end
def wrap_yml_syntax_error(error)
::PerformanceMonitoring::PrometheusDashboard.new.errors.tap do |errors|
errors.add(:'YAML syntax', error.message)
end
end
end
end
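With this change the viewer exposes an `ActiveModel::Errors` object instead of a plain array, which is what the HAML template change further down iterates over. A minimal sketch of the resulting shape, assuming a blob whose YAML fails to parse:

```ruby
# Hedged illustration of the new error shape (not code from this commit).
error  = Gitlab::Config::Loader::FormatError.new('Invalid configuration format')
errors = ::PerformanceMonitoring::PrometheusDashboard.new.errors.tap do |e|
  e.add(:'YAML syntax', error.message)
end

errors.messages
# => { :"YAML syntax" => ["Invalid configuration format"] }
# The template joins each pair into "YAML syntax: Invalid configuration format".
```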

View File

@@ -137,9 +137,10 @@ module RelativePositioning
# If `true`, then all objects with `null` positions are placed _after_
# all siblings with positions. If `false`, all objects with `null`
# positions are placed _before_ all siblings with positions.
# @returns [Number] The number of moved records.
def move_nulls(objects, at_end:)
objects = objects.reject(&:relative_position)
return if objects.empty?
return 0 if objects.empty?
representative = objects.first
number_of_gaps = objects.size + 1 # 1 at left, one between each, and one at right
@@ -186,6 +187,8 @@ module RelativePositioning
end
end
end
objects.size
end
end

View File

@@ -87,10 +87,12 @@ module DesignManagement
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/17788/diffs#note_230875678
order(:relative_position, :id)
else
order(:id)
in_creation_order
end
end
scope :in_creation_order, -> { reorder(:id) }
scope :with_filename, -> (filenames) { where(filename: filenames) }
scope :on_issue, ->(issue) { where(issue_id: issue) }
@@ -101,7 +103,7 @@ module DesignManagement
scope :current, -> { visible_at_version(nil) }
def self.relative_positioning_query_base(design)
on_issue(design.issue_id)
default_scoped.on_issue(design.issue_id)
end
def self.relative_positioning_parent_column

View File

@@ -53,18 +53,14 @@ module PerformanceMonitoring
# This method is planned to be refactored as a part of https://gitlab.com/gitlab-org/gitlab/-/issues/219398
# implementation. For now, existing logic was reused to deliver the MVC faster
def schema_validation_warnings
run_custom_validation.map(&:message)
self.class.from_json(reload_schema)
nil
rescue ActiveModel::ValidationError => exception
exception.model.errors.map { |attr, error| "#{attr}: #{error}" }
end
private
def run_custom_validation
Gitlab::Metrics::Dashboard::Validator
.errors(reload_schema, dashboard_path: path, project: environment&.project)
rescue Gitlab::Config::Loader::FormatError => error
[error.message]
end
# dashboard finder methods are somewhat limited: #find includes checking if the
# user is authorised to view the selected dashboard, but modifies the schema, which in some cases may
# cause false positives to be returned from validation, and #find_raw does not authorise users

View File

@@ -314,12 +314,14 @@ class Repository
def expire_tags_cache
expire_method_caches(%i(tag_names tag_count has_ambiguous_refs?))
@tags = nil
@tag_names_include = nil
end
def expire_branches_cache
expire_method_caches(%i(branch_names merged_branch_names branch_count has_visible_content? has_ambiguous_refs?))
@local_branches = nil
@branch_exists_memo = nil
@branch_names_include = nil
end
def expire_statistics_caches

View File

@@ -231,6 +231,7 @@ class ProjectPolicy < BasePolicy
enable :admin_issue
enable :admin_label
enable :admin_list
enable :admin_issue_link
enable :read_commit_status
enable :read_build
enable :read_container_image
@@ -544,6 +545,7 @@ class ProjectPolicy < BasePolicy
rule { can?(:read_issue) }.policy do
enable :read_design
enable :read_design_activity
enable :read_issue_link
end
# Design abilities could also be prevented in the issue policy.

View File

@@ -39,9 +39,12 @@ module DesignManagement
delegate :issue, :project, to: :current_design
def move_nulls_to_end
current_design.class.move_nulls_to_end(issue.designs)
next_design.reset if next_design && next_design.relative_position.nil?
previous_design.reset if previous_design && previous_design.relative_position.nil?
moved_records = current_design.class.move_nulls_to_end(issue.designs.in_creation_order)
return if moved_records == 0
current_design.reset
next_design&.reset
previous_design&.reset
end
def neighbors
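A hedged sketch of the contract the service now relies on: `move_nulls_to_end` reports how many records it repositioned, so the `reset` calls above can be skipped when nothing moved (data values are illustrative):

```ruby
designs = issue.designs.in_creation_order
designs.map(&:relative_position) # => [100, 200, nil]

moved = DesignManagement::Design.move_nulls_to_end(designs)
moved # => 1; the unpositioned design now sorts after its siblings

DesignManagement::Design.move_nulls_to_end(designs.map(&:reset))
# => 0; nothing left to reposition, so cached records stay valid
```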

View File

@@ -5,7 +5,7 @@
= icon('warning fw')
= _('Metrics Dashboard YAML definition is invalid:')
%ul
- viewer.errors.each do |error|
%li= error
- viewer.errors.messages.each do |error|
%li= error.join(': ')
= link_to _('Learn more'), help_page_path('operations/metrics/dashboards/index.md')

View File

@@ -1,6 +1,6 @@
- return unless can?(current_user, :archive_project, @project)
.sub-section
.sub-section{ data: { qa_selector: 'archive_project_content' } }
%h4.warning-title
- if @project.archived?
= _('Unarchive project')

View File

@@ -0,0 +1,5 @@
---
title: Use correct order when repositioning existing designs
merge_request: 39826
author:
type: fixed

View File

@@ -0,0 +1,5 @@
---
title: Change to glbutton component in CI variables list
merge_request: 38757
author:
type: other

View File

@@ -0,0 +1,5 @@
---
title: Add pipeline_artifacts_size to project_statistics
merge_request: 39607
author:
type: added

View File

@@ -1,6 +0,0 @@
---
title: Change metrics dashboard schema validation messages into exhaustive list of
all encountered errors.
merge_request: 38925
author:
type: changed

View File

@@ -0,0 +1,19 @@
# frozen_string_literal: true
class AddPipelineArtifactsSizeToProjectStatistics < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column :project_statistics, :pipeline_artifacts_size, :bigint, default: 0, null: false
end
end
def down
with_lock_retries do
remove_column :project_statistics, :pipeline_artifacts_size, :bigint, default: 0, null: false
end
end
end

View File

@@ -0,0 +1 @@
b488cd2049300b293f584f193edc5435855f7bc85989648a3310dabc609d0af4

View File

@@ -14556,7 +14556,8 @@ CREATE TABLE public.project_statistics (
shared_runners_seconds_last_reset timestamp without time zone,
packages_size bigint DEFAULT 0 NOT NULL,
wiki_size bigint,
snippets_size bigint
snippets_size bigint,
pipeline_artifacts_size bigint DEFAULT 0 NOT NULL
);
CREATE SEQUENCE public.project_statistics_id_seq

View File

@@ -310,6 +310,10 @@ attribute. As a prerequisite, you must use an LDAP server that:
1. Save the file and [restart](../restart_gitlab.md#installations-from-source)
GitLab for the changes to take effect.
## Passwords for users created via smartcard authentication
The [Generated passwords for users created through integrated authentication](../../security/passwords_for_integrated_authentication_methods.md) guide provides an overview of how GitLab generates and sets passwords for users created via smartcard authentication.
<!-- ## Troubleshooting
Include any troubleshooting steps that you can foresee. If you know beforehand what issues

View File

@@ -306,7 +306,7 @@ Courier, which we will install later to add IMAP authentication, requires mailbo
```shell
Trying 123.123.123.123...
Connected to mail.example.gitlab.com.
Connected to mail.gitlab.example.com.
Escape character is '^]'.
- OK [CAPABILITY IMAP4rev1 UIDPLUS CHILDREN NAMESPACE THREAD=ORDEREDSUBJECT THREAD=REFERENCES SORT QUOTA IDLE ACL ACL2=UNION] Courier-IMAP ready. Copyright 1998-2011 Double Precision, Inc. See COPYING for distribution information.
```

View File

@@ -34,7 +34,7 @@ Example response:
"id": 1,
"name": "review/fix-foo",
"slug": "review-fix-foo-dfjre3",
"external_url": "https://review-fix-foo-dfjre3.example.gitlab.com",
"external_url": "https://review-fix-foo-dfjre3.gitlab.example.com",
"state": "available"
}
]
@@ -62,7 +62,7 @@ Example of response
"id": 1,
"name": "review/fix-foo",
"slug": "review-fix-foo-dfjre3",
"external_url": "https://review-fix-foo-dfjre3.example.gitlab.com",
"external_url": "https://review-fix-foo-dfjre3.gitlab.example.com",
"state": "available",
"last_deployment": {
"id": 100,
@@ -164,7 +164,7 @@ POST /projects/:id/environments
| `external_url` | string | no | Place to link to for this environment |
```shell
curl --data "name=deploy&external_url=https://deploy.example.gitlab.com" --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/environments"
curl --data "name=deploy&external_url=https://deploy.gitlab.example.com" --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/environments"
```
Example response:
@@ -174,7 +174,7 @@ Example response:
"id": 1,
"name": "deploy",
"slug": "deploy",
"external_url": "https://deploy.example.gitlab.com",
"external_url": "https://deploy.gitlab.example.com",
"state": "available"
}
```
@@ -197,7 +197,7 @@ PUT /projects/:id/environments/:environments_id
| `external_url` | string | no | The new `external_url` |
```shell
curl --request PUT --data "name=staging&external_url=https://staging.example.gitlab.com" --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/environments/1"
curl --request PUT --data "name=staging&external_url=https://staging.gitlab.example.com" --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/environments/1"
```
Example response:
@@ -207,7 +207,7 @@ Example response:
"id": 1,
"name": "staging",
"slug": "staging",
"external_url": "https://staging.example.gitlab.com",
"external_url": "https://staging.gitlab.example.com",
"state": "available"
}
```
@@ -253,7 +253,7 @@ Example response:
"id": 1,
"name": "deploy",
"slug": "deploy",
"external_url": "https://deploy.example.gitlab.com",
"external_url": "https://deploy.gitlab.example.com",
"state": "stopped"
}
```

View File

@@ -324,3 +324,7 @@ of the OmniAuth users has admin permissions.
You may also bypass the auto signin feature by browsing to
`https://gitlab.example.com/users/sign_in?auto_sign_in=false`.
## Passwords for users created via OmniAuth
The [Generated passwords for users created through integrated authentication](../security/passwords_for_integrated_authentication_methods.md) guide provides an overview of how GitLab generates and sets passwords for users created via OmniAuth.

View File

@@ -584,6 +584,10 @@ These attributes define the SAML user. If users can change these attributes, the
Refer to the documentation for your SAML Identity Provider for information on how to fix these attributes.
## Passwords for users created via SAML
The [Generated passwords for users created through integrated authentication](../security/passwords_for_integrated_authentication_methods.md) guide provides an overview of how GitLab generates and sets passwords for users created via SAML.
## Troubleshooting
You can find the base64-encoded SAML Response in the [`production_json.log`](../administration/logs.md#production_jsonlog).

View File

@@ -156,46 +156,21 @@ and files with invalid syntax display **Metrics Dashboard YAML definition is inv
When **Metrics Dashboard YAML definition is invalid**, at least one of the following messages is displayed:
1. `[location] is missing required keys: [list of missing keys]` - The entry at
`[location]` is missing a key, or a key has been mistyped. This
example returns the error `root is missing required keys: panel_groups`:
1. `dashboard: can't be blank` [learn more](#dashboard-top-level-properties)
1. `panel_groups: should be an array of panel_groups objects` [learn more](#dashboard-top-level-properties)
1. `group: can't be blank` [learn more](#panel-group-panel_groups-properties)
1. `panels: should be an array of panels objects` [learn more](#panel-group-panel_groups-properties)
1. `title: can't be blank` [learn more](#panel-panels-properties)
1. `metrics: should be an array of metrics objects` [learn more](#panel-panels-properties)
1. `query: can't be blank` [learn more](#metrics-metrics-properties)
1. `query_range: can't be blank` [learn more](#metrics-metrics-properties)
1. `unit: can't be blank` [learn more](#metrics-metrics-properties)
1. `YAML syntax: The parsed YAML is too big`
```yaml
dashboard: Important metrics
group_panels:
- ...
```
This is displayed when the YAML file is larger than 1 MB.
1. `[data] at [location] is not of type: [type]` - The entry at `[location]` contains
`[data]` which type does not adhere to required types. This example returns the
error `'123' at /panel_groups/0/group is not of type: string`:
1. `YAML syntax: Invalid configuration format`
```yaml
dashboard: Environment metrics
panel_groups:
- group: 123
panels:
...
```
1. `[data] at [location] is not one of: [types]` - The entry at `[location]` contains
`[data]` which is not included in the list of required values. This example returns
the error `'scatterplot-chart' at /panel_groups/0/panels/0/type is not one of: ["area-chart", "line-chart", "anomaly-chart", "bar", "column", "stacked-column", "single-stat", "heatmap"]`:
```yaml
dashboard: Environment metrics
panel_groups:
- group: Network
panels:
- title: Throughput
type: scatterplot-chart
y_label: Requests / Sec
...
```
1. `metric_id must be unique across a project` - At least two metrics entries have
the same `id` attribute, which [must be unique](#metrics-metrics-properties).
1. `The parsed YAML is too big` - The YAML file is larger than 1 MB.
1. `Invalid configuration format` - The YAML file is empty or does not contain valid YAML.
This is displayed when the YAML file is empty or does not contain valid YAML.
Metrics Dashboard YAML definition validation information is also available as a [GraphQL API field](../../../api/graphql/reference/index.md#metricsdashboard)

View File

@@ -989,11 +989,10 @@ For more information see similar questions on PostgreSQL issue tracker [here](ht
### When the secrets file is lost
If you have failed to [back up the secrets file](#storing-configuration-files),
then users with 2FA enabled will not be able to log into GitLab. In that case,
you need to [disable 2FA for everyone](../security/two_factor_authentication.md#disabling-2fa-for-everyone).
If you have failed to [back up the secrets file](#storing-configuration-files), you'll
need to perform a number of steps to get GitLab working properly again.
The secrets file is also responsible for storing the encryption key for several
The secrets file is responsible for storing the encryption key for several
columns containing sensitive information. If the key is lost, GitLab will be
unable to decrypt those columns. This will break a wide range of functionality,
including (but not restricted to):
@@ -1012,17 +1011,28 @@ experience some unexpected behavior such as:
- Stuck jobs.
- 500 errors.
You can check whether you have undecryptable values in the database using
the [Secrets Doctor Rake task](../administration/raketasks/doctor.md).
In this case, you are required to reset all the tokens for CI/CD variables
and Runner Authentication, which is described in more detail below. After
resetting the tokens, you should be able to visit your project and the jobs
will have started running again.
will have started running again. Use the information in the following sections at your own risk.
#### Check for undecryptable values
You can check whether you have undecryptable values in the database using
the [Secrets Doctor Rake task](../administration/raketasks/doctor.md).
#### Take a backup
You will need to directly modify GitLab data to work around your lost secrets file.
CAUTION: **Warning:**
Use the following commands at your own risk, and make sure you've taken a
backup beforehand.
Make sure you've taken a backup beforehand, particularly a full database backup.
#### Disable user two-factor authentication (2FA)
Users with 2FA enabled will not be able to log into GitLab. In that case,
you need to [disable 2FA for everyone](../security/two_factor_authentication.md#disabling-2fa-for-everyone)
and then users will have to reactivate 2FA from scratch.
#### Reset CI/CD variables
@@ -1119,6 +1129,35 @@ A similar strategy can be employed for the remaining features - by removing the
data that cannot be decrypted, GitLab can be brought back into working order,
and the lost data can be manually replaced.
#### Fix project integrations
If you've lost your secrets, the
[projects' integrations settings pages](../user/project/integrations/index.md)
are probably generating 500 errors.
The fix is to truncate the `web_hooks` table:
1. Enter the DB console:
For Omnibus GitLab packages:
```shell
sudo gitlab-rails dbconsole
```
For installations from source:
```shell
sudo -u git -H bundle exec rails dbconsole -e production
```
1. Truncate the table:
```sql
-- truncate web_hooks table
TRUNCATE web_hooks CASCADE;
```
### Container Registry push failures after restoring from a backup
If you use the [Container Registry](../user/packages/container_registry/index.md), you

View File

@@ -7,6 +7,7 @@ type: index
- [Password storage](password_storage.md)
- [Password length limits](password_length_limits.md)
- [Generated passwords for users created through integrated authentication](passwords_for_integrated_authentication_methods.md)
- [Restrict SSH key technologies and minimum length](ssh_keys_restrictions.md)
- [Rate limits](rate_limits.md)
- [Webhooks and insecure internal web services](webhooks.md)

View File

@@ -0,0 +1,14 @@
---
type: reference
---
# Generated passwords for users created through integrated authentication
GitLab allows users to set up accounts through integration with external [authentication and authorization providers](../administration/auth/README.md).
These authentication methods do not require the user to explicitly create a password for their accounts.
However, to maintain data consistency, GitLab requires passwords for all user accounts.
For such accounts, we use the [`friendly_token`](https://github.com/heartcombo/devise/blob/f26e05c20079c9acded3c0ee16da0df435a28997/lib/devise.rb#L492) method provided by the Devise gem to generate a random, unique, and secure password and set it as the account password during sign-up.
The length of the generated password is set based on the value of [maximum password length](password_length_limits.md#modify-maximum-password-length-using-configuration-file) as set in the Devise configuration. The default value is 128 characters.
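A minimal sketch of the mechanism described above, assuming Devise is loaded with its default `password_length` range (`6..128`); this is illustrative, not GitLab's exact code:

```ruby
require 'devise'

max_length = Devise.password_length.max        # 128 by default
password   = Devise.friendly_token(max_length) # random, URL-safe token

# Assigned during account creation so the record passes password validations:
user = User.new(
  email: 'jane@example.com',
  password: password,
  password_confirmation: password
)
```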

View File

@@ -34,7 +34,7 @@ Follow these steps to enable SSL for your fresh instance. Note that these steps
```ruby
# Update external_url from "http" to "https"
external_url "https://example.gitlab.com"
external_url "https://gitlab.example.com"
# Set Let's Encrypt to false
letsencrypt['enable'] = false
@@ -64,8 +64,8 @@ Follow these steps to enable the container registry. Note that these steps refle
```ruby
# Change external_registry_url to match external_url, but append the port 4567
external_url "https://example.gitlab.com"
registry_external_url "https://example.gitlab.com:4567"
external_url "https://gitlab.example.com"
registry_external_url "https://gitlab.example.com:4567"
```
1. Reconfigure your instance to apply the changes:

View File

@@ -301,6 +301,10 @@ Group SAML on a self-managed instance is limited when compared to the recommende
- { name: 'group_saml' }
```
## Passwords for users created via SAML SSO for Groups
The [Generated passwords for users created through integrated authentication](../../../security/passwords_for_integrated_authentication_methods.md) guide provides an overview of how GitLab generates and sets passwords for users created via SAML SSO for Groups.
## Troubleshooting
This section contains possible solutions for problems you might encounter.

View File

@@ -677,22 +677,22 @@ the project.
The following procedure uses these sample project names:
- For the current project: `example.gitlab.com/org/build/sample_project/cr:v2.9.1`
- For the new project: `example.gitlab.com/new_org/build/new_sample_project/cr:v2.9.1`
- For the current project: `gitlab.example.com/org/build/sample_project/cr:v2.9.1`
- For the new project: `gitlab.example.com/new_org/build/new_sample_project/cr:v2.9.1`
Use your own URLs to complete the following steps:
1. Download the Docker images on your computer:
```shell
docker login example.gitlab.com
docker pull example.gitlab.com/org/build/sample_project/cr:v2.9.1
docker login gitlab.example.com
docker pull gitlab.example.com/org/build/sample_project/cr:v2.9.1
```
1. Rename the images to match the new project name:
```shell
docker tag example.gitlab.com/org/build/sample_project/cr:v2.9.1 example.gitlab.com/new_org/build/new_sample_project/cr:v2.9.1
docker tag gitlab.example.com/org/build/sample_project/cr:v2.9.1 gitlab.example.com/new_org/build/new_sample_project/cr:v2.9.1
```
1. Delete the images in both projects by using the [UI](#delete-images) or [API](../../../api/packages.md#delete-a-project-package).
@@ -702,7 +702,7 @@ Use your own URLs to complete the following steps:
1. Restore the images:
```shell
docker push example.gitlab.com/new_org/build/new_sample_project/cr:v2.9.1
docker push gitlab.example.com/new_org/build/new_sample_project/cr:v2.9.1
```
Follow [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/18383) for details.

View File

@@ -40,50 +40,11 @@ module ExtractsRef
# Returns an Array where the first value is the tree-ish and the second is the
# path
def extract_ref(id)
pair = ['', '']
return pair unless repository_container
if id =~ /^(\h{40})(.+)/
# If the ref appears to be a SHA, we're done, just split the string
pair = $~.captures
elsif id.exclude?('/')
# If the ID contains no slash, we must have a ref and no path, so
# we can skip the Redis calls below
pair = [id, '']
else
# Otherwise, attempt to detect the ref using a list of the repository_container's
# branches and tags
# Append a trailing slash if we only get a ref and no file path
unless id.ends_with?('/')
id = [id, '/'].join
end
first_path_segment, rest = id.split('/', 2)
if use_first_path_segment?(first_path_segment)
pair = [first_path_segment, rest]
else
valid_refs = ref_names.select { |v| id.start_with?("#{v}/") }
if valid_refs.empty?
# No exact ref match, so just try our best
pair = id.match(%r{([^/]+)(.*)}).captures
else
# There is a distinct possibility that multiple refs prefix the ID.
# Use the longest match to maximize the chance that we have the
# right ref.
best_match = valid_refs.max_by(&:length)
# Partition the string into the ref and the path, ignoring the empty first value
pair = id.partition(best_match)[1..-1]
end
end
end
pair = extract_raw_ref(id)
[
pair[0].strip,
pair[1].gsub(%r{^/|/$}, '') # Remove leading and trailing slashes from path
pair[1].delete_prefix('/').delete_suffix('/')
]
end
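The refactor preserves behaviour; a worked illustration of the `(ref, path)` pairs `extract_ref` produces, assuming a repository with `master` and `release/v1` branches:

```ruby
extract_ref('master')               # => ['master', '']
extract_ref('master/docs/index.md') # => ['master', 'docs/index.md']

# Multiple refs may prefix the id; the longest match wins:
extract_ref('release/v1/README.md') # => ['release/v1', 'README.md']

# A 40-character hex id takes the SHA fast path and is split directly:
extract_ref('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/README.md')
# => ['aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 'README.md']
```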
@@ -117,6 +78,38 @@ module ExtractsRef
private
def extract_raw_ref(id)
return ['', ''] unless repository_container
# If the ref appears to be a SHA, we're done, just split the string
return $~.captures if id =~ /^(\h{40})(.+)/
# No slash means we must have a ref and no path
return [id, ''] unless id.include?('/')
# Otherwise, attempt to detect the ref using a list of the
# repository_container's branches and tags
# Append a trailing slash if we only get a ref and no file path
id = [id, '/'].join unless id.ends_with?('/')
first_path_segment, rest = id.split('/', 2)
return [first_path_segment, rest] if use_first_path_segment?(first_path_segment)
valid_refs = ref_names.select { |v| id.start_with?("#{v}/") }
# No exact ref match, so just try our best
return id.match(%r{([^/]+)(.*)}).captures if valid_refs.empty?
# There is a distinct possibility that multiple refs prefix the ID.
# Use the longest match to maximize the chance that we have the
# right ref.
best_match = valid_refs.max_by(&:length)
# Partition the string into the ref and the path, ignoring the empty first value
id.partition(best_match)[1..-1]
end
def use_first_path_segment?(ref)
return false unless ::Feature.enabled?(:extracts_path_optimization)
return false unless repository_container

View File

@@ -3,77 +3,36 @@
module Gitlab
module Analytics
class UniqueVisits
ANALYTICS_IDS = Set[
'g_analytics_contribution',
'g_analytics_insights',
'g_analytics_issues',
'g_analytics_productivity',
'g_analytics_valuestream',
'p_analytics_pipelines',
'p_analytics_code_reviews',
'p_analytics_valuestream',
'p_analytics_insights',
'p_analytics_issues',
'p_analytics_repo',
'i_analytics_cohorts',
'i_analytics_dev_ops_score'
]
COMPLIANCE_IDS = Set[
'g_compliance_dashboard',
'g_compliance_audit_events',
'i_compliance_credential_inventory',
'i_compliance_audit_events'
].freeze
KEY_EXPIRY_LENGTH = 12.weeks
def track_visit(visitor_id, target_id, time = Time.zone.now)
target_key = key(target_id, time)
Gitlab::Redis::HLL.add(key: target_key, value: visitor_id, expiry: KEY_EXPIRY_LENGTH)
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(visitor_id, target_id, time)
end
# Returns number of unique visitors for given targets in given time frame
#
# @param [String, Array<String>] targets ids of targets to count visits on. Special cases: :analytics and :compliance
# @param [ActiveSupport::TimeWithZone] start_week start of time frame
# @param [Integer] weeks time frame length in weeks
# @param [ActiveSupport::TimeWithZone] start_date start of time frame
# @param [ActiveSupport::TimeWithZone] end_date end of time frame
# @return [Integer] number of unique visitors
def unique_visits_for(targets:, start_week: 7.days.ago, weeks: 1)
def unique_visits_for(targets:, start_date: 7.days.ago, end_date: start_date + 1.week)
target_ids = if targets == :analytics
ANALYTICS_IDS
self.class.analytics_ids
elsif targets == :compliance
COMPLIANCE_IDS
self.class.compliance_ids
else
Array(targets)
end
timeframe_start = [start_week, weeks.weeks.ago].min
redis_keys = keys(targets: target_ids, timeframe_start: timeframe_start, weeks: weeks)
Gitlab::Redis::HLL.count(keys: redis_keys)
Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(event_names: target_ids, start_date: start_date, end_date: end_date)
end
private
class << self
def analytics_ids
Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category('analytics')
end
def key(target_id, time)
target_ids = ANALYTICS_IDS + COMPLIANCE_IDS
raise "Invalid target id #{target_id}" unless target_ids.include?(target_id.to_s)
target_key = target_id.to_s.gsub('analytics', '{analytics}').gsub('compliance', '{compliance}')
year_week = time.strftime('%G-%V')
"#{target_key}-#{year_week}"
end
def keys(targets:, timeframe_start:, weeks:)
(0..(weeks - 1)).map do |week_increment|
targets.map { |target_id| key(target_id, timeframe_start + week_increment * 7.days) }
end.flatten
def compliance_ids
Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category('compliance')
end
end
end
end
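After the refactor the class is a thin wrapper over `HLLRedisCounter`; a hedged usage sketch of the changed interface (ids illustrative):

```ruby
unique_visits = Gitlab::Analytics::UniqueVisits.new

# Delegates to Gitlab::UsageDataCounters::HLLRedisCounter.track_event:
unique_visits.track_visit('user-uuid-1234', 'g_analytics_contribution')

# start_week:/weeks: are gone; time frames are now plain date ranges:
unique_visits.unique_visits_for(
  targets: :analytics,
  start_date: 4.weeks.ago.to_date,
  end_date: Date.current
)
```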

View File

@@ -8,18 +8,20 @@ module Gitlab
class << self
def validate(content, schema_path = DASHBOARD_SCHEMA_PATH, dashboard_path: nil, project: nil)
errors(content, schema_path, dashboard_path: dashboard_path, project: project).empty?
errors = _validate(content, schema_path, dashboard_path: dashboard_path, project: project)
errors.empty?
end
def validate!(content, schema_path = DASHBOARD_SCHEMA_PATH, dashboard_path: nil, project: nil)
errors = errors(content, schema_path, dashboard_path: dashboard_path, project: project)
errors = _validate(content, schema_path, dashboard_path: dashboard_path, project: project)
errors.empty? || raise(errors.first)
end
def errors(content, schema_path = DASHBOARD_SCHEMA_PATH, dashboard_path: nil, project: nil)
Validator::Client
.new(content, schema_path, dashboard_path: dashboard_path, project: project)
.execute
private
def _validate(content, schema_path, dashboard_path: nil, project: nil)
client = Validator::Client.new(content, schema_path, dashboard_path: dashboard_path, project: project)
client.execute
end
end
end
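`.errors` is no longer part of the public interface (the validator spec later in this commit drops its `#errors` examples); a hedged sketch of the two remaining entry points:

```ruby
content = YAML.safe_load(File.read('dashboard.yml')) # placeholder path

# Boolean form:
Gitlab::Metrics::Dashboard::Validator.validate(content) # => true / false

# Raising form: raises the first error, e.g. a SchemaValidationError:
Gitlab::Metrics::Dashboard::Validator.validate!(content)
```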

View File

@@ -46,7 +46,7 @@ module Gitlab
def validate_against_schema
schemer.validate(content).map do |error|
::Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError.new(error)
Errors::SchemaValidationError.new(error)
end
end
end

View File

@@ -4,7 +4,7 @@
"properties": {
"type": {
"type": "string",
"enum": ["area-chart", "line-chart", "anomaly-chart", "bar", "column", "stacked-column", "single-stat", "heatmap", "gauge"],
"enum": ["area-chart", "anomaly-chart", "bar", "column", "stacked-column", "single-stat", "heatmap"],
"default": "area-chart"
},
"title": { "type": "string" },

View File

@@ -58,11 +58,19 @@ module Gitlab
# wrong answer. We handle that by querying the full list - which fills
# the cache - and using it directly to answer the question.
define_method("#{name}_include?") do |value|
if strong_memoized?(name) || !redis_set_cache.exist?(name)
return __send__(name).include?(value) # rubocop:disable GitlabSecurity/PublicSend
end
ivar = "@#{name}_include"
memoized = instance_variable_get(ivar) || {}
redis_set_cache.include?(name, value)
next memoized[value] if memoized.key?(value)
memoized[value] =
if strong_memoized?(name) || !redis_set_cache.exist?(name)
__send__(name).include?(value) # rubocop:disable GitlabSecurity/PublicSend
else
redis_set_cache.include?(name, value)
end
instance_variable_set(ivar, memoized)[value]
end
end
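The per-value memo avoids repeated Redis round-trips within a single request. A hedged illustration with `branch_names`, which the `Repository#expire_branches_cache` change above shows is cached this way:

```ruby
repository.branch_names_include?('master') # first call: Redis (or full list)
repository.branch_names_include?('master') # second call: memoized Hash lookup

# expire_branches_cache clears @branch_names_include along with the set cache,
# so stale membership answers are not served after branches change.
```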

View File

@@ -584,21 +584,21 @@ module Gitlab
end
def analytics_unique_visits_data
results = ::Gitlab::Analytics::UniqueVisits::ANALYTICS_IDS.each_with_object({}) do |target_id, hash|
results = ::Gitlab::Analytics::UniqueVisits.analytics_ids.each_with_object({}) do |target_id, hash|
hash[target_id] = redis_usage_data { unique_visit_service.unique_visits_for(targets: target_id) }
end
results['analytics_unique_visits_for_any_target'] = redis_usage_data { unique_visit_service.unique_visits_for(targets: :analytics) }
results['analytics_unique_visits_for_any_target_monthly'] = redis_usage_data { unique_visit_service.unique_visits_for(targets: :analytics, weeks: 4) }
results['analytics_unique_visits_for_any_target_monthly'] = redis_usage_data { unique_visit_service.unique_visits_for(targets: :analytics, start_date: 4.weeks.ago.to_date, end_date: Date.current) }
{ analytics_unique_visits: results }
end
def compliance_unique_visits_data
results = ::Gitlab::Analytics::UniqueVisits::COMPLIANCE_IDS.each_with_object({}) do |target_id, hash|
results = ::Gitlab::Analytics::UniqueVisits.compliance_ids.each_with_object({}) do |target_id, hash|
hash[target_id] = redis_usage_data { unique_visit_service.unique_visits_for(targets: target_id) }
end
results['compliance_unique_visits_for_any_target'] = redis_usage_data { unique_visit_service.unique_visits_for(targets: :compliance) }
results['compliance_unique_visits_for_any_target_monthly'] = redis_usage_data { unique_visit_service.unique_visits_for(targets: :compliance, weeks: 4) }
results['compliance_unique_visits_for_any_target_monthly'] = redis_usage_data { unique_visit_service.unique_visits_for(targets: :compliance, start_date: 4.weeks.ago.to_date, end_date: Date.current) }
{ compliance_unique_visits: results }
end

View File

@@ -0,0 +1,149 @@
# frozen_string_literal: true
module Gitlab
module UsageDataCounters
module HLLRedisCounter
DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH = 6.weeks
DEFAULT_DAILY_KEY_EXPIRY_LENGTH = 29.days
DEFAULT_REDIS_SLOT = ''.freeze
UnknownEvent = Class.new(StandardError)
UnknownAggregation = Class.new(StandardError)
KNOWN_EVENTS_PATH = 'lib/gitlab/usage_data_counters/known_events.yml'.freeze
ALLOWED_AGGREGATIONS = %i(daily weekly).freeze
# Track event on entity_id
# Increment a Redis HLL counter for unique event_name and entity_id
#
# All events should be added to the known_events file: lib/gitlab/usage_data_counters/known_events.yml
#
# Event example:
#
# - name: g_compliance_dashboard # Unique event name
#   redis_slot: compliance # Optional slot name; if not defined, the event name is used as the slot (used for totals)
#   category: compliance # Group events in categories
#   expiry: 29 # Optional expiration time in days; defaults to 29 days for daily and 6 weeks for weekly aggregation
#   aggregation: daily # Aggregation level; keys are stored daily or weekly
#
# Usage:
#
# * Track event: Gitlab::UsageDataCounters::HLLRedisCounter.track_event(user_id, 'g_compliance_dashboard')
# * Get unique counts per user: Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(event_names: 'g_compliance_dashboard', start_date: 28.days.ago, end_date: Date.current)
class << self
def track_event(entity_id, event_name, time = Time.zone.now)
event = event_for(event_name)
raise UnknownEvent.new("Unknown event #{event_name}") unless event.present?
Gitlab::Redis::HLL.add(key: redis_key(event, time), value: entity_id, expiry: expiry(event))
end
def unique_events(event_names:, start_date:, end_date:)
events = events_for(Array(event_names))
raise 'Events should be in same slot' unless events_in_same_slot?(events)
raise 'Events should be in same category' unless events_in_same_category?(events)
raise 'Events should have same aggregation level' unless events_same_aggregation?(events)
aggregation = events.first[:aggregation]
keys = keys_for_aggregation(aggregation, events: events, start_date: start_date, end_date: end_date)
Gitlab::Redis::HLL.count(keys: keys)
end
def events_for_category(category)
known_events.select { |event| event[:category] == category }.map { |event| event[:name] }
end
private
def keys_for_aggregation(aggregation, events:, start_date:, end_date:)
if aggregation.to_sym == :daily
daily_redis_keys(events: events, start_date: start_date, end_date: end_date)
else
weekly_redis_keys(events: events, start_date: start_date, end_date: end_date)
end
end
def known_events
@known_events ||= YAML.load_file(Rails.root.join(KNOWN_EVENTS_PATH)).map(&:with_indifferent_access)
end
def known_events_names
known_events.map { |event| event[:name] }
end
def events_in_same_slot?(events)
slot = events.first[:redis_slot]
events.all? { |event| event[:redis_slot] == slot }
end
def events_in_same_category?(events)
category = events.first[:category]
events.all? { |event| event[:category] == category }
end
def events_same_aggregation?(events)
aggregation = events.first[:aggregation]
events.all? { |event| event[:aggregation] == aggregation }
end
def expiry(event)
return event[:expiry] if event[:expiry].present?
event[:aggregation].to_sym == :daily ? DEFAULT_DAILY_KEY_EXPIRY_LENGTH : DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH
end
def event_for(event_name)
known_events.find { |event| event[:name] == event_name }
end
def events_for(event_names)
known_events.select { |event| event_names.include?(event[:name]) }
end
def redis_slot(event)
event[:redis_slot] || DEFAULT_REDIS_SLOT
end
# Compose the key in order to store events daily or weekly
def redis_key(event, time)
raise UnknownEvent.new("Unknown event #{event[:name]}") unless known_events_names.include?(event[:name].to_s)
raise UnknownAggregation.new("Use :daily or :weekly aggregation") unless ALLOWED_AGGREGATIONS.include?(event[:aggregation].to_sym)
slot = redis_slot(event)
key = if slot.present?
event[:name].to_s.gsub(slot, "{#{slot}}")
else
"{#{event[:name]}}"
end
if event[:aggregation].to_sym == :daily
year_day = time.strftime('%G-%j')
"#{year_day}-#{key}"
else
year_week = time.strftime('%G-%V')
"#{key}-#{year_week}"
end
end
def daily_redis_keys(events:, start_date:, end_date:)
(start_date.to_date..end_date.to_date).map do |date|
events.map { |event| redis_key(event, date) }
end.flatten
end
def weekly_redis_keys(events:, start_date:, end_date:)
weeks = end_date.to_date.cweek - start_date.to_date.cweek
weeks = 1 if weeks == 0
(0..(weeks - 1)).map do |week_increment|
events.map { |event| redis_key(event, start_date + week_increment * 7.days) }
end.flatten
end
end
end
end
end
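A worked example of the key composition, using the `g_compliance_dashboard` event from `known_events.yml` below (weekly aggregation, `redis_slot: compliance`); `redis_key` is private, so this is illustrative only:

```ruby
event = { name: 'g_compliance_dashboard', redis_slot: 'compliance',
          category: 'compliance', aggregation: 'weekly' }.with_indifferent_access

# The slot is wrapped in {braces} so all compliance keys hash to the same
# Redis Cluster slot; the ISO year-week is appended for weekly aggregation:
redis_key(event, Time.utc(2020, 6, 1)) # => "g_{compliance}_dashboard-2020-23"

# Daily events prepend the ISO year and day-of-year instead, e.g. g_search
# (no redis_slot) on the same date composes to "2020-153-{g_search}".
```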

View File

@@ -0,0 +1,88 @@
---
# Compliance category
- name: g_compliance_dashboard
redis_slot: compliance
category: compliance
expiry: 84 # expiration time in days, equivalent to 12 weeks
aggregation: weekly
- name: g_compliance_audit_events
category: compliance
redis_slot: compliance
expiry: 84
aggregation: weekly
- name: i_compliance_audit_events
category: compliance
redis_slot: compliance
expiry: 84
aggregation: weekly
- name: i_compliance_credential_inventory
category: compliance
redis_slot: compliance
expiry: 84
aggregation: weekly
# Analytics category
- name: g_analytics_contribution
category: analytics
redis_slot: analytics
expiry: 84
aggregation: weekly
- name: g_analytics_insights
category: analytics
redis_slot: analytics
expiry: 84
aggregation: weekly
- name: g_analytics_issues
category: analytics
redis_slot: analytics
expiry: 84
aggregation: weekly
- name: g_analytics_productivity
category: analytics
redis_slot: analytics
expiry: 84
aggregation: weekly
- name: g_analytics_valuestream
category: analytics
redis_slot: analytics
expiry: 84
aggregation: weekly
- name: p_analytics_pipelines
category: analytics
redis_slot: analytics
expiry: 84
aggregation: weekly
- name: p_analytics_code_reviews
category: analytics
redis_slot: analytics
expiry: 84
aggregation: weekly
- name: p_analytics_valuestream
category: analytics
redis_slot: analytics
expiry: 84
aggregation: weekly
- name: p_analytics_insights
category: analytics
redis_slot: analytics
expiry: 84
aggregation: weekly
- name: p_analytics_issues
category: analytics
redis_slot: analytics
expiry: 84
aggregation: weekly
- name: p_analytics_repo
category: analytics
redis_slot: analytics
expiry: 84
aggregation: weekly
- name: i_analytics_cohorts
category: analytics
redis_slot: analytics
expiry: 84
aggregation: weekly
- name: i_analytics_dev_ops_score
category: analytics
redis_slot: analytics
expiry: 84
aggregation: weekly
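These entries are what `HLLRedisCounter.events_for_category` (and, through it, `UniqueVisits.analytics_ids`/`.compliance_ids`) reads at runtime; for example:

```ruby
Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category('compliance')
# => ["g_compliance_dashboard", "g_compliance_audit_events",
#     "i_compliance_audit_events", "i_compliance_credential_inventory"]
```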

View File

@@ -6931,6 +6931,9 @@ msgstr ""
msgid "Copy the code below to implement tracking in your application:"
msgstr ""
msgid "Copy to clipboard"
msgstr ""
msgid "Copy token"
msgstr ""

View File

@@ -44,10 +44,6 @@ module QA
wait_for_requests
end
def click_body
page.find("body").click
end
def wait_until(max_duration: 60, sleep_interval: 0.1, reload: true, raise_on_failure: true)
Support::Waiter.wait_until(max_duration: max_duration, sleep_interval: sleep_interval, raise_on_failure: raise_on_failure) do
yield || (reload && refresh && false)
@@ -145,6 +141,15 @@ module QA
end
end
# Use this to simulate moving the pointer to an element's coordinates
# and sending a click event.
# This is a helpful workaround when a transparent element overlaps the
# target element, in which case a normal `click_element` on the target
# would raise Selenium::WebDriver::Error::ElementClickInterceptedError
def click_element_coordinates(name)
page.driver.browser.action.move_to(find_element(name).native).click.perform
end
# replace with (..., page = self.class)
def click_element(name, page = nil, **kwargs)
wait_for_requests

View File

@@ -17,6 +17,7 @@ module QA
view 'app/views/projects/settings/_archive.html.haml' do
element :archive_project_link
element :unarchive_project_link
element :archive_project_content
end
view 'app/views/projects/_export.html.haml' do
@@ -45,7 +46,7 @@ module QA
# Retry added here due to a seldom-seen inconsistent UI state issue:
# https://gitlab.com/gitlab-org/gitlab/-/issues/231242
retry_on_exception do
click_body
click_element_coordinates(:archive_project_content)
expand_select_list
# Workaround for a failure to search when there are no spaces around the /
# https://gitlab.com/gitlab-org/gitlab/-/issues/218965

View File

@@ -66,7 +66,7 @@ module QA
end
context 'when recovering from dataloss after failover' do
it 'allows reconciliation' do
it 'allows reconciliation', quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/238187', type: :investigating } do
# Start the old primary node again
praefect_manager.start_primary_node
praefect_manager.wait_for_health_check_current_primary_node

View File

@@ -22,7 +22,7 @@ module QA
praefect_manager.reset_primary_to_original
end
it 'recovers from dataloss' do
it 'recovers from dataloss', quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/238186', type: :investigating } do
# Create a new project with a commit and wait for it to replicate
praefect_manager.wait_for_replication(project.id)

View File

@@ -2,7 +2,7 @@
module QA
RSpec.describe 'Create' do
describe 'Changing Gitaly repository storage', :requires_admin do
describe 'Changing Gitaly repository storage', :requires_admin, quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/236195', type: :investigating } do
praefect_manager = Service::PraefectManager.new
praefect_manager.gitlab = 'gitlab'

View File

@@ -64,6 +64,12 @@ module QA
super
end
def click_element_coordinates(name)
log(%Q(clicking the coordinates of :#{name}))
super
end
def click_element(name, page = nil, **kwargs)
msg = ["clicking :#{name}"]
msg << ", expecting to be at #{page.class}" if page

View File

@@ -4,6 +4,7 @@ UsageData/LargeTable:
- 'lib/gitlab/usage_data.rb'
- 'ee/lib/ee/gitlab/usage_data.rb'
NonRelatedClasses:
- :Date
- :Feature
- :Gitlab
- :Gitlab::AppLogger

View File

@@ -589,7 +589,7 @@ RSpec.describe 'File blob', :js do
aggregate_failures do
# shows that dashboard yaml is invalid
expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
expect(page).to have_content("root is missing required keys: panel_groups")
expect(page).to have_content("panel_groups: should be an array of panel_groups objects")
# shows a learn more link
expect(page).to have_link('Learn more')

View File

@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
import { GlDeprecatedButton } from '@gitlab/ui';
import { GlButton } from '@gitlab/ui';
import CiVariablePopover from '~/ci_variable_list/components/ci_variable_popover.vue';
import mockData from '../services/mock_data';
@@ -18,7 +18,7 @@ describe('Ci Variable Popover', () => {
});
};
const findButton = () => wrapper.find(GlDeprecatedButton);
const findButton = () => wrapper.find(GlButton);
beforeEach(() => {
createComponent();

View File

@@ -160,6 +160,118 @@ describe('URL utility', () => {
'https://host/path?op=%2B&foo=bar',
);
});
describe('with spread array option', () => {
const spreadArrayOptions = { spreadArrays: true };
it('maintains multiple values', () => {
expect(mergeUrlParams({}, '?array[]=foo&array[]=bar', spreadArrayOptions)).toBe(
'?array[]=foo&array[]=bar',
);
});
it('overrides multiple values with one', () => {
expect(
mergeUrlParams({ array: ['baz'] }, '?array[]=foo&array[]=bar', spreadArrayOptions),
).toBe('?array[]=baz');
});
it('removes existing params', () => {
expect(
mergeUrlParams({ array: null }, '?array[]=foo&array[]=bar', spreadArrayOptions),
).toBe('');
});
it('removes existing params and keeps others', () => {
expect(
mergeUrlParams(
{ array: null },
'?array[]=foo&array[]=bar&other=quis',
spreadArrayOptions,
),
).toBe('?other=quis');
});
it('removes existing params along others', () => {
expect(
mergeUrlParams(
{ array: null, other: 'quis' },
'?array[]=foo&array[]=bar',
spreadArrayOptions,
),
).toBe('?other=quis');
});
it('handles empty arrays along other parameters', () => {
expect(mergeUrlParams({ array: [], other: 'quis' }, '?array=baz', spreadArrayOptions)).toBe(
'?array[]=&other=quis',
);
});
it('handles multiple values along other parameters', () => {
expect(
mergeUrlParams(
{ array: ['foo', 'bar'], other: 'quis' },
'?array=baz',
spreadArrayOptions,
),
).toBe('?array[]=foo&array[]=bar&other=quis');
});
it('handles array values with encoding', () => {
expect(
mergeUrlParams({ array: ['foo+', 'bar,baz'] }, '?array[]=%2Fbaz', spreadArrayOptions),
).toBe('?array[]=foo%2B&array[]=bar%2Cbaz');
});
it('handles multiple arrays', () => {
expect(
mergeUrlParams(
{ array1: ['foo+', 'bar,baz'], array2: ['quis', 'quux'] },
'?array1[]=%2Fbaz',
spreadArrayOptions,
),
).toBe('?array1[]=foo%2B&array1[]=bar%2Cbaz&array2[]=quis&array2[]=quux');
});
});
describe('without spread array option', () => {
it('maintains multiple values', () => {
expect(mergeUrlParams({}, '?array=foo%2Cbar')).toBe('?array=foo%2Cbar');
});
it('overrides multiple values with one', () => {
expect(mergeUrlParams({ array: ['baz'] }, '?array=foo%2Cbar')).toBe('?array=baz');
});
it('removes existing params', () => {
expect(mergeUrlParams({ array: null }, '?array=foo%2Cbar')).toBe('');
});
it('removes existing params and keeps others', () => {
expect(mergeUrlParams({ array: null }, '?array=foo&array=bar&other=quis')).toBe(
'?other=quis',
);
});
it('removes existing params along others', () => {
expect(mergeUrlParams({ array: null, other: 'quis' }, '?array=foo&array=bar')).toBe(
'?other=quis',
);
});
it('handles empty arrays along other parameters', () => {
expect(mergeUrlParams({ array: [], other: 'quis' }, '?array=baz')).toBe(
'?array=&other=quis',
);
});
it('handles multiple values along other parameters', () => {
expect(mergeUrlParams({ array: ['foo', 'bar'], other: 'quis' }, '?array=baz')).toBe(
'?array=foo%2Cbar&other=quis',
);
});
it('handles array values with encoding', () => {
expect(mergeUrlParams({ array: ['foo+', 'bar,baz'] }, '?array=%2Fbaz')).toBe(
'?array=foo%2B%2Cbar%2Cbaz',
);
});
it('handles multiple arrays', () => {
expect(
mergeUrlParams(
{ array1: ['foo+', 'bar,baz'], array2: ['quis', 'quux'] },
'?array1=%2Fbaz',
),
).toBe('?array1=foo%2B%2Cbar%2Cbaz&array2=quis%2Cquux');
});
});
});
describe('removeParams', () => {

View File

@@ -41,23 +41,23 @@ RSpec.describe Gitlab::Analytics::UniqueVisits, :clean_gitlab_redis_shared_state
expect(unique_visits.unique_visits_for(targets: target2_id)).to eq(1)
expect(unique_visits.unique_visits_for(targets: target4_id)).to eq(1)
expect(unique_visits.unique_visits_for(targets: target2_id, start_week: 15.days.ago)).to eq(1)
expect(unique_visits.unique_visits_for(targets: target2_id, start_date: 15.days.ago)).to eq(1)
expect(unique_visits.unique_visits_for(targets: target3_id)).to eq(0)
expect(unique_visits.unique_visits_for(targets: target5_id, start_week: 15.days.ago)).to eq(2)
expect(unique_visits.unique_visits_for(targets: target5_id, start_date: 15.days.ago)).to eq(2)
expect(unique_visits.unique_visits_for(targets: :analytics)).to eq(2)
expect(unique_visits.unique_visits_for(targets: :analytics, start_week: 15.days.ago)).to eq(1)
expect(unique_visits.unique_visits_for(targets: :analytics, start_week: 30.days.ago)).to eq(0)
expect(unique_visits.unique_visits_for(targets: :analytics, start_date: 15.days.ago)).to eq(1)
expect(unique_visits.unique_visits_for(targets: :analytics, start_date: 30.days.ago)).to eq(0)
expect(unique_visits.unique_visits_for(targets: :analytics, weeks: 4)).to eq(2)
expect(unique_visits.unique_visits_for(targets: :analytics, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2)
expect(unique_visits.unique_visits_for(targets: :compliance)).to eq(1)
expect(unique_visits.unique_visits_for(targets: :compliance, start_week: 15.days.ago)).to eq(2)
expect(unique_visits.unique_visits_for(targets: :compliance, start_week: 30.days.ago)).to eq(0)
expect(unique_visits.unique_visits_for(targets: :compliance, start_date: 15.days.ago)).to eq(2)
expect(unique_visits.unique_visits_for(targets: :compliance, start_date: 30.days.ago)).to eq(0)
expect(unique_visits.unique_visits_for(targets: :compliance, weeks: 4)).to eq(2)
expect(unique_visits.unique_visits_for(targets: :compliance, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2)
end
it 'sets the keys in Redis to expire automatically after 12 weeks' do
@@ -75,7 +75,7 @@ RSpec.describe Gitlab::Analytics::UniqueVisits, :clean_gitlab_redis_shared_state
expect do
unique_visits.track_visit(visitor1_id, invalid_target_id)
end.to raise_error("Invalid target id #{invalid_target_id}")
end.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
end
end

View File

@@ -34,19 +34,6 @@ RSpec.describe Gitlab::Metrics::Dashboard::Validator::Errors do
it { is_expected.to eq 'root is missing required keys: one' }
end
context 'when there is type mismatch' do
%w(null string boolean integer number array object).each do |expected_type|
context "on type: #{expected_type}" do
let(:type) { expected_type }
let(:details) { nil }
subject { described_class.new(error_hash).message }
it { is_expected.to eq "'property_name' at root is not of type: #{expected_type}" }
end
end
end
end
context 'for nested object' do

View File

@@ -143,56 +143,4 @@ RSpec.describe Gitlab::Metrics::Dashboard::Validator do
end
end
end
describe '#errors' do
context 'valid dashboard schema' do
it 'returns no errors' do
expect(described_class.errors(valid_dashboard)).to eq []
end
context 'with duplicate metric_ids' do
it 'returns errors' do
expect(described_class.errors(duplicate_id_dashboard)).to eq [Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds.new]
end
end
context 'with dashboard_path and project' do
subject { described_class.errors(valid_dashboard, dashboard_path: 'test/path.yml', project: project) }
context 'with no conflicting metric identifiers in db' do
it { is_expected.to eq [] }
end
context 'with metric identifier present in current dashboard' do
before do
create(:prometheus_metric,
identifier: 'metric_a1',
dashboard_path: 'test/path.yml',
project: project
)
end
it { is_expected.to eq [] }
end
context 'with metric identifier present in another dashboard' do
before do
create(:prometheus_metric,
identifier: 'metric_a1',
dashboard_path: 'some/other/dashboard/path.yml',
project: project
)
end
it { is_expected.to eq [Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds.new] }
end
end
end
context 'invalid dashboard schema' do
it 'returns collection of validation errors' do
expect(described_class.errors(invalid_dashboard)).to all be_kind_of(Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError)
end
end
end
end

View File

@@ -9,6 +9,89 @@ RSpec.describe Gitlab::RepositoryCacheAdapter do
let(:redis_set_cache) { repository.send(:redis_set_cache) }
let(:redis_hash_cache) { repository.send(:redis_hash_cache) }
describe '.cache_method_output_as_redis_set', :clean_gitlab_redis_cache, :aggregate_failures do
let(:klass) do
Class.new do
include Gitlab::RepositoryCacheAdapter # can't use described_class here
def letters
%w(b a c)
end
cache_method_as_redis_set(:letters)
def redis_set_cache
@redis_set_cache ||= Gitlab::RepositorySetCache.new(self)
end
def full_path
'foo/bar'
end
def project
end
end
end
let(:fake_repository) { klass.new }
context 'with an existing repository' do
it 'caches the output, sorting the results' do
expect(fake_repository).to receive(:_uncached_letters).once.and_call_original
2.times do
expect(fake_repository.letters).to eq(%w(a b c))
end
expect(fake_repository.redis_set_cache.exist?(:letters)).to eq(true)
expect(fake_repository.instance_variable_get(:@letters)).to eq(%w(a b c))
end
context 'membership checks' do
context 'when the cache key does not exist' do
it 'calls the original method and populates the cache' do
expect(fake_repository.redis_set_cache.exist?(:letters)).to eq(false)
expect(fake_repository).to receive(:_uncached_letters).once.and_call_original
# This populates the cache and memoizes the full result
expect(fake_repository.letters_include?('a')).to eq(true)
expect(fake_repository.letters_include?('d')).to eq(false)
expect(fake_repository.redis_set_cache.exist?(:letters)).to eq(true)
end
end
context 'when the cache key exists' do
before do
fake_repository.redis_set_cache.write(:letters, %w(b a c))
end
it 'calls #include? on the set cache' do
expect(fake_repository.redis_set_cache)
.to receive(:include?).with(:letters, 'a').and_call_original
expect(fake_repository.redis_set_cache)
.to receive(:include?).with(:letters, 'd').and_call_original
expect(fake_repository.letters_include?('a')).to eq(true)
expect(fake_repository.letters_include?('d')).to eq(false)
end
it 'memoizes the result' do
expect(fake_repository.redis_set_cache)
.to receive(:include?).once.and_call_original
expect(fake_repository.letters_include?('a')).to eq(true)
expect(fake_repository.letters_include?('a')).to eq(true)
expect(fake_repository.redis_set_cache)
.to receive(:include?).once.and_call_original
expect(fake_repository.letters_include?('d')).to eq(false)
expect(fake_repository.letters_include?('d')).to eq(false)
end
end
end
end
end
describe '#cache_method_output', :use_clean_rails_memory_store_caching do
let(:fallback) { 10 }

View File

@@ -0,0 +1,113 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_shared_state do
let(:entity1) { 'dfb9d2d2-f56c-4c77-8aeb-6cddc4a1f857' }
let(:entity2) { '1dd9afb2-a3ee-4de1-8ae3-a405579c8584' }
let(:entity3) { '34rfjuuy-ce56-sa35-ds34-dfer567dfrf2' }
let(:entity4) { '8b9a2671-2abf-4bec-a682-22f6a8f7bf31' }
let(:weekly_event) { 'g_analytics_contribution' }
let(:daily_event) { 'g_search' }
let(:different_aggregation) { 'different_aggregation' }
let(:known_events) do
[
{ name: "g_analytics_contribution", redis_slot: "analytics", category: "analytics", expiry: 84, aggregation: "weekly" },
{ name: "g_analytics_valuestream", redis_slot: "analytics", category: "analytics", expiry: 84, aggregation: "daily" },
{ name: "g_analytics_productivity", redis_slot: "analytics", category: "productivity", expiry: 84, aggregation: "weekly" },
{ name: "g_compliance_dashboard", redis_slot: "compliance", category: "compliance", aggregation: "weekly" },
{ name: "g_search", category: "global", aggregation: "daily" },
{ name: "different_aggregation", category: "global", aggregation: "monthly" }
].map(&:with_indifferent_access)
end
before do
allow(described_class).to receive(:known_events).and_return(known_events)
end
around do |example|
# We need to freeze to a reference time
# because visits are grouped by the week number in the year.
# Without freezing the time, the test may behave inconsistently
# depending on which day of the week the test is run.
# Monday, 1st of June 2020
reference_time = Time.utc(2020, 6, 1)
Timecop.freeze(reference_time) { example.run }
end
describe '.track_event' do
it "raise error if metrics don't have same aggregation" do
expect { described_class.track_event(entity1, different_aggregation, Date.current) } .to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownAggregation)
end
it 'raise error if metrics of unknown aggregation' do
expect { described_class.track_event(entity1, 'unknown', Date.current) } .to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
end
describe '.unique_events' do
before do
# Events in the current week should not be counted, as the week is not complete
described_class.track_event(entity1, weekly_event, Date.current)
described_class.track_event(entity2, weekly_event, Date.current)
# Events last week
described_class.track_event(entity1, weekly_event, 2.days.ago)
described_class.track_event(entity1, weekly_event, 2.days.ago)
# Events 2 weeks ago
described_class.track_event(entity1, weekly_event, 2.weeks.ago)
# Events 4 weeks ago
described_class.track_event(entity3, weekly_event, 4.weeks.ago)
described_class.track_event(entity4, weekly_event, 29.days.ago)
# Events on the current day should be counted by the daily aggregation
described_class.track_event(entity1, daily_event, Date.current)
described_class.track_event(entity2, daily_event, Date.current)
# Events last week
described_class.track_event(entity1, daily_event, 2.days.ago)
described_class.track_event(entity1, daily_event, 2.days.ago)
# Events 2 weeks ago
described_class.track_event(entity1, daily_event, 14.days.ago)
# Events 4 weeks ago
described_class.track_event(entity3, daily_event, 28.days.ago)
described_class.track_event(entity4, daily_event, 29.days.ago)
end
it 'raises an error if events are not in the same slot' do
expect { described_class.unique_events(event_names: %w(g_analytics_contribution g_compliance_dashboard), start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same slot')
end
it 'raises an error if events are not in the same category' do
expect { described_class.unique_events(event_names: %w(g_analytics_contribution g_analytics_productivity), start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same category')
end
it "raise error if metrics don't have same aggregation" do
expect { described_class.unique_events(event_names: %w(g_analytics_contribution g_analytics_valuestream), start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should have same aggregation level')
end
context 'when data for the last complete week' do
it { expect(described_class.unique_events(event_names: weekly_event, start_date: 1.week.ago, end_date: Date.current)).to eq(1) }
end
context 'when data for the last 4 complete weeks' do
it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2) }
end
context 'when data for the week 4 weeks ago' do
it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: 3.weeks.ago)).to eq(1) }
end
context 'when using daily aggregation' do
it { expect(described_class.unique_events(event_names: daily_event, start_date: 7.days.ago, end_date: Date.current)).to eq(2) }
it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: Date.current)).to eq(3) }
it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: 21.days.ago)).to eq(1) }
end
end
end
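
Aside: the counter these specs pin down is HyperLogLog-based. A minimal sketch of the weekly tracking scheme, assuming ISO-week key bucketing inferred from the spec (the key format is an assumption, not the implementation); the week-number bucketing is also why the spec freezes time:

require 'redis'

def weekly_key(event_name, time)
  # %G-%V is the ISO week-based year and week number.
  "{hll_counters}_#{event_name}-#{time.strftime('%G-%V')}"
end

redis = Redis.new
redis.pfadd(weekly_key('g_analytics_contribution', Time.now.utc), 'dfb9d2d2-f56c-4c77-8aeb-6cddc4a1f857')

# Unions the weekly HLLs and returns an estimated distinct count.
keys = (0..3).map { |n| weekly_key('g_analytics_contribution', Time.now.utc - n * 7 * 86_400) }
redis.pfcount(*keys)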

View File

@ -942,12 +942,12 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.analytics_unique_visits_data }
it 'returns the number of unique visits to pages with analytics features' do
::Gitlab::Analytics::UniqueVisits::ANALYTICS_IDS.each do |target_id|
::Gitlab::Analytics::UniqueVisits.analytics_ids.each do |target_id|
expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: target_id).and_return(123)
end
expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: :analytics).and_return(543)
expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: :analytics, weeks: 4).and_return(987)
expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: :analytics, start_date: 4.weeks.ago.to_date, end_date: Date.current).and_return(987)
expect(subject).to eq({
analytics_unique_visits: {
@ -978,13 +978,13 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
described_class.clear_memoization(:unique_visit_service)
allow_next_instance_of(::Gitlab::Analytics::UniqueVisits) do |instance|
::Gitlab::Analytics::UniqueVisits::COMPLIANCE_IDS.each do |target_id|
::Gitlab::Analytics::UniqueVisits.compliance_ids.each do |target_id|
allow(instance).to receive(:unique_visits_for).with(targets: target_id).and_return(123)
end
allow(instance).to receive(:unique_visits_for).with(targets: :compliance).and_return(543)
allow(instance).to receive(:unique_visits_for).with(targets: :compliance, weeks: 4).and_return(987)
allow(instance).to receive(:unique_visits_for).with(targets: :compliance, start_date: 4.weeks.ago.to_date, end_date: Date.current).and_return(987)
end
end

View File

@ -13,11 +13,11 @@ RSpec.describe BlobViewer::MetricsDashboardYml do
subject(:viewer) { described_class.new(blob) }
context 'when the definition is valid' do
let(:data) { fixture_file('lib/gitlab/metrics/dashboard/sample_dashboard.yml')}
let(:data) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
describe '#valid?' do
it 'calls prepare! on the viewer' do
allow(Gitlab::Metrics::Dashboard::Validator).to receive(:errors)
allow(PerformanceMonitoring::PrometheusDashboard).to receive(:from_json)
expect(viewer).to receive(:prepare!)
@ -30,44 +30,46 @@ RSpec.describe BlobViewer::MetricsDashboardYml do
expect_next_instance_of(::Gitlab::Config::Loader::Yaml, data) do |loader|
expect(loader).to receive(:load_raw!).and_call_original
end
expect(Gitlab::Metrics::Dashboard::Validator)
.to receive(:errors)
.with(yml, dashboard_path: '.gitlab/dashboards/custom-dashboard.yml', project: project)
expect(PerformanceMonitoring::PrometheusDashboard)
.to receive(:from_json)
.with(yml)
.and_call_original
expect(viewer.valid?).to be_truthy
end
end
describe '#errors' do
it 'returns empty array' do
allow(Gitlab::Metrics::Dashboard::Validator).to receive(:errors).and_return([])
it 'returns nil' do
allow(PerformanceMonitoring::PrometheusDashboard).to receive(:from_json)
expect(viewer.errors).to eq []
expect(viewer.errors).to be nil
end
end
end
context 'when definition is invalid' do
let(:error) { ::Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError.new }
let(:error) { ActiveModel::ValidationError.new(PerformanceMonitoring::PrometheusDashboard.new.tap(&:validate)) }
let(:data) do
<<~YAML
dashboard:
YAML
end
before do
allow(Gitlab::Metrics::Dashboard::Validator).to receive(:errors).and_return([error])
end
describe '#valid?' do
it 'returns false' do
expect(viewer.valid?).to be false
expect(PerformanceMonitoring::PrometheusDashboard)
.to receive(:from_json).and_raise(error)
expect(viewer.valid?).to be_falsey
end
end
describe '#errors' do
it 'returns validation errors' do
expect(viewer.errors).to eq [error]
allow(PerformanceMonitoring::PrometheusDashboard)
.to receive(:from_json).and_raise(error)
expect(viewer.errors).to be error.model.errors
end
end
end
@ -83,14 +85,17 @@ RSpec.describe BlobViewer::MetricsDashboardYml do
describe '#valid?' do
it 'returns false' do
expect(Gitlab::Metrics::Dashboard::Validator).not_to receive(:errors)
expect(viewer.valid?).to be false
expect(PerformanceMonitoring::PrometheusDashboard).not_to receive(:from_json)
expect(viewer.valid?).to be_falsey
end
end
describe '#errors' do
it 'returns validation errors' do
expect(viewer.errors).to all be_kind_of Gitlab::Config::Loader::FormatError
yaml_wrapped_errors = { 'YAML syntax': ["(<unknown>): did not find expected key while parsing a block mapping at line 1 column 1"] }
expect(viewer.errors).to be_kind_of ActiveModel::Errors
expect(viewer.errors.messages).to eql(yaml_wrapped_errors)
end
end
end
@ -108,12 +113,15 @@ RSpec.describe BlobViewer::MetricsDashboardYml do
end
it 'is invalid' do
expect(Gitlab::Metrics::Dashboard::Validator).not_to receive(:errors)
expect(viewer.valid?).to be false
expect(PerformanceMonitoring::PrometheusDashboard).not_to receive(:from_json)
expect(viewer.valid?).to be(false)
end
it 'returns validation errors' do
expect(viewer.errors).to all be_kind_of Gitlab::Config::Loader::FormatError
yaml_wrapped_errors = { 'YAML syntax': ["The parsed YAML is too big"] }
expect(viewer.errors).to be_kind_of(ActiveModel::Errors)
expect(viewer.errors.messages).to eq(yaml_wrapped_errors)
end
end
end
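
For orientation, the behaviour asserted above suggests a viewer shape roughly like the following (method and attribute names are assumptions, not the shipped code):

# Hedged sketch of the viewer contract: #errors is nil when the dashboard
# parses and validates, and an ActiveModel::Errors object otherwise.
def valid?
  errors.nil?
end

def errors
  return @errors if defined?(@errors)

  @errors =
    begin
      PerformanceMonitoring::PrometheusDashboard.from_json(parsed_yaml)
      nil
    rescue ActiveModel::ValidationError => e
      e.model.errors
    end
end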

View File

@ -185,6 +185,12 @@ RSpec.describe DesignManagement::Design do
end
end
describe '.in_creation_order' do
it 'sorts by ID in ascending order' do
expect(described_class.in_creation_order).to eq([design1, design2, design3, deleted_design])
end
end
describe '.with_filename' do
it 'returns correct design when passed a single filename' do
expect(described_class.with_filename(design1.filename)).to eq([design1])

View File

@ -219,31 +219,20 @@ RSpec.describe PerformanceMonitoring::PrometheusDashboard do
end
describe '#schema_validation_warnings' do
let_it_be(:project) { create(:project) }
let_it_be(:environment) { create(:environment, project: project) }
let(:path) { '.gitlab/dashboards/test.yml' }
subject(:schema_validation_warnings) { described_class.new(json_content.merge(path: path, environment: environment)).schema_validation_warnings }
before do
allow(Gitlab::Metrics::Dashboard::Finder).to receive(:find_raw).with(project, dashboard_path: path).and_return(json_content)
end
context 'when schema is valid' do
it 'returns nil' do
expect(Gitlab::Metrics::Dashboard::Validator).to receive(:errors).with(json_content, dashboard_path: path, project: project).and_return([])
expect(schema_validation_warnings).to eq []
expect(described_class).to receive(:from_json)
expect(described_class.new.schema_validation_warnings).to be_nil
end
end
context 'when schema is invalid' do
it 'returns an array of error messages' do
error = ::Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError.new
instance = described_class.new
instance.errors.add(:test, 'test error')
expect(Gitlab::Metrics::Dashboard::Validator).to receive(:errors).with(json_content, dashboard_path: path, project: project).and_return([error])
expect(schema_validation_warnings).to eq [error.message]
expect(described_class).to receive(:from_json).and_raise(ActiveModel::ValidationError.new(instance))
expect(described_class.new.schema_validation_warnings).to eq ['test: test error']
end
end
end
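
The two cases above imply an implementation along these lines (a plausible shape inferred from the assertions, not confirmed source):

# Re-validates via .from_json; flattens model errors into
# "attribute: message" strings, or returns nil when the schema is valid.
def schema_validation_warnings
  self.class.from_json(as_json)
  nil
rescue ActiveModel::ValidationError => exception
  exception.model.errors.map { |attribute, message| "#{attribute}: #{message}" }
end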

View File

@ -20,7 +20,7 @@ RSpec.describe ProjectPolicy do
read_project_for_iids read_issue_iid read_label
read_milestone read_snippet read_project_member read_note
create_project create_issue create_note upload_file create_merge_request_in
award_emoji read_release
award_emoji read_release read_issue_link
]
end
@ -30,7 +30,7 @@ RSpec.describe ProjectPolicy do
admin_issue admin_label admin_list read_commit_status read_build
read_container_image read_pipeline read_environment read_deployment
read_merge_request download_wiki_code read_sentry_issue read_metrics_dashboard_annotation
metrics_dashboard read_confidential_issues
metrics_dashboard read_confidential_issues admin_issue_link
]
end
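
For context, the two added abilities surface at call sites through the existing policy entry point. Illustrative only; the roles are assumptions based on which permission list each ability joined:

Ability.allowed?(guest, :read_issue_link, project)     # expected: true
Ability.allowed?(reporter, :admin_issue_link, project) # expected: true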

View File

@ -7,7 +7,7 @@ RSpec.describe 'Getting Metrics Dashboard' do
let_it_be(:current_user) { create(:user) }
let(:project) { create(:project) }
let(:environment) { create(:environment, project: project) }
let!(:environment) { create(:environment, project: project) }
let(:query) do
graphql_query_for(
@ -67,7 +67,7 @@ RSpec.describe 'Getting Metrics Dashboard' do
it 'returns metrics dashboard' do
dashboard = graphql_data.dig('project', 'environments', 'nodes', 0, 'metricsDashboard')
expect(dashboard).to eql("path" => path, "schemaValidationWarnings" => ["root is missing required keys: panel_groups"])
expect(dashboard).to eql("path" => path, "schemaValidationWarnings" => ["panel_groups: should be an array of panel_groups objects"])
end
end
@ -78,7 +78,7 @@ RSpec.describe 'Getting Metrics Dashboard' do
it 'returns metrics dashboard' do
dashboard = graphql_data.dig('project', 'environments', 'nodes', 0, 'metricsDashboard')
expect(dashboard).to eql("path" => path, "schemaValidationWarnings" => ["root is missing required keys: dashboard, panel_groups"])
expect(dashboard).to eql("path" => path, "schemaValidationWarnings" => ["dashboard: can't be blank", "panel_groups: should be an array of panel_groups objects"])
end
end
end
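
The assertions above dig through project.environments.nodes[0].metricsDashboard, which implies a query shaped roughly like this (field arguments are assumptions inferred from the spec's lets):

query = <<~GRAPHQL
  {
    project(fullPath: "#{project.full_path}") {
      environments(name: "#{environment.name}") {
        nodes {
          metricsDashboard(path: "#{path}") {
            path
            schemaValidationWarnings
          }
        }
      }
    }
  }
GRAPHQL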

View File

@ -117,9 +117,30 @@ RSpec.describe DesignManagement::MoveDesignsService do
let(:previous_design) { designs.second }
let(:next_design) { designs.third }
it 'calls move_between and is successful' do
expect(current_design).to receive(:move_between).with(previous_design, next_design)
it 'repositions existing designs and correctly places the given design' do
other_design1 = create(:design, issue: issue, relative_position: 10)
other_design2 = create(:design, issue: issue, relative_position: 20)
other_design3, other_design4 = create_list(:design, 2, issue: issue)
expect(subject).to be_success
expect(issue.designs.ordered(issue.project)).to eq([
# Existing designs which already had a relative_position set.
# These should stay at the beginning, in the same order.
other_design1,
other_design2,
# The designs we're passing into the service.
# These should be placed between the existing designs, in the correct order.
previous_design,
current_design,
next_design,
# Existing designs which didn't have a relative_position set.
# These should be placed at the end, in the order of their IDs.
other_design3,
other_design4
])
end
end
end
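
A toy illustration of what "correctly places the given design" means positionally, assuming a simple integer-midpoint strategy (the production strategy also handles rebalancing, which is omitted here):

def position_between(previous_position, next_position)
  previous_position + (next_position - previous_position) / 2
end

position_between(10, 20) # => 15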

View File

@ -5,9 +5,9 @@ RSpec.describe DesignManagement::SaveDesignsService do
include DesignManagementTestHelpers
include ConcurrentHelpers
let_it_be(:developer) { create(:user) }
let_it_be_with_reload(:issue) { create(:issue) }
let_it_be(:developer) { create(:user, developer_projects: [issue.project]) }
let(:project) { issue.project }
let(:issue) { create(:issue) }
let(:user) { developer }
let(:files) { [rails_sample] }
let(:design_repository) { ::Gitlab::GlRepository::DESIGN.repository_resolver.call(project) }
@ -19,8 +19,20 @@ RSpec.describe DesignManagement::SaveDesignsService do
fixture_file_upload("spec/fixtures/#{filename}")
end
def commit_count
design_repository.expire_statistics_caches
design_repository.expire_root_ref_cache
design_repository.commit_count
end
before do
project.add_developer(developer)
if issue.design_collection.repository.exists?
issue.design_collection.repository.expire_all_method_caches
issue.design_collection.repository.raw.delete_all_refs_except([Gitlab::Git::BLANK_SHA])
end
allow(::DesignManagement::NewVersionWorker)
.to receive(:perform_async).with(Integer).and_return(nil)
end
def run_service(files_to_upload = nil)
@ -83,24 +95,20 @@ RSpec.describe DesignManagement::SaveDesignsService do
design_repository.exists?
end
it 'creates a design repository when it did not exist' do
expect { run_service }.to change { repository_exists }.from(false).to(true)
it 'is ensured when the service runs' do
run_service
expect(repository_exists).to be true
end
end
it 'updates the creation count' do
it 'creates a commit, an event in the activity stream and updates the creation count' do
counter = Gitlab::UsageDataCounters::DesignsCounter
expect { run_service }.to change { counter.read(:create) }.by(1)
end
it 'creates an event in the activity stream' do
expect { run_service }
.to change { Event.count }.by(1)
.and change { Event.for_design.created_action.count }.by(1)
end
it 'creates a commit in the repository' do
run_service
.and change { counter.read(:create) }.by(1)
expect(design_repository.commit).to have_attributes(
author: user,
@ -109,35 +117,26 @@ RSpec.describe DesignManagement::SaveDesignsService do
end
it 'can run the same command in parallel' do
blocks = Array.new(10).map do
unique_files = %w(rails_sample.jpg dk.png)
.map { |name| RenameableUpload.unique_file(name) }
parallelism = 4
blocks = Array.new(parallelism).map do
unique_files = [RenameableUpload.unique_file('rails_sample.jpg')]
-> { run_service(unique_files) }
end
expect { run_parallel(blocks) }.to change(DesignManagement::Version, :count).by(10)
expect { run_parallel(blocks) }.to change(DesignManagement::Version, :count).by(parallelism)
end
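
ConcurrentHelpers#run_parallel (used just above) is assumed here to behave roughly like this toy stand-in:

# Run each lambda on its own thread and wait for all of them to finish.
def run_parallel(blocks)
  blocks.map { |block| Thread.new(&block) }.each(&:join)
end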
it 'causes diff_refs not to be nil' do
expect(response).to include(
designs: all(have_attributes(diff_refs: be_present))
)
end
describe 'the response' do
it 'includes designs with the expected properties' do
updated_designs = response[:designs]
it 'creates a design & a version for the filename if it did not exist' do
expect(issue.designs.size).to eq(0)
updated_designs = response[:designs]
expect(updated_designs.size).to eq(1)
expect(updated_designs.first.versions.size).to eq(1)
end
it 'saves the user as the author' do
updated_designs = response[:designs]
expect(updated_designs.first.versions.first.author).to eq(user)
expect(updated_designs).to all(have_attributes(diff_refs: be_present))
expect(updated_designs.size).to eq(1)
expect(updated_designs.first.versions.size).to eq(1)
expect(updated_designs.first.versions.first.author).to eq(user)
end
end
describe 'saving the file to LFS' do
@ -147,14 +146,10 @@ RSpec.describe DesignManagement::SaveDesignsService do
end
end
it 'saves the design to LFS' do
expect { run_service }.to change { LfsObject.count }.by(1)
end
it 'saves the repository_type of the LfsObjectsProject as design' do
expect do
run_service
end.to change { project.lfs_objects_projects.count }.from(0).to(1)
it 'saves the design to LFS and saves the repository_type of the LfsObjectsProject as design' do
expect { run_service }
.to change { LfsObject.count }.by(1)
.and change { project.lfs_objects_projects.count }.from(0).to(1)
expect(project.lfs_objects_projects.first.repository_type).to eq('design')
end
@ -202,12 +197,10 @@ RSpec.describe DesignManagement::SaveDesignsService do
run_service
end
it 'does not create a new version' do
expect { run_service }.not_to change { issue.design_versions.count }
end
it 'does not create a new version, and returns the design in `skipped_designs`' do
response = nil
it 'returns the design in `skipped_designs` instead of `designs`' do
response = run_service
expect { response = run_service }.not_to change { issue.design_versions.count }
expect(response[:designs]).to be_empty
expect(response[:skipped_designs].size).to eq(1)
@ -223,35 +216,20 @@ RSpec.describe DesignManagement::SaveDesignsService do
touch_files([files.first])
end
it 'counts one creation and one update' do
it 'has the correct side-effects' do
counter = Gitlab::UsageDataCounters::DesignsCounter
expect { run_service }
.to change { counter.read(:create) }.by(1)
.and change { counter.read(:update) }.by(1)
end
it 'creates the correct activity stream events' do
expect(::DesignManagement::NewVersionWorker)
.to receive(:perform_async).once.with(Integer).and_return(nil)
expect { run_service }
.to change { Event.count }.by(2)
.and change { Event.for_design.count }.by(2)
.and change { Event.created_action.count }.by(1)
.and change { Event.updated_action.count }.by(1)
end
it 'creates a single commit' do
commit_count = -> do
design_repository.expire_all_method_caches
design_repository.commit_count
end
expect { run_service }.to change { commit_count.call }.by(1)
end
it 'enqueues just one new version worker' do
expect(::DesignManagement::NewVersionWorker)
.to receive(:perform_async).once.with(Integer)
run_service
.and change { counter.read(:create) }.by(1)
.and change { counter.read(:update) }.by(1)
.and change { commit_count }.by(1)
end
end
@ -262,45 +240,28 @@ RSpec.describe DesignManagement::SaveDesignsService do
expect(response).to include(designs: have_attributes(size: 2), status: :success)
end
it 'creates 2 designs with a single version' do
expect { run_service }.to change { issue.designs.count }.from(0).to(2)
expect(DesignManagement::Version.for_designs(issue.designs).size).to eq(1)
end
it 'increments the creation count by 2' do
it 'has the correct side-effects', :request_store do
counter = Gitlab::UsageDataCounters::DesignsCounter
expect { run_service }.to change { counter.read(:create) }.by 2
end
it 'enqueues a new version worker' do
expect(::DesignManagement::NewVersionWorker)
.to receive(:perform_async).once.with(Integer)
run_service
end
it 'creates a single commit' do
commit_count = -> do
design_repository.expire_all_method_caches
design_repository.commit_count
end
expect { run_service }.to change { commit_count.call }.by(1)
end
it 'only does 5 gitaly calls', :request_store, :sidekiq_might_not_need_inline do
allow(::DesignManagement::NewVersionWorker).to receive(:perform_async).with(Integer)
service = described_class.new(project, user, issue: issue, files: files)
# Some unrelated calls that are usually cached or happen only once
service.__send__(:repository).create_if_not_exists
service.__send__(:repository).has_visible_content?
# We expect:
# - An exists?
# - a check for existing blobs
# - default branch
# - an after_commit callback on LfsObjectsProject
design_repository.create_if_not_exists
design_repository.has_visible_content?
request_count = -> { Gitlab::GitalyClient.get_request_count }
expect(::DesignManagement::NewVersionWorker)
.to receive(:perform_async).once.with(Integer).and_return(nil)
# An exists?, a check for existing blobs, default branch, an after_commit
# callback on LfsObjectsProject
expect { service.execute }.to change(&request_count).by(4)
expect { service.execute }
.to change { issue.designs.count }.from(0).to(2)
.and change { DesignManagement::Version.count }.by(1)
.and change { counter.read(:create) }.by(2)
.and change { Gitlab::GitalyClient.get_request_count }.by(3)
.and change { commit_count }.by(1)
end
context 'when uploading too many files' do
@ -313,7 +274,7 @@ RSpec.describe DesignManagement::SaveDesignsService do
end
context 'when the user is not allowed to upload designs' do
let(:user) { create(:user) }
let(:user) { build_stubbed(:user) }
it_behaves_like 'a service error'
end

View File

@ -24,7 +24,7 @@ RSpec.shared_examples 'a class that supports relative positioning' do
item3.update!(relative_position: nil)
items = [item1, item2, item3]
described_class.move_nulls_to_end(items)
expect(described_class.move_nulls_to_end(items)).to be(2)
expect(items.sort_by(&:relative_position)).to eq(items)
expect(item1.relative_position).to be(1000)
@ -53,9 +53,8 @@ RSpec.shared_examples 'a class that supports relative positioning' do
it 'does not perform any moves if all items have their relative_position set' do
item1.update!(relative_position: 1)
expect do
described_class.move_nulls_to_end([item1])
end.not_to change { item1.reset.relative_position }
expect(described_class.move_nulls_to_start([item1])).to be(0)
expect(item1.reload.relative_position).to be(1)
end
it 'manages to move nulls to the end even if there is a sequence at the end' do
@ -97,7 +96,7 @@ RSpec.shared_examples 'a class that supports relative positioning' do
item3.update!(relative_position: 1000)
items = [item1, item2, item3]
described_class.move_nulls_to_start(items)
expect(described_class.move_nulls_to_start(items)).to be(2)
items.map(&:reload)
expect(items.sort_by(&:relative_position)).to eq(items)
@ -128,10 +127,8 @@ RSpec.shared_examples 'a class that supports relative positioning' do
it 'does not perform any moves if all items have their relative_position set' do
item1.update!(relative_position: 1)
described_class.move_nulls_to_start([item1])
item1.reload
expect(item1.relative_position).to be(1)
expect(described_class.move_nulls_to_start([item1])).to be(0)
expect(item1.reload.relative_position).to be(1)
end
end
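
Taken together, the updated shared examples pin down a return-value contract; a hedged sketch of the assumed semantics for the `move_nulls_to_end` case:

# Nil positions are appended after the current maximum (in the given order),
# already-positioned rows are untouched, and the return value counts rows moved.
def move_nulls_to_end(items)
  nulls = items.select { |item| item.relative_position.nil? }
  return 0 if nulls.empty?

  max = items.map(&:relative_position).compact.max || 0
  nulls.each_with_index do |item, index|
    item.update!(relative_position: max + (index + 1) * 500)
  end
  nulls.size
end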