Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-06-04 18:08:32 +00:00
parent 953d26294b
commit 00ecf5debe
47 changed files with 318 additions and 247 deletions

View File

@ -2,20 +2,6 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 13.0.5 (2020-06-04)
### Fixed (4 changes)
- Fix NoMethodError by using the correct method to report exceptions to Sentry. !33260
- Fix bug in snippets updating only file_name or content. !33375
- Fix ambiguous string concatenation on CleanupProjectsWithMissingNamespace. !33497
- Fix linking alerts to created issues for the Generic alerts integration. !33647
### Other (1 change)
- Update GitLab Workhorse to v8.31.2. !33818
## 13.0.4 (2020-06-03)
### Security (1 change)

View File

@ -109,9 +109,10 @@ export function mergeUrlParams(params, url) {
*
* @param {string[]} params - the query param names to remove
* @param {string} [url=windowLocation().href] - url from which the query param will be removed
* @param {boolean} skipEncoding - set to true when the url does not require encoding
* @returns {string} A copy of the original url but without the query param
*/
export function removeParams(params, url = window.location.href) {
export function removeParams(params, url = window.location.href, skipEncoding = false) {
const [rootAndQuery, fragment] = url.split('#');
const [root, query] = rootAndQuery.split('?');
@ -119,12 +120,13 @@ export function removeParams(params, url = window.location.href) {
return url;
}
const encodedParams = params.map(param => encodeURIComponent(param));
const removableParams = skipEncoding ? params : params.map(param => encodeURIComponent(param));
const updatedQuery = query
.split('&')
.filter(paramPair => {
const [foundParam] = paramPair.split('=');
return encodedParams.indexOf(foundParam) < 0;
return removableParams.indexOf(foundParam) < 0;
})
.join('&');

View File

@ -140,7 +140,7 @@ module AlertManagement
end
def register_new_event!
increment!(:events, 1)
increment!(:events)
end
private
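`ActiveRecord::Persistence#increment!` already defaults its step to 1, so dropping the explicit argument is behaviour-preserving. A minimal sketch, assuming `alert` is a persisted `AlertManagement::Alert` record:

```ruby
# Both calls bump the counter column by one and save immediately.
alert.increment!(:events)     # step defaults to 1
alert.increment!(:events, 1)  # equivalent, with the step spelled out
```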

View File

@ -28,6 +28,7 @@ class Milestone < ApplicationRecord
scope :order_by_name_asc, -> { order(Arel::Nodes::Ascending.new(arel_table[:title].lower)) }
scope :reorder_by_due_date_asc, -> { reorder(Gitlab::Database.nulls_last_order('due_date', 'ASC')) }
scope :with_api_entity_associations, -> { preload(project: [:project_feature, :route, namespace: :route]) }
validates_associated :milestone_releases, message: -> (_, obj) { obj[:value].map(&:errors).map(&:full_messages).join(",") }

View File

@ -205,7 +205,7 @@ class Wiki
end
def wiki_base_path
Gitlab.config.gitlab.relative_url_root + web_url(only_path: true).sub(%r{/#{Wiki::HOMEPAGE}\z}, '')
web_url(only_path: true).sub(%r{/#{Wiki::HOMEPAGE}\z}, '')
end
private
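The prefix is dropped because the URL helpers behind `web_url(only_path: true)` already include the relative URL root via the Rails `script_name`, so prepending `Gitlab.config.gitlab.relative_url_root` duplicated it. A hedged sketch with illustrative values, assuming an instance served from `/gitlab` and a hypothetical `group/project` wiki:

```ruby
# Illustrative values only, not captured output.
Rails.application.routes.default_url_options[:script_name]
# => "/gitlab"

wiki.web_url(only_path: true)
# => "/gitlab/group/project/-/wikis/home"   (already prefixed)

wiki.wiki_base_path
# => "/gitlab/group/project/-/wikis"        (the old code yielded "/gitlab/gitlab/...")
```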

View File

@ -29,6 +29,7 @@ module AlertManagement
def process_firing_alert_management_alert
if am_alert.present?
am_alert.register_new_event!
reset_alert_management_alert_status
else
create_alert_management_alert

View File

@ -0,0 +1,5 @@
---
title: Add CPU, memory usage charts to self monitoring default dashboard
merge_request: 33532
author:
type: changed

View File

@ -0,0 +1,5 @@
---
title: Fix relative URL root in wiki_base_path
merge_request: 33841
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Fix ambiguous string concatenation on CleanupProjectsWithMissingNamespace
merge_request: 33497
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: "Rust CI template: Replace --all with --workspace on cargo test."
merge_request: 33517
author: Markus Becker
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Fix NoMethodError by using the correct method to report exceptions to Sentry
merge_request: 33260
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Fix pagination link header
merge_request: 33714
author: Max Wittig
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Fix bug in snippets updating only file_name or content
merge_request: 33375
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Remove non migrated snippets from failed imports
merge_request: 33621
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Increase events count for Prometheus alerts
merge_request: 33706
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Fix linking alerts to created issues for the Generic alerts integration
merge_request: 33647
author:
type: fixed

View File

@ -1,6 +1,27 @@
dashboard: 'Default dashboard'
priority: 1
panel_groups:
- group: 'Resource usage'
panels:
- title: "Memory usage"
type: "line-chart"
y_label: "% memory used"
metrics:
- id: node_memory_usage_percentage
query_range: '(1 - (node_memory_MemAvailable_bytes or node_memory_MemFree_bytes + node_memory_Buffers_bytes + node_memory_Cached_bytes + node_memory_Slab_bytes) / node_memory_MemTotal_bytes) * 100'
unit: "%"
label: instance
- title: "CPU usage"
type: "line-chart"
y_label: "% CPU used"
metrics:
- id: node_cpu_usage_percentage
query_range: '(avg without (mode,cpu) (1 - irate(node_cpu_seconds_total{mode="idle"}[5m]))) * 100'
unit: "%"
label: instance
- group: Web Service
panels:
- title: Web Service - Error Ratio
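For reference, the memory query reports used memory as a percentage of the total, falling back to `MemFree + Buffers + Cached + Slab` on nodes that don't export `node_memory_MemAvailable_bytes`, and the CPU query averages the non-idle fraction across all cores and modes. Roughly:

```math
\text{memory used \%} = \left(1 - \frac{\text{MemAvailable (or MemFree + Buffers + Cached + Slab)}}{\text{MemTotal}}\right) \times 100
```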

View File

@ -21,10 +21,12 @@ All administrators at the time of creation of the project and group will be adde
as maintainers of the group and project, and as an admin, you'll be able to add new
members to the group in order to give them maintainer access to the project.
This project is used to self monitor your GitLab instance. Metrics are not yet
fully integrated, and the dashboard does not aggregate any data on Omnibus installations. GitLab plans
to provide integrated self-monitoring metrics in a future release. You can
currently use the project to configure your own [custom metrics](../../../user/project/integrations/prometheus.md#adding-custom-metrics) using
This project is used to self monitor your GitLab instance. The metrics dashboard
of the project shows some basic resource usage charts, such as CPU and memory usage
of each server in [Omnibus GitLab](https://docs.gitlab.com/omnibus/) installations.
You can also use the project to configure your own
[custom metrics](../../../user/project/integrations/prometheus.md#adding-custom-metrics) using
metrics exposed by the [GitLab exporter](../prometheus/gitlab_metrics.md#metrics-available).
## Creating the self monitoring project

View File

@ -417,10 +417,14 @@ The response header includes a link to the next page. For example:
HTTP/1.1 200 OK
...
Links: <https://gitlab.example.com/api/v4/projects?pagination=keyset&per_page=50&order_by=id&sort=asc&id_after=42>; rel="next"
Link: <https://gitlab.example.com/api/v4/projects?pagination=keyset&per_page=50&order_by=id&sort=asc&id_after=42>; rel="next"
Status: 200 OK
...
```
CAUTION: **Deprecation:**
The `Links` header will be removed in GitLab 14.0 to align with the [W3C specification](https://www.w3.org/wiki/LinkHeader).
The link to the next page contains an additional filter, `id_after=42`, which excludes records that have already been retrieved.
Note that the type of filter depends on the `order_by` option used, and there may be more than one additional filter.
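To show how a client consumes the new header, here is a minimal sketch (the host comes from the example above; the `GITLAB_TOKEN` environment variable is an assumption) that follows `rel="next"` links until no further page is advertised:

```ruby
require 'json'
require 'net/http'
require 'uri'

# Walk keyset-paginated projects by following the Link header.
url = URI('https://gitlab.example.com/api/v4/projects?pagination=keyset&per_page=50&order_by=id&sort=asc')

loop do
  response = Net::HTTP.start(url.host, url.port, use_ssl: url.scheme == 'https') do |http|
    http.get(url.request_uri, 'PRIVATE-TOKEN' => ENV['GITLAB_TOKEN'])
  end

  JSON.parse(response.body).each { |project| puts project['id'] }

  # Prefer `Link`; fall back to the deprecated `Links` header.
  next_page = (response['Link'] || response['Links'])&.match(/<([^>]+)>;\s*rel="next"/)
  break unless next_page

  url = URI(next_page[1])
end
```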

View File

@ -366,7 +366,9 @@ Example response:
"revision": "33d33a096a",
"package_files_count": 10,
"package_files_checksummed_count": 10,
"package_files_checksum_failed_count": 0
"package_files_checksum_failed_count": 0,
"package_files_synced_count": 10,
"package_files_failed_count": 5
},
{
"geo_node_id": 2,
@ -437,7 +439,9 @@ Example response:
"revision": "33d33a096a",
"package_files_count": 10,
"package_files_checksummed_count": 10,
"package_files_checksum_failed_count": 0
"package_files_checksum_failed_count": 0,
"package_files_synced_count": 10,
"package_files_failed_count": 5
}
]
```

View File

@ -12725,7 +12725,7 @@ type Vulnerability {
"""
Filter issue links by link type
"""
linkType: [VulnerabilityIssueLinkType!]
linkType: VulnerabilityIssueLinkType
): VulnerabilityIssueLinkConnection!
"""

View File

@ -37400,17 +37400,9 @@
"name": "linkType",
"description": "Filter issue links by link type",
"type": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "ENUM",
"name": "VulnerabilityIssueLinkType",
"ofType": null
}
}
"kind": "ENUM",
"name": "VulnerabilityIssueLinkType",
"ofType": null
},
"defaultValue": null
},

View File

@ -146,7 +146,7 @@ test:
Instead, you can use the
[`$CI_COMMIT_REF_NAME` predefined environment
variable](../variables/predefined_variables.md#variables-reference) in
variable](../variables/predefined_variables.md) in
combination with
[`only:variables`](../yaml/README.md#onlyvariablesexceptvariables) to
accomplish this behavior:

View File

@ -23,7 +23,7 @@ The following methods of authentication are supported:
- [Trigger token](#trigger-token)
- [CI job token](#ci-job-token)
If using the `$CI_PIPELINE_SOURCE` [predefined environment variable](../variables/predefined_variables.md#variables-reference)
If using the `$CI_PIPELINE_SOURCE` [predefined environment variable](../variables/predefined_variables.md)
to limit which jobs run in a pipeline, the value could be either `pipeline` or `trigger`,
depending on which trigger method is used.

View File

@ -10,8 +10,6 @@ type: reference
For an introduction on this subject, read through the
[getting started with environment variables](README.md) document.
## Overview
Some of the predefined environment variables are available only if a minimum
version of [GitLab Runner](https://docs.gitlab.com/runner/) is used. Consult the table below to find the
version of Runner required.
@ -22,7 +20,8 @@ Starting with GitLab 9.0, we have deprecated some variables. Read the
strongly advised to use the new variables as we will remove the old ones in
future GitLab releases.**
## Variables reference
You can add a command to your `.gitlab-ci.yml` file to
[output the values of all variables available for a job](README.md#list-all-environment-variables).
| Variable | GitLab | Runner | Description |
|-----------------------------------------------|--------|--------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|

View File

@ -1428,7 +1428,7 @@ This means the `only:changes` policy is useful for pipelines where:
- `$CI_PIPELINE_SOURCE == 'external_pull_request_event'`
If there is no Git push event, such as for pipelines with
[sources other than the three above](../variables/predefined_variables.md#variables-reference),
[sources other than the three above](../variables/predefined_variables.md),
`changes` can't determine if a given file is new or old, and will always
return true.

View File

@ -91,7 +91,7 @@ disable running the custom scanner.
GitLab also defines a `CI_PROJECT_REPOSITORY_LANGUAGES` variable, which provides the list of
languages in the repository. Your scanner can use this value to decide whether or not it should run.
Language detection currently relies on the [`linguist`](https://github.com/github/linguist) Ruby gem.
See [GitLab CI/CD predefined variables](../../ci/variables/predefined_variables.md#variables-reference).
See [GitLab CI/CD predefined variables](../../ci/variables/predefined_variables.md).
#### Policy checking example

View File

@ -385,6 +385,79 @@ You can supply a custom root certificate to complete TLS verification by using t
specifying a `ca` setting in a [`.bowerrc`](https://bower.io/docs/config/#bowerrc-specification)
file.
### Configuring Conan projects
You can configure [Conan](https://conan.io/) projects by adding a `.conan` directory to your
project root. The project root serves as the [`CONAN_USER_HOME`](https://docs.conan.io/en/latest/reference/env_vars.html#conan-user-home).
Consult the [Conan](https://docs.conan.io/en/latest/reference/config_files/conan.conf.html#conan-conf)
documentation for a list of settings that you can apply.
The `license_scanning` job runs in a [Debian 10](https://www.debian.org/releases/buster/) Docker
image. The supplied image ships with some build tools such as [CMake](https://cmake.org/) and [GCC](https://gcc.gnu.org/).
However, not all project types are supported by default. If additional tools are needed to
compile dependencies, use a [`before_script`](../../../ci/yaml/README.md#before_script-and-after_script)
to install them with the [`apt`](https://wiki.debian.org/PackageManagementTools)
package manager. For a comprehensive list of supported platforms, build systems, and compilers, consult [the Conan documentation](https://docs.conan.io/en/latest/introduction.html#all-platforms-all-build-systems-and-compilers).
The default [Conan](https://conan.io/) configuration sets [`CONAN_LOGIN_USERNAME`](https://docs.conan.io/en/latest/reference/env_vars.html#conan-login-username-conan-login-username-remote-name)
to `ci_user`, and binds [`CONAN_PASSWORD`](https://docs.conan.io/en/latest/reference/env_vars.html#conan-password-conan-password-remote-name)
to the [`CI_JOB_TOKEN`](../../../ci/variables/predefined_variables.md)
for the running job. This allows Conan projects to fetch packages from a [GitLab Conan Repository](../../packages/conan_repository/#fetching-conan-package-information-from-the-gitlab-package-registry)
if a GitLab remote is specified in the `.conan/remotes.json` file.
To override the default credentials, specify a [`CONAN_LOGIN_USERNAME_{REMOTE_NAME}`](https://docs.conan.io/en/latest/reference/env_vars.html#conan-login-username-conan-login-username-remote-name)
matching the name of the remote specified in the `.conan/remotes.json` file.
NOTE: **Note:**
[MSBuild](https://github.com/mono/msbuild#microsoftbuild-msbuild) projects aren't supported. The
`license_scanning` image ships with [Mono](https://www.mono-project.com/) and [MSBuild](https://github.com/mono/msbuild#microsoftbuild-msbuild).
Additional setup may be required to build packages for this project configuration.
#### Using private Conan registries
By default, [Conan](https://conan.io/) uses the `conan-center` remote. For example:
```json
{
"remotes": [
{
"name": "conan-center",
"url": "https://conan.bintray.com",
"verify_ssl": true
}
]
}
```
To fetch dependencies from an alternate remote, specify that remote in a `.conan/remotes.json`. For
example:
```json
{
"remotes": [
{
"name": "gitlab",
"url": "https://gitlab.com/api/v4/packages/conan",
"verify_ssl": true
}
]
}
```
If credentials are required to authenticate, you can configure a [protected variable](../../../ci/variables/README.md#protect-a-custom-variable)
following the naming convention described in the [`CONAN_LOGIN_USERNAME` documentation](https://docs.conan.io/en/latest/reference/env_vars.html#conan-login-username-conan-login-username-remote-name).
#### Custom root certificates for Conan
You can provide custom certificates by adding a `.conan/cacert.pem` file to the project root and
setting [`CA_CERT_PATH`](https://docs.conan.io/en/latest/reference/env_vars.html#conan-cacert-path)
to `.conan/cacert.pem`.
If you specify the `ADDITIONAL_CA_CERT_BUNDLE` [environment variable](#available-variables), this
variable's X.509 certificates are installed in the Docker image's default trust store and Conan is
configured to use this as the default `CA_CERT_PATH`.
### Migration from `license_management` to `license_scanning`
In GitLab 12.8, a new name for the `license_management` job was introduced. This change was made to improve clarity around the purpose of the scan, which is to detect and collect the types of licenses present in a project's dependencies.
@ -487,9 +560,13 @@ license_scanning:
The License Compliance job should now use local copies of the License Compliance analyzers to scan
your code and generate security reports, without requiring internet access.
Additional configuration may be needed for connecting to [private Maven repositories](#using-private-maven-repos),
Additional configuration may be needed for connecting to
[private Bower registries](#using-private-bower-registries),
[private NPM registries](#using-private-npm-registries), [private Yarn registries](#using-private-yarn-registries), and [private Python repositories](#using-private-python-repos).
[private Conan registries](#using-private-conan-registries),
[private Maven repositories](#using-private-maven-repos),
[private NPM registries](#using-private-npm-registries),
[private Python repositories](#using-private-python-repos),
and [private Yarn registries](#using-private-yarn-registries).
Exact name matches are required for [project policies](#project-policies-for-license-compliance)
when running in an offline environment ([see related issue](https://gitlab.com/gitlab-org/gitlab/-/issues/212388)).

View File

@ -131,6 +131,7 @@ The following table depicts the various user permission levels in a project.
| Enable/disable tag protections | | | | ✓ | ✓ |
| Edit project | | | | ✓ | ✓ |
| Edit project badges | | | | ✓ | ✓ |
| Share (invite) projects with groups | | | | ✓ (*8*) | ✓ (*8*)|
| Add deploy keys to project | | | | ✓ | ✓ |
| Configure project hooks | | | | ✓ | ✓ |
| Manage Runners | | | | ✓ | ✓ |
@ -175,6 +176,7 @@ The following table depicts the various user permission levels in a project.
1. If the [branch is protected](./project/protected_branches.md#using-the-allowed-to-merge-and-allowed-to-push-settings), this depends on the access Developers and Maintainers are given.
1. Guest users can access GitLab [**Releases**](project/releases/index.md) for downloading assets but are not allowed to download the source code nor see repository information like tags and commits.
1. Actions are limited only to records owned (referenced) by user.
1. When [Share Group Lock](./group/index.md#share-with-group-lock) is enabled, the project can't be shared with other groups. It does not affect group-with-group sharing.
## Project features permissions
@ -242,6 +244,7 @@ group.
| Publish [packages](packages/index.md) **(PREMIUM)** | | | ✓ | ✓ | ✓ |
| View metrics dashboard annotations | | ✓ | ✓ | ✓ | ✓ |
| Create project in group | | | ✓ (3) | ✓ (3) | ✓ (3) |
| Share (invite) groups with groups | | | | | ✓ |
| Create/edit/delete group milestones | | | ✓ | ✓ | ✓ |
| Enable/disable a dependency proxy **(PREMIUM)** | | | ✓ | ✓ | ✓ |
| Use security dashboard **(ULTIMATE)** | | | ✓ | ✓ | ✓ |

View File

@ -23,7 +23,8 @@ module API
SCOPE_PRELOAD_METHOD = {
merge_requests: :with_api_entity_associations,
projects: :with_api_entity_associations,
issues: :with_api_entity_associations
issues: :with_api_entity_associations,
milestones: :with_api_entity_associations
}.freeze
def search(additional_params = {})
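The hash maps a search scope to an ActiveRecord scope that preloads whatever the API entity serializes, so milestone search results avoid N+1 queries. A hedged sketch of how such a lookup is typically consumed (the helper below is hypothetical; only the hash itself comes from the diff):

```ruby
# Hypothetical consumer: apply the registered preload scope, if any,
# before rendering the collection for a given search scope.
def preload_for(scope, relation)
  preload_method = SCOPE_PRELOAD_METHOD[scope.to_sym]
  preload_method ? relation.public_send(preload_method) : relation
end

preload_for(:milestones, Milestone.all)
# behaves like Milestone.all.with_api_entity_associations
```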

View File

@ -20,4 +20,4 @@ image: "rust:latest"
test:cargo:
script:
- rustc --version && cargo --version # Print version info for debugging
- cargo test --all --verbose
- cargo test --workspace --verbose

View File

@ -24,6 +24,11 @@ module Gitlab
raise Projects::ImportService::Error.new(shared.errors.to_sentence)
end
rescue => e
# If an exception is raised here, the SnippetsRepoRestorer may not have
# been called, which would leave snippets without a repository. We don't
# want them in that state, so we delete them.
remove_non_migrated_snippets
raise Projects::ImportService::Error.new(e.message)
ensure
remove_base_tmp_dir
@ -153,6 +158,14 @@ module Gitlab
def remove_base_tmp_dir
FileUtils.rm_rf(@shared.base_path)
end
def remove_non_migrated_snippets
project
.snippets
.left_joins(:snippet_repository)
.where(snippet_repositories: { snippet_id: nil })
.delete_all
end
end
end
end
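The cleanup relation selects snippets whose repository restore never produced a `snippet_repository` row; a sketch of what it amounts to (the SQL shown is an illustrative translation, not captured output):

```ruby
# Snippets left-joined to snippet_repositories with no matching row are the
# ones whose repository was never created during the import. Roughly:
#
#   SELECT snippets.* FROM snippets
#   LEFT OUTER JOIN snippet_repositories
#     ON snippet_repositories.snippet_id = snippets.id
#   WHERE snippet_repositories.snippet_id IS NULL
#
orphaned = project.snippets
                  .left_joins(:snippet_repository)
                  .where(snippet_repositories: { snippet_id: nil })

orphaned.delete_all # removes the rows directly, without callbacks
```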

View File

@ -5,6 +5,8 @@ module Gitlab
class SnippetRepoRestorer < RepoRestorer
attr_reader :snippet
SnippetRepositoryError = Class.new(StandardError)
def initialize(snippet:, user:, shared:, path_to_bundle:)
@snippet = snippet
@user = user
@ -35,6 +37,10 @@ module Gitlab
def create_repository_from_db
Gitlab::BackgroundMigration::BackfillSnippetRepositories.new.perform_by_ids([snippet.id])
unless snippet.reset.snippet_repository
raise SnippetRepositoryError, _("Error creating repository for snippet with id %{snippet_id}") % { snippet_id: snippet.id }
end
end
end
end

View File

@ -10,13 +10,13 @@ module Gitlab
end
def restore
@project.snippets.find_each.all? do |snippet|
@project.snippets.find_each.map do |snippet|
Gitlab::ImportExport::SnippetRepoRestorer.new(snippet: snippet,
user: @user,
shared: @shared,
path_to_bundle: snippet_repo_bundle_path(snippet))
.restore
end
end.all?(true)
end
private
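The switch from the block form of `all?` to `map { ... }.all?(true)` matters because `all?` short-circuits: after the first failed restore, the remaining snippets were never attempted. A small self-contained sketch of the difference:

```ruby
# Stand-in for SnippetRepoRestorer#restore: records which snippets were
# attempted and returns whether the attempt succeeded.
attempted = []
fake_restore = ->(name, ok) { attempted << name; ok }

snippets = [['a', true], ['b', false], ['c', true]]

attempted.clear
snippets.all? { |name, ok| fake_restore.call(name, ok) }           # => false
attempted                                                          # => ["a", "b"]  ('c' was skipped)

attempted.clear
snippets.map { |name, ok| fake_restore.call(name, ok) }.all?(true) # => false
attempted                                                          # => ["a", "b", "c"]
```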

View File

@ -1,99 +0,0 @@
# frozen_string_literal: true
module Gitlab
# Returns an ID range within a table so it can be iterated over. Repeats from
# the beginning after it reaches the end.
#
# Used by Geo in particular to iterate over a replicable and its registry
# table.
#
# Tracks a cursor for each table, by "key". If the table is smaller than
# batch_size, then a range for the whole table is returned on every call.
class LoopingBatcher
# @param [Class] model_class the class of the table to iterate on
# @param [String] key to identify the cursor. Note, cursor is already unique
# per table.
# @param [Integer] batch_size to limit the number of records in a batch
def initialize(model_class, key:, batch_size: 1000)
@model_class = model_class
@key = key
@batch_size = batch_size
end
# @return [Range] a range of IDs. `nil` if 0 records at or after the cursor.
def next_range!
return unless @model_class.any?
batch_first_id = cursor_id
batch_last_id = get_batch_last_id(batch_first_id)
return unless batch_last_id
batch_first_id..batch_last_id
end
private
# @private
#
# Get the last ID of the batch. Increment the cursor or reset it if at end.
#
# @param [Integer] batch_first_id the first ID of the batch
# @return [Integer] batch_last_id the last ID of the batch (not the table)
def get_batch_last_id(batch_first_id)
batch_last_id, more_rows = run_query(@model_class.table_name, @model_class.primary_key, batch_first_id, @batch_size)
if more_rows
increment_batch(batch_last_id)
else
reset if batch_first_id > 1
end
batch_last_id
end
def run_query(table, primary_key, batch_first_id, batch_size)
sql = <<~SQL
SELECT MAX(batch.id) AS batch_last_id,
EXISTS (
SELECT #{primary_key}
FROM #{table}
WHERE #{primary_key} > MAX(batch.id)
) AS more_rows
FROM (
SELECT #{primary_key}
FROM #{table}
WHERE #{primary_key} >= #{batch_first_id}
ORDER BY #{primary_key}
LIMIT #{batch_size}) AS batch;
SQL
result = ActiveRecord::Base.connection.exec_query(sql).first
[result["batch_last_id"], result["more_rows"]]
end
def reset
set_cursor_id(1)
end
def increment_batch(batch_last_id)
set_cursor_id(batch_last_id + 1)
end
# @private
#
# @return [Integer] the cursor ID, or 1 if it is not set
def cursor_id
Rails.cache.fetch("#{cache_key}:cursor_id") || 1
end
def set_cursor_id(id)
Rails.cache.write("#{cache_key}:cursor_id", id)
end
def cache_key
@cache_key ||= "#{self.class.name.parameterize}:#{@model_class.name.parameterize}:#{@key}:cursor_id"
end
end
end

View File

@ -24,7 +24,9 @@ module Gitlab
end
def apply_headers(next_page)
request.header('Links', pagination_links(next_page))
link = pagination_links(next_page)
request.header('Links', link)
request.header('Link', link)
end
private

View File

@ -8,8 +8,6 @@ msgid ""
msgstr ""
"Project-Id-Version: gitlab 1.0.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2020-06-01 14:24-0400\n"
"PO-Revision-Date: 2020-06-01 14:24-0400\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"Language: \n"
@ -8736,6 +8734,9 @@ msgstr ""
msgid "Error creating new iteration"
msgstr ""
msgid "Error creating repository for snippet with id %{snippet_id}"
msgstr ""
msgid "Error creating the snippet"
msgstr ""

View File

@ -63,7 +63,7 @@ describe Banzai::Pipeline::WikiPipeline do
'when GitLab is hosted at a relative URL' => '/nested/relative/gitlab' }.each do |test_name, relative_url_root|
context test_name do
before do
allow(Gitlab.config.gitlab).to receive(:relative_url_root).and_return(relative_url_root)
allow(Rails.application.routes).to receive(:default_url_options).and_return(script_name: relative_url_root)
end
describe "linking to pages within the wiki" do

View File

@ -97,6 +97,49 @@ describe Gitlab::ImportExport::Importer do
end
end
context 'when import fails' do
let(:error_message) { 'foo' }
shared_examples 'removes any non migrated snippet' do
specify do
create_list(:project_snippet, 2, project: project)
snippet_with_repo = create(:project_snippet, :repository, project: project)
expect { importer.execute }.to change(Snippet, :count).by(-2).and(raise_error(Projects::ImportService::Error))
expect(snippet_with_repo.reload).to be_present
end
end
context 'when there is a graceful error' do
before do
allow_next_instance_of(Gitlab::ImportExport::AvatarRestorer) do |instance|
allow(instance).to receive(:avatar_export_file).and_raise(StandardError, error_message)
end
end
it 'raises an exception' do
expect { importer.execute }.to raise_error(Projects::ImportService::Error, error_message)
end
it_behaves_like 'removes any non migrated snippet'
end
context 'when an unexpected exception is raised' do
before do
allow_next_instance_of(Gitlab::ImportExport::AvatarRestorer) do |instance|
allow(instance).to receive(:restore).and_raise(StandardError, error_message)
end
end
it 'captures it and raises the Projects::ImportService::Error exception' do
expect { importer.execute }.to raise_error(Projects::ImportService::Error, error_message)
end
it_behaves_like 'removes any non migrated snippet'
end
end
context 'when project successfully restored' do
context "with a project in a user's namespace" do
let!(:existing_project) { create(:project, namespace: user.namespace) }

View File

@ -34,6 +34,15 @@ describe Gitlab::ImportExport::SnippetRepoRestorer do
expect(blob.data).to eq(snippet.content)
end
end
context 'when the repository creation fails' do
it 'returns false' do
allow_any_instance_of(Gitlab::BackgroundMigration::BackfillSnippetRepositories).to receive(:perform_by_ids).and_return(nil)
expect(restorer.restore).to be false
expect(shared.errors.first).to match(/Error creating repository for snippet/)
end
end
end
context 'when the snippet does not have a bundle file path' do

View File

@ -86,13 +86,14 @@ describe Gitlab::ImportExport::SnippetsRepoRestorer do
it_behaves_like 'imports snippet repositories'
end
context 'when one snippet cannot be saved' do
it 'returns false and do not process other snippets' do
context 'when any of the snippet repositories cannot be created' do
it 'continues processing other snippets and returns false' do
allow(Gitlab::ImportExport::SnippetRepoRestorer).to receive(:new).with(hash_including(snippet: snippet1)).and_return(service)
allow(service).to receive(:restore).and_return(false)
expect(Gitlab::ImportExport::SnippetRepoRestorer).not_to receive(:new).with(hash_including(snippet: snippet2))
expect(restorer.restore).to be_falsey
expect(Gitlab::ImportExport::SnippetRepoRestorer).to receive(:new).with(hash_including(snippet: snippet2)).and_call_original
expect(restorer.restore).to be false
end
end

View File

@ -1,71 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::LoopingBatcher, :use_clean_rails_memory_store_caching do
describe '#next_range!' do
let(:model_class) { LfsObject }
let(:key) { 'looping_batcher_spec' }
let(:batch_size) { 2 }
subject { described_class.new(model_class, key: key, batch_size: batch_size).next_range! }
context 'when there are no records' do
it { is_expected.to be_nil }
end
context 'when there are records' do
let!(:records) { create_list(model_class.underscore, 3) }
context 'when it has never been called before' do
it { is_expected.to be_a Range }
it 'starts from the beginning' do
expect(subject.first).to eq(1)
end
it 'ends at a full batch' do
expect(subject.last).to eq(records.second.id)
end
context 'when the batch size is greater than the number of records' do
let(:batch_size) { 5 }
it 'ends at the last ID' do
expect(subject.last).to eq(records.last.id)
end
end
end
context 'when it was called before' do
context 'when the previous batch included the end of the table' do
before do
described_class.new(model_class, key: key, batch_size: model_class.count).next_range!
end
it 'starts from the beginning' do
expect(subject).to eq(1..records.second.id)
end
end
context 'when the previous batch did not include the end of the table' do
before do
described_class.new(model_class, key: key, batch_size: model_class.count - 1).next_range!
end
it 'starts after the previous batch' do
expect(subject).to eq(records.last.id..records.last.id)
end
end
context 'if cache is cleared' do
it 'starts from the beginning' do
Rails.cache.clear
expect(subject).to eq(1..records.second.id)
end
end
end
end
end
end

View File

@ -60,9 +60,7 @@ describe Gitlab::Pagination::Keyset::RequestContext do
it 'sets Links header with same host/path as the original request' do
orig_uri = URI.parse(request_context.request.url)
expect(request_context).to receive(:header) do |name, header|
expect(name).to eq('Links')
expect(request_context).to receive(:header).twice do |name, header|
first_link, _ = /<([^>]+)>; rel="next"/.match(header).captures
uri = URI.parse(first_link)
@ -77,9 +75,7 @@ describe Gitlab::Pagination::Keyset::RequestContext do
it 'sets Links header with a link to the next page' do
orig_uri = URI.parse(request_context.request.url)
expect(request_context).to receive(:header) do |name, header|
expect(name).to eq('Links')
expect(request_context).to receive(:header).twice do |name, header|
first_link, _ = /<([^>]+)>; rel="next"/.match(header).captures
query = CGI.parse(URI.parse(first_link).query)
@ -97,9 +93,7 @@ describe Gitlab::Pagination::Keyset::RequestContext do
it 'sets Links header with a link to the next page' do
orig_uri = URI.parse(request_context.request.url)
expect(request_context).to receive(:header) do |name, header|
expect(name).to eq('Links')
expect(request_context).to receive(:header).twice do |name, header|
first_link, _ = /<([^>]+)>; rel="next"/.match(header).captures
query = CGI.parse(URI.parse(first_link).query)

View File

@ -325,7 +325,7 @@ describe AlertManagement::Alert do
let(:alert) { create(:alert_management_alert) }
it 'increments the events count by 1' do
expect { subject }.to change { alert.events}.by(1)
expect { subject }.to change { alert.events }.by(1)
end
end
end

View File

@ -597,6 +597,10 @@ describe API::Projects do
expect(response.header).to include('Links')
expect(response.header['Links']).to include('pagination=keyset')
expect(response.header['Links']).to include("id_after=#{public_project.id}")
expect(response.header).to include('Link')
expect(response.header['Link']).to include('pagination=keyset')
expect(response.header['Link']).to include("id_after=#{public_project.id}")
end
it 'contains only the first project with per_page = 1' do
@ -613,12 +617,17 @@ describe API::Projects do
expect(response.header).to include('Links')
expect(response.header['Links']).to include('pagination=keyset')
expect(response.header['Links']).to include("id_after=#{project3.id}")
expect(response.header).to include('Link')
expect(response.header['Link']).to include('pagination=keyset')
expect(response.header['Link']).to include("id_after=#{project3.id}")
end
it 'does not include a next link when the page does not have any records' do
get api('/projects', current_user), params: params.merge(id_after: Project.maximum(:id))
expect(response.header).not_to include('Links')
expect(response.header).not_to include('Link')
end
it 'returns an empty array when the page does not have any records' do
@ -644,6 +653,10 @@ describe API::Projects do
expect(response.header).to include('Links')
expect(response.header['Links']).to include('pagination=keyset')
expect(response.header['Links']).to include("id_before=#{project3.id}")
expect(response.header).to include('Link')
expect(response.header['Link']).to include('pagination=keyset')
expect(response.header['Link']).to include("id_before=#{project3.id}")
end
it 'contains only the last project with per_page = 1' do
@ -672,6 +685,11 @@ describe API::Projects do
match[1]
end
link = response.header['Link']
url = link&.match(/<[^>]+(\/projects\?[^>]+)>; rel="next"/) do |match|
match[1]
end
ids += Gitlab::Json.parse(response.body).map { |p| p['id'] }
end

View File

@ -6,7 +6,7 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
let_it_be(:project) { create(:project) }
describe '#execute' do
subject { described_class.new(project, nil, payload).execute }
subject(:execute) { described_class.new(project, nil, payload).execute }
context 'when alert payload is valid' do
let(:parsed_alert) { Gitlab::Alerting::Alert.new(project: project, payload: payload) }
@ -37,9 +37,13 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
context 'when alert with the same fingerprint already exists' do
let!(:alert) { create(:alert_management_alert, :resolved, project: project, fingerprint: parsed_alert.gitlab_fingerprint) }
it 'increases alert events count' do
expect { execute }.to change { alert.reload.events }.by(1)
end
context 'when status can be changed' do
it 'changes status to triggered' do
expect { subject }.to change { alert.reload.triggered? }.to(true)
expect { execute }.to change { alert.reload.triggered? }.to(true)
end
end
@ -56,7 +60,7 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
alert_id: alert.id
)
subject
execute
end
end
@ -66,7 +70,7 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
context 'when alert does not exist' do
context 'when alert can be created' do
it 'creates a new alert' do
expect { subject }.to change { AlertManagement::Alert.where(project: project).count }.by(1)
expect { execute }.to change { AlertManagement::Alert.where(project: project).count }.by(1)
end
end
@ -85,7 +89,7 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
alert_errors: { hosts: ['hosts array is over 255 chars'] }
)
subject
execute
end
end
@ -99,7 +103,7 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
context 'when status can be changed' do
it 'resolves an existing alert' do
expect { subject }.to change { alert.reload.resolved? }.to(true)
expect { execute }.to change { alert.reload.resolved? }.to(true)
end
end
@ -116,7 +120,7 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
alert_id: alert.id
)
subject
execute
end
end
@ -128,8 +132,8 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
let(:payload) { {} }
it 'responds with bad_request' do
expect(subject).to be_error
expect(subject.http_status).to eq(:bad_request)
expect(execute).to be_error
expect(execute.http_status).to eq(:bad_request)
end
end
end

View File

@ -32,6 +32,13 @@ RSpec.shared_examples 'wiki model' do
it 'returns the wiki base path' do
expect(subject.wiki_base_path).to eq("#{wiki_container.web_url(only_path: true)}/-/wikis")
end
it 'includes the relative URL root' do
allow(Rails.application.routes).to receive(:default_url_options).and_return(script_name: '/root')
expect(subject.wiki_base_path).to start_with('/root/')
expect(subject.wiki_base_path).not_to start_with('/root/root')
end
end
describe '#wiki' do