Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-05-31 12:09:12 +00:00
parent dcbe65b8b6
commit 8243505178
58 changed files with 966 additions and 665 deletions


@ -798,7 +798,6 @@ Gitlab/NamespacedClass:
- 'app/workers/merge_request_mergeability_check_worker.rb'
- 'app/workers/merge_worker.rb'
- 'app/workers/migrate_external_diffs_worker.rb'
- 'app/workers/namespaceless_project_destroy_worker.rb'
- 'app/workers/new_issue_worker.rb'
- 'app/workers/new_merge_request_worker.rb'
- 'app/workers/new_note_worker.rb'


@ -150,7 +150,6 @@ Rails/SkipsModelValidations:
- 'app/workers/concerns/dependency_proxy/expireable.rb'
- 'app/workers/concerns/packages/cleanup_artifact_worker.rb'
- 'app/workers/container_expiration_policy_worker.rb'
- 'app/workers/namespaceless_project_destroy_worker.rb'
- 'app/workers/packages/helm/extraction_worker.rb'
- 'app/workers/packages/nuget/extraction_worker.rb'
- 'app/workers/packages/rubygems/extraction_worker.rb'


@ -484,7 +484,6 @@ RSpec/AnyInstanceOf:
- spec/workers/expire_pipeline_cache_worker_spec.rb
- spec/workers/group_export_worker_spec.rb
- spec/workers/group_import_worker_spec.rb
- spec/workers/namespaceless_project_destroy_worker_spec.rb
- spec/workers/namespaces/root_statistics_worker_spec.rb
- spec/workers/new_note_worker_spec.rb
- spec/workers/object_pool/create_worker_spec.rb


@ -3902,7 +3902,6 @@ RSpec/ContextWording:
- 'spec/workers/issues/placement_worker_spec.rb'
- 'spec/workers/merge_requests/delete_source_branch_worker_spec.rb'
- 'spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb'
- 'spec/workers/namespaceless_project_destroy_worker_spec.rb'
- 'spec/workers/packages/composer/cache_update_worker_spec.rb'
- 'spec/workers/packages/go/sync_packages_worker_spec.rb'
- 'spec/workers/packages/maven/metadata/sync_worker_spec.rb'


@ -10,7 +10,7 @@ const initVitalsLog = () => {
console.log(
`${String.fromCodePoint(
0x1f4d1,
)} To get the final web vital numbers reported you maybe need to switch away and back to the tab`,
)} To get the final web vital numbers reported you may need to switch away and back to the tab`,
);
getCLS(reportVital);
getFID(reportVital);


@ -134,7 +134,10 @@ export default {
const { registerInstructions } = this.instructions || {};
if (this.registrationToken) {
return registerInstructions.replace(REGISTRATION_TOKEN_PLACEHOLDER, this.registrationToken);
return registerInstructions?.replace(
REGISTRATION_TOKEN_PLACEHOLDER,
this.registrationToken,
);
}
return registerInstructions;
@ -155,9 +158,13 @@ export default {
selectPlatform(platform) {
this.selectedPlatform = platform;
if (!platform.architectures?.some(({ name }) => name === this.selectedArchitectureName)) {
// Select first architecture when current value is not available
this.selectArchitecture(platform.architectures[0]);
// Update architecture when platform changes
const architectures = platform.architectures || [];
const arch = architectures.find(({ name }) => name === this.selectedArchitectureName);
if (arch) {
this.selectArchitecture(arch);
} else {
this.selectArchitecture(architectures[0]);
}
},
selectArchitecture(architecture) {
@ -220,7 +227,7 @@ export default {
v-for="platform in platforms"
:key="platform.name"
:ref="platform.name"
:selected="selectedPlatform && selectedPlatform.name === platform.name"
:selected="selectedPlatformName === platform.name"
@click="selectPlatform(platform)"
>
{{ platform.humanReadableName }}


@ -363,10 +363,6 @@ table.u2f-registrations {
color: $gl-text-color-secondary;
}
.gitlab-slack-body {
max-width: 420px;
}
.gitlab-slack-slack-logo {
transform: scale(200%); // Slack logo SVG is scaled down 50% and has empty space around it
}


@ -15,7 +15,7 @@ module Enums
size_limit_exceeded: 21,
job_activity_limit_exceeded: 22,
deployments_limit_exceeded: 23,
user_blocked: 24,
# 24 was previously used by the deprecated `user_blocked`
project_deleted: 25
}
end
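Because these enum integers are persisted, retiring a reason parks its number rather than freeing it. A minimal sketch of the consuming side (assumed wiring, not the exact GitLab model code) shows why 24 must stay unassigned:

```ruby
# Minimal sketch, assuming the hash above backs an ActiveRecord enum.
# The integers are what get stored, so reusing 24 for a new reason would
# silently relabel existing rows that were persisted as user_blocked.
class Pipeline < ApplicationRecord
  enum failure_reason: {
    size_limit_exceeded: 21,
    job_activity_limit_exceeded: 22,
    deployments_limit_exceeded: 23,
    project_deleted: 25 # 24 stays reserved for the removed user_blocked
  }
end
```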


@ -17,8 +17,7 @@ module Ci
size_limit_exceeded: 'The pipeline size limit was exceeded.',
job_activity_limit_exceeded: 'The pipeline job activity limit was exceeded.',
deployments_limit_exceeded: 'The pipeline deployments limit was exceeded.',
project_deleted: 'The project associated with this pipeline was deleted.',
user_blocked: 'The user who created this pipeline is blocked.' }
project_deleted: 'The project associated with this pipeline was deleted.' }
end
presents ::Ci::Pipeline, as: :pipeline


@ -15,11 +15,9 @@
= f.label :setup_for_company, _('Who will be using this group?')
.gl-display-flex.gl-flex-direction-column.gl-lg-flex-direction-row
.gl-flex-grow-1.gl-display-flex.gl-align-items-center
= f.radio_button :setup_for_company, true
= f.label :setup_for_company, _('My company or team'), class: 'gl-font-weight-normal gl-mb-0 gl-ml-2', value: 'true'
= f.gitlab_ui_radio_component :setup_for_company, true, _('My company or team')
.gl-flex-grow-1.gl-display-flex.gl-align-items-center
= f.radio_button :setup_for_company, false
= f.label :setup_for_company, _('Just me'), class: 'gl-font-weight-normal gl-mb-0 gl-ml-2', value: 'false'
= f.gitlab_ui_radio_component :setup_for_company, false, _('Just me')
.row
.form-group.col-sm-4


@ -63,7 +63,7 @@
%button.gl-button.btn-default.btn.btn-svg.js-toggle-button.js-import-git-toggle-button.js-import-project-btn{ type: "button", data: { platform: 'repo_url', toggle_open_class: 'active', **tracking_attrs_data(track_label, 'click_button', 'repo_url') } }
.gl-button-icon
= sprite_icon('link', css_class: 'gl-icon')
= _('Repo by URL')
= _('Repository by URL')
- if manifest_import_enabled?
%div


@ -2542,15 +2542,6 @@
:weight: 1
:idempotent:
:tags: []
- :name: namespaceless_project_destroy
:worker_name: NamespacelessProjectDestroyWorker
:feature_category: :authentication_and_authorization
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent:
:tags: []
- :name: namespaces_onboarding_issue_created
:worker_name: Namespaces::OnboardingIssueCreatedWorker
:feature_category: :onboarding


@ -1,42 +0,0 @@
# frozen_string_literal: true

# Worker to destroy projects that do not have a namespace
#
# It destroys everything it can without having the info about the namespace it
# used to belong to. Projects in this state should be rare.
# The worker will reject doing anything for projects that *do* have a
# namespace. For those use ProjectDestroyWorker instead.
class NamespacelessProjectDestroyWorker # rubocop:disable Scalability/IdempotentWorker
  include ApplicationWorker

  data_consistency :always

  sidekiq_options retry: 3

  include ExceptionBacktrace

  feature_category :authentication_and_authorization

  def perform(project_id)
    begin
      project = Project.unscoped.find(project_id)
    rescue ActiveRecord::RecordNotFound
      return
    end

    return if project.namespace # Reject doing anything for projects that *do* have a namespace

    project.team.truncate

    unlink_fork(project) if project.forked?

    project.destroy!
  end

  private

  def unlink_fork(project)
    merge_requests = project.forked_from_project.merge_requests.opened.from_project(project)

    merge_requests.update_all(state_id: MergeRequest.available_states[:closed])
  end
end
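For context, callers enqueued this worker through the standard Sidekiq API that `ApplicationWorker` exposes; a minimal sketch of a call site (the enqueueing location is not part of this diff):

```ruby
# Hypothetical call site: destruction of an orphaned project was queued by ID.
NamespacelessProjectDestroyWorker.perform_async(project.id)
```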


@ -10,6 +10,7 @@ value_type: number
status: active
time_frame: all
data_source: database
instrumentation_class: IssuesCreatedFromAlertsMetric
distribution:
- ce
- ee
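The class named by the new `instrumentation_class` key would follow the `NumbersMetric` pattern documented later in this commit. A hedged sketch of its shape (the component metric names are assumptions, not the real implementation):

```ruby
# Hypothetical sketch of IssuesCreatedFromAlertsMetric. The component metric
# names below are placeholders for whichever per-source counts it adds up.
module Gitlab
  module Usage
    module Metrics
      module Instrumentations
        class IssuesCreatedFromAlertsMetric < NumbersMetric
          operation :add

          data do |time_frame|
            [
              IssuesWithAlertManagementAlertsMetric.new(time_frame: time_frame).value, # assumed component
              IssuesWithPrometheusAlertEventsMetric.new(time_frame: time_frame).value  # assumed component
            ]
          end
        end
      end
    end
  end
end
```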


@ -275,8 +275,6 @@
- 1
- - migrate_external_diffs
- 1
- - namespaceless_project_destroy
- 1
- - namespaces_onboarding_issue_created
- 1
- - namespaces_onboarding_pipeline_created


@ -0,0 +1,15 @@
# frozen_string_literal: true

class AddTraversalIdTypeGroupIndex < Gitlab::Database::Migration[2.0]
  INDEX_NAME = 'index_namespaces_on_traversal_ids_for_groups_btree'

  disable_ddl_transaction!

  def up
    add_concurrent_index :namespaces, :traversal_ids, using: :btree, where: "type='Group'", name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :namespaces, INDEX_NAME
  end
end


@ -0,0 +1 @@
c049b15e29578180f42701764f27b5301561f2dfb7a2d289f84d69feae70b33f


@ -28627,6 +28627,8 @@ CREATE INDEX index_namespaces_on_traversal_ids ON namespaces USING gin (traversa
CREATE INDEX index_namespaces_on_traversal_ids_for_groups ON namespaces USING gin (traversal_ids) WHERE ((type)::text = 'Group'::text);
CREATE INDEX index_namespaces_on_traversal_ids_for_groups_btree ON namespaces USING btree (traversal_ids) WHERE ((type)::text = 'Group'::text);
CREATE INDEX index_namespaces_on_type_and_id ON namespaces USING btree (type, id);
CREATE INDEX index_namespaces_public_groups_name_id ON namespaces USING btree (name, id) WHERE (((type)::text = 'Group'::text) AND (visibility_level = 20));


@ -833,9 +833,9 @@ prevent any more changes from rendering. For more information about these limits
Reports that go over the 20 MB limit aren't loaded. Affected reports:
- [Merge request security reports](../user/project/merge_requests/testing_and_reports_in_merge_requests.md#security-reports)
- [Merge request security reports](../ci/testing/index.md#security-reports)
- [CI/CD parameter `artifacts:expose_as`](../ci/yaml/index.md#artifactsexpose_as)
- [Unit test reports](../ci/unit_test_reports.md)
- [Unit test reports](../ci/testing/unit_test_reports.md)
## Advanced Search limits


@ -165,7 +165,7 @@ Example of response
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/202525) in GitLab 13.0.
NOTE:
This API route is part of the [Unit test report](../ci/unit_test_reports.md) feature.
This API route is part of the [Unit test report](../ci/testing/unit_test_reports.md) feature.
```plaintext
GET /projects/:id/pipelines/:pipeline_id/test_report
@ -221,7 +221,7 @@ Sample response:
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/65471) in GitLab 14.2.
NOTE:
This API route is part of the [Unit test report](../ci/unit_test_reports.md) feature.
This API route is part of the [Unit test report](../ci/testing/unit_test_reports.md) feature.
```plaintext
GET /projects/:id/pipelines/:pipeline_id/test_report_summary


@ -89,7 +89,7 @@ GitLab CI/CD features, grouped by DevOps stage, include:
| [GitLab CI/CD for external repositories](ci_cd_for_external_repos/index.md) | Get the benefits of GitLab CI/CD combined with repositories in GitHub and Bitbucket Cloud. |
| [Interactive Web Terminals](interactive_web_terminal/index.md) | Open an interactive web terminal to debug the running jobs. |
| [Review Apps](review_apps/index.md) | Configure GitLab CI/CD to preview code changes. |
| [Unit test reports](unit_test_reports.md) | Identify test failures directly on merge requests. |
| [Unit test reports](testing/unit_test_reports.md) | Identify test failures directly on merge requests. |
| [Using Docker images](docker/using_docker_images.md) | Use GitLab and GitLab Runner with Docker to build and test applications. |
| **Release** | |
| [Auto Deploy](../topics/autodevops/stages.md#auto-deploy) | Deploy your application to a production environment in a Kubernetes cluster. |


@ -8,7 +8,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/9788) in GitLab 11.10. Requires GitLab Runner 11.10 and above.
GitLab provides a lot of great reporting tools for things like [merge requests](../user/project/merge_requests/index.md) - [Unit test reports](unit_test_reports.md), [code quality](../user/project/merge_requests/code_quality.md), and performance tests. While JUnit is a great open framework for tests that "pass" or "fail", it is also important to see other types of metrics from a given change.
GitLab provides a lot of great reporting tools for things like [merge requests](../user/project/merge_requests/index.md) - [Unit test reports](testing/unit_test_reports.md), [code quality](../user/project/merge_requests/code_quality.md), and performance tests. While JUnit is a great open framework for tests that "pass" or "fail", it is also important to see other types of metrics from a given change.
You can configure your job to use custom Metrics Reports, and GitLab displays a report on the merge request so that it's easier and faster to identify changes without having to check the entire log.


@ -74,7 +74,7 @@ hosted with a paid cloud service may be provisioned with:
The [Pipeline success and duration charts](index.md#pipeline-success-and-duration-charts)
give information about pipeline runtime and failed job counts.
Tests like [unit tests](../unit_test_reports.md), integration tests, end-to-end tests,
Tests like [unit tests](../testing/unit_test_reports.md), integration tests, end-to-end tests,
[code quality](../../user/project/merge_requests/code_quality.md) tests, and others
ensure that problems are automatically found by the CI/CD pipeline. There could be many
pipeline stages involved causing long runtimes.

View File

Before

Width:  |  Height:  |  Size: 28 KiB

After

Width:  |  Height:  |  Size: 28 KiB

View File

Before

Width:  |  Height:  |  Size: 16 KiB

After

Width:  |  Height:  |  Size: 16 KiB

View File

Before

Width:  |  Height:  |  Size: 18 KiB

After

Width:  |  Height:  |  Size: 18 KiB

doc/ci/testing/index.md (new file, 35 lines)

@ -0,0 +1,35 @@
---
stage: Verify
group: Pipeline Insights
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Test with GitLab CI/CD and generate reports in merge requests **(FREE)**
Use GitLab CI/CD to test the changes included in a feature branch. You can also
display reports or link to important information directly from [merge requests](../../user/project/merge_requests/index.md).
| Feature | Description |
|-------------------------------------------------------------------------------------------------|-------------|
| [Accessibility Testing](../../user/project/merge_requests/accessibility_testing.md) | Automatically report A11y violations for changed pages in merge requests. |
| [Browser Performance Testing](../../user/project/merge_requests/browser_performance_testing.md) | Quickly determine the browser performance impact of pending code changes. |
| [Load Performance Testing](../../user/project/merge_requests/load_performance_testing.md) | Quickly determine the server performance impact of pending code changes. |
| [Code Quality](../../user/project/merge_requests/code_quality.md) | Analyze your source code quality using the [Code Climate](https://codeclimate.com/) analyzer and show the Code Climate report right in the merge request widget area. |
| [Display arbitrary job artifacts](../yaml/index.md#artifactsexpose_as) | Configure CI pipelines with the `artifacts:expose_as` parameter to directly link to selected [artifacts](../pipelines/job_artifacts.md) in merge requests. |
| [Unit test reports](unit_test_reports.md) | Configure your CI jobs to use Unit test reports, and let GitLab display a report on the merge request so that it's easier and faster to identify the failure without having to check the entire job log. |
| [License Compliance](../../user/compliance/license_compliance/index.md) | Manage the licenses of your dependencies. |
| [Metrics Reports](../metrics_reports.md) | Display the Metrics Report on the merge request so that it's fast and easier to identify changes to important metrics. |
| [Test Coverage visualization](../../user/project/merge_requests/test_coverage_visualization.md) | See test coverage results for merge requests, in the file diff. |
| [Fail fast testing](../../user/project/merge_requests/fail_fast_testing.md#fail-fast-testing) | Run a subset of your RSpec test suite, so failed tests stop the pipeline before the full suite of tests run, saving resources. |
## Security Reports **(ULTIMATE)**
In addition to the reports listed above, GitLab can do many types of [Security reports](../../user/application_security/index.md),
generated by scanning and reporting any vulnerabilities found in your project:
| Feature | Description |
|----------------------------------------------------------------------------------------------|-------------|
| [Container Scanning](../../user/application_security/container_scanning/index.md) | Analyze your Docker images for known vulnerabilities. |
| [Dynamic Application Security Testing (DAST)](../../user/application_security/dast/index.md) | Analyze your running web applications for known vulnerabilities. |
| [Dependency Scanning](../../user/application_security/dependency_scanning/index.md) | Analyze your dependencies for known vulnerabilities. |
| [Static Application Security Testing (SAST)](../../user/application_security/sast/index.md) | Analyze your source code for known vulnerabilities. |


@ -0,0 +1,266 @@
---
stage: Verify
group: Pipeline Insights
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Unit test report examples **(FREE)**
[Unit test reports](unit_test_reports.md) can be generated for many languages and packages.
Use these examples as guidelines for configuring your pipeline to generate unit test reports
for the listed languages and packages. You might need to edit the examples to match
the version of the language or package you are using.
## Ruby
Use the following job in `.gitlab-ci.yml`. This includes the `artifacts:paths` keyword to provide a link to the Unit test report output file.
```yaml
## Use https://github.com/sj26/rspec_junit_formatter to generate a JUnit report format XML file with rspec
ruby:
  stage: test
  script:
    - bundle install
    - bundle exec rspec --format progress --format RspecJunitFormatter --out rspec.xml
  artifacts:
    when: always
    paths:
      - rspec.xml
    reports:
      junit: rspec.xml
```
## Go
Use the following job in `.gitlab-ci.yml`:
```yaml
## Use https://github.com/gotestyourself/gotestsum to generate a JUnit report format XML file with go
golang:
  stage: test
  script:
    - go get gotest.tools/gotestsum
    - gotestsum --junitfile report.xml --format testname
  artifacts:
    when: always
    reports:
      junit: report.xml
```
## Java
There are a few tools that can produce JUnit report format XML files in Java.
### Gradle
In the following example, `gradle` is used to generate the test reports.
If there are multiple test tasks defined, `gradle` generates multiple
directories under `build/test-results/`. In that case, you can leverage glob
matching by defining the following path: `build/test-results/test/**/TEST-*.xml`:
```yaml
java:
  stage: test
  script:
    - gradle test
  artifacts:
    when: always
    reports:
      junit: build/test-results/test/**/TEST-*.xml
```
In [GitLab Runner 13.0](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2620)
and later, you can use `**`.
### Maven
For parsing [Surefire](https://maven.apache.org/surefire/maven-surefire-plugin/)
and [Failsafe](https://maven.apache.org/surefire/maven-failsafe-plugin/) test
reports, use the following job in `.gitlab-ci.yml`:
```yaml
java:
  stage: test
  script:
    - mvn verify
  artifacts:
    when: always
    reports:
      junit:
        - target/surefire-reports/TEST-*.xml
        - target/failsafe-reports/TEST-*.xml
```
## Python example
This example uses pytest with the `--junitxml=report.xml` flag to format the output
into the JUnit report XML format:
```yaml
pytest:
  stage: test
  script:
    - pytest --junitxml=report.xml
  artifacts:
    when: always
    reports:
      junit: report.xml
```
## C/C++
There are a few tools that can produce JUnit report format XML files in C/C++.
### GoogleTest
In the following example, `gtest` is used to generate the test reports.
If there are multiple `gtest` executables created for different architectures (`x86`, `x64` or `arm`),
you are required to run each test providing a unique filename. The results
are then aggregated together.
```yaml
cpp:
  stage: test
  script:
    - gtest.exe --gtest_output="xml:report.xml"
  artifacts:
    when: always
    reports:
      junit: report.xml
```
### CUnit
[CUnit](https://cunity.gitlab.io/cunit/) can be made to produce [JUnit report format XML files](https://cunity.gitlab.io/cunit/group__CI.html)
automatically when run using its `CUnitCI.h` macros:
```yaml
cunit:
  stage: test
  script:
    - ./my-cunit-test
  artifacts:
    when: always
    reports:
      junit: ./my-cunit-test.xml
```
## .NET
The [JunitXML.TestLogger](https://www.nuget.org/packages/JunitXml.TestLogger/) NuGet
package can generate test reports for .Net Framework and .Net Core applications. The following
example expects a solution in the root folder of the repository, with one or more
project files in sub-folders. One result file is produced per test project, and each file
is placed in the artifacts folder. This example includes optional formatting arguments, which
improve the readability of test data in the test widget. A full .Net Core
[example is available](https://gitlab.com/Siphonophora/dot-net-cicd-test-logging-demo).
```yaml
## Source code and documentation are here: https://github.com/spekt/junit.testlogger/
Test:
  stage: test
  script:
    - 'dotnet test --test-adapter-path:. --logger:"junit;LogFilePath=..\artifacts\{assembly}-test-result.xml;MethodFormat=Class;FailureBodyFormat=Verbose"'
  artifacts:
    when: always
    paths:
      - ./**/*test-result.xml
    reports:
      junit:
        - ./**/*test-result.xml
```
## JavaScript
There are a few tools that can produce JUnit report format XML files in JavaScript.
### Jest
The [jest-junit](https://github.com/jest-community/jest-junit) npm package can generate
test reports for JavaScript applications. In the following `.gitlab-ci.yml` example,
the `javascript` job uses Jest to generate the test reports:
```yaml
javascript:
  stage: test
  script:
    - 'jest --ci --reporters=default --reporters=jest-junit'
  artifacts:
    when: always
    reports:
      junit:
        - junit.xml
```
### Karma
The [Karma-junit-reporter](https://github.com/karma-runner/karma-junit-reporter)
npm package can generate test reports for JavaScript applications. In the following
`.gitlab-ci.yml` example, the `javascript` job uses Karma to generate the test reports:
```yaml
javascript:
  stage: test
  script:
    - karma start --reporters junit
  artifacts:
    when: always
    reports:
      junit:
        - junit.xml
```
### Mocha
The [JUnit Reporter for Mocha](https://github.com/michaelleeallen/mocha-junit-reporter)
NPM package can generate test reports for JavaScript applications. In the following
`.gitlab-ci.yml` example, the `javascript` job uses Mocha to generate the test reports:
```yaml
javascript:
  stage: test
  script:
    - mocha --reporter mocha-junit-reporter --reporter-options mochaFile=junit.xml
  artifacts:
    when: always
    reports:
      junit:
        - junit.xml
```
## Flutter or Dart
This example `.gitlab-ci.yml` file uses the [JUnit Report](https://pub.dev/packages/junitreport)
package to convert the `flutter test` output into JUnit report XML format:
```yaml
test:
  stage: test
  script:
    - flutter test --machine | tojunit -o report.xml
  artifacts:
    when: always
    reports:
      junit:
        - report.xml
```
## PHP
This example uses [PHPUnit](https://phpunit.de/) with the `--log-junit` flag.
You can also add this option using
[XML](https://phpunit.readthedocs.io/en/stable/configuration.html#the-junit-element)
in the `phpunit.xml` configuration file.
```yaml
phpunit:
  stage: test
  script:
    - composer install
    - vendor/bin/phpunit --log-junit report.xml
  artifacts:
    when: always
    reports:
      junit: report.xml
```


@ -0,0 +1,160 @@
---
stage: Verify
group: Pipeline Insights
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Unit test reports **(FREE)**
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/45318) in GitLab 11.2. Requires GitLab Runner 11.2 and above.
> - [Renamed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39737) from JUnit test reports to Unit test reports in GitLab 13.4.
It is very common that a [CI/CD pipeline](../pipelines/index.md) contains a
test job that verifies your code.
If the tests fail, the pipeline fails and users get notified. The person who
works on the merge request has to check the job logs and see where the
tests failed so that they can fix them.
You can configure your job to use Unit test reports, and GitLab displays a
report on the merge request so that it's easier and faster to identify the
failure without having to check the entire log. Unit test reports currently
only support test reports in the JUnit report format.
If you don't use merge requests but still want to see the unit test report
output without searching through job logs, the full
[Unit test reports](#view-unit-test-reports-on-gitlab) are available
in the pipeline detail view.
Consider the following workflow:
1. Your default branch is rock solid, your project is using GitLab CI/CD and
your pipelines indicate that there isn't anything broken.
1. Someone from your team submits a merge request, a test fails and the pipeline
gets the known red icon. To investigate more, you have to go through the job
logs to figure out the cause of the failed test, which usually contain
thousands of lines.
1. You configure the Unit test reports and immediately GitLab collects and
exposes them in the merge request. No more searching in the job logs.
1. Your development and debugging workflow becomes easier, faster, and more efficient.
## How it works
First, GitLab Runner uploads all [JUnit report format XML files](https://www.ibm.com/docs/en/adfz/developer-for-zos/14.1.0?topic=formats-junit-xml-format)
as [artifacts](../yaml/artifacts_reports.md#artifactsreportsjunit) to GitLab. Then, when you visit a merge request, GitLab starts
comparing the head and base branch's JUnit report format XML files, where:
- The base branch is the target branch (usually the default branch).
- The head branch is the source branch (the latest pipeline in each merge request).
The reports panel has a summary showing how many tests failed, how many had errors
and how many were fixed. If no comparison can be done because data for the base branch
is not available, the panel just shows the list of failed tests for head.
There are four types of results:
1. **Newly failed tests:** Test cases which passed on base branch and failed on head branch
1. **Newly encountered errors:** Test cases which passed on base branch and failed due to a
test error on head branch
1. **Existing failures:** Test cases which failed on base branch and failed on head branch
1. **Resolved failures:** Test cases which failed on base branch and passed on head branch
Each entry in the panel shows the test name and its type from the list
above. Clicking on the test name opens a modal window with details of its
execution time and the error output.
![Test Reports Widget](img/junit_test_report.png)
### Number of recent failures
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/241759) in merge requests in GitLab 13.7.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/268249) in GitLab 13.8.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/235525) in Test Reports in GitLab 13.9.
If a test failed in the project's default branch in the last 14 days, a message like
`Failed {n} time(s) in {default_branch} in the last 14 days` is displayed for that test.
## How to set it up
To enable the Unit test reports in merge requests, you must add
[`artifacts:reports:junit`](../yaml/artifacts_reports.md#artifactsreportsjunit)
in `.gitlab-ci.yml`, and specify the paths of the generated test reports.
The reports must be `.xml` files, otherwise [GitLab returns an Error 500](https://gitlab.com/gitlab-org/gitlab/-/issues/216575).
In the following example for Ruby, the job in the `test` stage runs and GitLab
collects the unit test report from the job. After the job is executed, the
XML report is stored in GitLab as an artifact, and the results are shown in the
merge request widget.
```yaml
## Use https://github.com/sj26/rspec_junit_formatter to generate a JUnit report format XML file with rspec
ruby:
  stage: test
  script:
    - bundle install
    - bundle exec rspec --format progress --format RspecJunitFormatter --out rspec.xml
  artifacts:
    when: always
    paths:
      - rspec.xml
    reports:
      junit: rspec.xml
```
To make the Unit test report output files browsable, include them with the
[`artifacts:paths`](../yaml/index.md#artifactspaths) keyword as well, as shown in the example.
To upload the report even if the job fails (for example if the tests do not pass),
use the [`artifacts:when:always`](../yaml/index.md#artifactswhen) keyword.
You cannot have multiple tests with the same name and class in your JUnit report format XML file.
## View Unit test reports on GitLab
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/24792) in GitLab 12.5 behind a feature flag (`junit_pipeline_view`), disabled by default.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/216478) in GitLab 13.3.
If JUnit report format XML files are generated and uploaded as part of a pipeline, these reports
can be viewed inside the pipelines details page. The **Tests** tab on this page
displays a list of test suites and cases reported from the XML file.
![Test Reports Widget](img/pipelines_junit_test_report_v13_10.png)
You can view all the known test suites and select each of these to see further
details, including the cases that make up the suite.
You can also retrieve the reports via the [GitLab API](../../api/pipelines.md#get-a-pipelines-test-report).
### Unit test reports parsing errors
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/263457) in GitLab 13.10.
If parsing JUnit report XML results in an error, an indicator is shown next to the job name. Hovering over the icon shows the parser error in a tooltip. If multiple parsing errors come from [grouped jobs](../jobs/index.md#group-jobs-in-a-pipeline), GitLab shows only the first error from the group.
![Test Reports With Errors](img/pipelines_junit_test_report_with_errors_v13_10.png)
For test case parsing limits, see [Max test cases per unit test report](../../user/gitlab_com/#gitlab-cicd).
GitLab does not parse very [large nodes](https://nokogiri.org/tutorials/parsing_an_html_xml_document.html#parse-options) of JUnit reports. There is [an issue](https://gitlab.com/gitlab-org/gitlab/-/issues/268035) open to make this optional.
## View JUnit screenshots on GitLab
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/202114) in GitLab 13.0 behind the `:junit_pipeline_screenshots_view` feature flag, disabled by default.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/216979) in GitLab 13.12.
Upload your screenshots as [artifacts](../yaml/artifacts_reports.md#artifactsreportsjunit) to GitLab. If JUnit
report format XML files contain an `attachment` tag, GitLab parses the attachment. Note that:
- The `attachment` tag **must** contain the relative path to `$CI_PROJECT_DIR` of the screenshots you uploaded. For
example:
```xml
<testcase time="1.00" name="Test">
  <system-out>[[ATTACHMENT|/path/to/some/file]]</system-out>
</testcase>
```
- You should set the job that uploads the screenshot to
[`artifacts:when: always`](../yaml/index.md#artifactswhen) so that it still uploads a screenshot
when a test fails.
A link to the test case attachment appears in the test case details in
[the pipeline test report](#view-unit-test-reports-on-gitlab).


@ -1,395 +1,11 @@
---
stage: Verify
group: Pipeline Insights
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
redirect_to: 'testing/unit_test_reports.md'
remove_date: '2022-08-31'
---
# Unit test reports **(FREE)**
This document was moved to [another location](testing/unit_test_reports.md).
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/45318) in GitLab 11.2. Requires GitLab Runner 11.2 and above.
> - [Renamed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39737) from JUnit test reports to Unit test reports in GitLab 13.4.
It is very common that a [CI/CD pipeline](pipelines/index.md) contains a
test job that verifies your code.
If the tests fail, the pipeline fails and users get notified. The person who
works on the merge request has to check the job logs and see where the
tests failed so that they can fix them.
You can configure your job to use Unit test reports, and GitLab displays a
report on the merge request so that it's easier and faster to identify the
failure without having to check the entire log. Unit test reports currently
only support test reports in the JUnit report format.
If you don't use merge requests but still want to see the unit test report
output without searching through job logs, the full
[Unit test reports](#viewing-unit-test-reports-on-gitlab) are available
in the pipeline detail view.
Consider the following workflow:
1. Your default branch is rock solid, your project is using GitLab CI/CD and
your pipelines indicate that there isn't anything broken.
1. Someone from your team submits a merge request, a test fails and the pipeline
gets the known red icon. To investigate more, you have to go through the job
logs to figure out the cause of the failed test, which usually contain
thousands of lines.
1. You configure the Unit test reports and immediately GitLab collects and
exposes them in the merge request. No more searching in the job logs.
1. Your development and debugging workflow becomes easier, faster, and more efficient.
## How it works
First, GitLab Runner uploads all [JUnit report format XML files](https://www.ibm.com/docs/en/adfz/developer-for-zos/14.1.0?topic=formats-junit-xml-format)
as [artifacts](yaml/artifacts_reports.md#artifactsreportsjunit) to GitLab. Then, when you visit a merge request, GitLab starts
comparing the head and base branch's JUnit report format XML files, where:
- The base branch is the target branch (usually the default branch).
- The head branch is the source branch (the latest pipeline in each merge request).
The reports panel has a summary showing how many tests failed, how many had errors
and how many were fixed. If no comparison can be done because data for the base branch
is not available, the panel just shows the list of failed tests for head.
There are four types of results:
1. **Newly failed tests:** Test cases which passed on base branch and failed on head branch
1. **Newly encountered errors:** Test cases which passed on base branch and failed due to a
test error on head branch
1. **Existing failures:** Test cases which failed on base branch and failed on head branch
1. **Resolved failures:** Test cases which failed on base branch and passed on head branch
Each entry in the panel shows the test name and its type from the list
above. Clicking on the test name opens a modal window with details of its
execution time and the error output.
![Test Reports Widget](img/junit_test_report.png)
### Number of recent failures
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/241759) in merge requests in GitLab 13.7.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/268249) in GitLab 13.8.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/235525) in Test Reports in GitLab 13.9.
If a test failed in the project's default branch in the last 14 days, a message like
`Failed {n} time(s) in {default_branch} in the last 14 days` is displayed for that test.
## How to set it up
To enable the Unit test reports in merge requests, you must add
[`artifacts:reports:junit`](yaml/artifacts_reports.md#artifactsreportsjunit)
in `.gitlab-ci.yml`, and specify the paths of the generated test reports.
The reports must be `.xml` files, otherwise [GitLab returns an Error 500](https://gitlab.com/gitlab-org/gitlab/-/issues/216575).
In the following examples, the job in the `test` stage runs and GitLab
collects the Unit test report from each job. After each job is executed, the
XML reports are stored in GitLab as artifacts and their results are shown in the
merge request widget.
To make the Unit test report output files browsable, include them with the
[`artifacts:paths`](yaml/index.md#artifactspaths) keyword as well, as shown in the [Ruby example](#ruby-example).
To upload the report even if the job fails (for example if the tests do not pass), use the [`artifacts:when:always`](yaml/index.md#artifactswhen)
keyword.
You cannot have multiple tests with the same name and class in your JUnit report format XML file.
### Ruby example
Use the following job in `.gitlab-ci.yml`. This includes the `artifacts:paths` keyword to provide a link to the Unit test report output file.
```yaml
## Use https://github.com/sj26/rspec_junit_formatter to generate a JUnit report format XML file with rspec
ruby:
  stage: test
  script:
    - bundle install
    - bundle exec rspec --format progress --format RspecJunitFormatter --out rspec.xml
  artifacts:
    when: always
    paths:
      - rspec.xml
    reports:
      junit: rspec.xml
```
### Go example
Use the following job in `.gitlab-ci.yml`:
```yaml
## Use https://github.com/gotestyourself/gotestsum to generate a JUnit report format XML file with go
golang:
  stage: test
  script:
    - go get gotest.tools/gotestsum
    - gotestsum --junitfile report.xml --format testname
  artifacts:
    when: always
    reports:
      junit: report.xml
```
### Java examples
There are a few tools that can produce JUnit report format XML files in Java.
#### Gradle
In the following example, `gradle` is used to generate the test reports.
If there are multiple test tasks defined, `gradle` generates multiple
directories under `build/test-results/`. In that case, you can leverage glob
matching by defining the following path: `build/test-results/test/**/TEST-*.xml`:
```yaml
java:
  stage: test
  script:
    - gradle test
  artifacts:
    when: always
    reports:
      junit: build/test-results/test/**/TEST-*.xml
```
In [GitLab Runner 13.0](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2620)
and later, you can use `**`.
#### Maven
For parsing [Surefire](https://maven.apache.org/surefire/maven-surefire-plugin/)
and [Failsafe](https://maven.apache.org/surefire/maven-failsafe-plugin/) test
reports, use the following job in `.gitlab-ci.yml`:
```yaml
java:
  stage: test
  script:
    - mvn verify
  artifacts:
    when: always
    reports:
      junit:
        - target/surefire-reports/TEST-*.xml
        - target/failsafe-reports/TEST-*.xml
```
### Python example
This example uses pytest with the `--junitxml=report.xml` flag to format the output
into the JUnit report XML format:
```yaml
pytest:
  stage: test
  script:
    - pytest --junitxml=report.xml
  artifacts:
    when: always
    reports:
      junit: report.xml
```
### C/C++ example
There are a few tools that can produce JUnit report format XML files in C/C++.
#### GoogleTest
In the following example, `gtest` is used to generate the test reports.
If there are multiple `gtest` executables created for different architectures (`x86`, `x64` or `arm`),
you are required to run each test providing a unique filename. The results
are then aggregated together.
```yaml
cpp:
  stage: test
  script:
    - gtest.exe --gtest_output="xml:report.xml"
  artifacts:
    when: always
    reports:
      junit: report.xml
```
#### CUnit
[CUnit](https://cunity.gitlab.io/cunit/) can be made to produce [JUnit report format XML files](https://cunity.gitlab.io/cunit/group__CI.html) automatically when run using its `CUnitCI.h` macros:
```yaml
cunit:
  stage: test
  script:
    - ./my-cunit-test
  artifacts:
    when: always
    reports:
      junit: ./my-cunit-test.xml
```
### .NET example
The [JunitXML.TestLogger](https://www.nuget.org/packages/JunitXml.TestLogger/) NuGet
package can generate test reports for .Net Framework and .Net Core applications. The following
example expects a solution in the root folder of the repository, with one or more
project files in sub-folders. One result file is produced per test project, and each file
is placed in the artifacts folder. This example includes optional formatting arguments, which
improve the readability of test data in the test widget. A full .Net Core
[example is available](https://gitlab.com/Siphonophora/dot-net-cicd-test-logging-demo).
```yaml
## Source code and documentation are here: https://github.com/spekt/junit.testlogger/
Test:
  stage: test
  script:
    - 'dotnet test --test-adapter-path:. --logger:"junit;LogFilePath=..\artifacts\{assembly}-test-result.xml;MethodFormat=Class;FailureBodyFormat=Verbose"'
  artifacts:
    when: always
    paths:
      - ./**/*test-result.xml
    reports:
      junit:
        - ./**/*test-result.xml
```
### JavaScript example
There are a few tools that can produce JUnit report format XML files in JavaScript.
#### Jest
The [jest-junit](https://github.com/jest-community/jest-junit) npm package can generate test reports for JavaScript applications.
In the following `.gitlab-ci.yml` example, the `javascript` job uses Jest to generate the test reports:
```yaml
javascript:
  stage: test
  script:
    - 'jest --ci --reporters=default --reporters=jest-junit'
  artifacts:
    when: always
    reports:
      junit:
        - junit.xml
```
#### Karma
The [Karma-junit-reporter](https://github.com/karma-runner/karma-junit-reporter) npm package can generate test reports for JavaScript applications.
In the following `.gitlab-ci.yml` example, the `javascript` job uses Karma to generate the test reports:
```yaml
javascript:
  stage: test
  script:
    - karma start --reporters junit
  artifacts:
    when: always
    reports:
      junit:
        - junit.xml
```
#### Mocha
The [JUnit Reporter for Mocha](https://github.com/michaelleeallen/mocha-junit-reporter) NPM package can generate test reports for JavaScript
applications.
In the following `.gitlab-ci.yml` example, the `javascript` job uses Mocha to generate the test reports:
```yaml
javascript:
  stage: test
  script:
    - mocha --reporter mocha-junit-reporter --reporter-options mochaFile=junit.xml
  artifacts:
    when: always
    reports:
      junit:
        - junit.xml
```
### Flutter / Dart example
This example `.gitlab-ci.yml` file uses the [JUnit Report](https://pub.dev/packages/junitreport) package to convert the `flutter test` output into JUnit report XML format:
```yaml
test:
  stage: test
  script:
    - flutter test --machine | tojunit -o report.xml
  artifacts:
    when: always
    reports:
      junit:
        - report.xml
```
### PHP example
This example uses [PHPUnit](https://phpunit.de/) with the `--log-junit` flag.
You can also add this option using
[XML](https://phpunit.readthedocs.io/en/stable/configuration.html#the-junit-element)
in the `phpunit.xml` configuration file.
```yaml
phpunit:
  stage: test
  script:
    - composer install
    - vendor/bin/phpunit --log-junit report.xml
  artifacts:
    when: always
    reports:
      junit: report.xml
```
## Viewing Unit test reports on GitLab
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/24792) in GitLab 12.5 behind a feature flag (`junit_pipeline_view`), disabled by default.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/216478) in GitLab 13.3.
If JUnit report format XML files are generated and uploaded as part of a pipeline, these reports
can be viewed inside the pipelines details page. The **Tests** tab on this page
displays a list of test suites and cases reported from the XML file.
![Test Reports Widget](img/pipelines_junit_test_report_v13_10.png)
You can view all the known test suites and select each of these to see further
details, including the cases that make up the suite.
You can also retrieve the reports via the [GitLab API](../api/pipelines.md#get-a-pipelines-test-report).
### Unit test reports parsing errors
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/263457) in GitLab 13.10.
If parsing JUnit report XML results in an error, an indicator is shown next to the job name. Hovering over the icon shows the parser error in a tooltip. If multiple parsing errors come from [grouped jobs](jobs/index.md#group-jobs-in-a-pipeline), GitLab shows only the first error from the group.
![Test Reports With Errors](img/pipelines_junit_test_report_with_errors_v13_10.png)
For test case parsing limits, see [Max test cases per unit test report](../user/gitlab_com/#gitlab-cicd).
GitLab does not parse very [large nodes](https://nokogiri.org/tutorials/parsing_an_html_xml_document.html#parse-options) of JUnit reports. There is [an issue](https://gitlab.com/gitlab-org/gitlab/-/issues/268035) open to make this optional.
## Viewing JUnit screenshots on GitLab
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/202114) in GitLab 13.0 behind the `:junit_pipeline_screenshots_view` feature flag, disabled by default.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/216979) in GitLab 13.12.
Upload your screenshots as [artifacts](yaml/artifacts_reports.md#artifactsreportsjunit) to GitLab. If JUnit
report format XML files contain an `attachment` tag, GitLab parses the attachment. Note that:
- The `attachment` tag **must** contain the relative path to `$CI_PROJECT_DIR` of the screenshots you uploaded. For
example:
```xml
<testcase time="1.00" name="Test">
  <system-out>[[ATTACHMENT|/path/to/some/file]]</system-out>
</testcase>
```
- You should set the job that uploads the screenshot to
[`artifacts:when: always`](yaml/index.md#artifactswhen) so that it still uploads a screenshot
when a test fails.
A link to the test case attachment appears in the test case details in
[the pipeline test report](#viewing-unit-test-reports-on-gitlab).
<!-- This redirect file can be deleted after <2022-08-31>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->
<!-- Redirects that point to docs in a different project or site (for example, link is not relative and starts with `https:`) expire in one year. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->


@ -219,7 +219,7 @@ The `junit` report collects [JUnit report format XML files](https://www.ibm.com/
The collected Unit test reports upload to GitLab as an artifact. Although JUnit was originally developed in Java, there
are many third-party ports for other languages such as JavaScript, Python, and Ruby.
See [Unit test reports](../unit_test_reports.md) for more details and examples.
See [Unit test reports](../testing/unit_test_reports.md) for more details and examples.
Below is an example of collecting a JUnit report format XML file from Ruby's RSpec test tool:
```yaml
@ -235,8 +235,8 @@ rspec:
GitLab can display the results of one or more reports in:
- The merge request [code quality widget](../../ci/unit_test_reports.md#how-it-works).
- The [full report](../../ci/unit_test_reports.md#viewing-unit-test-reports-on-gitlab).
- The merge request [code quality widget](../testing/unit_test_reports.md#how-it-works).
- The [full report](../testing/unit_test_reports.md#view-unit-test-reports-on-gitlab).
Some JUnit tools export to multiple XML files. You can specify multiple test report paths in a single job to
concatenate them into a single file. Use either:


@ -977,7 +977,7 @@ failure.
- `on_success` (default): Upload artifacts only when the job succeeds.
- `on_failure`: Upload artifacts only when the job fails.
- `always`: Always upload artifacts (except when jobs time out). For example, when
[uploading artifacts](../unit_test_reports.md#viewing-junit-screenshots-on-gitlab)
[uploading artifacts](../testing/unit_test_reports.md#view-junit-screenshots-on-gitlab)
required to troubleshoot failing tests.
**Example of `artifacts:when`**:


@ -43,7 +43,7 @@ best place to integrate your own product and its results into GitLab.
implications for app security, corporate policy, or compliance. When complete,
the job reports back on its status and creates a
[job artifact](../../ci/pipelines/job_artifacts.md) as a result.
- The [Merge Request Security Widget](../../user/project/merge_requests/testing_and_reports_in_merge_requests.md#security-reports)
- The [Merge Request Security Widget](../../ci/testing/index.md#security-reports)
displays the results of the pipeline's security checks and the developer can
review them. The developer can review both a summary and a detailed version
of the results.
@ -95,7 +95,7 @@ and complete an integration with the Secure stage.
- If you need a new kind of scan or report, [create an issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new#)
and add the label `devops::secure`.
- Once the job is completed, the data can be seen:
- In the [Merge Request Security Report](../../user/project/merge_requests/testing_and_reports_in_merge_requests.md#security-reports) ([MR Security Report data flow](https://gitlab.com/snippets/1910005#merge-request-view)).
- In the [Merge Request Security Report](../../ci/testing/index.md#security-reports) ([MR Security Report data flow](https://gitlab.com/snippets/1910005#merge-request-view)).
- While [browsing a Job Artifact](../../ci/pipelines/job_artifacts.md).
- In the [Security Dashboard](../../user/application_security/security_dashboard/index.md) ([Dashboard data flow](https://gitlab.com/snippets/1910005#project-and-group-dashboards)).
1. Optional: Provide a way to interact with results as Vulnerabilities:


@ -166,7 +166,7 @@ There are a few caveats for this Rake task:
- The pipeline must have been completed.
- You may need to wait for the test report to be parsed and retry again.
This Rake task depends on the [unit test reports](../ci/unit_test_reports.md) feature,
This Rake task depends on the [unit test reports](../ci/testing/unit_test_reports.md) feature,
which only gets parsed when it is requested for the first time.
### Speed up tests, Rake tasks, and migrations


@ -11,7 +11,7 @@ This guide describes how to develop Service Ping metrics using metrics instrumen
## Nomenclature
- **Instrumentation class**:
- Inherits one of the metric classes: `DatabaseMetric`, `RedisMetric`, `RedisHLLMetric` or `GenericMetric`.
- Inherits one of the metric classes: `DatabaseMetric`, `RedisMetric`, `RedisHLLMetric`, `NumbersMetric` or `GenericMetric`.
- Implements the logic that calculates the value for a Service Ping metric.
- **Metric definition**
@ -24,7 +24,7 @@ This guide describes how to develop Service Ping metrics using metrics instrumen
A metric definition has the [`instrumentation_class`](metrics_dictionary.md) field, which can be set to a class.
The defined instrumentation class should inherit one of the existing metric classes: `DatabaseMetric`, `RedisMetric`, `RedisHLLMetric`, or `GenericMetric`.
The defined instrumentation class should inherit one of the existing metric classes: `DatabaseMetric`, `RedisMetric`, `RedisHLLMetric`, `NumbersMetric` or `GenericMetric`.
The current convention is that a single instrumentation class corresponds to a single metric. On a rare occasions, there are exceptions to that convention like [Redis metrics](#redis-metrics). To use a single instrumentation class for more than one metric, please reach out to one of the `@gitlab-org/growth/product-intelligence/engineers` members to consult about your case.
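As an illustration of that shape, here is a minimal sketch of a `GenericMetric`-style instrumentation class (a settings-backed metric; treat the specifics as illustrative rather than canonical):

```ruby
# Illustrative sketch: a GenericMetric subclass reports whatever its
# `value` block returns, so a settings-backed metric needs only a few lines.
module Gitlab
  module Usage
    module Metrics
      module Instrumentations
        class UuidMetric < GenericMetric
          value do
            Gitlab::CurrentSettings.uuid
          end
        end
      end
    end
  end
end
```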
@ -221,6 +221,43 @@ options:
- i_quickactions_approve
```
## Numbers metrics
- `operation`: Operation for the given `data` block. Currently, only the `add` operation is supported.
- `data`: a `block` which contains an array of numbers.
- `available?`: Specifies whether the metric should be reported. The default is `true`.
```ruby
# frozen_string_literal: true

module Gitlab
  module Usage
    module Metrics
      module Instrumentations
        class IssuesBoardsCountMetric < NumbersMetric
          operation :add

          data do |time_frame|
            [
              CountIssuesMetric.new(time_frame: time_frame).value,
              CountBoardsMetric.new(time_frame: time_frame).value
            ]
          end
        end
      end
    end
  end
end
```
You must also include the instrumentation class name in the YAML setup.
```yaml
time_frame: 28d
instrumentation_class: 'IssuesBoardsCountMetric'
```
## Generic metrics
- `value`: Specifies the value of the metric.
@ -251,6 +288,7 @@ There is support for:
- `count`, `distinct_count`, `estimate_batch_distinct_count`, `sum` for [database metrics](#database-metrics).
- [Redis metrics](#redis-metrics).
- [Redis HLL metrics](#redis-hyperloglog-metrics).
- `add` for [numbers metrics](#numbers-metrics).
- [Generic metrics](#generic-metrics), which are metrics based on settings or configurations.
There is no support for:
@ -284,6 +322,7 @@ This guide describes how to migrate a Service Ping metric from [`lib/gitlab/usag
- [Database metric](#database-metrics)
- [Redis HyperLogLog metrics](#redis-hyperloglog-metrics)
- [Redis metric](#redis-metrics)
- [Numbers metric](#numbers-metrics)
- [Generic metric](#generic-metrics)
1. Determine the location of instrumentation class: either under `ee` or outside `ee`.


@ -0,0 +1,95 @@
---
stage: none
group: unassigned
info: For assistance with this tutorial, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments-to-other-projects-and-subjects.
---
# Move your personal project to a group **(FREE SAAS)**
This tutorial will show you how to move a personal project to a group.
## Why is a group important?
In GitLab, you use [groups](../user/group/index.md)
to manage one or more related projects at the same time.
A group gives you some great benefits. For example, you can:
- Manage permissions for your projects.
- View all of the issues and merge requests for the projects in the group.
- View all unique users in your namespace, across all projects.
- Manage usage quotas.
- Start a trial or upgrade to a paid tier. This option is important if you're
impacted by the [changes to user limits](https://about.gitlab.com/blog/2022/03/24/efficient-free-tier/),
and need more users.
However, if you're working in a [personal project](../user/project/working_with_projects.md#view-personal-projects),
you can't use these features. Personal projects are created under your
[personal namespace](../user/group/index.md#namespaces). They're not part of a group,
so you can't get any of the benefits and features of a group.
But don't worry! You can move your existing personal project to a group.
The next steps show you how.
## Steps
Here's an overview of what we're going to do:
1. [Create a group](#create-a-group).
1. [Move your project to a group](#move-your-project-to-a-group).
1. [Work with your group](#work-with-your-group).
### Create a group
To begin, make sure you have a suitable group to move your project to.
The group must allow the creation of projects, and you must have at least the
Maintainer role for the group.
If you don't have a group, create one:
1. On the top bar, select **Menu > Groups > Create group**.
1. Select **Create group**.
1. In **Group name**, enter a name for the group.
1. In **Group URL**, enter a path for the group, which is used as the namespace.
1. Choose the [visibility level](../user/public_access.md).
1. Optional. Fill in information to personalize your experience.
1. Select **Create group**.
### Move your project to a group
Before you move your project to a group:
- You must have the Owner role for the project.
- Remove any [container images](../user/packages/container_registry/index.md#limitations)
and [NPM packages](../user/packages/npm_registry/index.md#limitations).
Now you're ready to move your project:
1. On the top bar, select **Menu > Projects** and find your project.
1. On the left sidebar, select **Settings > General**.
1. Expand **Advanced**.
1. Under **Transfer project**, choose the group to transfer the project to.
1. Select **Transfer project**.
1. Enter the project's name and select **Confirm**.
You are redirected to the project's new page.
If you have more than one personal project, you can repeat these steps for each
project.
NOTE:
For more information about these migration steps,
see [Transferring your project into another namespace](../user/project/settings/index.md#transferring-an-existing-project-into-another-namespace).
A migration might result in follow-up work to update the project path in
your related resources and tools, such as websites and package managers.
### Work with your group
You can now view your project in your group:
1. On the top bar, select **Menu > Groups** and find your group.
1. Look for your project under **Subgroups and projects**.
Start enjoying the benefits of a group! For example, as the group Owner, you can
quickly view all unique users in your namespace:
1. In your group, select **Settings > Usage Quotas**.
1. The **Seats** tab displays all users across all projects in your group.


@ -157,7 +157,7 @@ the related documentation.
| Maximum pipeline schedules in projects | `10` for Free tier, `50` for all paid tiers | See [Number of pipeline schedules](../../administration/instance_limits.md#number-of-pipeline-schedules) |
| Maximum pipelines per schedule | `24` for Free tier, `288` for all paid tiers | See [Limit the number of pipelines created by a pipeline schedule per day](../../administration/instance_limits.md#limit-the-number-of-pipelines-created-by-a-pipeline-schedule-per-day) |
| Scheduled job archiving | 3 months (from June 22, 2020). Jobs created before that date were archived after September 22, 2020. | Never |
| Maximum test cases per [unit test report](../../ci/unit_test_reports.md) | `500000` | Unlimited |
| Maximum test cases per [unit test report](../../ci/testing/unit_test_reports.md) | `500000` | Unlimited |
| Maximum registered runners | Free tier: `50` per-group / `50` per-project<br/>All paid tiers: `1000` per-group / `1000` per-project | See [Number of registered runners per scope](../../administration/instance_limits.md#number-of-registered-runners-per-scope) |
| Limit of dotenv variables | Free tier: `50` / Premium tier: `100` / Ultimate tier: `150` | See [Limit dotenv variables](../../administration/instance_limits.md#limit-dotenv-variables) |

View File

@ -219,7 +219,7 @@ For a software developer working in a team:
1. You checkout a new branch, and submit your changes through a merge request.
1. You gather feedback from your team.
1. You work on the implementation optimizing code with [Code Quality reports](code_quality.md).
1. You verify your changes with [Unit test reports](../../../ci/unit_test_reports.md) in GitLab CI/CD.
1. You verify your changes with [Unit test reports](../../../ci/testing/unit_test_reports.md) in GitLab CI/CD.
1. You avoid using dependencies whose license is not compatible with your project with [License Compliance reports](../../compliance/license_compliance/index.md).
1. You request the [approval](approvals/index.md) from your manager.
1. Your manager:
@ -244,7 +244,7 @@ For a web developer writing a webpage for your company's website:
- [Create a merge request](creating_merge_requests.md)
- [Review a merge request](reviews/index.md)
- [Authorization for merge requests](authorization_for_merge_requests.md)
- [Testing and reports](testing_and_reports_in_merge_requests.md)
- [Testing and reports](../../../ci/testing/index.md)
- [GitLab keyboard shortcuts](../../shortcuts.md)
- [Comments and threads](../../discussions/index.md)
- [Suggest code changes](reviews/suggestions.md)

View File

@ -1,39 +1,11 @@
---
stage: Verify
group: Pipeline Insights
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
description: "Test your code and display reports in merge requests"
redirect_to: '../../../ci/testing/index.md'
remove_date: '2022-08-31'
---
# Tests and reports in merge requests **(FREE)**
This document was moved to [another location](../../../ci/testing/index.md).
GitLab has the ability to test the changes included in a feature branch and display reports
or link to useful information directly from merge requests:
| Feature | Description |
|----------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| [Accessibility Testing](accessibility_testing.md) | Automatically report A11y violations for changed pages in merge requests. |
| [Browser Performance Testing](browser_performance_testing.md) | Quickly determine the browser performance impact of pending code changes. |
| [Load Performance Testing](load_performance_testing.md) | Quickly determine the server performance impact of pending code changes. |
| [Code Quality](code_quality.md) | Analyze your source code quality using the [Code Climate](https://codeclimate.com/) analyzer and show the Code Climate report right in the merge request widget area. |
| [Display arbitrary job artifacts](../../../ci/yaml/index.md#artifactsexpose_as) | Configure CI pipelines with the `artifacts:expose_as` parameter to directly link to selected [artifacts](../../../ci/pipelines/job_artifacts.md) in merge requests. |
| [GitLab CI/CD](../../../ci/index.md) | Build, test, and deploy your code on a per-branch basis with built-in CI/CD. |
| [Unit test reports](../../../ci/unit_test_reports.md) | Configure your CI jobs to use Unit test reports, and let GitLab display a report on the merge request so that it's easier and faster to identify the failure without having to check the entire job log. |
| [License Compliance](../../compliance/license_compliance/index.md) | Manage the licenses of your dependencies. |
| [Metrics Reports](../../../ci/metrics_reports.md) | Display the Metrics Report on the merge request so that it's fast and easy to identify changes to important metrics. |
| [Multi-Project pipelines](../../../ci/pipelines/multi_project_pipelines.md) | When you set up GitLab CI/CD across multiple projects, you can visualize the entire pipeline, including all cross-project interdependencies. |
| [Merge request pipelines](../../../ci/pipelines/merge_request_pipelines.md) | Customize a specific pipeline structure for merge requests in order to speed the cycle up by running only important jobs. |
| [Pipeline Graphs](../../../ci/pipelines/index.md#visualize-pipelines) | View the status of pipelines within the merge request, including the deployment process. |
| [Test Coverage visualization](test_coverage_visualization.md) | See test coverage results for merge requests, within the file diff. |
## Security Reports **(ULTIMATE)**
In addition to the reports listed above, GitLab can generate many types of [Security reports](../../application_security/index.md)
by scanning your project and reporting any vulnerabilities found:
| Feature | Description |
|-----------------------------------------------------------------------------------------|------------------------------------------------------------------|
| [Container Scanning](../../application_security/container_scanning/index.md) | Analyze your Docker images for known vulnerabilities. |
| [Dynamic Application Security Testing (DAST)](../../application_security/dast/index.md) | Analyze your running web applications for known vulnerabilities. |
| [Dependency Scanning](../../application_security/dependency_scanning/index.md) | Analyze your dependencies for known vulnerabilities. |
| [Static Application Security Testing (SAST)](../../application_security/sast/index.md) | Analyze your source code for known vulnerabilities. |
<!-- This redirect file can be deleted after <2022-08-31>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->
<!-- Redirects that point to docs in a different project or site (for example, link is not relative and starts with `https:`) expire in one year. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->

View File

@ -11,17 +11,17 @@ module Gitlab
# We exclude `bare_repository` here as it has no import class associated
IMPORT_TABLE = [
ImportSource.new('github', 'GitHub', Gitlab::GithubImport::ParallelImporter),
ImportSource.new('bitbucket', 'Bitbucket Cloud', Gitlab::BitbucketImport::Importer),
ImportSource.new('bitbucket_server', 'Bitbucket Server', Gitlab::BitbucketServerImport::Importer),
ImportSource.new('gitlab', 'GitLab.com', Gitlab::GitlabImport::Importer),
ImportSource.new('google_code', 'Google Code', nil),
ImportSource.new('fogbugz', 'FogBugz', Gitlab::FogbugzImport::Importer),
ImportSource.new('git', 'Repo by URL', nil),
ImportSource.new('gitlab_project', 'GitLab export', Gitlab::ImportExport::Importer),
ImportSource.new('gitea', 'Gitea', Gitlab::LegacyGithubImport::Importer),
ImportSource.new('manifest', 'Manifest file', nil),
ImportSource.new('phabricator', 'Phabricator', Gitlab::PhabricatorImport::Importer)
ImportSource.new('github', 'GitHub', Gitlab::GithubImport::ParallelImporter),
ImportSource.new('bitbucket', 'Bitbucket Cloud', Gitlab::BitbucketImport::Importer),
ImportSource.new('bitbucket_server', 'Bitbucket Server', Gitlab::BitbucketServerImport::Importer),
ImportSource.new('gitlab', 'GitLab.com', Gitlab::GitlabImport::Importer),
ImportSource.new('google_code', 'Google Code', nil),
ImportSource.new('fogbugz', 'FogBugz', Gitlab::FogbugzImport::Importer),
ImportSource.new('git', 'Repository by URL', nil),
ImportSource.new('gitlab_project', 'GitLab export', Gitlab::ImportExport::Importer),
ImportSource.new('gitea', 'Gitea', Gitlab::LegacyGithubImport::Importer),
ImportSource.new('manifest', 'Manifest file', nil),
ImportSource.new('phabricator', 'Phabricator', Gitlab::PhabricatorImport::Importer)
].freeze
class << self

View File

@ -11,6 +11,8 @@ module Gitlab
finish { Issue.maximum(:id) }
relation { Issue }
cache_start_and_finish_as :issue
end
end
end

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true
module Gitlab
module Usage
module Metrics
module Instrumentations
class IssuesCreatedFromAlertsMetric < NumbersMetric
ISSUES_FROM_ALERTS_METRICS = [
IssuesWithAlertManagementAlertsMetric,
IssuesWithPrometheusAlertEvents,
IssuesWithSelfManagedPrometheusAlertEvents
].freeze
operation :add
data do |time_frame|
ISSUES_FROM_ALERTS_METRICS.map { |metric| metric.new(time_frame: time_frame).value }
end
# Overrides the default instrumentation to generate the appropriate SQL query
def instrumentation
'SELECT ' + ISSUES_FROM_ALERTS_METRICS.map do |metric|
"(#{metric.new(time_frame: time_frame).instrumentation})"
end.join(' + ')
end
end
end
end
end
end

View File

@ -0,0 +1,24 @@
# frozen_string_literal: true
module Gitlab
module Usage
module Metrics
module Instrumentations
class IssuesWithAlertManagementAlertsMetric < DatabaseMetric
# this metric is used in IssuesCreatedFromAlertsMetric
# do not report metric directly in service ping
available? { false }
operation :count
start { Issue.minimum(:id) }
finish { Issue.maximum(:id) }
relation { Issue.with_alert_management_alerts }
cache_start_and_finish_as :issue
end
end
end
end
end

View File

@ -0,0 +1,24 @@
# frozen_string_literal: true
module Gitlab
module Usage
module Metrics
module Instrumentations
class IssuesWithPrometheusAlertEvents < DatabaseMetric
# this metric is used in IssuesCreatedFromAlertsMetric
# do not report metric directly in service ping
available? { false }
operation :count
start { Issue.minimum(:id) }
finish { Issue.maximum(:id) }
relation { Issue.with_prometheus_alert_events }
cache_start_and_finish_as :issue
end
end
end
end
end

View File

@ -0,0 +1,24 @@
# frozen_string_literal: true
module Gitlab
module Usage
module Metrics
module Instrumentations
class IssuesWithSelfManagedPrometheusAlertEvents < DatabaseMetric
# this metric is used in IssuesCreatedFromAlertsMetric
# do not report metric directly in service ping
available? { false }
operation :count
start { Issue.minimum(:id) }
finish { Issue.maximum(:id) }
relation { Issue.with_self_managed_prometheus_alert_events }
cache_start_and_finish_as :issue
end
end
end
end
end

View File

@ -0,0 +1,60 @@
# frozen_string_literal: true
module Gitlab
module Usage
module Metrics
module Instrumentations
class NumbersMetric < BaseMetric
# Usage Example
#
# class BoardsCountMetric < NumbersMetric
# operation :add
#
# data do |time_frame|
# [
# CountIssuesMetric.new(time_frame: time_frame).value,
# CountBoardsMetric.new(time_frame: time_frame).value,
# ]
# end
# end
UnimplementedOperationError = Class.new(StandardError) # rubocop:disable UsageData/InstrumentationSuperclass
class << self
IMPLEMENTED_OPERATIONS = %i(add).freeze
private_constant :IMPLEMENTED_OPERATIONS
def data(&block)
return @metric_data&.call unless block_given?
@metric_data = block
end
def operation(symbol)
raise UnimplementedOperationError unless symbol.in?(IMPLEMENTED_OPERATIONS)
@metric_operation = symbol
end
attr_reader :metric_operation, :metric_data
end
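# Applies the configured operation (currently only :add) to the numbers
# returned by the data block.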
def value
method(self.class.metric_operation).call(*data)
end
def suggested_name
Gitlab::Usage::Metrics::NameSuggestion.for(:alt)
end
private
def data
self.class.metric_data.call(time_frame)
end
end
end
end
end
end

View File

@ -815,9 +815,7 @@ module Gitlab
def total_alert_issues
# Remove prometheus table queries once they are deprecated
# To be removed with https://gitlab.com/gitlab-org/gitlab/-/issues/217407.
add count(Issue.with_alert_management_alerts, start: minimum_id(Issue), finish: maximum_id(Issue)),
count(::Issue.with_self_managed_prometheus_alert_events, start: minimum_id(Issue), finish: maximum_id(Issue)),
count(::Issue.with_prometheus_alert_events, start: minimum_id(Issue), finish: maximum_id(Issue))
add_metric('IssuesCreatedFromAlertsMetric')
end
def clear_memoized

View File

@ -23234,6 +23234,9 @@ msgstr ""
msgid "Make issue confidential"
msgstr ""
msgid "Make sure you have the correct permissions to link your project."
msgstr ""
msgid "Make sure you save it - you won't be able to access it again."
msgstr ""
@ -25661,6 +25664,9 @@ msgstr ""
msgid "No project subscribes to the pipelines in this project."
msgstr ""
msgid "No projects available."
msgstr ""
msgid "No projects found"
msgstr ""
@ -31823,9 +31829,6 @@ msgstr ""
msgid "Reply…"
msgstr ""
msgid "Repo by URL"
msgstr ""
msgid "Report abuse"
msgstr ""
@ -32101,6 +32104,9 @@ msgstr ""
msgid "Repository already read-only"
msgstr ""
msgid "Repository by URL"
msgstr ""
msgid "Repository check"
msgstr ""
@ -43523,9 +43529,6 @@ msgstr ""
msgid "You don't have any open merge requests"
msgstr ""
msgid "You don't have any projects available."
msgstr ""
msgid "You don't have any recent searches"
msgstr ""

View File

@ -91,7 +91,7 @@ module QA
end
def click_repo_by_url_link
click_button 'Repo by URL'
click_button 'Repository by URL'
end
def disable_initialize_with_readme

View File

@ -123,3 +123,4 @@ UsageData/InstrumentationSuperclass:
- :GenericMetric
- :RedisHLLMetric
- :RedisMetric
- :NumbersMetric

View File

@ -61,7 +61,7 @@ RSpec.describe 'Admin updates settings' do
expect(current_settings.import_sources).to be_empty
page.within('.as-visibility-access') do
check "Repo by URL"
check "Repository by URL"
click_button 'Save changes'
end

View File

@ -57,7 +57,7 @@ RSpec.describe 'New project', :js do
expect(page).to have_link('GitHub')
expect(page).to have_link('Bitbucket')
expect(page).to have_link('GitLab.com')
expect(page).to have_button('Repo by URL')
expect(page).to have_button('Repository by URL')
expect(page).to have_link('GitLab export')
end
@ -175,7 +175,7 @@ RSpec.describe 'New project', :js do
it 'does not show the initialize with Readme checkbox on "Import project" tab' do
visit new_project_path
click_link 'Import project'
click_button 'Repo by URL'
click_button 'Repository by URL'
page.within '#import-project-pane' do
expect(page).not_to have_css('input#project_initialize_with_readme')
@ -277,7 +277,7 @@ RSpec.describe 'New project', :js do
click_link 'Import project'
end
context 'from git repository url, "Repo by URL"' do
context 'from git repository url, "Repository by URL"' do
before do
first('.js-import-git-toggle-button').click
end

View File

@ -50,6 +50,7 @@ describe('RunnerInstructionsModal component', () => {
const findPlatformButtons = () => findPlatformButtonGroup().findAllComponents(GlButton);
const findOsxPlatformButton = () => wrapper.find({ ref: 'osx' });
const findArchitectureDropdownItems = () => wrapper.findAllByTestId('architecture-dropdown-item');
const findBinaryDownloadButton = () => wrapper.findByTestId('binary-download-button');
const findBinaryInstructions = () => wrapper.findByTestId('binary-instructions');
const findRegisterCommand = () => wrapper.findByTestId('register-command');
@ -177,25 +178,30 @@ describe('RunnerInstructionsModal component', () => {
});
describe('after a platform and architecture are selected', () => {
const windowsIndex = 2;
const { installInstructions } = mockGraphqlInstructionsWindows.data.runnerSetup;
beforeEach(async () => {
runnerSetupInstructionsHandler.mockResolvedValue(mockGraphqlInstructionsWindows);
findPlatformButtons().at(2).vm.$emit('click'); // another option, happens to be windows
await nextTick();
findArchitectureDropdownItems().at(1).vm.$emit('click'); // another option
await nextTick();
findPlatformButtons().at(windowsIndex).vm.$emit('click');
await waitForPromises();
});
it('runner instructions are requested', () => {
expect(runnerSetupInstructionsHandler).toHaveBeenCalledWith({
expect(runnerSetupInstructionsHandler).toHaveBeenLastCalledWith({
platform: 'windows',
architecture: '386',
architecture: 'amd64',
});
});
it('architecture download link is updated', () => {
const architectures =
mockGraphqlRunnerPlatforms.data.runnerPlatforms.nodes[windowsIndex].architectures.nodes;
expect(findBinaryDownloadButton().attributes('href')).toBe(architectures[0].downloadLocation);
});
it('other binary instructions are shown', () => {
const instructions = findBinaryInstructions().text();
@ -209,6 +215,16 @@ describe('RunnerInstructionsModal component', () => {
'./gitlab-runner.exe register --url http://gdk.test:3000/ --registration-token MY_TOKEN',
);
});
it('runner instructions are requested with another architecture', async () => {
findArchitectureDropdownItems().at(1).vm.$emit('click');
await waitForPromises();
expect(runnerSetupInstructionsHandler).toHaveBeenLastCalledWith({
platform: 'windows',
architecture: '386',
});
});
});
describe('when the modal resizes', () => {

View File

@ -7,17 +7,17 @@ RSpec.describe Gitlab::ImportSources do
it 'returns a hash' do
expected =
{
'GitHub' => 'github',
'Bitbucket Cloud' => 'bitbucket',
'Bitbucket Server' => 'bitbucket_server',
'GitLab.com' => 'gitlab',
'Google Code' => 'google_code',
'FogBugz' => 'fogbugz',
'Repo by URL' => 'git',
'GitLab export' => 'gitlab_project',
'Gitea' => 'gitea',
'Manifest file' => 'manifest',
'Phabricator' => 'phabricator'
'GitHub' => 'github',
'Bitbucket Cloud' => 'bitbucket',
'Bitbucket Server' => 'bitbucket_server',
'GitLab.com' => 'gitlab',
'Google Code' => 'google_code',
'FogBugz' => 'fogbugz',
'Repository by URL' => 'git',
'GitLab export' => 'gitlab_project',
'Gitea' => 'gitea',
'Manifest file' => 'manifest',
'Phabricator' => 'phabricator'
}
expect(described_class.options).to eq(expected)
@ -93,7 +93,7 @@ RSpec.describe Gitlab::ImportSources do
'gitlab' => 'GitLab.com',
'google_code' => 'Google Code',
'fogbugz' => 'FogBugz',
'git' => 'Repo by URL',
'git' => 'Repository by URL',
'gitlab_project' => 'GitLab export',
'gitea' => 'Gitea',
'manifest' => 'Manifest file',

View File

@ -0,0 +1,75 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::NumbersMetric do
subject do
described_class.tap do |metric_class|
metric_class.operation :add
metric_class.data do |time_frame|
[
Gitlab::Usage::Metrics::Instrumentations::CountIssuesMetric.new(time_frame: time_frame).value,
Gitlab::Usage::Metrics::Instrumentations::CountBoardsMetric.new(time_frame: time_frame).value
]
end
end.new(time_frame: 'all')
end
describe '#value' do
let_it_be(:issue_1) { create(:issue) }
let_it_be(:issue_2) { create(:issue) }
let_it_be(:issue_3) { create(:issue) }
let_it_be(:issues) { Issue.all }
let_it_be(:board_1) { create(:board) }
let_it_be(:boards) { Board.all }
before do
allow(Issue.connection).to receive(:transaction_open?).and_return(false)
end
it 'calculates a correct result' do
expect(subject.value).to eq(4)
end
context 'with availability defined' do
subject do
described_class.tap do |metric_class|
metric_class.operation :add
metric_class.data { [1] }
metric_class.available? { false }
end.new(time_frame: 'all')
end
it 'responds to #available? properly' do
expect(subject.available?).to eq(false)
end
end
context 'with availability not defined' do
subject do
Class.new(described_class) do
operation :add
data { [] }
end.new(time_frame: 'all')
end
it 'responds to #available? properly' do
expect(subject.available?).to eq(true)
end
end
end
context 'with unimplemented operation method used' do
subject do
described_class.tap do |metric_class|
metric_class.operation :invalid_operation
metric_class.data { [] }
end.new(time_frame: 'all')
end
it 'raises an error' do
expect { subject }.to raise_error(described_class::UnimplementedOperationError)
end
end
end

View File

@ -94,28 +94,5 @@ RSpec.describe Ci::AbortPipelinesService do
end
end
end
context 'with user pipelines' do
def abort_user_pipelines
described_class.new.execute(user.pipelines, :user_blocked)
end
it 'fails all running pipelines and related jobs' do
expect(abort_user_pipelines).to be_success
expect_correct_cancellations
expect(other_users_pipeline.status).not_to eq('failed')
end
it 'avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new { abort_user_pipelines }.count
pipelines = create_list(:ci_pipeline, 5, :running, project: project, user: user)
create_list(:ci_build, 5, :running, pipeline: pipelines.first)
expect { abort_user_pipelines }.not_to exceed_query_limit(control_count)
end
end
end
end

View File

@ -335,7 +335,6 @@ RSpec.describe 'Every Sidekiq worker' do
'Metrics::Dashboard::PruneOldAnnotationsWorker' => 3,
'Metrics::Dashboard::SyncDashboardsWorker' => 3,
'MigrateExternalDiffsWorker' => 3,
'NamespacelessProjectDestroyWorker' => 3,
'Namespaces::OnboardingIssueCreatedWorker' => 3,
'Namespaces::OnboardingPipelineCreatedWorker' => 3,
'Namespaces::OnboardingProgressWorker' => 3,

View File

@ -1,77 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe NamespacelessProjectDestroyWorker do
include ProjectForksHelper
subject { described_class.new }
before do
# Stub after_save callbacks that will fail when Project has no namespace
allow_any_instance_of(Project).to receive(:update_project_statistics).and_return(nil)
end
describe '#perform' do
context 'project has namespace' do
it 'does not do anything' do
project = create(:project)
subject.perform(project.id)
expect(Project.unscoped.all).to include(project)
end
end
context 'project has no namespace' do
let!(:project) { create(:project) }
before do
allow_any_instance_of(Project).to receive(:namespace).and_return(nil)
end
context 'project not a fork of another project' do
it "truncates the project's team" do
expect_any_instance_of(ProjectTeam).to receive(:truncate)
subject.perform(project.id)
end
it 'deletes the project' do
subject.perform(project.id)
expect(Project.unscoped.all).not_to include(project)
end
it 'does not call unlink_fork' do
is_expected.not_to receive(:unlink_fork)
subject.perform(project.id)
end
end
context 'project forked from another' do
let!(:parent_project) { create(:project) }
let(:project) do
namespaceless_project = fork_project(parent_project)
namespaceless_project.save!
namespaceless_project
end
it 'closes open merge requests' do
merge_request = create(:merge_request, source_project: project, target_project: parent_project)
subject.perform(project.id)
expect(merge_request.reload).to be_closed
end
it 'destroys fork network members' do
subject.perform(project.id)
expect(parent_project.forked_to_members).to be_empty
end
end
end
end
end