Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2020-08-21 21:10:01 +00:00
parent 77d6584e99
commit 7f6fde499f
28 changed files with 581 additions and 306 deletions

View File

@ -509,3 +509,8 @@ Cop/PutGroupRoutesUnderScope:
Include:
- 'config/routes/group.rb'
- 'ee/config/routes/group.rb'
Migration/ComplexIndexesRequireName:
Exclude:
- !ruby/regexp /\Adb\/(post_)?migrate\/201.*\.rb\z/
- !ruby/regexp /\Adb\/(post_)?migrate\/20200[1-7].*\.rb\z/
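The two excludes grandfather migrations created before August 2020; newer migrations must satisfy the new `Migration/ComplexIndexesRequireName` cop (implemented later in this commit), which requires an explicit `name:` whenever an index is added with custom options such as `where:` or `opclass:`. A minimal compliant migration might look like the following sketch; the table, column, and index names are illustrative:

```ruby
# frozen_string_literal: true

# Illustrative sketch only: table, column, and index names are hypothetical.
# An index created with custom options (here `where:`) must be explicitly
# named, so the matching `down` method can drop it by name.
class AddPartialIndexToExamples < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false
  INDEX_NAME = 'index_examples_on_some_column_partial'

  disable_ddl_transaction!

  def up
    add_concurrent_index :examples, :some_column,
      where: 'some_column IS NOT NULL', name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :examples, INDEX_NAME
  end
end
```

Naming the index explicitly also makes the rollback straightforward, since `down` can drop it by name rather than reconstructing the full option set.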

View File

@ -2,11 +2,15 @@
import { GlBanner } from '@gitlab/ui';
import { s__ } from '~/locale';
import axios from '~/lib/utils/axios_utils';
import Tracking from '~/tracking';
const trackingMixin = Tracking.mixin();
export default {
components: {
GlBanner,
},
mixins: [trackingMixin],
inject: {
svgPath: {
default: '',
@ -20,6 +24,9 @@ export default {
calloutsFeatureId: {
default: '',
},
trackLabel: {
default: '',
},
},
i18n: {
title: s__('CustomizeHomepageBanner|Do you want to customize this page?'),
@ -31,8 +38,19 @@ export default {
data() {
return {
visible: true,
tracking: {
label: this.trackLabel,
},
};
},
created() {
this.$nextTick(() => {
this.addTrackingAttributesToButton();
});
},
mounted() {
this.trackOnShow();
},
methods: {
handleClose() {
axios
@ -45,6 +63,23 @@ export default {
});
this.visible = false;
this.track('click_dismiss');
},
trackOnShow() {
if (this.visible) this.track('show_home_page_banner');
},
addTrackingAttributesToButton() {
// we can't add these attributes directly on the button as we need to,
// because the banner's button isn't currently modifiable
// https://gitlab.com/gitlab-org/gitlab-ui/-/blob/9209ec424e5cca14bc8a1b5c9fa12636d8c83dad/src/components/base/banner/banner.vue#L60
const button = this.$refs.banner.$el.querySelector(
`[href='${this.preferencesBehaviorPath}']`,
);
if (button) {
button.setAttribute('data-track-event', 'click_go_to_preferences');
button.setAttribute('data-track-label', this.trackLabel);
}
},
},
};
@ -53,6 +88,7 @@ export default {
<template>
<gl-banner
v-if="visible"
ref="banner"
:title="$options.i18n.title"
:button-text="$options.i18n.button_text"
:button-link="preferencesBehaviorPath"

View File

@ -9,7 +9,8 @@
.js-customize-homepage-banner{ data: { svg_path: image_path('illustrations/monitoring/getting_started.svg'),
preferences_behavior_path: profile_preferences_path(anchor: 'behavior'),
callouts_path: user_callouts_path,
callouts_feature_id: UserCalloutsHelper::CUSTOMIZE_HOMEPAGE } }
callouts_feature_id: UserCalloutsHelper::CUSTOMIZE_HOMEPAGE,
track_label: 'home_page' } }
= render_dashboard_gold_trial(current_user)

View File

@ -18,7 +18,8 @@ class RemoveOldExternalDiffMigrationIndex < ActiveRecord::Migration[6.0]
add_concurrent_index(
:merge_request_diffs,
[:merge_request_id, :id],
where: { stored_externally: [nil, false] }
where: 'NOT stored_externally OR stored_externally IS NULL',
name: 'index_merge_request_diffs_on_merge_request_id_and_id_partial'
)
end
end

View File

@ -244,7 +244,7 @@ The following documentation relates to the DevOps **Verify** stage:
|:----------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------|
| [Code Quality reports](user/project/merge_requests/code_quality.md) | Analyze source code quality. |
| [GitLab CI/CD](ci/README.md) | Explore the features and capabilities of Continuous Integration with GitLab. |
| [JUnit test reports](ci/junit_test_reports.md) | Display JUnit test reports on merge requests. |
| [Unit test reports](ci/unit_test_reports.md) | Display Unit test reports on merge requests. |
| [Multi-project pipelines](ci/multi_project_pipelines.md) **(PREMIUM)** | Visualize entire pipelines that span multiple projects, including all cross-project inter-dependencies. |
| [Pipeline Graphs](ci/pipelines/index.md#visualize-pipelines) | Visualize builds. |
| [Review Apps](ci/review_apps/index.md) | Preview changes to your application right from a merge request. |

View File

@ -96,6 +96,7 @@ From there, you can see the following actions:
- Permission to approve merge requests by authors was updated ([introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/7531) in GitLab 12.9)
- Number of required approvals was updated ([introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/7531) in GitLab 12.9)
- Added or removed users and groups from project approval groups ([introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/213603) in GitLab 13.2)
- Project CI/CD variable added, removed, or protected status changed. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/30857) in GitLab 13.4.
Project events can also be accessed via the [Project Audit Events API](../api/audit_events.md#project-audit-events-starter).

View File

@ -436,7 +436,7 @@ Reports that go over the 20 MB limit won't be loaded. Affected reports:
- [Merge Request security reports](../user/project/merge_requests/testing_and_reports_in_merge_requests.md#security-reports-ultimate)
- [CI/CD parameter `artifacts:expose_as`](../ci/yaml/README.md#artifactsexpose_as)
- [JUnit test reports](../ci/junit_test_reports.md)
- [Unit test reports](../ci/unit_test_reports.md)
## Advanced Global Search limits

View File

@ -155,8 +155,8 @@ Example of response
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/202525) in GitLab 13.0.
CAUTION: **Caution:**
This API route is part of the [JUnit test report](../ci/junit_test_reports.md) feature. It is protected by a [feature flag](../development/feature_flags/index.md) that is **disabled** due to performance issues with very large data sets.
NOTE: **Note:**
This API route is part of the [Unit test report](../ci/unit_test_reports.md) feature.
```plaintext
GET /projects/:id/pipelines/:pipeline_id/test_report

View File

@ -132,7 +132,7 @@ Its feature set is listed on the table below according to DevOps stages.
| [Code Quality](../user/project/merge_requests/code_quality.md) | Analyze your source code quality. |
| [GitLab CI/CD for external repositories](ci_cd_for_external_repos/index.md) **(PREMIUM)** | Get the benefits of GitLab CI/CD combined with repositories in GitHub and Bitbucket Cloud. |
| [Interactive Web Terminals](interactive_web_terminal/index.md) **(CORE ONLY)** | Open an interactive web terminal to debug the running jobs. |
| [JUnit tests](junit_test_reports.md) | Identify script failures directly on merge requests. |
| [Unit test reports](unit_test_reports.md) | Identify script failures directly on merge requests. |
| [Using Docker images](docker/using_docker_images.md) | Use GitLab and GitLab Runner with Docker to build and test applications. |
|---+---|
| **Release** ||

View File

@ -191,7 +191,7 @@ according to each stage (Verify, Package, Release).
- Analyze your source code quality with [GitLab Code Quality](../../user/project/merge_requests/code_quality.md).
- Determine the browser performance impact of code changes with [Browser Performance Testing](../../user/project/merge_requests/browser_performance_testing.md). **(PREMIUM)**
- Determine the server performance impact of code changes with [Load Performance Testing](../../user/project/merge_requests/load_performance_testing.md). **(PREMIUM)**
- Perform a series of tests, such as [Container Scanning](../../user/application_security/container_scanning/index.md) **(ULTIMATE)**, [Dependency Scanning](../../user/application_security/dependency_scanning/index.md) **(ULTIMATE)**, and [JUnit tests](../junit_test_reports.md).
- Perform a series of tests, such as [Container Scanning](../../user/application_security/container_scanning/index.md) **(ULTIMATE)**, [Dependency Scanning](../../user/application_security/dependency_scanning/index.md) **(ULTIMATE)**, and [Unit tests](../unit_test_reports.md).
- Deploy your changes with [Review Apps](../review_apps/index.md) to preview the app changes on every branch.
1. **Package**:
- Store Docker images with [Container Registry](../../user/packages/container_registry/index.md).

View File

@ -1,281 +1,5 @@
---
stage: Verify
group: Testing
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
type: reference
redirect_to: 'unit_test_reports.md'
---
# JUnit test reports
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/45318) in GitLab 11.2. Requires GitLab Runner 11.2 and above.
## Overview
It is very common that a [CI/CD pipeline](pipelines/index.md) contains a
test job that will verify your code.
If the tests fail, the pipeline fails and users get notified. The person that
works on the merge request will have to check the job logs and see where the
tests failed so that they can fix them.
You can configure your job to use JUnit test reports, and GitLab will display a
report on the merge request so that it's easier and faster to identify the
failure without having to check the entire log.
If you don't use Merge Requests but still want to see the JUnit output without searching through job logs, the full [JUnit test reports](#viewing-junit-test-reports-on-gitlab) are available in the pipeline detail view.
## Use cases
Consider the following workflow:
1. Your `master` branch is rock solid, your project is using GitLab CI/CD and
your pipelines indicate that there isn't anything broken.
1. Someone from your team submits a merge request, a test fails, and the pipeline
gets the known red icon. To investigate, you have to go through the job
logs, which usually contain thousands of lines, to figure out the cause of
the failed test.
1. You configure the JUnit test reports and immediately GitLab collects and
exposes them in the merge request. No more searching in the job logs.
1. Your development and debugging workflow becomes easier, faster, and more efficient.
## How it works
First, GitLab Runner uploads all JUnit XML files as artifacts to GitLab. Then,
when you visit a merge request, GitLab starts comparing the head and base branch's
JUnit test reports, where:
- The base branch is the target branch (usually `master`).
- The head branch is the source branch (the latest pipeline in each merge request).
The reports panel has a summary showing how many tests failed, how many had errors
and how many were fixed. If no comparison can be done because data for the base branch
is not available, the panel will just show the list of failed tests for head.
There are four types of results:
1. **Newly failed tests:** Test cases which passed on base branch and failed on head branch
1. **Newly encountered errors:** Test cases which passed on base branch and failed due to a
test error on head branch
1. **Existing failures:** Test cases which failed on base branch and failed on head branch
1. **Resolved failures:** Test cases which failed on base branch and passed on head branch
Each entry in the panel will show the test name and its type from the list
above. Clicking on the test name will open a modal window with details of its
execution time and the error output.
![Test Reports Widget](img/junit_test_report.png)
## How to set it up
To enable the JUnit reports in merge requests, you need to add
[`artifacts:reports:junit`](pipelines/job_artifacts.md#artifactsreportsjunit)
in `.gitlab-ci.yml`, and specify the path(s) of the generated test reports.
The reports must be `.xml` files, otherwise [GitLab returns an Error 500](https://gitlab.com/gitlab-org/gitlab/-/issues/216575).
In the following examples, the job in the `test` stage runs and GitLab
collects the JUnit test report from each job. After each job is executed, the
XML reports are stored in GitLab as artifacts and their results are shown in the
merge request widget.
To make the JUnit output files browsable, include them with the
[`artifacts:paths`](yaml/README.md#artifactspaths) keyword as well, as shown in the [Ruby example](#ruby-example).
NOTE: **Note:**
You cannot have multiple tests with the same name and class in your JUnit report.
### Ruby example
Use the following job in `.gitlab-ci.yml`. This includes the `artifacts:paths` keyword to provide a link to the JUnit output file.
```yaml
## Use https://github.com/sj26/rspec_junit_formatter to generate a JUnit report with rspec
ruby:
stage: test
script:
- bundle install
- bundle exec rspec --format progress --format RspecJunitFormatter --out rspec.xml
artifacts:
paths:
- rspec.xml
reports:
junit: rspec.xml
```
### Go example
Use the following job in `.gitlab-ci.yml`, and ensure you use `-set-exit-code`,
otherwise the pipeline will be marked successful, even if the tests fail:
```yaml
## Use https://github.com/jstemmer/go-junit-report to generate a JUnit report with go
golang:
stage: test
script:
- go get -u github.com/jstemmer/go-junit-report
- go test -v 2>&1 | go-junit-report -set-exit-code > report.xml
artifacts:
reports:
junit: report.xml
```
### Java examples
There are a few tools that can produce JUnit reports in Java.
#### Gradle
In the following example, `gradle` is used to generate the test reports.
If there are multiple test tasks defined, `gradle` will generate multiple
directories under `build/test-results/`. In that case, you can leverage glob
matching by defining the following path: `build/test-results/test/**/TEST-*.xml`:
```yaml
java:
stage: test
script:
- gradle test
artifacts:
reports:
junit: build/test-results/test/**/TEST-*.xml
```
NOTE: **Note:**
Support for `**` was added in [GitLab Runner 13.0](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2620).
#### Maven
For parsing [Surefire](https://maven.apache.org/surefire/maven-surefire-plugin/)
and [Failsafe](https://maven.apache.org/surefire/maven-failsafe-plugin/) test
reports, use the following job in `.gitlab-ci.yml`:
```yaml
java:
stage: test
script:
- mvn verify
artifacts:
reports:
junit:
- target/surefire-reports/TEST-*.xml
- target/failsafe-reports/TEST-*.xml
```
### Python example
This example uses pytest with the `--junitxml=report.xml` flag to format the output
for JUnit:
```yaml
pytest:
stage: test
script:
- pytest --junitxml=report.xml
artifacts:
reports:
junit: report.xml
```
### C/C++ example
There are a few tools that can produce JUnit reports in C/C++.
#### GoogleTest
In the following example, `gtest` is used to generate the test reports.
If there are multiple gtest executables created for different architectures (`x86`, `x64` or `arm`),
you will be required to run each test providing a unique filename. The results
will then be aggregated together.
```yaml
cpp:
stage: test
script:
- gtest.exe --gtest_output="xml:report.xml"
artifacts:
reports:
junit: report.xml
```
#### CUnit
[CUnit](https://cunity.gitlab.io/cunit/) can be made to produce [JUnit XML reports](https://cunity.gitlab.io/cunit/group__CI.html) automatically when run using its `CUnitCI.h` macros:
```yaml
cunit:
stage: test
script:
- ./my-cunit-test
artifacts:
reports:
junit: ./my-cunit-test.xml
```
### .NET example
The [JunitXML.TestLogger](https://www.nuget.org/packages/JunitXml.TestLogger/) NuGet
package can generate test reports for .NET Framework and .NET Core applications. The following
example expects a solution in the root folder of the repository, with one or more
project files in sub-folders. One result file is produced per test project, and each file
is placed in a new artifacts folder. This example includes optional formatting arguments, which
improve the readability of test data in the test widget. A full .NET Core
[example is available](https://gitlab.com/Siphonophora/dot-net-cicd-test-logging-demo).
```yaml
## Source code and documentation are here: https://github.com/spekt/junit.testlogger/
Test:
stage: test
script:
- 'dotnet test --test-adapter-path:. --logger:"junit;LogFilePath=..\artifacts\{assembly}-test-result.xml;MethodFormat=Class;FailureBodyFormat=Verbose"'
artifacts:
when: always
paths:
- ./**/*test-result.xml
reports:
junit:
- ./**/*test-result.xml
```
## Viewing JUnit test reports on GitLab
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/24792) in GitLab 12.5 behind a feature flag (`junit_pipeline_view`), disabled by default.
> - The feature flag was removed and the feature was [made generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/216478) in GitLab 13.3.
If JUnit XML files are generated and uploaded as part of a pipeline, these reports
can be viewed inside the pipelines details page. The **Tests** tab on this page will
display a list of test suites and cases reported from the XML file.
![Test Reports Widget](img/pipelines_junit_test_report_ui_v12_5.png)
You can view all the known test suites and click on each of these to see further
details, including the cases that make up the suite.
You can also retrieve the reports via the [GitLab API](../api/pipelines.md#get-a-pipelines-test-report).
## Viewing JUnit screenshots on GitLab
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/202114) in GitLab 13.0.
> - It's deployed behind a feature flag, disabled by default.
> - To use it in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enabling-the-junit-screenshots-feature-core-only). **(CORE ONLY)**
If JUnit XML files contain an `attachment` tag, GitLab parses the attachment.
Upload your screenshots as [artifacts](pipelines/job_artifacts.md#artifactsreportsjunit) to GitLab. The `attachment` tag **must** contain the absolute path to the screenshots you uploaded.
```xml
<testcase time="1.00" name="Test">
<system-out>[[ATTACHMENT|/absolute/path/to/some/file]]</system-out>
</testcase>
```
When [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/6061) is complete, the attached file will be visible on the pipeline details page.
### Enabling the JUnit screenshots feature **(CORE ONLY)**
This feature comes with the `:junit_pipeline_screenshots_view` feature flag disabled by default.
To enable this feature, ask a GitLab administrator with [Rails console access](../administration/feature_flags.md#how-to-enable-and-disable-features-behind-flags) to run the
following command:
```ruby
Feature.enable(:junit_pipeline_screenshots_view)
```
This document was moved to [unit_test_reports](unit_test_reports.md).

View File

@ -11,7 +11,7 @@ type: reference
## Overview
GitLab provides a lot of great reporting tools for [merge requests](../user/project/merge_requests/index.md) - [JUnit reports](junit_test_reports.md), [code quality](../user/project/merge_requests/code_quality.md), performance tests, etc. While JUnit is a great open framework for tests that "pass" or "fail", it is also important to see other types of metrics from a given change.
GitLab provides a lot of great reporting tools for [merge requests](../user/project/merge_requests/index.md) - [Unit test reports](unit_test_reports.md), [code quality](../user/project/merge_requests/code_quality.md), performance tests, etc. While JUnit is a great open framework for tests that "pass" or "fail", it is also important to see other types of metrics from a given change.
You can configure your job to use custom Metrics Reports, and GitLab will display a report on the merge request so that it's easier and faster to identify changes without having to check the entire log.

View File

@ -75,13 +75,13 @@ If you also want the ability to browse the report output files, include the
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/20390) in GitLab 11.2.
> - Requires GitLab Runner 11.2 and above.
The `junit` report collects [JUnit XML files](https://www.ibm.com/support/knowledgecenter/en/SSQ2R2_14.1.0/com.ibm.rsar.analysis.codereview.cobol.doc/topics/cac_useresults_junit.html)
The `junit` report collects [JUnit report format XML files](https://www.ibm.com/support/knowledgecenter/en/SSQ2R2_14.1.0/com.ibm.rsar.analysis.codereview.cobol.doc/topics/cac_useresults_junit.html)
as artifacts. Although JUnit was originally developed in Java, there are many
[third party ports](https://en.wikipedia.org/wiki/JUnit#Ports) for other
third party ports for other
languages like JavaScript, Python, Ruby, and so on.
See [JUnit test reports](../junit_test_reports.md) for more details and examples.
Below is an example of collecting a JUnit XML file from Ruby's RSpec test tool:
See [Unit test reports](../unit_test_reports.md) for more details and examples.
Below is an example of collecting a JUnit report format XML file from Ruby's RSpec test tool:
```yaml
rspec:
@ -94,7 +94,7 @@ rspec:
junit: rspec.xml
```
The collected JUnit reports upload to GitLab as an artifact and display in merge requests.
The collected Unit test reports upload to GitLab as an artifact and display in merge requests.
NOTE: **Note:**
If the JUnit tool you use exports to multiple XML files, specify

doc/ci/unit_test_reports.md Normal file
View File

@ -0,0 +1,286 @@
---
stage: Verify
group: Testing
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
type: reference
---
# Unit test reports
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/45318) in GitLab 11.2. Requires GitLab Runner 11.2 and above.
> - [Renamed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39737) from JUnit test reports to Unit test reports in GitLab 13.4.
## Overview
It is very common that a [CI/CD pipeline](pipelines/index.md) contains a
test job that will verify your code.
If the tests fail, the pipeline fails and users get notified. The person who
works on the merge request has to check the job logs to see where the
tests failed, so that they can fix them.
You can configure your job to use Unit test reports, and GitLab will display a
report on the merge request so that it's easier and faster to identify the
failure without having to check the entire log. Unit test reports currently
only support test reports in the JUnit report format.
If you don't use Merge Requests but still want to see the unit test report
output without searching through job logs, the full
[Unit test reports](#viewing-unit-test-reports-on-gitlab) are available
in the pipeline detail view.
## Use cases
Consider the following workflow:
1. Your `master` branch is rock solid, your project is using GitLab CI/CD and
your pipelines indicate that there isn't anything broken.
1. Someone from your team submits a merge request, a test fails, and the pipeline
gets the known red icon. To investigate, you have to go through the job
logs, which usually contain thousands of lines, to figure out the cause of
the failed test.
1. You configure the Unit test reports and immediately GitLab collects and
exposes them in the merge request. No more searching in the job logs.
1. Your development and debugging workflow becomes easier, faster, and more efficient.
## How it works
First, GitLab Runner uploads all JUnit report format XML files as artifacts to GitLab. Then,
when you visit a merge request, GitLab starts comparing the head and base branch's
JUnit report format XML files, where:
- The base branch is the target branch (usually `master`).
- The head branch is the source branch (the latest pipeline in each merge request).
The reports panel has a summary showing how many tests failed, how many had errors
and how many were fixed. If no comparison can be done because data for the base branch
is not available, the panel will just show the list of failed tests for head.
There are four types of results:
1. **Newly failed tests:** Test cases which passed on base branch and failed on head branch
1. **Newly encountered errors:** Test cases which passed on base branch and failed due to a
test error on head branch
1. **Existing failures:** Test cases which failed on base branch and failed on head branch
1. **Resolved failures:** Test cases which failed on base branch and passed on head branch
Each entry in the panel will show the test name and its type from the list
above. Clicking on the test name will open a modal window with details of its
execution time and the error output.
![Test Reports Widget](img/junit_test_report.png)
## How to set it up
To enable the Unit test reports in merge requests, you need to add
[`artifacts:reports:junit`](pipelines/job_artifacts.md#artifactsreportsjunit)
in `.gitlab-ci.yml`, and specify the path(s) of the generated test reports.
The reports must be `.xml` files, otherwise [GitLab returns an Error 500](https://gitlab.com/gitlab-org/gitlab/-/issues/216575).
In the following examples, the job in the `test` stage runs and GitLab
collects the Unit test report from each job. After each job is executed, the
XML reports are stored in GitLab as artifacts and their results are shown in the
merge request widget.
To make the Unit test report output files browsable, include them with the
[`artifacts:paths`](yaml/README.md#artifactspaths) keyword as well, as shown in the [Ruby example](#ruby-example).
NOTE: **Note:**
You cannot have multiple tests with the same name and class in your JUnit report format XML file.
### Ruby example
Use the following job in `.gitlab-ci.yml`. This includes the `artifacts:paths` keyword to provide a link to the Unit test report output file.
```yaml
## Use https://github.com/sj26/rspec_junit_formatter to generate a JUnit report format XML file with rspec
ruby:
stage: test
script:
- bundle install
- bundle exec rspec --format progress --format RspecJunitFormatter --out rspec.xml
artifacts:
paths:
- rspec.xml
reports:
junit: rspec.xml
```
### Go example
Use the following job in `.gitlab-ci.yml`, and ensure you use `-set-exit-code`,
otherwise the pipeline will be marked successful, even if the tests fail:
```yaml
## Use https://github.com/jstemmer/go-junit-report to generate a JUnit report format XML file with go
golang:
stage: test
script:
- go get -u github.com/jstemmer/go-junit-report
- go test -v 2>&1 | go-junit-report -set-exit-code > report.xml
artifacts:
reports:
junit: report.xml
```
### Java examples
There are a few tools that can produce JUnit report format XML files in Java.
#### Gradle
In the following example, `gradle` is used to generate the test reports.
If there are multiple test tasks defined, `gradle` will generate multiple
directories under `build/test-results/`. In that case, you can leverage glob
matching by defining the following path: `build/test-results/test/**/TEST-*.xml`:
```yaml
java:
stage: test
script:
- gradle test
artifacts:
reports:
junit: build/test-results/test/**/TEST-*.xml
```
NOTE: **Note:**
Support for `**` was added in [GitLab Runner 13.0](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2620).
#### Maven
For parsing [Surefire](https://maven.apache.org/surefire/maven-surefire-plugin/)
and [Failsafe](https://maven.apache.org/surefire/maven-failsafe-plugin/) test
reports, use the following job in `.gitlab-ci.yml`:
```yaml
java:
stage: test
script:
- mvn verify
artifacts:
reports:
junit:
- target/surefire-reports/TEST-*.xml
- target/failsafe-reports/TEST-*.xml
```
### Python example
This example uses pytest with the `--junitxml=report.xml` flag to format the output
into the JUnit report XML format:
```yaml
pytest:
stage: test
script:
- pytest --junitxml=report.xml
artifacts:
reports:
junit: report.xml
```
### C/C++ example
There are a few tools that can produce JUnit report format XML files in C/C++.
#### GoogleTest
In the following example, `gtest` is used to generate the test reports.
If there are multiple gtest executables created for different architectures (`x86`, `x64` or `arm`),
you must run each test, providing a unique filename. The results
are then aggregated.
```yaml
cpp:
stage: test
script:
- gtest.exe --gtest_output="xml:report.xml"
artifacts:
reports:
junit: report.xml
```
#### CUnit
[CUnit](https://cunity.gitlab.io/cunit/) can be made to produce [JUnit report format XML files](https://cunity.gitlab.io/cunit/group__CI.html) automatically when run using its `CUnitCI.h` macros:
```yaml
cunit:
stage: test
script:
- ./my-cunit-test
artifacts:
reports:
junit: ./my-cunit-test.xml
```
### .NET example
The [JunitXML.TestLogger](https://www.nuget.org/packages/JunitXml.TestLogger/) NuGet
package can generate test reports for .NET Framework and .NET Core applications. The following
example expects a solution in the root folder of the repository, with one or more
project files in sub-folders. One result file is produced per test project, and each file
is placed in a new artifacts folder. This example includes optional formatting arguments, which
improve the readability of test data in the test widget. A full .NET Core
[example is available](https://gitlab.com/Siphonophora/dot-net-cicd-test-logging-demo).
```yaml
## Source code and documentation are here: https://github.com/spekt/junit.testlogger/
Test:
stage: test
script:
- 'dotnet test --test-adapter-path:. --logger:"junit;LogFilePath=..\artifacts\{assembly}-test-result.xml;MethodFormat=Class;FailureBodyFormat=Verbose"'
artifacts:
when: always
paths:
- ./**/*test-result.xml
reports:
junit:
- ./**/*test-result.xml
```
## Viewing Unit test reports on GitLab
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/24792) in GitLab 12.5 behind a feature flag (`junit_pipeline_view`), disabled by default.
> - The feature flag was removed and the feature was [made generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/216478) in GitLab 13.3.
If JUnit report format XML files are generated and uploaded as part of a pipeline, these reports
can be viewed inside the pipelines details page. The **Tests** tab on this page will
display a list of test suites and cases reported from the XML file.
![Test Reports Widget](img/pipelines_junit_test_report_ui_v12_5.png)
You can view all the known test suites and click on each of these to see further
details, including the cases that make up the suite.
You can also retrieve the reports via the [GitLab API](../api/pipelines.md#get-a-pipelines-test-report).
## Viewing JUnit screenshots on GitLab
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/202114) in GitLab 13.0.
> - It's deployed behind a feature flag, disabled by default.
> - To use it in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enabling-the-junit-screenshots-feature-core-only). **(CORE ONLY)**
If JUnit report format XML files contain an `attachment` tag, GitLab parses the attachment.
Upload your screenshots as [artifacts](pipelines/job_artifacts.md#artifactsreportsjunit) to GitLab. The `attachment` tag **must** contain the absolute path to the screenshots you uploaded.
```xml
<testcase time="1.00" name="Test">
<system-out>[[ATTACHMENT|/absolute/path/to/some/file]]</system-out>
</testcase>
```
When [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/6061) is complete, the attached file will be visible on the pipeline details page.
### Enabling the JUnit screenshots feature **(CORE ONLY)**
This feature comes with the `:junit_pipeline_screenshots_view` feature flag disabled by default.
To enable this feature, ask a GitLab administrator with [Rails console access](../administration/feature_flags.md#how-to-enable-and-disable-features-behind-flags) to run the
following command:
```ruby
Feature.enable(:junit_pipeline_screenshots_view)
```
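Presumably, an administrator can turn the flag back off the same way (a sketch mirroring the command above):

```ruby
Feature.disable(:junit_pipeline_screenshots_view)
```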

View File

@ -74,7 +74,7 @@ that need to be stored. Also, a job may depend on artifacts from previous jobs i
case the Runner will download them using a dedicated API endpoint.
Artifacts are stored in object storage, while metadata is kept in the database. An important example of artifacts
is reports (JUnit, SAST, DAST, etc.) which are parsed and rendered in the merge request.
are reports (JUnit, SAST, DAST, etc.) which are parsed and rendered in the merge request.
Job status transitions are not all automated. A user may run [manual jobs](../../ci/yaml/README.md#whenmanual), cancel a pipeline, retry
specific failed jobs or the entire pipeline. Anything that

Binary file not shown (Before: 53 KiB).

Binary file not shown (After: 61 KiB).

Binary file not shown (After: 54 KiB).

View File

@ -38,7 +38,8 @@ The following analytics features are available at the project level:
- [Code Review](code_review_analytics.md). **(STARTER)**
- [Insights](../group/insights/index.md). **(ULTIMATE)**
- [Issue](../group/issues_analytics/index.md). **(PREMIUM)**
- [Merge Request](merge_request_analytics.md). **(STARTER)**
- [Merge Request](merge_request_analytics.md), enabled with the `project_merge_request_analytics`
[feature flag](../../development/feature_flags/development.md#enabling-a-feature-flag-locally-in-development). **(STARTER)**
- [Repository](repository_analytics.md).
- [Value Stream](value_stream_analytics.md), enabled with the `cycle_analytics`
[feature flag](../../development/feature_flags/development.md#enabling-a-feature-flag-locally-in-development). **(STARTER)**

View File

@ -5,7 +5,6 @@ group: Analytics
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
---
# Merge Request Analytics **(STARTER)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/229045) in [GitLab Starter](https://about.gitlab.com/pricing/) 13.3.
@ -14,7 +13,7 @@ Merge Request Analytics helps you understand the efficiency of your code review
## Overview
Merge Request Analytics displays information about all accepted merge requests.
Merge Request Analytics displays information that will help you evaluate the efficiency and productivity of your merge request process.
The Throughput chart shows the number of completed merge requests, by month. Merge request throughput is
a common measure of productivity in software engineering. Although imperfect, the average throughput can
@ -22,8 +21,6 @@ be a meaningful benchmark of your team's overall productivity.
To access Merge Request Analytics, from your project's menu, go to **Analytics > Merge Request**.
![Merge Request Analytics](img/merge_request_analytics_v13_3.png "Merge Request Analytics - Throughput chart")
## Use cases
This feature is designed for [development team leaders](https://about.gitlab.com/handbook/marketing/product-marketing/roles-personas/#delaney-development-team-lead)
@ -37,6 +34,40 @@ Merge Request Analytics could be used when:
- You want to know if you were more productive this month than last month, or 12 months ago.
- You want to drill into low- or high-productivity months to understand the work that took place.
## Visualizations and data
The following visualizations and data are available, representing all merge requests that were merged in the past 12 months.
### Throughput chart
The throughput chart shows the number of completed merge requests per month.
![Throughput chart](img/mr_throughput_chart_v13_3.png "Merge Request Analytics - Throughput chart showing merge requests merged in the past 12 months")
### Throughput table
A data table displaying up to 100 of the most recently merged merge requests for the time period.
![Throughput table](img/mr_throughput_table_v13_3.png "Merge Request Analytics - Throughput table listing the 100 merge requests most recently merged")
## Permissions
- On [Starter or Bronze tier](https://about.gitlab.com/pricing/) and above.
The **Merge Request Analytics** feature can be accessed only:
- On [Starter](https://about.gitlab.com/pricing/) and above.
- By users with [Reporter access](../permissions.md) and above.
## Enable and disable related feature flags
Merge Request Analytics is disabled by default but can be enabled using the following
[feature flag](../../development/feature_flags/development.md#enabling-a-feature-flag-locally-in-development):
- `project_merge_request_analytics`
A GitLab administrator can:
- Enable this feature by running the following command in a Rails console:
```ruby
Feature.enable(:project_merge_request_analytics)
```
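- Disable this feature by running the following command in a Rails console (a sketch mirroring the enable command above):

```ruby
Feature.disable(:project_merge_request_analytics)
```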

Binary file not shown (Before: 536 KiB).

Binary file not shown (After: 165 KiB).

View File

@ -48,8 +48,6 @@ At the pipeline level, the Security section displays the vulnerabilities present
Visit the page for any pipeline that ran any of the [supported reports](#supported-reports). To view
the pipeline's security findings, select the **Security** tab when viewing the pipeline.
![Pipeline Security Navigation](img/pipeline_security_v13_3.gif)
NOTE: **Note:**
A pipeline consists of multiple jobs, including SAST and DAST scanning. If any job fails to finish for any reason, the security dashboard will not show SAST scanner output. For example, if the SAST job finishes but the DAST job fails, the security dashboard will not show SAST results. The analyzer will output an [exit code](../../../development/integrations/secure.md#exit-code) on failure.
@ -63,11 +61,13 @@ to **Security & Compliance > Security Dashboard**. By default, the Security Dash
detected and confirmed vulnerabilities.
The Security Dashboard first displays the total number of vulnerabilities by severity (for example,
Critical, High, Medium, Low). Below this, a table displays each vulnerability's status, severity,
Critical, High, Medium, Low, Info, Unknown). Below this, a table shows each vulnerability's status, severity,
and description. Clicking a vulnerability takes you to its [Vulnerability Details](../vulnerabilities)
page to view more information about that vulnerability.
You can filter the vulnerabilities by:
![Project Security Dashboard](img/project_security_dashboard_v13_3.png)
You can filter the vulnerabilities by one or more of the following:
- Status
- Severity

View File

@ -19,7 +19,7 @@ A. Consider you're a software developer working in a team:
1. You check out a new branch and submit your changes through a merge request
1. You gather feedback from your team
1. You work on the implementation, optimizing code with [Code Quality reports](code_quality.md)
1. You verify your changes with [JUnit test reports](../../../ci/junit_test_reports.md) in GitLab CI/CD
1. You verify your changes with [Unit test reports](../../../ci/unit_test_reports.md) in GitLab CI/CD
1. You avoid using dependencies whose license is not compatible with your project with [License Compliance reports](../../compliance/license_compliance/index.md) **(ULTIMATE)**
1. You request the [approval](merge_request_approvals.md) from your manager **(STARTER)**
1. Your manager:

View File

@ -19,7 +19,7 @@ or link to useful information directly from merge requests:
| [Code Quality](code_quality.md) | Analyze your source code quality using the [Code Climate](https://codeclimate.com/) analyzer and show the Code Climate report right in the merge request widget area. |
| [Display arbitrary job artifacts](../../../ci/yaml/README.md#artifactsexpose_as) | Configure CI pipelines with the `artifacts:expose_as` parameter to directly link to selected [artifacts](../../../ci/pipelines/job_artifacts.md) in merge requests. |
| [GitLab CI/CD](../../../ci/README.md) | Build, test, and deploy your code in a per-branch basis with built-in CI/CD. |
| [JUnit test reports](../../../ci/junit_test_reports.md) | Configure your CI jobs to use JUnit test reports, and let GitLab display a report on the merge request so that it's easier and faster to identify the failure without having to check the entire job log. |
| [Unit test reports](../../../ci/unit_test_reports.md) | Configure your CI jobs to use Unit test reports, and let GitLab display a report on the merge request so that it's easier and faster to identify the failure without having to check the entire job log. |
| [License Compliance](../../compliance/license_compliance/index.md) **(ULTIMATE)** | Manage the licenses of your dependencies. |
| [Metrics Reports](../../../ci/metrics_reports.md) **(PREMIUM)** | Display the Metrics Report on the merge request so that it's fast and easy to identify changes to important metrics. |
| [Multi-Project pipelines](../../../ci/multi_project_pipelines.md) **(PREMIUM)** | When you set up GitLab CI/CD across multiple projects, you can visualize the entire pipeline, including all cross-project interdependencies. |

View File

@ -0,0 +1,53 @@
# frozen_string_literal: true
require_relative '../../migration_helpers'
module RuboCop
module Cop
module Migration
class ComplexIndexesRequireName < RuboCop::Cop::Cop
include MigrationHelpers
MSG = 'indexes added with custom options must be explicitly named'
def_node_matcher :match_add_index_with_options, <<~PATTERN
(send _ {:add_concurrent_index} _ _ (hash $...))
PATTERN
def_node_matcher :name_option?, <<~PATTERN
(pair {(sym :name) (str "name")} _)
PATTERN
def_node_matcher :unique_option?, <<~PATTERN
(pair {(sym :unique) (str "unique")} _)
PATTERN
def on_def(node)
return unless in_migration?(node)
node.each_descendant(:send) do |send_node|
next unless add_index_offense?(send_node)
add_offense(send_node, location: :selector)
end
end
private
def add_index_offense?(send_node)
match_add_index_with_options(send_node) { |option_nodes| needs_name_option?(option_nodes) }
end
def needs_name_option?(option_nodes)
return false if only_unique_option?(option_nodes)
option_nodes.none? { |node| name_option?(node) }
end
def only_unique_option?(option_nodes)
option_nodes.size == 1 && unique_option?(option_nodes.first)
end
end
end
end
end

View File

@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { GlBanner } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper';
import CustomizeHomepageBanner from '~/pages/dashboard/projects/index/components/customize_homepage_banner.vue';
import axios from '~/lib/utils/axios_utils';
@ -10,18 +11,22 @@ const provide = {
preferencesBehaviorPath: 'some/behavior/path',
calloutsPath: 'call/out/path',
calloutsFeatureId: 'some-feature-id',
trackLabel: 'home_page',
};
const createComponent = () => {
return shallowMount(CustomizeHomepageBanner, { provide });
return shallowMount(CustomizeHomepageBanner, { provide, stubs: { GlBanner } });
};
describe('CustomizeHomepageBanner', () => {
let trackingSpy;
let mockAxios;
let wrapper;
beforeEach(() => {
mockAxios = new MockAdapter(axios);
document.body.dataset.page = 'some:page';
trackingSpy = mockTracking('_category_', undefined, jest.spyOn);
wrapper = createComponent();
});
@ -29,6 +34,7 @@ describe('CustomizeHomepageBanner', () => {
wrapper.destroy();
wrapper = null;
mockAxios.restore();
unmockTracking();
});
it('should render the banner when not dismissed', () => {
@ -47,4 +53,56 @@ describe('CustomizeHomepageBanner', () => {
it('includes the body text from options', () => {
expect(wrapper.html()).toContain(wrapper.vm.$options.i18n.body);
});
describe('tracking', () => {
const preferencesTrackingEvent = 'click_go_to_preferences';
const mockTrackingOnWrapper = () => {
unmockTracking();
trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
};
it('sets the needed data attributes for tracking button', async () => {
await wrapper.vm.$nextTick();
const button = wrapper.find(`[href='${wrapper.vm.preferencesBehaviorPath}']`);
expect(button.attributes('data-track-event')).toEqual(preferencesTrackingEvent);
expect(button.attributes('data-track-label')).toEqual(provide.trackLabel);
});
it('sends a tracking event when the banner is shown', () => {
const trackCategory = undefined;
const trackEvent = 'show_home_page_banner';
expect(trackingSpy).toHaveBeenCalledWith(trackCategory, trackEvent, {
label: provide.trackLabel,
});
});
it('sends a tracking event when the banner is dismissed', async () => {
mockTrackingOnWrapper();
mockAxios.onPost(provide.calloutsPath).replyOnce(200);
const trackCategory = undefined;
const trackEvent = 'click_dismiss';
wrapper.find(GlBanner).vm.$emit('close');
await wrapper.vm.$nextTick();
expect(trackingSpy).toHaveBeenCalledWith(trackCategory, trackEvent, {
label: provide.trackLabel,
});
});
it('sends a tracking event when the button is clicked', async () => {
mockTrackingOnWrapper();
mockAxios.onPost(provide.calloutsPath).replyOnce(200);
const button = wrapper.find(`[href='${wrapper.vm.preferencesBehaviorPath}']`);
triggerEvent(button.element);
await wrapper.vm.$nextTick();
expect(trackingSpy).toHaveBeenCalledWith('_category_', preferencesTrackingEvent, {
label: provide.trackLabel,
});
});
});
});

View File

@ -0,0 +1,78 @@
# frozen_string_literal: true
#
require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/complex_indexes_require_name'
RSpec.describe RuboCop::Cop::Migration::ComplexIndexesRequireName, type: :rubocop do
include CopHelper
subject(:cop) { described_class.new }
context 'in migration' do
before do
allow(cop).to receive(:in_migration?).and_return(true)
end
context 'when indexes are configured with an options hash, but no name' do
it 'registers an offense' do
expect_offense(<<~RUBY)
class TestComplexIndexes < ActiveRecord::Migration[6.0]
DOWNTIME = false
INDEX_NAME = 'my_test_name'
disable_ddl_transaction!
def up
add_concurrent_index :test_indexes, :column1
add_concurrent_index :test_indexes, :column2, where: "column2 = 'value'", order: { column4: :desc }
^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG}
add_concurrent_index :test_indexes, :column3, where: 'column3 = 10', name: 'idx_equal_to_10'
end
def down
add_concurrent_index :test_indexes, :column4, 'unique' => true
add_concurrent_index :test_indexes, :column4, 'unique' => true, where: 'column4 IS NOT NULL'
^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG}
add_concurrent_index :test_indexes, :column5, using: :gin, name: INDEX_NAME
add_concurrent_index :test_indexes, :column6, using: :gin, opclass: :gin_trgm_ops
^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG}
end
end
RUBY
expect(cop.offenses.map(&:cop_name)).to all(eq("Migration/#{described_class.name.demodulize}"))
end
end
end
context 'outside migration' do
before do
allow(cop).to receive(:in_migration?).and_return(false)
end
it 'registers no offenses' do
expect_no_offenses(<<~RUBY)
class TestComplexIndexes < ActiveRecord::Migration[6.0]
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :test_indexes, :column1, where: "some_column = 'value'"
end
def down
add_concurrent_index :test_indexes, :column2, unique: true
end
end
RUBY
end
end
end