Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2020-09-08 18:08:48 +00:00
parent dc47d7f5c0
commit ea8492330b
49 changed files with 476 additions and 303 deletions

View file

@ -14,5 +14,5 @@ indent_size = 2
indent_style = space
charset = utf-8
[*.{md,markdown}]
[*.{md,markdown,js.snap}]
trim_trailing_whitespace = false

View file

@ -1 +1 @@
72d42e8a398b0a0b91bfc145abb145ba36eaa682
4b9bbc37dd7bc587ee28fb7beab8b9c384a41e4c

View file

@ -46,6 +46,7 @@ export default {
{
key: 'actions',
label: '',
tdClass: 'text-right',
customStyle: { width: '35px' },
},
],

View file

@ -1,6 +1,6 @@
<script>
/* eslint-disable vue/no-v-html */
import { GlDeprecatedButton, GlIcon } from '@gitlab/ui';
import { GlButton, GlIcon } from '@gitlab/ui';
import { isString } from 'lodash';
import ProjectAvatar from '~/vue_shared/components/project_avatar/default.vue';
import highlight from '~/lib/utils/highlight';
@ -8,7 +8,7 @@ import { truncateNamespace } from '~/lib/utils/text_utility';
export default {
name: 'ProjectListItem',
components: { GlIcon, ProjectAvatar, GlDeprecatedButton },
components: { GlIcon, ProjectAvatar, GlButton },
props: {
project: {
type: Object,
@ -40,17 +40,16 @@ export default {
};
</script>
<template>
<gl-deprecated-button
class="d-flex align-items-center btn pt-1 pb-1 border-0 project-list-item"
<gl-button
category="tertiary"
class="gl-display-flex gl-align-items-center gl-justify-content-start! gl-mb-2 gl-w-full"
@click="onClick"
>
<gl-icon
class="gl-ml-3 gl-mr-3 flex-shrink-0 position-top-0 js-selected-icon"
:class="{ 'js-selected visible': selected, 'js-unselected invisible': !selected }"
name="mobile-issue-close"
/>
<project-avatar class="flex-shrink-0 js-project-avatar" :project="project" :size="32" />
<div class="d-flex flex-wrap project-namespace-name-container">
<div
class="gl-display-flex gl-align-items-center gl-flex-wrap project-namespace-name-container"
>
<gl-icon v-if="selected" class="js-selected-icon" name="mobile-issue-close" />
<project-avatar class="gl-flex-shrink-0 js-project-avatar" :project="project" :size="32" />
<div
v-if="truncatedNamespace"
:title="projectNameWithNamespace"
@ -65,5 +64,5 @@ export default {
v-html="highlightedProjectName"
></div>
</div>
</gl-deprecated-button>
</gl-button>
</template>

View file

@ -100,7 +100,7 @@ export default {
@bottomReached="bottomReached"
>
<template v-if="!showLoadingIndicator" #items>
<div class="d-flex flex-column">
<div class="gl-display-flex gl-flex-direction-column gl-p-3">
<project-list-item
v-for="project in projectSearchResults"
:key="project.id"

View file

@ -164,6 +164,10 @@
}
@include media-breakpoint-down(sm) {
.container-fluid .todos-list-container {
margin: 0 (-$gl-padding);
}
.todo {
.avatar {
display: none;

View file

@ -294,12 +294,12 @@ ul.related-merge-requests > li {
&::after {
content: image-url('icon_anchor.svg');
@include invisible(hidden);
visibility: hidden;
}
}
&:hover > a.anchor::after {
@include invisible(visible);
visibility: visible;
}
}
}

View file

@ -384,6 +384,13 @@
font-weight: $gl-font-weight-bold;
border: 0;
}
// When tables are "stacked", restore td padding
@media(max-width: map-get($grid-breakpoints, lg)) {
td {
padding-left: $gl-spacing-scale-5;
}
}
}
}

View file

@ -81,7 +81,7 @@
= link_to todos_filter_path(sort: sort_value_oldest_created) do
= sort_title_oldest_created
.js-todos-all
.todos-list-container.js-todos-all
- if @todos.any?
.js-todos-list-container
.js-todos-options{ data: { per_page: @todos.limit_value, current_page: @todos.current_page, total_pages: @todos.total_pages } }

View file

@ -167,7 +167,7 @@
= clipboard_button(text: source_branch, title: _('Copy branch name'), placement: "left", boundary: 'viewport')
.sidebar-mr-source-branch.hide-collapsed
%span
= _('Source branch: %{source_branch_open}${source_branch}%{source_branch_close}').html_safe % { source_branch_open: "<cite class='ref-name' title='#{source_branch}'>".html_safe, source_branch_close: "</cite>".html_safe, source_branch: source_branch }
= _('Source branch: %{source_branch_open}%{source_branch}%{source_branch_close}').html_safe % { source_branch_open: "<cite class='ref-name' title='#{source_branch}'>".html_safe, source_branch_close: "</cite>".html_safe, source_branch: source_branch }
= clipboard_button(text: source_branch, title: _('Copy branch name'), placement: "left", boundary: 'viewport')
- if issuable_sidebar.dig(:current_user, :can_move)

View file

@ -0,0 +1,5 @@
---
title: Fix padding on CI settings tables in mobile version
merge_request: 41728
author:
type: fixed

View file

@ -0,0 +1,5 @@
---
title: Remove Docker-in-Docker mode from Dependency Scanning documentation
merge_request: 40631
author:
type: removed

View file

@ -0,0 +1,5 @@
---
title: Handle todos api argument error
merge_request: 41167
author: gaga5lala
type: fixed

View file

@ -0,0 +1,5 @@
---
title: Widen TODO list only on mobile to be mobile-friendly
merge_request: 41244
author: Takuya Noguchi
type: other

View file

@ -0,0 +1,30 @@
# frozen_string_literal: true
class UpdateLocationFingerprintColumnForCs < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
BATCH_SIZE = 1_000
INTERVAL = 2.minutes
# 883_152 records
def up
return unless Gitlab.ee?
migration = Gitlab::BackgroundMigration::UpdateLocationFingerprintForContainerScanningFindings
migration_name = migration.to_s.demodulize
relation = migration::Finding.container_scanning
queue_background_migration_jobs_by_range_at_intervals(relation,
migration_name,
INTERVAL,
batch_size: BATCH_SIZE)
end
def down
# no-op
# intentionally blank
end
end

View file

@ -0,0 +1 @@
3cd8614d1d93340b4607d5270b54ec96b60b04a830c0a15a84b9843048515a12

View file

@ -21,12 +21,12 @@ Parameters:
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `action` | string | no | The action to be filtered. Can be `assigned`, `mentioned`, `build_failed`, `marked`, `approval_required`, `unmergeable` or `directly_addressed`. |
| `action` | string | no | The action to be filtered. Can be `assigned`, `mentioned`, `build_failed`, `marked`, `approval_required`, `unmergeable`, `directly_addressed` or `merge_train_removed`. |
| `author_id` | integer | no | The ID of an author |
| `project_id` | integer | no | The ID of a project |
| `group_id` | integer | no | The ID of a group |
| `state` | string | no | The state of the todo. Can be either `pending` or `done` |
| `type` | string | no | The type of a todo. Can be either `Issue` or `MergeRequest` |
| `type` | string | no | The type of a todo. Can be either `Issue`, `MergeRequest`, `DesignManagement::Design` or `AlertManagement::Alert` |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/todos"

View file

@ -179,12 +179,12 @@ You can override cache settings without overwriting the global cache by using
```yaml
cache: &global_cache
key: ${CI_COMMIT_REF_SLUG}
paths:
- node_modules/
- public/
- vendor/
policy: pull-push
key: ${CI_COMMIT_REF_SLUG}
paths:
- node_modules/
- public/
- vendor/
policy: pull-push
job:
cache:
@ -281,7 +281,7 @@ image: python:latest
# Change pip's cache directory to be inside the project directory since we can
# only cache local items.
variables:
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
# Pip's cache doesn't store the python packages
# https://pip.pypa.io/en/stable/reference/pip_install/#caching
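Returning to the `&global_cache` anchor shown earlier in this file, here is a minimal sketch (override value assumed) of how a job can reuse the anchor while overriding a single setting such as the policy:

```yaml
job:
  cache:
    # Inherit every setting from the global cache definition...
    <<: *global_cache
    # ...then override only the policy so this job downloads but never uploads the cache.
    policy: pull
```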

View file

@ -56,7 +56,7 @@ Some credentials are required to be able to run `aws` commands:
```yaml
deploy:
stage: deploy
image: registry.gitlab.com/gitlab-org/cloud-deploy/aws-base:latest # see the note below
image: registry.gitlab.com/gitlab-org/cloud-deploy/aws-base:latest # see the note below
script:
- aws s3 ...
- aws create-deployment ...
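For illustration, a minimal sketch (region, bucket name, and job layout are assumptions) of how such a job can consume AWS credentials stored as protected CI/CD variables instead of hard-coding them:

```yaml
deploy:
  stage: deploy
  image: registry.gitlab.com/gitlab-org/cloud-deploy/aws-base:latest
  variables:
    # Non-secret value; AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are read by
    # the aws CLI from protected CI/CD variables defined in the project settings.
    AWS_DEFAULT_REGION: us-east-1
  script:
    - aws s3 cp ./build s3://my-example-bucket --recursive # bucket name is an assumption
```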

View file

@ -85,13 +85,13 @@ This can be solved by adding your CA's certificate to the kaniko certificate
store:
```yaml
before_script:
- mkdir -p /kaniko/.docker
- echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > /kaniko/.docker/config.json
- |
echo "-----BEGIN CERTIFICATE-----
...
-----END CERTIFICATE-----" >> /kaniko/ssl/certs/additional-ca-cert-bundle.crt
before_script:
- mkdir -p /kaniko/.docker
- echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > /kaniko/.docker/config.json
- |
echo "-----BEGIN CERTIFICATE-----
...
-----END CERTIFICATE-----" >> /kaniko/ssl/certs/additional-ca-cert-bundle.crt
```
## Video walkthrough of a working example

View file

@ -228,6 +228,7 @@ deploy_terraform:
stage: deploy
script:
# Your Review App deployment scripts - for a working example please check https://gitlab.com/Flockademic/Flockademic/blob/5a45f1c2412e93810fab50e2dab8949e2d0633c7/.gitlab-ci.yml#L315
- echo
e2e:firefox:
stage: confidence-check
services:

View file

@ -566,15 +566,11 @@ Also set the variables `DB_HOST` to `mysql` and `DB_USERNAME` to `root`, which a
We define `DB_HOST` as `mysql` instead of `127.0.0.1`, as we use MySQL Docker image as a service which [is linked to the main Docker image](../../docker/using_docker_images.md#how-services-are-linked-to-the-job).
```yaml
...
variables:
MYSQL_DATABASE: homestead
MYSQL_ROOT_PASSWORD: secret
DB_HOST: mysql
DB_USERNAME: root
...
```
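For clarity, a sketch (MySQL image tag assumed) of the service definition these variables pair with; declaring the service is what makes the `mysql` host name resolvable from the job:

```yaml
services:
  # The MySQL service image reads MYSQL_DATABASE and MYSQL_ROOT_PASSWORD
  # from the variables defined above.
  - mysql:5.7
```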
#### Unit Test as the first job
@ -584,8 +580,6 @@ We defined the required shell scripts as an array of the [script](../../yaml/REA
These scripts are some Artisan commands that prepare the Laravel application, and, at the end of the script, we run the tests with `PHPUnit`.
```yaml
...
unit_test:
script:
# Install app dependencies
@ -598,8 +592,6 @@ unit_test:
- php artisan migrate
# Run tests
- vendor/bin/phpunit
...
```
#### Deploy to production
@ -615,8 +607,6 @@ The `only` keyword tells GitLab CI/CD that the job should be executed only when
Lastly, `when: manual` is used to turn the job from running automatically to a manual action.
```yaml
...
deploy_production:
script:
# Add the private SSH key to the build environment

View file

@ -73,24 +73,16 @@ Now that we created the script that contains all prerequisites for our build
environment, let's add it in `.gitlab-ci.yml`:
```yaml
...
before_script:
- bash ci/docker_install.sh > /dev/null
...
```
Last step, run the actual tests using `phpunit`:
```yaml
...
test:app:
script:
- phpunit --configuration phpunit_myapp.xml
...
```
Finally, commit your files and push them to GitLab to see your build succeeding
@ -103,7 +95,7 @@ The final `.gitlab-ci.yml` should look similar to this:
image: php:5.6
before_script:
# Install dependencies
# Install dependencies
- bash ci/docker_install.sh > /dev/null
test:app:
@ -118,7 +110,7 @@ with a different Docker image version and the runner will do the rest:
```yaml
before_script:
# Install dependencies
# Install dependencies
- bash ci/docker_install.sh > /dev/null
# We test PHP5.6
@ -231,8 +223,6 @@ In order to execute Composer before running your tests, simply add the
following in your `.gitlab-ci.yml`:
```yaml
...
# Composer stores all downloaded packages in the vendor/ directory.
# Do not use the following if the vendor/ directory is committed to
# your git repository.
@ -241,15 +231,13 @@ cache:
- vendor/
before_script:
# Install composer dependencies
# Install composer dependencies
- wget https://composer.github.io/installer.sig -O - -q | tr -d '\n' > installer.sig
- php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');"
- php -r "if (hash_file('SHA384', 'composer-setup.php') === file_get_contents('installer.sig')) { echo 'Installer verified'; } else { echo 'Installer corrupt'; unlink('composer-setup.php'); } echo PHP_EOL;"
- php composer-setup.php
- php -r "unlink('composer-setup.php'); unlink('installer.sig');"
- php composer.phar install
...
```
## Access private packages or dependencies

View file

@ -19,27 +19,27 @@ This is what the `.gitlab-ci.yml` file looks like for this project:
test:
stage: test
script:
- apt-get update -qy
- apt-get install -y nodejs
- bundle install --path /cache
- bundle exec rake db:create RAILS_ENV=test
- bundle exec rake test
- apt-get update -qy
- apt-get install -y nodejs
- bundle install --path /cache
- bundle exec rake db:create RAILS_ENV=test
- bundle exec rake test
staging:
stage: deploy
script:
- gem install dpl
- dpl --provider=heroku --app=gitlab-ci-ruby-test-staging --api-key=$HEROKU_STAGING_API_KEY
- gem install dpl
- dpl --provider=heroku --app=gitlab-ci-ruby-test-staging --api-key=$HEROKU_STAGING_API_KEY
only:
- master
- master
production:
stage: deploy
script:
- gem install dpl
- dpl --provider=heroku --app=gitlab-ci-ruby-test-prod --api-key=$HEROKU_PRODUCTION_API_KEY
- gem install dpl
- dpl --provider=heroku --app=gitlab-ci-ruby-test-prod --api-key=$HEROKU_PRODUCTION_API_KEY
only:
- tags
- tags
```
This project has three jobs:

View file

@ -120,7 +120,7 @@ stages:
- build
- test
- deploy
job 1:
stage: build
script: make build dependencies
@ -128,7 +128,7 @@ job 1:
job 2:
stage: build
script: make build artifacts
job3:
stage: test
script: make test

View file

@ -238,7 +238,6 @@ case it will apply to all jobs in the pipeline:
```yaml
my_job:
image: alpine
...
```
#### `post`
@ -284,7 +283,6 @@ stages:
my_job:
stage: build
...
```
#### `steps`
@ -297,7 +295,6 @@ my_job:
script:
- echo "hello! the current time is:"
- time
...
```
### Directives

View file

@ -199,7 +199,7 @@ trigger_a:
include: a/.gitlab-ci.yml
rules:
- changes:
- a/*
- a/*
trigger_b:
stage: triggers
@ -207,7 +207,7 @@ trigger_b:
include: b/.gitlab-ci.yml
rules:
- changes:
- b/*
- b/*
```
Example child `a` pipeline configuration, located in `/a/.gitlab-ci.yml`, making

View file

@ -130,20 +130,20 @@ deployed from its [project on GitLab.com](https://gitlab.com/gitlab-com/www-gitl
```yaml
# Team data
- source: 'data/team.yml' # data/team.yml
public: 'team/' # team/
- source: 'data/team.yml' # data/team.yml
public: 'team/' # team/
# Blogposts
- source: /source\/posts\/([0-9]{4})-([0-9]{2})-([0-9]{2})-(.+?)\..*/ # source/posts/2017-01-30-around-the-world-in-6-releases.html.md.erb
public: '\1/\2/\3/\4/' # 2017/01/30/around-the-world-in-6-releases/
- source: /source\/posts\/([0-9]{4})-([0-9]{2})-([0-9]{2})-(.+?)\..*/ # source/posts/2017-01-30-around-the-world-in-6-releases.html.md.erb
public: '\1/\2/\3/\4/' # 2017/01/30/around-the-world-in-6-releases/
# HTML files
- source: /source\/(.+?\.html).*/ # source/index.html.haml
public: '\1' # index.html
- source: /source\/(.+?\.html).*/ # source/index.html.haml
public: '\1' # index.html
# Other files
- source: /source\/(.*)/ # source/images/blogimages/around-the-world-in-6-releases-cover.png
public: '\1' # images/blogimages/around-the-world-in-6-releases-cover.png
- source: /source\/(.*)/ # source/images/blogimages/around-the-world-in-6-releases-cover.png
public: '\1' # images/blogimages/around-the-world-in-6-releases-cover.png
```
Mappings are defined as entries in the root YAML array, and are identified by a `-` prefix. Within an entry, there is a hash map with two keys:

View file

@ -91,8 +91,8 @@ to access it. This is where an SSH key pair comes in handy.
## Optionally, if you will be using any Git commands, set the user name and
## and email.
##
#- git config --global user.email "user@example.com"
#- git config --global user.name "User name"
# - git config --global user.email "user@example.com"
# - git config --global user.name "User name"
```
NOTE: **Note:**
@ -193,8 +193,8 @@ before_script:
## Replace example.com with your private server's domain name. Repeat that
## command if you have more than one server to connect to.
##
#- ssh-keyscan example.com >> ~/.ssh/known_hosts
#- chmod 644 ~/.ssh/known_hosts
# - ssh-keyscan example.com >> ~/.ssh/known_hosts
# - chmod 644 ~/.ssh/known_hosts
##
## You can optionally disable host key checking. Be aware that by adding that
@ -202,7 +202,7 @@ before_script:
## WARNING: Use this only with the Docker executor, if you use it with shell
## you will overwrite your user's SSH config.
##
#- '[[ -f /.dockerenv ]] && echo -e "Host *\n\tStrictHostKeyChecking no\n\n" >> ~/.ssh/config'
# - '[[ -f /.dockerenv ]] && echo -e "Host *\n\tStrictHostKeyChecking no\n\n" >> ~/.ssh/config'
```
## Example project

View file

@ -131,10 +131,10 @@ After you set a variable, call it from the `.gitlab-ci.yml` file:
test_variable:
stage: test
script:
- echo $CI_JOB_STAGE # calls a predefined variable
- echo $TEST # calls a custom variable of type `env_var`
- echo $GREETING # calls a custom variable of type `file` that contains the path to the temp file
- cat $GREETING # the temp file itself contains the variable value
- echo $CI_JOB_STAGE # calls a predefined variable
- echo $TEST # calls a custom variable of type `env_var`
- echo $GREETING # calls a custom variable of type `file` that contains the path to the temp file
- cat $GREETING # the temp file itself contains the variable value
```
The output is:
@ -511,7 +511,7 @@ build:
deploy:
stage: deploy
script:
- echo $BUILD_VERSION # => hello
- echo $BUILD_VERSION # => hello
dependencies:
- build
```
@ -530,7 +530,7 @@ build:
deploy:
stage: deploy
script:
- echo $BUILD_VERSION # => hello
- echo $BUILD_VERSION # => hello
needs:
- job: build
artifacts: true
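For completeness, a minimal sketch (file and variable names assumed) of the kind of `build` job that could produce `$BUILD_VERSION` for the `deploy` jobs above, by writing it to a dotenv report artifact:

```yaml
build:
  stage: build
  script:
    - echo "BUILD_VERSION=hello" >> build.env
  artifacts:
    reports:
      dotenv: build.env # variables in this report become available to dependent jobs
```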

View file

@ -491,7 +491,7 @@ include:
file: '/templates/.gitlab-ci-template.yml'
- project: 'my-group/my-project'
ref: 787123b47f14b552955ca2786bc9542ae66fee5b # Git SHA
ref: 787123b47f14b552955ca2786bc9542ae66fee5b # Git SHA
file: '/templates/.gitlab-ci-template.yml'
```
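The `ref` can also point to a branch or tag rather than a SHA; a sketch with an assumed tag name:

```yaml
include:
  - project: 'my-group/my-project'
    ref: 'v1.0.0' # Git tag (name assumed)
    file: '/templates/.gitlab-ci-template.yml'
```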
@ -1353,7 +1353,7 @@ job:
- if: '$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME =~ /^feature/'
when: manual
allow_failure: true
- if: '$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' # Checking for the presence of a variable is possible
- if: '$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' # Checking for the presence of a variable is possible
```
Some details regarding the logic that determines the `when` for the job:
@ -1538,11 +1538,11 @@ docker build:
script: docker build -t my-image:$CI_COMMIT_REF_SLUG .
rules:
- if: '$VAR == "string value"'
changes: # Will include the job and set to when:manual if any of the following paths match a modified file.
changes: # Will include the job and set to when:manual if any of the following paths match a modified file.
- Dockerfile
- docker/scripts/*
when: manual
# - when: never would be redundant here, this is implied any time rules are listed.
# - when: never would be redundant here, this is implied any time rules are listed.
```
Keywords such as `branches` or `refs` that are currently available for
@ -3085,7 +3085,7 @@ For example, to match a single file:
```yaml
test:
script: [ "echo 'test' > file.txt" ]
script: ["echo 'test' > file.txt"]
artifacts:
expose_as: 'artifact 1'
paths: ['file.txt']
@ -3098,7 +3098,7 @@ An example that will match an entire directory:
```yaml
test:
script: [ "mkdir test && echo 'test' > test/file.txt" ]
script: ["mkdir test && echo 'test' > test/file.txt"]
artifacts:
expose_as: 'artifact 1'
paths: ['test/']
@ -3893,15 +3893,15 @@ ios-release:
script:
- echo 'iOS release job'
release:
tag_name: v1.0.0-ios
description: 'iOS release v1.0.0'
tag_name: v1.0.0-ios
description: 'iOS release v1.0.0'
android-release:
script:
- echo 'Android release job'
release:
tag_name: v1.0.0-android
description: 'Android release v1.0.0'
tag_name: v1.0.0-android
description: 'Android release v1.0.0'
```
#### `release:tag_name`
@ -3973,25 +3973,24 @@ tags. These options cannot be used together, so choose one:
script:
- echo 'running release_job'
release:
name: 'Release $CI_COMMIT_TAG'
description: 'Created using the release-cli $EXTRA_DESCRIPTION' # $EXTRA_DESCRIPTION must be defined
tag_name: '$CI_COMMIT_TAG' # elsewhere in the pipeline.
ref: '$CI_COMMIT_TAG'
milestones:
- 'm1'
- 'm2'
- 'm3'
released_at: '2020-07-15T08:00:00Z' # Optional, will auto generate if not defined,
# or can use a variable.
name: 'Release $CI_COMMIT_TAG'
description: 'Created using the release-cli $EXTRA_DESCRIPTION' # $EXTRA_DESCRIPTION must be defined
tag_name: '$CI_COMMIT_TAG' # elsewhere in the pipeline.
ref: '$CI_COMMIT_TAG'
milestones:
- 'm1'
- 'm2'
- 'm3'
released_at: '2020-07-15T08:00:00Z' # Optional, will auto generate if not defined, or can use a variable.
```
- To create a release automatically when commits are pushed or merged to the default branch,
using a new Git tag that is defined with variables:
NOTE: **Note:**
Environment variables set in `before_script` or `script` are not available for expanding
in the same job. Read more about
[potentially making variables available for expanding](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/6400).
NOTE: **Note:**
Environment variables set in `before_script` or `script` are not available for expanding
in the same job. Read more about
[potentially making variables available for expanding](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/6400).
```yaml
prepare_job:
@ -4011,25 +4010,24 @@ in the same job. Read more about
stage: release
image: registry.gitlab.com/gitlab-org/release-cli:latest
needs:
- job: prepare_job
artifacts: true
- job: prepare_job
artifacts: true
rules:
- if: $CI_COMMIT_TAG
when: never # Do not run this job when a tag is created manually
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH # Run this job when commits are pushed or merged to the default branch
when: never # Do not run this job when a tag is created manually
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH # Run this job when commits are pushed or merged to the default branch
script:
- echo 'running release_job for $TAG'
release:
name: 'Release $TAG'
description: 'Created using the release-cli $EXTRA_DESCRIPTION' # $EXTRA_DESCRIPTION and the $TAG
tag_name: '$TAG' # variables must be defined elsewhere
ref: '$CI_COMMIT_SHA' # in the pipeline. For example, in the
milestones: # prepare_job
- 'm1'
- 'm2'
- 'm3'
released_at: '2020-07-15T08:00:00Z' # Optional, will auto generate if not defined,
# or can use a variable.
name: 'Release $TAG'
description: 'Created using the release-cli $EXTRA_DESCRIPTION' # $EXTRA_DESCRIPTION and the $TAG
tag_name: '$TAG' # variables must be defined elsewhere
ref: '$CI_COMMIT_SHA' # in the pipeline. For example, in the
milestones: # prepare_job
- 'm1'
- 'm2'
- 'm3'
released_at: '2020-07-15T08:00:00Z' # Optional, will auto generate if not defined, or can use a variable.
```
#### `release-cli` command line
@ -4664,9 +4662,9 @@ If you want to temporarily 'disable' a job, rather than commenting out all the
lines where the job is defined:
```yaml
#hidden_job:
# script:
# - run test
# hidden_job:
# script:
# - run test
```
You can instead start its name with a dot (`.`) and it won't be processed by

View file

@ -885,6 +885,11 @@ GitLab documentation from both the GitLab application and external sites.
Headings generate anchor links automatically when rendered. `## This is an example`
generates the anchor `#this-is-an-example`.
NOTE: **Note:**
[Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39717) in GitLab 13.4, [product badges](#product-badges) used in headings aren't included in the
generated anchor links. For example, when you link to
`## This is an example **(CORE)**`, use the anchor `#this-is-an-example`.
Keep in mind that the GitLab user interface links to many documentation pages
and anchor links to take the user to the right spot. Therefore, when you change
a heading, search `doc/*`, `app/views/*`, and `ee/app/views/*` for the old

View file

@ -1,3 +1,11 @@
---
type: reference, dev
stage: none
group: Development
info: "See the Technical Writers assigned to Development Guidelines: https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments-to-development-guidelines"
description: "GitLab development guidelines - testing best practices."
---
# Testing best practices
## Test Design
@ -15,21 +23,6 @@ manifest themselves within our code. When designing our tests, take time to revi
our test design. We can find some helpful heuristics documented in the Handbook in the
[Test Engineering](https://about.gitlab.com/handbook/engineering/quality/test-engineering/#test-heuristics) section.
## Test speed
GitLab has a massive test suite that, without [parallelization](ci.md#test-suite-parallelization-on-the-ci), can take hours
to run. It's important that we make an effort to write tests that are accurate
and effective _as well as_ fast.
Here are some things to keep in mind regarding test performance:
- `instance_double` and `spy` are faster than `FactoryBot.build(...)`
- `FactoryBot.build(...)` and `.build_stubbed` are faster than `.create`.
- Don't `create` an object when `build`, `build_stubbed`, `attributes_for`,
`spy`, or `instance_double` will do. Database persistence is slow!
- Don't mark a feature as requiring JavaScript (through `:js` in RSpec) unless it's _actually_ required for the test
to be valid. Headless browser testing is slow!
## RSpec
To run RSpec tests:
@ -57,6 +50,51 @@ bundle exec guard
When using spring and guard together, use `SPRING=1 bundle exec guard` instead to make use of spring.
### Test speed
GitLab has a massive test suite that, without [parallelization](ci.md#test-suite-parallelization-on-the-ci), can take hours
to run. It's important that we make an effort to write tests that are accurate
and effective _as well as_ fast.
Test performance is important to maintaining quality and velocity, and has a
direct impact on CI build times and thus fixed costs. We want thorough, correct,
and fast tests. Here you can find some information about tools and techniques
available to you to achieve that.
#### Don't request capabilities you don't need
We make it easy to add capabilities to our examples by annotating the example or
a parent context. Examples of these are:
- `:js` in feature specs, which runs a full JavaScript capable headless browser.
- `:clean_gitlab_redis_cache` which provides a clean Redis cache to the examples.
- `:request_store` which provides a request store to the examples.
We should reduce test dependencies where we can, and avoiding these
capabilities also reduces the amount of set-up needed.
`:js` is particularly important to avoid. This must only be used if the feature
test requires JavaScript reactivity in the browser, since using a headless
browser is much slower than parsing the HTML response from the app.
#### Optimize factory usage
A common cause of slow tests is excessive creation of objects, and thus
computation and DB time. Factories are essential to development, but they can
make inserting data into the DB so easy that we may be able to optimize.
The two basic techniques to bear in mind here are:
- **Reduce**: avoid creating objects, and avoid persisting them.
- **Reuse**: shared objects, especially nested ones we do not examine, can generally be shared.
To avoid creation, it is worth bearing in mind that:
- `instance_double` and `spy` are faster than `FactoryBot.build(...)`.
- `FactoryBot.build(...)` and `.build_stubbed` are faster than `.create`.
- Don't `create` an object when `build`, `build_stubbed`, `attributes_for`,
`spy`, or `instance_double` will do. Database persistence is slow!
Use [Factory Doctor](https://test-prof.evilmartians.io/#/profilers/factory_doctor) to find cases where database persistence is not needed in a given test.
```shell
@ -64,7 +102,7 @@ Use [Factory Doctor](https://test-prof.evilmartians.io/#/profilers/factory_docto
FDOC=1 bin/rspec spec/[path]/[to]/[spec].rb
```
A common change is to use `build` instead of `create`:
A common change is to use `build` or `build_stubbed` instead of `create`:
```ruby
# Old
@ -97,29 +135,133 @@ let_it_be(:project) { create(:project) }
A common cause of a large number of created factories is [factory cascades](https://github.com/test-prof/test-prof/blob/master/docs/profilers/factory_prof.md#factory-flamegraph), which result when factories create and recreate associations.
They can be identified by a noticeable difference between `total time` and `top-level time` numbers:
```shell
```plaintext
total top-level total time time per call top-level time name
208 0 9.5812s 0.0461s 0.0000s namespace
208 76 37.4214s 0.1799s 13.8749s project
```
In order to reuse a single factory for all implicit parent associations,
The table above shows us that we never create any `namespace` objects explicitly
(`top-level == 0`) - they are all created implicitly for us. But we still end up
with 208 of them (one for each project) and this takes 9.5 seconds.
In order to reuse a single object for all calls to a named factory in implicit parent associations,
[`FactoryDefault`](https://github.com/test-prof/test-prof/blob/master/docs/recipes/factory_default.md)
can be used:
```ruby
let_it_be(:namespace) { create_default(:namespace) }
```
Then every project we create will use this `namespace`, without us having to pass
it as `namespace: namespace`.
Maybe we don't need to create 208 different projects - we
can create one and reuse it. In addition, we can see that only about 1/3 of the
projects we create are ones we ask for (76/208), so there is benefit in setting
a default value for projects as well:
```ruby
let_it_be(:project) { create_default(:project) }
```
In this case, the `total time` and `top-level time` numbers match more closely:
```shell
```plaintext
total top-level total time time per call top-level time name
31 30 4.6378s 0.1496s 4.5366s project
8 8 0.0477s 0.0477s 0.0477s namespace
```
#### Identify slow tests
Running a spec with profiling is a good way to start optimizing a spec. This can
be done with:
```shell
bundle exec rspec --profile -- path/to/spec_file.rb
```
The output includes information like the following:
```plaintext
Top 10 slowest examples (10.69 seconds, 7.7% of total time):
Issue behaves like an editable mentionable creates new cross-reference notes when the mentionable text is edited
1.62 seconds ./spec/support/shared_examples/models/mentionable_shared_examples.rb:164
Issue relative positioning behaves like a class that supports relative positioning .move_nulls_to_end manages to move nulls to the end, stacking if we cannot create enough space
1.39 seconds ./spec/support/shared_examples/models/relative_positioning_shared_examples.rb:88
Issue relative positioning behaves like a class that supports relative positioning .move_nulls_to_start manages to move nulls to the end, stacking if we cannot create enough space
1.27 seconds ./spec/support/shared_examples/models/relative_positioning_shared_examples.rb:180
Issue behaves like an editable mentionable behaves like a mentionable extracts references from its reference property
0.99253 seconds ./spec/support/shared_examples/models/mentionable_shared_examples.rb:69
Issue behaves like an editable mentionable behaves like a mentionable creates cross-reference notes
0.94987 seconds ./spec/support/shared_examples/models/mentionable_shared_examples.rb:101
Issue behaves like an editable mentionable behaves like a mentionable when there are cached markdown fields sends in cached markdown fields when appropriate
0.94148 seconds ./spec/support/shared_examples/models/mentionable_shared_examples.rb:86
Issue behaves like an editable mentionable when there are cached markdown fields when the markdown cache is stale persists the refreshed cache so that it does not have to be refreshed every time
0.92833 seconds ./spec/support/shared_examples/models/mentionable_shared_examples.rb:153
Issue behaves like an editable mentionable when there are cached markdown fields refreshes markdown cache if necessary
0.88153 seconds ./spec/support/shared_examples/models/mentionable_shared_examples.rb:130
Issue behaves like an editable mentionable behaves like a mentionable generates a descriptive back-reference
0.86914 seconds ./spec/support/shared_examples/models/mentionable_shared_examples.rb:65
Issue#related_issues returns only authorized related issues for given user
0.84242 seconds ./spec/models/issue_spec.rb:335
Finished in 2 minutes 19 seconds (files took 1 minute 4.42 seconds to load)
277 examples, 0 failures, 1 pending
```
From this result, we can see the most expensive examples in our spec, giving us
a place to start. The fact that the most expensive examples here are in
shared examples means that any reductions are likely to have a larger impact as
they are called in multiple places.
#### Avoid repeating expensive actions
While isolated examples are very clear, and help serve the purpose of specs as
specification, the following example shows how we can combine expensive
actions:
```ruby
subject { described_class.new(arg_0, arg_1) }
it 'creates an event' do
expect { subject.execute }.to change(Event, :count).by(1)
end
it 'sets the frobulance' do
expect { subject.execute }.to change { arg_0.reset.frobulance }.to('wibble')
end
it 'schedules a background job' do
expect(BackgroundJob).to receive(:perform_async)
subject.execute
end
```
If the call to `subject.execute` is expensive, then we are repeating the same
action just to make different assertions. We can reduce this repetition by
combining the examples:
```ruby
it 'performs the expected side-effects' do
expect(BackgroundJob).to receive(:perform_async)
expect { subject.execute }
.to change(Event, :count).by(1)
.and change { arg_0.frobulance }.to('wibble')
end
```
Be careful doing this, as this sacrifices clarity and test independence for
performance gains.
When combining tests, consider using `:aggregate_failures`, so that the full
results are available, and not just the first failure.
### General guidelines
- Use a single, top-level `RSpec.describe ClassName` block.

View file

@ -90,32 +90,7 @@ That's needed when one totally relies on [custom analyzers](#custom-analyzers).
## Custom analyzers
### Custom analyzers with Docker-in-Docker
When Docker-in-Docker for Dependency Scanning is enabled,
you can provide your own analyzers as a comma-separated list of Docker images.
Here's how to add `analyzers/nuget` and `analyzers/perl` to the default images.
In `.gitlab-ci.yml` define:
```yaml
include:
template: Dependency-Scanning.gitlab-ci.yml
variables:
DS_ANALYZER_IMAGES: "my-docker-registry/analyzers/nuget,amy-docker-registry/analyzers/perl"
```
The values must be the full paths to the container registry images,
as you would pass them to the `docker pull` command.
NOTE: **Note:**
This configuration doesn't benefit from the integrated detection step. Dependency
Scanning has to fetch and spawn each Docker image to establish whether the
custom analyzer can scan the source code.
### Custom analyzers without Docker-in-Docker
When Docker-in-Docker for Dependency Scanning is disabled, you can provide your own analyzers by
You can provide your own analyzers by
defining CI jobs in your CI configuration. For consistency, you should suffix your custom Dependency
Scanning jobs with `-dependency_scanning`. Here's how to add a scanning job that's based on the
Docker image `my-docker-registry/analyzers/nuget` and generates a Dependency Scanning report
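A rough sketch of such a job (the entry point and report file name are assumptions; the real analyzer image defines its own command):

```yaml
nuget-dependency_scanning:
  image:
    name: "my-docker-registry/analyzers/nuget"
  script:
    - /analyzer run # assumed entry point of the custom analyzer
  artifacts:
    reports:
      dependency_scanning: gl-dependency-scanning-report.json
```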

View file

@ -49,8 +49,6 @@ CAUTION: **Caution:**
If you use your own Runners, make sure your installed version of Docker
is **not** `19.03.0`. See [troubleshooting information](#error-response-from-daemon-error-processing-tar-file-docker-tar-relocation-error) for details.
Beginning with GitLab 13.0, Docker privileged mode is necessary only if you've [enabled Docker-in-Docker for Dependency Scanning](#enabling-docker-in-docker).
## Supported languages and package managers
GitLab relies on [`rules`](../../../ci/yaml/README.md#rules) to start relevant analyzers depending on the languages detected in the repository.
@ -154,24 +152,10 @@ The following variables allow configuration of global dependency scanning settin
| --------------------------------------- |------------ |
| `SECURE_ANALYZERS_PREFIX` | Override the name of the Docker registry providing the official default images (proxy). Read more about [customizing analyzers](analyzers.md). |
| `DS_DEFAULT_ANALYZERS` | Override the names of the official default images. Read more about [customizing analyzers](analyzers.md). |
| `DS_DISABLE_DIND` | Disable Docker-in-Docker and run analyzers [individually](#enabling-docker-in-docker). This variable is `true` by default. |
| `ADDITIONAL_CA_CERT_BUNDLE` | Bundle of CA certs to trust. The bundle of certificates provided here is also used by other tools during the scanning process, such as `git`, `yarn`, or `npm`. |
| `DS_EXCLUDED_PATHS` | Exclude vulnerabilities from output based on the paths. A comma-separated list of patterns. Patterns can be globs, or file or folder paths (for example, `doc,spec`). Parent directories also match patterns. Default: `"spec, test, tests, tmp"` |
| `SECURE_LOG_LEVEL` | Set the minimum logging level. Messages of this logging level or higher are output. From highest to lowest severity, the logging levels are: `fatal`, `error`, `warn`, `info`, `debug`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/10880) in GitLab 13.1. Default: `info` |
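As a sketch (values chosen purely for illustration), these settings can be overridden from `.gitlab-ci.yml` alongside the template include:

```yaml
include:
  - template: Dependency-Scanning.gitlab-ci.yml

variables:
  SECURE_LOG_LEVEL: "debug"          # raise verbosity while troubleshooting
  DS_EXCLUDED_PATHS: "spec,test,tmp" # skip vulnerabilities found under these paths
```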
#### Configuring Docker-in-Docker orchestrator
The following variables configure the Docker-in-Docker orchestrator, and therefore are only used when the Docker-in-Docker mode is [enabled](#enabling-docker-in-docker).
| Environment variable | Default | Description |
| --------------------------------------- | ----------- | ----------- |
| `DS_ANALYZER_IMAGES` | | Comma-separated list of custom images. The official default images are still enabled. Read more about [customizing analyzers](analyzers.md). |
| `DS_ANALYZER_IMAGE_TAG` | | Override the Docker tag of the official default images. Read more about [customizing analyzers](analyzers.md). |
| `DS_PULL_ANALYZER_IMAGES` | | Pull the images from the Docker registry (set to `0` to disable). |
| `DS_DOCKER_CLIENT_NEGOTIATION_TIMEOUT` | 2m | Time limit for Docker client negotiation. Timeouts are parsed using Go's [`ParseDuration`](https://golang.org/pkg/time/#ParseDuration). Valid time units are `ns`, `us` (or `µs`), `ms`, `s`, `m`, or `h`. For example, `300ms`, `1.5h`, or `2h45m`. |
| `DS_PULL_ANALYZER_IMAGE_TIMEOUT` | 5m | Time limit when pulling an analyzer's image. Timeouts are parsed using Go's [`ParseDuration`](https://golang.org/pkg/time/#ParseDuration). Valid time units are `ns`, `us` (or `µs`), `ms`, `s`, `m`, or `h`. For example, `300ms`, `1.5h`, or `2h45m`. |
| `DS_RUN_ANALYZER_TIMEOUT` | 20m | Time limit when running an analyzer. Timeouts are parsed using Go's [`ParseDuration`](https://golang.org/pkg/time/#ParseDuration). Valid time units are `ns`, `us` (or `µs`), `ms`, `s`, `m`, or `h`. For example, `300ms`, `1.5h`, or `2h45m`. |
#### Configuring specific analyzers used by Dependency Scanning
The following variables are used for configuring specific analyzers (used for a specific language/framework).
@ -206,27 +190,6 @@ you can use the `MAVEN_CLI_OPTS` environment variable.
Read more on [how to use private Maven repositories](../index.md#using-private-maven-repos).
### Enabling Docker-in-Docker
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/12487) in GitLab Ultimate 12.5.
If needed, you can enable Docker-in-Docker to restore the Dependency Scanning behavior that existed
prior to GitLab 13.0. Follow these steps to do so:
1. Configure GitLab Runner with Docker-in-Docker in [privileged mode](https://docs.gitlab.com/runner/executors/docker.html#use-docker-in-docker-with-privileged-mode).
1. Set the `DS_DISABLE_DIND` variable to `false`:
```yaml
include:
- template: Dependency-Scanning.gitlab-ci.yml
variables:
DS_DISABLE_DIND: "false"
```
This creates a single `dependency_scanning` job in your CI/CD pipeline instead of multiple
`<analyzer-name>-dependency_scanning` jobs.
## Interacting with the vulnerabilities
Once a vulnerability is found, you can interact with it. Read more on how to
@ -389,7 +352,6 @@ jobs to run successfully. For more information, see [Offline environments](../of
Here are the requirements for using Dependency Scanning in an offline environment:
- Keep Docker-In-Docker disabled (default).
- GitLab Runner with the [`docker` or `kubernetes` executor](#requirements).
- Docker Container Registry with locally available copies of Dependency Scanning [analyzer](https://gitlab.com/gitlab-org/security-products/analyzers) images.
- Host an offline Git copy of the [gemnasium-db advisory database](https://gitlab.com/gitlab-org/security-products/gemnasium-db/).

View file

@ -243,9 +243,9 @@ The project settings for Merge request approvals are found by going to
#### Prevent overriding default approvals
By default, users are able to edit the approval rules in merge requests. If disabled,
the approval rules for all new merge requests will be determined by the
[default approval rules](#adding--editing-a-default-approval-rule). To disable this feature:
Regardless of the approval rules you choose for your project, users can edit them in every merge
request, overriding the rules you set as [default](#adding--editing-a-default-approval-rule).
To prevent that from happening:
1. Uncheck the **Can override approvers and approvals required per merge request** checkbox.
1. Click **Save changes**.
@ -268,14 +268,15 @@ from the UI. However, approvals will be reset if the target branch is changed.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/3349) in [GitLab Starter](https://about.gitlab.com/pricing/) 11.3.
You can allow merge request authors to self-approve merge requests. Authors
also need to be included in the approvers list in order to be able to
approve their merge request. To enable this feature:
By default, projects are configured to prevent merge requests from being approved by
their own authors. To change this setting:
1. Uncheck the **Prevent approval of merge requests by merge request author** checkbox,
which is enabled by default.
1. Go to your project's **Settings > General**, expand **Merge request approvals**.
1. Uncheck the **Prevent approval of merge requests by merge request author** checkbox.
1. Click **Save changes**.
Note that users can edit the approval rules in every merge request and override pre-defined settings unless the project is set [**not to allow** overrides](#prevent-overriding-default-approvals).
#### Prevent approval of merge requests by their committers
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/10441) in [GitLab Starter](https://about.gitlab.com/pricing/) 11.10.

View file

@ -39,8 +39,17 @@ module API
resource :todos do
helpers do
params :todo_filters do
optional :action, String, values: Todo::ACTION_NAMES.values.map(&:to_s)
optional :author_id, Integer
optional :state, String, values: Todo.state_machine.states.map(&:name).map(&:to_s)
optional :type, String, values: TodosFinder.todo_types
optional :project_id, Integer
optional :group_id, Integer
end
def find_todos
TodosFinder.new(current_user, params).execute
TodosFinder.new(current_user, declared_params(include_missing: false)).execute
end
def issuable_and_awardable?(type)
@ -72,7 +81,7 @@ module API
success Entities::Todo
end
params do
use :pagination
use :pagination, :todo_filters
end
get do
todos = paginate(find_todos.with_entity_associations)

View file

@ -0,0 +1,13 @@
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class UpdateLocationFingerprintForContainerScanningFindings
def perform(start_id, stop_id)
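# no-op in CE; the EE module prepended below supplies the actual implementation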
end
end
end
end
Gitlab::BackgroundMigration::UpdateLocationFingerprintForContainerScanningFindings.prepend_if_ee('EE::Gitlab::BackgroundMigration::UpdateLocationFingerprintForContainerScanningFindings')

View file

@ -23577,7 +23577,7 @@ msgstr ""
msgid "Source (branch or tag)"
msgstr ""
msgid "Source branch: %{source_branch_open}${source_branch}%{source_branch_close}"
msgid "Source branch: %{source_branch_open}%{source_branch}%{source_branch_close}"
msgstr ""
msgid "Source code"

View file

@ -47,4 +47,4 @@ exports[`AddContextCommitsModal renders modal with 2 tabs 1`] = `
</gl-tab-stub>
</gl-tabs-stub>
</gl-modal-stub>
`;
`;

View file

@ -29,7 +29,7 @@ describe('ProjectListItem component', () => {
it('does not render a check mark icon if selected === false', () => {
wrapper = shallowMount(Component, options);
expect(wrapper.find('.js-selected-icon.js-unselected').exists()).toBe(true);
expect(wrapper.find('.js-selected-icon').exists()).toBe(false);
});
it('renders a check mark icon if selected === true', () => {
@ -37,7 +37,7 @@ describe('ProjectListItem component', () => {
wrapper = shallowMount(Component, options);
expect(wrapper.find('.js-selected-icon.js-selected').exists()).toBe(true);
expect(wrapper.find('.js-selected-icon').exists()).toBe(true);
});
it(`emits a "clicked" event when clicked`, () => {

View file

@ -22,11 +22,12 @@ RSpec.describe StorageHelper do
end
describe "#storage_counters_details" do
let(:namespace) { create :namespace }
let(:project) do
let_it_be(:namespace) { create(:namespace) }
let_it_be(:project) do
create(:project,
namespace: namespace,
statistics: build(:project_statistics,
namespace: namespace,
repository_size: 10.kilobytes,
wiki_size: 10.bytes,
lfs_objects_size: 20.gigabytes,

View file

@ -175,12 +175,13 @@ RSpec.describe Namespace do
end
describe '.with_statistics' do
let(:namespace) { create :namespace }
let_it_be(:namespace) { create(:namespace) }
let(:project1) do
create(:project,
namespace: namespace,
statistics: build(:project_statistics,
namespace: namespace,
repository_size: 101,
wiki_size: 505,
lfs_objects_size: 202,
@ -193,6 +194,7 @@ RSpec.describe Namespace do
create(:project,
namespace: namespace,
statistics: build(:project_statistics,
namespace: namespace,
repository_size: 10,
wiki_size: 50,
lfs_objects_size: 20,

View file

@ -131,7 +131,7 @@ RSpec.describe Project do
end
it_behaves_like 'model with wiki' do
let(:container) { create(:project, :wiki_repo) }
let_it_be(:container) { create(:project, :wiki_repo) }
let(:container_without_wiki) { create(:project) }
end
@ -202,11 +202,11 @@ RSpec.describe Project do
end
describe '#members & #requesters' do
let(:project) { create(:project, :public) }
let(:requester) { create(:user) }
let(:developer) { create(:user) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:requester) { create(:user) }
let_it_be(:developer) { create(:user) }
before do
before_all do
project.request_access(requester)
project.add_developer(developer)
end
@ -453,9 +453,9 @@ RSpec.describe Project do
end
describe '#all_pipelines' do
let(:project) { create(:project) }
let_it_be(:project) { create(:project) }
before do
before_all do
create(:ci_pipeline, project: project, ref: 'master', source: :web)
create(:ci_pipeline, project: project, ref: 'master', source: :external)
end
@ -477,7 +477,7 @@ RSpec.describe Project do
end
describe '#has_packages?' do
let(:project) { create(:project, :public) }
let_it_be(:project) { create(:project, :public) }
subject { project.has_packages?(package_type) }
@ -517,9 +517,9 @@ RSpec.describe Project do
end
describe '#ci_pipelines' do
let(:project) { create(:project) }
let_it_be(:project) { create(:project) }
before do
before_all do
create(:ci_pipeline, project: project, ref: 'master', source: :web)
create(:ci_pipeline, project: project, ref: 'master', source: :external)
create(:ci_pipeline, project: project, ref: 'master', source: :webide)
@ -543,7 +543,7 @@ RSpec.describe Project do
describe '#autoclose_referenced_issues' do
context 'when DB entry is nil' do
let(:project) { create(:project, autoclose_referenced_issues: nil) }
let(:project) { build(:project, autoclose_referenced_issues: nil) }
it 'returns true' do
expect(project.autoclose_referenced_issues).to be_truthy
@ -551,7 +551,7 @@ RSpec.describe Project do
end
context 'when DB entry is true' do
let(:project) { create(:project, autoclose_referenced_issues: true) }
let(:project) { build(:project, autoclose_referenced_issues: true) }
it 'returns true' do
expect(project.autoclose_referenced_issues).to be_truthy
@ -559,7 +559,7 @@ RSpec.describe Project do
end
context 'when DB entry is false' do
let(:project) { create(:project, autoclose_referenced_issues: false) }
let(:project) { build(:project, autoclose_referenced_issues: false) }
it 'returns false' do
expect(project.autoclose_referenced_issues).to be_falsey
@ -769,8 +769,8 @@ RSpec.describe Project do
end
describe "#new_issuable_address" do
let(:project) { create(:project, path: "somewhere") }
let(:user) { create(:user) }
let_it_be(:project) { create(:project, path: "somewhere") }
let_it_be(:user) { create(:user) }
context 'incoming email enabled' do
before do
@ -851,11 +851,11 @@ RSpec.describe Project do
end
describe '#get_issue' do
let(:project) { create(:project) }
let!(:issue) { create(:issue, project: project) }
let(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let!(:issue) { create(:issue, project: project) }
before do
before_all do
project.add_developer(user)
end
@ -927,7 +927,7 @@ RSpec.describe Project do
end
describe '#issue_exists?' do
let(:project) { create(:project) }
let_it_be(:project) { create(:project) }
it 'is truthy when issue exists' do
expect(project).to receive(:get_issue).and_return(double)
@ -1020,7 +1020,7 @@ RSpec.describe Project do
end
describe '#cache_has_external_issue_tracker' do
let(:project) { create(:project, has_external_issue_tracker: nil) }
let_it_be(:project) { create(:project, has_external_issue_tracker: nil) }
it 'stores true if there is any external_issue_tracker' do
services = double(:service, external_issue_trackers: [RedmineService.new])
@ -1050,7 +1050,7 @@ RSpec.describe Project do
end
describe '#cache_has_external_wiki' do
let(:project) { create(:project, has_external_wiki: nil) }
let_it_be(:project) { create(:project, has_external_wiki: nil) }
it 'stores true if there is any external_wikis' do
services = double(:service, external_wikis: [ExternalWikiService.new])
@ -1116,7 +1116,7 @@ RSpec.describe Project do
end
describe '#external_wiki' do
let(:project) { create(:project) }
let_it_be(:project) { create(:project) }
context 'with an active external wiki' do
before do
@ -1736,7 +1736,7 @@ RSpec.describe Project do
end
describe '#visibility_level_allowed?' do
let(:project) { create(:project, :internal) }
let_it_be(:project) { create(:project, :internal) }
context 'when checking on non-forked project' do
it { expect(project.visibility_level_allowed?(Gitlab::VisibilityLevel::PRIVATE)).to be_truthy }
@ -1745,7 +1745,6 @@ RSpec.describe Project do
end
context 'when checking on forked project' do
let(:project) { create(:project, :internal) }
let(:forked_project) { fork_project(project) }
it { expect(forked_project.visibility_level_allowed?(Gitlab::VisibilityLevel::PRIVATE)).to be_truthy }
@ -1930,7 +1929,7 @@ RSpec.describe Project do
end
describe '.optionally_search' do
let(:project) { create(:project) }
let_it_be(:project) { create(:project) }
it 'searches for projects matching the query if one is given' do
relation = described_class.optionally_search(project.name)
@ -1987,7 +1986,7 @@ RSpec.describe Project do
end
describe '.search_by_title' do
let(:project) { create(:project, name: 'kittens') }
let_it_be(:project) { create(:project, name: 'kittens') }
it 'returns projects with a matching name' do
expect(described_class.search_by_title(project.name)).to eq([project])
@ -2003,11 +2002,11 @@ RSpec.describe Project do
end
context 'when checking projects from groups' do
let(:private_group) { create(:group, visibility_level: 0) }
let(:internal_group) { create(:group, visibility_level: 10) }
let(:private_group) { build(:group, visibility_level: 0) }
let(:internal_group) { build(:group, visibility_level: 10) }
let(:private_project) { create(:project, :private, group: private_group) }
let(:internal_project) { create(:project, :internal, group: internal_group) }
let(:private_project) { build(:project, :private, group: private_group) }
let(:internal_project) { build(:project, :internal, group: internal_group) }
context 'when group is private project can not be internal' do
it { expect(private_project.visibility_level_allowed?(Gitlab::VisibilityLevel::INTERNAL)).to be_falsey }
@ -2071,7 +2070,7 @@ RSpec.describe Project do
end
describe '#create_repository' do
let(:project) { create(:project, :repository) }
let_it_be(:project) { build(:project, :repository) }
context 'using a regular repository' do
it 'creates the repository' do
@ -2097,7 +2096,7 @@ RSpec.describe Project do
end
describe '#ensure_repository' do
let(:project) { create(:project, :repository) }
let_it_be(:project) { build(:project, :repository) }
it 'creates the repository if it does not exist' do
allow(project).to receive(:repository_exists?).and_return(false)
@ -2151,7 +2150,7 @@ RSpec.describe Project do
end
describe '#container_registry_url' do
let(:project) { create(:project) }
let_it_be(:project) { build(:project) }
subject { project.container_registry_url }
@ -2178,7 +2177,7 @@ RSpec.describe Project do
end
describe '#has_container_registry_tags?' do
let(:project) { create(:project) }
let(:project) { build(:project) }
context 'when container registry is enabled' do
before do
@ -2244,7 +2243,7 @@ RSpec.describe Project do
describe '#ci_config_path=' do
using RSpec::Parameterized::TableSyntax
let(:project) { create(:project) }
let(:project) { build_stubbed(:project) }
where(:default_ci_config_path, :project_ci_config_path, :expected_ci_config_path) do
nil | :notset | :default
@ -2299,8 +2298,8 @@ RSpec.describe Project do
end
describe '#latest_successful_build_for_ref' do
let(:project) { create(:project, :repository) }
let(:pipeline) { create_pipeline(project) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:pipeline) { create_pipeline(project) }
it_behaves_like 'latest successful build for sha or ref'
@ -2316,7 +2315,7 @@ RSpec.describe Project do
end
describe '#latest_pipeline' do
let(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let(:second_branch) { project.repository.branches[2] }
let!(:pipeline_for_default_branch) do
@ -2402,8 +2401,8 @@ RSpec.describe Project do
end
describe '#latest_successful_build_for_sha' do
let(:project) { create(:project, :repository) }
let(:pipeline) { create_pipeline(project) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:pipeline) { create_pipeline(project) }
it_behaves_like 'latest successful build for sha or ref'
@ -2411,8 +2410,8 @@ RSpec.describe Project do
end
describe '#latest_successful_build_for_ref!' do
let(:project) { create(:project, :repository) }
let(:pipeline) { create_pipeline(project) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:pipeline) { create_pipeline(project) }
context 'with many builds' do
it 'gives the latest builds from latest pipeline' do
@ -2485,7 +2484,7 @@ RSpec.describe Project do
end
describe '#jira_import_status' do
let(:project) { create(:project, import_type: 'jira') }
let_it_be(:project) { create(:project, import_type: 'jira') }
context 'when no jira imports' do
it 'returns none' do
@ -2691,7 +2690,7 @@ RSpec.describe Project do
end
describe '#remote_mirror_available?' do
let(:project) { create(:project) }
let(:project) { build_stubbed(:project) }
context 'when remote mirror global setting is enabled' do
it 'returns true' do
@ -2732,10 +2731,10 @@ RSpec.describe Project do
end
describe '#ancestors_upto' do
let(:parent) { create(:group) }
let(:child) { create(:group, parent: parent) }
let(:child2) { create(:group, parent: child) }
let(:project) { create(:project, namespace: child2) }
let_it_be(:parent) { create(:group) }
let_it_be(:child) { create(:group, parent: parent) }
let_it_be(:child2) { create(:group, parent: child) }
let_it_be(:project) { create(:project, namespace: child2) }
it 'returns all ancestors when no namespace is given' do
expect(project.ancestors_upto).to contain_exactly(child2, child, parent)
@ -2780,7 +2779,7 @@ RSpec.describe Project do
end
describe '#emails_disabled?' do
let(:project) { create(:project, emails_disabled: false) }
let(:project) { build(:project, emails_disabled: false) }
context 'emails disabled in group' do
it 'returns true' do
@ -2808,7 +2807,7 @@ RSpec.describe Project do
end
describe '#lfs_enabled?' do
let(:project) { create(:project) }
let(:project) { build(:project) }
shared_examples 'project overrides group' do
it 'returns true when enabled in project' do
@ -2870,7 +2869,7 @@ RSpec.describe Project do
end
describe '#change_head' do
let(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
it 'returns error if branch does not exist' do
expect(project.change_head('unexisted-branch')).to be false
@ -3065,7 +3064,7 @@ RSpec.describe Project do
end
describe '#pushes_since_gc' do
let(:project) { create(:project) }
let(:project) { build_stubbed(:project) }
after do
project.reset_pushes_since_gc
@ -3087,7 +3086,7 @@ RSpec.describe Project do
end
describe '#increment_pushes_since_gc' do
let(:project) { create(:project) }
let(:project) { build_stubbed(:project) }
after do
project.reset_pushes_since_gc
@ -3101,7 +3100,7 @@ RSpec.describe Project do
end
describe '#reset_pushes_since_gc' do
let(:project) { create(:project) }
let(:project) { build_stubbed(:project) }
after do
project.reset_pushes_since_gc
@ -3117,7 +3116,7 @@ RSpec.describe Project do
end
describe '#deployment_variables' do
let(:project) { create(:project) }
let(:project) { build_stubbed(:project) }
let(:environment) { 'production' }
let(:namespace) { 'namespace' }
@ -3194,7 +3193,7 @@ RSpec.describe Project do
end
describe '#default_environment' do
let(:project) { create(:project) }
let(:project) { build(:project) }
it 'returns production environment when it exists' do
production = create(:environment, name: "production", project: project)
@ -3216,7 +3215,7 @@ RSpec.describe Project do
end
describe '#ci_variables_for' do
let(:project) { create(:project) }
let_it_be(:project) { create(:project) }
let(:environment_scope) { '*' }
let!(:ci_variable) do
@ -3371,7 +3370,7 @@ RSpec.describe Project do
end
describe '#ci_instance_variables_for' do
let(:project) { create(:project) }
let(:project) { build_stubbed(:project) }
let!(:instance_variable) do
create(:ci_instance_variable, value: 'secret')

View file

@ -18,8 +18,10 @@ RSpec.describe UserAgentDetail do
end
describe '.valid?' do
let(:issue) { create(:issue) }
it 'is valid with a subject' do
detail = build(:user_agent_detail)
detail = build(:user_agent_detail, subject: issue)
expect(detail).to be_valid
end

View file

@ -3,14 +3,17 @@
require 'spec_helper'
RSpec.describe UserInteractedProject do
let_it_be(:project) { create(:project) }
let_it_be(:author) { project.creator }
describe '.track' do
subject { described_class.track(event) }
let(:event) { build(:event) }
let(:event) { build(:event, project: project, author: author) }
Event.actions.each_key do |action|
context "for all actions (event types)" do
let(:event) { build(:event, action: action) }
let(:event) { build(:event, project: project, author: author, action: action) }
it 'creates a record' do
expect { subject }.to change { described_class.count }.from(0).to(1)

View file

@ -44,7 +44,7 @@ RSpec.describe 'Adding a DiffNote' do
it_behaves_like 'a Note mutation when there are active record validation errors', model: DiffNote
context do
let(:diff_refs) { build(:merge_request).diff_refs } # Allow fake diff refs so arguments are valid
let(:diff_refs) { build(:commit).diff_refs } # Allow fake diff refs so arguments are valid
it_behaves_like 'a Note mutation when the given resource id is not for a Noteable'
end

View file

@ -47,7 +47,7 @@ RSpec.describe 'Adding an image DiffNote' do
it_behaves_like 'a Note mutation when there are active record validation errors', model: DiffNote
context do
let(:diff_refs) { build(:merge_request).diff_refs } # Allow fake diff refs so arguments are valid
let(:diff_refs) { build(:commit).diff_refs } # Allow fake diff refs so arguments are valid
it_behaves_like 'a Note mutation when the given resource id is not for a Noteable'
end

View file

@ -34,6 +34,29 @@ RSpec.describe API::Todos do
end
context 'when authenticated' do
context 'when invalid params' do
context "invalid action" do
it 'returns 400' do
get api('/todos', john_doe), params: { action: 'InvalidAction' }
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context "invalid state" do
it 'returns 400' do
get api('/todos', john_doe), params: { state: 'InvalidState' }
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context "invalid type" do
it 'returns 400' do
get api('/todos', john_doe), params: { type: 'InvalidType' }
expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
it 'returns an array of pending todos for current user' do
get api('/todos', john_doe)