Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-02-15 00:15:45 +00:00
parent e916a9880b
commit 99d074d787
17 changed files with 648 additions and 376 deletions

View file

@ -16,6 +16,12 @@ class CustomerRelations::IssueContact < ApplicationRecord
.pluck(:contact_id)
end
def self.delete_for_project(project_id)
joins(:issue)
.where(issues: { project_id: project_id })
.delete_all
end
private
def contact_belongs_to_issue_group_or_ancestor

View file

@ -2,11 +2,11 @@
# Projects::TransferService class
#
# Used for transfer project to another namespace
# Used to transfer a project to another namespace
#
# Ex.
# # Move projects to namespace with ID 17 by user
# Projects::TransferService.new(project, user, namespace_id: 17).execute
# # Move project to namespace by user
# Projects::TransferService.new(project, user).execute(namespace)
#
module Projects
class TransferService < BaseService
@ -103,6 +103,8 @@ module Projects
update_repository_configuration(@new_path)
remove_issue_contacts
execute_system_hooks
end
@ -254,6 +256,12 @@ module Projects
namespace_traversal_ids: new_namespace.traversal_ids
}
end
def remove_issue_contacts
return unless @old_group&.root_ancestor != @new_namespace&.root_ancestor
CustomerRelations::IssueContact.delete_for_project(project.id)
end
end
end

View file

@ -1,8 +1,8 @@
---
name: api_caching_tags
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/54975
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/324391
milestone: '13.10'
name: read_from_vulnerability_finding_evidence
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/79883
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/352632
milestone: '14.8'
type: development
group: group::source code
group: group::threat insights
default_enabled: false

View file

@ -697,6 +697,38 @@ Example response:
}
```
## Set user status
Set the status of the current user.
```plaintext
PUT /user/status
```
| Attribute | Type | Required | Description |
| -------------------- | ------ | -------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `emoji` | string | no | The name of the emoji to use as status. If omitted `speech_balloon` is used. Emoji name can be one of the specified names in the [Gemojione index](https://github.com/bonusly/gemojione/blob/master/config/index.json). |
| `message` | string | no | The message to set as a status. It can also contain emoji codes. |
| `clear_status_after` | string | no | Automatically clean up the status after a given time interval, allowed values: `30_minutes`, `3_hours`, `8_hours`, `1_day`, `3_days`, `7_days`, `30_days` |
When both parameters `emoji` and `message` are empty, the status is cleared. When the `clear_status_after` parameter is missing from the request, the previously set value for `clear_status_after` is cleared.
```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" --data "clear_status_after=1_day" --data "emoji=coffee" \
--data "message=I crave coffee" "https://gitlab.example.com/api/v4/user/status"
```
Example response:
```json
{
"emoji":"coffee",
"message":"I crave coffee",
"message_html": "I crave coffee",
"clear_status_at":"2021-02-15T10:49:01.311Z"
}
```
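Because the status is cleared when both `emoji` and `message` are empty, a `PUT` request that omits both parameters should reset your status. A minimal sketch, assuming the same token and host as the previous example:

```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
     "https://gitlab.example.com/api/v4/user/status"
```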
## Get user preferences
Get a list of currently authenticated user's preferences.
@ -744,38 +776,6 @@ Parameters:
| `view_diffs_file_by_file` | Yes | Flag indicating the user sees only one file diff per page. |
| `show_whitespace_in_diffs` | Yes | Flag indicating the user sees whitespace changes in diffs. |
## Set user status
Set the status of the current user.
```plaintext
PUT /user/status
```
| Attribute | Type | Required | Description |
| -------------------- | ------ | -------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `emoji` | string | no | The name of the emoji to use as status. If omitted `speech_balloon` is used. Emoji name can be one of the specified names in the [Gemojione index](https://github.com/bonusly/gemojione/blob/master/config/index.json). |
| `message` | string | no | The message to set as a status. It can also contain emoji codes. |
| `clear_status_after` | string | no | Automatically clean up the status after a given time interval, allowed values: `30_minutes`, `3_hours`, `8_hours`, `1_day`, `3_days`, `7_days`, `30_days`
When both parameters `emoji` and `message` are empty, the status is cleared. When the `clear_status_after` parameter is missing from the request, the previously set value for `"clear_status_after` is cleared.
```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" --data "clear_status_after=1_day" --data "emoji=coffee" \
--data "message=I crave coffee" "https://gitlab.example.com/api/v4/user/status"
```
Example responses
```json
{
"emoji":"coffee",
"message":"I crave coffee",
"message_html": "I crave coffee",
"clear_status_at":"2021-02-15T10:49:01.311Z"
}
```
## User Follow
### Follow and unfollow users

View file

@ -679,6 +679,14 @@ Use **press** when talking about keyboard keys. For example:
Do not use profanity. Doing so may negatively affect other users and contributors, which is contrary to the GitLab value of [Diversity, Inclusion, and Belonging](https://about.gitlab.com/handbook/values/#diversity-inclusion).
## provision
Use the term **provision** when referring to provisioning cloud infrastructure. You provision the infrastructure, and then deploy applications to it.
For example, you might write something like:
- Provision an AWS EKS cluster and deploy your application to it.
## push rules
Use lowercase for **push rules**.

View file

@ -154,7 +154,7 @@ To change the namespace linked to a subscription:
for that group.
1. Select **Proceed to checkout**.
Subscription charges are calculated based on the total number of users in a group, including its subgroups and nested projects. If the total number of users exceeds the number of seats in your subscription, your account is charged for the additional users.
Subscription charges are calculated based on the total number of users in a group, including its subgroups and nested projects. If the [total number of users](gitlab_com/index.md#view-seat-usage) exceeds the number of seats in your subscription, your account is charged for the additional users, and you must pay for the overage before you can change the linked namespace.
Only one namespace can be linked to a subscription.

View file

@ -461,7 +461,7 @@ Follow these steps to provide the bearer token with `FUZZAPI_OVERRIDES_ENV`:
```
1. To validate that authentication is working, run an API fuzzing test and review the fuzzing logs
and the test API's application logs.
and the test API's application logs. See the [overrides section](#overrides) for more information about override commands.
##### Token generated at test runtime
@ -495,7 +495,7 @@ variables:
FUZZAPI_PROFILE: Quick
FUZZAPI_OPENAPI: test-api-specification.json
FUZZAPI_TARGET_URL: http://test-deployment/
FUZZAPI_OVERRIDES_FILE: output/api-fuzzing-overrides.json
FUZZAPI_OVERRIDES_FILE: api-fuzzing-overrides.json
```
To validate that authentication is working, run an API fuzzing test and review the fuzzing logs and
@ -537,7 +537,7 @@ variables:
FUZZAPI_PROFILE: Quick-10
FUZZAPI_OPENAPI: test-api-specification.json
FUZZAPI_TARGET_URL: http://test-deployment/
FUZZAPI_OVERRIDES_FILE: output/api-fuzzing-overrides.json
FUZZAPI_OVERRIDES_FILE: api-fuzzing-overrides.json
FUZZAPI_OVERRIDES_CMD: renew_token.py
FUZZAPI_OVERRIDES_INTERVAL: 300
```
@ -577,6 +577,9 @@ profile increases as the number of tests increases.
|[`FUZZAPI_OVERRIDES_FILE`](#overrides) | Path to a JSON file containing overrides. |
|[`FUZZAPI_OVERRIDES_ENV`](#overrides) | JSON string containing headers to override. |
|[`FUZZAPI_OVERRIDES_CMD`](#overrides) | Overrides command. |
|[`FUZZAPI_OVERRIDES_CMD_VERBOSE`](#overrides) | When set to any value, shows the overrides command output as part of the job output. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/334578) in GitLab 14.8. |
|`FUZZAPI_PRE_SCRIPT` | Run user command or script before scan session starts. |
|`FUZZAPI_POST_SCRIPT` | Run user command or script after scan session has finished. |
|[`FUZZAPI_OVERRIDES_INTERVAL`](#overrides) | How often to run overrides command in seconds. Defaults to `0` (once). |
|[`FUZZAPI_HTTP_USERNAME`](#http-basic-authentication) | Username for HTTP authentication. |
|[`FUZZAPI_HTTP_PASSWORD`](#http-basic-authentication) | Password for HTTP authentication. |
@ -756,7 +759,7 @@ variables:
FUZZAPI_PROFILE: Quick
FUZZAPI_OPENAPI: test-api-specification.json
FUZZAPI_TARGET_URL: http://test-deployment/
FUZZAPI_OVERRIDES_FILE: output/api-fuzzing-overrides.json
FUZZAPI_OVERRIDES_FILE: api-fuzzing-overrides.json
```
#### Using a CI/CD variable
@ -801,16 +804,28 @@ variables:
If the value must be generated or regenerated on expiration, you can provide a program or script for
the API fuzzer to execute on a specified interval. The provided script runs in an Alpine Linux
container that has Python 3 and Bash installed. If the Python script requires additional packages,
it must detect this and install the packages at runtime. The script creates the overrides JSON file
as defined above.
container that has Python 3 and Bash installed.
Set the environment variable `FUZZAPI_OVERRIDES_CMD` to the program or script you want to execute. The provided command creates the overrides JSON file as defined previously.
You might want to install other scripting runtimes like Node.js or Ruby, or you might need to install a dependency for your overrides command. In this case, we recommend setting `FUZZAPI_PRE_SCRIPT` to the file path of a script that provides those prerequisites. The script provided by `FUZZAPI_PRE_SCRIPT` is executed once, before the analyzer starts.
See the [Alpine Linux package management](https://wiki.alpinelinux.org/wiki/Alpine_Linux_package_management)
page for information about installing Alpine Linux packages.
For correct operation, you must set all three of these CI/CD variables:
- `FUZZAPI_OVERRIDES_FILE`: File generated by the provided command.
- `FUZZAPI_OVERRIDES_CMD`: Command to generate JSON file.
- `FUZZAPI_OVERRIDES_CMD`: Overrides command that periodically generates the overrides JSON file.
- `FUZZAPI_OVERRIDES_INTERVAL`: Interval in seconds to run command.
Optionally:
- `FUZZAPI_PRE_SCRIPT`: Script to install runtimes or dependencies before the analyzer starts.
```yaml
stages:
- fuzz
@ -822,11 +837,167 @@ variables:
FUZZAPI_PROFILE: Quick
FUZZAPI_OPENAPI: test-api-specification.json
FUZZAPI_TARGET_URL: http://test-deployment/
FUZZAPI_OVERRIDES_FILE: output/api-fuzzing-overrides.json
FUZZAPI_OVERRIDES_FILE: api-fuzzing-overrides.json
FUZZAPI_OVERRIDES_CMD: renew_token.py
FUZZAPI_OVERRIDES_INTERVAL: 300
```
#### Debugging overrides
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/334578) in GitLab 14.8.
By default, the output of the overrides command is hidden. If the overrides command returns a non-zero exit code, the command is displayed as part of your job output. Optionally, you can set the variable `FUZZAPI_OVERRIDES_CMD_VERBOSE` to any value to display the overrides command output as it is generated. This is useful when testing your overrides script, but it should be disabled afterwards because it slows down testing.
It is also possible to write messages from your script to a log file that is collected when the job completes or fails. The log file must be created in a specific location and follow a naming convention.
Adding some basic logging to your overrides script is useful in case the script fails unexpectedly during normal running of the job. The log file is automatically included as an artifact of the job, allowing you to download it after the job has finished.
Following our example, we provide `renew_token.py` in the environment variable `FUZZAPI_OVERRIDES_CMD`. Note two things in the script:
- The log file is saved in the location indicated by the environment variable `CI_PROJECT_DIR`.
- The log file name must match `gl-*.log`.
```python
#!/usr/bin/env python

# Example of an overrides command

# Override commands can update the overrides JSON file
# with new values to be used. This is a great way to
# update an authentication token that will expire
# during testing.

import logging
import json
import os
import requests
import backoff

# [1] Store the log file in the directory indicated by the env var CI_PROJECT_DIR
working_directory = os.environ['CI_PROJECT_DIR']

# [2] The file name should match the pattern: gl-*.log
log_file_path = os.path.join(working_directory, 'gl-user-overrides.log')

# Set up logger
logging.basicConfig(filename=log_file_path, level=logging.DEBUG)


# Use the `backoff` decorator to retry in case of transient errors.
@backoff.on_exception(backoff.expo,
                      (requests.exceptions.Timeout,
                       requests.exceptions.ConnectionError),
                      max_time=30)
def get_auth_response():
    return requests.get('https://authorization.service/api/get_api_token',
                        auth=(os.environ['AUTH_USER'], os.environ['AUTH_PWD']))


# In our example, the access token is retrieved from a given endpoint
try:
    # Performs an HTTP request, response sample:
    # { "Token" : "b5638ae7-6e77-4585-b035-7d9de2e3f6b3" }
    response = get_auth_response()

    # Check that the request was successful. May raise `requests.exceptions.HTTPError`
    response.raise_for_status()

    # Gets the JSON data
    response_body = response.json()

# If needed, specific exceptions can be caught:
# requests.ConnectionError : A network connection error occurred
# requests.HTTPError : The HTTP request returned an unsuccessful status code [Response.raise_for_status()]
# requests.ConnectTimeout : The request timed out while trying to connect to the remote server
# requests.ReadTimeout : The server did not send any data in the allotted amount of time
# requests.TooManyRedirects : The request exceeded the configured number of maximum redirections
# requests.exceptions.RequestException : All exceptions related to Requests
except requests.exceptions.JSONDecodeError as json_decode_error:
    # logs errors related to decoding the JSON response
    # (caught before RequestException because it is a more specific subclass)
    logging.error(f'Error, failed while decoding JSON response. Error message: {json_decode_error}')
    raise
except requests.exceptions.RequestException as requests_error:
    # logs exceptions related to `Requests`
    logging.error(f'Error, failed while performing HTTP request. Error message: {requests_error}')
    raise
except Exception as e:
    # logs any other error
    logging.error(f'Error, unknown error while retrieving access token. Error message: {e}')
    raise

# Computes the object that holds the overrides file content.
# It uses data fetched from the request.
overrides_data = {
    "headers": {
        "Authorization": f"Token {response_body['Token']}"
    }
}

# Log entry informing about the overrides file computation
overrides_file_path = os.path.join(
    working_directory, "api-fuzzing-overrides.json")
logging.info("Creating overrides file: %s" % overrides_file_path)

# Attempts to overwrite the file
try:
    if os.path.exists(overrides_file_path):
        os.unlink(overrides_file_path)

    # Overwrites the file with our updated dictionary
    with open(overrides_file_path, "wb+") as fd:
        fd.write(json.dumps(overrides_data).encode('utf-8'))
except Exception as e:
    # logs any other error
    logging.error(f'Error, unknown error when overwriting file {overrides_file_path}. Error message: {e}')
    raise

# Logs that the override has finished successfully
logging.info("Override file has been updated")

# end
```
In the overrides command example, the Python script depends on the `backoff` library. To make sure the library is installed before the Python script runs, `FUZZAPI_PRE_SCRIPT` is set to a script that installs the dependencies of your overrides command.
For example, the following `user-pre-scan-set-up.sh` script installs those dependencies:
```shell
#!/bin/bash

# user-pre-scan-set-up.sh
# Ensures python dependencies are installed

echo "**** install python dependencies ****"

python3 -m ensurepip
pip3 install --no-cache --upgrade \
    pip \
    backoff

echo "**** python dependencies installed ****"
# end
```
Update your configuration to set `FUZZAPI_PRE_SCRIPT` to the new `user-pre-scan-set-up.sh` script. For example:
```yaml
stages:
  - fuzz

include:
  - template: API-Fuzzing.gitlab-ci.yml

variables:
  FUZZAPI_PROFILE: Quick
  FUZZAPI_OPENAPI: test-api-specification.json
  FUZZAPI_TARGET_URL: http://test-deployment/
  FUZZAPI_PRE_SCRIPT: user-pre-scan-set-up.sh
  FUZZAPI_OVERRIDES_FILE: api-fuzzing-overrides.json
  FUZZAPI_OVERRIDES_CMD: renew_token.py
  FUZZAPI_OVERRIDES_INTERVAL: 300
```
In the previous sample, you could also use the `user-pre-scan-set-up.sh` script to install additional runtimes or applications that your overrides command uses later, as sketched below.
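A sketch of such an extended pre-script, assuming the overrides command also needs Node.js (the `nodejs` and `npm` package names come from the Alpine repositories; adjust them to your needs):

```shell
#!/bin/bash

# user-pre-scan-set-up.sh (hypothetical extended variant)
# Installs an extra runtime in addition to the Python dependencies
# used by renew_token.py.

echo "**** install system packages ****"

# Node.js and npm from the Alpine package repositories
apk add --no-cache nodejs npm

echo "**** install python dependencies ****"

python3 -m ensurepip
pip3 install --no-cache --upgrade \
    pip \
    backoff

echo "**** dependencies installed ****"
# end
```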
### Exclude Paths
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/211892) in GitLab 14.0.

View file

@ -533,7 +533,7 @@ variables:
DAST_API_PROFILE: Quick
DAST_API_OPENAPI: test-api-specification.json
DAST_API_TARGET_URL: http://test-deployment/
DAST_API_OVERRIDES_FILE: output/dast-api-overrides.json
DAST_API_OVERRIDES_FILE: dast-api-overrides.json
```
To validate that authentication is working, run a DAST API test and review the job logs and
@ -575,13 +575,12 @@ variables:
DAST_API_PROFILE: Quick
DAST_API_OPENAPI: test-api-specification.json
DAST_API_TARGET_URL: http://test-deployment/
DAST_API_OVERRIDES_FILE: output/dast-api-overrides.json
DAST_API_OVERRIDES_FILE: dast-api-overrides.json
DAST_API_OVERRIDES_CMD: renew_token.py
DAST_API_OVERRIDES_INTERVAL: 300
```
To validate that authentication is working, run an DAST API test and review the job logs and
the test API's application logs.
To validate that authentication is working, run a DAST API test and review the job logs and the test API's application logs. See the [overrides section](#overrides) for more information about override commands.
### Configuration files
@ -648,6 +647,9 @@ can be added, removed, and modified by creating a custom configuration.
|[`DAST_API_OVERRIDES_FILE`](#overrides) | Path to a JSON file containing overrides. |
|[`DAST_API_OVERRIDES_ENV`](#overrides) | JSON string containing headers to override. |
|[`DAST_API_OVERRIDES_CMD`](#overrides) | Overrides command. |
|[`DAST_API_OVERRIDES_CMD_VERBOSE`](#overrides) | When set to any value, shows the overrides command output as part of the job output. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/334578) in GitLab 14.6. |
|`DAST_API_PRE_SCRIPT` | Run user command or script before scan session starts. |
|`DAST_API_POST_SCRIPT` | Run user command or script after scan session has finished. |
|[`DAST_API_OVERRIDES_INTERVAL`](#overrides) | How often to run overrides command in seconds. Defaults to `0` (once). |
|[`DAST_API_HTTP_USERNAME`](#http-basic-authentication) | Username for HTTP authentication. |
|[`DAST_API_HTTP_PASSWORD`](#http-basic-authentication) | Password for HTTP authentication. |
@ -829,7 +831,7 @@ variables:
DAST_API_PROFILE: Quick
DAST_API_OPENAPI: test-api-specification.json
DAST_API_TARGET_URL: http://test-deployment/
DAST_API_OVERRIDES_FILE: output/dast-api-overrides.json
DAST_API_OVERRIDES_FILE: dast-api-overrides.json
```
#### Using a CI/CD variable
@ -873,17 +875,29 @@ variables:
#### Using a command
If the value must be generated or regenerated on expiration, you can provide a program or script for
the DAST API scanner to execute on a specified interval. The provided script runs in an Alpine Linux
container that has Python 3 and Bash installed. If the Python script requires additional packages,
it must detect this and install the packages at runtime. The script creates the overrides JSON file
as defined above.
the DAST API scanner to execute on a specified interval. The provided command runs in an Alpine Linux
container that has Python 3 and Bash installed.
Set the environment variable `DAST_API_OVERRIDES_CMD` to the program or script you want to execute. The provided command creates the overrides JSON file as defined previously.
You might want to install other scripting runtimes like Node.js or Ruby, or you might need to install a dependency for your overrides command. In this case, we recommend setting `DAST_API_PRE_SCRIPT` to the file path of a script that provides those prerequisites. The script provided by `DAST_API_PRE_SCRIPT` is executed once, before the analyzer starts.
See the [Alpine Linux package management](https://wiki.alpinelinux.org/wiki/Alpine_Linux_package_management)
page for information about installing Alpine Linux packages.
For correct operation, you must set all three of these CI/CD variables:
- `DAST_API_OVERRIDES_FILE`: File generated by the provided command.
- `DAST_API_OVERRIDES_CMD`: Command to generate JSON file.
- `DAST_API_OVERRIDES_CMD`: Overrides command that periodically generates the overrides JSON file.
- `DAST_API_OVERRIDES_INTERVAL`: Interval in seconds to run command.
Optionally:
- `DAST_API_PRE_SCRIPT`: Script to install runtimes or dependencies before the scan starts.
```yaml
stages:
- dast
@ -895,11 +909,167 @@ variables:
DAST_API_PROFILE: Quick
DAST_API_OPENAPI: test-api-specification.json
DAST_API_TARGET_URL: http://test-deployment/
DAST_API_OVERRIDES_FILE: output/dast-api-overrides.json
DAST_API_OVERRIDES_FILE: dast-api-overrides.json
DAST_API_OVERRIDES_CMD: renew_token.py
DAST_API_OVERRIDES_INTERVAL: 300
```
#### Debugging overrides
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/334578) in GitLab 14.8.
By default, the output of the overrides command is hidden. If the overrides command returns a non-zero exit code, the command is displayed as part of your job output. Optionally, you can set the variable `DAST_API_OVERRIDES_CMD_VERBOSE` to any value to display the overrides command output as it is generated. This is useful when testing your overrides script, but it should be disabled afterwards because it slows down testing.
It is also possible to write messages from your script to a log file that is collected when the job completes or fails. The log file must be created in a specific location and follow a naming convention.
Adding some basic logging to your overrides script is useful in case the script fails unexpectedly during normal running of the job. The log file is automatically included as an artifact of the job, allowing you to download it after the job has finished.
Following our example, we provide `renew_token.py` in the environment variable `DAST_API_OVERRIDES_CMD`. Note two things in the script:
- The log file is saved in the location indicated by the environment variable `CI_PROJECT_DIR`.
- The log file name must match `gl-*.log`.
```python
#!/usr/bin/env python

# Example of an overrides command

# Override commands can update the overrides JSON file
# with new values to be used. This is a great way to
# update an authentication token that will expire
# during testing.

import logging
import json
import os
import requests
import backoff

# [1] Store the log file in the directory indicated by the env var CI_PROJECT_DIR
working_directory = os.environ['CI_PROJECT_DIR']

# [2] The file name should match the pattern: gl-*.log
log_file_path = os.path.join(working_directory, 'gl-user-overrides.log')

# Set up logger
logging.basicConfig(filename=log_file_path, level=logging.DEBUG)


# Use the `backoff` decorator to retry in case of transient errors.
@backoff.on_exception(backoff.expo,
                      (requests.exceptions.Timeout,
                       requests.exceptions.ConnectionError),
                      max_time=30)
def get_auth_response():
    return requests.get('https://authorization.service/api/get_api_token',
                        auth=(os.environ['AUTH_USER'], os.environ['AUTH_PWD']))


# In our example, the access token is retrieved from a given endpoint
try:
    # Performs an HTTP request, response sample:
    # { "Token" : "b5638ae7-6e77-4585-b035-7d9de2e3f6b3" }
    response = get_auth_response()

    # Check that the request was successful. May raise `requests.exceptions.HTTPError`
    response.raise_for_status()

    # Gets the JSON data
    response_body = response.json()

# If needed, specific exceptions can be caught:
# requests.ConnectionError : A network connection error occurred
# requests.HTTPError : The HTTP request returned an unsuccessful status code [Response.raise_for_status()]
# requests.ConnectTimeout : The request timed out while trying to connect to the remote server
# requests.ReadTimeout : The server did not send any data in the allotted amount of time
# requests.TooManyRedirects : The request exceeded the configured number of maximum redirections
# requests.exceptions.RequestException : All exceptions related to Requests
except requests.exceptions.JSONDecodeError as json_decode_error:
    # logs errors related to decoding the JSON response
    # (caught before RequestException because it is a more specific subclass)
    logging.error(f'Error, failed while decoding JSON response. Error message: {json_decode_error}')
    raise
except requests.exceptions.RequestException as requests_error:
    # logs exceptions related to `Requests`
    logging.error(f'Error, failed while performing HTTP request. Error message: {requests_error}')
    raise
except Exception as e:
    # logs any other error
    logging.error(f'Error, unknown error while retrieving access token. Error message: {e}')
    raise

# Computes the object that holds the overrides file content.
# It uses data fetched from the request.
overrides_data = {
    "headers": {
        "Authorization": f"Token {response_body['Token']}"
    }
}

# Log entry informing about the overrides file computation
# The location of the overrides JSON file is also CI_PROJECT_DIR
overrides_file_path = os.path.join(
    working_directory, "dast-api-overrides.json")
logging.info("Creating overrides file: %s" % overrides_file_path)

# Attempts to overwrite the file
try:
    if os.path.exists(overrides_file_path):
        os.unlink(overrides_file_path)

    # Overwrites the file with our updated dictionary
    with open(overrides_file_path, "wb+") as fd:
        fd.write(json.dumps(overrides_data).encode('utf-8'))
except Exception as e:
    # logs any other error
    logging.error(f'Error, unknown error when overwriting file {overrides_file_path}. Error message: {e}')
    raise

# Logs that the override has finished successfully
logging.info("Override file has been updated")

# end
```
In the overrides command example, the Python script depends on the `backoff` library. To make sure the library is installed before the Python script runs, `DAST_API_PRE_SCRIPT` is set to a script that installs the dependencies of your overrides command.
For example, the following `user-pre-scan-set-up.sh` script installs those dependencies:
```shell
#!/bin/bash

# user-pre-scan-set-up.sh
# Ensures python dependencies are installed

echo "**** install python dependencies ****"

python3 -m ensurepip
pip3 install --no-cache --upgrade \
    pip \
    backoff

echo "**** python dependencies installed ****"
# end
```
Update your configuration to set `DAST_API_PRE_SCRIPT` to the new `user-pre-scan-set-up.sh` script. For example:
```yaml
stages:
  - dast

include:
  - template: DAST-API.gitlab-ci.yml

variables:
  DAST_API_PROFILE: Quick
  DAST_API_OPENAPI: test-api-specification.json
  DAST_API_TARGET_URL: http://test-deployment/
  DAST_API_PRE_SCRIPT: user-pre-scan-set-up.sh
  DAST_API_OVERRIDES_FILE: dast-api-overrides.json
  DAST_API_OVERRIDES_CMD: renew_token.py
  DAST_API_OVERRIDES_INTERVAL: 300
```
In the previous sample, you could also use the `user-pre-scan-set-up.sh` script to install additional runtimes or applications that your overrides command uses later, as sketched below.
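A sketch of such an extended pre-script, assuming the overrides command also needs Ruby (the `ruby` package name comes from the Alpine repositories; adjust it to your needs):

```shell
#!/bin/bash

# user-pre-scan-set-up.sh (hypothetical extended variant)
# Installs an extra runtime in addition to the Python dependencies
# used by renew_token.py.

echo "**** install system packages ****"

# Ruby from the Alpine package repositories
apk add --no-cache ruby

echo "**** install python dependencies ****"

python3 -m ensurepip
pip3 install --no-cache --upgrade \
    pip \
    backoff

echo "**** dependencies installed ****"
# end
```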
### Exclude Paths
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/211892) in GitLab 14.0.

View file

@ -62,10 +62,13 @@ module API
end
def add_single_member_by_user_id(create_service_params)
source = create_service_params[:source]
user_id = create_service_params[:user_ids]
user = User.find_by(id: user_id) # rubocop: disable CodeReuse/ActiveRecord
if user
conflict!('Member already exists') if member_already_exists?(source, user_id)
instance = ::Members::CreateService.new(current_user, create_service_params)
instance.execute
@ -87,6 +90,12 @@ module API
def add_single_member?(user_id)
user_id.present?
end
private
def member_already_exists?(source, user_id)
source.members.exists?(user_id: user_id) # rubocop: disable CodeReuse/ActiveRecord
end
end
end
end

View file

@ -33,11 +33,7 @@ module API
paginated_tags = Gitlab::Pagination::GitalyKeysetPager.new(self, user_project).paginate(tags_finder)
if Feature.enabled?(:api_caching_tags, user_project, type: :development)
present_cached paginated_tags, with: Entities::Tag, project: user_project, cache_context: -> (_tag) { user_project.cache_key }
else
present paginated_tags, with: Entities::Tag, project: user_project
end
present_cached paginated_tags, with: Entities::Tag, project: user_project, cache_context: -> (_tag) { user_project.cache_key }
rescue Gitlab::Git::InvalidPageToken => e
unprocessable_entity!(e.message)

View file

@ -57,7 +57,7 @@
"@gitlab/at.js": "1.5.7",
"@gitlab/favicon-overlay": "2.0.0",
"@gitlab/svgs": "2.5.0",
"@gitlab/ui": "35.1.0",
"@gitlab/ui": "36.1.0",
"@gitlab/visual-review-tools": "1.6.1",
"@rails/actioncable": "6.1.4-1",
"@rails/ujs": "6.1.4-1",

View file

@ -80,4 +80,12 @@ RSpec.describe CustomerRelations::IssueContact do
expect { described_class.find_contact_ids_by_emails(issue.id, Array(0..too_many_emails)) }.to raise_error(ArgumentError)
end
end
describe '.delete_for_project' do
let_it_be(:issue_contacts) { create_list(:issue_customer_relations_contact, 3, :for_issue, issue: create(:issue, project: project)) }
it 'destroys all issue_contacts for project' do
expect { described_class.delete_for_project(project.id) }.to change { described_class.count }.by(-3)
end
end
end

View file

@ -291,25 +291,6 @@ RSpec.describe API::Members do
user: maintainer
)
end
context 'with an already existing member' do
before do
source.add_developer(stranger)
end
it 'tracks the invite source from params' do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: params.merge(invite_source: '_invite_source_')
expect_snowplow_event(
category: 'Members::CreateService',
action: 'create_member',
label: '_invite_source_',
property: 'existing_user',
user: maintainer
)
end
end
end
context 'when executing the Members::CreateService for multiple user_ids' do
@ -418,49 +399,6 @@ RSpec.describe API::Members do
expect(member.tasks_to_be_done).to match_array([:code, :ci])
expect(member.member_task.project_id).to eq(project_id)
end
context 'with already existing member' do
before do
source.add_developer(stranger)
end
it 'does not update tasks to be done if tasks already exist', :aggregate_failures do
member = source.members.find_by(user_id: stranger.id)
create(:member_task, member: member, project_id: project_id, tasks_to_be_done: %w(code ci))
expect do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: {
user_id: stranger.id,
access_level: Member::DEVELOPER,
tasks_to_be_done: %w(issues),
tasks_project_id: project_id
}
end.not_to change(MemberTask, :count)
member.reset
expect(response).to have_gitlab_http_status(:created)
expect(member.tasks_to_be_done).to match_array([:code, :ci])
expect(member.member_task.project_id).to eq(project_id)
end
it 'adds tasks to be done if they do not exist', :aggregate_failures do
expect do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: {
user_id: stranger.id,
access_level: Member::DEVELOPER,
tasks_to_be_done: %w(issues),
tasks_project_id: project_id
}
end.to change(MemberTask, :count).by(1)
member = source.members.find_by(user_id: stranger.id)
expect(response).to have_gitlab_http_status(:created)
expect(member.tasks_to_be_done).to match_array([:issues])
expect(member.member_task.project_id).to eq(project_id)
end
end
end
context 'when there are multiple users to add' do
@ -474,68 +412,16 @@ RSpec.describe API::Members do
expect(member.member_task.project_id).to eq(project_id)
end
end
context 'with already existing members' do
before do
source.add_developer(stranger)
source.add_developer(developer)
end
it 'does not update tasks to be done if tasks already exist', :aggregate_failures do
members = source.members.where(user_id: [developer.id, stranger.id])
members.each do |member|
create(:member_task, member: member, project_id: project_id, tasks_to_be_done: %w(code ci))
end
expect do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: {
user_id: [developer.id, stranger.id].join(','),
access_level: Member::DEVELOPER,
tasks_to_be_done: %w(issues),
tasks_project_id: project_id
}
end.not_to change(MemberTask, :count)
expect(response).to have_gitlab_http_status(:created)
members.each do |member|
member.reset
expect(member.tasks_to_be_done).to match_array([:code, :ci])
expect(member.member_task.project_id).to eq(project_id)
end
end
it 'adds tasks to be done if they do not exist', :aggregate_failures do
expect do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: {
user_id: [developer.id, stranger.id].join(','),
access_level: Member::DEVELOPER,
tasks_to_be_done: %w(issues),
tasks_project_id: project_id
}
end.to change(MemberTask, :count).by(2)
expect(response).to have_gitlab_http_status(:created)
members = source.members.where(user_id: [developer.id, stranger.id])
members.each do |member|
expect(member.tasks_to_be_done).to match_array([:issues])
expect(member.member_task.project_id).to eq(project_id)
end
end
end
end
end
it "updates a current member" do
it "returns 409 if member already exists" do
source.add_guest(stranger)
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: { user_id: stranger.id, access_level: Member::MAINTAINER }
params: { user_id: maintainer.id, access_level: Member::MAINTAINER }
expect(response).to have_gitlab_http_status(:created)
expect(json_response['id']).to eq(stranger.id)
expect(json_response['access_level']).to eq(Member::MAINTAINER)
expect(response).to have_gitlab_http_status(:conflict)
end
it 'returns 404 when the user_id is not valid' do

View file

@ -121,6 +121,7 @@ project_feature:
- created_at
- metrics_dashboard_access_level
- project_id
- requirements_access_level
- security_and_compliance_access_level
- updated_at
computed_attributes:

View file

@ -16,250 +16,232 @@ RSpec.describe API::Tags do
project.add_developer(user)
end
describe 'GET /projects/:id/repository/tags' do
describe 'GET /projects/:id/repository/tags', :use_clean_rails_memory_store_caching do
before do
stub_feature_flags(tag_list_keyset_pagination: false)
end
shared_examples "get repository tags" do
let(:route) { "/projects/#{project_id}/repository/tags" }
let(:route) { "/projects/#{project_id}/repository/tags" }
context 'sorting' do
let(:current_user) { user }
context 'sorting' do
let(:current_user) { user }
it 'sorts by descending order by default' do
get api(route, current_user)
it 'sorts by descending order by default' do
get api(route, current_user)
desc_order_tags = project.repository.tags.sort_by { |tag| tag.dereferenced_target.committed_date }
desc_order_tags.reverse!.map! { |tag| tag.dereferenced_target.id }
desc_order_tags = project.repository.tags.sort_by { |tag| tag.dereferenced_target.committed_date }
desc_order_tags.reverse!.map! { |tag| tag.dereferenced_target.id }
expect(json_response.map { |tag| tag['commit']['id'] }).to eq(desc_order_tags)
end
it 'sorts by ascending order if specified' do
get api("#{route}?sort=asc", current_user)
asc_order_tags = project.repository.tags.sort_by { |tag| tag.dereferenced_target.committed_date }
asc_order_tags.map! { |tag| tag.dereferenced_target.id }
expect(json_response.map { |tag| tag['commit']['id'] }).to eq(asc_order_tags)
end
it 'sorts by name in descending order when requested' do
get api("#{route}?order_by=name", current_user)
ordered_by_name = project.repository.tags.map { |tag| tag.name }.sort.reverse
expect(json_response.map { |tag| tag['name'] }).to eq(ordered_by_name)
end
it 'sorts by name in ascending order when requested' do
get api("#{route}?order_by=name&sort=asc", current_user)
ordered_by_name = project.repository.tags.map { |tag| tag.name }.sort
expect(json_response.map { |tag| tag['name'] }).to eq(ordered_by_name)
end
expect(json_response.map { |tag| tag['commit']['id'] }).to eq(desc_order_tags)
end
context 'searching' do
it 'only returns searched tags' do
get api("#{route}", user), params: { search: 'v1.1.0' }
it 'sorts by ascending order if specified' do
get api("#{route}?sort=asc", current_user)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
expect(json_response[0]['name']).to eq('v1.1.0')
end
asc_order_tags = project.repository.tags.sort_by { |tag| tag.dereferenced_target.committed_date }
asc_order_tags.map! { |tag| tag.dereferenced_target.id }
expect(json_response.map { |tag| tag['commit']['id'] }).to eq(asc_order_tags)
end
shared_examples_for 'repository tags' do
it 'returns the repository tags' do
get api(route, current_user)
it 'sorts by name in descending order when requested' do
get api("#{route}?order_by=name", current_user)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/tags')
expect(response).to include_pagination_headers
expect(json_response.map { |r| r['name'] }).to include(tag_name)
end
ordered_by_name = project.repository.tags.map { |tag| tag.name }.sort.reverse
context 'when repository is disabled' do
include_context 'disabled repository'
it_behaves_like '403 response' do
let(:request) { get api(route, current_user) }
end
end
expect(json_response.map { |tag| tag['name'] }).to eq(ordered_by_name)
end
context 'when unauthenticated', 'and project is public' do
let(:project) { create(:project, :public, :repository) }
it 'sorts by name in ascending order when requested' do
get api("#{route}?order_by=name&sort=asc", current_user)
it_behaves_like 'repository tags'
ordered_by_name = project.repository.tags.map { |tag| tag.name }.sort
expect(json_response.map { |tag| tag['name'] }).to eq(ordered_by_name)
end
end
context 'searching' do
it 'only returns searched tags' do
get api("#{route}", user), params: { search: 'v1.1.0' }
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
expect(json_response[0]['name']).to eq('v1.1.0')
end
end
shared_examples_for 'repository tags' do
it 'returns the repository tags' do
get api(route, current_user)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/tags')
expect(response).to include_pagination_headers
expect(json_response.map { |r| r['name'] }).to include(tag_name)
end
context 'when unauthenticated', 'and project is private' do
it_behaves_like '404 response' do
let(:request) { get api(route) }
let(:message) { '404 Project Not Found' }
end
end
context 'when repository is disabled' do
include_context 'disabled repository'
context 'when authenticated', 'as a maintainer' do
let(:current_user) { user }
it_behaves_like 'repository tags'
context 'requesting with the escaped project full path' do
let(:project_id) { CGI.escape(project.full_path) }
it_behaves_like 'repository tags'
end
end
context 'when authenticated', 'as a guest' do
it_behaves_like '403 response' do
let(:request) { get api(route, guest) }
let(:request) { get api(route, current_user) }
end
end
end
context 'with releases' do
let(:description) { 'Awesome release!' }
context 'when unauthenticated', 'and project is public' do
let(:project) { create(:project, :public, :repository) }
let!(:release) do
create(:release,
:legacy,
project: project,
tag: tag_name,
description: description)
end
it_behaves_like 'repository tags'
end
it 'returns an array of project tags with release info' do
get api(route, user)
context 'when unauthenticated', 'and project is private' do
it_behaves_like '404 response' do
let(:request) { get api(route) }
let(:message) { '404 Project Not Found' }
end
end
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/tags')
expect(response).to include_pagination_headers
context 'when authenticated', 'as a maintainer' do
let(:current_user) { user }
expected_tag = json_response.find { |r| r['name'] == tag_name }
expect(expected_tag['message']).to eq(tag_message)
expect(expected_tag['release']['description']).to eq(description)
end
it_behaves_like 'repository tags'
context 'requesting with the escaped project full path' do
let(:project_id) { CGI.escape(project.full_path) }
it_behaves_like 'repository tags'
end
end
context 'when authenticated', 'as a guest' do
it_behaves_like '403 response' do
let(:request) { get api(route, guest) }
end
end
context 'with releases' do
let(:description) { 'Awesome release!' }
let!(:release) do
create(:release,
:legacy,
project: project,
tag: tag_name,
description: description)
end
context 'with keyset pagination on', :aggregate_errors do
before do
stub_feature_flags(tag_list_keyset_pagination: true)
end
it 'returns an array of project tags with release info' do
get api(route, user)
context 'with keyset pagination option' do
let(:base_params) { { pagination: 'keyset' } }
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/tags')
expect(response).to include_pagination_headers
context 'with gitaly pagination params' do
context 'with high limit' do
let(:params) { base_params.merge(per_page: 100) }
expected_tag = json_response.find { |r| r['name'] == tag_name }
expect(expected_tag['message']).to eq(tag_message)
expect(expected_tag['release']['description']).to eq(description)
end
end
it 'returns all repository tags' do
get api(route, user), params: params
context 'with keyset pagination on', :aggregate_errors do
before do
stub_feature_flags(tag_list_keyset_pagination: true)
end
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/tags')
expect(response.headers).not_to include('Link')
tag_names = json_response.map { |x| x['name'] }
expect(tag_names).to match_array(project.repository.tag_names)
end
context 'with keyset pagination option' do
let(:base_params) { { pagination: 'keyset' } }
context 'with gitaly pagination params' do
context 'with high limit' do
let(:params) { base_params.merge(per_page: 100) }
it 'returns all repository tags' do
get api(route, user), params: params
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/tags')
expect(response.headers).not_to include('Link')
tag_names = json_response.map { |x| x['name'] }
expect(tag_names).to match_array(project.repository.tag_names)
end
end
context 'with low limit' do
let(:params) { base_params.merge(per_page: 2) }
context 'with low limit' do
let(:params) { base_params.merge(per_page: 2) }
it 'returns limited repository tags' do
get api(route, user), params: params
it 'returns limited repository tags' do
get api(route, user), params: params
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/tags')
expect(response.headers).to include('Link')
tag_names = json_response.map { |x| x['name'] }
expect(tag_names).to match_array(%w(v1.1.0 v1.1.1))
end
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/tags')
expect(response.headers).to include('Link')
tag_names = json_response.map { |x| x['name'] }
expect(tag_names).to match_array(%w(v1.1.0 v1.1.1))
end
end
context 'with missing page token' do
let(:params) { base_params.merge(page_token: 'unknown') }
context 'with missing page token' do
let(:params) { base_params.merge(page_token: 'unknown') }
it_behaves_like '422 response' do
let(:request) { get api(route, user), params: params }
let(:message) { 'Invalid page token: refs/tags/unknown' }
end
it_behaves_like '422 response' do
let(:request) { get api(route, user), params: params }
let(:message) { 'Invalid page token: refs/tags/unknown' }
end
end
end
end
end
context ":api_caching_tags flag enabled", :use_clean_rails_memory_store_caching do
describe "cache expiry" do
let(:route) { "/projects/#{project_id}/repository/tags" }
let(:current_user) { user }
before do
stub_feature_flags(api_caching_tags: true)
# Set the cache
get api(route, current_user)
end
it_behaves_like "get repository tags"
it "is cached" do
expect(API::Entities::Tag).not_to receive(:represent)
describe "cache expiry" do
let(:route) { "/projects/#{project_id}/repository/tags" }
let(:current_user) { user }
get api(route, current_user)
end
shared_examples "cache expired" do
it "isn't cached" do
expect(API::Entities::Tag).to receive(:represent).exactly(3).times
get api(route, current_user)
end
end
context "when protected tag is changed" do
before do
# Set the cache
get api(route, current_user)
create(:protected_tag, name: tag_name, project: project)
end
it "is cached" do
expect(API::Entities::Tag).not_to receive(:represent)
get api(route, current_user)
end
shared_examples "cache expired" do
it "isn't cached" do
expect(API::Entities::Tag).to receive(:represent).exactly(3).times
get api(route, current_user)
end
end
context "when protected tag is changed" do
before do
create(:protected_tag, name: tag_name, project: project)
end
it_behaves_like "cache expired"
end
context "when release is changed" do
before do
create(:release, :legacy, project: project, tag: tag_name)
end
it_behaves_like "cache expired"
end
context "when project is changed" do
before do
project.touch
end
it_behaves_like "cache expired"
end
end
end
context ":api_caching_tags flag disabled" do
before do
stub_feature_flags(api_caching_tags: false)
it_behaves_like "cache expired"
end
it_behaves_like "get repository tags"
context "when release is changed" do
before do
create(:release, :legacy, project: project, tag: tag_name)
end
it_behaves_like "cache expired"
end
context "when project is changed" do
before do
project.touch
end
it_behaves_like "cache expired"
end
end
context 'when gitaly is unavailable' do

View file

@ -5,13 +5,14 @@ require 'spec_helper'
RSpec.describe Projects::TransferService do
include GitHelpers
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
let_it_be(:group_integration) { create(:integrations_slack, :group, group: group, webhook: 'http://group.slack.com') }
let(:project) { create(:project, :repository, :legacy_storage, namespace: user.namespace) }
let(:target) { group }
subject(:execute_transfer) { described_class.new(project, user).execute(group).tap { project.reload } }
subject(:execute_transfer) { described_class.new(project, user).execute(target).tap { project.reload } }
context 'with npm packages' do
before do
@ -690,6 +691,32 @@ RSpec.describe Projects::TransferService do
end
end
context 'handling issue contacts' do
let_it_be(:root_group) { create(:group) }
let(:project) { create(:project, group: root_group) }
before do
root_group.add_owner(user)
target.add_owner(user)
create_list(:issue_customer_relations_contact, 2, :for_issue, issue: create(:issue, project: project))
end
context 'with the same root_ancestor' do
let(:target) { create(:group, parent: root_group) }
it 'retains issue contacts' do
expect { execute_transfer }.not_to change { CustomerRelations::IssueContact.count }
end
end
context 'with a different root_ancestor' do
it 'deletes issue contacts' do
expect { execute_transfer }.to change { CustomerRelations::IssueContact.count }.by(-2)
end
end
end
def rugged_config
rugged_repo(project.repository).config
end

View file

@ -986,10 +986,10 @@
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-2.5.0.tgz#e0569916fa858462b1801cc90ef8dd9706a12e96"
integrity sha512-cH/EBs//wdkH6kG+kDpvRCIl63/A8JgjAhBJ+ZWucPgtNCDD6x6RDMGdQrxSqhYwcCKDoLStfcxmblBkuiSRXQ==
"@gitlab/ui@35.1.0":
version "35.1.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-35.1.0.tgz#feebe3e7bc4260b256c92e753201f12dae3d8857"
integrity sha512-j0+kXYkWfgxrHUG41WR0xL+ctcPwGhCM2YxinKy0DQmXmHGgw380bk922/r2yXAnQ6A4KDuvjQz1Ue0m1Yj6Cw==
"@gitlab/ui@36.1.0":
version "36.1.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-36.1.0.tgz#1cd3d74fabd429a5ff5086eb1f6b4db22506e1b3"
integrity sha512-hTSG1l12AX+2SuGu+04bTc3lt1xE4FXej7O1UIrGELo197GfnpfnQM76/+JK0+b1w8vHw5MODBlt/c536dgaVg==
dependencies:
"@babel/standalone" "^7.0.0"
bootstrap-vue "2.20.1"