Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-10-13 00:08:53 +00:00
parent a9acc0c2fb
commit 0cd52ae4af
63 changed files with 652 additions and 246 deletions

View File

@ -1,6 +1,7 @@
<script>
import { GlBadge } from '@gitlab/ui';
import { s__ } from '~/locale';
import Tracking from '~/tracking';
import axios from '~/lib/utils/axios_utils';
import { joinPaths } from '~/lib/utils/url_utility';
import { helpPagePath } from '~/helpers/help_page_helper';
@ -18,6 +19,7 @@ export default {
components: {
GlBadge,
},
mixins: [Tracking.mixin()],
props: {
size: {
type: String,
@ -53,6 +55,10 @@ export default {
.then((res) => {
if (res.data) {
this.status = res.data.severity;
this.track('rendered_version_badge', {
label: this.status,
});
}
})
.catch(() => {
@ -72,6 +78,7 @@ export default {
class="version-check-badge"
:variant="status"
:size="size"
@click="track('click_version_badge', { label: status })"
>{{ title }}</gl-badge
>
</template>

View File

@ -73,6 +73,17 @@ class Import::GithubController < Import::BaseController
}
end
# POST /import/github/cancel
#
# Cancels an in-progress GitHub import for the project identified by
# params[:project_id]. Renders the serialized project on success, or the
# service's error message with the HTTP status it chose on failure.
def cancel
  imported_project = Project.imported_from(provider_name).find(params[:project_id])
  cancel_result = Import::Github::CancelProjectImportService
    .new(imported_project, current_user)
    .execute

  if cancel_result[:status] == :success
    render json: serialized_imported_projects(cancel_result[:project])
  else
    render json: { errors: cancel_result[:message] }, status: cancel_result[:http_status]
  end
end
protected
override :importable_repos

View File

@ -4,8 +4,11 @@ class Projects::MilestonesController < Projects::ApplicationController
include Gitlab::Utils::StrongMemoize
include MilestoneActions
REDIRECT_TARGETS = [:new_release].freeze
before_action :check_issuables_available!
before_action :milestone, only: [:edit, :update, :destroy, :show, :issues, :merge_requests, :participants, :labels, :promote]
before_action :redirect_path, only: [:new, :create]
# Allow read any milestone
before_action :authorize_read_milestone!
@ -59,7 +62,11 @@ class Projects::MilestonesController < Projects::ApplicationController
@milestone = Milestones::CreateService.new(project, current_user, milestone_params).execute
if @milestone.valid?
redirect_to project_milestone_path(@project, @milestone)
if @redirect_path == :new_release
redirect_to new_project_release_path(@project)
else
redirect_to project_milestone_path(@project, @milestone)
end
else
render "new"
end
@ -113,6 +120,11 @@ class Projects::MilestonesController < Projects::ApplicationController
protected
# before_action for :new/:create: remembers an allow-listed redirect target
# (see REDIRECT_TARGETS) from params so the create action can send the user
# back to where they came from. Unknown values are ignored.
def redirect_path
  candidate = params[:redirect_path]&.to_sym
  return unless REDIRECT_TARGETS.include?(candidate)

  @redirect_path = candidate
end
def project_group
strong_memoize(:project_group) do
project.group

View File

@ -82,7 +82,7 @@ module ReleasesHelper
markdown_docs_path: help_page_path('user/markdown'),
release_assets_docs_path: releases_help_page_path(anchor: 'release-assets'),
manage_milestones_path: project_milestones_path(@project),
new_milestone_path: new_project_milestone_path(@project),
new_milestone_path: new_project_milestone_path(@project, redirect_path: 'new_release'),
edit_release_docs_path: releases_help_page_path(anchor: 'edit-a-release'),
upcoming_release_docs_path: releases_help_page_path(anchor: 'upcoming-releases')
}

View File

@ -59,12 +59,14 @@ class WebHook < ApplicationRecord
# Whether the hook is inside a temporary (back-off) disabled window.
#
# False unless web_hooks_disable_failed? (presumably a feature-flag check —
# confirm) and the hook has exceeded FAILURE_THRESHOLD recent failures.
# Otherwise true while disabled_until is set and still in the future.
def temporarily_disabled?
return false unless web_hooks_disable_failed?
return false if recent_failures <= FAILURE_THRESHOLD
disabled_until.present? && disabled_until >= Time.current
end
# Whether the hook is disabled outright, with no re-enable time scheduled.
#
# A hook with disabled_until set is only *temporarily* disabled, so it is
# excluded here; permanent disablement means the failure count passed
# FAILURE_THRESHOLD while no back-off window is pending.
def permanently_disabled?
return false unless web_hooks_disable_failed?
return false if disabled_until.present?
recent_failures > FAILURE_THRESHOLD
end
@ -112,17 +114,26 @@ class WebHook < ApplicationRecord
save(validate: false)
end
# Record a failed delivery and, once past the grace period, back the hook off.
#
# Increments recent_failures on every call, but only starts the exponential
# back-off (bumping backoff_count and setting disabled_until) after
# recent_failures has reached FAILURE_THRESHOLD — below the threshold the
# counter acts purely as a grace-period marker.
#
# No-op when the hook is permanently disabled, or already temporarily
# disabled at the maximum back-off count. Persists without validations.
#
# Fix: removed the stale unconditional
# `assign_attributes(disabled_until: ..., backoff_count: ...clamp(0, ...))`
# left over from the previous implementation — it backed the hook off on
# every failure, contradicting the grace-period logic in `attrs` below.
def backoff!
  return if permanently_disabled? || (backoff_count >= MAX_FAILURES && temporarily_disabled?)

  attrs = { recent_failures: recent_failures + 1 }

  if recent_failures >= FAILURE_THRESHOLD
    attrs[:backoff_count] = backoff_count.succ.clamp(1, MAX_FAILURES)
    attrs[:disabled_until] = next_backoff.from_now
  end

  assign_attributes(attrs)
  save(validate: false)
end
# Record a plain (non-backoff) failure: bump recent_failures and clear any
# back-off state, capping the counter at MAX_FAILURES. Persists without
# validations.
#
# Fix: removed the stale `assign_attributes(recent_failures: ...)` left over
# from the previous implementation — combined with the line below it
# incremented recent_failures twice per call.
def failed!
  return unless recent_failures < MAX_FAILURES

  assign_attributes(disabled_until: nil, backoff_count: 0, recent_failures: recent_failures + 1)
  save(validate: false)
end

View File

@ -0,0 +1,36 @@
# frozen_string_literal: true

module Import
  module Github
    # Cancels an in-progress GitHub project import.
    #
    # Returns a success payload containing the project when the import was
    # cancelled, or an error payload (message plus HTTP status symbol) when
    # the caller lacks permission or the import is not in progress.
    class CancelProjectImportService < ::BaseService
      def execute
        return error('Not Found', :not_found) unless authorized_to_read?
        return error('Unauthorized access', :forbidden) unless authorized_to_cancel?
        return error(cannot_cancel_error_message, :bad_request) unless project.import_in_progress?

        project.import_state.cancel
        success(project: project)
      end

      private

      # Without read access we respond 404 rather than 403, so the project's
      # existence is not leaked to unauthorized callers.
      def authorized_to_read?
        can?(current_user, :read_project, project)
      end

      # Only users with owner access may cancel an import.
      def authorized_to_cancel?
        can?(current_user, :owner_access, project)
      end

      def cannot_cancel_error_message
        format(
          _('The import cannot be canceled because it is %{project_status}'),
          project_status: project.import_state.status
        )
      end
    end
  end
end

View File

@ -1,6 +1,8 @@
= gitlab_ui_form_for [@project, @milestone],
html: { class: 'milestone-form common-note-form js-quick-submit js-requires-input' } do |f|
= form_errors(@milestone)
- if @redirect_path.present?
= f.hidden_field(:redirect_path, name: :redirect_path, id: :redirect_path, value: @redirect_path)
.form-group.row
.col-form-label.col-sm-2
= f.label :title, _('Title')

View File

@ -0,0 +1,8 @@
---
name: ci_skip_auto_cancelation_on_child_pipelines
introduced_by_url: "https://gitlab.com/gitlab-org/gitlab/-/merge_requests/100854"
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/377712
milestone: '15.5'
type: development
group: group::pipeline execution
default_enabled: false

View File

@ -23,6 +23,7 @@ namespace :import do
get :status
get :callback
get :realtime_changes
post :cancel
end
resource :gitea, only: [:create, :new], controller: :gitea do

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Schedules asynchronous creation of a composite index on merge_requests
# (author_id, target_project_id); the `prepare_async_index` call registers
# the index so it is presumably built later by the async-index machinery
# rather than during this migration — confirm against the migration helper.
class PrepareAsyncIndexAuthorIdTargetProjectIdOnMergeRequests < Gitlab::Database::Migration[2.0]
INDEX_NAME = 'index_merge_requests_on_author_id_and_target_project_id'

# Run outside a single DDL transaction.
disable_ddl_transaction!

def up
prepare_async_index :merge_requests, %i[author_id target_project_id], name: INDEX_NAME
end

def down
unprepare_async_index :merge_requests, %i[author_id target_project_id], name: INDEX_NAME
end
end

View File

@ -0,0 +1 @@
ab9ab20d1a220e715a758f6175fcaf5c62cb240f49a2ad4227f7cd11f030fa1d

View File

@ -123,7 +123,9 @@ The OpenID Connect provides you with a client's details and secret for you to us
in such requests, set this to `false`.
- `client_options` are the OpenID Connect client-specific options. Specifically:
- `identifier` is the client identifier as configured in the OpenID Connect service provider.
- `secret` is the client secret as configured in the OpenID Connect service provider.
- `secret` is the client secret as configured in the OpenID Connect service provider. For example,
[OmniAuth OpenIDConnect](https://github.com/omniauth/omniauth_openid_connect) requires this. If the service provider doesn't require a secret,
provide any value and it is ignored.
- `redirect_uri` is the GitLab URL to redirect the user to after successful login
(for example, `http://example.com/users/auth/openid_connect/callback`).
- `end_session_endpoint` (optional) is the URL to the endpoint that ends the

View File

@ -1325,8 +1325,8 @@ Check which files are in use:
The output of these `openssl` commands should match, proving that the cert-key pair is a match:
```shell
openssl x509 -noout -modulus -in /var/opt/gitlab/registry/gitlab-registry.crt | openssl sha256
openssl rsa -noout -modulus -in /var/opt/gitlab/gitlab-rails/etc/gitlab-registry.key | openssl sha256
/opt/gitlab/embedded/bin/openssl x509 -noout -modulus -in /var/opt/gitlab/registry/gitlab-registry.crt | /opt/gitlab/embedded/bin/openssl sha256
/opt/gitlab/embedded/bin/openssl rsa -noout -modulus -in /var/opt/gitlab/gitlab-rails/etc/gitlab-registry.key | /opt/gitlab/embedded/bin/openssl sha256
```
If the two pieces of the certificate do not align, remove the files and run `gitlab-ctl reconfigure`

View File

@ -50,7 +50,7 @@ following issues:
- `openssl` works when specifying the path to the certificate:
```shell
/opt/gitlab/embedded/bin/openssl s_client -CAfile /root/my-cert.crt -connect gitlab.domain.tld:443
/opt/gitlab/embedded/bin/openssl s_client -CAfile /root/my-cert.crt -connect gitlab.domain.tld:443 -servername gitlab.domain.tld
```
If you have the previously described issues, add your certificate to

View File

@ -47,6 +47,51 @@ Example response:
}
```
## Cancel GitHub project import
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/364783) in GitLab 15.5.
Cancel an in-progress GitHub project import using the API.
```plaintext
POST /import/github/cancel
```
| Attribute | Type | Required | Description |
|------------|---------|----------|---------------------|
| `project_id` | integer | yes | GitLab project ID |
```shell
curl --request POST \
--url "https://gitlab.example.com/api/v4/import/github/cancel" \
--header "content-type: application/json" \
--header "PRIVATE-TOKEN: <your_access_token>" \
--data '{
"project_id": 12345
}'
```
Example response:
```json
{
"id": 160,
"name": "my-repo",
"full_path": "/root/my-repo",
"full_name": "Administrator / my-repo",
"import_source": "source/source-repo",
"import_status": "canceled",
"human_import_status_name": "canceled",
"provider_link": "/source/source-repo"
}
```
Returns the following status codes:
- `200 OK`: the project import is being canceled.
- `400 Bad Request`: the project import cannot be canceled.
- `404 Not Found`: the project associated with `project_id` does not exist.
## Import repository from Bitbucket Server
Import your projects from Bitbucket Server to GitLab via the API.

View File

@ -77,7 +77,7 @@ curl "http://localhost:3000/-/chaos/leakmem?memory_mb=1024&duration_s=10&token=s
## CPU spin
This endpoint attempts to fully utilise a single core, at 100%, for the given period.
This endpoint attempts to fully use a single core, at 100%, for the given period.
Depending on your rack server setup, your request may timeout after a predetermined period (normally 60 seconds).
@ -100,7 +100,7 @@ curl "http://localhost:3000/-/chaos/cpu_spin?duration_s=60&token=secret"
## DB spin
This endpoint attempts to fully utilise a single core, and interleave it with DB request, for the given period.
This endpoint attempts to fully use a single core, and interleave it with DB request, for the given period.
This endpoint can be used to model yielding execution to another threads when running concurrently.
Depending on your rack server setup, your request may timeout after a predetermined period (normally 60 seconds).

View File

@ -136,7 +136,7 @@ somewhat abstract and hard to understand initially, but this approach enables us
communicate about experiments as something that's wider than just user behavior.
NOTE:
Using `actor:` utilizes cookies if the `current_user` is nil. If you don't need
Using `actor:` uses cookies if the `current_user` is nil. If you don't need
cookies though - meaning that the exposed functionality would only be visible to
signed in users - `{ user: current_user }` would be just as effective.
@ -318,7 +318,7 @@ Given that we've defined a class for our experiment, and have defined the varian
The first way is by running the experiment. Assuming the experiment has been run, it surfaces in the client layer without having to do anything special.
The second way doesn't run the experiment and is intended to be used if the experiment must only surface in the client layer. To accomplish this we can `.publish` the experiment. This does not run any logic, but does surface the experiment details in the client layer so they can be utilized there.
The second way doesn't run the experiment and is intended to be used if the experiment must only surface in the client layer. To accomplish this we can `.publish` the experiment. This does not run any logic, but does surface the experiment details in the client layer so they can be used there.
An example might be to publish an experiment in a `before_action` in a controller. Assuming we've defined the `PillColorExperiment` class, like we have above, we can surface it to the client by publishing it instead of running it:

View File

@ -712,10 +712,10 @@ We also use `#database-lab` and [explain.depesz.com](https://explain.depesz.com/
- Use specialized indexes. For examples, see these merge requests:
- [Example 1](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/26871)
- [Example 2](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/26445)
- Use defined `start` and `finish`, and simple queries.
These values can be memoized and reused, as in this [example merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/37155).
- Avoid joins and write the queries as clearly as possible,
as in this [example merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/36316).
- Use defined `start` and `finish`. These values can be memoized and reused, as in this
[example merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/37155).
- Avoid joins and unnecessary complexity in your queries. See this
[example merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/36316) as an example.
- Set a custom `batch_size` for `distinct_count`, as in this [example merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/38000).
## Add the metric definition

View File

@ -71,7 +71,7 @@ on how to configure Review Apps for DAST.
#### Docker Services
If your application utilizes Docker containers you have another option for deploying and scanning with DAST.
If your application uses Docker containers you have another option for deploying and scanning with DAST.
After your Docker build job completes and your image is added to your container registry, you can use the image as a
[service](../../../ci/services/index.md).

View File

@ -1340,7 +1340,7 @@ It is also possible to write messages from your script to a log file that is col
Adding some basic logging to your overrides script is useful in case the script fails unexpectedly during normal running of the job. The log file is automatically included as an artifact of the job, allowing you to download it after the job has finished.
Following our example, we provided `renew_token.py` in the environment variable `DAST_API_OVERRIDES_CMD`. Please notice two things in the script:
Following our example, we provided `renew_token.py` in the environment variable `DAST_API_OVERRIDES_CMD`. Notice two things in the script:
- Log file is saved in the location indicated by the environmental variable `CI_PROJECT_DIR`.
- Log filename should match `gl-*.log`.
@ -2065,7 +2065,7 @@ A bug exists in versions of the DAST API analyzer prior to v1.6.196 that can cau
The version information can be found in the job details for the `dast_api` job.
If the issue is occurring with versions v1.6.196 or greater, please contact Support and provide the following information:
If the issue is occurring with versions v1.6.196 or greater, contact Support and provide the following information:
1. Reference this troubleshooting section and ask for the issue to be escalated to the Dynamic Analysis Team.
1. The full console output of the job.
@ -2129,7 +2129,7 @@ The DAST API engine outputs an error message when it cannot determine the target
There is an order of precedence in which the DAST API engine tries to get the target API when checking the different sources. First, it will try to use the `DAST_API_TARGET_URL`. If the environment variable has not been set, then the DAST API engine will attempt to use the `environment_url.txt` file. If there is no file `environment_url.txt`, then the DAST API engine will use the OpenAPI document contents and the URL provided in `DAST_API_OPENAPI` (if a URL is provided) to try to compute the target API.
The best-suited solution will depend on whether or not your target API changes for each deployment. In static environments, the target API is the same for each deployment, in this case please refer to the [static environment solution](#static-environment-solution). If the target API changes for each deployment a [dynamic environment solution](#dynamic-environment-solutions) should be applied.
The best-suited solution will depend on whether or not your target API changes for each deployment. In static environments, the target API is the same for each deployment, in this case refer to the [static environment solution](#static-environment-solution). If the target API changes for each deployment a [dynamic environment solution](#dynamic-environment-solutions) should be applied.
#### Static environment solution
@ -2226,10 +2226,10 @@ DAST API uses the specified media types in the OpenAPI document to generate requ
## Get support or request an improvement
To get support for your particular problem please use the [getting help channels](https://about.gitlab.com/get-help/).
To get support for your particular problem, use the [getting help channels](https://about.gitlab.com/get-help/).
The [GitLab issue tracker on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues) is the right place for bugs and feature proposals about API Security and DAST API.
Please use `~"Category:API Security"` [label](../../../development/contributing/issue_workflow.md#labels) when opening a new issue regarding DAST API to ensure it is quickly reviewed by the right people. Please refer to our [review response SLO](https://about.gitlab.com/handbook/engineering/workflow/code-review/#review-response-slo) to understand when you should receive a response.
Use `~"Category:API Security"` [label](../../../development/contributing/issue_workflow.md#labels) when opening a new issue regarding DAST API to ensure it is quickly reviewed by the right people. Refer to our [review response SLO](https://about.gitlab.com/handbook/engineering/workflow/code-review/#review-response-slo) to understand when you should receive a response.
[Search the issue tracker](https://gitlab.com/gitlab-org/gitlab/-/issues) for similar entries before submitting your own; there's a good chance somebody else had the same issue or feature proposal. Show your support with an award emoji and/or join the discussion.
@ -2241,7 +2241,7 @@ When experiencing a behavior not working as expected, consider providing context
- Scanner log file available as a job artifact named `gl-api-security-scanner.log`.
WARNING:
**Sanitize data attached to a support issue**. Please remove sensitive information, including: credentials, passwords, tokens, keys, and secrets.
**Sanitize data attached to a support issue**. Remove sensitive information, including: credentials, passwords, tokens, keys, and secrets.
## Glossary

View File

@ -94,7 +94,7 @@ is **not** `19.03.0`. See [troubleshooting information](#error-response-from-dae
WARNING:
Dependency Scanning does not support run-time installation of compilers and interpreters.
If you need it, please explain why by filling out [the survey](https://docs.google.com/forms/d/e/1FAIpQLScKo7xEYA65rOjPTGIufAyfjPGnCALSJZoTxBlvskfFMEOZMw/viewform).
If you need it, explain why by filling out [the survey](https://docs.google.com/forms/d/e/1FAIpQLScKo7xEYA65rOjPTGIufAyfjPGnCALSJZoTxBlvskfFMEOZMw/viewform).
## Supported languages and package managers
@ -297,7 +297,7 @@ table.supported-languages ul {
<a id="notes-regarding-supported-languages-and-package-managers-2"></a>
<p>
Although Gradle with Java 8 is supported, there are other issues such that Android project builds are not supported at this time.
Please see the backlog issue <a href="https://gitlab.com/gitlab-org/gitlab/-/issues/336866">Android support for Dependency
See the backlog issue <a href="https://gitlab.com/gitlab-org/gitlab/-/issues/336866">Android support for Dependency
Scanning (gemnasium-maven)</a> for more details. Also, Gradle is not supported when <a href="https://docs.gitlab.com/ee/development/fips_compliance.html#enable-fips-mode">FIPS mode</a> is enabled.
</p>
</li>
@ -432,7 +432,7 @@ When a supported dependency file is detected, all dependencies, including transi
### How multiple files are processed
NOTE:
If you've run into problems while scanning multiple files, please contribute a comment to
If you've run into problems while scanning multiple files, contribute a comment to
[this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/337056).
#### Python
@ -616,7 +616,7 @@ The following variables are used for configuring specific analyzers (used for a
| `GRADLE_CLI_OPTS` | `gemnasium-maven` | | List of command line arguments that are passed to `gradle` by the analyzer. |
| `SBT_CLI_OPTS` | `gemnasium-maven` | | List of command-line arguments that the analyzer passes to `sbt`. |
| `PIP_INDEX_URL` | `gemnasium-python` | `https://pypi.org/simple` | Base URL of Python Package Index. |
| `PIP_EXTRA_INDEX_URL` | `gemnasium-python` | | Array of [extra URLs](https://pip.pypa.io/en/stable/reference/pip_install/#cmdoption-extra-index-url) of package indexes to use in addition to `PIP_INDEX_URL`. Comma-separated. **Warning:** Please read [the following security consideration](#python-projects) when using this environment variable. |
| `PIP_EXTRA_INDEX_URL` | `gemnasium-python` | | Array of [extra URLs](https://pip.pypa.io/en/stable/reference/pip_install/#cmdoption-extra-index-url) of package indexes to use in addition to `PIP_INDEX_URL`. Comma-separated. **Warning:** Read [the following security consideration](#python-projects) when using this environment variable. |
| `PIP_REQUIREMENTS_FILE` | `gemnasium-python` | | Pip requirements file to be scanned. |
| `DS_PIP_VERSION` | `gemnasium-python` | | Force the install of a specific pip version (example: `"19.3"`), otherwise the pip installed in the Docker image is used. ([Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/12811) in GitLab 12.7) |
| `DS_PIP_DEPENDENCY_PATH` | `gemnasium-python` | | Path to load Python pip dependencies from. ([Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/12412) in GitLab 12.2) |
@ -912,7 +912,7 @@ this information is removed from the resulting merged file.
## Versioning and release process
Please check the [Release Process documentation](https://gitlab.com/gitlab-org/security-products/release/blob/master/docs/release_process.md).
Check the [Release Process documentation](https://gitlab.com/gitlab-org/security-products/release/blob/master/docs/release_process.md).
## Contributing to the vulnerability database
@ -956,7 +956,7 @@ registry.gitlab.com/security-products/gemnasium-python:3
```
The process for importing Docker images into a local offline Docker registry depends on
**your network security policy**. Please consult your IT staff to find an accepted and approved
**your network security policy**. Consult your IT staff to find an accepted and approved
process by which external resources can be imported or temporarily accessed.
These scanners are [periodically updated](../index.md#vulnerability-scanner-maintenance)
with new definitions, and you may be able to make occasional updates on your own.
@ -1038,7 +1038,7 @@ ensure that it can reach your private repository. Here is an example configurati
1. Fetch the certificate from your repository URL and add it to the project:
```shell
printf "\n" | openssl s_client -connect pypi.example.com:443 | sed -ne '/-BEGIN CERTIFICATE-/,/-END CERTIFICATE-/p' > internal.crt
printf "\n" | openssl s_client -connect pypi.example.com:443 -servername pypi.example.com | sed -ne '/-BEGIN CERTIFICATE-/,/-END CERTIFICATE-/p' > internal.crt
```
1. Point `setup.py` at the newly downloaded certificate:

View File

@ -131,7 +131,7 @@ to be able to use the `docker` command inside the jobs. This runner can be insta
a bastion, and used only for this specific project.
WARNING:
This template does not include updates for the container scanning analyzer. Please see
This template does not include updates for the container scanning analyzer. See
[Container scanning offline directions](../container_scanning/index.md#running-container-scanning-in-an-offline-environment).
#### Scheduling the updates

Binary file not shown.

After

Width:  |  Height:  |  Size: 23 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 25 KiB

View File

@ -43,9 +43,7 @@ Most policy changes take effect as soon as the merge request is merged. Any chan
do not go through a merge request and are committed directly to the default branch may require up to 10 minutes
before the policy changes take effect.
![Scan Execution Policy Editor YAML Mode](img/scan_execution_policy_yaml_mode_v14_7.png)
The policy editor currently only supports the YAML mode. The Rule mode is tracked in the [Allow Users to Edit Rule-mode Scan Execution Policies in the Policy UI](https://gitlab.com/groups/gitlab-org/-/epics/5363) epic.
![Scan Execution Policy Editor Rule Mode](img/scan_execution_policy_rule_mode_v15_5.png)
## Scan execution policies schema

View File

@ -680,7 +680,7 @@ registry.gitlab.com/security-products/spotbugs:2
```
The process for importing Docker images into a local offline Docker registry depends on
**your network security policy**. Please consult your IT staff to find an accepted and approved
**your network security policy**. Consult your IT staff to find an accepted and approved
process by which external resources can be imported or temporarily accessed. These scanners are [periodically updated](../index.md#vulnerability-scanner-maintenance)
with new definitions, and you may be able to make occasional updates on your own.

View File

@ -13,7 +13,7 @@ with your charts and values. To do this, you use the pull-based GitOps features
Kubernetes.
This feature is in Alpha and [an epic exists](https://gitlab.com/groups/gitlab-org/-/epics/7938)
to track future work. Please tell us about your use cases by leaving comments in the epic.
to track future work. Tell us about your use cases by leaving comments in the epic.
NOTE:
This feature is Alpha. In future releases, to accommodate new features, the configuration format might change without notice.

View File

@ -100,7 +100,7 @@ To enable License Compliance in your project's pipeline, either:
(provided by [Auto DevOps](../../../topics/autodevops/index.md)).
- Include the [`License-Scanning.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Security/License-Scanning.gitlab-ci.yml) in your `.gitlab-ci.yml` file.
Please note that License Compliance is not supported when GitLab is run with FIPS mode enabled.
License Compliance is not supported when GitLab is run with FIPS mode enabled.
### Include the License Scanning template
@ -656,7 +656,7 @@ registry.gitlab.com/security-products/license-finder:latest
```
The process for importing Docker images into a local offline Docker registry depends on
**your network security policy**. Please consult your IT staff to find an accepted and approved
**your network security policy**. Consult your IT staff to find an accepted and approved
process by which external resources can be imported or temporarily accessed. Note that these scanners are [updated periodically](../../application_security/index.md#vulnerability-scanner-maintenance)
with new definitions, so consider if you are able to make periodic updates yourself.

View File

@ -9,7 +9,7 @@ type: reference
These are notes and screenshots regarding Group SAML and SCIM that the GitLab Support Team sometimes uses while troubleshooting, but which do not fit into the official documentation. GitLab is making this public, so that anyone can make use of the Support team's collected knowledge.
Please refer to the GitLab [Group SAML](index.md) docs for information on the feature and how to set it up.
Refer to the GitLab [Group SAML](index.md) documentation for information on the feature and how to set it up.
When troubleshooting a SAML configuration, GitLab team members will frequently start with the [SAML troubleshooting section](index.md#troubleshooting).

View File

@ -31,7 +31,7 @@ If required, you can find [a glossary of common terms](../../../integration/saml
See [specific identity provider documentation](#providers) for more details.
1. Configure the SAML response to include a [NameID](#nameid) that uniquely identifies each user.
1. Configure the required [user attributes](#user-attributes), ensuring you include the user's email address.
1. While the default is enabled for most SAML providers, please ensure the app is set to have service provider
1. While the default is enabled for most SAML providers, ensure the app is set to have service provider
initiated calls to link existing GitLab accounts.
1. Once the identity provider is set up, move on to [configuring GitLab](#configure-gitlab).
@ -156,13 +156,13 @@ When SSO is enforced, users are not immediately revoked. If the user:
The SAML standard means that you can use a wide range of identity providers with GitLab. Your identity provider might have relevant documentation. It can be generic SAML documentation or specifically targeted for GitLab.
When [configuring your identity provider](#configure-your-identity-provider), please consider the notes below for specific providers to help avoid common issues and as a guide for terminology used.
When [configuring your identity provider](#configure-your-identity-provider), consider the notes below for specific providers to help avoid common issues and as a guide for terminology used.
For providers not listed below, you can refer to the [instance SAML notes on configuring an identity provider](../../../integration/saml.md#notes-on-configuring-your-identity-provider)
for additional guidance on information your identity provider may require.
GitLab provides the following information for guidance only.
If you have any questions on configuring the SAML app, please contact your provider's support.
If you have any questions on configuring the SAML app, contact your provider's support.
### Azure setup notes
@ -224,7 +224,7 @@ See our [example configuration page](example_saml_config.md#google-workspace).
### Okta setup notes
Please follow the Okta documentation on [setting up a SAML application in Okta](https://developer.okta.com/docs/guides/build-sso-integration/saml2/main/) with the notes below for consideration.
Follow the Okta documentation on [setting up a SAML application in Okta](https://developer.okta.com/docs/guides/build-sso-integration/saml2/main/) with the notes below for consideration.
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
For a demo of the Okta SAML setup including SCIM, see [Demo: Okta Group SAML & SCIM setup](https://youtu.be/0ES9HsZq0AQ).
@ -298,7 +298,7 @@ To migrate users to a new email domain, users must:
> SAML user provisioning [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/268142) in GitLab 13.7.
Once Group SSO is configured and enabled, users can access the GitLab.com group through the identity provider's dashboard. If [SCIM](scim_setup.md) is configured, please see the [user access and linking setup section on the SCIM page](scim_setup.md#user-access-and-linking-setup).
Once Group SSO is configured and enabled, users can access the GitLab.com group through the identity provider's dashboard. If [SCIM](scim_setup.md) is configured, see the [user access and linking setup section on the SCIM page](scim_setup.md#user-access-and-linking-setup).
When a user tries to sign in with Group SSO, GitLab attempts to find or create a user based on the following:

View File

@ -203,7 +203,7 @@ New users and existing users on subsequent visits can access the group through t
![Enterprise badge for users created with a SCIM identity](img/member_enterprise_badge_v14_0.png)
For role information, please see the [Group SAML page](index.md#user-access-and-management)
For role information, see the [Group SAML page](index.md#user-access-and-management)
### Blocking access

View File

@ -713,7 +713,7 @@ Follow [this issue](https://gitlab.com/gitlab-org/container-registry/-/issues/55
GitLab is [migrating to the next generation of the Container Registry](https://gitlab.com/groups/gitlab-org/-/epics/5523).
During the migration, you may encounter difficulty deleting tags.
If you encounter an error, it's likely that your image repository is in the process of being migrated.
Please wait a few minutes and try again.
Wait a few minutes and try again.
### `unauthorized: authentication required` when pushing large images

View File

@ -51,7 +51,7 @@ See the epic for:
- A list of known issues.
- Our planned direction and next steps.
If you find an issue that isn't listed, please leave a comment on the epic or create a
If you find an issue that isn't listed, leave a comment on the epic or create a
new issue.
Dark mode is available as a navigation theme, for MVC and compatibility reasons.
@ -201,7 +201,7 @@ To set your time preference:
NOTE:
This feature is experimental, and choosing absolute times might break certain layouts.
Please open an issue if you notice that using absolute times breaks a layout.
Open an issue if you notice that using absolute times breaks a layout.
## Integrations

View File

@ -244,8 +244,8 @@ You may also experience this error if your certificate is not valid. To check th
subject alternative names contain the correct domain for your cluster's API, run this command:
```shell
echo | openssl s_client -showcerts -connect kubernetes.example.com:443 2>/dev/null |
echo | openssl s_client -showcerts -connect kubernetes.example.com:443 -servername kubernetes.example.com 2>/dev/null |
openssl x509 -inform pem -noout -text
```
The `-connect` argument expects a `host:port` combination. For example, `https://kubernetes.example.com` would be `kubernetes.example.com:443`.
The `-connect` argument expects a `host:port` combination. For example, `https://kubernetes.example.com` would be `kubernetes.example.com:443`. The `-servername` argument expects a domain without any URI, for example `kubernetes.example.com`.

View File

@ -118,7 +118,7 @@ To display the deploy boards for a specific [environment](../../ci/environments/
NOTE:
Matching based on the Kubernetes `app` label was removed in
[GitLab 12.1](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/14020).
To migrate, please apply the required annotations (see above) and
To migrate, apply the required annotations (see above) and
re-deploy your application. If you are using Auto DevOps, this will
be done automatically and no action is necessary.

View File

@ -192,7 +192,7 @@ Here is an example of a bug report template:
## Example Project
(If possible, please create an example project here on GitLab.com that exhibits the problematic
(If possible, create an example project here on GitLab.com that exhibits the problematic
behavior, and link to it here in the bug report.
If you are using an older version of GitLab, this will also determine whether the bug has been fixed
in a more recent version)
@ -207,7 +207,7 @@ in a more recent version)
## Relevant logs and/or screenshots
(Paste any relevant logs - please use code blocks (```) to format console output, logs, and code, as
(Paste any relevant logs - use code blocks (```) to format console output, logs, and code, as
it's very hard to read otherwise.)
## Possible fixes

View File

@ -161,7 +161,7 @@ Cards finished by the UX team automatically appear in the **Frontend** column wh
for them.
NOTE:
For a broader use case, please see the blog post
For a broader use case, see the blog post
[What is GitLab Flow?](https://about.gitlab.com/topics/version-control/what-is-gitlab-flow/).
For a real use case example, you can read why
[Codepen decided to adopt issue boards](https://about.gitlab.com/blog/2017/01/27/codepen-welcome-to-gitlab/#project-management-everything-in-one-place)

View File

@ -238,7 +238,7 @@ This can occur if Sidekiq doesn't pick up the changes fast enough.
#### Sidekiq
Sidekiq didn't process the CI state change fast enough. Please wait a few
Sidekiq didn't process the CI state change fast enough. Wait a few
seconds and the status should update automatically.
#### Bug

View File

@ -188,7 +188,7 @@ Unable to fetch branches list, please close the form and try again
An unexpected response was received from the branches retrieval API.
As suggested, you should close the form and reopen again or refresh the page. This error should be temporary, although
if it persists please check the [GitLab status page](https://status.gitlab.com/) to see if there is a wider outage.
if it persists, check the [GitLab status page](https://status.gitlab.com/) to see if there is a wider outage.
### Failed to load status checks

View File

@ -96,7 +96,7 @@ Root domains (`example.com`) require:
| `_gitlab-pages-verification-code.example.com` | `TXT` | `gitlab-pages-verification-code=00112233445566778899aabbccddeeff` |
For projects on GitLab.com, this IP is `35.185.44.232`.
For projects living in other GitLab instances (CE or EE), please contact
For projects living in other GitLab instances (CE or EE), contact
your sysadmin asking for this information (which IP address is Pages
server running on your instance).

View File

@ -134,10 +134,10 @@ schemas:
Each schema entry supports two properties:
- `uri`: please provide an absolute URL for the schema definition file here.
- `uri`: Provide an absolute URL for the schema definition file here.
The schema from this URL is loaded when a matching file is open.
- `match`: a list of matching paths or glob expressions. If a schema matches a
particular path pattern, it is applied to that file. Please enclose the pattern
- `match`: A list of matching paths or glob expressions. If a schema matches a
particular path pattern, it is applied to that file. Enclose the pattern
in quotes if it begins with an asterisk (`*`), it's be applied to that file.
If a pattern begins with an asterisk (`*`), enclose it in quotation marks.
Otherwise, the configuration file is not valid YAML.
@ -457,5 +457,5 @@ The Web IDE has a few limitations:
and it can no longer be used. A stopped terminal can be restarted by selecting
**Restart Terminal**.
- If the terminal displays **Connection Failure**, then the terminal could not
connect to the runner. Please try to stop and restart the terminal. If the
connect to the runner. Try to stop and restart the terminal. If the
problem persists, double check your runner configuration.

View File

@ -54,5 +54,20 @@ module API
{ errors: result[:message] }
end
end
# Cancels an in-progress GitHub project import.
# NOTE(review): rendered diff fragment — original indentation/blank lines are
# lost in this view; `provider` and `current_user` come from the enclosing
# Grape API class (not visible here) — confirm against the full source.
params do
requires :project_id, type: Integer, desc: 'ID of importing project to be canceled'
end
# POST /import/github/cancel
post 'import/github/cancel' do
# Scope the lookup to projects imported from this provider; raises
# RecordNotFound (404) for unknown or non-GitHub projects.
project = Project.imported_from(provider.to_s).find(params[:project_id])
result = Import::Github::CancelProjectImportService.new(project, current_user).execute
# Service result hash contract: :status, and on error :message + :http_status.
if result[:status] == :success
status :ok
present ProjectSerializer.new.represent(project, serializer: :import)
else
render_api_error!(result[:message], result[:http_status])
end
end
end
end

View File

@ -11,9 +11,11 @@ module Gitlab
# rubocop: disable CodeReuse/ActiveRecord
def perform!
ff_enabled = Feature.enabled?(:ci_skip_auto_cancelation_on_child_pipelines, project)
return if ff_enabled && pipeline.child?
return unless project.auto_cancel_pending_pipelines?
Gitlab::OptimisticLocking.retry_lock(auto_cancelable_pipelines, name: 'cancel_pending_pipelines') do |cancelables|
Gitlab::OptimisticLocking.retry_lock(auto_cancelable_pipelines(ff_enabled), name: 'cancel_pending_pipelines') do |cancelables|
cancelables.select(:id).each_batch(of: BATCH_SIZE) do |cancelables_batch|
auto_cancel_interruptible_pipelines(cancelables_batch.ids)
end
@ -27,13 +29,19 @@ module Gitlab
private
def auto_cancelable_pipelines
project.all_pipelines.created_after(1.week.ago)
def auto_cancelable_pipelines(ff_enabled)
relation = project.all_pipelines
.created_after(1.week.ago)
.ci_and_parent_sources
.for_ref(pipeline.ref)
.id_not_in(pipeline.same_family_pipeline_ids)
.where_not_sha(project.commit(pipeline.ref).try(:id))
.alive_or_scheduled
if ff_enabled
relation.id_not_in(pipeline.id)
else
relation.id_not_in(pipeline.same_family_pipeline_ids)
end
end
def auto_cancel_interruptible_pipelines(pipeline_ids)

View File

@ -1,5 +1,5 @@
variables:
DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.38.1'
DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.39.0'
.dast-auto-deploy:
image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${DAST_AUTO_DEPLOY_IMAGE_VERSION}"

View File

@ -1,5 +1,5 @@
variables:
AUTO_DEPLOY_IMAGE_VERSION: 'v2.38.1'
AUTO_DEPLOY_IMAGE_VERSION: 'v2.39.0'
.auto-deploy:
image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}"

View File

@ -1,5 +1,5 @@
variables:
AUTO_DEPLOY_IMAGE_VERSION: 'v2.38.1'
AUTO_DEPLOY_IMAGE_VERSION: 'v2.39.0'
.auto-deploy:
image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}"

View File

@ -40215,6 +40215,9 @@ msgstr ""
msgid "The hostname of your Snowplow collector."
msgstr ""
msgid "The import cannot be canceled because it is %{project_status}"
msgstr ""
msgid "The import will time out after %{timeout}. For repositories that take longer, use a clone/push combination."
msgstr ""

View File

@ -20,7 +20,7 @@ module QA
project.group = group
project.github_personal_access_token = Runtime::Env.github_access_token
project.github_repository_path = 'gitlab-qa-github/import-test'
project.api_client = api_client
project.api_client = Runtime::API::Client.new(user: user)
end
end

View File

@ -198,7 +198,7 @@ module QA
project.github_personal_access_token = Runtime::Env.github_access_token
project.github_repository_path = github_repo
project.personal_namespace = user.username
project.api_client = api_client
project.api_client = Runtime::API::Client.new(user: user)
end
end

View File

@ -321,4 +321,37 @@ RSpec.describe Import::GithubController do
expect(json_response[0]['stats']).to include('imported')
end
end
# Specs for Import::GithubController#cancel. The cancel service is stubbed in
# both contexts, so only the controller's translation of the service result
# (success → 200 + serialized project, error → message + http_status) is tested.
describe "POST cancel" do
let_it_be(:project) { create(:project, :import_started, import_type: 'github', import_url: 'https://fake.url') }
context 'when project import was canceled' do
before do
# Stub the service so the happy path is exercised without real import state.
allow(Import::Github::CancelProjectImportService)
.to receive(:new).with(project, user)
.and_return(double(execute: { status: :success, project: project }))
end
it 'returns success' do
post :cancel, params: { project_id: project.id }
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'when project import was not canceled' do
before do
# Error result mirrors the service contract: :message plus :http_status.
allow(Import::Github::CancelProjectImportService)
.to receive(:new).with(project, user)
.and_return(double(execute: { status: :error, message: 'The import cannot be canceled because it is finished', http_status: :bad_request }))
end
it 'returns error' do
post :cancel, params: { project_id: project.id }
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['errors']).to eq('The import cannot be canceled because it is finished')
end
end
end
end

View File

@ -44,6 +44,26 @@ RSpec.describe Projects::MilestonesController do
end
end
# Covers the redirect_path behavior added to milestone creation:
# - no redirect_path → redirect to the newly created milestone;
# - redirect_path=new_release → redirect to the new-release form;
# - validation failure → re-render the form (200), ignoring redirect_path.
describe "#create" do
it 'does not redirect without redirect_path' do
post :create, params: { namespace_id: project.namespace.id, project_id: project.id, milestone: { title: 'test' } }
expect(response).to redirect_to(project_milestone_path(project, project.milestones.last))
end
it 'redirects when given a redirect_path' do
post :create, params: { namespace_id: project.namespace.id, project_id: project.id, redirect_path: 'new_release', milestone: { title: 'test' } }
expect(response).to redirect_to(new_project_release_path(project))
end
it 'will not redirect when given a redirect_path with an error' do
# title: nil fails validation, so the create form is re-rendered with 200.
post :create, params: { namespace_id: project.namespace.id, project_id: project.id, redirect_path: 'new_release', milestone: { title: nil } }
expect(response).to have_gitlab_http_status(:ok)
end
end
describe "#index" do
context "as html" do
def render_index(project:, page:, search_title: '')

View File

@ -28,6 +28,12 @@ RSpec.describe 'Milestone' do
expect(find('[data-testid="no-issues-alert"]')).to have_content('Assign some issues to this milestone.')
expect(page).to have_content('Nov 16, 2016Dec 16, 2016')
end
# The redirect_path query param must be propagated into a hidden form field
# (visible: :all because the input is not rendered visibly).
it 'passes redirect_path through to form' do
visit new_project_milestone_path(project, redirect_path: 'new_release')
expect(find('#redirect_path', visible: :all)[:value]).to eq('new_release')
end
end
describe 'Open a milestone with closed issues' do

View File

@ -3,123 +3,136 @@
require 'spec_helper'
RSpec.describe 'OAuth Registration', :js, :allow_forgery_protection do
include DeviseHelpers
include LoginHelpers
include TermsHelper
using RSpec::Parameterized::TableSyntax
let(:uid) { 'my-uid' }
let(:email) { 'user@example.com' }
around do |example|
with_omniauth_full_host { example.run }
end
context 'when the user registers using single-sign on provider' do
let(:uid) { 'my-uid' }
let(:email) { 'user@example.com' }
where(:provider, :additional_info) do
:github | {}
:twitter | {}
:bitbucket | {}
:gitlab | {}
:google_oauth2 | {}
:facebook | {}
:cas3 | {}
:auth0 | {}
:authentiq | {}
:salesforce | { extra: { email_verified: true } }
:dingtalk | {}
:alicloud | {}
end
where(:provider, :additional_info) do
:github | {}
:twitter | {}
:bitbucket | {}
:gitlab | {}
:google_oauth2 | {}
:facebook | {}
:cas3 | {}
:auth0 | {}
:authentiq | {}
:salesforce | { extra: { email_verified: true } }
:dingtalk | {}
:alicloud | {}
with_them do
before do
stub_omniauth_provider(provider)
stub_feature_flags(update_oauth_registration_flow: true)
end
with_them do
context 'when block_auto_created_users is true' do
before do
stub_omniauth_provider(provider)
stub_feature_flags(update_oauth_registration_flow: true)
stub_omniauth_setting(block_auto_created_users: true)
end
context 'when block_auto_created_users is true' do
before do
stub_omniauth_setting(block_auto_created_users: true)
end
it 'redirects back to the sign-in page' do
register_via(provider, uid, email, additional_info: additional_info)
it 'redirects back to the sign-in page' do
register_via(provider, uid, email, additional_info: additional_info)
expect(page).to have_current_path new_user_session_path
expect(page).to have_content('Your account is pending approval')
end
end
expect(page).to have_current_path new_user_session_path
expect(page).to have_content('Your account is pending approval')
end
context 'when block_auto_created_users is false' do
before do
stub_omniauth_setting(block_auto_created_users: false)
end
context 'when block_auto_created_users is false' do
it 'redirects to the initial welcome path' do
register_via(provider, uid, email, additional_info: additional_info)
expect(page).to have_current_path users_sign_up_welcome_path
expect(page).to have_content('Welcome to GitLab, mockuser!')
end
context 'when terms are enforced' do
before do
stub_omniauth_setting(block_auto_created_users: false)
enforce_terms
end
it 'redirects to the initial welcome path' do
it 'auto accepts terms and redirects to the initial welcome path' do
register_via(provider, uid, email, additional_info: additional_info)
expect(page).to have_current_path users_sign_up_welcome_path
expect(page).to have_content('Welcome to GitLab, mockuser!')
end
end
context 'when terms are enforced' do
before do
enforce_terms
end
context 'when provider does not send a verified email address' do
let(:email) { 'temp-email-for-oauth@email.com' }
it 'auto accepts terms and redirects to the initial welcome path' do
register_via(provider, uid, email, additional_info: additional_info)
it 'redirects to the profile path' do
register_via(provider, uid, email, additional_info: additional_info)
expect(page).to have_current_path users_sign_up_welcome_path
expect(page).to have_content('Welcome to GitLab, mockuser!')
end
end
context 'when provider does not send a verified email address' do
let(:email) { 'temp-email-for-oauth@email.com' }
it 'redirects to the profile path' do
register_via(provider, uid, email, additional_info: additional_info)
expect(page).to have_current_path profile_path
expect(page).to have_content('Please complete your profile with email address')
end
end
context 'when registering via an invitation email' do
let_it_be(:owner) { create(:user) }
let_it_be(:group) { create(:group, name: 'Owned') }
let_it_be(:project) { create(:project, :repository, namespace: group) }
let(:invite_email) { generate(:email) }
let(:extra_params) { { invite_type: Emails::Members::INITIAL_INVITE } }
let(:group_invite) do
create(
:group_member, :invited,
group: group,
invite_email: invite_email,
created_by: owner
)
end
before do
project.add_maintainer(owner)
group.add_owner(owner)
group_invite.generate_invite_token!
mock_auth_hash(provider, uid, invite_email, additional_info: additional_info)
end
it 'redirects to the activity page with all the projects/groups invitations accepted' do
visit invite_path(group_invite.raw_invite_token, extra_params)
click_link_or_button "oauth-login-#{provider}"
fill_in_welcome_form
expect(page).to have_content('You have been granted Owner access to group Owned.')
expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
end
expect(page).to have_current_path profile_path
expect(page).to have_content('Please complete your profile with email address')
end
end
context 'when registering via an invitation email' do
let_it_be(:owner) { create(:user) }
let_it_be(:group) { create(:group, name: 'Owned') }
let_it_be(:project) { create(:project, :repository, namespace: group) }
let(:invite_email) { generate(:email) }
let(:extra_params) { { invite_type: Emails::Members::INITIAL_INVITE } }
let(:group_invite) do
create(
:group_member, :invited,
group: group,
invite_email: invite_email,
created_by: owner
)
end
before do
project.add_maintainer(owner)
group.add_owner(owner)
group_invite.generate_invite_token!
mock_auth_hash(provider, uid, invite_email, additional_info: additional_info)
end
it 'redirects to the activity page with all the projects/groups invitations accepted' do
visit invite_path(group_invite.raw_invite_token, extra_params)
click_link_or_button "oauth-login-#{provider}"
fill_in_welcome_form
expect(page).to have_content('You have been granted Owner access to group Owned.')
expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
end
end
end
end
context 'when update_oauth_registration_flow is disabled' do
before do
stub_omniauth_provider(:github)
stub_omniauth_setting(block_auto_created_users: false)
stub_feature_flags(update_oauth_registration_flow: false)
enforce_terms
end
it 'presents the terms page' do
register_via(:github, uid, email)
expect(page).to have_content('These are the terms')
end
end

View File

@ -2,6 +2,7 @@ import { GlBadge } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
import { mockTracking } from 'helpers/tracking_helper';
import { helpPagePath } from '~/helpers/help_page_helper';
import axios from '~/lib/utils/axios_utils';
import GitlabVersionCheck from '~/vue_shared/components/gitlab_version_check.vue';
@ -93,8 +94,11 @@ describe('GitlabVersionCheck', () => {
${{ code: 200, res: { severity: 'danger' } }} | ${{ title: 'Update ASAP', variant: 'danger' }}
`('badge ui', ({ mockResponse, expectedUI }) => {
describe(`when response is ${mockResponse.res.severity}`, () => {
let trackingSpy;
beforeEach(async () => {
createComponent(mockResponse);
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
await waitForPromises(); // Ensure we wrap up the axios call
});
@ -106,9 +110,23 @@ describe('GitlabVersionCheck', () => {
expect(findGlBadge().attributes('variant')).toBe(expectedUI.variant);
});
it(`tracks rendered_version_badge with status ${expectedUI.variant}`, () => {
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'rendered_version_badge', {
label: expectedUI.variant,
});
});
it(`link is ${UPGRADE_DOCS_URL}`, () => {
expect(findGlBadge().attributes('href')).toBe(UPGRADE_DOCS_URL);
});
it(`tracks click_version_badge with status ${expectedUI.variant} when badge is clicked`, async () => {
await findGlBadge().vm.$emit('click');
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_version_badge', {
label: expectedUI.variant,
});
});
});
});
});

View File

@ -49,6 +49,12 @@ RSpec.describe ReleasesHelper do
expect(helper.data_for_releases_page[:new_release_path]).to eq(new_project_release_path(project))
end
end
# The new-release page links to milestone creation with a redirect_path param
# so the user returns to the release form after creating a milestone.
context 'new releases redirect new milestone creation' do
it 'redirects new_milestone_path back to the release page' do
expect(helper.data_for_new_release_page[:new_milestone_path]).to include('redirect_path')
end
end
end
describe '#data_for_edit_release_page' do

View File

@ -141,7 +141,42 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines do
end
end
context 'when the prev pipeline source is webide' do
# With ci_skip_auto_cancelation_on_child_pipelines enabled (the default here),
# a child pipeline must not auto-cancel anything — in particular not its
# parent's interruptible builds. With the flag disabled, the legacy behavior
# still cancels the previous pipeline but must leave the parent untouched.
context 'when the pipeline is a child pipeline' do
let!(:parent_pipeline) { create(:ci_pipeline, project: project, sha: new_commit.sha) }
let(:pipeline) { create(:ci_pipeline, child_of: parent_pipeline) }
before do
# Two interruptible running builds on the parent — candidates for
# auto-cancel if the guard were missing.
create(:ci_build, :interruptible, :running, pipeline: parent_pipeline)
create(:ci_build, :interruptible, :running, pipeline: parent_pipeline)
end
it 'does not cancel any builds' do
expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
perform
# Statuses unchanged: the child pipeline skipped auto-cancelation entirely.
expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
end
context 'when feature flag ci_skip_auto_cancelation_on_child_pipelines is disabled' do
before do
stub_feature_flags(ci_skip_auto_cancelation_on_child_pipelines: false)
end
it 'does not cancel the parent pipeline' do
expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
perform
# Legacy path: previous pipeline is canceled, parent is spared via
# same_family_pipeline_ids exclusion.
expect(build_statuses(prev_pipeline)).to contain_exactly('success', 'canceled', 'canceled')
expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
end
end
end
context 'when the previous pipeline source is webide' do
let(:prev_pipeline) { create(:ci_pipeline, :webide, project: project) }
it 'does not cancel builds of the previous pipeline' do

View File

@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe Gitlab::GitAccess, :aggregate_failures do
include TermsHelper
include GitHelpers
include AdminModeHelper
let(:user) { create(:user) }
@ -789,18 +788,29 @@ RSpec.describe Gitlab::GitAccess, :aggregate_failures do
def merge_into_protected_branch
@protected_branch_merge_commit ||= begin
project.repository.add_branch(user, unprotected_branch, 'feature')
rugged = rugged_repo(project.repository)
target_branch = rugged.rev_parse('feature')
target_branch = TestEnv::BRANCH_SHA['feature']
source_branch = project.repository.create_file(
user,
'filename',
'This is the file content',
message: 'This is a good commit message',
branch_name: unprotected_branch)
author = { email: "email@example.com", time: Time.now, name: "Example Git User" }
merge_id = project.repository.raw.merge_to_ref(
user,
branch: target_branch,
first_parent_ref: target_branch,
source_sha: source_branch,
target_ref: 'refs/merge-requests/test',
message: 'commit message'
)
merge_index = rugged.merge_commits(target_branch, source_branch)
Rugged::Commit.create(rugged, author: author, committer: author, message: "commit message", parents: [target_branch, source_branch], tree: merge_index.write_tree(rugged))
# We are trying to simulate what the repository would look like
# during the pre-receive hook, before the actual ref is
# written/created. Repository#new_commits relies on there being no
# ref pointing to the merge commit.
project.repository.delete_refs('refs/merge-requests/test')
merge_id
end
end

View File

@ -170,7 +170,7 @@ RSpec.describe WebHook do
end
it 'does not async execute non-executable hooks' do
hook.update!(disabled_until: 1.day.from_now)
allow(hook).to receive(:executable?).and_return(false)
expect(WebHookService).not_to receive(:new)
@ -238,17 +238,18 @@ RSpec.describe WebHook do
[
[0, :not_set, true],
[0, :past, true],
[0, :future, false],
[0, :now, false],
[0, :future, true],
[0, :now, true],
[1, :not_set, true],
[1, :past, true],
[1, :future, false],
[1, :future, true],
[3, :not_set, true],
[3, :past, true],
[3, :future, false],
[3, :future, true],
[4, :not_set, false],
[4, :past, false],
[4, :future, false]
[4, :past, true], # expired suspension
[4, :now, false], # active suspension
[4, :future, false] # active suspension
]
end
@ -357,6 +358,7 @@ RSpec.describe WebHook do
end
it 'makes a hook executable if it is currently backed off' do
hook.recent_failures = 1000
hook.disabled_until = 1.hour.from_now
expect { hook.enable! }.to change(hook, :executable?).from(false).to(true)
@ -378,55 +380,71 @@ RSpec.describe WebHook do
end
describe 'backoff!' do
it 'sets disabled_until to the next backoff' do
expect { hook.backoff! }.to change(hook, :disabled_until).to(hook.next_backoff.from_now)
context 'when we have not backed off before' do
it 'does not disable the hook' do
expect { hook.backoff! }.not_to change(hook, :executable?).from(true)
end
it 'increments the recent_failures count' do
expect { hook.backoff! }.to change(hook, :recent_failures).by(1)
end
end
it 'increments the backoff count' do
expect { hook.backoff! }.to change(hook, :backoff_count).by(1)
end
context 'when the hook is permanently disabled' do
context 'when we have exhausted the grace period' do
before do
allow(hook).to receive(:permanently_disabled?).and_return(true)
hook.update!(recent_failures: described_class::FAILURE_THRESHOLD)
end
it 'does not set disabled_until' do
expect { hook.backoff! }.not_to change(hook, :disabled_until)
it 'sets disabled_until to the next backoff' do
expect { hook.backoff! }.to change(hook, :disabled_until).to(hook.next_backoff.from_now)
end
it 'does not increment the backoff count' do
expect { hook.backoff! }.not_to change(hook, :backoff_count)
end
end
context 'when we have backed off MAX_FAILURES times' do
before do
stub_const("#{described_class}::MAX_FAILURES", 5)
5.times { hook.backoff! }
it 'increments the backoff count' do
expect { hook.backoff! }.to change(hook, :backoff_count).by(1)
end
it 'does not let the backoff count exceed the maximum failure count' do
expect { hook.backoff! }.not_to change(hook, :backoff_count)
end
context 'when the hook is permanently disabled' do
before do
allow(hook).to receive(:permanently_disabled?).and_return(true)
end
it 'does not change disabled_until', :skip_freeze_time do
travel_to(hook.disabled_until - 1.minute) do
it 'does not set disabled_until' do
expect { hook.backoff! }.not_to change(hook, :disabled_until)
end
end
it 'changes disabled_until when it has elapsed', :skip_freeze_time do
travel_to(hook.disabled_until + 1.minute) do
expect { hook.backoff! }.to change { hook.disabled_until }
expect(hook.backoff_count).to eq(described_class::MAX_FAILURES)
it 'does not increment the backoff count' do
expect { hook.backoff! }.not_to change(hook, :backoff_count)
end
end
end
include_examples 'is tolerant of invalid records' do
def run_expectation
expect { hook.backoff! }.to change(hook, :backoff_count).by(1)
context 'when we have backed off MAX_FAILURES times' do
before do
stub_const("#{described_class}::MAX_FAILURES", 5)
(described_class::FAILURE_THRESHOLD + 5).times { hook.backoff! }
end
it 'does not let the backoff count exceed the maximum failure count' do
expect { hook.backoff! }.not_to change(hook, :backoff_count)
end
it 'does not change disabled_until', :skip_freeze_time do
travel_to(hook.disabled_until - 1.minute) do
expect { hook.backoff! }.not_to change(hook, :disabled_until)
end
end
it 'changes disabled_until when it has elapsed', :skip_freeze_time do
travel_to(hook.disabled_until + 1.minute) do
expect { hook.backoff! }.to change { hook.disabled_until }
expect(hook.backoff_count).to eq(described_class::MAX_FAILURES)
end
end
end
include_examples 'is tolerant of invalid records' do
def run_expectation
expect { hook.backoff! }.to change(hook, :backoff_count).by(1)
end
end
end
end
@ -468,8 +486,19 @@ RSpec.describe WebHook do
expect(hook).not_to be_temporarily_disabled
end
it 'allows FAILURE_THRESHOLD initial failures before we back-off' do
described_class::FAILURE_THRESHOLD.times do
hook.backoff!
expect(hook).not_to be_temporarily_disabled
end
hook.backoff!
expect(hook).to be_temporarily_disabled
end
context 'when hook has been told to back off' do
before do
hook.update!(recent_failures: described_class::FAILURE_THRESHOLD)
hook.backoff!
end
@ -550,6 +579,7 @@ RSpec.describe WebHook do
context 'when hook has been backed off' do
before do
hook.update!(recent_failures: described_class::FAILURE_THRESHOLD + 1)
hook.disabled_until = 1.hour.from_now
end

View File

@ -26,13 +26,6 @@ RSpec.describe PoolRepository do
describe '#unlink_repository' do
let(:pool) { create(:pool_repository, :ready) }
let(:alternates_file) { File.join(repository_path, 'objects', 'info', 'alternates') }
let(:repository_path) do
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
File.join(TestEnv.repos_path, pool.source_project.repository.relative_path)
end
end
before do
pool.link_repository(pool.source_project.repository)
@ -41,19 +34,17 @@ RSpec.describe PoolRepository do
context 'when the last member leaves' do
it 'schedules pool removal' do
expect(::ObjectPool::DestroyWorker).to receive(:perform_async).with(pool.id).and_call_original
expect(pool.source_project.repository).to receive(:disconnect_alternates).and_call_original
pool.unlink_repository(pool.source_project.repository)
expect(File).not_to exist(alternates_file)
end
end
context 'when skipping disconnect' do
it 'does not change the alternates file' do
before = File.read(alternates_file)
pool.unlink_repository(pool.source_project.repository, disconnect: false)
expect(pool.source_project.repository).not_to receive(:disconnect_alternates)
expect(File.read(alternates_file)).to eq(before)
pool.unlink_repository(pool.source_project.repository, disconnect: false)
end
end
@ -63,10 +54,9 @@ RSpec.describe PoolRepository do
pool.link_repository(other_project.repository)
expect(::ObjectPool::DestroyWorker).not_to receive(:perform_async).with(pool.id)
expect(pool.source_project.repository).to receive(:disconnect_alternates).and_call_original
pool.unlink_repository(pool.source_project.repository)
expect(File).not_to exist(alternates_file)
end
end
end

View File

@ -89,4 +89,42 @@ RSpec.describe API::ImportGithub do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
# API-level specs for POST /import/github/cancel. Mirrors the controller spec:
# the cancel service is stubbed, so only the Grape endpoint's mapping of the
# service result to HTTP responses is verified.
describe "POST /import/github/cancel" do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :import_started, import_type: 'github', import_url: 'https://fake.url') }
context 'when project import was canceled' do
before do
# Success result → endpoint responds 200 with the serialized project.
allow(Import::Github::CancelProjectImportService)
.to receive(:new).with(project, user)
.and_return(double(execute: { status: :success, project: project }))
end
it 'returns success' do
post api("/import/github/cancel", user), params: {
project_id: project.id
}
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'when project import was not canceled' do
before do
# Error result → endpoint surfaces the service's message and http_status.
allow(Import::Github::CancelProjectImportService)
.to receive(:new).with(project, user)
.and_return(double(execute: { status: :error, message: 'The import cannot be canceled because it is finished', http_status: :bad_request }))
end
it 'returns error' do
post api("/import/github/cancel", user), params: {
project_id: project.id
}
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('The import cannot be canceled because it is finished')
end
end
end
end

View File

@ -0,0 +1,56 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Import::Github::CancelProjectImportService do
  subject(:import_cancel) { described_class.new(project, project.owner) }

  let_it_be(:user) { create(:user) }
  let_it_be_with_reload(:project) { create(:project, :import_started, import_type: 'github', import_url: 'https://fake.url') }

  # `execute` is an instance method, so the RSpec convention is the `#` prefix
  # (`.` is reserved for class methods).
  describe '#execute' do
    context 'when user is an owner' do
      context 'when import is in progress' do
        it 'updates import state to be canceled' do
          expect(import_cancel.execute).to eq({ status: :success, project: project })
        end
      end

      context 'when import is finished' do
        let(:expected_result) do
          {
            status: :error,
            http_status: :bad_request,
            message: 'The import cannot be canceled because it is finished'
          }
        end

        before do
          project.import_state.finish!
        end

        it 'returns error' do
          expect(import_cancel.execute).to eq(expected_result)
        end
      end
    end

    context 'when user is not allowed to read project' do
      it 'returns 404' do
        expect(described_class.new(project, user).execute)
          .to eq({ status: :error, http_status: :not_found, message: 'Not Found' })
      end
    end

    context 'when user is not allowed to cancel project' do
      before do
        project.add_developer(user)
      end

      it 'returns 403' do
        expect(described_class.new(project, user).execute)
          .to eq({ status: :error, http_status: :forbidden, message: 'Unauthorized access' })
      end
    end
  end
end

View File

@ -46,7 +46,8 @@ RSpec.describe WebHooks::LogExecutionService do
it 'updates failure state using a lease that ensures fresh state is written' do
service = described_class.new(hook: project_hook, log_data: data, response_category: :error)
WebHook.find(project_hook.id).update!(backoff_count: 1)
# Write state somewhere else, so that the hook is out-of-date
WebHook.find(project_hook.id).update!(recent_failures: 5, disabled_until: 10.minutes.from_now, backoff_count: 1)
lease = stub_exclusive_lease(lease_key, timeout: described_class::LOCK_TTL)
@ -148,36 +149,10 @@ RSpec.describe WebHooks::LogExecutionService do
data[:response_status] = '500'
end
it 'does not increment the failure count' do
expect { service.execute }.not_to change(project_hook, :recent_failures)
end
it 'backs off' do
expect { service.execute }.to change(project_hook, :disabled_until)
end
expect(project_hook).to receive(:backoff!)
it 'increases the backoff count' do
expect { service.execute }.to change(project_hook, :backoff_count).by(1)
end
context 'when the previous cool-off was near the maximum' do
before do
project_hook.update!(disabled_until: 5.minutes.ago, backoff_count: 8)
end
it 'sets the disabled_until attribute' do
expect { service.execute }.to change(project_hook, :disabled_until).to(1.day.from_now)
end
end
context 'when we have backed-off many many times' do
before do
project_hook.update!(disabled_until: 5.minutes.ago, backoff_count: 365)
end
it 'sets the disabled_until attribute' do
expect { service.execute }.to change(project_hook, :disabled_until).to(1.day.from_now)
end
service.execute
end
end
end

View File

@ -134,6 +134,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
context 'the hook is backed-off' do
before do
WebHook::FAILURE_THRESHOLD.times { hook.backoff! }
hook.backoff!
end