Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-12-01 09:10:25 +00:00
parent 9b646e9297
commit 7bc1ee0bcb
52 changed files with 731 additions and 283 deletions

View File

@ -1,11 +1,14 @@
<script>
import { GlTooltipDirective as GlTooltip } from '@gitlab/ui';
import { isFunction } from 'lodash';
import { GlTooltipDirective, GlResizeObserverDirective } from '@gitlab/ui';
import { isFunction, debounce } from 'lodash';
import { hasHorizontalOverflow } from '~/lib/utils/dom_utils';
const UPDATE_TOOLTIP_DEBOUNCED_WAIT_MS = 300;
export default {
directives: {
GlTooltip,
GlTooltip: GlTooltipDirective,
GlResizeObserver: GlResizeObserverDirective,
},
props: {
title: {
@ -26,15 +29,33 @@ export default {
},
data() {
return {
showTooltip: false,
tooltipDisabled: true,
};
},
computed: {
classes() {
if (this.tooltipDisabled) {
return '';
}
return 'js-show-tooltip';
},
tooltip() {
return {
title: this.title,
placement: this.placement,
disabled: this.tooltipDisabled,
};
},
},
watch: {
title() {
// Wait on $nextTick in case of slot width changes
// Wait on $nextTick in case the slot width changes
this.$nextTick(this.updateTooltip);
},
},
created() {
this.updateTooltipDebounced = debounce(this.updateTooltip, UPDATE_TOOLTIP_DEBOUNCED_WAIT_MS);
},
mounted() {
this.updateTooltip();
},
@ -45,25 +66,20 @@ export default {
} else if (this.truncateTarget === 'child') {
return this.$el.childNodes[0];
}
return this.$el;
},
updateTooltip() {
const target = this.selectTarget();
this.showTooltip = hasHorizontalOverflow(target);
this.tooltipDisabled = !hasHorizontalOverflow(this.selectTarget());
},
onResize() {
this.updateTooltipDebounced();
},
},
};
</script>
<template>
<span
v-if="showTooltip"
v-gl-tooltip="{ placement }"
:title="title"
class="js-show-tooltip gl-min-w-0"
>
<span v-gl-tooltip="tooltip" v-gl-resize-observer="onResize" :class="classes" class="gl-min-w-0">
<slot></slot>
</span>
<span v-else class="gl-min-w-0"> <slot></slot> </span>
</template>

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
module Resolvers
module Clusters
class AgentActivityEventsResolver < BaseResolver
type Types::Clusters::AgentActivityEventType, null: true
alias_method :agent, :object
delegate :project, to: :agent
def resolve(**args)
return ::Clusters::Agents::ActivityEvent.none unless can_view_activity_events?
agent.activity_events
end
private
def can_view_activity_events?
current_user.can?(:admin_cluster, project)
end
end
end
end
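
A hedged sketch of how the new resolver is reached from GraphQL; this is not part of the commit, the project path and agent name are invented, and `clusterAgent` is assumed to be the existing agent lookup field:

```ruby
# Hedged sketch, e.g. from a Rails console; example values are invented.
query = <<~GRAPHQL
  {
    project(fullPath: "my-group/my-project") {
      clusterAgent(name: "my-agent") {
        activityEvents {
          nodes { kind level recordedAt }
        }
      }
    }
  }
GRAPHQL

# Users without :admin_cluster on the project get an empty collection back,
# because the resolver returns ActivityEvent.none instead of raising.
GitlabSchema.execute(query, context: { current_user: user })
```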

View File

@ -28,7 +28,10 @@ module Resolvers
private
def preloads
{ tokens: :last_used_agent_tokens }
{
activity_events: { activity_events: [:user, agent_token: :agent] },
tokens: :last_used_agent_tokens
}
end
end
end

View File

@ -0,0 +1,38 @@
# frozen_string_literal: true
module Types
module Clusters
class AgentActivityEventType < BaseObject
graphql_name 'ClusterAgentActivityEvent'
authorize :admin_cluster
connection_type_class(Types::CountableConnectionType)
field :recorded_at,
Types::TimeType,
null: true,
description: 'Timestamp the event was recorded.'
field :kind,
GraphQL::Types::String,
null: true,
description: 'Type of event.'
field :level,
GraphQL::Types::String,
null: true,
description: 'Severity of the event.'
field :user,
Types::UserType,
null: true,
description: 'User associated with the event.'
field :agent_token,
Types::Clusters::AgentTokenType,
null: true,
description: 'Agent token associated with the event.'
end
end
end

View File

@ -55,6 +55,12 @@ module Types
complexity: 5,
resolver: ::Resolvers::Kas::AgentConnectionsResolver
field :activity_events,
Types::Clusters::AgentActivityEventType.connection_type,
null: true,
description: 'Recent activity for the cluster agent.',
resolver: Resolvers::Clusters::AgentActivityEventsResolver
def project
Gitlab::Graphql::Loaders::BatchModelLoader.new(Project, object.project_id).find
end

View File

@ -20,8 +20,6 @@
class BulkImports::Entity < ApplicationRecord
self.table_name = 'bulk_import_entities'
EXPORT_RELATIONS_URL = '/%{resource}/%{full_path}/export_relations'
belongs_to :bulk_import, optional: false
belongs_to :parent, class_name: 'BulkImports::Entity', optional: true
@ -112,14 +110,22 @@ class BulkImports::Entity < ApplicationRecord
entity_type.pluralize
end
def base_resource_url_path
"/#{pluralized_name}/#{encoded_source_full_path}"
end
def export_relations_url_path
@export_relations_url_path ||= EXPORT_RELATIONS_URL % { resource: pluralized_name, full_path: encoded_source_full_path }
"#{base_resource_url_path}/export_relations"
end
def relation_download_url_path(relation)
"#{export_relations_url_path}/download?relation=#{relation}"
end
def wikis_url_path
"#{base_resource_url_path}/wikis"
end
def project?
source_type == 'project_entity'
end
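
A hedged sketch of what the refactored URL helpers return, assuming a project entity whose `source_full_path` is `group/my-project` (so `encoded_source_full_path` is assumed to yield `group%2Fmy-project`):

```ruby
# Hedged sketch, not part of the commit; values are invented examples.
entity = BulkImports::Entity.new(
  source_type: 'project_entity',
  source_full_path: 'group/my-project'
)

entity.export_relations_url_path
# => "/projects/group%2Fmy-project/export_relations"
entity.relation_download_url_path('issues')
# => "/projects/group%2Fmy-project/export_relations/download?relation=issues"
entity.wikis_url_path
# => "/projects/group%2Fmy-project/wikis"
```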

View File

@ -2168,12 +2168,7 @@ class User < ApplicationRecord
project_creation_levels << nil
end
if Feature.enabled?(:linear_user_groups_with_developer_maintainer_project_access, self, default_enabled: :yaml)
developer_groups.self_and_descendants.where(project_creation_level: project_creation_levels)
else
developer_groups_hierarchy = ::Gitlab::ObjectHierarchy.new(developer_groups).base_and_descendants
::Group.where(id: developer_groups_hierarchy.select(:id), project_creation_level: project_creation_levels)
end
developer_groups.self_and_descendants.where(project_creation_level: project_creation_levels)
end
def no_recent_activity?

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
module Clusters
module Agents
class ActivityEventPolicy < BasePolicy
alias_method :event, :subject
delegate { event.agent }
end
end
end
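
The one-line `delegate` gives the event no rules of its own; a hedged sketch of the effect, assuming DeclarativePolicy's usual delegation semantics:

```ruby
# Hedged sketch: a permission check against an activity event falls through
# to the policy of event.agent, so a user who can admin the agent's cluster
# can also read the agent's activity events.
Ability.allowed?(user, :admin_cluster, event)
# ...behaves like:
# Ability.allowed?(user, :admin_cluster, event.agent)
```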

View File

@ -4,7 +4,7 @@ module MergeRequests
class RebaseService < MergeRequests::BaseService
REBASE_ERROR = 'Rebase failed. Please rebase locally'
attr_reader :merge_request
attr_reader :merge_request, :rebase_error
def execute(merge_request, skip_ci: false)
@merge_request = merge_request
@ -13,7 +13,7 @@ module MergeRequests
if rebase
success
else
error(REBASE_ERROR)
error(rebase_error)
end
end
@ -22,11 +22,23 @@ module MergeRequests
true
rescue StandardError => e
log_error(exception: e, message: REBASE_ERROR, save_message_on_model: true)
set_rebase_error(e)
log_error(exception: e, message: rebase_error, save_message_on_model: true)
false
ensure
merge_request.update_column(:rebase_jid, nil)
end
private
def set_rebase_error(exception)
@rebase_error =
if exception.is_a?(Gitlab::Git::PreReceiveError)
"Something went wrong during the rebase pre-receive hook: #{exception.message}."
else
REBASE_ERROR
end
end
end
end
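
A hedged illustration of the new error selection; the hook message below is invented:

```ruby
# Hedged sketch, not part of the commit. When Gitaly rejects the rebase in a
# pre-receive hook, the user now sees the hook's own message:
error = Gitlab::Git::PreReceiveError.new("GL-HOOK-ERR: commits must be signed")
# set_rebase_error(error) yields:
#   "Something went wrong during the rebase pre-receive hook: <message>."
# Any other StandardError still falls back to the generic REBASE_ERROR string.
```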

View File

@ -1,8 +0,0 @@
---
name: linear_user_groups_with_developer_maintainer_project_access
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68851
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/339436
milestone: '14.3'
type: development
group: group::access
default_enabled: false

View File

@ -690,7 +690,7 @@ Gitlab.ee do
Settings.cron_jobs['elastic_migration_worker']['cron'] ||= '*/30 * * * *'
Settings.cron_jobs['elastic_migration_worker']['job_class'] ||= 'Elastic::MigrationWorker'
Settings.cron_jobs['sync_seat_link_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['sync_seat_link_worker']['cron'] ||= "#{rand(60)} 3 * * * UTC"
Settings.cron_jobs['sync_seat_link_worker']['cron'] ||= "#{rand(60)} #{rand(3..4)} * * * UTC"
Settings.cron_jobs['sync_seat_link_worker']['job_class'] = 'SyncSeatLinkWorker'
Settings.cron_jobs['users_create_statistics_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['users_create_statistics_worker']['cron'] ||= '2 15 * * *'
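The new schedule spreads the seat-link sync over a two-hour window instead of a fixed hour; a hedged sketch of what the interpolation produces:

```ruby
# Hedged sketch: each configuration load now picks a random minute (0-59)
# and a random hour (3 or 4, UTC):
"#{rand(60)} #{rand(3..4)} * * * UTC"
# => e.g. "17 3 * * * UTC" or "42 4 * * * UTC"
```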

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
class RemoveNotesTrigramIndex < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
NOTES_TRIGRAM_INDEX_NAME = 'index_notes_on_note_trigram'
def up
remove_concurrent_index_by_name(:notes, NOTES_TRIGRAM_INDEX_NAME)
end
def down
add_concurrent_index :notes, :note, name: NOTES_TRIGRAM_INDEX_NAME, using: :gin, opclass: { note: :gin_trgm_ops }
end
end
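
A hedged sketch of the SQL each direction issues (matching the `structure.sql` hunk further down):

```ruby
# Hedged sketch, not GitLab source:
#
#   up:   DROP INDEX CONCURRENTLY "index_notes_on_note_trigram"
#   down: CREATE INDEX CONCURRENTLY "index_notes_on_note_trigram"
#           ON "notes" USING gin ("note" gin_trgm_ops)
#
# disable_ddl_transaction! is needed because CONCURRENTLY cannot run inside
# a transaction block.
```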

View File

@ -0,0 +1 @@
166ae24ae4856488c81a71c650dca038c8cd7cb2221545e84431e118da097688

View File

@ -26577,8 +26577,6 @@ CREATE INDEX index_notes_on_discussion_id ON notes USING btree (discussion_id);
CREATE INDEX index_notes_on_line_code ON notes USING btree (line_code);
CREATE INDEX index_notes_on_note_trigram ON notes USING gin (note gin_trgm_ops);
CREATE INDEX index_notes_on_noteable_id_and_noteable_type_and_system ON notes USING btree (noteable_id, noteable_type, system);
CREATE INDEX index_notes_on_project_id_and_id_and_system_false ON notes USING btree (project_id, id) WHERE (NOT system);

View File

@ -58,9 +58,9 @@ For Omnibus GitLab installations:
sudo gitlab-ctl reconfigure
```
## Set the `needs:` job limit **(FREE SELF)**
## Set the `needs` job limit **(FREE SELF)**
The maximum number of jobs that can be defined in `needs:` defaults to 50.
The maximum number of jobs that can be defined in `needs` defaults to 50.
A GitLab administrator with [access to the GitLab Rails console](operations/rails_console.md#starting-a-rails-console-session)
can choose a custom limit. For example, to set the limit to `100`:
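
A sketch of the console command, assuming the limit is backed by the `ci_needs_size_limit` plan limit:

```ruby
# Hedged sketch for a self-managed Rails console; assumes the limit is the
# ci_needs_size_limit plan limit.
Plan.default.actual_limits.update!(ci_needs_size_limit: 100)
```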

View File

@ -5457,6 +5457,30 @@ The edge type for [`CiStage`](#cistage).
| <a id="cistageedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="cistageedgenode"></a>`node` | [`CiStage`](#cistage) | The item at the end of the edge. |
#### `ClusterAgentActivityEventConnection`
The connection type for [`ClusterAgentActivityEvent`](#clusteragentactivityevent).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="clusteragentactivityeventconnectioncount"></a>`count` | [`Int!`](#int) | Total count of collection. |
| <a id="clusteragentactivityeventconnectionedges"></a>`edges` | [`[ClusterAgentActivityEventEdge]`](#clusteragentactivityeventedge) | A list of edges. |
| <a id="clusteragentactivityeventconnectionnodes"></a>`nodes` | [`[ClusterAgentActivityEvent]`](#clusteragentactivityevent) | A list of nodes. |
| <a id="clusteragentactivityeventconnectionpageinfo"></a>`pageInfo` | [`PageInfo!`](#pageinfo) | Information to aid in pagination. |
#### `ClusterAgentActivityEventEdge`
The edge type for [`ClusterAgentActivityEvent`](#clusteragentactivityevent).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="clusteragentactivityeventedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="clusteragentactivityeventedgenode"></a>`node` | [`ClusterAgentActivityEvent`](#clusteragentactivityevent) | The item at the end of the edge. |
#### `ClusterAgentConnection`
The connection type for [`ClusterAgent`](#clusteragent).
@ -8776,6 +8800,7 @@ GitLab CI/CD configuration template.
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="clusteragentactivityevents"></a>`activityEvents` | [`ClusterAgentActivityEventConnection`](#clusteragentactivityeventconnection) | Recent activity for the cluster agent. (see [Connections](#connections)) |
| <a id="clusteragentconnections"></a>`connections` | [`ConnectedAgentConnection`](#connectedagentconnection) | Active connections for the cluster agent. (see [Connections](#connections)) |
| <a id="clusteragentcreatedat"></a>`createdAt` | [`Time`](#time) | Timestamp the cluster agent was created. |
| <a id="clusteragentcreatedbyuser"></a>`createdByUser` | [`UserCore`](#usercore) | User object, containing information about the person who created the agent. |
@ -8786,6 +8811,18 @@ GitLab CI/CD configuration template.
| <a id="clusteragentupdatedat"></a>`updatedAt` | [`Time`](#time) | Timestamp the cluster agent was updated. |
| <a id="clusteragentwebpath"></a>`webPath` | [`String`](#string) | Web path of the cluster agent. |
### `ClusterAgentActivityEvent`
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="clusteragentactivityeventagenttoken"></a>`agentToken` | [`ClusterAgentToken`](#clusteragenttoken) | Agent token associated with the event. |
| <a id="clusteragentactivityeventkind"></a>`kind` | [`String`](#string) | Type of event. |
| <a id="clusteragentactivityeventlevel"></a>`level` | [`String`](#string) | Severity of the event. |
| <a id="clusteragentactivityeventrecordedat"></a>`recordedAt` | [`Time`](#time) | Timestamp the event was recorded. |
| <a id="clusteragentactivityeventuser"></a>`user` | [`UserCore`](#usercore) | User associated with the event. |
### `ClusterAgentToken`
#### Fields

View File

@ -27,7 +27,7 @@ can't link to files outside it.
### Cache
- Define cache per job by using the `cache:` keyword. Otherwise it is disabled.
- Define cache per job by using the `cache` keyword. Otherwise it is disabled.
- Subsequent pipelines can use the cache.
- Subsequent jobs in the same pipeline can use the cache, if the dependencies are identical.
- Different projects cannot share the cache.

View File

@ -66,9 +66,9 @@ as quickly as possible.
## Usage
Relationships are defined between jobs using the [`needs:` keyword](../yaml/index.md#needs).
Relationships are defined between jobs using the [`needs` keyword](../yaml/index.md#needs).
Note that `needs:` also works with the [parallel](../yaml/index.md#parallel) keyword,
Note that `needs` also works with the [parallel](../yaml/index.md#parallel) keyword,
giving you powerful options for parallelization within your pipeline.
## Limitations
@ -87,7 +87,7 @@ are certain use cases that you may need to work around. For more information, ch
The needs visualization shows the relationships between dependent jobs in a DAG. This graph displays all the jobs in a pipeline that need or are needed by other jobs. Jobs with no relationships are not displayed in this view.
To see the needs visualization, click on the **Needs** tab when viewing a pipeline that uses the `needs:` keyword.
To see the needs visualization, click on the **Needs** tab when viewing a pipeline that uses the `needs` keyword.
![Needs visualization example](img/dag_graph_example_v13_1.png)

View File

@ -294,7 +294,7 @@ As soon as the `review` job finishes, GitLab updates the `review/your-branch-nam
environment's URL.
It parses the `deploy.env` report artifact, registers a list of variables as runtime-created,
uses them to expand `environment:url: $DYNAMIC_ENVIRONMENT_URL`, and sets the result as the environment URL.
You can also specify a static part of the URL at `environment:url:`, such as
You can also specify a static part of the URL at `environment:url`, such as
`https://$DYNAMIC_ENVIRONMENT_URL`. If the value of `DYNAMIC_ENVIRONMENT_URL` is
`example.com`, the final result is `https://example.com`.
@ -303,7 +303,7 @@ The assigned URL for the `review/your-branch-name` environment is visible in the
Note the following:
- `stop_review` doesn't generate a dotenv report artifact, so it doesn't recognize the
`DYNAMIC_ENVIRONMENT_URL` environment variable. Therefore you shouldn't set `environment:url:` in the
`DYNAMIC_ENVIRONMENT_URL` environment variable. Therefore you shouldn't set `environment:url` in the
`stop_review` job.
- If the environment URL isn't valid (for example, the URL is malformed), the system doesn't update
the environment URL.
@ -451,7 +451,7 @@ Read more in the [`.gitlab-ci.yml` reference](../yaml/index.md#environmenton_sto
You can set an environment to stop when another job is finished.
In your `.gitlab-ci.yml` file, specify in the [`on_stop:`](../yaml/index.md#environmenton_stop)
In your `.gitlab-ci.yml` file, specify in the [`on_stop`](../yaml/index.md#environmenton_stop)
keyword the name of the job that stops the environment.
The following example shows a `review_app` job that calls a `stop_review_app` job after the first
@ -478,7 +478,7 @@ The `stop_review_app` job **must** have the following keywords defined:
- `when`, defined at either:
- [The job level](../yaml/index.md#when).
- [In a rules clause](../yaml/index.md#rules). If you use `rules:` and `when: manual`, you should
- [In a rules clause](../yaml/index.md#rules). If you use `rules` and `when: manual`, you should
also set [`allow_failure: true`](../yaml/index.md#allow_failure) so the pipeline can complete
even if the job doesn't run.
- `environment:name`

View File

@ -79,7 +79,7 @@ job:
- In **all other cases**, the job is added to the pipeline, with `when: on_success`.
WARNING:
If you use a `when:` clause as the final rule (not including `when: never`), two
If you use a `when` clause as the final rule (not including `when: never`), two
simultaneous pipelines may start. Both push pipelines and merge request pipelines can
be triggered by the same event (a push to the source branch for an open merge request).
See how to [prevent duplicate pipelines](#avoid-duplicate-pipelines)
@ -153,7 +153,7 @@ To avoid duplicate pipelines, you can:
- Use [`workflow`](../yaml/index.md#workflow) to specify which types of pipelines
can run.
- Rewrite the rules to run the job only in very specific cases,
and avoid a final `when:` rule:
and avoid a final `when` rule:
```yaml
job:
@ -480,8 +480,8 @@ All files are considered to have changed when a scheduled pipeline runs.
If you use multiple keywords with `only` or `except`, the keywords are evaluated
as a single conjoined expression. That is:
- `only:` includes the job if **all** of the keys have at least one condition that matches.
- `except:` excludes the job if **any** of the keys have at least one condition that matches.
- `only` includes the job if **all** of the keys have at least one condition that matches.
- `except` excludes the job if **any** of the keys have at least one condition that matches.
With `only`, individual keys are logically joined by an `AND`. A job is added to
the pipeline if the following is true:

View File

@ -146,15 +146,15 @@ as well.
Jenkins Pipelines are based on [Groovy](https://groovy-lang.org/), so the pipeline specification is written as code.
GitLab works a bit differently: we use the more highly structured [YAML](https://yaml.org/) format, which
places scripting elements inside of `script:` blocks separate from the pipeline specification itself.
places scripting elements inside of `script` blocks separate from the pipeline specification itself.
This is a strength of GitLab: it keeps the learning curve simpler when getting up and running,
and it avoids some of the problems of unconstrained complexity that can make your Jenkinsfile hard to understand
and manage.
That said, we do of course still value DRY (don't repeat yourself) principles and want to ensure that
behaviors of your jobs can be codified once and applied as needed. You can use the `extends:` syntax to
[reuse configuration in your jobs](../yaml/index.md#extends), and `include:` can
behaviors of your jobs can be codified once and applied as needed. You can use the `extends` syntax to
[reuse configuration in your jobs](../yaml/index.md#extends), and `include` can
be used to [reuse pipeline configurations](../yaml/index.md#include) in pipelines
in different projects:
@ -174,7 +174,7 @@ rspec:
## Artifact publishing
Artifacts may work a bit differently than you've used them with Jenkins. In GitLab, any job can define
a set of artifacts to be saved by using the `artifacts:` keyword. This can be configured to point to a file
a set of artifacts to be saved by using the `artifacts` keyword. This can be configured to point to a file
or set of files that can then be persisted from job to job. Read more on our detailed
[artifacts documentation](../pipelines/job_artifacts.md):
@ -271,7 +271,7 @@ default:
GitLab CI/CD also lets you define stages, but is a little bit more free-form to configure. The GitLab [`stages` keyword](../yaml/index.md#stages)
is a top level setting that enumerates the list of stages, but you are not required to nest individual jobs underneath
the `stages` section. Any job defined in the `.gitlab-ci.yml` can be made a part of any stage through use of the
[`stage:` keyword](../yaml/index.md#stage).
[`stage` keyword](../yaml/index.md#stage).
Note that, unless otherwise specified, every pipeline is instantiated with `build`, `test`, and `deploy` stages,
which run in that order. Jobs that have no `stage` defined are placed in the `test` stage by default.

View File

@ -112,11 +112,11 @@ C:
- merge_requests
```
- `A` and `B` always run, because they get the `only:` rule to execute in all cases.
- `A` and `B` always run, because they get the `only` rule to execute in all cases.
- `C` only runs for merge requests. It doesn't run for any pipeline
except a merge request pipeline.
In this example, you don't have to add the `only:` rule to all of your jobs to make
In this example, you don't have to add the `only` rule to all of your jobs to make
them always run. You can use this format to set up a Review App, which helps to
save resources.

View File

@ -213,7 +213,7 @@ In the upstream pipeline:
```
1. Set the `test` job in the downstream pipeline to inherit the variables from the `build_vars`
job in the upstream project with `needs:`. The `test` job inherits the variables in the
job in the upstream project with `needs`. The `test` job inherits the variables in the
`dotenv` report and it can access `BUILD_VERSION` in the script:
```yaml

View File

@ -42,7 +42,7 @@ Child pipelines work well with other GitLab CI/CD features:
- Since the parent pipeline in `.gitlab-ci.yml` and the child pipeline run as normal
pipelines, they can have their own behaviors and sequencing in relation to triggers.
See the [`trigger:`](../yaml/index.md#trigger) keyword documentation for full details on how to
See the [`trigger`](../yaml/index.md#trigger) keyword documentation for full details on how to
include the child pipeline configuration.
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
@ -84,7 +84,7 @@ microservice_a:
file: '/path/to/child-pipeline.yml'
```
The maximum number of entries that are accepted for `trigger:include:` is three.
The maximum number of entries that are accepted for `trigger:include` is three.
As with [multi-project pipelines](multi_project_pipelines.md#mirror-status-of-a-triggered-pipeline-in-the-trigger-job),
we can set the parent pipeline to depend on the status of the child pipeline upon completion:

View File

@ -211,7 +211,7 @@ trigger_b:
```
Example child `a` pipeline configuration, located in `/a/.gitlab-ci.yml`, making
use of the DAG `needs:` keyword:
use of the DAG `needs` keyword:
```yaml
stages:
@ -240,7 +240,7 @@ deploy_a:
```
Example child `b` pipeline configuration, located in `/b/.gitlab-ci.yml`, making
use of the DAG `needs:` keyword:
use of the DAG `needs` keyword:
```yaml
stages:

View File

@ -267,7 +267,7 @@ when merging a merge request would cause the project's test coverage to decline.
Follow these steps to enable the `Coverage-Check` MR approval rule:
1. Set up a [`coverage:`](../yaml/index.md#coverage) regular expression for all jobs you want to include in the overall coverage value.
1. Set up a [`coverage`](../yaml/index.md#coverage) regular expression for all jobs you want to include in the overall coverage value.
1. Go to your project and select **Settings > General**.
1. Expand **Merge request approvals**.
1. Select **Enable** next to the `Coverage-Check` approval rule.

View File

@ -28,7 +28,7 @@ NOTE:
Variables set in the GitLab UI are not passed down to the service containers.
[Learn more](../variables/index.md#).
Then, commands in `script:` sections in your `.gitlab-ci.yml` file can access the API at `http://gitlab/api/v4`.
Then, commands in `script` sections in your `.gitlab-ci.yml` file can access the API at `http://gitlab/api/v4`.
For more information about why `gitlab` is used for the `Host`, see
[How services are linked to the job](../docker/using_docker_images.md#extended-docker-configuration-options).

View File

@ -23,7 +23,7 @@ depending on which trigger method is used.
| `$CI_PIPELINE_SOURCE` value | Trigger method |
|-----------------------------|----------------|
| `pipeline` | Using the `trigger:` keyword in the CI/CD configuration file, or using the trigger API with `$CI_JOB_TOKEN`. |
| `pipeline` | Using the `trigger` keyword in the CI/CD configuration file, or using the trigger API with `$CI_JOB_TOKEN`. |
| `trigger` | Using the trigger API with a generated trigger token. |
This also applies when using the `pipelines` or `triggers` keywords with the legacy [`only/except` basic syntax](../yaml/index.md#only--except).

View File

@ -291,7 +291,7 @@ Pipeline configuration warnings are shown when you:
### "Job may allow multiple pipelines to run for a single action" warning
When you use [`rules`](yaml/index.md#rules) with a `when:` clause without an `if:`
When you use [`rules`](yaml/index.md#rules) with a `when` clause without an `if`
clause, multiple pipelines may run. Usually this occurs when you push a commit to
a branch that has an open merge request associated with it.

View File

@ -73,7 +73,7 @@ or import additional pipeline configuration.
### `default`
You can set global defaults for some keywords. Jobs that do not define one or more
of the listed keywords use the value defined in the `default:` section.
of the listed keywords use the value defined in the `default` section.
**Keyword type**: Global keyword.
@ -90,7 +90,7 @@ of the listed keywords use the value defined in the `default:` section.
- [`tags`](#tags)
- [`timeout`](#timeout)
**Example of `default`:**
**Example of `default`**:
```yaml
default:
@ -106,7 +106,7 @@ rspec 2.7:
In this example, `ruby:3.0` is the default `image` value for all jobs in the pipeline.
The `rspec 2.7` job does not use the default, because it overrides the default with
a job-specific `image:` section:
a job-specific `image` section:
**Additional details**:
@ -324,7 +324,7 @@ The order of the items in `stages` defines the execution order for jobs:
**Keyword type**: Global keyword.
**Example of `stages`:**
**Example of `stages`**:
```yaml
stages:
@ -368,7 +368,7 @@ Use [`workflow`](workflow.md) to control pipeline behavior.
#### `workflow:rules`
The `rules` keyword in `workflow` is similar to [`rules:` defined in jobs](#rules),
The `rules` keyword in `workflow` is similar to [`rules` defined in jobs](#rules),
but controls whether or not a whole pipeline is created.
When no rules evaluate to true, the pipeline does not run.
@ -381,7 +381,7 @@ When no rules evaluate to true, the pipeline does not run.
- [`when`](#when), can only be `always` or `never` when used with `workflow`.
- [`variables`](#workflowrulesvariables).
**Example of `workflow:rules`:**
**Example of `workflow:rules`**:
```yaml
workflow:
@ -414,7 +414,7 @@ and the pipeline is for either:
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/294232) in GitLab 13.11.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/300997) in GitLab 14.1.
You can use [`variables`](#variables) in `workflow:rules:` to define variables for
You can use [`variables`](#variables) in `workflow:rules` to define variables for
specific pipeline conditions.
When the condition matches, the variable is created and can be used by all jobs
@ -428,7 +428,7 @@ variable takes precedence and overrides the global variable.
- The name can use only numbers, letters, and underscores (`_`).
- The value must be a string.
**Example of `workflow:rules:variables`:**
**Example of `workflow:rules:variables`**:
```yaml
variables:
@ -486,7 +486,7 @@ The following topics explain how to use keywords to configure CI/CD pipelines.
Use `after_script` to define an array of commands that run after each job, including failed jobs.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**: An array including:
@ -494,7 +494,7 @@ Use `after_script` to define an array of commands that run after each job, inclu
- Long commands [split over multiple lines](script.md#split-long-commands).
- [YAML anchors](yaml_optimization.md#yaml-anchors-for-scripts).
**Example of `after_script`:**
**Example of `after_script`**:
```yaml
job:
@ -585,7 +585,7 @@ In this example, `job1` and `job2` run in parallel:
**Additional details**:
- You can use `allow_failure` as a subkey of [`rules:`](#rulesallow_failure).
- You can use `allow_failure` as a subkey of [`rules`](#rulesallow_failure).
- You can use `allow_failure: false` with a manual job to create a [blocking manual job](../jobs/job_control.md#types-of-manual-jobs).
A blocked pipeline does not run any jobs in later stages until the manual job
is started and completes successfully.
@ -656,7 +656,7 @@ artifacts are restored after [caches](#cache).
Use `artifacts:exclude` to prevent files from being added to an artifacts archive.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**:
@ -709,7 +709,7 @@ After their expiry, artifacts are deleted hourly by default (using a cron job),
accessible anymore.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**: The expiry time. If no unit is provided, the time is in seconds.
Valid values include:
@ -749,7 +749,7 @@ Use the `artifacts:expose_as` keyword to
[expose job artifacts in the merge request UI](../pipelines/job_artifacts.md#expose-job-artifacts-in-the-merge-request-ui).
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**:
@ -793,7 +793,7 @@ archive. You can specify a unique name for every archive.
If not defined, the default name is `artifacts`, which becomes `artifacts.zip` when downloaded.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**:
@ -822,7 +822,7 @@ Paths are relative to the project directory (`$CI_PROJECT_DIR`) and can't direct
link outside it.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**:
@ -872,7 +872,7 @@ To deny read access for anonymous and guest users to artifacts in public
pipelines, set `artifacts:public` to `false`:
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**:
@ -892,7 +892,7 @@ Use [`artifacts:reports`](artifacts_reports.md) to collect artifacts generated b
included templates in jobs.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**: See list of available [artifacts reports types](artifacts_reports.md).
@ -925,7 +925,7 @@ with the paths defined in `artifacts:paths`). `artifacts:untracked` ignores conf
in the repository's `.gitignore` file.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**:
@ -951,7 +951,7 @@ Use `artifacts:when` to upload artifacts on job failure or despite the
failure.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**:
@ -975,7 +975,7 @@ Use `before_script` to define an array of commands that should run before each j
`script` commands, but after [artifacts](#artifacts) are restored.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**: An array including:
@ -983,7 +983,7 @@ Use `before_script` to define an array of commands that should run before each j
- Long commands [split over multiple lines](script.md#split-long-commands).
- [YAML anchors](yaml_optimization.md#yaml-anchors-for-scripts).
**Example of `before_script`:**
**Example of `before_script`**:
```yaml
job:
@ -1022,7 +1022,7 @@ Learn more about caches in [Caching in GitLab CI/CD](../caching/index.md).
Use the `cache:paths` keyword to choose which files or directories to cache.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**: An array of paths relative to the project directory (`$CI_PROJECT_DIR`).
You can use wildcards that use [glob](https://en.wikipedia.org/wiki/Glob_(programming))
@ -1058,11 +1058,11 @@ rspec:
Use the `cache:key` keyword to give each cache a unique identifying key. All jobs
that use the same cache key use the same cache, including in different pipelines.
If not set, the default key is `default`. All jobs with the `cache:` keyword but
If not set, the default key is `default`. All jobs with the `cache` keyword but
no `cache:key` share the `default` cache.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**:
@ -1112,7 +1112,7 @@ change. `cache:key:files` lets you reuse some caches, and rebuild them less ofte
which speeds up subsequent pipeline runs.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**: An array of one or two file paths.
@ -1151,7 +1151,7 @@ that changed each listed file.
Use `cache:key:prefix` to combine a prefix with the SHA computed for [`cache:key:files`](#cachekeyfiles).
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**:
@ -1188,7 +1188,7 @@ is not found, the prefix is added to `default`, so the key in the example would
Use `untracked: true` to cache all files that are untracked in your Git repository:
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**: `true` or `false` (default).
@ -1222,7 +1222,7 @@ rspec:
Use `cache:when` to define when to save the cache, based on the status of the job.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**:
@ -1260,7 +1260,7 @@ This policy speeds up job execution and reduces load on the cache server. You ca
use a job with the `push` policy to build the cache.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**:
@ -1359,7 +1359,7 @@ dast:
scanner_profile: "Quick Passive Test"
```
In this example, the `dast` job extends the `dast` configuration added with the `include:` keyword
In this example, the `dast` job extends the `dast` configuration added with the `include` keyword
to select a specific site profile and scanner profile.
**Additional details**:
@ -1685,12 +1685,12 @@ and is a little more flexible and readable.
**Keyword type**: Job keyword. You can use it only as part of a job.
**Possible inputs:**
**Possible inputs**:
- The name of another job in the pipeline.
- A list (array) of names of other jobs in the pipeline.
**Example of `extends`:**
**Example of `extends`**:
```yaml
.tests:
@ -1728,7 +1728,7 @@ rspec:
- $RSPEC
```
**Additional details:**
**Additional details**:
- In GitLab 12.0 and later, you can use multiple parents for `extends`.
- The `extends` keyword supports up to eleven levels of inheritance, but you should
@ -1736,7 +1736,7 @@ rspec:
- In the example above, `.tests` is a [hidden job](../jobs/index.md#hide-jobs),
but you can extend configuration from regular jobs as well.
**Related topics:**
**Related topics**:
- [Reuse configuration sections by using `extends`](yaml_optimization.md#use-extends-to-reuse-configuration-sections).
- Use `extends` to reuse configuration from [included configuration files](yaml_optimization.md#use-extends-and-include-together).
@ -1746,7 +1746,7 @@ rspec:
Use `image` to specify a Docker image that the job runs in.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**: The name of the image, including the registry path if needed, in one of these formats:
@ -1770,7 +1770,7 @@ rspec 2.7:
In this example, the `ruby:3.0` image is the default for all jobs in the pipeline.
The `rspec 2.7` job does not use the default, because it overrides the default with
a job-specific `image:` section.
a job-specific `image` section.
**Related topics**:
@ -1778,10 +1778,10 @@ a job-specific `image:` section.
#### `image:name`
The name of the Docker image that the job runs in. Similar to [`image:`](#image) used by itself.
The name of the Docker image that the job runs in. Similar to [`image`](#image) used by itself.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**: The name of the image, including the registry path if needed, in one of these formats:
@ -1809,7 +1809,7 @@ The syntax is similar to the [Dockerfile `ENTRYPOINT` directive](https://docs.do
where each shell token is a separate string in the array.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**: A string.
@ -1829,7 +1829,7 @@ image:
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/207484) in GitLab 12.9.
Use `inherit:` to [control inheritance of globally-defined defaults and variables](../jobs/index.md#control-the-inheritance-of-default-keywords-and-global-variables).
Use `inherit` to [control inheritance of globally-defined defaults and variables](../jobs/index.md#control-the-inheritance-of-default-keywords-and-global-variables).
#### `inherit:default`
@ -1842,7 +1842,7 @@ Use `inherit:default` to control the inheritance of [default keywords](#default)
- `true` (default) or `false` to enable or disable the inheritance of all default keywords.
- A list of specific default keywords to inherit.
**Example of `inherit:default`:**
**Example of `inherit:default`**:
```yaml
default:
@ -1863,7 +1863,7 @@ job2:
- image
```
**Additional details:**
**Additional details**:
- You can also list default keywords to inherit on one line: `default: [keyword1, keyword2]`
@ -1878,7 +1878,7 @@ Use `inherit:variables` to control the inheritance of [global variables](#variab
- `true` (default) or `false` to enable or disable the inheritance of all global variables.
- A list of specific variables to inherit.
**Example of `inherit:variables`:**
**Example of `inherit:variables`**:
```yaml
variables:
@ -1899,7 +1899,7 @@ job2:
- VARIABLE2
```
**Additional details:**
**Additional details**:
- You can also list global variables to inherit on one line: `variables: [VARIABLE1, VARIABLE2]`
@ -1916,7 +1916,7 @@ a new pipeline starts on the same branch.
You can't cancel subsequent jobs after a job with `interruptible: false` starts.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**: `true` or `false` (default).
@ -1965,7 +1965,7 @@ In this example, a new pipeline causes a running pipeline to be:
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/30631) in GitLab 12.8, `needs: []` lets jobs start immediately.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/30632) in GitLab 14.2, you can refer to jobs in the same stage as the job you are configuring.
Use `needs:` to execute jobs out-of-order. Relationships between jobs
Use `needs` to execute jobs out-of-order. Relationships between jobs
that use `needs` can be visualized as a [directed acyclic graph](../directed_acyclic_graph/index.md).
You can ignore stage ordering and run some jobs without waiting for others to complete.
@ -2022,11 +2022,11 @@ This example creates four paths of execution:
**Additional details**:
- The maximum number of jobs that a single job can have in the `needs:` array is limited:
- The maximum number of jobs that a single job can have in the `needs` array is limited:
- For GitLab.com, the limit is 50. For more information, see our
[infrastructure issue](https://gitlab.com/gitlab-com/gl-infra/infrastructure/-/issues/7541).
- For self-managed instances, the default limit is 50. This limit [can be changed](../../administration/cicd.md#set-the-needs-job-limit).
- If `needs:` refers to a job that uses the [`parallel`](#parallel) keyword,
- If `needs` refers to a job that uses the [`parallel`](#parallel) keyword,
it depends on all jobs created in parallel, not just one job. It also downloads
artifacts from all the parallel jobs by default. If the artifacts have the same
name, they overwrite each other and only the last one downloaded is saved.
@ -2035,9 +2035,9 @@ This example creates four paths of execution:
enabled on GitLab.com and ready for production use. On self-managed [GitLab 14.2 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/30632)
this feature is available by default.
- In GitLab 14.0 and older, you can only refer to jobs in earlier stages. Stages must be
explicitly defined for all jobs that use the `needs:` keyword, or are referenced
in a job's `needs:` section.
- In GitLab 13.9 and older, if `needs:` refers to a job that might not be added to
explicitly defined for all jobs that use the `needs` keyword, or are referenced
in a job's `needs` section.
- In GitLab 13.9 and older, if `needs` refers to a job that might not be added to
a pipeline because of `only`, `except`, or `rules`, the pipeline might fail to create.
#### `needs:artifacts`
@ -2046,7 +2046,7 @@ This example creates four paths of execution:
When a job uses `needs`, it no longer downloads all artifacts from previous stages
by default, because jobs with `needs` can start before earlier stages complete. With
`needs` you can only download artifacts from the jobs listed in the `needs:` configuration.
`needs` you can only download artifacts from the jobs listed in the `needs` configuration.
Use `artifacts: true` (default) or `artifacts: false` to control when artifacts are
downloaded in jobs that use `needs`.
@ -2085,7 +2085,7 @@ In this example:
- The `test-job1` job downloads the `build_job1` artifacts
- The `test-job2` job does not download the `build_job2` artifacts.
- The `test-job3` job downloads the artifacts from all three `build_jobs`, because
`artifacts:` is `true`, or defaults to `true`, for all three needed jobs.
`artifacts` is `true`, or defaults to `true`, for all three needed jobs.
**Additional details**:
@ -2103,14 +2103,14 @@ If there is a pipeline running for the specified ref, a job with `needs:project`
does not wait for the pipeline to complete. Instead, the job downloads the artifact
from the latest pipeline that completed successfully.
`needs:project` must be used with `job:`, `ref:`, and `artifacts:`.
`needs:project` must be used with `job`, `ref`, and `artifacts`.
**Keyword type**: Job keyword. You can use it only as part of a job.
**Possible inputs**:
- `needs:project`: A full project path, including namespace and group. If the
project is in the same group or namespace, you can omit them from the `project:`
project is in the same group or namespace, you can omit them from the `project`
keyword. For example: `project: group/project-name` or `project: project-name`.
- `job`: The job to download artifacts from.
- `ref`: The ref to download artifacts from.
@ -2150,7 +2150,7 @@ build_job:
**Additional details**:
- To download artifacts from a different pipeline in the current project, set `project:`
- To download artifacts from a different pipeline in the current project, set `project`
to be the same as the current project, but use a different ref than the current pipeline.
Concurrent pipelines running on the same ref could override the artifacts.
- The user running the pipeline must have at least the Reporter role for the group or project,
@ -2160,7 +2160,7 @@ build_job:
the needed job to complete. [Directed acyclic graph](../directed_acyclic_graph/index.md)
behavior is limited to jobs in the same pipeline. Make sure that the needed job in the other
pipeline completes before the job that needs it tries to download the artifacts.
- You can't download artifacts from jobs that run in [`parallel:`](#parallel).
- You can't download artifacts from jobs that run in [`parallel`](#parallel).
- Support for [CI/CD variables](../variables/index.md) in `project`, `job`, and `ref` was
[introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/202093) in GitLab 13.3.
[Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/235761) in GitLab 13.4.
@ -2182,7 +2182,7 @@ its parent pipeline or another child pipeline in the same parent-child pipeline
**Possible inputs**:
- `needs:pipeline`: A pipeline ID. Must be a pipeline present in the same parent-child pipeline hierarchy.
- `job:`: The job to download artifacts from.
- `job`: The job to download artifacts from.
**Example of `needs:pipeline:job`**:
@ -2244,7 +2244,7 @@ error similar to:
**Possible inputs**:
- `job:`: The job to make optional.
- `job`: The job to make optional.
- `true` or `false` (default).
**Example of `needs:optional`**:
@ -2281,7 +2281,7 @@ replicated to the bridge job.
**Possible inputs**:
- A full project path, including namespace and group. If the
project is in the same group or namespace, you can omit them from the `project:`
project is in the same group or namespace, you can omit them from the `project`
keyword. For example: `project: group/project-name` or `project: project-name`.
**Example of `needs:pipeline`**:
@ -2366,7 +2366,7 @@ job2:
- schedules
```
**Additional details:**
**Additional details**:
- Scheduled pipelines run on specific branches, so jobs configured with `only: branches`
run on scheduled pipelines too. Add `except: schedules` to prevent jobs with `only: branches`
@ -2636,7 +2636,7 @@ you can use this image from the GitLab Container Registry: `registry.gitlab.com/
**Keyword type**: Job keyword. You can use it only as part of a job.
**Possible inputs**: The `release:` subkeys:
**Possible inputs**: The `release` subkeys:
- [`tag_name`](#releasetag_name)
- [`name`](#releasename) (optional)
@ -2873,7 +2873,7 @@ By default, all failure types cause the job to be retried. Use [`retry:when`](#r
to select which failures to retry on.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**: `0` (default), `1`, or `2`.
@ -2892,7 +2892,7 @@ Use `retry:when` with `retry:max` to retry jobs for only specific failure cases.
`0`, `1`, or `2`.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**: A single failure type, or an array of one or more failure types:
@ -2996,7 +2996,7 @@ Use `rules:if` clauses to specify when to add a job to a pipeline:
- If an `if` statement is true, but it's combined with `when: never`, do not add the job to the pipeline.
- If no `if` statements are true, do not add the job to the pipeline.
`if:` clauses are evaluated based on the values of [predefined CI/CD variables](../variables/predefined_variables.md)
`if` clauses are evaluated based on the values of [predefined CI/CD variables](../variables/predefined_variables.md)
or [custom CI/CD variables](../variables/index.md#custom-cicd-variables).
**Keyword type**: Job-specific and pipeline-specific. You can use it as part of a job
@ -3043,7 +3043,7 @@ You should use `rules: changes` only with **branch pipelines** or **merge reques
You can use `rules: changes` with other pipeline types, but `rules: changes` always
evaluates to true when there is no Git `push` event. Tag pipelines, scheduled pipelines,
and so on do **not** have a Git `push` event associated with them. A `rules: changes` job
is **always** added to those pipelines if there is no `if:` that limits the job to
is **always** added to those pipelines if there is no `if` that limits the job to
branch or merge request pipelines.
**Keyword type**: Job keyword. You can use it only as part of a job.
@ -3112,7 +3112,7 @@ job:
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/30235) in GitLab 12.8.
Use [`allow_failure: true`](#allow_failure) in `rules:` to allow a job to fail
Use [`allow_failure: true`](#allow_failure) in `rules` to allow a job to fail
without stopping the pipeline.
You can also use `allow_failure: true` with a manual job. The pipeline continues
@ -3147,7 +3147,7 @@ If the rule matches, then the job is a manual job with `allow_failure: true`.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/209864) in GitLab 13.7.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/289803) in GitLab 13.10.
Use [`variables`](#variables) in `rules:` to define variables for specific conditions.
Use [`variables`](#variables) in `rules` to define variables for specific conditions.
**Keyword type**: Job-specific. You can use it only as part of a job.
@ -3185,7 +3185,7 @@ All jobs except [trigger jobs](#trigger) require a `script` keyword.
- Long commands [split over multiple lines](script.md#split-long-commands).
- [YAML anchors](yaml_optimization.md#yaml-anchors-for-scripts).
**Example of `script`:**
**Example of `script`**:
```yaml
job1:
@ -3309,7 +3309,7 @@ Use `services` to specify an additional Docker image to run scripts in. The [`se
to the image specified in the [`image`](#image) keyword.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**: The name of the services image, including the registry path if needed, in one of these formats:
@ -3482,7 +3482,7 @@ example `ruby`, `postgres`, or `development`. To pick up and run a job, a runner
be assigned every tag listed in the job.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**:
@ -3520,7 +3520,7 @@ The job-level timeout can be longer than the [project-level timeout](../pipeline
but can't be longer than the [runner's timeout](../runners/configure_runners.md#set-maximum-job-timeout-for-a-runner).
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default:` section](#default).
[`default` section](#default).
**Possible inputs**: A period of time written in natural language. For example, these are all equivalent:
@ -3639,7 +3639,7 @@ variable defined, the [job-level variable takes precedence](../variables/index.m
the first character must be a letter.
- The value must be a string.
**Examples of `variables`:**
**Examples of `variables`**:
```yaml
variables:
@ -3793,7 +3793,7 @@ Defining `image`, `services`, `cache`, `before_script`, and
`after_script` globally is deprecated. Support could be removed
from a future release.
Use [`default:`](#default) instead. For example:
Use [`default`](#default) instead. For example:
```yaml
default:

View File

@ -13,7 +13,7 @@ You can use special syntax in [`script`](index.md#script) sections to:
- [Create custom collapsible sections](../jobs/index.md#custom-collapsible-sections)
to simplify job log output.
## Use special characters with `script:`
## Use special characters with `script`
Sometimes, `script` commands must be wrapped in single or double quotes.
For example, commands that contain a colon (`:`) must be wrapped in single quotes (`'`).
@ -101,7 +101,7 @@ WARNING:
If multiple commands are combined into one command string, only the last command's
failure or success is reported.
[Failures from earlier commands are ignored due to a bug](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/25394).
To work around this, run each command as a separate `script:` item, or add an `exit 1`
To work around this, run each command as a separate `script` item, or add an `exit 1`
command to each command string.
You can use the `|` (literal) YAML multiline block scalar indicator to write

View File

@ -102,6 +102,20 @@ The **Time** metrics near the top of the page are measured as follows:
- **Lead time**: Median time from issue created to issue closed.
- **Cycle time**: Median time from first commit to issue closed. (You can associate a commit with an issue by [crosslinking in the commit message](../project/issues/crosslinking_issues.md#from-commit-messages).)
- **Lead Time for Changes**: Median duration between merge request merge and deployment to a production environment for all MRs deployed in the given time period. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/340150) in GitLab 14.5 (Ultimate only).
## Deployment metrics **(PREMIUM)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/337256) in GitLab 11.3.
Value Stream Analytics exposes two deployment-related metrics near the top of the page:
- **Deploys:** The number of successful deployments in the date range.
- **Deployment Frequency:** The average number of successful deployments per day in the date range.
The deployment metrics calculation uses the same method as the
[group-level Value Stream Analytics](../group/value_stream_analytics/index.md#how-metrics-are-measured).
Both are based on the [DORA API](../../api/dora/metrics.md#devops-research-and-assessment-dora-key-metrics-api).
## How the stages are measured

View File

@ -25,7 +25,8 @@ and steps below.
(`*.gitlab.io`, for GitLab.com).
- A custom domain name `example.com` or subdomain `subdomain.example.com`.
- Access to your domain's server control panel to set up DNS records:
- A DNS A or CNAME record pointing your domain to GitLab Pages server.
- A DNS record (`A`, `ALIAS`, or `CNAME`) pointing your domain to the GitLab Pages server. If
there are multiple DNS records on that name, you must use an `ALIAS` record.
- A DNS `TXT` record to verify your domain's ownership.
- Set either `external_http` or `external_https` in `/etc/gitlab/gitlab.rb` to the IP and port of
your [Pages Daemon](../../../../administration/pages/index.md#overview).
@ -109,15 +110,15 @@ as it most likely doesn't work if you set an [`MX` record](dns_concepts.md#mx-re
Subdomains (`subdomain.example.com`) require:
- A DNS [CNAME record](dns_concepts.md#cname-record) pointing your subdomain to the Pages server.
- A DNS [`ALIAS` or `CNAME` record](dns_concepts.md#cname-record) pointing your subdomain to the Pages server.
- A DNS [TXT record](dns_concepts.md#txt-record) to verify your domain's ownership.
| From | DNS Record | To |
| ------------------------------------------------------- | ---------- | --------------------- |
| `subdomain.example.com` | CNAME | `namespace.gitlab.io` |
| `_gitlab-pages-verification-code.subdomain.example.com` | `TXT` | `gitlab-pages-verification-code=00112233445566778899aabbccddeeff` |
| From | DNS Record | To |
|:--------------------------------------------------------|:----------------|:----------------------|
| `subdomain.example.com` | `ALIAS`/`CNAME` | `namespace.gitlab.io` |
| `_gitlab-pages-verification-code.subdomain.example.com` | `TXT` | `gitlab-pages-verification-code=00112233445566778899aabbccddeeff` |
Note that, whether it's a user or a project website, the `CNAME`
Note that, whether it's a user or a project website, the DNS record
should point to your Pages domain (`namespace.gitlab.io`),
without any `/project-name`.
@ -131,7 +132,7 @@ domain to the same website, for instance, `example.com` and `www.example.com`.
They require:
- A DNS A record for the domain.
- A DNS CNAME record for the subdomain.
- A DNS `ALIAS`/`CNAME` record for the subdomain.
- A DNS `TXT` record for each.
| From | DNS Record | To |

View File

@ -123,7 +123,7 @@ module BulkImports
def with_error_handling
response = yield
raise ::BulkImports::NetworkError.new(response: response) unless response.success?
raise ::BulkImports::NetworkError.new("Unsuccessful response #{response.code} from #{response.request.path.path}", response: response) unless response.success?
response
rescue *Gitlab::HTTP::HTTP_ERRORS => e

View File

@ -7,7 +7,9 @@ module BulkImports
include Pipeline
def extract(*)
BulkImports::Pipeline::ExtractedData.new(data: { url: url_from_parent_path(context.entity.source_full_path) })
url = url_from_parent_path(context.entity.source_full_path) if source_wiki_exists?
BulkImports::Pipeline::ExtractedData.new(data: { url: url })
end
def transform(_, data)
@ -15,14 +17,15 @@ module BulkImports
end
def load(context, data)
return unless context.portable.wiki
return unless data&.dig(:url)
wiki = context.portable.wiki
url = data[:url].sub("://", "://oauth2:#{context.configuration.access_token}@")
Gitlab::UrlBlocker.validate!(url, allow_local_network: allow_local_requests?, allow_localhost: allow_local_requests?)
context.portable.wiki.ensure_repository
context.portable.wiki.repository.fetch_as_mirror(url)
wiki.ensure_repository
wiki.repository.fetch_as_mirror(url)
end
private
@ -36,6 +39,16 @@ module BulkImports
def allow_local_requests?
Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
end
def source_wiki_exists?
wikis = client.get(context.entity.wikis_url_path).parsed_response
wikis.any?
end
def client
BulkImports::Clients::HTTP.new(url: context.configuration.url, token: context.configuration.access_token)
end
end
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
module BulkImports
module Projects
module Pipelines
class AutoDevopsPipeline
include NdjsonPipeline
relation_name 'auto_devops'
extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: relation
end
end
end
end

View File

@ -67,6 +67,10 @@ module BulkImports
pipeline: BulkImports::Common::Pipelines::UploadsPipeline,
stage: 5
},
auto_devops: {
pipeline: BulkImports::Projects::Pipelines::AutoDevopsPipeline,
stage: 5
},
finisher: {
pipeline: BulkImports::Common::Pipelines::EntityFinisher,
stage: 6

View File

@ -1,25 +1,20 @@
import { mount, shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { hasHorizontalOverflow } from '~/lib/utils/dom_utils';
import TooltipOnTruncate from '~/vue_shared/components/tooltip_on_truncate.vue';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
const DUMMY_TEXT = 'lorem-ipsum-dolar-sit-amit-consectur-adipiscing-elit-sed-do';
const MOCK_TITLE = 'lorem-ipsum-dolar-sit-amit-consectur-adipiscing-elit-sed-do';
const SHORT_TITLE = 'my-text';
const createChildElement = () => `<a href="#">${DUMMY_TEXT}</a>`;
const createChildElement = () => `<a href="#">${MOCK_TITLE}</a>`;
jest.mock('~/lib/utils/dom_utils', () => ({
hasHorizontalOverflow: jest.fn(() => {
...jest.requireActual('~/lib/utils/dom_utils'),
hasHorizontalOverflow: jest.fn().mockImplementation(() => {
throw new Error('this needs to be mocked');
}),
}));
jest.mock('@gitlab/ui', () => ({
GlTooltipDirective: {
bind(el, binding) {
el.classList.add('gl-tooltip');
el.setAttribute('data-original-title', el.title);
el.dataset.placement = binding.value.placement;
},
},
}));
describe('TooltipOnTruncate component', () => {
let wrapper;
@ -27,15 +22,31 @@ describe('TooltipOnTruncate component', () => {
const createComponent = ({ propsData, ...options } = {}) => {
wrapper = shallowMount(TooltipOnTruncate, {
attachTo: document.body,
propsData: {
title: MOCK_TITLE,
...propsData,
},
slots: {
default: [MOCK_TITLE],
},
directives: {
GlTooltip: createMockDirective(),
GlResizeObserver: createMockDirective(),
},
...options,
});
};
const createWrappedComponent = ({ propsData, ...options }) => {
const WrappedTooltipOnTruncate = {
...TooltipOnTruncate,
directives: {
...TooltipOnTruncate.directives,
GlTooltip: createMockDirective(),
GlResizeObserver: createMockDirective(),
},
};
// set a parent around the tested component
parent = mount(
{
@ -43,74 +54,85 @@ describe('TooltipOnTruncate component', () => {
title: { default: '' },
},
template: `
<TooltipOnTruncate :title="title" truncate-target="child">
<div>{{title}}</div>
</TooltipOnTruncate>
<TooltipOnTruncate :title="title" truncate-target="child">
<div>{{title}}</div>
</TooltipOnTruncate>
`,
components: {
TooltipOnTruncate,
TooltipOnTruncate: WrappedTooltipOnTruncate,
},
},
{
propsData: { ...propsData },
attachTo: document.body,
...options,
},
);
wrapper = parent.find(TooltipOnTruncate);
wrapper = parent.find(WrappedTooltipOnTruncate);
};
const hasTooltip = () => wrapper.classes('gl-tooltip');
const getTooltipValue = () => getBinding(wrapper.element, 'gl-tooltip')?.value;
const resize = async ({ truncate }) => {
hasHorizontalOverflow.mockReturnValueOnce(truncate);
getBinding(wrapper.element, 'gl-resize-observer').value();
await nextTick();
};
afterEach(() => {
wrapper.destroy();
});
describe('with default target', () => {
it('renders tooltip if truncated', () => {
describe('when truncated', () => {
beforeEach(async () => {
hasHorizontalOverflow.mockReturnValueOnce(true);
createComponent({
propsData: {
title: DUMMY_TEXT,
},
slots: {
default: [DUMMY_TEXT],
},
});
return wrapper.vm.$nextTick().then(() => {
expect(hasHorizontalOverflow).toHaveBeenCalledWith(wrapper.element);
expect(hasTooltip()).toBe(true);
expect(wrapper.attributes('data-original-title')).toEqual(DUMMY_TEXT);
expect(wrapper.attributes('data-placement')).toEqual('top');
});
createComponent();
});
it('does not render tooltip if normal', () => {
it('renders tooltip', async () => {
expect(hasHorizontalOverflow).toHaveBeenLastCalledWith(wrapper.element);
expect(getTooltipValue()).toMatchObject({
title: MOCK_TITLE,
placement: 'top',
disabled: false,
});
expect(wrapper.classes('js-show-tooltip')).toBe(true);
});
});
describe('with default target', () => {
beforeEach(async () => {
hasHorizontalOverflow.mockReturnValueOnce(false);
createComponent({
propsData: {
title: DUMMY_TEXT,
},
slots: {
default: [DUMMY_TEXT],
},
createComponent();
});
it('does not render tooltip if not truncated', () => {
expect(hasHorizontalOverflow).toHaveBeenLastCalledWith(wrapper.element);
expect(getTooltipValue()).toMatchObject({
disabled: true,
});
expect(wrapper.classes('js-show-tooltip')).toBe(false);
});
it('renders tooltip on resize', async () => {
await resize({ truncate: true });
expect(getTooltipValue()).toMatchObject({
disabled: false,
});
return wrapper.vm.$nextTick().then(() => {
expect(hasHorizontalOverflow).toHaveBeenCalledWith(wrapper.element);
expect(hasTooltip()).toBe(false);
await resize({ truncate: false });
expect(getTooltipValue()).toMatchObject({
disabled: true,
});
});
});
describe('with child target', () => {
it('renders tooltip if truncated', () => {
it('renders tooltip if truncated', async () => {
hasHorizontalOverflow.mockReturnValueOnce(true);
createComponent({
propsData: {
title: DUMMY_TEXT,
truncateTarget: 'child',
},
slots: {
@ -118,13 +140,18 @@ describe('TooltipOnTruncate component', () => {
},
});
return wrapper.vm.$nextTick().then(() => {
expect(hasHorizontalOverflow).toHaveBeenCalledWith(wrapper.element.childNodes[0]);
expect(hasTooltip()).toBe(true);
expect(hasHorizontalOverflow).toHaveBeenLastCalledWith(wrapper.element.childNodes[0]);
await nextTick();
expect(getTooltipValue()).toMatchObject({
title: MOCK_TITLE,
placement: 'top',
disabled: false,
});
});
it('does not render tooltip if normal', () => {
it('does not render tooltip if normal', async () => {
hasHorizontalOverflow.mockReturnValueOnce(false);
createComponent({
propsData: {
@ -135,19 +162,21 @@ describe('TooltipOnTruncate component', () => {
},
});
return wrapper.vm.$nextTick().then(() => {
expect(hasHorizontalOverflow).toHaveBeenCalledWith(wrapper.element.childNodes[0]);
expect(hasTooltip()).toBe(false);
expect(hasHorizontalOverflow).toHaveBeenLastCalledWith(wrapper.element.childNodes[0]);
await nextTick();
expect(getTooltipValue()).toMatchObject({
disabled: true,
});
});
});
describe('with fn target', () => {
it('renders tooltip if truncated', () => {
it('renders tooltip if truncated', async () => {
hasHorizontalOverflow.mockReturnValueOnce(true);
createComponent({
propsData: {
title: DUMMY_TEXT,
truncateTarget: (el) => el.childNodes[1],
},
slots: {
@ -155,93 +184,97 @@ describe('TooltipOnTruncate component', () => {
},
});
return wrapper.vm.$nextTick().then(() => {
expect(hasHorizontalOverflow).toHaveBeenCalledWith(wrapper.element.childNodes[1]);
expect(hasTooltip()).toBe(true);
expect(hasHorizontalOverflow).toHaveBeenLastCalledWith(wrapper.element.childNodes[1]);
await nextTick();
expect(getTooltipValue()).toMatchObject({
disabled: false,
});
});
});
describe('placement', () => {
it('sets data-placement when tooltip is rendered', () => {
const placement = 'bottom';
it('sets placement when tooltip is rendered', () => {
const mockPlacement = 'bottom';
hasHorizontalOverflow.mockReturnValueOnce(true);
createComponent({
propsData: {
placement,
},
slots: {
default: DUMMY_TEXT,
placement: mockPlacement,
},
});
return wrapper.vm.$nextTick().then(() => {
expect(hasTooltip()).toBe(true);
expect(wrapper.attributes('data-placement')).toEqual(placement);
expect(hasHorizontalOverflow).toHaveBeenLastCalledWith(wrapper.element);
expect(getTooltipValue()).toMatchObject({
placement: mockPlacement,
});
});
});
describe('updates when title and slot content changes', () => {
describe('is initialized with a long text', () => {
beforeEach(() => {
beforeEach(async () => {
hasHorizontalOverflow.mockReturnValueOnce(true);
createWrappedComponent({
propsData: { title: DUMMY_TEXT },
propsData: { title: MOCK_TITLE },
});
return parent.vm.$nextTick();
await nextTick();
});
it('renders tooltip', () => {
expect(hasTooltip()).toBe(true);
expect(wrapper.attributes('data-original-title')).toEqual(DUMMY_TEXT);
expect(wrapper.attributes('data-placement')).toEqual('top');
expect(getTooltipValue()).toMatchObject({
title: MOCK_TITLE,
placement: 'top',
disabled: false,
});
});
it('does not render tooltip after updated to a short text', () => {
it('does not render tooltip after updated to a short text', async () => {
hasHorizontalOverflow.mockReturnValueOnce(false);
parent.setProps({
title: 'new-text',
title: SHORT_TITLE,
});
return wrapper.vm
.$nextTick()
.then(() => wrapper.vm.$nextTick()) // wait 2 times to get an updated slot
.then(() => {
expect(hasTooltip()).toBe(false);
});
await nextTick();
await nextTick(); // wait 2 times to get an updated slot
expect(getTooltipValue()).toMatchObject({
title: SHORT_TITLE,
disabled: true,
});
});
});
describe('is initialized with a short text', () => {
beforeEach(() => {
describe('is initialized with a short text that does not overflow', () => {
beforeEach(async () => {
hasHorizontalOverflow.mockReturnValueOnce(false);
createWrappedComponent({
propsData: { title: DUMMY_TEXT },
propsData: { title: MOCK_TITLE },
});
return wrapper.vm.$nextTick();
await nextTick();
});
it('does not render tooltip', () => {
expect(hasTooltip()).toBe(false);
expect(getTooltipValue()).toMatchObject({
title: MOCK_TITLE,
disabled: true,
});
});
it('renders tooltip after text is updated', () => {
it('renders tooltip after text is updated', async () => {
hasHorizontalOverflow.mockReturnValueOnce(true);
const newText = 'new-text';
parent.setProps({
title: newText,
title: SHORT_TITLE,
});
return wrapper.vm
.$nextTick()
.then(() => wrapper.vm.$nextTick()) // wait 2 times to get an updated slot
.then(() => {
expect(hasTooltip()).toBe(true);
expect(wrapper.attributes('data-original-title')).toEqual(newText);
expect(wrapper.attributes('data-placement')).toEqual('top');
});
await nextTick();
await nextTick(); // wait 2 times to get an updated slot
expect(getTooltipValue()).toMatchObject({
title: SHORT_TITLE,
disabled: false,
});
});
});
});

View File

@ -0,0 +1,34 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Resolvers::Clusters::AgentActivityEventsResolver do
include GraphqlHelpers
it { expect(described_class.type).to eq(Types::Clusters::AgentActivityEventType) }
it { expect(described_class.null).to be_truthy }
describe '#resolve' do
let_it_be(:agent) { create(:cluster_agent) }
let(:user) { create(:user, maintainer_projects: [agent.project]) }
let(:ctx) { { current_user: user } }
let(:events) { double }
before do
allow(agent).to receive(:activity_events).and_return(events)
end
subject { resolve(described_class, obj: agent, ctx: ctx) }
it 'returns events associated with the agent' do
expect(subject).to eq(events)
end
context 'user does not have permission' do
let(:user) { create(:user, developer_projects: [agent.project]) }
it { is_expected.to be_empty }
end
end
end

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe GitlabSchema.types['ClusterAgentActivityEvent'] do
let(:fields) { %i[recorded_at kind level user agent_token] }
it { expect(described_class.graphql_name).to eq('ClusterAgentActivityEvent') }
it { expect(described_class).to require_graphql_authorizations(:admin_cluster) }
it { expect(described_class).to have_graphql_fields(fields) }
end

View File

@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['ClusterAgent'] do
let(:fields) { %i[created_at created_by_user id name project updated_at tokens web_path connections] }
let(:fields) { %i[created_at created_by_user id name project updated_at tokens web_path connections activity_events] }
it { expect(described_class.graphql_name).to eq('ClusterAgent') }

View File

@ -38,11 +38,11 @@ RSpec.describe BulkImports::Clients::HTTP do
context 'when response is not success' do
it 'raises BulkImports::Error' do
response_double = double(code: 503, success?: false)
response_double = double(code: 503, success?: false, request: double(path: double(path: '/test')))
allow(Gitlab::HTTP).to receive(method).and_return(response_double)
expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError)
expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError, 'Unsuccessful response 503 from /test')
end
end
end

View File

@ -0,0 +1,52 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Projects::Pipelines::AutoDevopsPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:entity) do
create(
:bulk_import_entity,
:project_entity,
project: project,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'My Destination Project',
destination_namespace: group.full_path
)
end
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:auto_devops) do
{
'created_at' => '2016-06-13T15:02:47.967Z',
'updated_at' => '2016-06-14T15:02:47.967Z',
'enabled' => true,
'deploy_strategy' => 'continuous'
}
end
subject(:pipeline) { described_class.new(context) }
describe '#run' do
it 'imports auto devops options into destination project' do
group.add_owner(user)
allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [auto_devops]))
end
pipeline.run
expect(project.auto_devops.enabled).to be_truthy
expect(project.auto_devops.deploy_strategy).to eq('continuous')
expect(project.auto_devops.created_at).to eq('2016-06-13T15:02:47.967Z')
expect(project.auto_devops.updated_at).to eq('2016-06-14T15:02:47.967Z')
end
end
end

View File

@ -22,6 +22,7 @@ RSpec.describe BulkImports::Projects::Stage do
[4, BulkImports::Projects::Pipelines::ProjectFeaturePipeline],
[5, BulkImports::Common::Pipelines::WikiPipeline],
[5, BulkImports::Common::Pipelines::UploadsPipeline],
[5, BulkImports::Projects::Pipelines::AutoDevopsPipeline],
[6, BulkImports::Common::Pipelines::EntityFinisher]
]
end

View File

@ -282,4 +282,20 @@ RSpec.describe BulkImports::Entity, type: :model do
expect(project_entity.group?).to eq(false)
end
end
describe '#base_resource_url_path' do
it 'returns base entity url path' do
entity = build(:bulk_import_entity)
expect(entity.base_resource_url_path).to eq("/groups/#{entity.encoded_source_full_path}")
end
end
describe '#wikis_url_path' do
it 'returns entity wikis url path' do
entity = build(:bulk_import_entity)
expect(entity.wikis_url_path).to eq("/groups/#{entity.encoded_source_full_path}/wikis")
end
end
end

View File

@ -6246,19 +6246,7 @@ RSpec.describe User do
subject { user.send(:groups_with_developer_maintainer_project_access) }
shared_examples 'groups_with_developer_maintainer_project_access examples' do
specify { is_expected.to contain_exactly(developer_group2) }
end
it_behaves_like 'groups_with_developer_maintainer_project_access examples'
context 'when feature flag :linear_user_groups_with_developer_maintainer_project_access is disabled' do
before do
stub_feature_flags(linear_user_groups_with_developer_maintainer_project_access: false)
end
it_behaves_like 'groups_with_developer_maintainer_project_access examples'
end
specify { is_expected.to contain_exactly(developer_group2) }
end
describe '.get_ids_by_username' do

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Clusters::Agents::ActivityEventPolicy do
let_it_be(:event) { create(:agent_activity_event) }
let(:user) { create(:user) }
let(:policy) { described_class.new(user, event) }
let(:project) { event.agent.project }
describe 'rules' do
context 'developer' do
before do
project.add_developer(user)
end
it { expect(policy).to be_disallowed :admin_cluster }
it { expect(policy).to be_disallowed :read_cluster }
end
context 'maintainer' do
before do
project.add_maintainer(user)
end
it { expect(policy).to be_allowed :admin_cluster }
it { expect(policy).to be_allowed :read_cluster }
end
end
end

View File

@ -7,7 +7,7 @@ RSpec.describe 'Project.cluster_agents' do
let_it_be(:project) { create(:project, :public) }
let_it_be(:current_user) { create(:user, maintainer_projects: [project]) }
let_it_be(:agents) { create_list(:cluster_agent, 5, project: project) }
let_it_be(:agents) { create_list(:cluster_agent, 3, project: project) }
let(:first) { var('Int') }
let(:cluster_agents_fields) { nil }
@ -105,4 +105,37 @@ RSpec.describe 'Project.cluster_agents' do
})
end
end
context 'selecting activity events' do
let_it_be(:token) { create(:cluster_agent_token, agent: agents.first) }
let_it_be(:event) { create(:agent_activity_event, agent: agents.first, agent_token: token, user: current_user) }
let(:cluster_agents_fields) { [:id, query_nodes(:activity_events, of: 'ClusterAgentActivityEvent', max_depth: 2)] }
it 'retrieves activity event details' do
post_graphql(query, current_user: current_user)
response = graphql_data_at(:project, :cluster_agents, :nodes, :activity_events, :nodes).first
expect(response).to include({
'kind' => event.kind,
'level' => event.level,
'recordedAt' => event.recorded_at.iso8601,
'agentToken' => hash_including('name' => token.name),
'user' => hash_including('name' => current_user.name)
})
end
it 'preloads associations to prevent N+1 queries' do
user = create(:user)
token = create(:cluster_agent_token, agent: agents.second)
create(:agent_activity_event, agent: agents.second, agent_token: token, user: user)
post_graphql(query, current_user: current_user)
expect do
post_graphql(query, current_user: current_user)
end.to issue_same_number_of_queries_as { post_graphql(query, current_user: current_user, variables: [first.with(1)]) }
end
end
end

View File

@ -24,6 +24,8 @@ RSpec.describe Ci::JobArtifacts::CreateService do
def file_to_upload(path, params = {})
upload = Tempfile.new('upload')
FileUtils.copy(path, upload.path)
# This is a workaround for https://github.com/docker/for-linux/issues/1015
FileUtils.touch(upload.path)
UploadedFile.new(upload.path, **params)
end

View File

@ -80,6 +80,27 @@ RSpec.describe MergeRequests::RebaseService do
end
end
context 'with a pre-receive failure' do
let(:pre_receive_error) { "Commit message does not follow the pattern 'ACME'" }
let(:merge_error) { "Something went wrong during the rebase pre-receive hook: #{pre_receive_error}." }
before do
allow(repository).to receive(:gitaly_operation_client).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{pre_receive_error}")
end
it 'saves a specific message' do
subject.execute(merge_request)
expect(merge_request.reload.merge_error).to eq merge_error
end
it 'returns an error' do
expect(service.execute(merge_request)).to match(
status: :error,
message: merge_error)
end
end
context 'with git command failure' do
before do
allow(repository).to receive(:gitaly_operation_client).and_raise(Gitlab::Git::Repository::GitError, 'Something went wrong')

View File

@ -480,3 +480,14 @@ Rugged::Settings['search_path_global'] = Rails.root.join('tmp/tests').to_s
# Initialize FactoryDefault to use create_default helper
TestProf::FactoryDefault.init
module TouchRackUploadedFile
def initialize_from_file_path(path)
super
# This is a no-op workaround for https://github.com/docker/for-linux/issues/1015
File.utime @tempfile.atime, @tempfile.mtime, @tempfile.path # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
end
Rack::Test::UploadedFile.prepend(TouchRackUploadedFile)

View File

@ -9,16 +9,18 @@ RSpec.shared_examples 'wiki pipeline imports a wiki for an entity' do
let(:extracted_data) { BulkImports::Pipeline::ExtractedData.new(data: {}) }
context 'successfully imports wiki for an entity' do
subject { described_class.new(context) }
subject { described_class.new(context) }
before do
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return(extracted_data)
end
before do
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return(extracted_data)
end
end
context 'when wiki exists' do
it 'imports new wiki into destination project' do
expect(subject).to receive(:source_wiki_exists?).and_return(true)
expect_next_instance_of(Gitlab::GitalyClient::RepositoryService) do |repository_service|
url = "https://oauth2:token@gitlab.example/#{entity.source_full_path}.wiki.git"
expect(repository_service).to receive(:fetch_remote).with(url, any_args).and_return 0
@ -27,5 +29,16 @@ RSpec.shared_examples 'wiki pipeline imports a wiki for an entity' do
subject.run
end
end
context 'when wiki does not exist' do
it 'does not import wiki' do
expect(subject).to receive(:source_wiki_exists?).and_return(false)
expect(parent.wiki).not_to receive(:ensure_repository)
expect(parent.wiki.repository).not_to receive(:fetch_as_mirror)
expect { subject.run }.not_to raise_error
end
end
end
end