Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-11-07 18:08:08 +00:00
parent fa4473a487
commit 59f37a9943
46 changed files with 651 additions and 193 deletions

View File

@ -269,7 +269,6 @@ RSpec/FactoryBot/AvoidCreate:
- 'spec/serializers/project_serializer_spec.rb'
- 'spec/serializers/prometheus_alert_entity_spec.rb'
- 'spec/serializers/release_serializer_spec.rb'
- 'spec/serializers/remote_mirror_entity_spec.rb'
- 'spec/serializers/review_app_setup_entity_spec.rb'
- 'spec/serializers/runner_entity_spec.rb'
- 'spec/serializers/serverless/domain_entity_spec.rb'

View File

@ -1 +1 @@
0f35939596221fc096d873473ef0d6fc3fd09440
ba02c22370d12ccf8ec464497603394effbaf8b0

View File

@ -103,7 +103,9 @@
"workflow": {
"type": "object",
"properties": {
"name": { "$ref": "#/definitions/workflowName" },
"name": {
"$ref": "#/definitions/workflowName"
},
"rules": {
"type": "array",
"items": {
@ -861,98 +863,74 @@
"markdownDescription": "Describes the conditions for when to run the job. Defaults to 'on_success'. [Learn More](https://docs.gitlab.com/ee/ci/yaml/#when).",
"default": "on_success",
"type": "string",
"enum": ["on_success", "on_failure", "always", "never", "manual", "delayed"]
"enum": [
"on_success",
"on_failure",
"always",
"never",
"manual",
"delayed"
]
},
"cache": {
"markdownDescription": "Use `cache` to specify a list of files and directories to cache between jobs. You can only use paths that are in the local working copy. [Learn More](https://docs.gitlab.com/ee/ci/yaml/#cache)",
"properties": {
"when": {
"markdownDescription": "Defines when to save the cache, based on the status of the job. [Learn More](https://docs.gitlab.com/ee/ci/yaml/#cachewhen).",
"default": "on_success",
"oneOf": [
{
"enum": [
"on_success"
],
"description": "Save the cache only when the job succeeds."
},
{
"enum": [
"on_failure"
],
"description": "Save the cache only when the job fails. "
},
{
"enum": [
"always"
],
"description": "Always save the cache. "
}
]
}
}
},
"cache_entry": {
"type": "object",
"description": "Specify files or directories to cache between jobs. Can be set globally or per job.",
"additionalProperties": false,
"properties": {
"paths": {
"type": "array",
"description": "List of files or paths to cache.",
"items": {
"type": "string"
}
},
"key": {
"markdownDescription": "Use the `cache:key` keyword to give each cache a unique identifying key. All jobs that use the same cache key use the same cache, including in different pipelines. Must be used with `cache:path`, or nothing is cached. [Learn More](https://docs.gitlab.com/ee/ci/yaml/#cachekey).",
"oneOf": [
{
"type": "string",
"description": "Unique cache ID, to allow e.g. specific branch or job cache. Environment variables can be used to set up unique keys (e.g. \"$CI_COMMIT_REF_SLUG\" for per branch cache)."
"pattern": "^(?!.*\\/)^(.*[^.]+.*)$"
},
{
"type": "object",
"description": "When you include cache:key:files, you must also list the project files that will be used to generate the key, up to a maximum of two files. The cache key will be a SHA checksum computed from the most recent commits (up to two, if two files are listed) that changed the given files.",
"properties": {
"files": {
"markdownDescription": "Use the `cache:key:files` keyword to generate a new key when one or two specific files change. [Learn More](https://docs.gitlab.com/ee/ci/yaml/#cachekeyfiles)",
"type": "array",
"items": {
"type": "string"
},
"minItems": 1,
"maxItems": 2
},
"prefix": {
"markdownDescription": "Use `cache:key:prefix` to combine a prefix with the SHA computed for `cache:key:files`. [Learn More](https://docs.gitlab.com/ee/ci/yaml/#cachekeyprefix)",
"type": "string"
}
}
}
]
},
"untracked": {
"type": "boolean",
"description": "Set to `true` to cache untracked files.",
"default": false
"paths": {
"type": "array",
"markdownDescription": "Use the `cache:paths` keyword to choose which files or directories to cache. [Learn More](https://docs.gitlab.com/ee/ci/yaml/#cachepaths)",
"items": {
"type": "string"
}
},
"policy": {
"type": "string",
"description": "Determines the strategy for downloading and updating the cache.",
"markdownDescription": "Determines the strategy for downloading and updating the cache. [Learn More](https://docs.gitlab.com/ee/ci/yaml/#cachepolicy)",
"default": "pull-push",
"oneOf": [
{
"enum": [
"pull"
],
"description": "Pull will download cache but skip uploading after job completes."
},
{
"enum": [
"push"
],
"description": "Push will skip downloading cache and always recreate cache after job completes."
},
{
"enum": [
"pull-push"
],
"description": "Pull-push will both download cache at job start and upload cache on job success."
}
"enum": [
"pull",
"push",
"pull-push"
]
},
"untracked": {
"type": "boolean",
"markdownDescription": "Use `untracked: true` to cache all files that are untracked in your Git repository. [Learn More](https://docs.gitlab.com/ee/ci/yaml/#cacheuntracked)",
"default": false
},
"when": {
"markdownDescription": "Defines when to save the cache, based on the status of the job. [Learn More](https://docs.gitlab.com/ee/ci/yaml/#cachewhen).",
"default": "on_success",
"enum": [
"on_success",
"on_failure",
"always"
]
}
}
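
The new `pattern` on the string form of `cache:key` (`^(?!.*\/)^(.*[^.]+.*)$`) rejects keys that contain a `/` or consist only of dots, which is exactly what the new positive and negative YAML fixtures later in this commit exercise. A minimal sketch in plain Ruby, assuming Ruby's `Regexp` behaves the same as the schema's ECMA regex dialect for these simple inputs:

```ruby
# Sketch: check sample cache keys against the cache:key pattern added to the schema.
# Assumption: Ruby regex semantics match the schema's ECMA dialect for these cases.
KEY_PATTERN = /^(?!.*\/)^(.*[^.]+.*)$/

valid   = ['random-string', '.random-string', 'random-..string', '$CI_COMMIT_REF_SLUG']
invalid = ['binaries-ca/che', '.', '..']

valid.each   { |key| puts "#{key.inspect} => #{key.match?(KEY_PATTERN)}" } # all true
invalid.each { |key| puts "#{key.inspect} => #{key.match?(KEY_PATTERN)}" } # all false: slashes and dot-only keys are rejected
```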

View File

@ -13,6 +13,7 @@ import StatusIcon from './status_icon.vue';
const FETCH_TYPE_COLLAPSED = 'collapsed';
const FETCH_TYPE_EXPANDED = 'expanded';
const WIDGET_PREFIX = 'Widget';
export default {
components: {
@ -89,6 +90,8 @@ export default {
widgetName: {
type: String,
required: true,
// see https://docs.gitlab.com/ee/development/fe_guide/merge_request_widget_extensions.html#add-new-widgets
validator: (val) => val.startsWith(WIDGET_PREFIX),
},
telemetry: {
type: Boolean,

View File

@ -6,6 +6,7 @@ class ConfirmationsController < Devise::ConfirmationsController
include OneTrustCSP
include GoogleAnalyticsCSP
skip_before_action :required_signup_info
prepend_before_action :check_recaptcha, only: :create
before_action :load_recaptcha, only: :new

View File

@ -183,7 +183,11 @@ module Ci
end
event :succeed do
transition any - [:success] => :success
# A successful pipeline can also be retried; for example, a pipeline with a failed manual job.
# When retrying the pipeline, the status of the pipeline is not changed because the failed
# manual job transitions to the `manual` status.
# More info: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/98967#note_1144718316
transition any => :success
end
event :cancel do
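
The switch from `transition any - [:success] => :success` to `transition any => :success` allows a "loopback" transition, so retrying a successful pipeline (one whose only failure was an allowed-to-fail manual job) can fire `succeed` again without being rejected. A toy sketch using the `state_machines` gem DSL, which the hunk above appears to use; the class name and attribute are illustrative, not GitLab's actual model:

```ruby
# Toy example: `transition any => :success` permits a success -> success loopback,
# while `any - [:success] => :success` would reject firing :succeed from :success.
require 'state_machines'

class ToyPipeline
  state_machine :status, initial: :pending do
    event :succeed do
      transition any => :success
    end
  end
end

pipeline = ToyPipeline.new
pipeline.succeed          # pending -> success
pipeline.succeed          # success -> success: allowed by `any => :success`
puts pipeline.status      # => "success"
```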

View File

@ -1012,10 +1012,6 @@ class Group < Namespace
Arel::Nodes::SqlLiteral.new(column_alias))
end
def self.groups_including_descendants_by(group_ids)
Group.where(id: group_ids).self_and_descendants
end
def disable_shared_runners!
update!(
shared_runners_enabled: false,

View File

@ -94,7 +94,7 @@ class Todo < ApplicationRecord
#
# Returns an `ActiveRecord::Relation`.
def for_group_ids_and_descendants(group_ids)
groups = Group.groups_including_descendants_by(group_ids)
groups = Group.where(id: group_ids).self_and_descendants
from_union(
[

View File

@ -2,7 +2,6 @@
- content_for :page_specific_javascripts do
= render "layouts/google_tag_manager_head"
= render "layouts/one_trust"
= render "layouts/bizible"
= render "layouts/google_tag_manager_body"
#signin-container

View File

@ -1,11 +1,7 @@
- milestone_url = @milestone.project_milestone? ? project_milestone_path(@project, @milestone) : group_milestone_path(@group, @milestone)
%button.js-delete-milestone-button.btn.gl-button.btn-grouped.btn-danger{ data: { milestone_id: @milestone.id,
milestone_title: markdown_field(@milestone, :title),
milestone_url: milestone_url,
milestone_issue_count: @milestone.issues.count,
milestone_merge_request_count: @milestone.merge_requests.count },
disabled: true }
= render Pajamas::ButtonComponent.new(variant: :danger,
button_options: { class: 'js-delete-milestone-button btn-grouped', data: { milestone_id: @milestone.id, milestone_title: markdown_field(@milestone, :title), milestone_url: milestone_url, milestone_issue_count: @milestone.issues.count, milestone_merge_request_count: @milestone.merge_requests.count }, disabled: true }) do
= gl_loading_icon(inline: true, css_class: "gl-mr-2 js-loading-icon hidden")
= _('Delete')

View File

@ -1,8 +0,0 @@
---
name: cube_api_proxy
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/96250
rollout_issue_url:
milestone: '15.4'
type: development
group: group::product_analytics
default_enabled: false

View File

@ -58,7 +58,7 @@ Example response:
"parent_ids": [
"6104942438c14ec7bd21c6cd5bd995272b3faff6"
],
"web_url": "https://gitlab.example.com/thedude/gitlab-foss/-/commit/ed899a2f4b50b4370feeea94676502b42383c746"
"web_url": "https://gitlab.example.com/janedoe/gitlab-foss/-/commit/ed899a2f4b50b4370feeea94676502b42383c746"
},
{
"id": "6104942438c14ec7bd21c6cd5bd995272b3faff6",
@ -73,7 +73,7 @@ Example response:
"parent_ids": [
"ae1d9fb46aa2b07ee9836d49862ec4e2c46fbbba"
],
"web_url": "https://gitlab.example.com/thedude/gitlab-foss/-/commit/ed899a2f4b50b4370feeea94676502b42383c746"
"web_url": "https://gitlab.example.com/janedoe/gitlab-foss/-/commit/ed899a2f4b50b4370feeea94676502b42383c746"
}
]
```
@ -173,7 +173,7 @@ Example response:
"total": 4
},
"status": null,
"web_url": "https://gitlab.example.com/thedude/gitlab-foss/-/commit/ed899a2f4b50b4370feeea94676502b42383c746"
"web_url": "https://gitlab.example.com/janedoe/gitlab-foss/-/commit/ed899a2f4b50b4370feeea94676502b42383c746"
}
```
@ -253,7 +253,7 @@ Example response:
"total": 25
},
"status": "running",
"web_url": "https://gitlab.example.com/thedude/gitlab-foss/-/commit/6104942438c14ec7bd21c6cd5bd995272b3faff6"
"web_url": "https://gitlab.example.com/janedoe/gitlab-foss/-/commit/6104942438c14ec7bd21c6cd5bd995272b3faff6"
}
```
@ -331,7 +331,7 @@ Example response:
"parent_ids": [
"a738f717824ff53aebad8b090c1b79a14f2bd9e8"
],
"web_url": "https://gitlab.example.com/thedude/gitlab-foss/-/commit/8b090c1b79a14f2bd9e8a738f717824ff53aebad"
"web_url": "https://gitlab.example.com/janedoe/gitlab-foss/-/commit/8b090c1b79a14f2bd9e8a738f717824ff53aebad"
}
```
@ -401,7 +401,7 @@ Example response:
"committer_name":"Administrator",
"committer_email":"admin@example.com",
"committed_date":"2018-11-08T15:55:26.000Z",
"web_url": "https://gitlab.example.com/thedude/gitlab-foss/-/commit/8b090c1b79a14f2bd9e8a738f717824ff53aebad"
"web_url": "https://gitlab.example.com/janedoe/gitlab-foss/-/commit/8b090c1b79a14f2bd9e8a738f717824ff53aebad"
}
```
@ -545,11 +545,11 @@ Example response:
```json
{
"author" : {
"web_url" : "https://gitlab.example.com/thedude",
"avatar_url" : "https://gitlab.example.com/uploads/user/avatar/28/The-Big-Lebowski-400-400.png",
"username" : "thedude",
"web_url" : "https://gitlab.example.com/janedoe",
"avatar_url" : "https://gitlab.example.com/uploads/user/avatar/28/jane-doe-400-400.png",
"username" : "janedoe",
"state" : "active",
"name" : "Jeff Lebowski",
"name" : "Jane Doe",
"id" : 28
},
"created_at" : "2016-01-19T09:44:55.600Z",
@ -590,15 +590,15 @@ Example response:
{
"id": 334686748,
"type": null,
"body": "I'm the Dude, so that's what you call me.",
"body": "Nice piece of code!",
"attachment": null,
"author" : {
"id" : 28,
"name" : "Jeff Lebowski",
"username" : "thedude",
"web_url" : "https://gitlab.example.com/thedude",
"name" : "Jane Doe",
"username" : "janedoe",
"web_url" : "https://gitlab.example.com/janedoe",
"state" : "active",
"avatar_url" : "https://gitlab.example.com/uploads/user/avatar/28/The-Big-Lebowski-400-400.png"
"avatar_url" : "https://gitlab.example.com/uploads/user/avatar/28/jane-doe-400-400.png"
},
"created_at": "2020-04-30T18:48:11.432Z",
"updated_at": "2020-04-30T18:48:11.432Z",
@ -655,16 +655,16 @@ Example response:
"name" : "bundler:audit",
"allow_failure" : true,
"author" : {
"username" : "thedude",
"username" : "janedoe",
"state" : "active",
"web_url" : "https://gitlab.example.com/thedude",
"avatar_url" : "https://gitlab.example.com/uploads/user/avatar/28/The-Big-Lebowski-400-400.png",
"web_url" : "https://gitlab.example.com/janedoe",
"avatar_url" : "https://gitlab.example.com/uploads/user/avatar/28/jane-doe-400-400.png",
"id" : 28,
"name" : "Jeff Lebowski"
"name" : "Jane Doe"
},
"description" : null,
"sha" : "18f3e63d05582537db6d183d9d557be09e1f90c8",
"target_url" : "https://gitlab.example.com/thedude/gitlab-foss/builds/91",
"target_url" : "https://gitlab.example.com/janedoe/gitlab-foss/builds/91",
"finished_at" : null,
"id" : 91,
"ref" : "master"
@ -675,18 +675,18 @@ Example response:
"allow_failure" : false,
"status" : "pending",
"created_at" : "2016-01-19T08:40:25.832Z",
"target_url" : "https://gitlab.example.com/thedude/gitlab-foss/builds/90",
"target_url" : "https://gitlab.example.com/janedoe/gitlab-foss/builds/90",
"id" : 90,
"finished_at" : null,
"ref" : "master",
"sha" : "18f3e63d05582537db6d183d9d557be09e1f90c8",
"author" : {
"id" : 28,
"name" : "Jeff Lebowski",
"username" : "thedude",
"web_url" : "https://gitlab.example.com/thedude",
"name" : "Jane Doe",
"username" : "janedoe",
"web_url" : "https://gitlab.example.com/janedoe",
"state" : "active",
"avatar_url" : "https://gitlab.example.com/uploads/user/avatar/28/The-Big-Lebowski-400-400.png"
"avatar_url" : "https://gitlab.example.com/uploads/user/avatar/28/jane-doe-400-400.png"
},
"description" : null
},
@ -724,10 +724,10 @@ Example response:
```json
{
"author" : {
"web_url" : "https://gitlab.example.com/thedude",
"name" : "Jeff Lebowski",
"avatar_url" : "https://gitlab.example.com/uploads/user/avatar/28/The-Big-Lebowski-400-400.png",
"username" : "thedude",
"web_url" : "https://gitlab.example.com/janedoe",
"name" : "Jane Doe",
"avatar_url" : "https://gitlab.example.com/uploads/user/avatar/28/jane-doe-400-400.png",
"username" : "janedoe",
"state" : "active",
"id" : 28
},
@ -781,10 +781,10 @@ Example response:
"upvotes":0,
"downvotes":0,
"author" : {
"web_url" : "https://gitlab.example.com/thedude",
"name" : "Jeff Lebowski",
"avatar_url" : "https://gitlab.example.com/uploads/user/avatar/28/The-Big-Lebowski-400-400.png",
"username" : "thedude",
"web_url" : "https://gitlab.example.com/janedoe",
"name" : "Jane Doe",
"avatar_url" : "https://gitlab.example.com/uploads/user/avatar/28/jane-doe-400-400.png",
"username" : "janedoe",
"state" : "active",
"id" : 28
},

View File

@ -7582,6 +7582,29 @@ The edge type for [`ExternalAuditEventDestination`](#externalauditeventdestinati
| <a id="externalauditeventdestinationedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="externalauditeventdestinationedgenode"></a>`node` | [`ExternalAuditEventDestination`](#externalauditeventdestination) | The item at the end of the edge. |
#### `ExternalStatusCheckConnection`
The connection type for [`ExternalStatusCheck`](#externalstatuscheck).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="externalstatuscheckconnectionedges"></a>`edges` | [`[ExternalStatusCheckEdge]`](#externalstatuscheckedge) | A list of edges. |
| <a id="externalstatuscheckconnectionnodes"></a>`nodes` | [`[ExternalStatusCheck]`](#externalstatuscheck) | A list of nodes. |
| <a id="externalstatuscheckconnectionpageinfo"></a>`pageInfo` | [`PageInfo!`](#pageinfo) | Information to aid in pagination. |
#### `ExternalStatusCheckEdge`
The edge type for [`ExternalStatusCheck`](#externalstatuscheck).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="externalstatuscheckedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="externalstatuscheckedgenode"></a>`node` | [`ExternalStatusCheck`](#externalstatuscheck) | The item at the end of the edge. |
#### `GroupConnection`
The connection type for [`Group`](#group).
@ -10461,6 +10484,7 @@ List of branch rules for a project, grouped by branch name.
| <a id="branchruleapprovalrules"></a>`approvalRules` | [`ApprovalProjectRuleConnection`](#approvalprojectruleconnection) | Merge request approval rules configured for this branch rule. (see [Connections](#connections)) |
| <a id="branchrulebranchprotection"></a>`branchProtection` | [`BranchProtection!`](#branchprotection) | Branch protections configured for this branch rule. |
| <a id="branchrulecreatedat"></a>`createdAt` | [`Time!`](#time) | Timestamp of when the branch rule was created. |
| <a id="branchruleexternalstatuschecks"></a>`externalStatusChecks` | [`ExternalStatusCheckConnection`](#externalstatuscheckconnection) | External status checks configured for this branch rule. (see [Connections](#connections)) |
| <a id="branchruleisdefault"></a>`isDefault` | [`Boolean!`](#boolean) | Check if this branch rule protects the project's default branch. |
| <a id="branchrulename"></a>`name` | [`String!`](#string) | Branch name, with wildcards, for the branch rules. |
| <a id="branchruleupdatedat"></a>`updatedAt` | [`Time!`](#time) | Timestamp of when the branch rule was last updated. |
@ -12636,6 +12660,18 @@ Represents an external issue.
| <a id="externalissueupdatedat"></a>`updatedAt` | [`Time`](#time) | Timestamp of when the issue was updated. |
| <a id="externalissueweburl"></a>`webUrl` | [`String`](#string) | URL to the issue in the external tracker. |
### `ExternalStatusCheck`
Describes an external status check.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="externalstatuscheckexternalurl"></a>`externalUrl` | [`String!`](#string) | External URL for the status check. |
| <a id="externalstatuscheckid"></a>`id` | [`GlobalID!`](#globalid) | ID of the rule. |
| <a id="externalstatuscheckname"></a>`name` | [`String!`](#string) | Name of the rule. |
### `FileUpload`
#### Fields

View File

@ -0,0 +1,101 @@
---
stage: none
group: Tutorials
info: For assistance with this tutorial, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments-to-other-projects-and-subjects.
---
# Use GitLab to run an agile iteration
To run an agile development iteration in GitLab, you use multiple GitLab features
that work together.
To run an agile iteration from GitLab:
1. Create a group.
1. Create a project.
1. Set up an iteration cadence.
1. Create scoped labels.
1. Create your epics and issues.
1. Create an issue board.
After you've created these core components, you can begin running your iterations.
## Create a group
Iteration cadences are created at the group level, so start by
[creating one](../user/group/manage.md#create-a-group) if you don't have one already.
You use groups to manage one or more related projects at the same time.
You add your users as members in the group, and assign them a role. Roles determine
the [level of permissions](../user/permissions.md) each user has on the projects in the group.
Membership automatically cascades down to all subgroups and projects.
## Create a project
Now [create one or more projects](../user/project/working_with_projects.md#create-a-project) in your group.
There are several different ways to create a project. A project contains
your code and pipelines, as well as the issues used to plan your upcoming code changes.
## Set up an iteration cadence
Before you start creating epics or issues, create an
[iteration cadence](../user/group/iterations/index.md#iteration-cadences).
Iteration cadences contain the individual, sequential iteration timeboxes for planning and reporting
on your issues.
When creating an iteration cadence, you can decide whether to automatically manage the iterations or
disable the automated scheduling to
[manually manage the iterations](../user/group/iterations/index.md#manual-iteration-management).
Similar to membership, iterations cascade down your group, subgroup, and project hierarchy. If your
team works across many groups, subgroups, and projects, create the iteration cadence in the top-most
group shared by all projects that contain the team's issues, as illustrated in the following diagram.
```mermaid
graph TD
Group --> SubgroupA --> Project1
Group --> SubgroupB --> Project2
Group --> IterationCadence
```
## Create scoped labels
You should also [create scoped labels](../user/project/labels.md) in the same group where you created
your iteration cadence. Labels help you
organize your epics, issues, and merge requests, and help you
visualize the flow of issues on boards. For example, you can use scoped labels like
`workflow::planning`, `workflow::ready for development`, `workflow::in development`, and `workflow::complete`
to indicate the status of an issue. You can also use scoped labels to denote the type of issue
or epic, such as `type::feature`, `type::defect`, and `type::maintenance`.
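
If you prefer to script the label setup instead of creating each label in the UI, you can create them in bulk with the group labels REST API. A hedged sketch using only Ruby's standard library; the host, token, and group ID are placeholders, and the endpoint and parameters follow the documented `POST /groups/:id/labels` API:

```ruby
# Sketch: bulk-create the workflow scoped labels through the group labels REST API.
# GITLAB_URL, GITLAB_TOKEN, and the group ID (42) are placeholders you must replace.
require 'net/http'
require 'uri'

uri = URI("#{ENV.fetch('GITLAB_URL', 'https://gitlab.example.com')}/api/v4/groups/42/labels")

['planning', 'ready for development', 'in development', 'complete'].each do |stage|
  request = Net::HTTP::Post.new(uri)
  request['PRIVATE-TOKEN'] = ENV['GITLAB_TOKEN']
  request.set_form_data('name' => "workflow::#{stage}", 'color' => '#428BCA')

  response = Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
    http.request(request)
  end
  puts "workflow::#{stage}: HTTP #{response.code}"
end
```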
## Create your epics and issues
Now you can get started planning your iterations. Start by creating [epics](../user/group/epics/index.md)
in the group where you created your iteration cadence,
then create child [issues](../user/project/issues/index.md) in one or more of your projects.
Add labels to each as needed.
## Create an issue board
[Issue boards](../user/project/issue_board.md) help you plan your upcoming iterations or visualize
the workflow of the iteration currently in progress. List columns can be created based on label,
assignee, iteration, or milestone. You can also filter the board by multiple attributes and group
issues by their epic.
In the group where you created your iteration cadence and labels,
[create an issue board](../user/project/issue_board.md#create-an-issue-board) and name it
"Iteration Planning." Then, create lists for each of your iterations. You can then drag issues from
the "Open" list into iteration lists to schedule them for upcoming iterations.
To visualize the workflow for issues in the current iteration, create another issue board called
"Current Iteration." When you're creating the board:
1. Select **Edit board**.
1. Next to **Iteration**, select **Edit**.
1. From the dropdown list, select **Current iteration**.
1. Select **Save changes**.
The board now shows only issues that are in the current iteration.
You can start adding lists for each of the `workflow::...` labels you created previously.
Now you're ready to start development.

View File

@ -44,6 +44,8 @@ collaborating, and more.
|-------|-------------|--------------------|
| [Create a project from a template](https://gitlab.com/projects/new#create_from_template) | For hands-on learning, select **Sample GitLab Project** and create a project with example issues and merge requests. | **{star}** |
| [Migrate to GitLab](../user/project/import/index.md) | If you are coming to GitLab from another platform, you can import or convert your projects. | |
| [Run an agile iteration](agile_sprint.md) | Use groups, projects, and iterations to run an agile development iteration. | |
| <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [Use GitLab for multi-team planning (SAFe)](https://www.youtube.com/watch?v=KmASFwSap7c) (37m 37s) | A use case of a multi-team organization that uses GitLab with [Scaled Agile Framework (SAFe)](https://about.gitlab.com/solutions/agile-delivery/scaled-agile/). | |
## Use CI/CD pipelines

View File

@ -4,7 +4,7 @@ module Gitlab
module Database
module LoadBalancing
class SidekiqServerMiddleware
JobReplicaNotUpToDate = Class.new(StandardError)
JobReplicaNotUpToDate = Class.new(::Gitlab::SidekiqMiddleware::RetryError)
MINIMUM_DELAY_INTERVAL_SECONDS = 0.8

View File

@ -131,6 +131,9 @@ module Gitlab
end
def before_send(event, hint)
# Don't report Sidekiq retry errors to Sentry
return if hint[:exception].is_a?(Gitlab::SidekiqMiddleware::RetryError)
inject_context_for_exception(event, hint[:exception])
custom_fingerprinting(event, hint[:exception])

View File

@ -167,23 +167,11 @@ module Gitlab
# @return [Boolean, String, Array, Hash, Object]
# @raise [JSON::ParserError]
def handle_legacy_mode!(data)
return data unless feature_table_exists?
return data unless Feature.feature_flags_available?
return data unless Feature.enabled?(:json_wrapper_legacy_mode)
raise parser_error if INVALID_LEGACY_TYPES.any? { |type| data.is_a?(type) }
end
# There are a variety of database errors possible when checking the feature
# flags at the wrong time during boot, e.g. during migrations. We don't care
# about these errors, we just need to ensure that we skip feature detection
# if they will fail.
#
# @return [Boolean]
def feature_table_exists?
Feature::FlipperFeature.table_exists?
rescue StandardError
false
end
end
# GrapeFormatter is a JSON formatter for the Grape API.

View File

@ -10,6 +10,14 @@ module Gitlab
class Controller < ActionController::Base
protect_from_forgery with: :exception, prepend: true
def initialize
super
# Squelch noisy and unnecessary "Can't verify CSRF token authenticity." messages.
# X-Csrf-Token is only one authentication mechanism for API helpers.
self.logger = ActiveSupport::Logger.new(File::NULL)
end
def index
head :ok
end
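
The new `initialize` above points the controller's logger at `File::NULL`, so the "Can't verify CSRF token authenticity." warnings Rails would otherwise log are written to the null device. A tiny sketch of the same trick in isolation, assuming only the `activesupport` gem:

```ruby
# Sketch: an ActiveSupport::Logger backed by File::NULL ("/dev/null" on Unix, "NUL" on Windows)
# accepts messages but writes them nowhere.
require 'active_support'
require 'active_support/logger'

logger = ActiveSupport::Logger.new(File::NULL)
logger.warn("Can't verify CSRF token authenticity.") # silently discarded
puts File::NULL # => "/dev/null" on Unix
```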

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
module Gitlab
module SidekiqMiddleware
# Sidekiq retry error that won't be reported to Sentry
# Use it when a job retry is an expected behavior
RetryError = Class.new(StandardError)
end
end
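
The pieces in the preceding hunks fit together like this: `RetryError` is a marker class, the load-balancing middleware's `JobReplicaNotUpToDate` now inherits from it, and `before_send` drops any exception that `is_a?` it before the event reaches Sentry. A condensed, self-contained sketch of that interplay; everything except `RetryError` itself is simplified from the diff, not the full GitLab classes:

```ruby
# Condensed sketch of the RetryError flow (simplified; not the complete classes from the diff).
module Gitlab
  module SidekiqMiddleware
    # Raised when a job retry is expected behavior; not reported to Sentry.
    RetryError = Class.new(StandardError)
  end
end

# Job-specific errors subclass RetryError, so they are skipped too.
JobReplicaNotUpToDate = Class.new(::Gitlab::SidekiqMiddleware::RetryError)

# Mirrors the before_send hook: return nil (drop the event) for retry errors.
def before_send(event, hint)
  return if hint[:exception].is_a?(Gitlab::SidekiqMiddleware::RetryError)

  event
end

puts before_send({}, { exception: JobReplicaNotUpToDate.new }).inspect # => nil, event dropped
puts before_send({}, { exception: StandardError.new }).inspect         # => {}, event kept
```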

View File

@ -10,17 +10,27 @@ RSpec.describe ConfirmationsController do
end
describe '#show' do
let_it_be_with_reload(:user) { create(:user, :unconfirmed) }
let(:confirmation_token) { user.confirmation_token }
render_views
def perform_request
get :show, params: { confirmation_token: confirmation_token }
end
context 'when signup info is required' do
before do
allow(controller).to receive(:current_user) { user }
user.set_role_required!
end
it 'does not redirect' do
expect(perform_request).not_to redirect_to(users_sign_up_welcome_path)
end
end
context 'user is already confirmed' do
let_it_be_with_reload(:user) { create(:user, :unconfirmed) }
let(:confirmation_token) { user.confirmation_token }
before do
user.confirm
end
@ -57,10 +67,6 @@ RSpec.describe ConfirmationsController do
end
context 'user accesses the link after the expiry of confirmation token has passed' do
let_it_be_with_reload(:user) { create(:user, :unconfirmed) }
let(:confirmation_token) { user.confirmation_token }
before do
allow(Devise).to receive(:confirm_within).and_return(1.day)
end
@ -133,6 +139,17 @@ RSpec.describe ConfirmationsController do
stub_feature_flags(identity_verification: false)
end
context 'when signup info is required' do
before do
allow(controller).to receive(:current_user) { user }
user.set_role_required!
end
it 'does not redirect' do
expect(perform_request).not_to redirect_to(users_sign_up_welcome_path)
end
end
context 'when reCAPTCHA is disabled' do
before do
stub_application_setting(recaptcha_enabled: false)

View File

@ -139,7 +139,7 @@ RSpec.describe 'Dashboard Projects' do
end
describe 'with a pipeline', :clean_gitlab_redis_shared_state do
let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit.sha, ref: project.default_branch) }
let!(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit.sha, ref: project.default_branch) }
before do
# Since the cache isn't updated when a new pipeline is created

View File

@ -35,7 +35,6 @@ import JobWhenYaml from './yaml_tests/positive_tests/job_when.yml';
// YAML NEGATIVE TEST
import ArtifactsNegativeYaml from './yaml_tests/negative_tests/artifacts.yml';
import CacheNegativeYaml from './yaml_tests/negative_tests/cache.yml';
import IncludeNegativeYaml from './yaml_tests/negative_tests/include.yml';
import RulesNegativeYaml from './yaml_tests/negative_tests/rules.yml';
import VariablesNegativeYaml from './yaml_tests/negative_tests/variables.yml';
@ -62,6 +61,16 @@ import ProjectPathTriggerProjectLeadSlashYaml from './yaml_tests/negative_tests/
import ProjectPathTriggerProjectNoSlashYaml from './yaml_tests/negative_tests/project_path/trigger/project/no_slash.yml';
import ProjectPathTriggerProjectTailSlashYaml from './yaml_tests/negative_tests/project_path/trigger/project/tailing_slash.yml';
import CacheKeyFilesNotArray from './yaml_tests/negative_tests/cache/key_files_not_an_array.yml';
import CacheKeyPrefixArray from './yaml_tests/negative_tests/cache/key_prefix_array.yml';
import CacheKeyWithDot from './yaml_tests/negative_tests/cache/key_with_dot.yml';
import CacheKeyWithMultipleDots from './yaml_tests/negative_tests/cache/key_with_multiple_dots.yml';
import CacheKeyWithSlash from './yaml_tests/negative_tests/cache/key_with_slash.yml';
import CachePathsNotAnArray from './yaml_tests/negative_tests/cache/paths_not_an_array.yml';
import CacheUntrackedString from './yaml_tests/negative_tests/cache/untracked_string.yml';
import CacheWhenInteger from './yaml_tests/negative_tests/cache/when_integer.yml';
import CacheWhenNotReservedKeyword from './yaml_tests/negative_tests/cache/when_not_reserved_keyword.yml';
const ajv = new Ajv({
strictTypes: false,
strictTuples: false,
@ -116,7 +125,15 @@ describe('negative tests', () => {
// YAML
ArtifactsNegativeYaml,
CacheNegativeYaml,
CacheKeyFilesNotArray,
CacheKeyPrefixArray,
CacheKeyWithDot,
CacheKeyWithMultipleDots,
CacheKeyWithSlash,
CachePathsNotAnArray,
CacheUntrackedString,
CacheWhenInteger,
CacheWhenNotReservedKeyword,
IncludeNegativeYaml,
JobWhenNegativeYaml,
RulesNegativeYaml,

View File

@ -1,13 +0,0 @@
stages:
- prepare
# invalid cache:when values
when no integer:
stage: prepare
cache:
when: 0
when must be a reserved word:
stage: prepare
cache:
when: 'never'

View File

@ -0,0 +1,8 @@
cache-key-files-not-an-array:
script: echo "This job uses a cache."
cache:
key:
files: package.json
paths:
- vendor/ruby
- node_modules

View File

@ -0,0 +1,10 @@
cache-key-prefix-array:
script: echo "This job uses a cache."
cache:
key:
files:
- Gemfile.lock
prefix:
- binaries-cache-$CI_JOB_NAME
paths:
- binaries/

View File

@ -0,0 +1,6 @@
cache-key-with-.:
script: echo "This job uses a cache."
cache:
key: .
paths:
- binaries/

View File

@ -0,0 +1,7 @@
cache-key-with-multiple-.:
stage: test
script: echo "This job uses a cache."
cache:
key: ..
paths:
- binaries/

View File

@ -0,0 +1,6 @@
cache-key-with-/:
script: echo "This job uses a cache."
cache:
key: binaries-ca/che
paths:
- binaries/

View File

@ -0,0 +1,5 @@
cache-path-not-an-array:
script: echo "This job uses a cache."
cache:
key: binaries-cache
paths: binaries/*.apk

View File

@ -0,0 +1,4 @@
cache-untracked-string:
script: echo "This job uses a cache."
cache:
untracked: 'true'

View File

@ -0,0 +1,4 @@
when_integer:
script: echo "This job uses a cache."
cache:
when: 0

View File

@ -0,0 +1,4 @@
when_not_reserved_keyword:
script: echo "This job uses a cache."
cache:
when: 'never'

View File

@ -1,24 +1,124 @@
stages:
- prepare
# valid cache:when values
job1:
stage: prepare
script:
- echo 'running job'
cache:
when: 'on_success'
job2:
stage: prepare
script:
- echo 'running job'
cache:
when: 'on_failure'
job3:
stage: prepare
script:
- echo 'running job'
cache:
when: 'always'
# valid cache:paths
cache-paths:
script: echo "This job uses a cache."
cache:
key: binaries-cache
paths:
- binaries/*.apk
- .config
# valid cache:key
cache-key-string:
script: echo "This job uses a cache."
cache:
key: random-string
paths:
- binaries/
cache-key-string-with-dots:
script: echo "This job uses a cache."
cache:
key: random-..string
paths:
- binaries/
cache-key-string-beginning-with-dot:
script: echo "This job uses a cache."
cache:
key: .random-string
paths:
- binaries/
cache-key-string-ending-with-dot:
script: echo "This job uses a cache."
cache:
key: random-string.
paths:
- binaries/
cache-key-predefined-variable:
script: echo "This job uses a cache."
cache:
key: $CI_COMMIT_REF_SLUG
paths:
- binaries/
cache-key-combination:
script: echo "This job uses a cache."
cache:
key: binaries-cache-$CI_COMMIT_REF_SLUG
paths:
- binaries/
# valid cache:key:files
cache-key-files:
script: echo "This job uses a cache."
cache:
key:
files:
- Gemfile.lock
- package.json
paths:
- vendor/ruby
- node_modules
# valid cache:key:prefix
cache-key-prefix-string:
script: echo "This job uses a cache."
cache:
key:
files:
- Gemfile.lock
prefix: random-string
paths:
- binaries/
cache-key-prefix-predefined-variable:
script: echo "This job uses a cache."
cache:
key:
files:
- Gemfile.lock
prefix: $CI_JOB_NAME
paths:
- binaries/
cache-key-prefix-combination:
script: echo "This job uses a cache."
cache:
key:
files:
- Gemfile.lock
prefix: binaries-cache-$CI_JOB_NAME
paths:
- binaries/
# valid cache:untracked
cache-untracked-true:
script: test
cache:
untracked: true
cache-untracked-false:
script: test
cache:
untracked: false

View File

@ -28,7 +28,7 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
propsData: {
isCollapsible: false,
loadingText: 'Loading widget',
widgetName: 'MyWidget',
widgetName: 'WidgetTest',
value: {
collapsed: null,
expanded: null,
@ -94,6 +94,14 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
await nextTick();
expect(wrapper.text()).toContain('Loading');
});
it('validates widget name', () => {
expect(() => {
createComponent({
propsData: { fetchCollapsedData: jest.fn(), widgetName: 'InvalidWidgetName' },
});
}).toThrow();
});
});
describe('fetch', () => {

View File

@ -17,7 +17,7 @@ RSpec.describe GitlabSchema.types['BranchRule'] do
]
end
specify { is_expected.to require_graphql_authorizations(:read_protected_branch) }
it { is_expected.to require_graphql_authorizations(:read_protected_branch) }
specify { is_expected.to have_graphql_fields(fields).at_least }
it { is_expected.to have_graphql_fields(fields).at_least }
end

View File

@ -369,6 +369,25 @@ RSpec.describe Gitlab::ErrorTracking do
end
end
context 'when exception is excluded' do
before do
stub_const('SubclassRetryError', Class.new(Gitlab::SidekiqMiddleware::RetryError))
end
['Gitlab::SidekiqMiddleware::RetryError', 'SubclassRetryError'].each do |ex|
let(:exception) { ex.constantize.new }
it "does not report #{ex} exception to Sentry" do
expect(Gitlab::ErrorTracking::Logger).to receive(:error)
track_exception
expect(Raven.client.transport.events).to eq([])
expect(Sentry.get_current_client.transport.events).to eq([])
end
end
end
context 'when processing invalid URI exceptions' do
let(:invalid_uri) { 'http://foo:bar' }
let(:raven_exception_values) { raven_event['exception']['values'] }

View File

@ -367,6 +367,7 @@ protected_branches:
- push_access_levels
- unprotect_access_levels
- approval_project_rules
- external_status_checks
- required_code_owners_sections
protected_tags:
- project

View File

@ -13,6 +13,18 @@ RSpec.describe Gitlab::RequestForgeryProtection, :allow_forgery_protection do
}
end
it 'logs to /dev/null' do
logger = described_class::Controller.new.logger
# Taken from ActiveSupport.logger_outputs_to?
# There is no equivalent /dev/null stream like STDOUT, so
# we need to extract the path.
logdev = logger.instance_variable_get(:@logdev)
logger_source = logdev.dev
expect(logger_source.path).to eq(File::NULL)
end
describe '.call' do
context 'when the request method is GET' do
before do

View File

@ -166,7 +166,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it do
pipeline.status = from_status.to_s
if from_status != to_status
if from_status != to_status || success_to_success?
expect(pipeline.set_status(to_status.to_s))
.to eq(true)
else
@ -174,6 +174,12 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
.to eq(false), "loopback transitions are not allowed"
end
end
private
def success_to_success?
from_status == :success && to_status == :success
end
end
end

View File

@ -2406,23 +2406,6 @@ RSpec.describe Group do
end
end
describe '.groups_including_descendants_by' do
let_it_be(:parent_group1) { create(:group) }
let_it_be(:parent_group2) { create(:group) }
let_it_be(:extra_group) { create(:group) }
let_it_be(:child_group1) { create(:group, parent: parent_group1) }
let_it_be(:child_group2) { create(:group, parent: parent_group1) }
let_it_be(:child_group3) { create(:group, parent: parent_group2) }
subject { described_class.groups_including_descendants_by([parent_group2.id, parent_group1.id]) }
shared_examples 'returns the expected groups for a group and its descendants' do
specify { is_expected.to contain_exactly(parent_group1, parent_group2, child_group1, child_group2, child_group3) }
end
it_behaves_like 'returns the expected groups for a group and its descendants'
end
describe '.preset_root_ancestor_for' do
let_it_be(:rootgroup, reload: true) { create(:group) }
let_it_be(:subgroup, reload: true) { create(:group, parent: rootgroup) }

View File

@ -3,8 +3,7 @@
require 'spec_helper'
RSpec.describe RemoteMirrorEntity do
let(:project) { create(:project, :repository, :remote_mirror, url: "https://test:password@gitlab.com") }
let(:remote_mirror) { project.remote_mirrors.first }
let(:remote_mirror) { build_stubbed(:remote_mirror, url: "https://test:password@gitlab.com") }
let(:entity) { described_class.new(remote_mirror) }
subject { entity.as_json }

View File

@ -25,7 +25,7 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService do
check_expectation(test_file.dig('init', 'expect'), "init")
test_file['transitions'].each_with_index do |transition, idx|
event_on_jobs(transition['event'], transition['jobs'])
process_events(transition)
Sidekiq::Worker.drain_all # ensure that all async jobs are executed
check_expectation(transition['expect'], "transition:#{idx}")
end
@ -48,6 +48,14 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService do
}
end
def process_events(transition)
if transition['jobs']
event_on_jobs(transition['event'], transition['jobs'])
else
event_on_pipeline(transition['event'])
end
end
def event_on_jobs(event, job_names)
statuses = pipeline.latest_statuses.by_name(job_names).to_a
expect(statuses.count).to eq(job_names.count) # ensure that we have the same counts
@ -63,6 +71,14 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService do
end
end
end
def event_on_pipeline(event)
if event == 'retry'
pipeline.retry_failed(user)
else
pipeline.public_send("#{event}!")
end
end
end
end

View File

@ -0,0 +1,54 @@
config:
test1:
script: exit 0
test2:
when: manual
script: exit 1
init:
expect:
pipeline: pending
stages:
test: pending
jobs:
test1: pending
test2: manual
transitions:
- event: success
jobs: [test1]
expect:
pipeline: success
stages:
test: success
jobs:
test1: success
test2: manual
- event: play
jobs: [test2]
expect:
pipeline: running
stages:
test: running
jobs:
test1: success
test2: pending
- event: drop
jobs: [test2]
expect:
pipeline: success
stages:
test: success
jobs:
test1: success
test2: failed
- event: retry
jobs: [test2]
expect:
pipeline: running
stages:
test: running
jobs:
test1: success
test2: pending

View File

@ -0,0 +1,53 @@
config:
test1:
script: exit 0
test2:
when: manual
script: exit 1
init:
expect:
pipeline: pending
stages:
test: pending
jobs:
test1: pending
test2: manual
transitions:
- event: success
jobs: [test1]
expect:
pipeline: success
stages:
test: success
jobs:
test1: success
test2: manual
- event: play
jobs: [test2]
expect:
pipeline: running
stages:
test: running
jobs:
test1: success
test2: pending
- event: drop
jobs: [test2]
expect:
pipeline: success
stages:
test: success
jobs:
test1: success
test2: failed
- event: retry
expect:
pipeline: success
stages:
test: success
jobs:
test1: success
test2: manual

View File

@ -5,14 +5,16 @@ require 'spec_helper'
RSpec.describe Ci::RetryPipelineService, '#execute' do
include ProjectForksHelper
let(:user) { create(:user) }
let(:project) { create(:project) }
let_it_be_with_refind(:user) { create(:user) }
let_it_be_with_refind(:project) { create(:project) }
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:service) { described_class.new(project, user) }
let(:build_stage) { create(:ci_stage, name: 'build', position: 0, pipeline: pipeline) }
let(:test_stage) { create(:ci_stage, name: 'test', position: 1, pipeline: pipeline) }
let(:deploy_stage) { create(:ci_stage, name: 'deploy', position: 2, pipeline: pipeline) }
subject(:service) { described_class.new(project, user) }
context 'when user has full ability to modify pipeline' do
before do
project.add_developer(user)
@ -272,6 +274,21 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
expect(pipeline.reload).to be_running
end
end
context 'when there is a failed manual action' do
before do
create_build('rspec', :success, build_stage)
create_build('manual-rspec', :failed, build_stage, when: :manual, allow_failure: true)
end
it 'processes the manual action' do
service.execute(pipeline)
expect(build('rspec')).to be_success
expect(build('manual-rspec')).to be_manual
expect(pipeline.reload).to be_success
end
end
end
it 'closes all todos about failed jobs for pipeline' do