Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-09-11 12:08:50 +00:00
parent 6b5f961bef
commit 0eaa82ad61
76 changed files with 2161 additions and 559 deletions

View File

@ -543,3 +543,8 @@ Migration/CreateTableWithForeignKeys:
# Disable this cop for all the existing migrations
Exclude:
- !ruby/regexp /\Adb\/(?:post_)?migrate\/(?:201[0-9]\d+|20200[0-8][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9])_.+\.rb\z/
Gitlab/RailsLogger:
Exclude:
- 'spec/**/*.rb'
- 'ee/spec/**/*.rb'

View File

@ -1,4 +1,5 @@
<script>
import produce from 'immer';
import { s__ } from '~/locale';
import Todo from '~/sidebar/components/todo_toggle/todo.vue';
import createAlertTodoMutation from '../../graphql/mutations/alert_todo_create.mutation.graphql';
@ -109,12 +110,15 @@ export default {
});
},
updateCache(store) {
const data = store.readQuery({
const sourceData = store.readQuery({
query: alertQuery,
variables: this.getAlertQueryVariables,
});
data.project.alertManagementAlerts.nodes[0].todos.nodes.shift();
const data = produce(sourceData, draftData => {
// eslint-disable-next-line no-param-reassign
draftData.project.alertManagementAlerts.nodes[0].todos.nodes = [];
});
store.writeQuery({
query: alertQuery,

View File

@ -1,5 +1,6 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import produce from 'immer';
import { defaultDataIdFromObject } from 'apollo-cache-inmemory';
import createDefaultClient from '~/lib/graphql';
import createRouter from './router';
@ -16,8 +17,11 @@ export default selector => {
const resolvers = {
Mutation: {
toggleSidebarStatus: (_, __, { cache }) => {
const data = cache.readQuery({ query: sidebarStatusQuery });
data.sidebarStatus = !data.sidebarStatus;
const sourceData = cache.readQuery({ query: sidebarStatusQuery });
const data = produce(sourceData, draftData => {
// eslint-disable-next-line no-param-reassign
draftData.sidebarStatus = !draftData.sidebarStatus;
});
cache.writeQuery({ query: sidebarStatusQuery, data });
},
},
@ -34,6 +38,7 @@ export default selector => {
return defaultDataIdFromObject(object);
},
},
assumeImmutableResults: true,
}),
});

View File

@ -23,6 +23,8 @@ import { s__ } from '~/locale';
import { mergeUrlParams, joinPaths, visitUrl } from '~/lib/utils/url_utility';
import getIncidents from '../graphql/queries/get_incidents.query.graphql';
import getIncidentsCountByStatus from '../graphql/queries/get_count_by_status.query.graphql';
import SeverityToken from '~/sidebar/components/severity/severity.vue';
import { INCIDENT_SEVERITY } from '~/sidebar/components/severity/constants';
import { I18N, DEFAULT_PAGE_SIZE, INCIDENT_SEARCH_DELAY, INCIDENT_STATUS_TABS } from '../constants';
const TH_TEST_ID = { 'data-testid': 'incident-management-created-at-sort' };
@ -44,6 +46,12 @@ export default {
i18n: I18N,
statusTabs: INCIDENT_STATUS_TABS,
fields: [
{
key: 'severity',
label: s__('IncidentManagement|Severity'),
thClass: `gl-pointer-events-none`,
tdClass,
},
{
key: 'title',
label: s__('IncidentManagement|Incident'),
@ -82,6 +90,7 @@ export default {
PublishedCell: () => import('ee_component/incidents/components/published_cell.vue'),
GlBadge,
GlEmptyState,
SeverityToken,
},
directives: {
GlTooltip: GlTooltipDirective,
@ -280,6 +289,9 @@ export default {
this.sort = `${sortingColumn}_${sortingDirection}`;
},
getSeverity(severity) {
return INCIDENT_SEVERITY[severity];
},
},
};
</script>
@ -348,6 +360,10 @@ export default {
@row-clicked="navigateToIncidentDetails"
@sort-changed="fetchSortedData"
>
<template #cell(severity)="{ item }">
<severity-token :severity="getSeverity(item.severity)" />
</template>
<template #cell(title)="{ item }">
<div :class="{ 'gl-display-flex gl-align-items-center': item.state === 'closed' }">
<div class="gl-max-w-full text-truncate" :title="item.title">{{ item.title }}</div>

View File

@ -40,6 +40,7 @@ query getIncidents(
}
}
statusPagePublishedIncident
severity
}
pageInfo {
hasNextPage

View File

@ -12,8 +12,8 @@
flex-direction: row;
&:hover {
background-color: $blue-50;
border-color: $blue-200;
background-color: var(--blue-50, $blue-50);
border-color: var(--blue-200, $blue-200);
cursor: pointer;
}
@ -22,7 +22,7 @@
border-bottom: 1px solid transparent;
&:hover {
border-color: $blue-200;
border-color: var(--blue-200, $blue-200);
}
}
@ -46,11 +46,9 @@
}
&.todo-pending.done-reversible {
background-color: $white;
&:hover {
border-color: $white-normal;
background-color: $gray-light;
border-color: var(--border-color, $border-color);
background-color: var(--gray-50, $gray-50);
border-top: 1px solid transparent;
.todo-avatar,
@ -65,7 +63,7 @@
}
.btn {
background-color: $gray-light;
background-color: var(--gray-50, $gray-50);
}
}
}
@ -105,15 +103,15 @@
.todo-label,
.todo-project {
a {
color: $blue-600;
font-weight: $gl-font-weight-normal;
color: var(--blue-600, $blue-600);
}
}
.todo-body {
.badge.badge-pill,
p {
color: $gl-text-color;
color: var(--gl-text-color, $gl-text-color);
}
.md {
@ -127,9 +125,9 @@
pre {
border: 0;
background: $gray-light;
background: var(--gray-50, $gray-50);
border-radius: 0;
color: $gray-500;
color: var(--gray-500, $gray-500);
margin: 0 20px;
overflow: hidden;
}
@ -185,7 +183,7 @@
.todo-body {
margin: 0;
border-left: 2px solid $gray-100;
border-left: 2px solid var(--border-color, $border-color);
padding-left: 10px;
}
}

View File

@ -85,6 +85,86 @@ $white-light: #2b2b2b;
$white-normal: #333;
$white-dark: #444;
$border-color: #4f4f4f;
body.gl-dark {
--gray-10: #{$gray-10};
--gray-50: #{$gray-50};
--gray-100: #{$gray-100};
--gray-200: #{$gray-200};
--gray-300: #{$gray-300};
--gray-400: #{$gray-400};
--gray-500: #{$gray-500};
--gray-600: #{$gray-600};
--gray-700: #{$gray-700};
--gray-800: #{$gray-800};
--gray-900: #{$gray-900};
--gray-950: #{$gray-950};
--green-50: #{$green-50};
--green-100: #{$green-100};
--green-200: #{$green-200};
--green-300: #{$green-300};
--green-400: #{$green-400};
--green-500: #{$green-500};
--green-600: #{$green-600};
--green-700: #{$green-700};
--green-800: #{$green-800};
--green-900: #{$green-900};
--green-950: #{$green-950};
--blue-50: #{$blue-50};
--blue-100: #{$blue-100};
--blue-200: #{$blue-200};
--blue-300: #{$blue-300};
--blue-400: #{$blue-400};
--blue-500: #{$blue-500};
--blue-600: #{$blue-600};
--blue-700: #{$blue-700};
--blue-800: #{$blue-800};
--blue-900: #{$blue-900};
--blue-950: #{$blue-950};
--orange-50: #{$orange-50};
--orange-100: #{$orange-100};
--orange-200: #{$orange-200};
--orange-300: #{$orange-300};
--orange-400: #{$orange-400};
--orange-500: #{$orange-500};
--orange-600: #{$orange-600};
--orange-700: #{$orange-700};
--orange-800: #{$orange-800};
--orange-900: #{$orange-900};
--orange-950: #{$orange-950};
--red-50: #{$red-50};
--red-100: #{$red-100};
--red-200: #{$red-200};
--red-300: #{$red-300};
--red-400: #{$red-400};
--red-500: #{$red-500};
--red-600: #{$red-600};
--red-700: #{$red-700};
--red-800: #{$red-800};
--red-900: #{$red-900};
--red-950: #{$red-950};
--indigo-50: #{$indigo-50};
--indigo-100: #{$indigo-100};
--indigo-200: #{$indigo-200};
--indigo-300: #{$indigo-300};
--indigo-400: #{$indigo-400};
--indigo-500: #{$indigo-500};
--indigo-600: #{$indigo-600};
--indigo-700: #{$indigo-700};
--indigo-800: #{$indigo-800};
--indigo-900: #{$indigo-900};
--indigo-950: #{$indigo-950};
--gl-text-color: #{$gray-900};
--border-color: #{$border-color};
}
$border-white-light: $gray-900;
$border-white-normal: $gray-900;

View File

@ -15,6 +15,7 @@ class SearchController < ApplicationController
around_action :allow_gitaly_ref_name_caching
before_action :block_anonymous_global_searches
skip_before_action :authenticate_user!
requires_cross_project_access if: -> do
search_term_present = params[:search].present? || params[:term].present?
@ -128,6 +129,16 @@ class SearchController < ApplicationController
payload[:metadata]['meta.search.search'] = params[:search]
payload[:metadata]['meta.search.scope'] = params[:scope]
end
def block_anonymous_global_searches
return if params[:project_id].present? || params[:group_id].present?
return if current_user
return unless ::Feature.enabled?(:block_anonymous_global_searches)
store_location_for(:user, request.fullpath)
redirect_to new_user_session_path, alert: _('You must be logged in to search across all of GitLab')
end
end
SearchController.prepend_if_ee('EE::SearchController')
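
The new `block_anonymous_global_searches` before-action above only takes effect while the matching feature flag is enabled (it ships disabled, per the feature-flag YAML later in this commit). A minimal Rails-console sketch of toggling it; the flag name comes from this diff, and `Feature.enable`/`Feature.disable` are the standard GitLab feature-flag helpers:

```ruby
# Hedged sketch, run from a Rails console:
Feature.enable(:block_anonymous_global_searches)   # anonymous global searches now redirect to sign-in
Feature.disable(:block_anonymous_global_searches)  # restore anonymous global search
```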

View File

@ -4,31 +4,38 @@ module Ci
class JobsFinder
include Gitlab::Allowable
def initialize(current_user:, project: nil, params: {})
def initialize(current_user:, pipeline: nil, project: nil, params: {}, type: ::Ci::Build)
@pipeline = pipeline
@current_user = current_user
@project = project
@params = params
@type = type
raise ArgumentError, 'type must be a subclass of Ci::Processable' unless type < ::Ci::Processable
end
def execute
builds = init_collection.order_id_desc
filter_by_scope(builds)
rescue Gitlab::Access::AccessDeniedError
Ci::Build.none
type.none
end
private
attr_reader :current_user, :project, :params
attr_reader :current_user, :pipeline, :project, :params, :type
def init_collection
project ? project_builds : all_builds
if Feature.enabled?(:ci_jobs_finder_refactor)
pipeline_jobs || project_jobs || all_jobs
else
project ? project_builds : all_jobs
end
end
def all_builds
def all_jobs
raise Gitlab::Access::AccessDeniedError unless current_user&.admin?
Ci::Build.all
type.all
end
def project_builds
@ -37,7 +44,25 @@ module Ci
project.builds.relevant
end
def project_jobs
return unless project
raise Gitlab::Access::AccessDeniedError unless can?(current_user, :read_build, project)
jobs_by_type(project, type).relevant
end
def pipeline_jobs
return unless pipeline
raise Gitlab::Access::AccessDeniedError unless can?(current_user, :read_build, pipeline)
jobs_by_type(pipeline, type).latest
end
def filter_by_scope(builds)
if Feature.enabled?(:ci_jobs_finder_refactor)
return filter_by_statuses!(params[:scope], builds) if params[:scope].is_a?(Array)
end
case params[:scope]
when 'pending'
builds.pending.reverse_order
@ -49,5 +74,23 @@ module Ci
builds
end
end
def filter_by_statuses!(statuses, builds)
unknown_statuses = params[:scope] - ::CommitStatus::AVAILABLE_STATUSES
raise ArgumentError, 'Scope contains invalid value(s)' unless unknown_statuses.empty?
builds.where(status: params[:scope]) # rubocop: disable CodeReuse/ActiveRecord
end
def jobs_by_type(relation, type)
case type.name
when ::Ci::Build.name
relation.builds
when ::Ci::Bridge.name
relation.bridges
else
raise ArgumentError, "finder does not support #{type} type"
end
end
end
end
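
A usage sketch of the extended finder, assuming `current_user` and an eligible `pipeline` are already in scope; the keyword arguments mirror the constructor above, while the scope values are illustrative. Array scopes are only honoured while the `ci_jobs_finder_refactor` flag is enabled.

```ruby
# Latest bridge jobs of a pipeline, filtered to an array of statuses.
# Raises ArgumentError if `type` is not a Ci::Processable subclass or the
# scope contains a status outside CommitStatus::AVAILABLE_STATUSES.
bridges = Ci::JobsFinder
  .new(current_user: current_user, pipeline: pipeline, params: { scope: %w[pending running] }, type: ::Ci::Bridge)
  .execute
```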

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
module Mutations
module Boards
class Destroy < ::Mutations::BaseMutation
graphql_name 'DestroyBoard'
field :board,
Types::BoardType,
null: true,
description: 'The board after mutation'
argument :id,
::Types::GlobalIDType[::Board],
required: true,
description: 'The global ID of the board to destroy'
authorize :admin_board
def resolve(id:)
board = authorized_find!(id: id)
response = ::Boards::DestroyService.new(board.resource_parent, current_user).execute(board)
{
board: response.success? ? nil : board,
errors: response.errors
}
end
private
def find_object(id:)
GitlabSchema.object_from_id(id, expected_type: ::Board)
end
end
end
end
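
A hedged console sketch of exercising the new `DestroyBoard` mutation through the schema; `GitlabSchema.execute` is the standard graphql-ruby entry point, and the board global ID plus `current_user` are illustrative:

```ruby
query = <<~GRAPHQL
  mutation {
    destroyBoard(input: { id: "gid://gitlab/Board/1" }) {
      board { id }
      errors
    }
  }
GRAPHQL

# On success the payload's `board` is null; on failure it echoes the board and the errors.
result = GitlabSchema.execute(query, context: { current_user: current_user })
```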

View File

@ -14,6 +14,7 @@ module Types
mount_mutation Mutations::AwardEmojis::Add
mount_mutation Mutations::AwardEmojis::Remove
mount_mutation Mutations::AwardEmojis::Toggle
mount_mutation Mutations::Boards::Destroy
mount_mutation Mutations::Boards::Issues::IssueMoveList
mount_mutation Mutations::Boards::Lists::Create
mount_mutation Mutations::Boards::Lists::Update

View File

@ -48,6 +48,14 @@ module Ci
raise NotImplementedError
end
def self.with_preloads
preload(
:metadata,
downstream_pipeline: [project: [:route, { namespace: :route }]],
project: [:namespace]
)
end
def schedule_downstream_pipeline!
raise InvalidBridgeTypeError unless downstream_project

View File

@ -175,7 +175,6 @@ module Ci
end
scope :queued_before, ->(time) { where(arel_table[:queued_at].lt(time)) }
scope :order_id_desc, -> { order('ci_builds.id DESC') }
scope :preload_project_and_pipeline_project, -> do
preload(Ci::Pipeline::PROJECT_ROUTE_AND_NAMESPACE_ROUTE,
@ -213,6 +212,10 @@ module Ci
.execute(build)
# rubocop: enable CodeReuse/ServiceClass
end
def with_preloads
preload(:job_artifacts_archive, :job_artifacts, project: [:namespace])
end
end
state_machine :status do

View File

@ -8,6 +8,8 @@ module Clusters
has_many :agent_tokens, class_name: 'Clusters::AgentToken'
scope :with_name, -> (name) { where(name: name) }
validates :name,
presence: true,
length: { maximum: 63 },

View File

@ -32,6 +32,8 @@ class CommitStatus < ApplicationRecord
where(allow_failure: true, status: [:failed, :canceled])
end
scope :order_id_desc, -> { order('ci_builds.id DESC') }
scope :exclude_ignored, -> do
# We want to ignore failed but allowed to fail jobs.
#

View File

@ -3,9 +3,13 @@
module Boards
class DestroyService < Boards::BaseService
def execute(board)
return false if parent.boards.size == 1
if parent.boards.size == 1
return ServiceResponse.error(message: "The board could not be deleted, because the parent doesn't have any other boards.")
end
board.destroy
board.destroy!
ServiceResponse.success
end
end
end
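
The service now returns a `ServiceResponse` instead of a bare boolean, which is what the GraphQL `DestroyBoard` mutation above consumes. A small sketch, assuming `board` and `current_user` are in scope:

```ruby
response = Boards::DestroyService.new(board.resource_parent, current_user).execute(board)
response.success? # => true when the board was destroyed
response.errors   # => e.g. ["The board could not be deleted, ..."] when it is the last board
```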

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
module MergeRequests
module RemovesRefs
def cleanup_refs(merge_request)
CleanupRefsService.schedule(merge_request)
end
end
end

View File

@ -0,0 +1,60 @@
# frozen_string_literal: true
module MergeRequests
class CleanupRefsService
include BaseServiceUtility
TIME_THRESHOLD = 14.days
attr_reader :merge_request
def self.schedule(merge_request)
MergeRequestCleanupRefsWorker.perform_in(TIME_THRESHOLD, merge_request.id)
end
def initialize(merge_request)
@merge_request = merge_request
@repository = merge_request.project.repository
@ref_path = merge_request.ref_path
@ref_head_sha = @repository.commit(merge_request.ref_path).id
end
def execute
return error("Merge request has not been closed nor merged for #{TIME_THRESHOLD.inspect}.") unless eligible?
# Ensure that commit shas of refs are kept around so we won't lose them when GC runs.
keep_around
return error('Failed to create keep around refs.') unless kept_around?
delete_refs
success
end
private
attr_reader :repository, :ref_path, :ref_head_sha
def eligible?
return met_time_threshold?(merge_request.metrics&.latest_closed_at) if merge_request.closed?
merge_request.merged? && met_time_threshold?(merge_request.metrics&.merged_at)
end
def met_time_threshold?(attr)
attr.nil? || attr.to_i <= TIME_THRESHOLD.ago.to_i
end
def kept_around?
Gitlab::Git::KeepAround.new(repository).kept_around?(ref_head_sha)
end
def keep_around
repository.keep_around(ref_head_sha)
end
def delete_refs
repository.delete_refs(ref_path)
end
end
end
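
A sketch of driving the service above, assuming a closed or merged `merge_request` is in scope; `schedule` and `execute` are the two entry points defined in this file:

```ruby
# Enqueue the cleanup worker to run after TIME_THRESHOLD (14 days).
MergeRequests::CleanupRefsService.schedule(merge_request)

# Or run it inline; BaseServiceUtility's success/error helpers return a hash.
result = MergeRequests::CleanupRefsService.new(merge_request).execute
result[:status]  # => :success, or :error with result[:message] explaining why
```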

View File

@ -2,6 +2,8 @@
module MergeRequests
class CloseService < MergeRequests::BaseService
include RemovesRefs
def execute(merge_request, commit = nil)
return merge_request unless can?(current_user, :update_merge_request, merge_request)
@ -19,6 +21,7 @@ module MergeRequests
merge_request.update_project_counter_caches
cleanup_environments(merge_request)
abort_auto_merge(merge_request, 'merge request was closed')
cleanup_refs(merge_request)
end
merge_request

View File

@ -7,6 +7,8 @@ module MergeRequests
# and execute all hooks and notifications
#
class PostMergeService < MergeRequests::BaseService
include RemovesRefs
def execute(merge_request)
merge_request.mark_as_merged
close_issues(merge_request)
@ -20,6 +22,7 @@ module MergeRequests
delete_non_latest_diffs(merge_request)
cancel_review_app_jobs!(merge_request)
cleanup_environments(merge_request)
cleanup_refs(merge_request)
end
private

View File

@ -53,4 +53,4 @@
%strong Tip:
= succeed '.' do
You can also checkout merge requests locally by
= link_to 'following these guidelines', help_page_path('user/project/merge_requests/reviewing_and_managing_merge_requests.md', anchor: "checkout-merge-requests-locally"), target: '_blank', rel: 'noopener noreferrer'
= link_to 'following these guidelines', help_page_path('user/project/merge_requests/reviewing_and_managing_merge_requests.md', anchor: "checkout-merge-requests-locally-through-the-head-ref"), target: '_blank', rel: 'noopener noreferrer'

View File

@ -1,4 +1,4 @@
- return unless issuable.supports_issue_type?
- return unless issuable.supports_issue_type? && can?(current_user, :admin_issue, @project)
.form-group.row.gl-mb-0
= form.label :type, 'Type', class: 'col-form-label col-sm-2'

View File

@ -8,7 +8,7 @@
.gl-mb-2
%strong{ data: { qa_selector: "milestone_link", qa_milestone_title: milestone.title } }
= link_to truncate(milestone.title, length: 100), milestone_path(milestone)
- if @group
- if @group || dashboard
= " - #{milestone_type}"
- if milestone.due_date || milestone.start_date
@ -62,7 +62,3 @@
= link_to s_('Milestones|Reopen Milestone'), milestone_path(milestone, milestone: { state_event: :activate }), method: :put, class: "btn btn-sm btn-grouped btn-reopen"
- else
= link_to s_('Milestones|Close Milestone'), milestone_path(milestone, milestone: { state_event: :close }), method: :put, class: "btn btn-sm btn-grouped btn-close"
- if dashboard
.label-badge.label-badge-gray
= milestone_type

View File

@ -1492,6 +1492,14 @@
:weight: 5
:idempotent:
:tags: []
- :name: merge_request_cleanup_refs
:feature_category: :source_code_management
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: merge_request_mergeability_check
:feature_category: :source_code_management
:has_external_dependencies:

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
class MergeRequestCleanupRefsWorker
include ApplicationWorker
feature_category :source_code_management
idempotent!
def perform(merge_request_id)
merge_request = MergeRequest.find_by_id(merge_request_id)
unless merge_request
logger.error("Failed to find merge request with ID: #{merge_request_id}")
return
end
result = ::MergeRequests::CleanupRefsService.new(merge_request).execute
return if result[:status] == :success
logger.error("Failed cleanup refs of merge request (#{merge_request_id}): #{result[:message]}")
end
end

View File

@ -0,0 +1,5 @@
---
title: Clean up stale merge request HEAD ref
merge_request: 41555
author:
type: performance

View File

@ -0,0 +1,5 @@
---
title: 'Milestone Dashboard: Move Gray Type Badge Next to the Milestone Title'
merge_request: 39617
author: Kev @KevSlashNull
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Surface incident severity and icon in the Incident List table
merge_request: 40112
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Destroy issue board via GraphQL
merge_request: 40930
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Fix todos hover style in dark mode
merge_request: 41122
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Use applogger in some files of auth/ldap dir
merge_request: 41061
author: Rajendra Kadam
type: other

View File

@ -0,0 +1,7 @@
---
name: block_anonymous_global_searches
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/41041
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/244276
group: group::global search
type: development
default_enabled: false

View File

@ -0,0 +1,7 @@
---
name: ci_jobs_finder_refactor
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/36622
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/245183
group: group::continuous integration
type: development
default_enabled: false

View File

@ -154,6 +154,8 @@
- 2
- - merge
- 5
- - merge_request_cleanup_refs
- 1
- - merge_request_mergeability_check
- 1
- - metrics_dashboard_prune_old_annotations

View File

@ -198,7 +198,7 @@ The following documentation relates to the DevOps **Create** stage:
| Create topics - Merge Requests | Description |
|:--------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------|
| [Checking out merge requests locally](user/project/merge_requests/reviewing_and_managing_merge_requests.md#checkout-merge-requests-locally) | Tips for working with merge requests locally. |
| [Checking out merge requests locally](user/project/merge_requests/reviewing_and_managing_merge_requests.md#checkout-merge-requests-locally-through-the-head-ref) | Tips for working with merge requests locally. |
| [Cherry-picking](user/project/merge_requests/cherry_pick_changes.md) | Use GitLab for cherry-picking changes. |
| [Merge request thread resolution](user/discussions/index.md#moving-a-single-thread-to-a-new-issue) | Resolve threads, move threads in a merge request to an issue, and only allow merge requests to be merged if all threads are resolved. |
| [Merge requests](user/project/merge_requests/index.md) | Merge request management. |

View File

@ -131,6 +131,7 @@ POST /groups/:id/epics/:epic_iid/epics
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
| `epic_iid` | integer | yes | The internal ID of the (future parent) epic. |
| `title` | string | yes | The title of a newly created epic. |
| `confidential` | boolean | no | Whether the epic should be confidential. Will be ignored if `confidential_epics` feature flag is disabled. Defaults to the confidentiality state of the parent epic. |
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/1/epics/5/epics?title=Newpic"

View File

@ -1709,6 +1709,31 @@ type ClusterAgent {
"""
project: Project
"""
Tokens associated with the cluster agent
"""
tokens(
"""
Returns the elements in the list that come after the specified cursor.
"""
after: String
"""
Returns the elements in the list that come before the specified cursor.
"""
before: String
"""
Returns the first _n_ elements from the list.
"""
first: Int
"""
Returns the last _n_ elements from the list.
"""
last: Int
): ClusterAgentTokenConnection
"""
Timestamp the cluster agent was updated
"""
@ -1797,6 +1822,26 @@ type ClusterAgentToken {
id: ClustersAgentTokenID!
}
"""
The connection type for ClusterAgentToken.
"""
type ClusterAgentTokenConnection {
"""
A list of edges.
"""
edges: [ClusterAgentTokenEdge]
"""
A list of nodes.
"""
nodes: [ClusterAgentToken]
"""
Information to aid in pagination.
"""
pageInfo: PageInfo!
}
"""
Autogenerated input type of ClusterAgentTokenCreate
"""
@ -1867,6 +1912,21 @@ type ClusterAgentTokenDeletePayload {
errors: [String!]!
}
"""
An edge in a connection.
"""
type ClusterAgentTokenEdge {
"""
A cursor for use in pagination.
"""
cursor: String!
"""
The item at the end of the edge.
"""
node: ClusterAgentToken
}
"""
Identifier of Clusters::Agent
"""
@ -4340,6 +4400,41 @@ enum DesignVersionEvent {
NONE
}
"""
Autogenerated input type of DestroyBoard
"""
input DestroyBoardInput {
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
"""
The global ID of the board to destroy
"""
id: BoardID!
}
"""
Autogenerated return type of DestroyBoard
"""
type DestroyBoardPayload {
"""
The board after mutation
"""
board: Board
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
"""
Errors encountered during execution of the mutation.
"""
errors: [String!]!
}
"""
Autogenerated input type of DestroyNote
"""
@ -10588,6 +10683,7 @@ type Mutation {
designManagementDelete(input: DesignManagementDeleteInput!): DesignManagementDeletePayload
designManagementMove(input: DesignManagementMoveInput!): DesignManagementMovePayload
designManagementUpload(input: DesignManagementUploadInput!): DesignManagementUploadPayload
destroyBoard(input: DestroyBoardInput!): DestroyBoardPayload
destroyNote(input: DestroyNoteInput!): DestroyNotePayload
destroySnippet(input: DestroySnippetInput!): DestroySnippetPayload
@ -11776,6 +11872,16 @@ type Project {
last: Int
): BoardConnection
"""
Find a single cluster agent by name
"""
clusterAgent(
"""
Name of the cluster agent
"""
name: String!
): ClusterAgent
"""
Cluster agents associated with the project
"""

View File

@ -4659,6 +4659,59 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "tokens",
"description": "Tokens associated with the cluster agent",
"args": [
{
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "before",
"description": "Returns the elements in the list that come before the specified cursor.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "first",
"description": "Returns the first _n_ elements from the list.",
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"defaultValue": null
},
{
"name": "last",
"description": "Returns the last _n_ elements from the list.",
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"defaultValue": null
}
],
"type": {
"kind": "OBJECT",
"name": "ClusterAgentTokenConnection",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "updatedAt",
"description": "Timestamp the cluster agent was updated",
@ -4940,6 +4993,73 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "ClusterAgentTokenConnection",
"description": "The connection type for ClusterAgentToken.",
"fields": [
{
"name": "edges",
"description": "A list of edges.",
"args": [
],
"type": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "ClusterAgentTokenEdge",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "nodes",
"description": "A list of nodes.",
"args": [
],
"type": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "ClusterAgentToken",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "pageInfo",
"description": "Information to aid in pagination.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "PageInfo",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "INPUT_OBJECT",
"name": "ClusterAgentTokenCreateInput",
@ -5144,6 +5264,51 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "ClusterAgentTokenEdge",
"description": "An edge in a connection.",
"fields": [
{
"name": "cursor",
"description": "A cursor for use in pagination.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "node",
"description": "The item at the end of the edge.",
"args": [
],
"type": {
"kind": "OBJECT",
"name": "ClusterAgentToken",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "SCALAR",
"name": "ClustersAgentID",
@ -11999,6 +12164,108 @@
],
"possibleTypes": null
},
{
"kind": "INPUT_OBJECT",
"name": "DestroyBoardInput",
"description": "Autogenerated input type of DestroyBoard",
"fields": null,
"inputFields": [
{
"name": "id",
"description": "The global ID of the board to destroy",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "BoardID",
"ofType": null
}
},
"defaultValue": null
},
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
}
],
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "DestroyBoardPayload",
"description": "Autogenerated return type of DestroyBoard",
"fields": [
{
"name": "board",
"description": "The board after mutation",
"args": [
],
"type": {
"kind": "OBJECT",
"name": "Board",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "errors",
"description": "Errors encountered during execution of the mutation.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
}
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "INPUT_OBJECT",
"name": "DestroyNoteInput",
@ -30390,6 +30657,33 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "destroyBoard",
"description": null,
"args": [
{
"name": "input",
"description": null,
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "INPUT_OBJECT",
"name": "DestroyBoardInput",
"ofType": null
}
},
"defaultValue": null
}
],
"type": {
"kind": "OBJECT",
"name": "DestroyBoardPayload",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "destroyNote",
"description": null,
@ -35050,6 +35344,33 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "clusterAgent",
"description": "Find a single cluster agent by name",
"args": [
{
"name": "name",
"description": "Name of the cluster agent",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
},
"defaultValue": null
}
],
"type": {
"kind": "OBJECT",
"name": "ClusterAgent",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "clusterAgents",
"description": "Cluster agents associated with the project",

View File

@ -728,6 +728,16 @@ A specific version in which designs were added, modified or deleted
| `id` | ID! | ID of the design version |
| `sha` | ID! | SHA of the design version |
### DestroyBoardPayload
Autogenerated return type of DestroyBoard
| Field | Type | Description |
| ----- | ---- | ----------- |
| `board` | Board | The board after mutation |
| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
| `errors` | String! => Array | Errors encountered during execution of the mutation. |
### DestroyNotePayload
Autogenerated return type of DestroyNote
@ -1771,6 +1781,7 @@ Autogenerated return type of PipelineRetry
| `autocloseReferencedIssues` | Boolean | Indicates if issues referenced by merge requests and commits within the default branch are closed automatically |
| `avatarUrl` | String | URL to avatar image file of the project |
| `board` | Board | A single board of the project |
| `clusterAgent` | ClusterAgent | Find a single cluster agent by name |
| `containerExpirationPolicy` | ContainerExpirationPolicy | The container expiration policy of the project |
| `containerRegistryEnabled` | Boolean | Indicates if the project stores Docker container images in a container registry |
| `createdAt` | Time | Timestamp of the project creation |

View File

@ -316,6 +316,10 @@ Implemented using Redis methods [PFADD](https://redis.io/commands/pfadd) and [PF
Increment unique users count using Redis HLL, for given event name.
Tracking events using the `UsageData` API requires the `usage_data_api` feature flag to be enabled, which is disabled by default.
API requests are protected by checking for a valid CSRF token.
To increment the values, the related feature `usage_data<event_name>` should be enabled.
```plaintext
@ -330,9 +334,10 @@ Implemented using Redis methods [PFADD](https://redis.io/commands/pfadd) and [PF
Return 200 if tracking failed for any reason.
- `401 Unauthorized` if user is not authenticated
- `400 Bad request` if event parameter is missing
- `200` if the event was tracked or any error occurred
- `400 Bad request` if event parameter is missing
- `401 Unauthorized` if user is not authenticated
- `403 Forbidden` for invalid CSRF token provided
1. Track event using base module `Gitlab::UsageDataCounters::HLLRedisCounter.track_event(entity_id, event_name)`.
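
For reference, a hedged sketch of the server-side call named in step 1; the signature follows the line above, while the entity ID and event name are purely illustrative:

```ruby
# Records one unique-user hit for the given event in the Redis HLL counter.
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(current_user.id, 'some_event_name')
```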

View File

@ -54,22 +54,6 @@ To create and enable a feature flag:
You can change these settings by clicking the **{pencil}** (edit) button
next to any feature flag in the list.
## Rollout strategy (legacy)
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/8240) in GitLab 12.2.
In GitLab 13.0 and earlier, the **Rollout strategy** setting affects which users will experience
the feature as enabled. Choose the percentage of users that the feature will be enabled
for. The rollout strategy will have no effect if the environment spec is disabled.
It can be set to:
- All users
- [Percent of users](#percent-of-users)
- Optionally, you can click the **Include additional user IDs** checkbox and add a list
of specific users IDs to enable the feature for.
- [User IDs](#user-ids)
## Feature flag strategies
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/35555) in GitLab 13.0.
@ -208,6 +192,23 @@ To enable it:
Feature.enable(:feature_flags_new_version)
```
## Rollout strategy (legacy)
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/8240) in GitLab 12.2.
> - [Made read-only](https://gitlab.com/gitlab-org/gitlab/-/issues/220228) in GitLab 13.4.
In GitLab 13.0 and earlier, the **Rollout strategy** setting affects which users will experience
the feature as enabled. Choose the percentage of users that the feature will be enabled
for. The rollout strategy will have no effect if the environment spec is disabled.
It can be set to:
- All users
- [Percent of users](#percent-of-users)
- Optionally, you can click the **Include additional user IDs** checkbox and add a list
of specific user IDs to enable the feature for.
- [User IDs](#user-ids)
## Disable a feature flag for a specific environment
In [GitLab 13.0 and earlier](https://gitlab.com/gitlab-org/gitlab/-/issues/8621),

[Binary image files changed — not shown. Removed image: 39 KiB; added image: 16 KiB.]

View File

@ -13,12 +13,18 @@ For users with at least Developer [permissions](../../user/permissions.md), the
Incident Management list is available at **Operations > Incidents**
in your project's sidebar. The list contains the following metrics:
![Incident List](./img/incident_list_sort_v13_3.png)
![Incident List](img/incident_list_v13_4.png)
- **Status** - To filter incidents by their status, click **Open**, **Closed**,
or **All** above the incident list.
- **Search** - The Incident list supports a simple free text search, which filters
on the **Title** and **Incident** fields.
- **Severity** - Severity of a particular incident. Can have one of the following values:
- `Critical - S1`
- `High - S2`
- `Medium - S3`
- `Low - S4`
- `Unknown`
- **Incident** - The description of the incident, which attempts to capture the
most meaningful data.
- **Date created** - How long ago the incident was created. This field uses the

[Binary image files changed — not shown. Removed image: 36 KiB; added image: 41 KiB.]

View File

@ -196,7 +196,7 @@ to configure daily security scans.
Each dashboard's vulnerability list contains vulnerabilities from the latest scans that were merged
into the default branch.
![Vulnerability Report](img/group_vulnerability_report_v13_3.png)
![Vulnerability Report](img/group_vulnerability_report_v13_4.png)
You can filter which vulnerabilities the Security Dashboard displays by:

View File

@ -284,15 +284,26 @@ the command line.
NOTE: **Note:**
This section might move in its own document in the future.
### Checkout merge requests locally
### Checkout merge requests locally through the `head` ref
A merge request contains all the history from a repository, plus the additional
commits added to the branch associated with the merge request. Here are a few
tricks to checkout a merge request locally.
ways to checkout a merge request locally.
Please note that you can checkout a merge request locally even if the source
project is a fork (even a private fork) of the target project.
This relies on the merge request `head` ref (`refs/merge-requests/:iid/head`)
that is available for each merge request. It allows checking out a merge
request via its ID instead of its branch.
[Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/223156) in GitLab
13.4, the merge request `head` ref is deleted 14 days after the merge request is
closed or merged. The merge request is then no longer available for local checkout
via its `head` ref, although it can still be re-opened. As long as the merge
request's branch exists, you can still check out that branch, which is unaffected.
#### Checkout locally by adding a Git alias
Add the following alias to your `~/.gitconfig`:

View File

@ -15,6 +15,24 @@ module API
detail 'This feature was introduced in GitLab 8.11.'
success Entities::Ci::PipelineBasic
end
helpers do
params :optional_scope do
optional :scope, types: [String, Array[String]], desc: 'The scope of builds to show',
values: ::CommitStatus::AVAILABLE_STATUSES,
coerce_with: ->(scope) {
case scope
when String
[scope]
when ::Array
scope
else
['unknown']
end
}
end
end
params do
use :pagination
optional :scope, type: String, values: %w[running pending finished branches tags],
@ -96,6 +114,64 @@ module API
present pipeline, with: Entities::Ci::Pipeline
end
desc 'Get pipeline jobs' do
success Entities::Ci::Job
end
params do
requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
use :optional_scope
use :pagination
end
get ':id/pipelines/:pipeline_id/jobs' do
authorize!(:read_pipeline, user_project)
pipeline = user_project.all_pipelines.find(params[:pipeline_id])
if Feature.enabled?(:ci_jobs_finder_refactor)
builds = ::Ci::JobsFinder
.new(current_user: current_user, pipeline: pipeline, params: params)
.execute
else
authorize!(:read_build, pipeline)
builds = pipeline.builds
builds = filter_builds(builds, params[:scope])
end
builds = builds.with_preloads
present paginate(builds), with: Entities::Ci::Job
end
desc 'Get pipeline bridge jobs' do
success Entities::Ci::Bridge
end
params do
requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
use :optional_scope
use :pagination
end
get ':id/pipelines/:pipeline_id/bridges' do
authorize!(:read_build, user_project)
pipeline = user_project.all_pipelines.find(params[:pipeline_id])
if Feature.enabled?(:ci_jobs_finder_refactor)
bridges = ::Ci::JobsFinder
.new(current_user: current_user, pipeline: pipeline, params: params, type: ::Ci::Bridge)
.execute
else
authorize!(:read_pipeline, pipeline)
bridges = pipeline.bridges
bridges = filter_builds(bridges, params[:scope])
end
bridges = bridges.with_preloads
present paginate(bridges), with: Entities::Ci::Bridge
end
desc 'Gets the variables for a given pipeline' do
detail 'This feature was introduced in GitLab 11.11'
success Entities::Ci::Variable
@ -170,6 +246,21 @@ module API
end
helpers do
# NOTE: This method should be removed once the ci_jobs_finder_refactor FF is
# removed. https://gitlab.com/gitlab-org/gitlab/-/issues/245183
# rubocop: disable CodeReuse/ActiveRecord
def filter_builds(builds, scope)
return builds if scope.nil? || scope.empty?
available_statuses = ::CommitStatus::AVAILABLE_STATUSES
unknown = scope - available_statuses
render_api_error!('Scope contains invalid value(s)', 400) unless unknown.empty?
builds.where(status: scope)
end
# rubocop: enable CodeReuse/ActiveRecord
def pipeline
strong_memoize(:pipeline) do
user_project.all_pipelines.find(params[:pipeline_id])

View File

@ -48,54 +48,6 @@ module API
end
# rubocop: enable CodeReuse/ActiveRecord
desc 'Get pipeline jobs' do
success Entities::Ci::Job
end
params do
requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
use :optional_scope
use :pagination
end
# rubocop: disable CodeReuse/ActiveRecord
get ':id/pipelines/:pipeline_id/jobs' do
authorize!(:read_pipeline, user_project)
pipeline = user_project.all_pipelines.find(params[:pipeline_id])
authorize!(:read_build, pipeline)
builds = pipeline.builds
builds = filter_builds(builds, params[:scope])
builds = builds.preload(:job_artifacts_archive, :job_artifacts, project: [:namespace])
present paginate(builds), with: Entities::Ci::Job
end
# rubocop: enable CodeReuse/ActiveRecord
desc 'Get pipeline bridge jobs' do
success ::API::Entities::Ci::Bridge
end
params do
requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
use :optional_scope
use :pagination
end
# rubocop: disable CodeReuse/ActiveRecord
get ':id/pipelines/:pipeline_id/bridges' do
authorize!(:read_build, user_project)
pipeline = user_project.ci_pipelines.find(params[:pipeline_id])
authorize!(:read_pipeline, pipeline)
bridges = pipeline.bridges
bridges = filter_builds(bridges, params[:scope])
bridges = bridges.preload(
:metadata,
downstream_pipeline: [project: [:route, { namespace: :route }]],
project: [:namespace]
)
present paginate(bridges), with: ::API::Entities::Ci::Bridge
end
# rubocop: enable CodeReuse/ActiveRecord
desc 'Get a specific job of a project' do
success Entities::Ci::Job
end

View File

@ -7,6 +7,7 @@ module API
namespace 'usage_data' do
before do
not_found! unless Feature.enabled?(:usage_data_api)
forbidden!('Invalid CSRF token is provided') unless verified_request?
end
desc 'Track usage data events' do

View File

@ -55,7 +55,7 @@ module Gitlab
response = ldap.get_operation_result
unless response.code == 0
Rails.logger.warn("LDAP search error: #{response.message}") # rubocop:disable Gitlab/RailsLogger
Gitlab::AppLogger.warn("LDAP search error: #{response.message}")
end
[]
@ -67,7 +67,7 @@ module Gitlab
retries += 1
error_message = connection_error_message(error)
Rails.logger.warn(error_message) # rubocop:disable Gitlab/RailsLogger
Gitlab::AppLogger.warn(error_message)
if retries < MAX_SEARCH_RETRIES
renew_connection_adapter

View File

@ -116,7 +116,7 @@ module Gitlab
true
rescue => e
Rails.logger.warn("Repository does not exist: #{e} at: #{disk_path}.git") # rubocop:disable Gitlab/RailsLogger
Gitlab::AppLogger.warn("Repository does not exist: #{e} at: #{disk_path}.git")
Gitlab::ErrorTracking.track_exception(e, path: disk_path, storage: storage)
false

View File

@ -13345,6 +13345,9 @@ msgstr ""
msgid "IncidentManagement|Published to status page"
msgstr ""
msgid "IncidentManagement|Severity"
msgstr ""
msgid "IncidentManagement|There are no closed incidents"
msgstr ""
@ -28952,6 +28955,9 @@ msgstr ""
msgid "You must accept our Terms of Service and privacy policy in order to register an account"
msgstr ""
msgid "You must be logged in to search across all of GitLab"
msgstr ""
msgid "You must disassociate %{domain} from all clusters it is attached to before deletion."
msgstr ""

View File

@ -14,6 +14,10 @@ module QA
element :new_user_accept_terms_checkbox
end
view 'ee/app/views/registrations/welcome/_button.html.haml' do
element :get_started_button
end
def sign_up!(user)
fill_element :new_user_name_field, user.name
fill_element :new_user_username_field, user.username
@ -24,7 +28,9 @@ module QA
check_element :new_user_accept_terms_checkbox if has_element?(:new_user_accept_terms_checkbox)
signed_in = retry_until do
click_element :new_user_register_button
click_element :new_user_register_button if has_element?(:new_user_register_button)
click_element :get_started_button if has_element?(:get_started_button)
Page::Main::Menu.perform(&:has_personal_area?)
end

View File

@ -8,7 +8,7 @@ module RuboCop
class RailsLogger < ::RuboCop::Cop::Cop
include CodeReuseHelpers
# This cop checks for the Rails.logger in the codebase
# This cop checks for the Rails.logger log methods in the codebase
#
# @example
#
@ -17,34 +17,29 @@ module RuboCop
#
# # good
# Gitlab::AppLogger.error("Project %{project_path} could not be saved" % { project_path: project.full_path })
#
# # OK
# Rails.logger.level
MSG = 'Use a structured JSON logger instead of `Rails.logger`. ' \
'https://docs.gitlab.com/ee/development/logging.html'.freeze
def_node_matcher :rails_logger?, <<~PATTERN
(send (const nil? :Rails) :logger ... )
# See supported log methods:
# https://ruby-doc.org/stdlib-2.6.6/libdoc/logger/rdoc/Logger.html
LOG_METHODS = %i[debug error fatal info warn].freeze
LOG_METHODS_PATTERN = LOG_METHODS.map(&:inspect).join(' ').freeze
def_node_matcher :rails_logger_log?, <<~PATTERN
(send
(send (const nil? :Rails) :logger)
{#{LOG_METHODS_PATTERN}} ...
)
PATTERN
WHITELISTED_DIRECTORIES = %w[
spec
].freeze
def on_send(node)
return if in_whitelisted_directory?(node)
return unless rails_logger?(node)
return unless rails_logger_log?(node)
add_offense(node, location: :expression)
end
def in_whitelisted_directory?(node)
path = file_path_for_node(node)
WHITELISTED_DIRECTORIES.any? do |directory|
path.start_with?(
File.join(rails_root, directory),
File.join(rails_root, 'ee', directory)
)
end
end
end
end
end
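
A few illustrative lines showing what the tightened matcher now flags versus permits; `Gitlab::AppLogger` is the structured logger the cop's message points to:

```ruby
Rails.logger.warn("LDAP search error")       # offense: a Rails.logger log method
Gitlab::AppLogger.warn("LDAP search error")  # good: structured JSON logger
Rails.logger.level                           # allowed: not one of LOG_METHODS
```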

View File

@ -95,46 +95,74 @@ RSpec.describe SearchController do
using RSpec::Parameterized::TableSyntax
render_views
it 'omits pipeline status from load' do
project = create(:project, :public)
expect(Gitlab::Cache::Ci::ProjectPipelineStatus).not_to receive(:load_in_batch_for_projects)
context 'when block_anonymous_global_searches is disabled' do
before do
stub_feature_flags(block_anonymous_global_searches: false)
end
get :show, params: { scope: 'projects', search: project.name }
it 'omits pipeline status from load' do
project = create(:project, :public)
expect(Gitlab::Cache::Ci::ProjectPipelineStatus).not_to receive(:load_in_batch_for_projects)
expect(assigns[:search_objects].first).to eq project
get :show, params: { scope: 'projects', search: project.name }
expect(assigns[:search_objects].first).to eq project
end
context 'check search term length' do
let(:search_queries) do
char_limit = SearchService::SEARCH_CHAR_LIMIT
term_limit = SearchService::SEARCH_TERM_LIMIT
{
chars_under_limit: ('a' * (char_limit - 1)),
chars_over_limit: ('a' * (char_limit + 1)),
terms_under_limit: ('abc ' * (term_limit - 1)),
terms_over_limit: ('abc ' * (term_limit + 1))
}
end
where(:string_name, :expectation) do
:chars_under_limit | :not_to_set_flash
:chars_over_limit | :set_chars_flash
:terms_under_limit | :not_to_set_flash
:terms_over_limit | :set_terms_flash
end
with_them do
it do
get :show, params: { scope: 'projects', search: search_queries[string_name] }
case expectation
when :not_to_set_flash
expect(controller).not_to set_flash[:alert]
when :set_chars_flash
expect(controller).to set_flash[:alert].to(/characters/)
when :set_terms_flash
expect(controller).to set_flash[:alert].to(/terms/)
end
end
end
end
end
context 'check search term length' do
let(:search_queries) do
char_limit = SearchService::SEARCH_CHAR_LIMIT
term_limit = SearchService::SEARCH_TERM_LIMIT
{
chars_under_limit: ('a' * (char_limit - 1)),
chars_over_limit: ('a' * (char_limit + 1)),
terms_under_limit: ('abc ' * (term_limit - 1)),
terms_over_limit: ('abc ' * (term_limit + 1))
}
context 'when block_anonymous_global_searches is enabled' do
context 'for unauthenticated user' do
before do
sign_out(user)
end
it 'redirects to login page' do
get :show, params: { scope: 'projects', search: '*' }
expect(response).to redirect_to new_user_session_path
end
end
where(:string_name, :expectation) do
:chars_under_limit | :not_to_set_flash
:chars_over_limit | :set_chars_flash
:terms_under_limit | :not_to_set_flash
:terms_over_limit | :set_terms_flash
end
context 'for authenticated user' do
it 'succeeds' do
get :show, params: { scope: 'projects', search: '*' }
with_them do
it do
get :show, params: { scope: 'projects', search: search_queries[string_name] }
case expectation
when :not_to_set_flash
expect(controller).not_to set_flash[:alert]
when :set_chars_flash
expect(controller).to set_flash[:alert].to(/characters/)
when :set_terms_flash
expect(controller).to set_flash[:alert].to(/terms/)
end
expect(response).to have_gitlab_http_status(:ok)
end
end
end

View File

@ -61,6 +61,10 @@ RSpec.describe "User creates issue" do
.and have_content(project.name)
expect(page).to have_selector('strong', text: 'Description')
end
it 'does not render the issue type dropdown' do
expect(page).not_to have_selector('.s-issuable-type-filter-dropdown-wrap')
end
end
context "when signed in as developer", :js do

View File

@ -86,20 +86,33 @@ RSpec.describe 'User searches for issues', :js do
end
context 'when signed out' do
let(:project) { create(:project, :public) }
context 'when block_anonymous_global_searches is disabled' do
let(:project) { create(:project, :public) }
before do
visit(search_path)
before do
stub_feature_flags(block_anonymous_global_searches: false)
visit(search_path)
end
include_examples 'top right search form'
it 'finds an issue' do
search_for_issue(issue1.title)
page.within('.results') do
expect(page).to have_link(issue1.title)
expect(page).not_to have_link(issue2.title)
end
end
end
include_examples 'top right search form'
context 'when block_anonymous_global_searches is enabled' do
before do
visit(search_path)
end
it 'finds an issue' do
search_for_issue(issue1.title)
page.within('.results') do
expect(page).to have_link(issue1.title)
expect(page).not_to have_link(issue2.title)
it 'is redirected to login page' do
expect(page).to have_content('You must be logged in to search across all of GitLab')
end
end
end

View File

@ -6,31 +6,44 @@ RSpec.describe 'User searches for projects' do
let!(:project) { create(:project, :public, name: 'Shop') }
context 'when signed out' do
include_examples 'top right search form'
context 'when block_anonymous_global_searches is disabled' do
before do
stub_feature_flags(block_anonymous_global_searches: false)
end
it 'finds a project' do
visit(search_path)
include_examples 'top right search form'
fill_in('dashboard_search', with: project.name[0..3])
click_button('Search')
it 'finds a project' do
visit(search_path)
expect(page).to have_link(project.name)
fill_in('dashboard_search', with: project.name[0..3])
click_button('Search')
expect(page).to have_link(project.name)
end
it 'preserves the group being searched in' do
visit(search_path(group_id: project.namespace.id))
submit_search('foo')
expect(find('#group_id', visible: false).value).to eq(project.namespace.id.to_s)
end
it 'preserves the project being searched in' do
visit(search_path(project_id: project.id))
submit_search('foo')
expect(find('#project_id', visible: false).value).to eq(project.id.to_s)
end
end
it 'preserves the group being searched in' do
visit(search_path(group_id: project.namespace.id))
submit_search('foo')
expect(find('#group_id', visible: false).value).to eq(project.namespace.id.to_s)
end
it 'preserves the project being searched in' do
visit(search_path(project_id: project.id))
submit_search('foo')
expect(find('#project_id', visible: false).value).to eq(project.id.to_s)
context 'when block_anonymous_global_searches is enabled' do
it 'is redirected to login page' do
visit(search_path)
expect(page).to have_content('You must be logged in to search across all of GitLab')
end
end
end
end

View File

@ -36,53 +36,135 @@ RSpec.describe Ci::JobsFinder, '#execute' do
end
end
context 'scope is present' do
let(:jobs) { [job_1, job_2, job_3] }
where(:scope, :index) do
[
['pending', 0],
['running', 1],
['finished', 2]
]
context 'with ci_jobs_finder_refactor ff enabled' do
before do
stub_feature_flags(ci_jobs_finder_refactor: true)
end
with_them do
let(:params) { { scope: scope } }
context 'scope is present' do
let(:jobs) { [job_1, job_2, job_3] }
it { expect(subject).to match_array([jobs[index]]) }
where(:scope, :index) do
[
['pending', 0],
['running', 1],
['finished', 2]
]
end
with_them do
let(:params) { { scope: scope } }
it { expect(subject).to match_array([jobs[index]]) }
end
end
context 'scope is an array' do
let(:jobs) { [job_1, job_2, job_3] }
let(:params) {{ scope: ['running'] }}
it 'filters by the job statuses in the scope' do
expect(subject).to match_array([job_2])
end
end
end
context 'with ci_jobs_finder_refactor ff disabled' do
before do
stub_feature_flags(ci_jobs_finder_refactor: false)
end
context 'scope is present' do
let(:jobs) { [job_1, job_2, job_3] }
where(:scope, :index) do
[
['pending', 0],
['running', 1],
['finished', 2]
]
end
with_them do
let(:params) { { scope: scope } }
it { expect(subject).to match_array([jobs[index]]) }
end
end
end
end
context 'a project is present' do
subject { described_class.new(current_user: user, project: project, params: params).execute }
context 'user has access to the project' do
before do
project.add_maintainer(user)
end
it 'returns jobs for the specified project' do
expect(subject).to match_array([job_3])
end
context 'with ci_jobs_finder_refactor ff enabled' do
before do
stub_feature_flags(ci_jobs_finder_refactor: true)
end
context 'user has no access to project builds' do
before do
project.add_guest(user)
context 'a project is present' do
subject { described_class.new(current_user: user, project: project, params: params).execute }
context 'user has access to the project' do
before do
project.add_maintainer(user)
end
it 'returns jobs for the specified project' do
expect(subject).to match_array([job_3])
end
end
it 'returns no jobs' do
expect(subject).to be_empty
context 'user has no access to project builds' do
before do
project.add_guest(user)
end
it 'returns no jobs' do
expect(subject).to be_empty
end
end
context 'without user' do
let(:user) { nil }
it 'returns no jobs' do
expect(subject).to be_empty
end
end
end
end
context 'without user' do
let(:user) { nil }
context 'with ci_jobs_finder_refactor ff disabled' do
before do
stub_feature_flags(ci_jobs_finder_refactor: false)
end
context 'a project is present' do
subject { described_class.new(current_user: user, project: project, params: params).execute }
it 'returns no jobs' do
expect(subject).to be_empty
context 'user has access to the project' do
before do
project.add_maintainer(user)
end
it 'returns jobs for the specified project' do
expect(subject).to match_array([job_3])
end
end
context 'user has no access to project builds' do
before do
project.add_guest(user)
end
it 'returns no jobs' do
expect(subject).to be_empty
end
end
context 'without user' do
let(:user) { nil }
it 'returns no jobs' do
expect(subject).to be_empty
end
end
end
end

View File

@ -7,10 +7,16 @@ RSpec.describe SearchController, '(JavaScript fixtures)', type: :controller do
render_views
let_it_be(:user) { create(:admin) }
before(:all) do
clean_frontend_fixtures('search/')
end
before do
sign_in(user)
end
it 'search/show.html' do
get :show

View File

@ -13,6 +13,7 @@ import {
} from '@gitlab/ui';
import { visitUrl, joinPaths, mergeUrlParams } from '~/lib/utils/url_utility';
import IncidentsList from '~/incidents/components/incidents_list.vue';
import SeverityToken from '~/sidebar/components/severity/severity.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import { I18N, INCIDENT_STATUS_TABS } from '~/incidents/constants';
import mockIncidents from '../mocks/incidents.json';
@ -51,6 +52,7 @@ describe('Incidents List', () => {
const findStatusFilterBadge = () => wrapper.findAll(GlBadge);
const findStatusTabs = () => wrapper.find(GlTabs);
const findEmptyState = () => wrapper.find(GlEmptyState);
const findSeverity = () => wrapper.findAll(SeverityToken);
function mountComponent({ data = { incidents: [], incidentsCount: {} }, loading = false }) {
wrapper = mount(IncidentsList, {
@ -182,6 +184,10 @@ describe('Incidents List', () => {
);
});
});
it('renders severity per row', () => {
expect(findSeverity().length).toBe(mockIncidents.length);
});
});
describe('Create Incident', () => {

View File

@ -4,7 +4,8 @@
"title": "New: Incident",
"createdAt": "2020-06-03T15:46:08Z",
"assignees": {},
"state": "opened"
"state": "opened",
"severity": "CRITICAL"
},
{
"iid": "14",
@ -20,20 +21,23 @@
}
]
},
"state": "opened"
"state": "opened",
"severity": "HIGH"
},
{
"iid": "13",
"title": "Create issue3",
"createdAt": "2020-05-19T08:53:55Z",
"assignees": {},
"state": "closed"
"state": "closed",
"severity": "LOW"
},
{
"iid": "12",
"title": "Create issue2",
"createdAt": "2020-05-18T17:13:35Z",
"assignees": {},
"state": "closed"
"state": "closed",
"severity": "MEDIUM"
}
]

View File

@ -128,7 +128,7 @@ RSpec.describe Gitlab::Auth::Ldap::Adapter do
before do
allow(adapter).to receive(:renew_connection_adapter).and_return(ldap)
allow(ldap).to receive(:search) { raise Net::LDAP::Error, "some error" }
allow(Rails.logger).to receive(:warn)
allow(Gitlab::AppLogger).to receive(:warn)
end
context 'retries the operation' do
@ -152,7 +152,7 @@ RSpec.describe Gitlab::Auth::Ldap::Adapter do
it 'logs the error' do
expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
expect(Rails.logger).to have_received(:warn).with(
expect(Gitlab::AppLogger).to have_received(:warn).with(
"LDAP search raised exception Net::LDAP::Error: some error")
end
end

View File

@ -12,6 +12,17 @@ RSpec.describe Clusters::Agent do
it { is_expected.to validate_length_of(:name).is_at_most(63) }
it { is_expected.to validate_uniqueness_of(:name).scoped_to(:project_id) }
describe 'scopes' do
describe '.with_name' do
let!(:matching_name) { create(:cluster_agent, name: 'matching-name') }
let!(:other_name) { create(:cluster_agent, name: 'other-name') }
subject { described_class.with_name(matching_name.name) }
it { is_expected.to contain_exactly(matching_name) }
end
end
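# The model-side change is not part of this hunk. For context, the scope these
# examples exercise presumably reduces to a simple filter; the line below is an
# inferred sketch, not the diffed code.
#
#   scope :with_name, -> (name) { where(name: name) }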
describe 'validation' do
describe 'name validation' do
it 'rejects names that do not conform to RFC 1123', :aggregate_failures do

View File

@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe API::Ci::Pipelines do
let_it_be(:user) { create(:user) }
let_it_be(:non_member) { create(:user) }
let_it_be(:project2) { create(:project, creator: user) }
# We need to reload as the shared example 'pipelines visibility table' changes the project
let_it_be(:project, reload: true) do
@ -307,6 +308,606 @@ RSpec.describe API::Ci::Pipelines do
end
end
describe 'GET /projects/:id/pipelines/:pipeline_id/jobs' do
let(:query) { {} }
let(:api_user) { user }
let_it_be(:job) do
create(:ci_build, :success, pipeline: pipeline,
artifacts_expire_at: 1.day.since)
end
let(:guest) { create(:project_member, :guest, project: project).user }
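# Examples tagged :skip_before_request (see the filter contexts below) issue
# their own request; all other examples rely on this default request.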
before do |example|
unless example.metadata[:skip_before_request]
project.update!(public_builds: false)
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query
end
end
context 'with ci_jobs_finder_refactor ff enabled' do
before do
stub_feature_flags(ci_jobs_finder_refactor: true)
end
context 'authorized user' do
it 'returns pipeline jobs' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
end
it 'returns correct values' do
expect(json_response).not_to be_empty
expect(json_response.first['commit']['id']).to eq project.commit.id
expect(Time.parse(json_response.first['artifacts_expire_at'])).to be_like_time(job.artifacts_expire_at)
expect(json_response.first['artifacts_file']).to be_nil
expect(json_response.first['artifacts']).to be_an Array
expect(json_response.first['artifacts']).to be_empty
end
it_behaves_like 'a job with artifacts and trace' do
let(:api_endpoint) { "/projects/#{project.id}/pipelines/#{pipeline.id}/jobs" }
end
it 'returns pipeline data' do
json_job = json_response.first
expect(json_job['pipeline']).not_to be_empty
expect(json_job['pipeline']['id']).to eq job.pipeline.id
expect(json_job['pipeline']['ref']).to eq job.pipeline.ref
expect(json_job['pipeline']['sha']).to eq job.pipeline.sha
expect(json_job['pipeline']['status']).to eq job.pipeline.status
end
context 'filter jobs with one scope element' do
let(:query) { { 'scope' => 'pending' } }
it do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
end
end
context 'filter jobs with hash' do
let(:query) { { scope: { hello: 'pending', world: 'running' } } }
it { expect(response).to have_gitlab_http_status(:bad_request) }
end
context 'filter jobs with array of scope elements' do
let(:query) { { scope: %w(pending running) } }
it do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
end
end
context 'respond 400 when scope contains invalid state' do
let(:query) { { scope: %w(unknown running) } }
it { expect(response).to have_gitlab_http_status(:bad_request) }
end
context 'jobs in different pipelines' do
let!(:pipeline2) { create(:ci_empty_pipeline, project: project) }
let!(:job2) { create(:ci_build, pipeline: pipeline2) }
it 'excludes jobs from other pipelines' do
json_response.each { |job| expect(job['pipeline']['id']).to eq(pipeline.id) }
end
end
it 'avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query
end.count
create_list(:ci_build, 3, :trace_artifact, :artifacts, :test_reports, pipeline: pipeline)
expect do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query
end.not_to exceed_all_query_limit(control_count)
end
end
context 'no pipeline is found' do
it 'does not return jobs' do
get api("/projects/#{project2.id}/pipelines/#{pipeline.id}/jobs", user)
expect(json_response['message']).to eq '404 Project Not Found'
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'unauthorized user' do
context 'when user is not logged in' do
let(:api_user) { nil }
it 'does not return jobs' do
expect(json_response['message']).to eq '404 Project Not Found'
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when user is guest' do
let(:guest) { create(:project_member, :guest, project: project).user }
let(:api_user) { guest }
it 'does not return jobs' do
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
end
context 'with ci_jobs_finder_refactor ff disabled' do
before do
stub_feature_flags(ci_jobs_finder_refactor: false)
end
context 'authorized user' do
it 'returns pipeline jobs' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
end
it 'returns correct values' do
expect(json_response).not_to be_empty
expect(json_response.first['commit']['id']).to eq project.commit.id
expect(Time.parse(json_response.first['artifacts_expire_at'])).to be_like_time(job.artifacts_expire_at)
expect(json_response.first['artifacts_file']).to be_nil
expect(json_response.first['artifacts']).to be_an Array
expect(json_response.first['artifacts']).to be_empty
end
it_behaves_like 'a job with artifacts and trace' do
let(:api_endpoint) { "/projects/#{project.id}/pipelines/#{pipeline.id}/jobs" }
end
it 'returns pipeline data' do
json_job = json_response.first
expect(json_job['pipeline']).not_to be_empty
expect(json_job['pipeline']['id']).to eq job.pipeline.id
expect(json_job['pipeline']['ref']).to eq job.pipeline.ref
expect(json_job['pipeline']['sha']).to eq job.pipeline.sha
expect(json_job['pipeline']['status']).to eq job.pipeline.status
end
context 'filter jobs with one scope element' do
let(:query) { { 'scope' => 'pending' } }
it do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
end
end
context 'filter jobs with hash' do
let(:query) { { scope: { hello: 'pending', world: 'running' } } }
it { expect(response).to have_gitlab_http_status(:bad_request) }
end
context 'filter jobs with array of scope elements' do
let(:query) { { scope: %w(pending running) } }
it do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
end
end
context 'respond 400 when scope contains invalid state' do
let(:query) { { scope: %w(unknown running) } }
it { expect(response).to have_gitlab_http_status(:bad_request) }
end
context 'jobs in different pipelines' do
let!(:pipeline2) { create(:ci_empty_pipeline, project: project) }
let!(:job2) { create(:ci_build, pipeline: pipeline2) }
it 'excludes jobs from other pipelines' do
json_response.each { |job| expect(job['pipeline']['id']).to eq(pipeline.id) }
end
end
it 'avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query
end.count
create_list(:ci_build, 3, :trace_artifact, :artifacts, :test_reports, pipeline: pipeline)
expect do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query
end.not_to exceed_all_query_limit(control_count)
end
end
context 'no pipeline is found' do
it 'does not return jobs' do
get api("/projects/#{project2.id}/pipelines/#{pipeline.id}/jobs", user)
expect(json_response['message']).to eq '404 Project Not Found'
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'unauthorized user' do
context 'when user is not logged in' do
let(:api_user) { nil }
it 'does not return jobs' do
expect(json_response['message']).to eq '404 Project Not Found'
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when user is guest' do
let(:guest) { create(:project_member, :guest, project: project).user }
let(:api_user) { guest }
it 'does not return jobs' do
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
end
end
describe 'GET /projects/:id/pipelines/:pipeline_id/bridges' do
let_it_be(:bridge) { create(:ci_bridge, pipeline: pipeline) }
let(:downstream_pipeline) { create(:ci_pipeline) }
let!(:pipeline_source) do
create(:ci_sources_pipeline,
source_pipeline: pipeline,
source_project: project,
source_job: bridge,
pipeline: downstream_pipeline,
project: downstream_pipeline.project)
end
let(:query) { {} }
let(:api_user) { user }
before do |example|
unless example.metadata[:skip_before_request]
project.update!(public_builds: false)
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
end
end
context 'with ci_jobs_finder_refactor ff enabled' do
before do
stub_feature_flags(ci_jobs_finder_refactor: true)
end
context 'authorized user' do
it 'returns pipeline bridges' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
end
it 'returns correct values' do
expect(json_response).not_to be_empty
expect(json_response.first['commit']['id']).to eq project.commit.id
expect(json_response.first['id']).to eq bridge.id
expect(json_response.first['name']).to eq bridge.name
expect(json_response.first['stage']).to eq bridge.stage
end
it 'returns pipeline data' do
json_bridge = json_response.first
expect(json_bridge['pipeline']).not_to be_empty
expect(json_bridge['pipeline']['id']).to eq bridge.pipeline.id
expect(json_bridge['pipeline']['ref']).to eq bridge.pipeline.ref
expect(json_bridge['pipeline']['sha']).to eq bridge.pipeline.sha
expect(json_bridge['pipeline']['status']).to eq bridge.pipeline.status
end
it 'returns downstream pipeline data' do
json_bridge = json_response.first
expect(json_bridge['downstream_pipeline']).not_to be_empty
expect(json_bridge['downstream_pipeline']['id']).to eq downstream_pipeline.id
expect(json_bridge['downstream_pipeline']['ref']).to eq downstream_pipeline.ref
expect(json_bridge['downstream_pipeline']['sha']).to eq downstream_pipeline.sha
expect(json_bridge['downstream_pipeline']['status']).to eq downstream_pipeline.status
end
context 'filter bridges' do
before_all do
create_bridge(pipeline, :pending)
create_bridge(pipeline, :running)
end
context 'with one scope element' do
let(:query) { { 'scope' => 'pending' } }
it :skip_before_request do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.count).to eq 1
expect(json_response.first["status"]).to eq "pending"
end
end
context 'with array of scope elements' do
let(:query) { { scope: %w(pending running) } }
it :skip_before_request do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.count).to eq 2
json_response.each { |r| expect(%w(pending running).include?(r['status'])).to be true }
end
end
end
context 'respond 400 when scope contains invalid state' do
context 'in an array' do
let(:query) { { scope: %w(unknown running) } }
it { expect(response).to have_gitlab_http_status(:bad_request) }
end
context 'in a hash' do
let(:query) { { scope: { unknown: true } } }
it { expect(response).to have_gitlab_http_status(:bad_request) }
end
context 'in a string' do
let(:query) { { scope: "unknown" } }
it { expect(response).to have_gitlab_http_status(:bad_request) }
end
end
context 'bridges in different pipelines' do
let!(:pipeline2) { create(:ci_empty_pipeline, project: project) }
let!(:bridge2) { create(:ci_bridge, pipeline: pipeline2) }
it 'excludes bridges from other pipelines' do
json_response.each { |bridge| expect(bridge['pipeline']['id']).to eq(pipeline.id) }
end
end
it 'avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
end.count
3.times { create_bridge(pipeline) }
expect do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
end.not_to exceed_all_query_limit(control_count)
end
end
context 'no pipeline is found' do
it 'does not return bridges' do
get api("/projects/#{project2.id}/pipelines/#{pipeline.id}/bridges", user)
expect(json_response['message']).to eq '404 Project Not Found'
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'unauthorized user' do
context 'when user is not logged in' do
let(:api_user) { nil }
it 'does not return bridges' do
expect(json_response['message']).to eq '404 Project Not Found'
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when user is guest' do
let(:api_user) { guest }
let(:guest) { create(:project_member, :guest, project: project).user }
it 'does not return bridges' do
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when user has no read_build access for project' do
before do
project.add_guest(api_user)
end
it 'does not return bridges' do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
end
context 'with ci_jobs_finder_refactor ff disabled' do
before do
stub_feature_flags(ci_jobs_finder_refactor: false)
end
context 'authorized user' do
it 'returns pipeline bridges' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
end
it 'returns correct values' do
expect(json_response).not_to be_empty
expect(json_response.first['commit']['id']).to eq project.commit.id
expect(json_response.first['id']).to eq bridge.id
expect(json_response.first['name']).to eq bridge.name
expect(json_response.first['stage']).to eq bridge.stage
end
it 'returns pipeline data' do
json_bridge = json_response.first
expect(json_bridge['pipeline']).not_to be_empty
expect(json_bridge['pipeline']['id']).to eq bridge.pipeline.id
expect(json_bridge['pipeline']['ref']).to eq bridge.pipeline.ref
expect(json_bridge['pipeline']['sha']).to eq bridge.pipeline.sha
expect(json_bridge['pipeline']['status']).to eq bridge.pipeline.status
end
it 'returns downstream pipeline data' do
json_bridge = json_response.first
expect(json_bridge['downstream_pipeline']).not_to be_empty
expect(json_bridge['downstream_pipeline']['id']).to eq downstream_pipeline.id
expect(json_bridge['downstream_pipeline']['ref']).to eq downstream_pipeline.ref
expect(json_bridge['downstream_pipeline']['sha']).to eq downstream_pipeline.sha
expect(json_bridge['downstream_pipeline']['status']).to eq downstream_pipeline.status
end
context 'filter bridges' do
before_all do
create_bridge(pipeline, :pending)
create_bridge(pipeline, :running)
end
context 'with one scope element' do
let(:query) { { 'scope' => 'pending' } }
it :skip_before_request do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.count).to eq 1
expect(json_response.first["status"]).to eq "pending"
end
end
context 'with array of scope elements' do
let(:query) { { scope: %w(pending running) } }
it :skip_before_request do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.count).to eq 2
json_response.each { |r| expect(%w(pending running).include?(r['status'])).to be true }
end
end
end
context 'respond 400 when scope contains invalid state' do
context 'in an array' do
let(:query) { { scope: %w(unknown running) } }
it { expect(response).to have_gitlab_http_status(:bad_request) }
end
context 'in a hash' do
let(:query) { { scope: { unknown: true } } }
it { expect(response).to have_gitlab_http_status(:bad_request) }
end
context 'in a string' do
let(:query) { { scope: "unknown" } }
it { expect(response).to have_gitlab_http_status(:bad_request) }
end
end
context 'bridges in different pipelines' do
let!(:pipeline2) { create(:ci_empty_pipeline, project: project) }
let!(:bridge2) { create(:ci_bridge, pipeline: pipeline2) }
it 'excludes bridges from other pipelines' do
json_response.each { |bridge| expect(bridge['pipeline']['id']).to eq(pipeline.id) }
end
end
it 'avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
end.count
3.times { create_bridge(pipeline) }
expect do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
end.not_to exceed_all_query_limit(control_count)
end
end
context 'no pipeline is found' do
it 'does not return bridges' do
get api("/projects/#{project2.id}/pipelines/#{pipeline.id}/bridges", user)
expect(json_response['message']).to eq '404 Project Not Found'
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'unauthorized user' do
context 'when user is not logged in' do
let(:api_user) { nil }
it 'does not return bridges' do
expect(json_response['message']).to eq '404 Project Not Found'
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when user is guest' do
let(:api_user) { guest }
let(:guest) { create(:project_member, :guest, project: project).user }
it 'does not return bridges' do
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when user has no read_build access for project' do
before do
project.add_guest(api_user)
end
it 'does not return bridges' do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
end
def create_bridge(pipeline, status = :created)
create(:ci_bridge, status: status, pipeline: pipeline).tap do |bridge|
downstream_pipeline = create(:ci_pipeline)
create(:ci_sources_pipeline,
source_pipeline: pipeline,
source_project: pipeline.project,
source_job: bridge,
pipeline: downstream_pipeline,
project: downstream_pipeline.project)
end
end
end
describe 'POST /projects/:id/pipeline ' do
def expect_variables(variables, expected_variables)
variables.each_with_index do |variable, index|

View File

@ -0,0 +1,79 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Mutations::Boards::Destroy do
include GraphqlHelpers
let_it_be(:current_user, reload: true) { create(:user) }
let_it_be(:project, reload: true) { create(:project) }
let_it_be(:board) { create(:board, project: project) }
let_it_be(:other_board) { create(:board, project: project) }
let(:mutation) do
variables = {
id: GitlabSchema.id_from_object(board).to_s
}
graphql_mutation(:destroy_board, variables)
end
subject { post_graphql_mutation(mutation, current_user: current_user) }
def mutation_response
graphql_mutation_response(:destroy_board)
end
context 'when the user does not have permission' do
it_behaves_like 'a mutation that returns a top-level access error'
it 'does not destroy the board' do
expect { subject }.not_to change { Board.count }
end
end
context 'when the user has permission' do
before do
project.add_maintainer(current_user)
end
context 'when given id is not for a board' do
let_it_be(:board) { build_stubbed(:issue, project: project) }
it 'returns an error' do
subject
expect(graphql_errors.first['message']).to include('does not represent an instance of Board')
end
end
context 'when everything is ok' do
it 'destroys the board' do
expect { subject }.to change { Board.count }.from(2).to(1)
end
it 'returns an empty board' do
post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response).to have_key('board')
expect(mutation_response['board']).to be_nil
end
end
context 'when there is only 1 board for the parent' do
before do
other_board.destroy!
end
it 'does not destroy the board' do
expect { subject }.not_to change { Board.count }.from(1)
end
it 'returns an error and not nil board' do
subject
expect(mutation_response['errors']).not_to be_empty
expect(mutation_response['board']).not_to be_nil
end
end
end
end
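# The mutation class itself is not included in this commit. A rough sketch of
# the resolver behaviour the examples above imply is given below; the method
# name, the Boards::DestroyService call, and the response shape are assumptions.
def resolve(id:)
  board = authorized_find!(id: id)

  response = ::Boards::DestroyService.new(board.resource_parent, current_user).execute(board)

  {
    board: response.success? ? nil : board, # nil on success, per 'returns an empty board'
    errors: response.errors                 # populated when the last board cannot be destroyed
  }
end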

View File

@ -5,32 +5,6 @@ require 'spec_helper'
RSpec.describe API::Jobs do
include HttpIOHelpers
shared_examples 'a job with artifacts and trace' do |result_is_array: true|
context 'with artifacts and trace' do
let!(:second_job) { create(:ci_build, :trace_artifact, :artifacts, :test_reports, pipeline: pipeline) }
it 'returns artifacts and trace data', :skip_before_request do
get api(api_endpoint, api_user)
json_job = result_is_array ? json_response.select { |job| job['id'] == second_job.id }.first : json_response
expect(json_job['artifacts_file']).not_to be_nil
expect(json_job['artifacts_file']).not_to be_empty
expect(json_job['artifacts_file']['filename']).to eq(second_job.artifacts_file.filename)
expect(json_job['artifacts_file']['size']).to eq(second_job.artifacts_file.size)
expect(json_job['artifacts']).not_to be_nil
expect(json_job['artifacts']).to be_an Array
expect(json_job['artifacts'].size).to eq(second_job.job_artifacts.length)
json_job['artifacts'].each do |artifact|
expect(artifact).not_to be_nil
file_type = Ci::JobArtifact.file_types[artifact['file_type']]
expect(artifact['size']).to eq(second_job.job_artifacts.find_by(file_type: file_type).size)
expect(artifact['filename']).to eq(second_job.job_artifacts.find_by(file_type: file_type).filename)
expect(artifact['file_format']).to eq(second_job.job_artifacts.find_by(file_type: file_type).file_format)
end
end
end
end
let_it_be(:project, reload: true) do
create(:project, :repository, public_builds: false)
end
@ -166,295 +140,6 @@ RSpec.describe API::Jobs do
end
end
describe 'GET /projects/:id/pipelines/:pipeline_id/jobs' do
let(:query) { {} }
before do |example|
unless example.metadata[:skip_before_request]
job
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query
end
end
context 'authorized user' do
it 'returns pipeline jobs' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
end
it 'returns correct values' do
expect(json_response).not_to be_empty
expect(json_response.first['commit']['id']).to eq project.commit.id
expect(Time.parse(json_response.first['artifacts_expire_at'])).to be_like_time(job.artifacts_expire_at)
expect(json_response.first['artifacts_file']).to be_nil
expect(json_response.first['artifacts']).to be_an Array
expect(json_response.first['artifacts']).to be_empty
end
it_behaves_like 'a job with artifacts and trace' do
let(:api_endpoint) { "/projects/#{project.id}/pipelines/#{pipeline.id}/jobs" }
end
it 'returns pipeline data' do
json_job = json_response.first
expect(json_job['pipeline']).not_to be_empty
expect(json_job['pipeline']['id']).to eq job.pipeline.id
expect(json_job['pipeline']['ref']).to eq job.pipeline.ref
expect(json_job['pipeline']['sha']).to eq job.pipeline.sha
expect(json_job['pipeline']['status']).to eq job.pipeline.status
end
context 'filter jobs with one scope element' do
let(:query) { { 'scope' => 'pending' } }
it do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
end
end
context 'filter jobs with array of scope elements' do
let(:query) { { scope: %w(pending running) } }
it do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
end
end
context 'respond 400 when scope contains invalid state' do
let(:query) { { scope: %w(unknown running) } }
it { expect(response).to have_gitlab_http_status(:bad_request) }
end
context 'jobs in different pipelines' do
let!(:pipeline2) { create(:ci_empty_pipeline, project: project) }
let!(:job2) { create(:ci_build, pipeline: pipeline2) }
it 'excludes jobs from other pipelines' do
json_response.each { |job| expect(job['pipeline']['id']).to eq(pipeline.id) }
end
end
context 'when jobs belong to a dangling pipeline' do
let(:dangling_source) { Enums::Ci::Pipeline.dangling_sources.each_value.first }
let(:pipeline) do
create(:ci_pipeline, source: dangling_source, project: project)
end
it 'returns pipeline jobs' do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response[0]['pipeline']['sha']).to eq(pipeline.sha.to_s)
end
end
it 'avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query
end.count
create_list(:ci_build, 3, :trace_artifact, :artifacts, :test_reports, pipeline: pipeline)
expect do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query
end.not_to exceed_all_query_limit(control_count)
end
end
context 'unauthorized user' do
context 'when user is not logged in' do
let(:api_user) { nil }
it 'does not return jobs' do
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
context 'when user is guest' do
let(:api_user) { guest }
it 'does not return jobs' do
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
end
describe 'GET /projects/:id/pipelines/:pipeline_id/bridges' do
let!(:bridge) { create(:ci_bridge, pipeline: pipeline) }
let(:downstream_pipeline) { create(:ci_pipeline) }
let!(:pipeline_source) do
create(:ci_sources_pipeline,
source_pipeline: pipeline,
source_project: project,
source_job: bridge,
pipeline: downstream_pipeline,
project: downstream_pipeline.project)
end
let(:query) { {} }
before do |example|
unless example.metadata[:skip_before_request]
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
end
end
context 'authorized user' do
it 'returns pipeline bridges' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
end
it 'returns correct values' do
expect(json_response).not_to be_empty
expect(json_response.first['commit']['id']).to eq project.commit.id
expect(json_response.first['id']).to eq bridge.id
expect(json_response.first['name']).to eq bridge.name
expect(json_response.first['stage']).to eq bridge.stage
end
it 'returns pipeline data' do
json_bridge = json_response.first
expect(json_bridge['pipeline']).not_to be_empty
expect(json_bridge['pipeline']['id']).to eq bridge.pipeline.id
expect(json_bridge['pipeline']['ref']).to eq bridge.pipeline.ref
expect(json_bridge['pipeline']['sha']).to eq bridge.pipeline.sha
expect(json_bridge['pipeline']['status']).to eq bridge.pipeline.status
end
it 'returns downstream pipeline data' do
json_bridge = json_response.first
expect(json_bridge['downstream_pipeline']).not_to be_empty
expect(json_bridge['downstream_pipeline']['id']).to eq downstream_pipeline.id
expect(json_bridge['downstream_pipeline']['ref']).to eq downstream_pipeline.ref
expect(json_bridge['downstream_pipeline']['sha']).to eq downstream_pipeline.sha
expect(json_bridge['downstream_pipeline']['status']).to eq downstream_pipeline.status
end
context 'filter bridges' do
before do
create_bridge(pipeline, :pending)
create_bridge(pipeline, :running)
end
context 'with one scope element' do
let(:query) { { 'scope' => 'pending' } }
it :skip_before_request do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.count).to eq 1
expect(json_response.first["status"]).to eq "pending"
end
end
context 'with array of scope elements' do
let(:query) { { scope: %w(pending running) } }
it :skip_before_request do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.count).to eq 2
json_response.each { |r| expect(%w(pending running).include?(r['status'])).to be true }
end
end
end
context 'respond 400 when scope contains invalid state' do
let(:query) { { scope: %w(unknown running) } }
it { expect(response).to have_gitlab_http_status(:bad_request) }
end
context 'bridges in different pipelines' do
let!(:pipeline2) { create(:ci_empty_pipeline, project: project) }
let!(:bridge2) { create(:ci_bridge, pipeline: pipeline2) }
it 'excludes bridges from other pipelines' do
json_response.each { |bridge| expect(bridge['pipeline']['id']).to eq(pipeline.id) }
end
end
it 'avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
end.count
3.times { create_bridge(pipeline) }
expect do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
end.not_to exceed_all_query_limit(control_count)
end
end
context 'unauthorized user' do
context 'when user is not logged in' do
let(:api_user) { nil }
it 'does not return bridges' do
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
context 'when user is guest' do
let(:api_user) { guest }
it 'does not return bridges' do
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when user has no read access for pipeline' do
before do
allow(Ability).to receive(:allowed?).and_call_original
allow(Ability).to receive(:allowed?).with(api_user, :read_pipeline, pipeline).and_return(false)
end
it 'does not return bridges' do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when user has no read_build access for project' do
before do
allow(Ability).to receive(:allowed?).and_call_original
allow(Ability).to receive(:allowed?).with(api_user, :read_build, project).and_return(false)
end
it 'does not return bridges' do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
def create_bridge(pipeline, status = :created)
create(:ci_bridge, status: status, pipeline: pipeline).tap do |bridge|
downstream_pipeline = create(:ci_pipeline)
create(:ci_sources_pipeline,
source_pipeline: pipeline,
source_project: pipeline.project,
source_job: bridge,
pipeline: downstream_pipeline,
project: downstream_pipeline.project)
end
end
end
describe 'GET /projects/:id/jobs/:job_id' do
before do |example|
unless example.metadata[:skip_before_request]

View File

@ -10,6 +10,17 @@ RSpec.describe API::UsageData do
let(:known_event) { 'g_compliance_dashboard' }
let(:unknown_event) { 'unknown' }
context 'without CSRF token' do
it 'returns forbidden' do
stub_feature_flags(usage_data_api: true)
allow(Gitlab::RequestForgeryProtection).to receive(:verified?).and_return(false)
post api(endpoint, user), params: { event: known_event }
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'usage_data_api feature not enabled' do
it 'returns not_found' do
stub_feature_flags(usage_data_api: false)
@ -33,6 +44,7 @@ RSpec.describe API::UsageData do
stub_feature_flags(usage_data_api: true)
stub_feature_flags("usage_data_#{known_event}" => true)
stub_application_setting(usage_ping_enabled: true)
allow(Gitlab::RequestForgeryProtection).to receive(:verified?).and_return(true)
end
context 'when event is missing from params' do

View File

@ -10,22 +10,12 @@ RSpec.describe RuboCop::Cop::Gitlab::RailsLogger, type: :rubocop do
subject(:cop) { described_class.new }
it 'flags the use of Rails.logger.error with a constant receiver' do
inspect_source("Rails.logger.error('some error')")
described_class::LOG_METHODS.each do |method|
it "flags the use of Rails.logger.#{method} with a constant receiver" do
inspect_source("Rails.logger.#{method}('some error')")
expect(cop.offenses.size).to eq(1)
end
it 'flags the use of Rails.logger.info with a constant receiver' do
inspect_source("Rails.logger.info('some info')")
expect(cop.offenses.size).to eq(1)
end
it 'flags the use of Rails.logger.warn with a constant receiver' do
inspect_source("Rails.logger.warn('some warning')")
expect(cop.offenses.size).to eq(1)
expect(cop.offenses.size).to eq(1)
end
end
it 'does not flag the use of Rails.logger with a constant that is not Rails' do
@ -39,4 +29,10 @@ RSpec.describe RuboCop::Cop::Gitlab::RailsLogger, type: :rubocop do
expect(cop.offenses.size).to eq(0)
end
it 'does not flag the use of Rails.logger.level' do
inspect_source("Rails.logger.level")
expect(cop.offenses.size).to eq(0)
end
end

View File

@ -0,0 +1,112 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe MergeRequests::CleanupRefsService do
describe '.schedule' do
let(:merge_request) { build(:merge_request) }
it 'schedules MergeRequestCleanupRefsWorker' do
expect(MergeRequestCleanupRefsWorker)
.to receive(:perform_in)
.with(described_class::TIME_THRESHOLD, merge_request.id)
described_class.schedule(merge_request)
end
end
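# The expectations above pin the scheduling behaviour down to a one-liner along
# these lines (sketch only; the service implementation is not part of this file):
#
#   def self.schedule(merge_request)
#     MergeRequestCleanupRefsWorker.perform_in(TIME_THRESHOLD, merge_request.id)
#   end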
describe '#execute' do
before do
# Re-enable this call: it is stubbed in spec_helper for performance reasons,
# but it needs to run for this test.
allow(Gitlab::Git::KeepAround).to receive(:execute).and_call_original
end
subject(:result) { described_class.new(merge_request).execute }
shared_examples_for 'service that cleans up merge request refs' do
it 'creates keep around ref and deletes merge request refs' do
old_ref_head = ref_head
aggregate_failures do
expect(result[:status]).to eq(:success)
expect(kept_around?(old_ref_head)).to be_truthy
expect(ref_head).to be_nil
end
end
context 'when keep around ref cannot be created' do
before do
allow_next_instance_of(Gitlab::Git::KeepAround) do |keep_around|
expect(keep_around).to receive(:kept_around?).and_return(false)
end
end
it_behaves_like 'service that does not clean up merge request refs'
end
end
shared_examples_for 'service that does not clean up merge request refs' do
it 'does not delete merge request refs' do
aggregate_failures do
expect(result[:status]).to eq(:error)
expect(ref_head).to be_present
end
end
end
context 'when merge request is closed' do
let(:merge_request) { create(:merge_request, :closed) }
context "when closed #{described_class::TIME_THRESHOLD.inspect} ago" do
before do
merge_request.metrics.update!(latest_closed_at: described_class::TIME_THRESHOLD.ago)
end
it_behaves_like 'service that cleans up merge request refs'
end
context "when closed later than #{described_class::TIME_THRESHOLD.inspect} ago" do
before do
merge_request.metrics.update!(latest_closed_at: (described_class::TIME_THRESHOLD - 1.day).ago)
end
it_behaves_like 'service that does not clean up merge request refs'
end
end
context 'when merge request is merged' do
let(:merge_request) { create(:merge_request, :merged) }
context "when merged #{described_class::TIME_THRESHOLD.inspect} ago" do
before do
merge_request.metrics.update!(merged_at: described_class::TIME_THRESHOLD.ago)
end
it_behaves_like 'service that cleans up merge request refs'
end
context "when merged later than #{described_class::TIME_THRESHOLD.inspect} ago" do
before do
merge_request.metrics.update!(merged_at: (described_class::TIME_THRESHOLD - 1.day).ago)
end
it_behaves_like 'service that does not clean up merge request refs'
end
end
context 'when merge request is not closed nor merged' do
let(:merge_request) { create(:merge_request, :opened) }
it_behaves_like 'service that does not clean up merge request refs'
end
end
def kept_around?(commit)
Gitlab::Git::KeepAround.new(merge_request.project.repository).kept_around?(commit.id)
end
def ref_head
merge_request.project.repository.commit(merge_request.ref_path)
end
end
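# Sketch of the #execute flow the shared examples above describe. This is an
# inferred outline, not the diffed implementation; the eligibility check, the
# KeepAround call signature, and the error/success helpers are assumptions.
def execute
  return error unless closed_or_merged_before_threshold?

  repository   = merge_request.project.repository
  old_ref_head = repository.commit(merge_request.ref_path)

  # Keep the current ref head reachable before deleting the merge request ref.
  Gitlab::Git::KeepAround.execute(repository, old_ref_head.id)
  return error unless Gitlab::Git::KeepAround.new(repository).kept_around?(old_ref_head.id)

  repository.delete_refs(merge_request.ref_path)
  success
end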

View File

@ -99,6 +99,12 @@ RSpec.describe MergeRequests::CloseService do
described_class.new(project, user).execute(merge_request)
end
it 'schedules CleanupRefsService' do
expect(MergeRequests::CleanupRefsService).to receive(:schedule).with(merge_request)
described_class.new(project, user).execute(merge_request)
end
context 'current user is not authorized to close merge request' do
before do
perform_enqueued_jobs do

View File

@ -72,6 +72,12 @@ RSpec.describe MergeRequests::PostMergeService do
subject
end
it 'schedules CleanupRefsService' do
expect(MergeRequests::CleanupRefsService).to receive(:schedule).with(merge_request)
subject
end
context 'when the merge request has review apps' do
it 'cancels all review app deployments' do
pipeline = create(:ci_pipeline,

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
RSpec.shared_examples 'a job with artifacts and trace' do |result_is_array: true|
context 'with artifacts and trace' do
let!(:second_job) { create(:ci_build, :trace_artifact, :artifacts, :test_reports, pipeline: pipeline) }
it 'returns artifacts and trace data', :skip_before_request do
get api(api_endpoint, api_user)
json_job = json_response.is_a?(Array) ? json_response.find { |job| job['id'] == second_job.id } : json_response
expect(json_job['artifacts_file']).not_to be_nil
expect(json_job['artifacts_file']).not_to be_empty
expect(json_job['artifacts_file']['filename']).to eq(second_job.artifacts_file.filename)
expect(json_job['artifacts_file']['size']).to eq(second_job.artifacts_file.size)
expect(json_job['artifacts']).not_to be_nil
expect(json_job['artifacts']).to be_an Array
expect(json_job['artifacts'].size).to eq(second_job.job_artifacts.length)
json_job['artifacts'].each do |artifact|
expect(artifact).not_to be_nil
file_type = Ci::JobArtifact.file_types[artifact['file_type']]
expect(artifact['size']).to eq(second_job.job_artifacts.find_by(file_type: file_type).size)
expect(artifact['filename']).to eq(second_job.job_artifacts.find_by(file_type: file_type).filename)
expect(artifact['file_format']).to eq(second_job.job_artifacts.find_by(file_type: file_type).file_format)
end
end
end
end
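# Consuming specs include this shared example and provide the endpoint, with
# api_user and pipeline already in scope, as the pipelines spec above does:
#
#   it_behaves_like 'a job with artifacts and trace' do
#     let(:api_endpoint) { "/projects/#{project.id}/pipelines/#{pipeline.id}/jobs" }
#   end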

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe MergeRequestCleanupRefsWorker do
describe '#perform' do
context 'when merge request exists' do
let(:merge_request) { create(:merge_request) }
let(:job_args) { merge_request.id }
include_examples 'an idempotent worker' do
it 'calls MergeRequests::CleanupRefsService#execute' do
expect_next_instance_of(MergeRequests::CleanupRefsService, merge_request) do |svc|
expect(svc).to receive(:execute).and_call_original
end.twice
subject
end
end
end
context 'when merge request does not exist' do
it 'does not call MergeRequests::CleanupRefsService' do
expect(MergeRequests::CleanupRefsService).not_to receive(:new)
perform_multiple(1)
end
end
end
end
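# The worker exercised above presumably amounts to the following (sketch only;
# the worker class is not included in this commit, and the find_by_id lookup
# with an early return is inferred from the 'merge request does not exist' example):
def perform(merge_request_id)
  merge_request = MergeRequest.find_by_id(merge_request_id)
  return unless merge_request

  MergeRequests::CleanupRefsService.new(merge_request).execute
end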