Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-09-25 09:09:40 +00:00
parent 654281e682
commit e6bcd6e738
78 changed files with 1104 additions and 345 deletions

View File

@ -0,0 +1,9 @@
-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtgemxR8RUJXi3p7G/dkh
Yuln1L4lA6GtQsT83X0yTVDbLVsI2C6bepsRjGiLV0R/9JGvTojORx+9F/ZQAiEC
g6QXWasAOSmrzr4EjADG6cWcCnOju8hX9yib1HUIBxl+jHkmXP3NPuwyb8p2G149
EG1o4apEqE5RtqV/Xyx/u57xTYYZShJ/c7o4iA8xvt6IAKFPFKpQwb5hv4KvUZBP
h0xG2qvOjDu430fK8JclPlXHqPjXDkXOZyLd4FvRStdEQU3RVXvUQfuGt/tOMS7J
nPQ94fr/xdaEbcEtIlr32+tcgsMWyhqtDCPUWJT1aRPviUgaJKLoVs8tRKwYMV9+
1wIDAQAB
-----END PUBLIC KEY-----

View File

@ -1,4 +1,5 @@
<script>
import { GlBadge, GlTab, GlTabs } from '@gitlab/ui';
import environmentsMixin from '../mixins/environments_mixin';
import CIPaginationMixin from '../../vue_shared/mixins/ci_pagination_api_mixin';
import StopEnvironmentModal from '../components/stop_environment_modal.vue';
@ -6,8 +7,11 @@ import DeleteEnvironmentModal from '../components/delete_environment_modal.vue';
export default {
components: {
StopEnvironmentModal,
DeleteEnvironmentModal,
GlBadge,
GlTab,
GlTabs,
StopEnvironmentModal,
},
mixins: [environmentsMixin, CIPaginationMixin],
@ -73,9 +77,21 @@ export default {
<b>{{ folderName }}</b>
</h4>
<div class="top-area">
<tabs v-if="!isLoading" :tabs="tabs" scope="environments" @onChangeTab="onChangeTab" />
</div>
<gl-tabs v-if="!isLoading" scope="environments" content-class="gl-display-none">
<gl-tab
v-for="(tab, i) in tabs"
:key="`${tab.name}-${i}`"
:active="tab.isActive"
:title-item-class="tab.isActive ? 'gl-outline-none' : ''"
:title-link-attributes="{ 'data-testid': `environments-tab-${tab.scope}` }"
@click="onChangeTab(tab.scope)"
>
<template #title>
<span>{{ tab.name }}</span>
<gl-badge size="sm" class="gl-tab-counter-badge">{{ tab.count }}</gl-badge>
</template>
</gl-tab>
</gl-tabs>
<container
:is-loading="isLoading"

View File

@ -6,7 +6,7 @@ import App from './components/jira_import_app.vue';
Vue.use(VueApollo);
const defaultClient = createDefaultClient();
const defaultClient = createDefaultClient({}, { assumeImmutableResults: true });
const apolloProvider = new VueApollo({
defaultClient,

View File

@ -2,7 +2,6 @@
mutation($input: JiraImportStartInput!) {
jiraImportStart(input: $input) {
clientMutationId
jiraImport {
...JiraImport
}

View File

@ -1,3 +1,4 @@
import produce from 'immer';
import getJiraImportDetailsQuery from '../queries/get_jira_import_details.query.graphql';
import { IMPORT_STATE } from './jira_import_utils';
@ -13,22 +14,16 @@ export const addInProgressImportToStore = (store, jiraImportStart, fullPath) =>
},
};
const cacheData = store.readQuery({
const sourceData = store.readQuery({
...queryDetails,
});
store.writeQuery({
...queryDetails,
data: {
project: {
...cacheData.project,
jiraImportStatus: IMPORT_STATE.SCHEDULED,
jiraImports: {
...cacheData.project.jiraImports,
nodes: cacheData.project.jiraImports.nodes.concat(jiraImportStart.jiraImport),
},
},
},
data: produce(sourceData, draftData => {
draftData.project.jiraImportStatus = IMPORT_STATE.SCHEDULED; // eslint-disable-line no-param-reassign
draftData.project.jiraImports.nodes.push(jiraImportStart.jiraImport);
}),
});
};

View File

@ -13,6 +13,8 @@ export const TestStatus = {
FAILED: 'failed',
SKIPPED: 'skipped',
SUCCESS: 'success',
ERROR: 'error',
UNKNOWN: 'unknown',
};
export const FETCH_AUTHOR_ERROR_MESSAGE = __('There was a problem fetching project users.');

View File

@ -1,13 +1,19 @@
import { __, sprintf } from '../../../locale';
import { TestStatus } from '../../constants';
export function iconForTestStatus(status) {
switch (status) {
case 'success':
case TestStatus.SUCCESS:
return 'status_success_borderless';
case 'failed':
case TestStatus.FAILED:
return 'status_failed_borderless';
default:
case TestStatus.ERROR:
return 'status_warning_borderless';
case TestStatus.SKIPPED:
return 'status_skipped_borderless';
case TestStatus.UNKNOWN:
default:
return 'status_notfound_borderless';
}
}

View File

@ -203,18 +203,6 @@
margin-right: 0;
}
}
&:hover,
&:focus {
text-decoration: none;
outline: 0;
opacity: 1;
color: $white;
&.header-user-dropdown-toggle .header-user-avatar {
border-color: $white;
}
}
}
.header-new-dropdown-toggle {

View File

@ -9,6 +9,7 @@
}
}
.ci-status-icon-error,
.ci-status-icon-failed {
svg {
fill: $red-500;

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
module ShowInheritedLabelsChecker
extend ActiveSupport::Concern
private
def show_inherited_labels?(include_ancestor_groups)
Feature.enabled?(:show_inherited_labels, @project || @group) || include_ancestor_groups # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
end
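A minimal sketch of the fallback semantics, assuming a controller that includes this concern (as the labels controllers below do):

```ruby
# Hypothetical calls; ancestor group labels are shown either because the
# :show_inherited_labels flag is enabled for the project/group, or because
# the caller explicitly passed include_ancestor_groups.
show_inherited_labels?(nil)   # => true only when the feature flag is enabled
show_inherited_labels?(true)  # => true regardless of the flag
```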

View File

@ -2,6 +2,7 @@
class Groups::LabelsController < Groups::ApplicationController
include ToggleSubscriptionAction
include ShowInheritedLabelsChecker
before_action :label, only: [:edit, :update, :destroy]
before_action :authorize_admin_labels!, only: [:new, :create, :edit, :update, :destroy]
@ -12,8 +13,9 @@ class Groups::LabelsController < Groups::ApplicationController
def index
respond_to do |format|
format.html do
@labels = GroupLabelsFinder
.new(current_user, @group, params.merge(sort: sort)).execute
# at group level we do not want to list project labels,
# we only want `only_group_labels = false` when pulling labels for label filter dropdowns, fetched through json
@labels = available_labels(params.merge(only_group_labels: true)).page(params[:page])
end
format.json do
render json: LabelSerializer.new.represent_appearance(available_labels)
@ -74,7 +76,7 @@ class Groups::LabelsController < Groups::ApplicationController
end
def label
@label ||= @group.labels.find(params[:id])
@label ||= available_labels(params.merge(only_group_labels: true)).find(params[:id])
end
alias_method :subscribable_resource, :label
@ -102,15 +104,17 @@ class Groups::LabelsController < Groups::ApplicationController
session[:previous_labels_path] = URI(request.referer || '').path
end
def available_labels
def available_labels(options = params)
@available_labels ||=
LabelsFinder.new(
current_user,
group_id: @group.id,
only_group_labels: params[:only_group_labels],
include_ancestor_groups: params[:include_ancestor_groups],
include_descendant_groups: params[:include_descendant_groups],
search: params[:search]).execute
only_group_labels: options[:only_group_labels],
include_ancestor_groups: show_inherited_labels?(params[:include_ancestor_groups]),
sort: sort,
subscribed: options[:subscribed],
include_descendant_groups: options[:include_descendant_groups],
search: options[:search]).execute
end
def sort

View File

@ -26,8 +26,7 @@ class Import::ManifestController < Import::BaseController
manifest = Gitlab::ManifestImport::Manifest.new(params[:manifest].tempfile)
if manifest.valid?
session[:manifest_import_repositories] = manifest.projects
session[:manifest_import_group_id] = group.id
manifest_import_metadata.save(manifest.projects, group.id)
redirect_to status_import_manifest_path
else
@ -96,12 +95,16 @@ class Import::ManifestController < Import::BaseController
# rubocop: disable CodeReuse/ActiveRecord
def group
@group ||= Group.find_by(id: session[:manifest_import_group_id])
@group ||= Group.find_by(id: manifest_import_metadata.group_id)
end
# rubocop: enable CodeReuse/ActiveRecord
def manifest_import_metadata
@manifest_import_status ||= Gitlab::ManifestImport::Metadata.new(current_user, fallback: session)
end
def repositories
@repositories ||= session[:manifest_import_repositories]
@repositories ||= manifest_import_metadata.repositories
end
def find_jobs

View File

@ -2,6 +2,7 @@
class Projects::LabelsController < Projects::ApplicationController
include ToggleSubscriptionAction
include ShowInheritedLabelsChecker
before_action :check_issuables_available!
before_action :label, only: [:edit, :update, :destroy, :promote]
@ -161,7 +162,7 @@ class Projects::LabelsController < Projects::ApplicationController
@available_labels ||=
LabelsFinder.new(current_user,
project_id: @project.id,
include_ancestor_groups: params[:include_ancestor_groups],
include_ancestor_groups: show_inherited_labels?(params[:include_ancestor_groups]),
search: params[:search],
subscribed: params[:subscribed],
sort: sort).execute

View File

@ -1,29 +0,0 @@
# frozen_string_literal: true
class GroupLabelsFinder
attr_reader :current_user, :group, :params
def initialize(current_user, group, params = {})
@current_user = current_user
@group = group
@params = params
end
def execute
group.labels
.optionally_subscribed_by(subscriber_id)
.optionally_search(params[:search])
.order_by(params[:sort])
.page(params[:page])
end
private
def subscriber_id
current_user&.id if subscribed?
end
def subscribed?
params[:subscribed] == 'true'
end
end

View File

@ -8,6 +8,8 @@ module Types
value 'DUE_DATE_ASC', 'Due date by ascending order', value: :due_date_asc
value 'DUE_DATE_DESC', 'Due date by descending order', value: :due_date_desc
value 'RELATIVE_POSITION_ASC', 'Relative position by ascending order', value: :relative_position_asc
value 'SEVERITY_ASC', 'Severity from less critical to more critical', value: :severity_asc
value 'SEVERITY_DESC', 'Severity from more critical to less critical', value: :severity_desc
end
end

View File

@ -36,8 +36,7 @@ module Types
end
field :author, Types::UserType, null: false,
description: 'User that created the issue',
resolve: -> (obj, _args, _ctx) { Gitlab::Graphql::Loaders::BatchModelLoader.new(User, obj.author_id).find }
description: 'User that created the issue'
field :assignees, Types::UserType.connection_type, null: true,
description: 'Assignees of the issue'
@ -45,16 +44,14 @@ module Types
field :labels, Types::LabelType.connection_type, null: true,
description: 'Labels of the issue'
field :milestone, Types::MilestoneType, null: true,
description: 'Milestone of the issue',
resolve: -> (obj, _args, _ctx) { Gitlab::Graphql::Loaders::BatchModelLoader.new(Milestone, obj.milestone_id).find }
description: 'Milestone of the issue'
field :due_date, Types::TimeType, null: true,
description: 'Due date of the issue'
field :confidential, GraphQL::BOOLEAN_TYPE, null: false,
description: 'Indicates the issue is confidential'
field :discussion_locked, GraphQL::BOOLEAN_TYPE, null: false,
description: 'Indicates discussion is locked on the issue',
resolve: -> (obj, _args, _ctx) { !!obj.discussion_locked }
description: 'Indicates discussion is locked on the issue'
field :upvotes, GraphQL::INT_TYPE, null: false,
description: 'Number of upvotes the issue has received'
@ -108,6 +105,18 @@ module Types
field :severity, Types::IssuableSeverityEnum, null: true,
description: 'Severity level of the incident'
def author
Gitlab::Graphql::Loaders::BatchModelLoader.new(User, object.author_id).find
end
def milestone
Gitlab::Graphql::Loaders::BatchModelLoader.new(Milestone, object.milestone_id).find
end
def discussion_locked
!!object.discussion_locked
end
end
end

View File

@ -12,7 +12,10 @@ module Types
authorize :read_project
field :project, Types::ProjectType, null: true,
description: 'Project that User is a member of',
resolve: -> (obj, _args, _ctx) { Gitlab::Graphql::Loaders::BatchModelLoader.new(Project, obj.source_id).find }
description: 'Project that User is a member of'
def project
Gitlab::Graphql::Loaders::BatchModelLoader.new(Project, object.source_id).find
end
end
end

View File

@ -24,16 +24,14 @@ module Types
field :project, Types::ProjectType,
description: 'The project the snippet is associated with',
null: true,
authorize: :read_project,
resolve: -> (snippet, args, context) { Gitlab::Graphql::Loaders::BatchModelLoader.new(Project, snippet.project_id).find }
authorize: :read_project
# Author can be nil in some scenarios. For example,
# when the admin setting restricted visibility
# level is set to public
field :author, Types::UserType,
description: 'The owner of the snippet',
null: true,
resolve: -> (snippet, args, context) { Gitlab::Graphql::Loaders::BatchModelLoader.new(User, snippet.author_id).find }
null: true
field :file_name, GraphQL::STRING_TYPE,
description: 'File Name of the snippet',
@ -86,5 +84,13 @@ module Types
null: true
markdown_field :description_html, null: true, method: :description
def author
Gitlab::Graphql::Loaders::BatchModelLoader.new(User, object.author_id).find
end
def project
Gitlab::Graphql::Loaders::BatchModelLoader.new(Project, object.project_id).find
end
end
end

View File

@ -5,9 +5,16 @@ module Types
graphql_name 'Sort'
description 'Common sort values'
# Deprecated, as we prefer uppercase enums
# https://gitlab.com/groups/gitlab-org/-/epics/1838
value 'updated_desc', 'Updated at descending order'
value 'updated_asc', 'Updated at ascending order'
value 'created_desc', 'Created at descending order'
value 'created_asc', 'Created at ascending order'
value 'UPDATED_DESC', 'Updated at descending order', value: :updated_desc
value 'UPDATED_ASC', 'Updated at ascending order', value: :updated_asc
value 'CREATED_DESC', 'Created at descending order', value: :created_desc
value 'CREATED_ASC', 'Created at ascending order', value: :created_asc
end
end

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
module SearchHelper
SEARCH_PERMITTED_PARAMS = [:search, :scope, :project_id, :group_id, :repository_ref, :snippets, :state].freeze
SEARCH_PERMITTED_PARAMS = [:search, :scope, :project_id, :group_id, :repository_ref, :snippets, :state, :confidential].freeze
def search_autocomplete_opts(term)
return unless current_user

View File

@ -101,6 +101,8 @@ class Issue < ApplicationRecord
scope :order_relative_position_asc, -> { reorder(::Gitlab::Database.nulls_last_order('relative_position', 'ASC')) }
scope :order_closed_date_desc, -> { reorder(closed_at: :desc) }
scope :order_created_at_desc, -> { reorder(created_at: :desc) }
scope :order_severity_asc, -> { includes(:issuable_severity).order('issuable_severities.severity ASC NULLS FIRST') }
scope :order_severity_desc, -> { includes(:issuable_severity).order('issuable_severities.severity DESC NULLS LAST') }
scope :preload_associated_models, -> { preload(:assignees, :labels, project: :namespace) }
scope :with_web_entity_associations, -> { preload(:author, :project) }
@ -232,6 +234,8 @@ class Issue < ApplicationRecord
when 'due_date', 'due_date_asc' then order_due_date_asc.with_order_id_desc
when 'due_date_desc' then order_due_date_desc.with_order_id_desc
when 'relative_position', 'relative_position_asc' then order_relative_position_asc.with_order_id_desc
when 'severity_asc' then order_severity_asc.with_order_id_desc
when 'severity_desc' then order_severity_desc.with_order_id_desc
else
super
end
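A rough illustration of the ordering the new scopes produce, assuming the usual low/medium/high/critical severity levels; the NULLS FIRST/LAST clauses keep incidents without an `IssuableSeverity` record at the least-critical end of both orders:

```ruby
# Assumed data: incidents with and without an associated IssuableSeverity.
Issue.order_severity_asc   # (no severity) -> low -> medium -> high -> critical
Issue.order_severity_desc  # critical -> high -> medium -> low -> (no severity)
```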

View File

@ -14,7 +14,7 @@ module Search
Gitlab::SearchResults.new(current_user,
params[:search],
projects,
filters: { state: params[:state] })
filters: { state: params[:state], confidential: params[:confidential] })
end
def projects

View File

@ -16,7 +16,7 @@ module Search
params[:search],
projects,
group: group,
filters: { state: params[:state] }
filters: { state: params[:state], confidential: params[:confidential] }
)
end

View File

@ -13,7 +13,8 @@ module Search
params[:search],
project: project,
repository_ref: params[:repository_ref],
filters: { state: params[:state] })
filters: { confidential: params[:confidential], state: params[:state] }
)
end
def scope

View File

@ -104,7 +104,6 @@ module Users
def build_user_params(skip_authorization:)
if current_user&.admin?
user_params = params.slice(*admin_create_params)
user_params[:created_by_id] = current_user&.id
if params[:reset_password]
user_params.merge!(force_random_password: true, password_expires_at: nil)
@ -125,6 +124,8 @@ module Users
end
end
user_params[:created_by_id] = current_user&.id
if user_default_internal_regex_enabled? && !user_params.key?(:external)
user_params[:external] = user_external?
end

View File

@ -0,0 +1,5 @@
---
title: Add severity and published sorting for incident issues
merge_request: 42800
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Migrate environments folder tabs to GlTabs
merge_request: 42894
author:
type: changed

View File

@ -0,0 +1,5 @@
---
title: 'Unit Test Report: Fix icon for errored status'
merge_request: 42540
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Always set created_by_id when creating a user
merge_request: 43342
author:
type: changed

View File

@ -1,7 +0,0 @@
---
name: migrate_bio_to_user_details
introduced_by_url:
rollout_issue_url:
group:
type: development
default_enabled: true

View File

@ -2,6 +2,6 @@
name: service_desk_custom_address
introduced_by_url:
rollout_issue_url:
group:
group: group::certify
type: development
default_enabled: false

View File

@ -0,0 +1,7 @@
---
name: show_inherited_labels
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/42960
rollout_issue_url:
group: group::project management
type: development
default_enabled: false

View File

@ -1,7 +1,7 @@
---
name: soft_email_confirmation
introduced_by_url:
rollout_issue_url:
group:
introduced_by_url: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/31245
rollout_issue_url:
group: group::acquisition
type: development
default_enabled: false

View File

@ -1,7 +1,7 @@
---
name: users_search
introduced_by_url:
rollout_issue_url:
group:
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/255282
group: group::global search
type: development
default_enabled: true

View File

@ -1,7 +1,8 @@
# frozen_string_literal: true
Gitlab.ee do
public_key_file = File.read(Rails.root.join(".license_encryption_key.pub"))
prefix = ENV['GITLAB_LICENSE_MODE'] == 'test' ? 'test_' : ''
public_key_file = File.read(Rails.root.join(".#{prefix}license_encryption_key.pub"))
public_key = OpenSSL::PKey::RSA.new(public_key_file)
Gitlab::License.encryption_key = public_key
rescue

View File

@ -66,8 +66,6 @@ class CleanupProjectsWithMissingNamespace < ActiveRecord::Migration[6.0]
end
def ensure_bio_is_assigned_to_user_details
return if Feature.disabled?(:migrate_bio_to_user_details, default_enabled: true)
user_detail.bio = bio.to_s[0...255]
end

View File

@ -117,9 +117,9 @@ For source installations the following settings are nested under `artifacts:` an
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where Artifacts will be stored| |
| `direct_upload` | Set to true to enable direct upload of Artifacts without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. | `false` |
| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
| `proxy_download` | Set to true to enable proxying all files served. Option allows to reduce egress traffic as this allows clients to download directly from remote storage instead of proxying all data | `false` |
| `direct_upload` | Set to `true` to enable direct upload of Artifacts without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. | `false` |
| `background_upload` | Set to `false` to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
| `proxy_download` | Set to `true` to enable proxying all files served. Option allows to reduce egress traffic as this allows clients to download directly from remote storage instead of proxying all data | `false` |
| `connection` | Various connection options described below | |
#### Connection settings
@ -318,7 +318,7 @@ _The uploads are stored by default in
In order to migrate back to local storage:
1. Set both `direct_upload` and `background_upload` to false in `gitlab.rb`, under the artifacts object storage settings.
1. Set both `direct_upload` and `background_upload` to `false` in `gitlab.rb`, under the artifacts object storage settings.
1. [Reconfigure GitLab](restart_gitlab.md#omnibus-gitlab-reconfigure).
1. Run `gitlab-rake gitlab:artifacts:migrate_to_local`.
1. Disable object_storage for artifacts in `gitlab.rb`:
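A sketch of the Omnibus setting that step refers to; the exact key name is an assumption here:

```ruby
# /etc/gitlab/gitlab.rb (assumed setting name)
gitlab_rails['artifacts_object_store_enabled'] = false
```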

View File

@ -92,9 +92,9 @@ then `object_store:`. On Omnibus installations, they are prefixed by
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where external diffs will be stored| |
| `direct_upload` | Set to true to enable direct upload of external diffs without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. | `false` |
| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
| `proxy_download` | Set to true to enable proxying all files served. Option allows to reduce egress traffic as this allows clients to download directly from remote storage instead of proxying all data | `false` |
| `direct_upload` | Set to `true` to enable direct upload of external diffs without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. | `false` |
| `background_upload` | Set to `false` to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
| `proxy_download` | Set to `true` to enable proxying all files served. Option allows to reduce egress traffic as this allows clients to download directly from remote storage instead of proxying all data | `false` |
| `connection` | Various connection options described below | |
### S3 compatible connection settings

View File

@ -68,9 +68,9 @@ For source installations the following settings are nested under `uploads:` and
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where Uploads will be stored| |
| `direct_upload` | Set to true to remove Puma from the Upload path. Workhorse handles the actual Artifact Upload to Object Storage while Puma does minimal processing to keep track of the upload. There is no need for local shared storage. The option may be removed if support for a single storage type for all files is introduced. Read more on [direct upload](../development/uploads.md#direct-upload). | `false` |
| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 (if `direct_upload` is set to `true` it will override `background_upload`) | `true` |
| `proxy_download` | Set to true to enable proxying all files served. Option allows to reduce egress traffic as this allows clients to download directly from remote storage instead of proxying all data | `false` |
| `direct_upload` | Set to `true` to remove Puma from the Upload path. Workhorse handles the actual Artifact Upload to Object Storage while Puma does minimal processing to keep track of the upload. There is no need for local shared storage. The option may be removed if support for a single storage type for all files is introduced. Read more on [direct upload](../development/uploads.md#direct-upload). | `false` |
| `background_upload` | Set to `false` to disable automatic upload. Option may be removed once upload is direct to S3 (if `direct_upload` is set to `true` it will override `background_upload`) | `true` |
| `proxy_download` | Set to `true` to enable proxying all files served. Option allows to reduce egress traffic as this allows clients to download directly from remote storage instead of proxying all data | `false` |
| `connection` | Various connection options described below | |
### Connection settings

View File

@ -434,6 +434,16 @@ type AlertManagementAlertEdge {
Values for sorting alerts
"""
enum AlertManagementAlertSort {
"""
Created at ascending order
"""
CREATED_ASC
"""
Created at descending order
"""
CREATED_DESC
"""
Created time by ascending order
"""
@ -494,6 +504,16 @@ enum AlertManagementAlertSort {
"""
STATUS_DESC
"""
Updated at ascending order
"""
UPDATED_ASC
"""
Updated at descending order
"""
UPDATED_DESC
"""
Created time by ascending order
"""
@ -8712,6 +8732,16 @@ type IssueSetWeightPayload {
Values for sorting issues
"""
enum IssueSort {
"""
Created at ascending order
"""
CREATED_ASC
"""
Created at descending order
"""
CREATED_DESC
"""
Due date by ascending order
"""
@ -8752,11 +8782,41 @@ enum IssueSort {
"""
PRIORITY_DESC
"""
Published issues shown last
"""
PUBLISHED_ASC
"""
Published issues shown first
"""
PUBLISHED_DESC
"""
Relative position by ascending order
"""
RELATIVE_POSITION_ASC
"""
Severity from less critical to more critical
"""
SEVERITY_ASC
"""
Severity from more critical to less critical
"""
SEVERITY_DESC
"""
Updated at ascending order
"""
UPDATED_ASC
"""
Updated at descending order
"""
UPDATED_DESC
"""
Weight by ascending order
"""
@ -10488,6 +10548,16 @@ type MergeRequestSetWipPayload {
Values for sorting merge requests
"""
enum MergeRequestSort {
"""
Created at ascending order
"""
CREATED_ASC
"""
Created at descending order
"""
CREATED_DESC
"""
Label priority by ascending order
"""
@ -10528,6 +10598,16 @@ enum MergeRequestSort {
"""
PRIORITY_DESC
"""
Updated at ascending order
"""
UPDATED_ASC
"""
Updated at descending order
"""
UPDATED_DESC
"""
Created at ascending order
"""
@ -16761,6 +16841,26 @@ type SnippetPermissions {
Common sort values
"""
enum Sort {
"""
Created at ascending order
"""
CREATED_ASC
"""
Created at descending order
"""
CREATED_DESC
"""
Updated at ascending order
"""
UPDATED_ASC
"""
Updated at descending order
"""
UPDATED_DESC
"""
Created at ascending order
"""

View File

@ -1248,6 +1248,30 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "UPDATED_DESC",
"description": "Updated at descending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "UPDATED_ASC",
"description": "Updated at ascending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "CREATED_DESC",
"description": "Created at descending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "CREATED_ASC",
"description": "Created at ascending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "STARTED_AT_ASC",
"description": "Start time by ascending order",
@ -24108,6 +24132,30 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "UPDATED_DESC",
"description": "Updated at descending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "UPDATED_ASC",
"description": "Updated at ascending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "CREATED_DESC",
"description": "Created at descending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "CREATED_ASC",
"description": "Created at ascending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "PRIORITY_ASC",
"description": "Priority by ascending order",
@ -24162,6 +24210,18 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "SEVERITY_ASC",
"description": "Severity from less critical to more critical",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "SEVERITY_DESC",
"description": "Severity from more critical to less critical",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "WEIGHT_ASC",
"description": "Weight by ascending order",
@ -24173,6 +24233,18 @@
"description": "Weight by descending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "PUBLISHED_ASC",
"description": "Published issues shown last",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "PUBLISHED_DESC",
"description": "Published issues shown first",
"isDeprecated": false,
"deprecationReason": null
}
],
"possibleTypes": null
@ -29154,6 +29226,30 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "UPDATED_DESC",
"description": "Updated at descending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "UPDATED_ASC",
"description": "Updated at ascending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "CREATED_DESC",
"description": "Created at descending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "CREATED_ASC",
"description": "Created at ascending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "PRIORITY_ASC",
"description": "Priority by ascending order",
@ -49085,6 +49181,30 @@
"description": "Created at ascending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "UPDATED_DESC",
"description": "Updated at descending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "UPDATED_ASC",
"description": "Updated at ascending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "CREATED_DESC",
"description": "Created at descending order",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "CREATED_ASC",
"description": "Created at ascending order",
"isDeprecated": false,
"deprecationReason": null
}
],
"possibleTypes": null

View File

@ -2935,6 +2935,8 @@ Values for sorting alerts.
| Value | Description |
| ----- | ----------- |
| `CREATED_ASC` | Created at ascending order |
| `CREATED_DESC` | Created at descending order |
| `CREATED_TIME_ASC` | Created time by ascending order |
| `CREATED_TIME_DESC` | Created time by descending order |
| `ENDED_AT_ASC` | End time by ascending order |
@ -2947,6 +2949,8 @@ Values for sorting alerts.
| `STARTED_AT_DESC` | Start time by descending order |
| `STATUS_ASC` | Status by order: Ignored > Resolved > Acknowledged > Triggered |
| `STATUS_DESC` | Status by order: Triggered > Acknowledged > Resolved > Ignored |
| `UPDATED_ASC` | Updated at ascending order |
| `UPDATED_DESC` | Updated at descending order |
| `UPDATED_TIME_ASC` | Created time by ascending order |
| `UPDATED_TIME_DESC` | Created time by descending order |
| `created_asc` | Created at ascending order |
@ -3170,6 +3174,8 @@ Values for sorting issues.
| Value | Description |
| ----- | ----------- |
| `CREATED_ASC` | Created at ascending order |
| `CREATED_DESC` | Created at descending order |
| `DUE_DATE_ASC` | Due date by ascending order |
| `DUE_DATE_DESC` | Due date by descending order |
| `LABEL_PRIORITY_ASC` | Label priority by ascending order |
@ -3178,7 +3184,13 @@ Values for sorting issues.
| `MILESTONE_DUE_DESC` | Milestone due date by descending order |
| `PRIORITY_ASC` | Priority by ascending order |
| `PRIORITY_DESC` | Priority by descending order |
| `PUBLISHED_ASC` | Published issues shown last |
| `PUBLISHED_DESC` | Published issues shown first |
| `RELATIVE_POSITION_ASC` | Relative position by ascending order |
| `SEVERITY_ASC` | Severity from less critical to more critical |
| `SEVERITY_DESC` | Severity from more critical to less critical |
| `UPDATED_ASC` | Updated at ascending order |
| `UPDATED_DESC` | Updated at descending order |
| `WEIGHT_ASC` | Weight by ascending order |
| `WEIGHT_DESC` | Weight by descending order |
| `created_asc` | Created at ascending order |
@ -3252,6 +3264,8 @@ Values for sorting merge requests.
| Value | Description |
| ----- | ----------- |
| `CREATED_ASC` | Created at ascending order |
| `CREATED_DESC` | Created at descending order |
| `LABEL_PRIORITY_ASC` | Label priority by ascending order |
| `LABEL_PRIORITY_DESC` | Label priority by descending order |
| `MERGED_AT_ASC` | Merge time by ascending order |
@ -3260,6 +3274,8 @@ Values for sorting merge requests.
| `MILESTONE_DUE_DESC` | Milestone due date by descending order |
| `PRIORITY_ASC` | Priority by ascending order |
| `PRIORITY_DESC` | Priority by descending order |
| `UPDATED_ASC` | Updated at ascending order |
| `UPDATED_DESC` | Updated at descending order |
| `created_asc` | Created at ascending order |
| `created_desc` | Created at descending order |
| `updated_asc` | Updated at ascending order |
@ -3488,6 +3504,10 @@ Common sort values.
| Value | Description |
| ----- | ----------- |
| `CREATED_ASC` | Created at ascending order |
| `CREATED_DESC` | Created at descending order |
| `UPDATED_ASC` | Updated at ascending order |
| `UPDATED_DESC` | Updated at descending order |
| `created_asc` | Created at ascending order |
| `created_desc` | Created at descending order |
| `updated_asc` | Updated at ascending order |

View File

@ -87,6 +87,23 @@ the `plan` parameter associated with a namespace:
]
```
Users on GitLab.com will also see a `max_seats_used` parameter. `max_seats_used`
is the highest number of users the group had.
`max_seats_used` will be non-zero only for namespaces on paid plans.
```json
[
{
"id": 1,
"name": "user1",
"billable_members_count": 2,
"max_seats_used": 3,
...
}
]
```
NOTE: **Note:**
Only group maintainers/owners are presented with `members_count_with_descendants`, as well as `plan` **(BRONZE ONLY)**.
@ -123,6 +140,7 @@ Example response:
"web_url": "https://gitlab.example.com/groups/twitter",
"members_count_with_descendants": 2,
"billable_members_count": 2,
"max_seats_used": 0,
"plan": "default",
"trial_ends_on": null,
"trial": false
@ -162,6 +180,7 @@ Example response:
"web_url": "https://gitlab.example.com/groups/group1",
"members_count_with_descendants": 2,
"billable_members_count": 2,
"max_seats_used": 0,
"plan": "default",
"trial_ends_on": null,
"trial": false
@ -188,6 +207,7 @@ Example response:
"web_url": "https://gitlab.example.com/groups/group1",
"members_count_with_descendants": 2,
"billable_members_count": 2,
"max_seats_used": 0,
"plan": "default",
"trial_ends_on": null,
"trial": false

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
module API
module Entities
class FeatureFlag < Grape::Entity
expose :name
expose :description
expose :active
expose :version, if: :feature_flags_new_version_enabled
expose :created_at
expose :updated_at
expose :scopes, using: FeatureFlag::LegacyScope
expose :strategies, using: FeatureFlag::Strategy, if: :feature_flags_new_version_enabled
end
end
end
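A hypothetical rendering of this entity; `feature_flag` and the boolean option are assumptions, but Grape's symbol form of `if:` checks the corresponding entity option:

```ruby
# :version and :strategies are included only because the option is truthy.
API::Entities::FeatureFlag.represent(
  feature_flag,
  feature_flags_new_version_enabled: true
).as_json
```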

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
module API
module Entities
class FeatureFlag < Grape::Entity
class DetailedLegacyScope < LegacyScope
expose :name
end
end
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
module API
module Entities
class FeatureFlag < Grape::Entity
class LegacyScope < Grape::Entity
expose :id
expose :active
expose :environment_scope
expose :strategies
expose :created_at
expose :updated_at
end
end
end
end

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true
module API
module Entities
class FeatureFlag < Grape::Entity
class Scope < Grape::Entity
expose :id
expose :environment_scope
end
end
end
end

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true
module API
module Entities
class FeatureFlag < Grape::Entity
class Strategy < Grape::Entity
expose :id
expose :name
expose :parameters
expose :scopes, using: FeatureFlag::Scope
end
end
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
module API
module Entities
class FeatureFlag < Grape::Entity
class UserList < Grape::Entity
include RequestAwareEntity
expose :id
expose :iid
expose :project_id
expose :created_at
expose :updated_at
expose :name
expose :user_xids
expose :path do |list|
project_feature_flags_user_list_path(list.project, list)
end
expose :edit_path do |list|
edit_project_feature_flags_user_list_path(list.project, list)
end
end
end
end
end

View File

@ -13,8 +13,6 @@ module Gitlab
end
def perform(start_id, stop_id)
return if Feature.disabled?(:migrate_bio_to_user_details, default_enabled: true)
relation = User
.select("id AS user_id", "substring(COALESCE(bio, '') from 1 for 255) AS bio")
.where("(COALESCE(bio, '') IS DISTINCT FROM '')")

View File

@ -12,13 +12,19 @@ module Gitlab
end
method_name = kwargs.delete(:method) || name.to_s.sub(/_html$/, '')
kwargs[:resolve] = Gitlab::Graphql::MarkdownField::Resolver.new(method_name.to_sym).proc
resolver_method = "#{name}_resolver".to_sym
kwargs[:resolver_method] = resolver_method
kwargs[:description] ||= "The GitLab Flavored Markdown rendering of `#{method_name}`"
# Adding complexity to rendered notes since that could cause queries.
kwargs[:complexity] ||= 5
field name, GraphQL::STRING_TYPE, **kwargs
define_method resolver_method do
# We need to `dup` the context so the MarkdownHelper doesn't modify it
::MarkupHelper.markdown_field(object, method_name.to_sym, context.to_h.dup)
end
end
end
end

View File

@ -1,22 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Graphql
module MarkdownField
class Resolver
attr_reader :method_name
def initialize(method_name)
@method_name = method_name
end
def proc
-> (object, _args, ctx) do
# We need to `dup` the context so the MarkdownHelper doesn't modify it
::MarkupHelper.markdown_field(object, method_name, ctx.to_h.dup)
end
end
end
end
end
end

View File

@ -0,0 +1,49 @@
# frozen_string_literal: true
module Gitlab
module ManifestImport
class Metadata
EXPIRY_TIME = 1.week
attr_reader :user, :fallback
def initialize(user, fallback: {})
@user = user
@fallback = fallback
end
def save(repositories, group_id)
Gitlab::Redis::SharedState.with do |redis|
redis.multi do
redis.set(key_for('repositories'), Gitlab::Json.dump(repositories), ex: EXPIRY_TIME)
redis.set(key_for('group_id'), group_id, ex: EXPIRY_TIME)
end
end
end
def repositories
redis_get('repositories').then do |repositories|
next unless repositories
Gitlab::Json.parse(repositories).map(&:symbolize_keys)
end || fallback[:manifest_import_repositories]
end
def group_id
redis_get('group_id')&.to_i || fallback[:manifest_import_group_id]
end
private
def key_for(field)
"manifest_import:metadata:user:#{user.id}:#{field}"
end
def redis_get(field)
Gitlab::Redis::SharedState.with do |redis|
redis.get(key_for(field))
end
end
end
end
end
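A usage sketch mirroring how `Import::ManifestController` above wires this class up; `current_user`, `manifest`, `group`, and `session` are assumed from that controller context:

```ruby
metadata = Gitlab::ManifestImport::Metadata.new(current_user, fallback: session)
metadata.save(manifest.projects, group.id)   # stored in Redis, expires after EXPIRY_TIME

metadata.repositories # => symbolized repo hashes, or session[:manifest_import_repositories]
metadata.group_id     # => Integer group id, or session[:manifest_import_group_id]
```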

View File

@ -193,6 +193,10 @@ module Gitlab
end
params[:state] = filters[:state] if filters.key?(:state)
if Feature.enabled?(:search_filter_by_confidential) && filters.key?(:confidential) && %w(yes no).include?(filters[:confidential])
params[:confidential] = filters[:confidential] == 'yes'
end
end
end

View File

@ -9,6 +9,8 @@ RSpec.describe Groups::LabelsController do
before do
group.add_owner(user)
# by default FFs are enabled in specs so we turn it off
stub_feature_flags(show_inherited_labels: false)
sign_in(user)
end
@ -32,11 +34,41 @@ RSpec.describe Groups::LabelsController do
subgroup.add_owner(user)
end
it 'returns ancestor group labels' do
get :index, params: { group_id: subgroup, include_ancestor_groups: true, only_group_labels: true }, format: :json
RSpec.shared_examples 'returns ancestor group labels' do
it 'returns ancestor group labels' do
get :index, params: params, format: :json
label_ids = json_response.map {|label| label['title']}
expect(label_ids).to match_array([group_label_1.title, subgroup_label_1.title])
label_ids = json_response.map {|label| label['title']}
expect(label_ids).to match_array([group_label_1.title, subgroup_label_1.title])
end
end
context 'when include_ancestor_groups true' do
let(:params) { { group_id: subgroup, include_ancestor_groups: true, only_group_labels: true } }
it_behaves_like 'returns ancestor group labels'
end
context 'when include_ancestor_groups false' do
let(:params) { { group_id: subgroup, only_group_labels: true } }
it 'does not return ancestor group labels', :aggregate_failures do
get :index, params: params, format: :json
label_ids = json_response.map {|label| label['title']}
expect(label_ids).to match_array([subgroup_label_1.title])
expect(label_ids).not_to include(group_label_1.title)
end
end
context 'when show_inherited_labels enabled' do
let(:params) { { group_id: subgroup } }
before do
stub_feature_flags(show_inherited_labels: true)
end
it_behaves_like 'returns ancestor group labels'
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Import::ManifestController do
RSpec.describe Import::ManifestController, :clean_gitlab_redis_shared_state do
include ImportSpecHelper
let_it_be(:user) { create(:user) }
@ -16,42 +16,93 @@ RSpec.describe Import::ManifestController do
sign_in(user)
end
def assign_session_group
session[:manifest_import_repositories] = []
session[:manifest_import_group_id] = group.id
describe 'POST upload' do
context 'with a valid manifest' do
it 'saves the manifest and redirects to the status page', :aggregate_failures do
post :upload, params: {
group_id: group.id,
manifest: fixture_file_upload('spec/fixtures/aosp_manifest.xml')
}
metadata = Gitlab::ManifestImport::Metadata.new(user)
expect(metadata.group_id).to eq(group.id)
expect(metadata.repositories.size).to eq(660)
expect(metadata.repositories.first).to include(name: 'platform/build', path: 'build/make')
expect(response).to redirect_to(status_import_manifest_path)
end
end
context 'with an invalid manifest' do
it 'displays an error' do
post :upload, params: {
group_id: group.id,
manifest: fixture_file_upload('spec/fixtures/invalid_manifest.xml')
}
expect(assigns(:errors)).to be_present
end
end
context 'when the user cannot create projects in the group' do
it 'displays an error' do
sign_in(create(:user))
post :upload, params: {
group_id: group.id,
manifest: fixture_file_upload('spec/fixtures/aosp_manifest.xml')
}
expect(assigns(:errors)).to be_present
end
end
end
describe 'GET status' do
let(:repo1) { OpenStruct.new(id: 'test1', url: 'http://demo.host/test1') }
let(:repo2) { OpenStruct.new(id: 'test2', url: 'http://demo.host/test2') }
let(:repo1) { { id: 'test1', url: 'http://demo.host/test1' } }
let(:repo2) { { id: 'test2', url: 'http://demo.host/test2' } }
let(:repos) { [repo1, repo2] }
before do
assign_session_group
shared_examples 'status action' do
it "returns variables for json request" do
project = create(:project, import_type: 'manifest', creator_id: user.id)
session[:manifest_import_repositories] = repos
get :status, format: :json
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
expect(json_response.dig("provider_repos", 0, "id")).to eq(repo1[:id])
expect(json_response.dig("provider_repos", 1, "id")).to eq(repo2[:id])
expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
end
it "does not show already added project" do
project = create(:project, import_type: 'manifest', namespace: user.namespace, import_status: :finished, import_url: repo1[:url])
get :status, format: :json
expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
expect(json_response.dig("provider_repos").length).to eq(1)
expect(json_response.dig("provider_repos", 0, "id")).not_to eq(repo1[:id])
end
end
it "returns variables for json request" do
project = create(:project, import_type: 'manifest', creator_id: user.id)
context 'when the data is stored via Gitlab::ManifestImport::Metadata' do
before do
Gitlab::ManifestImport::Metadata.new(user).save(repos, group.id)
end
get :status, format: :json
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
expect(json_response.dig("provider_repos", 0, "id")).to eq(repo1.id)
expect(json_response.dig("provider_repos", 1, "id")).to eq(repo2.id)
expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
include_examples 'status action'
end
it "does not show already added project" do
project = create(:project, import_type: 'manifest', namespace: user.namespace, import_status: :finished, import_url: repo1.url)
context 'when the data is stored in the user session' do
before do
session[:manifest_import_repositories] = repos
session[:manifest_import_group_id] = group.id
end
get :status, format: :json
expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
expect(json_response.dig("provider_repos").length).to eq(1)
expect(json_response.dig("provider_repos", 0, "id")).not_to eq(repo1.id)
include_examples 'status action'
end
end
end

View File

@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Projects::LabelsController do
let(:group) { create(:group) }
let(:project) { create(:project, namespace: group) }
let(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project, reload: true) { create(:project, namespace: group) }
let_it_be(:user) { create(:user) }
before do
project.add_maintainer(user)
@ -14,16 +14,21 @@ RSpec.describe Projects::LabelsController do
end
describe 'GET #index' do
let!(:label_1) { create(:label, project: project, priority: 1, title: 'Label 1') }
let!(:label_2) { create(:label, project: project, priority: 3, title: 'Label 2') }
let!(:label_3) { create(:label, project: project, priority: 1, title: 'Label 3') }
let!(:label_4) { create(:label, project: project, title: 'Label 4') }
let!(:label_5) { create(:label, project: project, title: 'Label 5') }
let_it_be(:label_1) { create(:label, project: project, priority: 1, title: 'Label 1') }
let_it_be(:label_2) { create(:label, project: project, priority: 3, title: 'Label 2') }
let_it_be(:label_3) { create(:label, project: project, priority: 1, title: 'Label 3') }
let_it_be(:label_4) { create(:label, project: project, title: 'Label 4') }
let_it_be(:label_5) { create(:label, project: project, title: 'Label 5') }
let!(:group_label_1) { create(:group_label, group: group, title: 'Group Label 1') }
let!(:group_label_2) { create(:group_label, group: group, title: 'Group Label 2') }
let!(:group_label_3) { create(:group_label, group: group, title: 'Group Label 3') }
let!(:group_label_4) { create(:group_label, group: group, title: 'Group Label 4') }
let_it_be(:group_label_1) { create(:group_label, group: group, title: 'Group Label 1') }
let_it_be(:group_label_2) { create(:group_label, group: group, title: 'Group Label 2') }
let_it_be(:group_label_3) { create(:group_label, group: group, title: 'Group Label 3') }
let_it_be(:group_label_4) { create(:group_label, group: group, title: 'Group Label 4') }
let_it_be(:group_labels) { [group_label_3, group_label_4]}
let_it_be(:project_labels) { [label_4, label_5]}
let_it_be(:group_priority_labels) { [group_label_1, group_label_2]}
let_it_be(:project_priority_labels) { [label_1, label_2, label_3]}
before do
create(:label_priority, project: project, label: group_label_1, priority: 3)
@ -68,6 +73,60 @@ RSpec.describe Projects::LabelsController do
end
end
context 'with subgroups' do
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:subgroup_label_1) { create(:group_label, group: subgroup, title: 'subgroup_label_1') }
let_it_be(:subgroup_label_2) { create(:group_label, group: subgroup, title: 'subgroup_label_2') }
before do
project.update!(namespace: subgroup)
subgroup.add_owner(user)
create(:label_priority, project: project, label: subgroup_label_2, priority: 1)
end
RSpec.shared_examples 'returns ancestor group labels' do
it 'returns ancestor group labels', :aggregate_failures do
get :index, params: params
expect(assigns(:labels)).to match_array([subgroup_label_1] + group_labels + project_labels)
expect(assigns(:prioritized_labels)).to match_array([subgroup_label_2] + group_priority_labels + project_priority_labels)
end
end
context 'when show_inherited_labels disabled' do
before do
stub_feature_flags(show_inherited_labels: false)
end
context 'when include_ancestor_groups false' do
let(:params) { { namespace_id: project.namespace.to_param, project_id: project } }
it 'does not return ancestor group labels', :aggregate_failures do
get :index, params: params
expect(assigns(:labels)).to match_array([subgroup_label_1] + project_labels)
expect(assigns(:prioritized_labels)).to match_array([subgroup_label_2] + project_priority_labels)
end
end
context 'when include_ancestor_groups true' do
let(:params) { { namespace_id: project.namespace.to_param, project_id: project, include_ancestor_groups: true } }
it_behaves_like 'returns ancestor group labels'
end
end
context 'when show_inherited_labels enabled' do
let(:params) { { namespace_id: project.namespace.to_param, project_id: project } }
before do
stub_feature_flags(show_inherited_labels: true)
end
it_behaves_like 'returns ancestor group labels'
end
end
def list_labels
get :index, params: { namespace_id: project.namespace.to_param, project_id: project }
end
@ -75,7 +134,7 @@ RSpec.describe Projects::LabelsController do
describe 'POST #generate' do
context 'personal project' do
let(:personal_project) { create(:project, namespace: user.namespace) }
let_it_be(:personal_project) { create(:project, namespace: user.namespace) }
it 'creates labels' do
post :generate, params: { namespace_id: personal_project.namespace.to_param, project_id: personal_project }
@ -116,8 +175,8 @@ RSpec.describe Projects::LabelsController do
end
describe 'POST #promote' do
let!(:promoted_label_name) { "Promoted Label" }
let!(:label_1) { create(:label, title: promoted_label_name, project: project) }
let_it_be(:promoted_label_name) { "Promoted Label" }
let_it_be(:label_1) { create(:label, title: promoted_label_name, project: project) }
context 'not group reporters' do
it 'denies access' do
@ -196,7 +255,7 @@ RSpec.describe Projects::LabelsController do
end
context 'when requesting a redirected path' do
let!(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
let_it_be(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
it 'redirects to the canonical path' do
get :index, params: { namespace_id: project.namespace, project_id: project.to_param + 'old' }
@ -242,7 +301,7 @@ RSpec.describe Projects::LabelsController do
end
context 'when requesting a redirected path' do
let!(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
let_it_be(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
it 'returns not found' do
post :generate, params: { namespace_id: project.namespace, project_id: project.to_param + 'old' }

View File

@ -1,42 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe GroupLabelsFinder, '#execute' do
let!(:group) { create(:group) }
let!(:user) { create(:user) }
let!(:label1) { create(:group_label, title: 'Foo', description: 'Lorem ipsum', group: group) }
let!(:label2) { create(:group_label, title: 'Bar', description: 'Fusce consequat', group: group) }
it 'returns all group labels sorted by name if no params' do
result = described_class.new(user, group).execute
expect(result.to_a).to match_array([label2, label1])
end
it 'returns all group labels sorted by name desc' do
result = described_class.new(user, group, sort: 'name_desc').execute
expect(result.to_a).to match_array([label2, label1])
end
it 'returns group labels that match search' do
result = described_class.new(user, group, search: 'Foo').execute
expect(result.to_a).to match_array([label1])
end
it 'returns group labels user subscribed to' do
label2.subscribe(user)
result = described_class.new(user, group, subscribed: 'true').execute
expect(result.to_a).to match_array([label2])
end
it 'returns second page of labels' do
result = described_class.new(user, group, page: '2').execute
expect(result.to_a).to match_array([])
end
end

spec/fixtures/invalid_manifest.xml
View File

@ -0,0 +1,4 @@
<manifest>
<remote review="invalid-url" />
<project name="platform/build"/>
</manifest>

View File

@ -46,9 +46,10 @@ describe('Environments Folder View', () => {
wrapper = mount(EnvironmentsFolderViewComponent, { propsData: mockData });
};
const findEnvironmentsTabAvailable = () => wrapper.find('.js-environments-tab-available');
const findEnvironmentsTabAvailable = () =>
wrapper.find('[data-testid="environments-tab-available"]');
const findEnvironmentsTabStopped = () => wrapper.find('.js-environments-tab-stopped');
const findEnvironmentsTabStopped = () => wrapper.find('[data-testid="environments-tab-stopped"]');
beforeEach(() => {
mock = new MockAdapter(axios);

View File

@ -9,4 +9,20 @@ export default [
status: TestStatus.SKIPPED,
system_output: null,
},
{
classname: 'spec.test_spec',
execution_time: 0,
name: 'Test#error text',
stack_trace: null,
status: TestStatus.ERROR,
system_output: null,
},
{
classname: 'spec.test_spec',
execution_time: 0,
name: 'Test#unknown text',
stack_trace: null,
status: TestStatus.UNKNOWN,
system_output: null,
},
];

View File

@ -61,18 +61,17 @@ describe('Test reports suite table', () => {
expect(allCaseRows().length).toBe(testCases.length);
});
it('renders the correct icon for each status', () => {
const failedTest = testCases.findIndex(x => x.status === TestStatus.FAILED);
const skippedTest = testCases.findIndex(x => x.status === TestStatus.SKIPPED);
const successTest = testCases.findIndex(x => x.status === TestStatus.SUCCESS);
it.each([
TestStatus.ERROR,
TestStatus.FAILED,
TestStatus.SKIPPED,
TestStatus.SUCCESS,
'unknown',
])('renders the correct icon for test case with %s status', status => {
const test = testCases.findIndex(x => x.status === status);
const row = findCaseRowAtIndex(test);
const failedRow = findCaseRowAtIndex(failedTest);
const skippedRow = findCaseRowAtIndex(skippedTest);
const successRow = findCaseRowAtIndex(successTest);
expect(findIconForRow(failedRow, TestStatus.FAILED).exists()).toBe(true);
expect(findIconForRow(skippedRow, TestStatus.SKIPPED).exists()).toBe(true);
expect(findIconForRow(successRow, TestStatus.SUCCESS).exists()).toBe(true);
expect(findIconForRow(row, status).exists()).toBe(true);
});
});
});

View File

@ -223,6 +223,21 @@ RSpec.describe Resolvers::IssuesResolver do
expect(resolve_issues(sort: :milestone_due_desc).items).to eq([milestone_issue3, milestone_issue2, milestone_issue1])
end
end
context 'when sorting by severity' do
let_it_be(:project) { create(:project) }
let_it_be(:issue_high_severity) { create_issue_with_severity(project, severity: :high) }
let_it_be(:issue_low_severity) { create_issue_with_severity(project, severity: :low) }
let_it_be(:issue_no_severity) { create(:incident, project: project) }
it 'sorts issues ascending' do
expect(resolve_issues(sort: :severity_asc)).to eq([issue_no_severity, issue_low_severity, issue_high_severity])
end
it 'sorts issues descending' do
expect(resolve_issues(sort: :severity_desc)).to eq([issue_high_severity, issue_low_severity, issue_no_severity])
end
end
end
it 'returns issues user can see' do
@ -308,6 +323,13 @@ RSpec.describe Resolvers::IssuesResolver do
expect(field.to_graphql.complexity.call({}, { labelName: 'foo' }, 1)).to eq 8
end
def create_issue_with_severity(project, severity:)
issue = create(:incident, project: project)
create(:issuable_severity, issue: issue, severity: severity)
issue
end
def resolve_issues(args = {}, context = { current_user: current_user })
resolve(described_class, obj: project, args: args, ctx: context)
end

View File

@ -9,7 +9,7 @@ RSpec.describe GitlabSchema.types['IssueSort'] do
it 'exposes all the existing issue sort values' do
expect(described_class.values.keys).to include(
*%w[DUE_DATE_ASC DUE_DATE_DESC RELATIVE_POSITION_ASC]
*%w[DUE_DATE_ASC DUE_DATE_DESC RELATIVE_POSITION_ASC SEVERITY_ASC SEVERITY_DESC]
)
end
end

View File

@ -5,9 +5,11 @@ require 'spec_helper'
RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
include FilterSpecHelper
let(:parent_group) { create(:group, :public) }
let(:group) { create(:group, :public, parent: parent_group) }
let(:project) { create(:project, :public, group: group) }
let_it_be(:parent_group) { create(:group, :public) }
let_it_be(:group) { create(:group, :public, parent: parent_group) }
let_it_be(:project) { create(:project, :public, group: group) }
let_it_be(:namespace) { create(:namespace) }
let_it_be(:another_project) { create(:project, :public, namespace: namespace) }
it 'requires project context' do
expect { described_class.call('') }.to raise_error(ArgumentError, /:project/)
@ -188,11 +190,9 @@ RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
end
shared_examples 'cross-project / cross-namespace complete reference' do
let(:namespace) { create(:namespace) }
let(:another_project) { create(:project, :public, namespace: namespace) }
let(:milestone) { create(:milestone, project: another_project) }
let(:reference) { "#{another_project.full_path}%#{milestone.iid}" }
let!(:result) { reference_filter("See #{reference}") }
let_it_be(:milestone) { create(:milestone, project: another_project) }
let(:reference) { "#{another_project.full_path}%#{milestone.iid}" }
let!(:result) { reference_filter("See #{reference}") }
it 'points to referenced project milestone page' do
expect(result.css('a').first.attr('href')).to eq urls
@ -226,12 +226,10 @@ RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
end
shared_examples 'cross-project / same-namespace complete reference' do
let(:namespace) { create(:namespace) }
let(:project) { create(:project, :public, namespace: namespace) }
let(:another_project) { create(:project, :public, namespace: namespace) }
let(:milestone) { create(:milestone, project: another_project) }
let(:reference) { "#{another_project.full_path}%#{milestone.iid}" }
let!(:result) { reference_filter("See #{reference}") }
let_it_be(:project) { create(:project, :public, namespace: namespace) }
let_it_be(:milestone) { create(:milestone, project: another_project) }
let(:reference) { "#{another_project.full_path}%#{milestone.iid}" }
let!(:result) { reference_filter("See #{reference}") }
it 'points to referenced project milestone page' do
expect(result.css('a').first.attr('href')).to eq urls
@ -265,12 +263,10 @@ RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
end
shared_examples 'cross project shorthand reference' do
let(:namespace) { create(:namespace) }
let(:project) { create(:project, :public, namespace: namespace) }
let(:another_project) { create(:project, :public, namespace: namespace) }
let(:milestone) { create(:milestone, project: another_project) }
let(:reference) { "#{another_project.path}%#{milestone.iid}" }
let!(:result) { reference_filter("See #{reference}") }
let_it_be(:project) { create(:project, :public, namespace: namespace) }
let_it_be(:milestone) { create(:milestone, project: another_project) }
let(:reference) { "#{another_project.path}%#{milestone.iid}" }
let!(:result) { reference_filter("See #{reference}") }
it 'points to referenced project milestone page' do
expect(result.css('a').first.attr('href')).to eq urls
@ -439,13 +435,13 @@ RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
context 'when milestone is open' do
context 'project milestones' do
let(:milestone) { create(:milestone, project: project) }
let_it_be_with_reload(:milestone) { create(:milestone, project: project) }
include_context 'project milestones'
end
context 'group milestones' do
let(:milestone) { create(:milestone, group: group) }
let_it_be_with_reload(:milestone) { create(:milestone, group: group) }
include_context 'group milestones'
end
@ -453,13 +449,13 @@ RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
context 'when milestone is closed' do
context 'project milestones' do
let(:milestone) { create(:milestone, :closed, project: project) }
let_it_be_with_reload(:milestone) { create(:milestone, :closed, project: project) }
include_context 'project milestones'
end
context 'group milestones' do
let(:milestone) { create(:milestone, :closed, group: group) }
let_it_be_with_reload(:milestone) { create(:milestone, :closed, group: group) }
include_context 'group milestones'
end
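The change above swaps per-example `let` blocks for `let_it_be` / `let_it_be_with_reload` (provided by the test-prof gem), so records are created once per example group rather than once per example; a small illustration of the difference:

# Illustration only: relative cost of the three helpers within one example group.
RSpec.describe 'record creation strategies' do
  let(:fresh_project)               { create(:project) }   # INSERT for every example that calls it
  let_it_be(:shared_project)        { create(:project) }   # single INSERT, reused by all examples
  let_it_be_with_reload(:milestone) { create(:milestone) } # reused, but reloaded before each example
                                                           # so in-example mutations do not leak
end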


@ -82,21 +82,4 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateUsersBioToUserDetails, :migra
expect(user_detail).to be_nil
end
context 'when `migrate_bio_to_user_details` feature flag is off' do
before do
stub_feature_flags(migrate_bio_to_user_details: false)
end
it 'does nothing' do
already_existing_user_details = user_details.where(user_id: [
user_has_different_details.id,
user_already_has_details.id
])
subject
expect(user_details.all).to match_array(already_existing_user_details)
end
end
end


@ -1,33 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Graphql::MarkdownField::Resolver do
include Gitlab::Routing
let(:resolver) { described_class.new(:note) }
describe '#proc' do
let(:project) { create(:project, :public) }
let(:issue) { create(:issue, project: project) }
let(:note) do
create(:note,
note: "Referencing #{issue.to_reference(full: true)}")
end
it 'renders markdown correctly' do
expect(resolver.proc.call(note, {}, {})).to include(issue_path(issue))
end
context 'when the issue is not publicly accessible' do
let(:project) { create(:project, :private) }
it 'hides the references from users that are not allowed to see the reference' do
expect(resolver.proc.call(note, {}, {})).not_to include(issue_path(issue))
end
it 'shows the reference to users that are allowed to see it' do
expect(resolver.proc.call(note, {}, { current_user: project.owner }))
.to include(issue_path(issue))
end
end
end
end


@ -2,6 +2,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Graphql::MarkdownField do
include Gitlab::Routing
describe '.markdown_field' do
it 'creates the field with some default attributes' do
field = class_with_markdown_field(:test_html, null: true, method: :hello).fields['testHtml']
@ -13,7 +15,7 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
end
context 'developer warnings' do
let(:expected_error) { /Only `method` is allowed to specify the markdown field/ }
let_it_be(:expected_error) { /Only `method` is allowed to specify the markdown field/ }
it 'raises when passing a resolver' do
expect { class_with_markdown_field(:test_html, null: true, resolver: 'not really') }
@ -27,30 +29,61 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
end
context 'resolving markdown' do
let(:note) { build(:note, note: '# Markdown!') }
let(:thing_with_markdown) { double('markdown thing', object: note) }
let(:expected_markdown) { '<h1 data-sourcepos="1:1-1:11" dir="auto">Markdown!</h1>' }
let(:query_type) { GraphQL::ObjectType.new }
let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
let(:context) { GraphQL::Query::Context.new(query: OpenStruct.new(schema: schema), values: nil, object: nil) }
let_it_be(:note) { build(:note, note: '# Markdown!') }
let_it_be(:expected_markdown) { '<h1 data-sourcepos="1:1-1:11" dir="auto">Markdown!</h1>' }
let_it_be(:query_type) { GraphQL::ObjectType.new }
let_it_be(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
let_it_be(:query) { GraphQL::Query.new(schema, document: nil, context: {}, variables: {}) }
let_it_be(:context) { GraphQL::Query::Context.new(query: query, values: {}, object: nil) }
let(:type_class) { class_with_markdown_field(:note_html, null: false) }
let(:type_instance) { type_class.authorized_new(note, context) }
let(:field) { type_class.fields['noteHtml'] }
it 'renders markdown from the same property as the field name without the `_html` suffix' do
field = class_with_markdown_field(:note_html, null: false).fields['noteHtml']
expect(field.to_graphql.resolve(thing_with_markdown, {}, context)).to eq(expected_markdown)
expect(field.to_graphql.resolve(type_instance, {}, context)).to eq(expected_markdown)
end
it 'renders markdown from a specific property when a `method` argument is passed' do
field = class_with_markdown_field(:test_html, null: false, method: :note).fields['testHtml']
context 'when a `method` argument is passed' do
let(:type_class) { class_with_markdown_field(:test_html, null: false, method: :note) }
let(:field) { type_class.fields['testHtml'] }
expect(field.to_graphql.resolve(thing_with_markdown, {}, context)).to eq(expected_markdown)
it 'renders markdown from a specific property' do
expect(field.to_graphql.resolve(type_instance, {}, context)).to eq(expected_markdown)
end
end
describe 'basic verification that references work' do
let_it_be(:project) { create(:project, :public) }
let(:issue) { create(:issue, project: project) }
let(:note) { build(:note, note: "Referencing #{issue.to_reference(full: true)}") }
it 'renders markdown correctly' do
expect(field.to_graphql.resolve(type_instance, {}, context)).to include(issue_path(issue))
end
context 'when the issue is not publicly accessible' do
let_it_be(:project) { create(:project, :private) }
it 'hides the references from users that are not allowed to see the reference' do
expect(field.to_graphql.resolve(type_instance, {}, context)).not_to include(issue_path(issue))
end
it 'shows the reference to users that are allowed to see it' do
context = GraphQL::Query::Context.new(query: query, values: { current_user: project.owner }, object: nil)
type_instance = type_class.authorized_new(note, context)
expect(field.to_graphql.resolve(type_instance, {}, context)).to include(issue_path(issue))
end
end
end
end
end
def class_with_markdown_field(name, **args)
Class.new(GraphQL::Schema::Object) do
Class.new(Types::BaseObject) do
prepend Gitlab::Graphql::MarkdownField
graphql_name 'MarkdownFieldTest'
markdown_field name, **args
end
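For orientation, `markdown_field` is a class macro that declares a String field and renders the backing attribute as HTML; a rough sketch of that idea, not the real implementation (the rendering call and the availability of `field`/`context` on the host class are assumptions):

# Hypothetical sketch of a markdown_field-style macro.
module MarkdownFieldSketch
  def markdown_field(name, method: nil, **kwargs)
    raise ArgumentError, 'Only `method` is allowed to specify the markdown field' if kwargs[:resolver]

    method ||= name.to_s.delete_suffix('_html').to_sym
    field name, GraphQL::Types::String, **kwargs

    define_method(name) do
      # Render the underlying markdown attribute to HTML for the current user.
      Banzai.render_and_post_process(object.public_send(method), current_user: context[:current_user])
    end
  end
end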


@ -17,10 +17,17 @@ RSpec.describe Gitlab::GroupSearchResults do
describe 'issues search' do
let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo opened') }
let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
let_it_be(:confidential_result) { create(:issue, :confidential, project: project, title: 'foo confidential') }
let(:query) { 'foo' }
let(:scope) { 'issues' }
before do
project.add_developer(user)
end
include_examples 'search results filtered by state'
include_examples 'search results filtered by confidential'
end
describe 'merge_requests search' do


@ -12,19 +12,7 @@ RSpec.describe Gitlab::ManifestImport::Manifest do
end
context 'missing or invalid attributes' do
let(:file) { Tempfile.new('foo') }
before do
content = <<~EOS
<manifest>
<remote review="invalid-url" />
<project name="platform/build"/>
</manifest>
EOS
file.write(content)
file.rewind
end
let(:file) { File.open(Rails.root.join('spec/fixtures/invalid_manifest.xml')) }
it { expect(manifest.valid?).to be false }


@ -0,0 +1,62 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::ManifestImport::Metadata, :clean_gitlab_redis_shared_state do
let(:user) { double(id: 1) }
let(:repositories) do
[
{ id: 'test1', url: 'http://demo.host/test1' },
{ id: 'test2', url: 'http://demo.host/test2' }
]
end
describe '#save' do
it 'stores data in Redis with an expiry of EXPIRY_TIME' do
status = described_class.new(user)
repositories_key = 'manifest_import:metadata:user:1:repositories'
group_id_key = 'manifest_import:metadata:user:1:group_id'
status.save(repositories, 2)
Gitlab::Redis::SharedState.with do |redis|
expect(redis.ttl(repositories_key)).to be_within(5).of(described_class::EXPIRY_TIME)
expect(redis.ttl(group_id_key)).to be_within(5).of(described_class::EXPIRY_TIME)
end
end
end
describe '#repositories' do
it 'allows repositories to round-trip with symbol keys' do
status = described_class.new(user)
status.save(repositories, 2)
expect(status.repositories).to eq(repositories)
end
it 'uses the fallback when there is nothing in Redis' do
fallback = { manifest_import_repositories: repositories }
status = described_class.new(user, fallback: fallback)
expect(status.repositories).to eq(repositories)
end
end
describe '#group_id' do
it 'returns the group ID as an integer' do
status = described_class.new(user)
status.save(repositories, 2)
expect(status.group_id).to eq(2)
end
it 'uses the fallback when there is nothing in Redis' do
fallback = { manifest_import_group_id: 3 }
status = described_class.new(user, fallback: fallback)
expect(status.group_id).to eq(3)
end
end
end
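A condensed sketch of the behaviour these examples describe, with the key format and the save/repositories/group_id round-trip taken from the spec (the EXPIRY_TIME value and serialization details are assumptions):

# Hypothetical sketch, not the real class.
class MetadataSketch
  EXPIRY_TIME = 1.week.to_i # ActiveSupport duration; actual value assumed

  def initialize(user, fallback: {})
    @user = user
    @fallback = fallback
  end

  def save(repositories, group_id)
    Gitlab::Redis::SharedState.with do |redis|
      redis.set(key('repositories'), Gitlab::Json.dump(repositories), ex: EXPIRY_TIME)
      redis.set(key('group_id'), group_id, ex: EXPIRY_TIME)
    end
  end

  def repositories
    read('repositories')&.then { |raw| Gitlab::Json.parse(raw).map(&:symbolize_keys) } ||
      @fallback[:manifest_import_repositories]
  end

  def group_id
    (read('group_id') || @fallback[:manifest_import_group_id]).to_i
  end

  private

  def read(field)
    Gitlab::Redis::SharedState.with { |redis| redis.get(key(field)) }
  end

  def key(field)
    "manifest_import:metadata:user:#{@user.id}:#{field}"
  end
end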


@ -265,9 +265,15 @@ RSpec.describe Gitlab::ProjectSearchResults do
let_it_be(:project) { create(:project, :public) }
let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo opened') }
let_it_be(:confidential_result) { create(:issue, :confidential, project: project, title: 'foo confidential') }
let(:query) { 'foo' }
before do
project.add_developer(user)
end
include_examples 'search results filtered by state'
include_examples 'search results filtered by confidential'
end
end


@ -181,8 +181,10 @@ RSpec.describe Gitlab::SearchResults do
let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo open') }
let_it_be(:confidential_result) { create(:issue, :confidential, project: project, title: 'foo confidential') }
include_examples 'search results filtered by state'
include_examples 'search results filtered by confidential'
end
end


@ -145,6 +145,24 @@ RSpec.describe Issue do
end
end
describe '.order_severity' do
let_it_be(:issue_high_severity) { create(:issuable_severity, severity: :high).issue }
let_it_be(:issue_low_severity) { create(:issuable_severity, severity: :low).issue }
let_it_be(:issue_no_severity) { create(:incident) }
context 'sorting ascending' do
subject { described_class.order_severity_asc }
it { is_expected.to eq([issue_no_severity, issue_low_severity, issue_high_severity]) }
end
context 'sorting descending' do
subject { described_class.order_severity_desc }
it { is_expected.to eq([issue_high_severity, issue_low_severity, issue_no_severity]) }
end
end
describe '#order_by_position_and_priority' do
let(:project) { reusable_project }
let(:p1) { create(:label, title: 'P1', project: project, priority: 1) }
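Scopes consistent with the ordering asserted in `.order_severity` above could be built on a LEFT JOIN to the severity record, with issues lacking a severity sorting first ascending and last descending; a sketch under those assumptions (association and column names assumed):

# Hypothetical sketch of ordering scopes matching the expectations above.
class Issue < ApplicationRecord
  has_one :issuable_severity

  scope :order_severity_asc, -> {
    left_joins(:issuable_severity)
      .order(Arel.sql('issuable_severities.severity ASC NULLS FIRST'))
  }

  scope :order_severity_desc, -> {
    left_joins(:issuable_severity)
      .order(Arel.sql('issuable_severities.severity DESC NULLS LAST'))
  }
end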


@ -53,6 +53,7 @@ RSpec.describe ResourceAccessTokens::CreateService do
access_token = response.payload[:access_token]
expect(access_token.user.reload.user_type).to eq("#{resource_type}_bot")
expect(access_token.user.created_by_id).to eq(user.id)
end
context 'email confirmation status' do


@ -16,6 +16,10 @@ RSpec.describe Users::BuildService do
expect(service.execute).to be_valid
end
it 'sets the created_by_id' do
expect(service.execute.created_by_id).to eq(admin_user.id)
end
context 'calls the UpdateCanonicalEmailService' do
specify do
expect(Users::UpdateCanonicalEmailService).to receive(:new).and_call_original
@ -128,6 +132,16 @@ RSpec.describe Users::BuildService do
it 'raises AccessDeniedError exception' do
expect { service.execute }.to raise_error Gitlab::Access::AccessDeniedError
end
context 'when authorization is skipped' do
subject(:built_user) { service.execute(skip_authorization: true) }
it { is_expected.to be_valid }
it 'sets the created_by_id' do
expect(built_user.created_by_id).to eq(user.id)
end
end
end
context 'with nil user' do
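The new examples above rely on two behaviours of the service: it records who built the user, and `skip_authorization: true` bypasses the permission check; a minimal sketch of that shape, not the service's real code:

# Hypothetical sketch of the execute flow the examples exercise.
def execute(skip_authorization: false)
  # can_create_user? and user_params are assumed helpers for illustration.
  unless skip_authorization || can_create_user?(current_user)
    raise Gitlab::Access::AccessDeniedError
  end

  user = User.new(user_params)
  user.created_by_id = current_user&.id # recorded for both admins and regular users
  user
end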


@ -0,0 +1,91 @@
# frozen_string_literal: true
RSpec.shared_examples 'search results filtered by confidential' do
context 'filter not provided (all behavior)' do
let(:filters) { {} }
context 'when Feature search_filter_by_confidential enabled' do
it 'returns confidential and not confidential results', :aggregate_failures do
expect(results.objects('issues')).to include confidential_result
expect(results.objects('issues')).to include opened_result
end
end
context 'when Feature search_filter_by_confidential not enabled' do
before do
stub_feature_flags(search_filter_by_confidential: false)
end
it 'returns confidential and not confidential results', :aggregate_failures do
expect(results.objects('issues')).to include confidential_result
expect(results.objects('issues')).to include opened_result
end
end
end
context 'confidential filter' do
let(:filters) { { confidential: 'yes' } }
context 'when Feature search_filter_by_confidential enabled' do
it 'returns only confidential results', :aggregate_failures do
expect(results.objects('issues')).to include confidential_result
expect(results.objects('issues')).not_to include opened_result
end
end
context 'when Feature search_filter_by_confidential not enabled' do
before do
stub_feature_flags(search_filter_by_confidential: false)
end
it 'returns confidential and not confidential results', :aggregate_failures do
expect(results.objects('issues')).to include confidential_result
expect(results.objects('issues')).to include opened_result
end
end
end
context 'not confidential filter' do
let(:filters) { { confidential: 'no' } }
context 'when Feature search_filter_by_confidential enabled' do
it 'returns not confidential results', :aggregate_failures do
expect(results.objects('issues')).not_to include confidential_result
expect(results.objects('issues')).to include opened_result
end
end
context 'when Feature search_filter_by_confidential not enabled' do
before do
stub_feature_flags(search_filter_by_confidential: false)
end
it 'returns confidential and not confidential results', :aggregate_failures do
expect(results.objects('issues')).to include confidential_result
expect(results.objects('issues')).to include opened_result
end
end
end
context 'unsupported filter' do
let(:filters) { { confidential: 'goodbye' } }
context 'when Feature search_filter_by_confidential enabled' do
it 'returns confidential and not confidential results', :aggregate_failures do
expect(results.objects('issues')).to include confidential_result
expect(results.objects('issues')).to include opened_result
end
end
context 'when Feature search_filter_by_confidential not enabled' do
before do
stub_feature_flags(search_filter_by_confidential: false)
end
it 'returns confidential and not confidential results', :aggregate_failures do
expect(results.objects('issues')).to include confidential_result
expect(results.objects('issues')).to include opened_result
end
end
end
end
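Taken together, the shared examples pin down a simple contract: with the `search_filter_by_confidential` flag enabled, `confidential: 'yes'` narrows results to confidential issues, `'no'` excludes them, and any other value (or a disabled flag) returns everything. A sketch of filtering logic that satisfies that contract (the method name is an assumption):

# Hypothetical sketch of the filtering contract described above.
def filter_by_confidential(issues, filters)
  return issues unless Feature.enabled?(:search_filter_by_confidential)

  case filters[:confidential].to_s
  when 'yes' then issues.where(confidential: true)
  when 'no'  then issues.where(confidential: false)
  else issues # unsupported or missing filter: return everything
  end
end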