Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-12-08 00:15:00 +00:00
parent eab843a2f5
commit 1b7a8ceadb
40 changed files with 423 additions and 50 deletions


@@ -12,7 +12,10 @@ The description of the deprecation should state what actions the user should tak
 ### Breaking Change
-<!-- Is this a breaking change or not? If so, please add instructions for how users can update their workflow. -->
+<!-- Does this MR contain a breaking change? If yes:
+- Add the ~"breaking change" label to this issue.
+- Add instructions for how users can update their workflow. -->
 ### Affected Topology
 <!--
@@ -52,3 +55,20 @@ Which tier is this feature available in?
 <!--
 Add links to any relevant documentation or code that will provide additional details or clarity regarding the planned change. Also, include a link to the removal issue if relevant.
 -->
+<!-- Label reminders - you should have one of each of the following labels.
+Use the following resources to find the appropriate labels:
+- https://gitlab.com/gitlab-org/gitlab/-/labels
+- https://about.gitlab.com/handbook/product/categories/features/
+-->
+<!-- Populate the Section, Group, and Category -->
+/label ~devops:: ~group: ~Category:
+<!-- Choose the Pricing Tier(s) -->
+/label ~"GitLab Free" ~"GitLab Premium" ~"GitLab Ultimate"
+<!-- Identifies that this Issue is related to deprecating a feature -->
+/label ~"type::deprecation"
+<!-- Add the ~"breaking change" label to this issue if necessary -->


@@ -38,7 +38,13 @@ export default {
   <div v-if="loading && !error" class="text-center loading">
     <gl-loading-icon class="mt-5" size="lg" />
   </div>
-  <pdf-lab v-if="!loadError" :pdf="pdf" @pdflabload="onLoad" @pdflaberror="onError" />
+  <pdf-lab
+    v-if="!loadError"
+    :pdf="pdf"
+    @pdflabload="onLoad"
+    @pdflaberror="onError"
+    v-on="$listeners"
+  />
   <p v-if="error" class="text-center">
     <span v-if="loadError" ref="loadError">
       {{ __('An error occurred while loading the file. Please try again later.') }}


@@ -45,7 +45,7 @@ export default {
       .promise.then(this.renderPages)
       .then((pages) => {
         this.pages = pages;
-        this.$emit('pdflabload');
+        this.$emit('pdflabload', pages.length);
       })
       .catch((error) => {
         this.$emit('pdflaberror', error);


@@ -40,6 +40,7 @@ export const viewerProps = (type, blob) => {
     },
     pdf: {
       url: blob.rawPath,
+      fileSize: blob.rawSize,
     },
   }[type];
 };


@@ -1,16 +1,50 @@
 <script>
+import { GlButton } from '@gitlab/ui';
 import PdfViewer from '~/blob/pdf/pdf_viewer.vue';
+import { __ } from '~/locale';
+import { PDF_MAX_FILE_SIZE, PDF_MAX_PAGE_LIMIT } from '../../constants';
 export default {
-  components: { PdfViewer },
+  components: { GlButton, PdfViewer },
+  i18n: {
+    tooLargeDescription: __('This PDF is too large to display. Please download to view.'),
+    tooLargeButtonText: __('Download PDF'),
+  },
   props: {
     url: {
       type: String,
       required: true,
     },
+    fileSize: {
+      type: Number,
+      required: true,
+    },
+  },
+  data() {
+    return { totalPages: 0 };
+  },
+  computed: {
+    tooLargeToDisplay() {
+      return this.fileSize > PDF_MAX_FILE_SIZE || this.totalPages > PDF_MAX_PAGE_LIMIT;
+    },
+  },
+  methods: {
+    handleOnLoad(totalPages) {
+      this.totalPages = totalPages;
+    },
   },
 };
 </script>
 <template>
-  <pdf-viewer :pdf="url" />
+  <div>
+    <pdf-viewer v-if="!tooLargeToDisplay" :pdf="url" @pdflabload="handleOnLoad" />
+    <div v-else class="gl-display-flex gl-flex-direction-column gl-align-items-center gl-p-5">
+      <p>{{ $options.i18n.tooLargeDescription }}</p>
+      <gl-button icon="download" category="secondary" variant="confirm" :href="url" download>{{
+        $options.i18n.tooLargeButtonText
+      }}</gl-button>
+    </div>
+  </div>
 </template>


@@ -20,3 +20,6 @@ export const COMMIT_MESSAGE_BODY_MAX_LENGTH = 72;
 export const LIMITED_CONTAINER_WIDTH_CLASS = 'limit-container-width';
 export const I18N_COMMIT_DATA_FETCH_ERROR = __('An error occurred while fetching commit data.');
+export const PDF_MAX_FILE_SIZE = 10000000; // 10 MB
+export const PDF_MAX_PAGE_LIMIT = 50;


@@ -16,6 +16,11 @@
 .snippet-file-content {
   border-radius: 3px;
+  .file-content {
+    max-height: 500px;
+    overflow-y: auto;
+  }
   + .snippet-file-content {
     @include gl-mt-5;
   }


@@ -149,7 +149,6 @@
 .commit-content {
   padding-right: 10px;
   white-space: normal;
-  overflow: hidden;
   .commit-title {
     display: flex;


@@ -52,7 +52,16 @@ class GroupsFinder < UnionFinder
     return [Group.all] if current_user&.can_read_all_resources? && all_available?
     groups = []
-    groups << Gitlab::ObjectHierarchy.new(groups_for_ancestors, groups_for_descendants).all_objects if current_user
+    if current_user
+      if Feature.enabled?(:use_traversal_ids_groups_finder, default_enabled: :yaml)
+        groups << current_user.authorized_groups.self_and_ancestors
+        groups << current_user.groups.self_and_descendants
+      else
+        groups << Gitlab::ObjectHierarchy.new(groups_for_ancestors, groups_for_descendants).all_objects
+      end
+    end
     groups << Group.unscoped.public_to_user(current_user) if include_public_groups?
     groups << Group.none if groups.empty?
     groups
@@ -72,10 +81,14 @@ class GroupsFinder < UnionFinder
       .groups
       .where('members.access_level >= ?', params[:min_access_level])
+    if Feature.enabled?(:use_traversal_ids_groups_finder, default_enabled: :yaml)
+      groups.self_and_descendants
+    else
       Gitlab::ObjectHierarchy
         .new(groups)
         .base_and_descendants
     end
+  end
   # rubocop: enable CodeReuse/ActiveRecord
   def exclude_group_ids(groups)


@@ -5,7 +5,7 @@ module Types
     graphql_name 'IssueType'
     description 'Issue type'
-    ::WorkItem::Type.base_types.keys.each do |issue_type|
+    ::WorkItem::Type.allowed_types_for_issues.each do |issue_type|
       value issue_type.upcase, value: issue_type, description: "#{issue_type.titleize} issue type"
     end
   end


@@ -15,7 +15,8 @@ class WorkItem::Type < ApplicationRecord
     issue: { name: 'Issue', icon_name: 'issue-type-issue', enum_value: 0 },
     incident: { name: 'Incident', icon_name: 'issue-type-incident', enum_value: 1 },
     test_case: { name: 'Test Case', icon_name: 'issue-type-test-case', enum_value: 2 }, ## EE-only
-    requirement: { name: 'Requirement', icon_name: 'issue-type-requirements', enum_value: 3 } ## EE-only
+    requirement: { name: 'Requirement', icon_name: 'issue-type-requirements', enum_value: 3 }, ## EE-only
+    task: { name: 'Task', icon_name: 'issue-type-task', enum_value: 4 }
   }.freeze
   cache_markdown_field :description, pipeline: :single_line
@@ -42,6 +43,10 @@ class WorkItem::Type < ApplicationRecord
     default_by_type(:issue)
   end
+  def self.allowed_types_for_issues
+    base_types.keys.excluding('task')
+  end
   private
   def strip_whitespace


@@ -0,0 +1,8 @@
+---
+name: use_traversal_ids_groups_finder
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/67650
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/345666
+milestone: '14.6'
+type: development
+group: group::access
+default_enabled: false


@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+class AddTaskToWorkItemTypes < Gitlab::Database::Migration[1.0]
+  TASK_ENUM_VALUE = 4
+
+  class WorkItemType < ActiveRecord::Base
+    self.inheritance_column = :_type_disabled
+    self.table_name = 'work_item_types'
+
+    validates :name, uniqueness: { case_sensitive: false, scope: [:namespace_id] }
+  end
+
+  def up
+    # New instances will not run this migration and add this type via fixtures
+    # checking if record exists mostly because migration specs will run all migrations
+    # and that will conflict with the preloaded base work item types
+    task_work_item = WorkItemType.find_by(name: 'Task', namespace_id: nil)
+
+    if task_work_item
+      say('Task item record exist, skipping creation')
+    else
+      WorkItemType.create(name: 'Task', namespace_id: nil, base_type: TASK_ENUM_VALUE, icon_name: 'issue-type-task')
+    end
+  end
+
+  def down
+    # There's the remote possibility that issues could already be
+    # using this issue type, with a tight foreign constraint.
+    # Therefore we will not attempt to remove any data.
+  end
+end


@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+class AddFutureSubscriptionsToApplicationSettings < Gitlab::Database::Migration[1.0]
+  def change
+    add_column :application_settings, :future_subscriptions, :jsonb, null: false, default: []
+  end
+end


@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+class AddIndexToProjectsOnMarkedForDeletionAt < Gitlab::Database::Migration[1.0]
+  disable_ddl_transaction!
+
+  INDEX_NAME = 'index_projects_not_aimed_for_deletion'
+
+  def up
+    add_concurrent_index :projects, :id, where: 'marked_for_deletion_at IS NULL', name: INDEX_NAME
+  end
+
+  def down
+    remove_concurrent_index :projects, :id, name: INDEX_NAME
+  end
+end


@@ -0,0 +1 @@
+e31592bbeb6ba6175f19cfceaafb37672633028dd021052542909999b46eac38


@@ -0,0 +1 @@
+c5282e48f31c0896a3ce21fe238eb602dc006b0bfe62aa4f12ee39bbd620c76c


@@ -0,0 +1 @@
+9954fb041a3f284f53cc9c5c68b1a9dff36513a1851e663c221eccd40736fb16


@@ -10479,6 +10479,7 @@ CREATE TABLE application_settings (
     sentry_environment text,
     max_ssh_key_lifetime integer,
     static_objects_external_storage_auth_token_encrypted text,
+    future_subscriptions jsonb DEFAULT '[]'::jsonb NOT NULL,
     CONSTRAINT app_settings_container_reg_cleanup_tags_max_list_size_positive CHECK ((container_registry_cleanup_tags_service_max_list_size >= 0)),
     CONSTRAINT app_settings_dep_proxy_ttl_policies_worker_capacity_positive CHECK ((dependency_proxy_ttl_group_policy_worker_capacity >= 0)),
     CONSTRAINT app_settings_ext_pipeline_validation_service_url_text_limit CHECK ((char_length(external_pipeline_validation_service_url) <= 255)),
@@ -27106,6 +27107,8 @@ CREATE INDEX index_projects_api_vis20_path ON projects USING btree (path, id) WH
 CREATE INDEX index_projects_api_vis20_updated_at ON projects USING btree (updated_at, id) WHERE (visibility_level = 20);
+CREATE INDEX index_projects_not_aimed_for_deletion ON projects USING btree (id) WHERE (marked_for_deletion_at IS NULL);
 CREATE INDEX index_projects_on_created_at_and_id ON projects USING btree (created_at, id);
 CREATE INDEX index_projects_on_creator_id_and_created_at_and_id ON projects USING btree (creator_id, created_at, id);

Binary file not shown.



@@ -1162,6 +1162,17 @@ review app in the merge request. Make sure the image isn't blurry or overwhelmin
 - **Be consistent.** Coordinate screenshots with the other screenshots already on
   a documentation page for a consistent reading experience.
+
+### Add callouts
+
+If you need to emphasize an area in a screenshot, use an arrow.
+
+- For color, use `#EE2604`. If you use the Preview application on macOS, this is the default red.
+- For the line width, use 3 pt. If you use the Preview application on macOS, this is the third line in the list.
+- Use the arrow style shown in the following image.
+- If you have multiple arrows, make them parallel when possible.
+
+![callout example](img/callouts.png)
 ### Save the image
 - Resize any wide or tall screenshots if needed, but make sure the screenshot is


@@ -26,17 +26,17 @@ To fork an existing project in GitLab:
 1. Select the project to fork to:
-   - *(Recommended method)* Below **Select a namespace to fork the project**, identify
+   - Recommended method. Below **Select a namespace to fork the project**, identify
     the project you want to fork to, and click **Select**. Only namespaces you have
     Developer and higher [permissions](../../permissions.md) for are shown.
     ![Choose namespace](img/forking_workflow_choose_namespace_v13_10.png)
-   - *(Experimental method)* If your GitLab administrator has
+   - Experimental method. If your GitLab administrator has
     [enabled the experimental fork project form](#enable-or-disable-the-fork-project-form), read
     [Create a fork with the fork project form](#create-a-fork-with-the-fork-project-form).
-    Only namespaces you have Developer and higher
-    [permissions](../../permissions.md) for are shown.
+    Only namespaces you have at least the Developer
+    [role](../../permissions.md) for are shown.
 NOTE:
 The project path must be unique in the namespace.


@@ -23,7 +23,7 @@ module API
       expose :issue_type,
         as: :type,
         format_with: :upcase,
-        documentation: { type: "String", desc: "One of #{::WorkItem::Type.base_types.keys.map(&:upcase)}" }
+        documentation: { type: "String", desc: "One of #{::WorkItem::Type.allowed_types_for_issues.map(&:upcase)}" }
       expose :assignee, using: ::API::Entities::UserBasic do |issue|
         issue.assignees.first


@@ -82,7 +82,7 @@ module API
           desc: 'Return issues sorted in `asc` or `desc` order.'
         optional :due_date, type: String, values: %w[0 overdue week month next_month_and_previous_two_weeks] << '',
           desc: 'Return issues that have no due date (`0`), or whose due date is this week, this month, between two weeks ago and next month, or which are overdue. Accepts: `overdue`, `week`, `month`, `next_month_and_previous_two_weeks`, `0`'
-        optional :issue_type, type: String, values: WorkItem::Type.base_types.keys, desc: "The type of the issue. Accepts: #{WorkItem::Type.base_types.keys.join(', ')}"
+        optional :issue_type, type: String, values: WorkItem::Type.allowed_types_for_issues, desc: "The type of the issue. Accepts: #{WorkItem::Type.allowed_types_for_issues.join(', ')}"
         use :issues_stats_params
         use :pagination
@@ -99,7 +99,7 @@ module API
         optional :due_date, type: String, desc: 'Date string in the format YEAR-MONTH-DAY'
         optional :confidential, type: Boolean, desc: 'Boolean parameter if the issue should be confidential'
         optional :discussion_locked, type: Boolean, desc: " Boolean parameter indicating if the issue's discussion is locked"
-        optional :issue_type, type: String, values: WorkItem::Type.base_types.keys, desc: "The type of the issue. Accepts: #{WorkItem::Type.base_types.keys.join(', ')}"
+        optional :issue_type, type: String, values: WorkItem::Type.allowed_types_for_issues, desc: "The type of the issue. Accepts: #{WorkItem::Type.allowed_types_for_issues.join(', ')}"
         use :optional_issue_params_ee
       end


@@ -12524,6 +12524,9 @@ msgstr ""
 msgid "Download CSV"
 msgstr ""
+msgid "Download PDF"
+msgstr ""
+
 msgid "Download artifacts"
 msgstr ""
@@ -35558,6 +35561,9 @@ msgstr ""
 msgid "This GitLab instance is undergoing maintenance and is operating in read-only mode."
 msgstr ""
+msgid "This PDF is too large to display. Please download to view."
+msgstr ""
+
 msgid "This Project is currently archived and read-only. Please unarchive the project first if you want to resume Pull mirroring"
 msgstr ""


@@ -68,7 +68,8 @@ module QA
           path: path,
           name: path,
           visibility: 'public',
-          require_two_factor_authentication: @require_two_factor_authentication
+          require_two_factor_authentication: @require_two_factor_authentication,
+          avatar: avatar
         }
       end


@@ -7,7 +7,7 @@ module QA
     class GroupBase < Base
       include Members
-      attr_accessor :path
+      attr_accessor :path, :avatar
       attributes :id,
                  :runners_token,


@@ -69,7 +69,8 @@ module QA
         {
           path: path,
           name: path,
-          visibility: 'public'
+          visibility: 'public',
+          avatar: avatar
         }
       end


@@ -26,6 +26,7 @@ module QA
         Resource::Sandbox.fabricate_via_api! do |group|
           group.api_client = api_client
           group.path = "source-group-for-import-#{SecureRandom.hex(4)}"
+          group.avatar = File.new('qa/fixtures/designs/tanuki.jpg', 'r')
         end
       end
@@ -37,6 +38,10 @@ module QA
         end
       end
+
+      let(:import_failures) do
+        imported_group.import_details.sum([]) { |details| details[:failures] }
+      end
       before do
         sandbox.add_member(user, Resource::Members::AccessLevel::MAINTAINER)
       end
@@ -73,6 +78,8 @@ module QA
             label.group = subgroup
             label.title = "subgroup-#{SecureRandom.hex(4)}"
           end
+
+          imported_group # trigger import
         end
         it(
@@ -87,6 +94,8 @@ module QA
             expect(imported_subgroup.reload!).to eq(subgroup)
             expect(imported_subgroup.labels).to include(*subgroup.labels)
+
+            expect(import_failures).to be_empty, "Expected no errors, received: #{import_failures}"
           end
         end
       end
@@ -108,6 +117,8 @@ module QA
             badge.link_url = "http://example.com/badge"
             badge.image_url = "http://shields.io/badge"
           end
+
+          imported_group # trigger import
         end
         it(
@@ -124,6 +135,8 @@ module QA
             expect(imported_milestone.updated_at).to eq(source_milestone.updated_at)
             expect(imported_group.badges).to eq(source_group.badges)
+
+            expect(import_failures).to be_empty, "Expected no errors, received: #{import_failures}"
           end
         end
       end
@@ -139,6 +152,8 @@ module QA
         before do
           member.set_public_email
           source_group.add_member(member, Resource::Members::AccessLevel::DEVELOPER)
+
+          imported_group # trigger import
         end
         after do
@@ -153,8 +168,11 @@ module QA
           imported_member = imported_group.reload!.members.find { |usr| usr.username == member.username }
+          aggregate_failures do
             expect(imported_member).not_to be_nil
             expect(imported_member.access_level).to eq(Resource::Members::AccessLevel::DEVELOPER)
+            expect(import_failures).to be_empty, "Expected no errors, received: #{import_failures}"
+          end
         end
       end
     end


@@ -1197,6 +1197,15 @@ RSpec.describe Projects::IssuesController do
       end
     end
+    context 'when trying to create a task' do
+      it 'defaults to issue type' do
+        issue = post_new_issue(issue_type: 'task')
+
+        expect(issue.issue_type).to eq('issue')
+        expect(issue.work_item_type.base_type).to eq('issue')
+      end
+    end
+
     it 'creates the issue successfully', :aggregate_failures do
       issue = post_new_issue


@@ -5,7 +5,7 @@ require 'spec_helper'
 RSpec.describe GroupsFinder do
   include AdminModeHelper
-  describe '#execute' do
+  shared_examples '#execute' do
     let(:user) { create(:user) }
     describe 'root level groups' do
@@ -20,6 +20,7 @@ RSpec.describe GroupsFinder do
                    user_private_group)
         :regular | { all_available: false } | %i(user_public_group user_internal_group user_private_group)
         :regular | {} | %i(public_group internal_group user_public_group user_internal_group user_private_group)
+        :regular | { min_access_level: Gitlab::Access::DEVELOPER } | %i(user_public_group user_internal_group user_private_group)
         :external | { all_available: true } | %i(public_group user_public_group user_internal_group user_private_group)
         :external | { all_available: false } | %i(user_public_group user_internal_group user_private_group)
@@ -261,4 +262,16 @@ RSpec.describe GroupsFinder do
       end
     end
   end
+
+  describe '#execute' do
+    include_examples '#execute'
+
+    context 'when use_traversal_ids_groups_finder feature flags is disabled' do
+      before do
+        stub_feature_flags(use_traversal_ids_groups_finder: false)
+      end
+
+      include_examples '#execute'
+    end
+  end
 end


@@ -1,22 +1,59 @@
-import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
 import Component from '~/repository/components/blob_viewers/pdf_viewer.vue';
 import PdfViewer from '~/blob/pdf/pdf_viewer.vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
 describe('PDF Viewer', () => {
   let wrapper;
-  const propsData = { url: 'some/pdf_blob.pdf' };
-  const createComponent = () => {
-    wrapper = shallowMount(Component, { propsData });
+  const defaultPropsData = { url: 'some/pdf_blob.pdf' };
+  const createComponent = (fileSize = 999) => {
+    wrapper = shallowMountExtended(Component, { propsData: { ...defaultPropsData, fileSize } });
   };
   const findPDFViewer = () => wrapper.findComponent(PdfViewer);
+  const findHelpText = () => wrapper.find('p');
+  const findDownLoadButton = () => wrapper.findComponent(GlButton);
   it('renders a PDF Viewer component', () => {
     createComponent();
     expect(findPDFViewer().exists()).toBe(true);
-    expect(findPDFViewer().props('pdf')).toBe(propsData.url);
+    expect(findPDFViewer().props('pdf')).toBe(defaultPropsData.url);
+  });
+  describe('Too large', () => {
+    beforeEach(() => createComponent(20000000));
+    it('does not a PDF Viewer component', () => {
+      expect(findPDFViewer().exists()).toBe(false);
+    });
+    it('renders help text', () => {
+      expect(findHelpText().text()).toBe(
+        'This PDF is too large to display. Please download to view.',
+      );
+    });
+    it('renders a download button', () => {
+      expect(findDownLoadButton().text()).toBe('Download PDF');
+      expect(findDownLoadButton().props('icon')).toBe('download');
+    });
+  });
+  describe('Too many pages', () => {
+    beforeEach(() => {
+      createComponent();
+      findPDFViewer().vm.$emit('pdflabload', 100);
+    });
+    it('does not a PDF Viewer component', () => {
+      expect(findPDFViewer().exists()).toBe(false);
+    });
+    it('renders a download button', () => {
+      expect(findDownLoadButton().exists()).toBe(true);
+    });
   });
 });


@@ -5,9 +5,9 @@ require 'spec_helper'
 RSpec.describe Types::IssueTypeEnum do
   specify { expect(described_class.graphql_name).to eq('IssueType') }
-  it 'exposes all the existing issue type values' do
-    expect(described_class.values.keys).to include(
-      *%w[ISSUE INCIDENT]
+  it 'exposes all the existing issue type values except for task' do
+    expect(described_class.values.keys).to match_array(
+      %w[ISSUE INCIDENT TEST_CASE REQUIREMENT]
     )
   end
 end


@@ -4,18 +4,28 @@ require 'spec_helper'
 require_migration!
 RSpec.describe CreateBaseWorkItemTypes, :migration do
-  let!(:work_item_types) { table(:work_item_types) }
+  include MigrationHelpers::WorkItemTypesHelper
+
+  let_it_be(:work_item_types) { table(:work_item_types) }
+
+  let(:base_types) do
+    {
+      issue: 0,
+      incident: 1,
+      test_case: 2,
+      requirement: 3
+    }
+  end
   after(:all) do
     # Make sure base types are recreated after running the migration
     # because migration specs are not run in a transaction
-    WorkItem::Type.delete_all
-    Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.import
+    reset_work_item_types
   end
   it 'creates default data' do
     # Need to delete all as base types are seeded before entire test suite
-    WorkItem::Type.delete_all
+    work_item_types.delete_all
     reversible_migration do |migration|
       migration.before -> {
@@ -24,8 +34,8 @@ RSpec.describe CreateBaseWorkItemTypes, :migration do
       }
       migration.after -> {
-        expect(work_item_types.count).to eq 4
-        expect(work_item_types.all.pluck(:base_type)).to match_array WorkItem::Type.base_types.values
+        expect(work_item_types.count).to eq(4)
+        expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values)
       }
     end
   end


@@ -4,19 +4,29 @@ require 'spec_helper'
 require_migration!
 RSpec.describe UpsertBaseWorkItemTypes, :migration do
-  let!(:work_item_types) { table(:work_item_types) }
+  include MigrationHelpers::WorkItemTypesHelper
+
+  let_it_be(:work_item_types) { table(:work_item_types) }
+
+  let(:base_types) do
+    {
+      issue: 0,
+      incident: 1,
+      test_case: 2,
+      requirement: 3
+    }
+  end
   after(:all) do
     # Make sure base types are recreated after running the migration
     # because migration specs are not run in a transaction
-    WorkItem::Type.delete_all
-    Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.import
+    reset_work_item_types
   end
   context 'when no default types exist' do
     it 'creates default data' do
       # Need to delete all as base types are seeded before entire test suite
-      WorkItem::Type.delete_all
+      work_item_types.delete_all
       expect(work_item_types.count).to eq(0)
@@ -29,7 +39,7 @@ RSpec.describe UpsertBaseWorkItemTypes, :migration do
         migration.after -> {
           expect(work_item_types.count).to eq(4)
-          expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values)
+          expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values)
         }
       end
     end
@@ -37,16 +47,21 @@ RSpec.describe UpsertBaseWorkItemTypes, :migration do
   context 'when default types already exist' do
     it 'does not create default types again' do
-      expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values)
+      # Database needs to be in a similar state as when this migration was created
+      work_item_types.delete_all
+      work_item_types.find_or_create_by!(name: 'Issue', namespace_id: nil, base_type: base_types[:issue], icon_name: 'issue-type-issue')
+      work_item_types.find_or_create_by!(name: 'Incident', namespace_id: nil, base_type: base_types[:incident], icon_name: 'issue-type-incident')
+      work_item_types.find_or_create_by!(name: 'Test Case', namespace_id: nil, base_type: base_types[:test_case], icon_name: 'issue-type-test-case')
+      work_item_types.find_or_create_by!(name: 'Requirement', namespace_id: nil, base_type: base_types[:requirement], icon_name: 'issue-type-requirements')
      reversible_migration do |migration|
        migration.before -> {
-          expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values)
+          expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values)
        }
        migration.after -> {
          expect(work_item_types.count).to eq(4)
-          expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values)
+          expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values)
        }
      end
    end


@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddTaskToWorkItemTypes, :migration do
+  include MigrationHelpers::WorkItemTypesHelper
+
+  let_it_be(:work_item_types) { table(:work_item_types) }
+
+  let(:base_types) do
+    {
+      issue: 0,
+      incident: 1,
+      test_case: 2,
+      requirement: 3,
+      task: 4
+    }
+  end
+
+  after(:all) do
+    # Make sure base types are recreated after running the migration
+    # because migration specs are not run in a transaction
+    reset_work_item_types
+  end
+
+  it 'skips creating the record if it already exists' do
+    reset_db_state_prior_to_migration
+    work_item_types.find_or_create_by!(name: 'Task', namespace_id: nil, base_type: base_types[:task], icon_name: 'issue-type-task')
+
+    expect do
+      migrate!
+    end.to not_change(work_item_types, :count)
+  end
+
+  it 'adds task to base work item types' do
+    reset_db_state_prior_to_migration
+
+    expect do
+      migrate!
+    end.to change(work_item_types, :count).from(4).to(5)
+
+    expect(work_item_types.all.pluck(:base_type)).to include(base_types[:task])
+  end
+
+  def reset_db_state_prior_to_migration
+    # Database needs to be in a similar state as when this migration was created
+    work_item_types.delete_all
+    work_item_types.find_or_create_by!(name: 'Issue', namespace_id: nil, base_type: base_types[:issue], icon_name: 'issue-type-issue')
+    work_item_types.find_or_create_by!(name: 'Incident', namespace_id: nil, base_type: base_types[:incident], icon_name: 'issue-type-incident')
+    work_item_types.find_or_create_by!(name: 'Test Case', namespace_id: nil, base_type: base_types[:test_case], icon_name: 'issue-type-test-case')
+    work_item_types.find_or_create_by!(name: 'Requirement', namespace_id: nil, base_type: base_types[:requirement], icon_name: 'issue-type-requirements')
+  end
+end


@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddIndexToProjectsOnMarkedForDeletionAt do
+  it 'correctly migrates up and down' do
+    reversible_migration do |migration|
+      migration.before -> {
+        expect(ActiveRecord::Base.connection.indexes('projects').map(&:name)).not_to include('index_projects_not_aimed_for_deletion')
+      }
+
+      migration.after -> {
+        expect(ActiveRecord::Base.connection.indexes('projects').map(&:name)).to include('index_projects_not_aimed_for_deletion')
+      }
+    end
+  end
+end


@@ -19,10 +19,10 @@ RSpec.describe WorkItem::Type do
     it 'deletes type but not unrelated issues' do
       type = create(:work_item_type)
-      expect(WorkItem::Type.count).to eq(5)
+      expect(WorkItem::Type.count).to eq(6)
       expect { type.destroy! }.not_to change(Issue, :count)
-      expect(WorkItem::Type.count).to eq(4)
+      expect(WorkItem::Type.count).to eq(5)
     end
   end


@@ -86,7 +86,7 @@ RSpec.describe 'Query.project.pipeline' do
       create(:ci_build_need, build: test_job, name: 'my test job')
     end
-    it 'reports the build needs and execution requirements' do
+    it 'reports the build needs and execution requirements', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/347290' do
       post_graphql(query, current_user: user)
       expect(jobs_graphql_data).to contain_exactly(


@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module MigrationHelpers
+  module WorkItemTypesHelper
+    DEFAULT_WORK_ITEM_TYPES = {
+      issue: { name: 'Issue', icon_name: 'issue-type-issue', enum_value: 0 },
+      incident: { name: 'Incident', icon_name: 'issue-type-incident', enum_value: 1 },
+      test_case: { name: 'Test Case', icon_name: 'issue-type-test-case', enum_value: 2 },
+      requirement: { name: 'Requirement', icon_name: 'issue-type-requirements', enum_value: 3 },
+      task: { name: 'Task', icon_name: 'issue-type-task', enum_value: 4 }
+    }.freeze
+
+    def reset_work_item_types
+      work_item_types_table.delete_all
+
+      DEFAULT_WORK_ITEM_TYPES.each do |type, attributes|
+        work_item_types_table.create!(base_type: attributes[:enum_value], **attributes.slice(:name, :icon_name))
+      end
+    end
+
+    private
+
+    def work_item_types_table
+      table(:work_item_types)
+    end
+  end
+end