Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-12-11 00:15:10 +00:00
parent 9fee949311
commit 3d414ac4da
21 changed files with 455 additions and 31 deletions

View File

@@ -410,7 +410,7 @@ class ApplicationSetting < ApplicationRecord
if: :external_authorization_service_enabled
validates :spam_check_endpoint_url,
-    addressable_url: { schemes: %w(grpc) }, allow_blank: true
+    addressable_url: { schemes: %w(tls grpc) }, allow_blank: true
validates :spam_check_endpoint_url,
presence: true,
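
With `tls` added to the allowed schemes, both transports now pass validation. A quick sketch of the expected outcomes (illustrative values; the authoritative cases are the ApplicationSetting spec additions later in this commit):

```ruby
setting = ApplicationSetting.new(spam_check_endpoint_enabled: true)

setting.spam_check_endpoint_url = 'grpc://example.org/spam_check'  # valid
setting.spam_check_endpoint_url = 'tls://example.org/spam_check'   # valid, newly allowed
setting.spam_check_endpoint_url = 'https://example.org/spam_check' # invalid: scheme not in %w(tls grpc)
```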

View File

@@ -6,6 +6,7 @@ module BulkImports
include Gitlab::Utils::StrongMemoize
UPLOADS_RELATION = 'uploads'
SELF_RELATION = 'self'
def initialize(portable)
@portable = portable
@@ -28,7 +29,11 @@ module BulkImports
end
def portable_relations
-      tree_relations + file_relations - skipped_relations
+      tree_relations + file_relations + self_relation - skipped_relations
end
def self_relation?(relation)
relation == SELF_RELATION
end
def tree_relation?(relation)
@@ -45,6 +50,10 @@ module BulkImports
portable_tree[:include].find { |include| include[relation.to_sym] }
end
+    def portable_relations_tree
+      @portable_relations_tree ||= attributes_finder.find_relations_tree(portable_class_sym).deep_stringify_keys
+    end
private
attr_reader :portable
@@ -67,10 +76,6 @@ module BulkImports
@portable_class_sym ||= portable_class.to_s.demodulize.underscore.to_sym
end
-    def portable_relations_tree
-      @portable_relations_tree ||= attributes_finder.find_relations_tree(portable_class_sym).deep_stringify_keys
-    end
def import_export_yaml
raise NotImplementedError
end
@@ -86,6 +91,10 @@ module BulkImports
def skipped_relations
[]
end
def self_relation
[SELF_RELATION]
end
end
end
end

View File

@@ -59,7 +59,7 @@ module BulkImports
end
def export_service
-      @export_service ||= if config.tree_relation?(relation)
+      @export_service ||= if config.tree_relation?(relation) || config.self_relation?(relation)
TreeExportService.new(portable, config.export_path, relation)
elsif config.file_relation?(relation)
FileExportService.new(portable, config.export_path, relation)
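
Together with the `BaseConfig` change above, the dispatch now routes the new `self` pseudo-relation through the tree exporter. A condensed sketch of the flow (plumbing omitted; names taken from the hunks in this commit):

```ruby
relation = 'self'

config.self_relation?(relation) # => true, via the new SELF_RELATION constant

service =
  if config.tree_relation?(relation) || config.self_relation?(relation)
    TreeExportService.new(portable, config.export_path, relation)
  elsif config.file_relation?(relation)
    FileExportService.new(portable, config.export_path, relation)
  end
# => a TreeExportService that serializes the portable's own attributes
```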

View File

@@ -10,6 +10,8 @@ module BulkImports
end
def execute
return serializer.serialize_root(config.class::SELF_RELATION) if self_relation?
relation_definition = config.tree_relation_definition_for(relation)
raise BulkImports::Error, 'Unsupported relation export type' unless relation_definition
@@ -18,6 +20,8 @@
end
def exported_filename
return "#{relation}.json" if self_relation?
"#{relation}.ndjson"
end
@@ -39,5 +43,9 @@
def json_writer
::Gitlab::ImportExport::Json::NdjsonWriter.new(export_path)
end
def self_relation?
relation == config.class::SELF_RELATION
end
end
end
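
For the `self` relation the service now short-circuits to a single JSON document instead of an NDJSON stream. A behavior sketch (assuming a project portable and an export path; the service spec at the end of this commit covers the same cases):

```ruby
service = BulkImports::TreeExportService.new(project, export_path, 'self')

service.exported_filename # => "self.json" (other relations keep "<relation>.ndjson")
service.execute           # calls serializer.serialize_root('self') instead of
                          # looking up a tree relation definition
```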

View File

@@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/344855
milestone: '14.5'
type: development
group: group::access
- default_enabled: false
+ default_enabled: true

View File

@@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/338930
milestone: '14.3'
type: development
group: group::project management
- default_enabled: false
+ default_enabled: true

View File

@@ -0,0 +1,18 @@
# frozen_string_literal: true
class AddUniquenessForEvidenceOccurrenceId < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
INDEX_NAME = 'finding_evidences_on_vulnerability_occurrence_id'
UNIQUE_INDEX_NAME = 'finding_evidences_on_unique_vulnerability_occurrence_id'
def up
add_concurrent_index :vulnerability_finding_evidences, [:vulnerability_occurrence_id], unique: true, name: UNIQUE_INDEX_NAME
remove_concurrent_index :vulnerability_finding_evidences, [:vulnerability_occurrence_id], name: INDEX_NAME
end
def down
add_concurrent_index :vulnerability_finding_evidences, [:vulnerability_occurrence_id], name: INDEX_NAME
remove_concurrent_index :vulnerability_finding_evidences, [:vulnerability_occurrence_id], name: UNIQUE_INDEX_NAME
end
end
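
Since the concurrent helpers must run outside a transaction (hence `disable_ddl_transaction!`), `up` amounts to roughly the following DDL; the resulting unique index appears verbatim in the `structure.sql` hunk below:

```sql
-- approximate DDL generated by the up method
CREATE UNIQUE INDEX CONCURRENTLY finding_evidences_on_unique_vulnerability_occurrence_id
  ON vulnerability_finding_evidences USING btree (vulnerability_occurrence_id);
DROP INDEX CONCURRENTLY finding_evidences_on_vulnerability_occurrence_id;
```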

View File

@@ -0,0 +1 @@
567a80916756adcca93bdbe82d69a923e539aac74146e714b58a1b023134d2c9

View File

@@ -25039,7 +25039,7 @@ CREATE INDEX finding_evidence_sources_on_finding_evidence_id ON vulnerability_fi
CREATE INDEX finding_evidence_supporting_messages_on_finding_evidence_id ON vulnerability_finding_evidence_supporting_messages USING btree (vulnerability_finding_evidence_id);
- CREATE INDEX finding_evidences_on_vulnerability_occurrence_id ON vulnerability_finding_evidences USING btree (vulnerability_occurrence_id);
+ CREATE UNIQUE INDEX finding_evidences_on_unique_vulnerability_occurrence_id ON vulnerability_finding_evidences USING btree (vulnerability_occurrence_id);
CREATE INDEX finding_links_on_vulnerability_occurrence_id ON vulnerability_finding_links USING btree (vulnerability_occurrence_id);

View File

@@ -398,6 +398,16 @@ four standard [pagination arguments](#connection-pagination-arguments):
| <a id="querysnippetstype"></a>`type` | [`TypeEnum`](#typeenum) | Type of snippet. |
| <a id="querysnippetsvisibility"></a>`visibility` | [`VisibilityScopesEnum`](#visibilityscopesenum) | Visibility of the snippet. |
### `Query.subscriptionFutureEntries`
Fields related to entries in future subscriptions.
Returns [`SubscriptionFutureEntryConnection`](#subscriptionfutureentryconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.
### `Query.timelogs`
Find timelogs visible to the current user.
@@ -7566,6 +7576,29 @@ The edge type for [`Submodule`](#submodule).
| <a id="submoduleedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="submoduleedgenode"></a>`node` | [`Submodule`](#submodule) | The item at the end of the edge. |
#### `SubscriptionFutureEntryConnection`
The connection type for [`SubscriptionFutureEntry`](#subscriptionfutureentry).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="subscriptionfutureentryconnectionedges"></a>`edges` | [`[SubscriptionFutureEntryEdge]`](#subscriptionfutureentryedge) | A list of edges. |
| <a id="subscriptionfutureentryconnectionnodes"></a>`nodes` | [`[SubscriptionFutureEntry]`](#subscriptionfutureentry) | A list of nodes. |
| <a id="subscriptionfutureentryconnectionpageinfo"></a>`pageInfo` | [`PageInfo!`](#pageinfo) | Information to aid in pagination. |
#### `SubscriptionFutureEntryEdge`
The edge type for [`SubscriptionFutureEntry`](#subscriptionfutureentry).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="subscriptionfutureentryedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="subscriptionfutureentryedgenode"></a>`node` | [`SubscriptionFutureEntry`](#subscriptionfutureentry) | The item at the end of the edge. |
#### `TerraformStateConnection`
The connection type for [`TerraformState`](#terraformstate).
@@ -14739,6 +14772,23 @@ Represents the Geo sync and verification state of a snippet repository.
| <a id="submoduletype"></a>`type` | [`EntryType!`](#entrytype) | Type of tree entry. |
| <a id="submoduleweburl"></a>`webUrl` | [`String`](#string) | Web URL for the sub-module. |
### `SubscriptionFutureEntry`
Represents an entry from the future subscriptions.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="subscriptionfutureentrycompany"></a>`company` | [`String`](#string) | Company of the licensee. |
| <a id="subscriptionfutureentryemail"></a>`email` | [`String`](#string) | Email of the licensee. |
| <a id="subscriptionfutureentryexpiresat"></a>`expiresAt` | [`Date`](#date) | Date when the license expires. |
| <a id="subscriptionfutureentryname"></a>`name` | [`String`](#string) | Name of the licensee. |
| <a id="subscriptionfutureentryplan"></a>`plan` | [`String!`](#string) | Name of the subscription plan. |
| <a id="subscriptionfutureentrystartsat"></a>`startsAt` | [`Date`](#date) | Date when the license started. |
| <a id="subscriptionfutureentrytype"></a>`type` | [`String!`](#string) | Type of license the subscription will yield. |
| <a id="subscriptionfutureentryusersinlicensecount"></a>`usersInLicenseCount` | [`Int`](#int) | Number of paid user seats. |
### `TaskCompletionStatus`
Completion status of tasks.

View File

@@ -405,9 +405,9 @@ listed in the descriptions of the relevant settings.
| `sourcegraph_enabled` | boolean | no | Enables Sourcegraph integration. Default is `false`. **If enabled, requires** `sourcegraph_url`. |
| `sourcegraph_public_only` | boolean | no | Blocks Sourcegraph from being loaded on private and internal projects. Default is `true`. |
| `sourcegraph_url` | string | required by: `sourcegraph_enabled` | The Sourcegraph instance URL for integration. |
- | `spam_check_endpoint_enabled` | boolean | no | Enables Spam Check via external API endpoint. Default is `false`. |
- | `spam_check_endpoint_url` | string | no | URL of the external Spam Check service endpoint. |
- | `spam_check_api_key` | string | no | The API key used by GitLab for accessing the Spam Check service endpoint. |
+ | `spam_check_endpoint_enabled` | boolean | no | Enables spam checking using the external Spam Check API endpoint. Default is `false`. |
+ | `spam_check_endpoint_url` | string | no | URL of the external Spamcheck service endpoint. Valid URI schemes are `grpc` or `tls`. Specifying `tls` forces communication to be encrypted. |
+ | `spam_check_api_key` | string | no | API key used by GitLab for accessing the Spam Check service endpoint. |
| `suggest_pipeline_enabled` | boolean | no | Enable pipeline suggestion banner. |
| `terminal_max_session_time` | integer | no | Maximum time for web terminal websocket connection (in seconds). Set to `0` for unlimited time. |
| `terms` | text | required by: `enforce_terms` | (**Required by:** `enforce_terms`) Markdown content for the ToS. |
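
For illustration, the three spam-check settings can be set together from a Rails console (values here are placeholders; the REST equivalent is a `PUT /application/settings` request):

```ruby
# Hypothetical values: 'tls://' selects encrypted transport, 'grpc://' does not.
ApplicationSetting.current.update!(
  spam_check_endpoint_enabled: true,
  spam_check_endpoint_url: 'tls://spamcheck.example.com',
  spam_check_api_key: 'example-api-key'
)
```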

View File

@@ -204,7 +204,7 @@ on the GitLab server. The next time you pull the same image, GitLab gets the lat
information about the image from Docker Hub, but serves the existing blobs
from the GitLab server.
- ## Clear the Dependency Proxy cache
+ ## Reduce storage usage
Blobs are kept forever on the GitLab server, and there is no hard limit on how much data can be
stored.
@@ -219,6 +219,16 @@ If you clear the cache, the next time a pipeline runs it must pull an image or t
### Cleanup policies
#### Enable cleanup policies from within GitLab
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/340777) in GitLab 14.6.
You can enable an automatic time-to-live (TTL) policy for the Dependency Proxy from the user
interface. To do this, navigate to your group's **Settings > Packages & Registries > Dependency Proxy**
and enable the setting to automatically clear items from the cache after 90 days.
#### Enable cleanup policies with GraphQL
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/294187) in GitLab 14.4.
The cleanup policy is a scheduled job you can use to clear cached images that are no longer used,
@@ -249,8 +259,7 @@ mutation {
```
See the [Getting started with GraphQL](../../../api/graphql/getting_started.md)
- guide to learn how to make GraphQL queries. Support for enabling and configuring cleanup policies in
- the UI is tracked in [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/340777).
+ guide to learn how to make GraphQL queries.
When the policy is initially enabled, the default TTL setting is 90 days. Once enabled, stale
dependency proxy files are queued for deletion each day. Deletion may not occur right away due to

View File

@@ -0,0 +1,85 @@
# frozen_string_literal: true
module BulkImports
module Projects
module Pipelines
class ProjectAttributesPipeline
include Pipeline
transformer ::BulkImports::Common::Transformers::ProhibitedAttributesTransformer
def extract(context)
download_service(tmp_dir, context).execute
decompression_service(tmp_dir).execute
project_attributes = json_decode(json_attributes)
BulkImports::Pipeline::ExtractedData.new(data: project_attributes)
end
def transform(_, data)
subrelations = config.portable_relations_tree.keys.map(&:to_s)
Gitlab::ImportExport::AttributeCleaner.clean(
relation_hash: data,
relation_class: Project,
excluded_keys: config.relation_excluded_keys(:project)
).except(*subrelations)
end
def load(_, data)
portable.assign_attributes(data)
portable.reconcile_shared_runners_setting!
portable.drop_visibility_level!
portable.save!
end
def after_run(_)
FileUtils.remove_entry(tmp_dir)
end
def json_attributes
@json_attributes ||= File.read(File.join(tmp_dir, filename))
end
private
def tmp_dir
@tmp_dir ||= Dir.mktmpdir
end
def config
@config ||= BulkImports::FileTransfer.config_for(portable)
end
def download_service(tmp_dir, context)
@download_service ||= BulkImports::FileDownloadService.new(
configuration: context.configuration,
relative_url: context.entity.relation_download_url_path(BulkImports::FileTransfer::BaseConfig::SELF_RELATION),
dir: tmp_dir,
filename: compressed_filename
)
end
def decompression_service(tmp_dir)
@decompression_service ||= BulkImports::FileDecompressionService.new(dir: tmp_dir, filename: compressed_filename)
end
def compressed_filename
"#{filename}.gz"
end
def filename
"#{BulkImports::FileTransfer::BaseConfig::SELF_RELATION}.json"
end
def json_decode(string)
Gitlab::Json.parse(string)
rescue JSON::ParserError => e
Gitlab::ErrorTracking.log_exception(e)
raise BulkImports::Error, 'Incorrect JSON format'
end
end
end
end
end
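
A condensed sketch of the extract/transform/load flow this pipeline implements (hypothetical `pipeline` and `context` objects; method order as defined above):

```ruby
# extract: download self.json.gz, decompress it, parse the JSON attributes
extracted = pipeline.extract(context)
# transform: strip prohibited/excluded keys and any subrelation keys
attributes = pipeline.transform(context, extracted.data.first)
# load: assign the attributes and persist, reconciling restricted settings
pipeline.load(context, attributes)
# after_run: the temporary download directory is removed
```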

View File

@@ -15,6 +15,10 @@ module BulkImports
pipeline: BulkImports::Projects::Pipelines::RepositoryPipeline,
stage: 1
},
project_attributes: {
pipeline: BulkImports::Projects::Pipelines::ProjectAttributesPipeline,
stage: 1
},
labels: {
pipeline: BulkImports::Common::Pipelines::LabelsPipeline,
stage: 2

View File

@@ -40,6 +40,13 @@ module Gitlab
end
end
+    def serialize_root(exportable_path = @exportable_path)
+      attributes = exportable.as_json(
+        relations_schema.merge(include: nil, preloads: nil))
+      json_writer.write_attributes(exportable_path, attributes)
+    end
def serialize_relation(definition)
raise ArgumentError, 'definition needs to be Hash' unless definition.is_a?(Hash)
raise ArgumentError, 'definition needs to have exactly one Hash element' unless definition.one?
@@ -60,12 +67,6 @@ module Gitlab
attr_reader :json_writer, :relations_schema, :exportable
-    def serialize_root
-      attributes = exportable.as_json(
-        relations_schema.merge(include: nil, preloads: nil))
-      json_writer.write_attributes(@exportable_path, attributes)
-    end
def serialize_many_relations(key, records, options)
enumerator = Enumerator.new do |items|
key_preloads = preloads&.dig(key)
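
The effect of the move: `serialize_root` is now public and takes an optional path, defaulting to the serializer's own `@exportable_path`. That is what lets `TreeExportService` write the portable's attributes under the `self` path:

```ruby
serializer.serialize_root         # default: the configured exportable path
serializer.serialize_root('self') # explicit path, as used for the new pseudo-relation
```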

View File

@@ -21,14 +21,16 @@ module Gitlab
update: ::Spamcheck::Action::UPDATE
}.freeze
+    URL_SCHEME_REGEX = %r{^grpc://|^tls://}.freeze
def initialize
@endpoint_url = Gitlab::CurrentSettings.current_application_settings.spam_check_endpoint_url
-      # remove the `grpc://` as it's only useful to ensure we're expecting to
-      # connect with Spamcheck
-      @endpoint_url = @endpoint_url.gsub(%r(^grpc:\/\/), '')
-      @creds = stub_creds
+      @creds = client_creds(@endpoint_url)
+      # remove the `grpc://` or 'tls://' as it's only useful to ensure we're expecting to
+      # connect with Spamcheck
+      @endpoint_url = @endpoint_url.sub(URL_SCHEME_REGEX, '')
end
def issue_spam?(spam_issue:, user:, context: {})
@@ -96,11 +98,11 @@ module Gitlab
nanos: ar_timestamp.to_time.nsec)
end
-    def stub_creds
-      if Rails.env.development? || Rails.env.test?
-        :this_channel_is_insecure
+    def client_creds(url)
+      if URI(url).scheme == 'tls' || Rails.env.production?
+        GRPC::Core::ChannelCredentials.new(::Gitlab::X509::Certificate.ca_certs_bundle)
      else
-        GRPC::Core::ChannelCredentials.new ::Gitlab::X509::Certificate.ca_certs_bundle
+        :this_channel_is_insecure
end
end
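
The scheme now does double duty: it selects the channel credentials, then is stripped before the host reaches the gRPC stub. A small sketch of both steps (example hostname assumed):

```ruby
require 'uri'

URL_SCHEME_REGEX = %r{^grpc://|^tls://}.freeze

url = 'tls://spamcheck.example.com'
URI(url).scheme               # => "tls"  -> GRPC::Core::ChannelCredentials (secure)
url.sub(URL_SCHEME_REGEX, '') # => "spamcheck.example.com", handed to the stub

url = 'grpc://spamcheck.example.com'
URI(url).scheme               # => "grpc" -> :this_channel_is_insecure outside production
```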

View File

@@ -0,0 +1,159 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
let_it_be(:project) { create(:project) }
let_it_be(:bulk_import) { create(:bulk_import) }
let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import) }
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:tmpdir) { Dir.mktmpdir }
let(:extra) { {} }
let(:project_attributes) do
{
'description' => 'description',
'visibility_level' => 0,
'archived' => false,
'merge_requests_template' => 'test',
'merge_requests_rebase_enabled' => true,
'approvals_before_merge' => 0,
'reset_approvals_on_push' => true,
'merge_requests_ff_only_enabled' => true,
'issues_template' => 'test',
'shared_runners_enabled' => true,
'build_coverage_regex' => 'build_coverage_regex',
'build_allow_git_fetch' => true,
'build_timeout' => 3600,
'pending_delete' => false,
'public_builds' => true,
'last_repository_check_failed' => nil,
'only_allow_merge_if_pipeline_succeeds' => true,
'has_external_issue_tracker' => false,
'request_access_enabled' => true,
'has_external_wiki' => false,
'ci_config_path' => nil,
'only_allow_merge_if_all_discussions_are_resolved' => true,
'printing_merge_request_link_enabled' => true,
'auto_cancel_pending_pipelines' => 'enabled',
'service_desk_enabled' => false,
'delete_error' => nil,
'disable_overriding_approvers_per_merge_request' => true,
'resolve_outdated_diff_discussions' => true,
'jobs_cache_index' => nil,
'external_authorization_classification_label' => nil,
'pages_https_only' => false,
'merge_requests_author_approval' => false,
'merge_requests_disable_committers_approval' => true,
'require_password_to_approve' => true,
'remove_source_branch_after_merge' => true,
'autoclose_referenced_issues' => true,
'suggestion_commit_message' => 'Test!'
}.merge(extra)
end
subject(:pipeline) { described_class.new(context) }
before do
allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
end
after do
FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
end
describe '#run' do
before do
allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_attributes))
pipeline.run
end
it 'imports project attributes', :aggregate_failures do
project_attributes.each_pair do |key, value|
expect(project.public_send(key)).to eq(value)
end
end
context 'when project is archived' do
let(:extra) { { 'archived' => true } }
it 'sets project as archived' do
expect(project.archived).to eq(true)
end
end
end
describe '#extract' do
before do
file_download_service = instance_double("BulkImports::FileDownloadService")
file_decompression_service = instance_double("BulkImports::FileDecompressionService")
expect(BulkImports::FileDownloadService)
.to receive(:new)
.with(
configuration: context.configuration,
relative_url: "/#{entity.pluralized_name}/#{entity.source_full_path}/export_relations/download?relation=self",
dir: tmpdir,
filename: 'self.json.gz')
.and_return(file_download_service)
expect(BulkImports::FileDecompressionService)
.to receive(:new)
.with(dir: tmpdir, filename: 'self.json.gz')
.and_return(file_decompression_service)
expect(file_download_service).to receive(:execute)
expect(file_decompression_service).to receive(:execute)
end
it 'downloads, decompresses & decodes json' do
allow(pipeline).to receive(:json_attributes).and_return("{\"test\":\"test\"}")
extracted_data = pipeline.extract(context)
expect(extracted_data.data).to match_array([{ 'test' => 'test' }])
end
context 'when json parsing error occurs' do
it 'raises an error' do
allow(pipeline).to receive(:json_attributes).and_return("invalid")
expect { pipeline.extract(context) }.to raise_error(BulkImports::Error)
end
end
end
describe '#transform' do
it 'removes prohibited attributes from hash' do
input = { 'description' => 'description', 'issues' => [], 'milestones' => [], 'id' => 5 }
expect(Gitlab::ImportExport::AttributeCleaner).to receive(:clean).and_call_original
expect(pipeline.transform(context, input)).to eq({ 'description' => 'description' })
end
end
describe '#load' do
it 'assigns attributes, drops visibility and reconciles shared runner setting' do
expect(project).to receive(:assign_attributes).with(project_attributes)
expect(project).to receive(:reconcile_shared_runners_setting!)
expect(project).to receive(:drop_visibility_level!)
expect(project).to receive(:save!)
pipeline.load(context, project_attributes)
end
end
describe '#json_attributes' do
it 'reads raw json from file' do
filepath = File.join(tmpdir, 'self.json')
FileUtils.touch(filepath)
expect_file_read(filepath)
pipeline.json_attributes
end
end
end

View File

@@ -9,6 +9,7 @@ RSpec.describe BulkImports::Projects::Stage do
[
[0, BulkImports::Projects::Pipelines::ProjectPipeline],
[1, BulkImports::Projects::Pipelines::RepositoryPipeline],
[1, BulkImports::Projects::Pipelines::ProjectAttributesPipeline],
[2, BulkImports::Common::Pipelines::LabelsPipeline],
[2, BulkImports::Common::Pipelines::MilestonesPipeline],
[2, BulkImports::Common::Pipelines::BadgesPipeline],

View File

@@ -32,6 +32,60 @@ RSpec.describe Gitlab::Spamcheck::Client do
stub_application_setting(spam_check_endpoint_url: endpoint)
end
describe 'url scheme' do
let(:stub) { double(:spamcheck_stub, check_for_spam_issue: response) }
context 'is tls' do
let(:endpoint) { 'tls://spamcheck.example.com' }
it 'uses secure connection' do
expect(Spamcheck::SpamcheckService::Stub).to receive(:new).with(endpoint.sub(%r{^tls://}, ''),
instance_of(GRPC::Core::ChannelCredentials),
anything).and_return(stub)
subject
end
end
context 'is grpc' do
it 'uses insecure connection' do
expect(Spamcheck::SpamcheckService::Stub).to receive(:new).with(endpoint.sub(%r{^grpc://}, ''),
:this_channel_is_insecure,
anything).and_return(stub)
subject
end
end
end
describe "Rails environment" do
let(:stub) { double(:spamcheck_stub, check_for_spam_issue: response) }
context "production" do
before do
allow(Rails.env).to receive(:production?).and_return(true)
end
it 'uses secure connection' do
expect(Spamcheck::SpamcheckService::Stub).to receive(:new).with(endpoint.sub(%r{^grpc://}, ''),
instance_of(GRPC::Core::ChannelCredentials),
anything).and_return(stub)
subject
end
end
context "not production" do
before do
allow(Rails.env).to receive(:production?).and_return(false)
end
it 'uses insecure connection' do
expect(Spamcheck::SpamcheckService::Stub).to receive(:new).with(endpoint.sub(%r{^grpc://}, ''),
:this_channel_is_insecure,
anything).and_return(stub)
subject
end
end
end
describe '#issue_spam?' do
before do
allow_next_instance_of(::Spamcheck::SpamcheckService::Stub) do |instance|

View File

@@ -247,6 +247,7 @@ RSpec.describe ApplicationSetting do
end
it { is_expected.to allow_value('grpc://example.org/spam_check').for(:spam_check_endpoint_url) }
+ it { is_expected.to allow_value('tls://example.org/spam_check').for(:spam_check_endpoint_url) }
it { is_expected.not_to allow_value('https://example.org/spam_check').for(:spam_check_endpoint_url) }
it { is_expected.not_to allow_value('nonsense').for(:spam_check_endpoint_url) }
it { is_expected.not_to allow_value(nil).for(:spam_check_endpoint_url) }
@@ -259,6 +260,7 @@ RSpec.describe ApplicationSetting do
end
it { is_expected.to allow_value('grpc://example.org/spam_check').for(:spam_check_endpoint_url) }
+ it { is_expected.to allow_value('tls://example.org/spam_check').for(:spam_check_endpoint_url) }
it { is_expected.not_to allow_value('https://example.org/spam_check').for(:spam_check_endpoint_url) }
it { is_expected.not_to allow_value('nonsense').for(:spam_check_endpoint_url) }
it { is_expected.to allow_value(nil).for(:spam_check_endpoint_url) }

View File

@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe BulkImports::TreeExportService do
let_it_be(:project) { create(:project) }
let_it_be(:export_path) { Dir.mktmpdir }
-  let_it_be(:relation) { 'issues' }
+  let(:relation) { 'issues' }
subject(:service) { described_class.new(project, export_path, relation) }
@@ -25,11 +26,31 @@ RSpec.describe BulkImports::TreeExportService do
expect { service.execute }.to raise_error(BulkImports::Error, 'Unsupported relation export type')
end
end
context 'when relation is self' do
let(:relation) { 'self' }
it 'executes export on portable itself' do
expect_next_instance_of(Gitlab::ImportExport::Json::StreamingSerializer) do |serializer|
expect(serializer).to receive(:serialize_root)
end
subject.execute
end
end
end
describe '#exported_filename' do
it 'returns filename of the exported file' do
expect(subject.exported_filename).to eq('issues.ndjson')
end
context 'when relation is self' do
let(:relation) { 'self' }
it 'returns filename of the exported file' do
expect(subject.exported_filename).to eq('self.json')
end
end
end
end