589b2db06c
This sets up all the basics for importing Phabricator tasks into GitLab issues. To import all tasks from a Phabricator instance into GitLab, we import them into a new project whose repository is disabled. The import is hooked into the regular ProjectImport setup but, like the GitHub parallel importer, handles all the import work itself. In this iteration, each page of tasks is imported in a separate Sidekiq job. The first thing we do when requesting a new page of tasks is schedule the import of the next page; to avoid deadlocks, we only allow a single job per worker type to run at the same time. For now we only import basic Issue information; this should be extended to richer information.
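A minimal sketch of that page-by-page scheduling pattern, assuming hypothetical names (ImportTasksWorker, Phabricator::Client, client_for) and a simplified client API rather than the actual GitLab code, could look like this:

# Illustrative only: class names, the Phabricator client API, and the Issue
# attributes below are assumptions, not the actual GitLab implementation.
class ImportTasksWorker
  include Sidekiq::Worker

  def perform(project_id, page_cursor = nil)
    project = Project.find(project_id)
    page = client_for(project).tasks(after_cursor: page_cursor)

    # Schedule the next page before processing the current one; the setup described
    # above additionally allows only one running job per worker type, which is what
    # keeps this self-scheduling from deadlocking.
    self.class.perform_async(project_id, page.next_cursor) if page.has_next_page?

    # Only basic Issue information is created from each task for now.
    page.tasks.each do |task|
      project.issues.create!(title: task.title, description: task.description)
    end
  end

  private

  # Hypothetical helper: builds a Conduit API client from whatever URL and
  # credentials were stored when the import was configured.
  def client_for(project)
    Phabricator::Client.new(project.import_url, project.import_data&.credentials)
  end
end

The initial job would then be enqueued by the import service with something like ImportTasksWorker.perform_async(project.id); each job keeps the chain going by enqueueing its successor before touching the current page.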
127 lines
3.7 KiB
Ruby
require 'spec_helper'

describe Gitlab::ImportSources do
  describe '.options' do
    it 'returns a hash' do
      expected =
        {
          'GitHub' => 'github',
          'Bitbucket Cloud' => 'bitbucket',
          'Bitbucket Server' => 'bitbucket_server',
          'GitLab.com' => 'gitlab',
          'Google Code' => 'google_code',
          'FogBugz' => 'fogbugz',
          'Repo by URL' => 'git',
          'GitLab export' => 'gitlab_project',
          'Gitea' => 'gitea',
          'Manifest file' => 'manifest',
          'Phabricator' => 'phabricator'
        }

      expect(described_class.options).to eq(expected)
    end
  end

  describe '.values' do
    it 'returns an array' do
      expected =
        %w(
          github
          bitbucket
          bitbucket_server
          gitlab
          google_code
          fogbugz
          git
          gitlab_project
          gitea
          manifest
          phabricator
        )

      expect(described_class.values).to eq(expected)
    end
  end

  describe '.importer_names' do
    it 'returns an array of importer names' do
      expected =
        %w(
          github
          bitbucket
          bitbucket_server
          gitlab
          google_code
          fogbugz
          gitlab_project
          gitea
          phabricator
        )

      expect(described_class.importer_names).to eq(expected)
    end
  end

  describe '.importer' do
    import_sources = {
      'github' => Gitlab::GithubImport::ParallelImporter,
      'bitbucket' => Gitlab::BitbucketImport::Importer,
      'bitbucket_server' => Gitlab::BitbucketServerImport::Importer,
      'gitlab' => Gitlab::GitlabImport::Importer,
      'google_code' => Gitlab::GoogleCodeImport::Importer,
      'fogbugz' => Gitlab::FogbugzImport::Importer,
      'git' => nil,
      'gitlab_project' => Gitlab::ImportExport::Importer,
      'gitea' => Gitlab::LegacyGithubImport::Importer,
      'manifest' => nil,
      'phabricator' => Gitlab::PhabricatorImport::Importer
    }

    import_sources.each do |name, klass|
      it "returns #{klass} when given #{name}" do
        expect(described_class.importer(name)).to eq(klass)
      end
    end
  end

  describe '.title' do
    import_sources = {
      'github' => 'GitHub',
      'bitbucket' => 'Bitbucket Cloud',
      'bitbucket_server' => 'Bitbucket Server',
      'gitlab' => 'GitLab.com',
      'google_code' => 'Google Code',
      'fogbugz' => 'FogBugz',
      'git' => 'Repo by URL',
      'gitlab_project' => 'GitLab export',
      'gitea' => 'Gitea',
      'manifest' => 'Manifest file',
      'phabricator' => 'Phabricator'
    }

    import_sources.each do |name, title|
      it "returns #{title} when given #{name}" do
        expect(described_class.title(name)).to eq(title)
      end
    end
  end

  describe 'imports_repository? checker' do
    let(:allowed_importers) { %w[github gitlab_project bitbucket_server phabricator] }

    it 'fails if any importer other than the allowed ones implements this method' do
      current_importers = described_class.values.select { |kind| described_class.importer(kind).try(:imports_repository?) }
      not_allowed_importers = current_importers - allowed_importers

      expect(not_allowed_importers).to be_empty, failure_message(not_allowed_importers)
    end

    def failure_message(importers_class_names)
      <<-MSG
        It looks like the #{importers_class_names.join(', ')} importers implement their own way to import the repository.
        That means that the LFS object download must be handled for each of them. You can use 'LfsImportService' and
        'LfsDownloadService' to implement it. After that, add the importer name to the list of allowed importers in this spec.
      MSG
    end
  end
end