commit 949d1b37e7

Merge branch 'ac/lfs-direct-upload-ee-to-ce' into 'master'

LFS direct upload port to CE

Closes #44548

See merge request gitlab-org/gitlab-ce!17752
16 changed files with 637 additions and 130 deletions
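In the ported flow, upload_authorize asks the uploader for either a local TempPath or a RemoteObject with pre-signed GetURL/StoreURL/DeleteURL links, so gitlab-workhorse can stream LFS objects straight to object storage instead of staging them in shared local storage.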
Gemfile
@@ -118,9 +118,9 @@ gem 'carrierwave', '~> 1.2'
 gem 'dropzonejs-rails', '~> 0.7.1'
 
 # for backups
-gem 'fog-aws', '~> 2.0'
+gem 'fog-aws', '~> 2.0.1'
 gem 'fog-core', '~> 1.44'
-gem 'fog-google', '~> 0.5'
+gem 'fog-google', '~> 1.3.3'
 gem 'fog-local', '~> 0.3'
 gem 'fog-openstack', '~> 0.1'
 gem 'fog-rackspace', '~> 0.1.1'

Gemfile.lock
@@ -244,10 +244,11 @@ GEM
       builder
       excon (~> 0.58)
       formatador (~> 0.2)
-    fog-google (0.5.3)
+    fog-google (1.3.3)
       fog-core
       fog-json
       fog-xml
+      google-api-client (~> 0.19.1)
     fog-json (1.0.2)
       fog-core (~> 1.0)
       multi_json (~> 1.10)
@@ -1047,9 +1048,9 @@ DEPENDENCIES
   flipper-active_record (~> 0.13.0)
   flipper-active_support_cache_store (~> 0.13.0)
   fog-aliyun (~> 0.2.0)
-  fog-aws (~> 2.0)
+  fog-aws (~> 2.0.1)
   fog-core (~> 1.44)
-  fog-google (~> 0.5)
+  fog-google (~> 1.3.3)
   fog-local (~> 0.3)
   fog-openstack (~> 0.1)
   fog-rackspace (~> 0.1.1)
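The fog-aws and fog-google version bumps are presumably what provide the signed-URL APIs (get_object_url, put_object_url, delete_object_url) that the direct-upload code below relies on.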
app/controllers/projects/lfs_storage_controller.rb
@@ -17,20 +17,23 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
   def upload_authorize
     set_workhorse_internal_api_content_type
-    render json: Gitlab::Workhorse.lfs_upload_ok(oid, size)
+
+    authorized = LfsObjectUploader.workhorse_authorize
+    authorized.merge!(LfsOid: oid, LfsSize: size)
+
+    render json: authorized
   end
 
   def upload_finalize
-    unless tmp_filename
-      render_lfs_forbidden
-      return
-    end
-
-    if store_file(oid, size, tmp_filename)
+    if store_file!(oid, size)
       head 200
     else
       render plain: 'Unprocessable entity', status: 422
     end
+  rescue ActiveRecord::RecordInvalid
+    render_400
+  rescue ObjectStorage::RemoteStoreError
+    render_lfs_forbidden
   end
 
   private
@@ -51,35 +54,28 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
     params[:size].to_i
   end
 
-  def tmp_filename
-    name = request.headers['X-Gitlab-Lfs-Tmp']
-    return if name.include?('/')
-    return unless oid.present? && name.start_with?(oid)
-
-    name
-  end
-
-  def store_file(oid, size, tmp_file)
-    # Define tmp_file_path early because we use it in "ensure"
-    tmp_file_path = File.join(LfsObjectUploader.workhorse_upload_path, tmp_file)
-
-    object = LfsObject.find_or_create_by(oid: oid, size: size)
-    file_exists = object.file.exists? || move_tmp_file_to_storage(object, tmp_file_path)
-    file_exists && link_to_project(object)
-  ensure
-    FileUtils.rm_f(tmp_file_path)
-  end
-
-  def move_tmp_file_to_storage(object, path)
-    object.file = File.open(path)
-    object.file.store!
-    object.save
-  end
-
-  def link_to_project(object)
+  def store_file!(oid, size)
+    object = LfsObject.find_by(oid: oid, size: size)
+    unless object&.file&.exists?
+      object = create_file!(oid, size)
+    end
+
+    return unless object
+
+    link_to_project!(object)
+  end
+
+  def create_file!(oid, size)
+    LfsObject.new(oid: oid, size: size).tap do |object|
+      object.file.store_workhorse_file!(params, :file)
+      object.save!
+    end
+  end
+
+  def link_to_project!(object)
     if object && !object.projects.exists?(storage_project.id)
       object.projects << storage_project
-      object.save
+      object.save!
     end
   end
 end
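For illustration only (not part of the diff; all values below are placeholders): upload_authorize now replies to gitlab-workhorse with one of two JSON shapes, depending on whether direct upload is available.

    # Ruby sketch of the two authorize payloads (hypothetical values).
    oid  = 'b68143e6463773b1b6c6fd009a76c32aeec041faff32ba2ed42fd7f708a17f80'
    size = 1575078

    # Direct upload unavailable: Workhorse writes to a local temporary path.
    local_response = {
      TempPath: 'shared/lfs-objects/tmp/upload', # hypothetical local path
      LfsOid: oid,
      LfsSize: size
    }

    # Direct upload available: Workhorse streams to pre-signed object store URLs.
    remote_response = {
      RemoteObject: {
        ID: 'cache-id-hex', # CarrierWave cache id plus random hex
        GetURL: 'https://bucket.example.com/tmp/upload/cache-id-hex?signature=...',
        DeleteURL: 'https://bucket.example.com/tmp/upload/cache-id-hex?signature=...',
        StoreURL: 'https://bucket.example.com/tmp/upload/cache-id-hex?signature=...'
      },
      LfsOid: oid,
      LfsSize: size
    }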
app/models/lfs_object.rb
@@ -11,6 +11,12 @@ class LfsObject < ActiveRecord::Base
   mount_uploader :file, LfsObjectUploader
 
+  before_save :update_file_store
+
+  def update_file_store
+    self.file_store = file.object_store
+  end
+
   def project_allowed_access?(project)
     projects.exists?(project.lfs_storage_project.id)
   end
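The before_save callback keeps the model's file_store column in sync with wherever CarrierWave actually placed the file (Store::LOCAL or Store::REMOTE), which is what the finalize specs later assert via LfsObject.last.file_store.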
app/uploaders/object_storage.rb
@@ -10,6 +10,9 @@ module ObjectStorage
   UnknownStoreError = Class.new(StandardError)
   ObjectStorageUnavailable = Class.new(StandardError)
 
+  DIRECT_UPLOAD_TIMEOUT = 4.hours
+  TMP_UPLOAD_PATH = 'tmp/upload'.freeze
+
   module Store
     LOCAL = 1
     REMOTE = 2
@@ -124,6 +127,10 @@ module ObjectStorage
       object_store_options.enabled
     end
 
+    def direct_upload_enabled?
+      object_store_options.direct_upload
+    end
+
     def background_upload_enabled?
       object_store_options.background_upload
     end
@@ -147,6 +154,45 @@ module ObjectStorage
     def serialization_column(model_class, mount_point)
       model_class.uploader_options.dig(mount_point, :mount_on) || mount_point
     end
+
+    def workhorse_authorize
+      if options = workhorse_remote_upload_options
+        { RemoteObject: options }
+      else
+        { TempPath: workhorse_local_upload_path }
+      end
+    end
+
+    def workhorse_local_upload_path
+      File.join(self.root, TMP_UPLOAD_PATH)
+    end
+
+    def workhorse_remote_upload_options
+      return unless self.object_store_enabled?
+      return unless self.direct_upload_enabled?
+
+      id = [CarrierWave.generate_cache_id, SecureRandom.hex].join('-')
+      upload_path = File.join(TMP_UPLOAD_PATH, id)
+      connection = ::Fog::Storage.new(self.object_store_credentials)
+      expire_at = Time.now + DIRECT_UPLOAD_TIMEOUT
+      options = { 'Content-Type' => 'application/octet-stream' }
+
+      {
+        ID: id,
+        GetURL: connection.get_object_url(remote_store_path, upload_path, expire_at),
+        DeleteURL: connection.delete_object_url(remote_store_path, upload_path, expire_at),
+        StoreURL: connection.put_object_url(remote_store_path, upload_path, expire_at, options)
+      }
+    end
   end
 
+  # allow to configure and overwrite the filename
+  def filename
+    @filename || super || file&.filename # rubocop:disable Gitlab/ModuleWithInstanceVariables
+  end
+
+  def filename=(filename)
+    @filename = filename # rubocop:disable Gitlab/ModuleWithInstanceVariables
+  end
+
   def file_storage?
@@ -195,10 +241,6 @@ module ObjectStorage
     end
   end
 
-  def filename
-    super || file&.filename
-  end
-
   #
   # Move the file to another store
   #
@@ -253,6 +295,18 @@ module ObjectStorage
     }
   end
 
+  def store_workhorse_file!(params, identifier)
+    filename = params["#{identifier}.name"]
+
+    if remote_object_id = params["#{identifier}.remote_id"]
+      store_remote_file!(remote_object_id, filename)
+    elsif local_path = params["#{identifier}.path"]
+      store_local_file!(local_path, filename)
+    else
+      raise RemoteStoreError, 'Bad file'
+    end
+  end
+
   private
 
   def schedule_background_upload?
@@ -261,6 +315,38 @@ module ObjectStorage
     self.file_storage?
   end
 
+  def store_remote_file!(remote_object_id, filename)
+    raise RemoteStoreError, 'Missing filename' unless filename
+
+    file_path = File.join(TMP_UPLOAD_PATH, remote_object_id)
+    file_path = Pathname.new(file_path).cleanpath.to_s
+    raise RemoteStoreError, 'Bad file path' unless file_path.start_with?(TMP_UPLOAD_PATH + '/')
+
+    self.object_store = Store::REMOTE
+
+    # TODO:
+    # This should be changed to make use of `tmp/cache` mechanism
+    # instead of using custom upload directory,
+    # using tmp/cache makes this implementation way easier than it is today
+    CarrierWave::Storage::Fog::File.new(self, storage, file_path).tap do |file|
+      raise RemoteStoreError, 'Missing file' unless file.exists?
+
+      self.filename = filename
+      self.file = storage.store!(file)
+    end
+  end
+
+  def store_local_file!(local_path, filename)
+    raise RemoteStoreError, 'Missing filename' unless filename
+
+    root_path = File.realpath(self.class.workhorse_local_upload_path)
+    file_path = File.realpath(local_path)
+    raise RemoteStoreError, 'Bad file path' unless file_path.start_with?(root_path)
+
+    self.object_store = Store::LOCAL
+    self.store!(UploadedFile.new(file_path, filename))
+  end
+
   # this is a hack around CarrierWave. The #migrate method needs to be
   # able to force the current file to the migrated file upon success.
   def file=(file)
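As a standalone illustration of how workhorse_remote_upload_options builds its URLs — a minimal Ruby sketch, assuming placeholder AWS credentials and a hypothetical bucket named 'lfs-objects':

    require 'fog/aws'

    # Minimal sketch (assumed credentials/bucket); mirrors the concern above.
    connection = Fog::Storage.new(
      provider: 'AWS',
      aws_access_key_id: 'AWS_ACCESS_KEY_ID',
      aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY',
      region: 'us-east-1'
    )

    expire_at = Time.now + 4 * 3600 # same window as DIRECT_UPLOAD_TIMEOUT (4.hours)
    upload_path = 'tmp/upload/some-unique-id'

    # Pre-signed URLs that a credential-less client (here: Workhorse) can use.
    store_url = connection.put_object_url(
      'lfs-objects', upload_path, expire_at,
      'Content-Type' => 'application/octet-stream'
    )
    get_url    = connection.get_object_url('lfs-objects', upload_path, expire_at)
    delete_url = connection.delete_object_url('lfs-objects', upload_path, expire_at)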
changelogs/unreleased/ac-lfs-direct-upload-ee-to-ce.yml (new file, 5 additions)
@@ -0,0 +1,5 @@
+---
+title: Port direct upload of LFS artifacts from EE
+merge_request: 17752
+author:
+type: added
config/gitlab.yml.example
@@ -711,7 +711,7 @@ test:
       provider: AWS # Only AWS supported at the moment
       aws_access_key_id: AWS_ACCESS_KEY_ID
       aws_secret_access_key: AWS_SECRET_ACCESS_KEY
-      region: eu-central-1
+      region: us-east-1
   artifacts:
     path: tmp/tests/artifacts
     enabled: true
@@ -725,7 +725,7 @@ test:
       provider: AWS # Only AWS supported at the moment
       aws_access_key_id: AWS_ACCESS_KEY_ID
       aws_secret_access_key: AWS_SECRET_ACCESS_KEY
-      region: eu-central-1
+      region: us-east-1
   uploads:
     storage_path: tmp/tests/public
     object_store:
@@ -734,7 +734,7 @@ test:
       provider: AWS # Only AWS supported at the moment
       aws_access_key_id: AWS_ACCESS_KEY_ID
       aws_secret_access_key: AWS_SECRET_ACCESS_KEY
-      region: eu-central-1
+      region: us-east-1
   gitlab:
     host: localhost
     port: 80
config/initializers/1_settings.rb
@@ -350,6 +350,7 @@ Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] ||
 Settings.lfs['object_store'] ||= Settingslogic.new({})
 Settings.lfs['object_store']['enabled'] = false if Settings.lfs['object_store']['enabled'].nil?
 Settings.lfs['object_store']['remote_directory'] ||= nil
+Settings.lfs['object_store']['direct_upload'] = false if Settings.lfs['object_store']['direct_upload'].nil?
 Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil?
 Settings.lfs['object_store']['proxy_download'] = false if Settings.lfs['object_store']['proxy_download'].nil?
 # Convert upload connection settings to use string keys, to make Fog happy
config/initializers/carrierwave.rb
@@ -28,16 +28,4 @@ if File.exist?(aws_file)
     # when fog_public is false and provider is AWS or Google, defaults to 600
     config.fog_authenticated_url_expiration = 1 << 29
   end
-
-  # Mocking Fog requests, based on: https://github.com/carrierwaveuploader/carrierwave/wiki/How-to%3A-Test-Fog-based-uploaders
-  if Rails.env.test?
-    Fog.mock!
-    connection = ::Fog::Storage.new(
-      aws_access_key_id: AWS_CONFIG['access_key_id'],
-      aws_secret_access_key: AWS_CONFIG['secret_access_key'],
-      provider: 'AWS',
-      region: AWS_CONFIG['region']
-    )
-    connection.directories.create(key: AWS_CONFIG['bucket'])
-  end
 end
doc/workflow/lfs/lfs_administration.md
@@ -63,6 +63,7 @@ For source installations the following settings are nested under `lfs:` and then
 |---------|-------------|---------|
 | `enabled` | Enable/disable object storage | `false` |
 | `remote_directory` | The bucket name where LFS objects will be stored| |
+| `direct_upload` | Set to true to enable direct upload of LFS without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. | `false` |
 | `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
 | `proxy_download` | Set to true to enable proxying all files served. Option allows to reduce egress traffic as this allows clients to download directly from remote storage instead of proxying all data | `false` |
 | `connection` | Various connection options described below | |
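For orientation, a hedged sketch of what these options might look like in a source installation's gitlab.yml (bucket name and credentials are placeholders):

    lfs:
      enabled: true
      object_store:
        enabled: true
        direct_upload: true        # option added by this merge request
        background_upload: false
        proxy_download: false
        remote_directory: lfs-objects   # placeholder bucket name
        connection:
          provider: AWS
          aws_access_key_id: AWS_ACCESS_KEY_ID
          aws_secret_access_key: AWS_SECRET_ACCESS_KEY
          region: us-east-1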
lib/gitlab/workhorse.rb
@@ -39,14 +39,6 @@ module Gitlab
       params
     end
 
-    def lfs_upload_ok(oid, size)
-      {
-        StoreLFSPath: LfsObjectUploader.workhorse_upload_path,
-        LfsOid: oid,
-        LfsSize: size
-      }
-    end
-
     def artifact_upload_ok
       { TempPath: JobArtifactUploader.workhorse_upload_path }
     end
spec/lib/backup/manager_spec.rb
@@ -278,6 +278,10 @@ describe Backup::Manager do
       connection.directories.create(key: Gitlab.config.backup.upload.remote_directory)
     end
 
+    after do
+      Fog.unmock!
+    end
+
     context 'target path' do
       it 'uses the tar filename by default' do
         expect_any_instance_of(Fog::Collection).to receive(:create)
spec/requests/lfs_http_spec.rb
@@ -243,17 +243,34 @@ describe 'Git LFS API and storage' do
         it_behaves_like 'responds with a file'
 
         context 'when LFS uses object storage' do
-          let(:before_get) do
-            stub_lfs_object_storage
-            lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
-          end
+          context 'when proxy download is enabled' do
+            let(:before_get) do
+              stub_lfs_object_storage(proxy_download: true)
+              lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
+            end
 
-          it 'responds with redirect' do
-            expect(response).to have_gitlab_http_status(302)
-          end
+            it 'responds with redirect' do
+              expect(response).to have_gitlab_http_status(200)
+            end
 
-          it 'responds with the file location' do
-            expect(response.location).to include(lfs_object.reload.file.path)
+            it 'responds with the workhorse send-url' do
+              expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:")
+            end
+          end
+
+          context 'when proxy download is disabled' do
+            let(:before_get) do
+              stub_lfs_object_storage(proxy_download: false)
+              lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
+            end
+
+            it 'responds with redirect' do
+              expect(response).to have_gitlab_http_status(302)
+            end
+
+            it 'responds with the file location' do
+              expect(response.location).to include(lfs_object.reload.file.path)
+            end
           end
         end
       end
@@ -962,22 +979,61 @@ describe 'Git LFS API and storage' do
       end
 
       context 'and request is sent by gitlab-workhorse to authorize the request' do
-        before do
-          put_authorize
-        end
+        shared_examples 'a valid response' do
+          before do
+            put_authorize
+          end
 
-        it 'responds with status 200' do
-          expect(response).to have_gitlab_http_status(200)
-        end
+          it 'responds with status 200' do
+            expect(response).to have_gitlab_http_status(200)
+          end
 
-        it 'uses the gitlab-workhorse content type' do
-          expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
-        end
+          it 'uses the gitlab-workhorse content type' do
+            expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+          end
+        end
 
-        it 'responds with status 200, location of lfs store and object details' do
-          expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
-          expect(json_response['LfsOid']).to eq(sample_oid)
-          expect(json_response['LfsSize']).to eq(sample_size)
-        end
+        shared_examples 'a local file' do
+          it_behaves_like 'a valid response' do
+            it 'responds with status 200, location of lfs store and object details' do
+              expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
+              expect(json_response['RemoteObject']).to be_nil
+              expect(json_response['LfsOid']).to eq(sample_oid)
+              expect(json_response['LfsSize']).to eq(sample_size)
+            end
+          end
+        end
+
+        context 'when using local storage' do
+          it_behaves_like 'a local file'
+        end
+
+        context 'when using remote storage' do
+          context 'when direct upload is enabled' do
+            before do
+              stub_lfs_object_storage(enabled: true, direct_upload: true)
+            end
+
+            it_behaves_like 'a valid response' do
+              it 'responds with status 200, location of lfs remote store and object details' do
+                expect(json_response['TempPath']).to be_nil
+                expect(json_response['RemoteObject']).to have_key('ID')
+                expect(json_response['RemoteObject']).to have_key('GetURL')
+                expect(json_response['RemoteObject']).to have_key('StoreURL')
+                expect(json_response['RemoteObject']).to have_key('DeleteURL')
+                expect(json_response['LfsOid']).to eq(sample_oid)
+                expect(json_response['LfsSize']).to eq(sample_size)
+              end
+            end
+          end
+
+          context 'when direct upload is disabled' do
+            before do
+              stub_lfs_object_storage(enabled: true, direct_upload: false)
+            end
+
+            it_behaves_like 'a local file'
+          end
+        end
       end
@@ -1009,26 +1065,81 @@ describe 'Git LFS API and storage' do
       end
 
       context 'with object storage enabled' do
-        before do
-          stub_lfs_object_storage(background_upload: true)
-        end
+        context 'and direct upload enabled' do
+          let!(:fog_connection) do
+            stub_lfs_object_storage(direct_upload: true)
+          end
+
+          ['123123', '../../123123'].each do |remote_id|
+            context "with invalid remote_id: #{remote_id}" do
+              subject do
+                put_finalize_with_args('file.remote_id' => remote_id)
+              end
+
+              it 'responds with status 403' do
+                subject
+
+                expect(response).to have_gitlab_http_status(403)
+              end
+            end
+          end
+
+          context 'with valid remote_id' do
+            before do
+              fog_connection.directories.get('lfs-objects').files.create(
+                key: 'tmp/upload/12312300',
+                body: 'content'
+              )
+            end
+
+            subject do
+              put_finalize_with_args(
+                'file.remote_id' => '12312300',
+                'file.name' => 'name')
+            end
+
+            it 'responds with status 200' do
+              subject
+
+              expect(response).to have_gitlab_http_status(200)
+            end
+
+            it 'schedules migration of file to object storage' do
+              subject
+
+              expect(LfsObject.last.projects).to include(project)
+            end
+
+            it 'have valid file' do
+              subject
+
+              expect(LfsObject.last.file_store).to eq(ObjectStorage::Store::REMOTE)
+              expect(LfsObject.last.file).to be_exists
+            end
+          end
+        end
 
-        it 'schedules migration of file to object storage' do
-          expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
+        context 'and background upload enabled' do
+          before do
+            stub_lfs_object_storage(background_upload: true)
+          end
 
-          put_finalize(with_tempfile: true)
+          it 'schedules migration of file to object storage' do
+            expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
+
+            put_finalize(with_tempfile: true)
+          end
         end
       end
     end
 
     context 'invalid tempfiles' do
-      it 'rejects slashes in the tempfile name (path traversal' do
-        put_finalize('foo/bar')
-        expect(response).to have_gitlab_http_status(403)
+      before do
+        lfs_object.destroy
       end
 
-      it 'rejects tempfile names that do not start with the oid' do
-        put_finalize("foo#{sample_oid}")
+      it 'rejects slashes in the tempfile name (path traversal)' do
+        put_finalize('../bar', with_tempfile: true)
         expect(response).to have_gitlab_http_status(403)
       end
     end
@@ -1118,7 +1229,7 @@ describe 'Git LFS API and storage' do
       end
 
       it 'with location of lfs store and object details' do
-        expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
+        expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
         expect(json_response['LfsOid']).to eq(sample_oid)
         expect(json_response['LfsSize']).to eq(sample_size)
       end
@@ -1221,22 +1332,29 @@ describe 'Git LFS API and storage' do
     end
 
     def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false)
-      setup_tempfile(lfs_tmp) if with_tempfile
+      upload_path = LfsObjectUploader.workhorse_local_upload_path
+      file_path = upload_path + '/' + lfs_tmp if lfs_tmp
 
-      put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", nil,
-          headers.merge('X-Gitlab-Lfs-Tmp' => lfs_tmp).compact
+      if with_tempfile
+        FileUtils.mkdir_p(upload_path)
+        FileUtils.touch(file_path)
+      end
+
+      args = {
+        'file.path' => file_path,
+        'file.name' => File.basename(file_path)
+      }.compact
+
+      put_finalize_with_args(args)
+    end
+
+    def put_finalize_with_args(args)
+      put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", args, headers
     end
 
     def lfs_tmp_file
       "#{sample_oid}012345678"
     end
 
-    def setup_tempfile(lfs_tmp)
-      upload_path = LfsObjectUploader.workhorse_upload_path
-
-      FileUtils.mkdir_p(upload_path)
-      FileUtils.touch(File.join(upload_path, lfs_tmp))
-    end
-
   end
 
   def enable_lfs
spec/support/stub_object_storage.rb
@@ -1,17 +1,22 @@
 module StubConfiguration
   def stub_object_storage_uploader(
-    config:, uploader:, remote_directory:,
+    config:,
+    uploader:,
+    remote_directory:,
     enabled: true,
     proxy_download: false,
-    background_upload: false)
-    Fog.mock!
-
+    background_upload: false,
+    direct_upload: false
+  )
     allow(config).to receive(:enabled) { enabled }
     allow(config).to receive(:proxy_download) { proxy_download }
     allow(config).to receive(:background_upload) { background_upload }
+    allow(config).to receive(:direct_upload) { direct_upload }
+
+    return unless enabled
+
+    Fog.mock!
 
     ::Fog::Storage.new(uploader.object_store_credentials).tap do |connection|
       begin
         connection.directories.create(key: remote_directory)
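The LFS specs above call stub_lfs_object_storage; a plausible wrapper over this helper (hypothetical, not shown in this diff) would look like:

    # Hypothetical convenience wrapper; the specs above suggest it forwards
    # to stub_object_storage_uploader with LFS-specific arguments.
    def stub_lfs_object_storage(**params)
      stub_object_storage_uploader(
        config: Gitlab.config.lfs.object_store,
        uploader: LfsObjectUploader,
        remote_directory: 'lfs-objects', # matches the bucket used in the specs
        **params
      )
    end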
spec/uploaders/gitlab_uploader_spec.rb
@@ -27,7 +27,7 @@ describe GitlabUploader do
   describe '#file_cache_storage?' do
     context 'when file storage is used' do
       before do
-        uploader_class.cache_storage(:file)
+        expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::File }
       end
 
       it { is_expected.to be_file_cache_storage }
@@ -35,7 +35,7 @@ describe GitlabUploader do
 
     context 'when is remote storage' do
       before do
-        uploader_class.cache_storage(:fog)
+        expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::Fog }
      end
 
       it { is_expected.not_to be_file_cache_storage }
spec/uploaders/object_storage_spec.rb
@@ -21,11 +21,11 @@ describe ObjectStorage do
   let(:object) { build_stubbed(:user) }
   let(:uploader) { uploader_class.new(object, :file) }
 
-  before do
-    allow(uploader_class).to receive(:object_store_enabled?).and_return(true)
-  end
-
   describe '#object_store=' do
+    before do
+      allow(uploader_class).to receive(:object_store_enabled?).and_return(true)
+    end
+
     it "reload the local storage" do
       uploader.object_store = described_class::Store::LOCAL
       expect(uploader.file_storage?).to be_truthy
@@ -35,28 +35,28 @@ describe ObjectStorage do
       uploader.object_store = described_class::Store::REMOTE
       expect(uploader.file_storage?).to be_falsey
     end
-  end
 
-  context 'object_store is Store::LOCAL' do
-    before do
-      uploader.object_store = described_class::Store::LOCAL
-    end
+    context 'object_store is Store::LOCAL' do
+      before do
+        uploader.object_store = described_class::Store::LOCAL
+      end
 
-    describe '#store_dir' do
-      it 'is the composition of (base_dir, dynamic_segment)' do
-        expect(uploader.store_dir).to start_with("uploads/-/system/user/")
+      describe '#store_dir' do
+        it 'is the composition of (base_dir, dynamic_segment)' do
+          expect(uploader.store_dir).to start_with("uploads/-/system/user/")
+        end
       end
     end
-  end
 
-  context 'object_store is Store::REMOTE' do
-    before do
-      uploader.object_store = described_class::Store::REMOTE
-    end
+    context 'object_store is Store::REMOTE' do
+      before do
+        uploader.object_store = described_class::Store::REMOTE
+      end
 
-    describe '#store_dir' do
-      it 'is the composition of (dynamic_segment)' do
-        expect(uploader.store_dir).to start_with("user/")
+      describe '#store_dir' do
+        it 'is the composition of (dynamic_segment)' do
+          expect(uploader.store_dir).to start_with("user/")
+        end
       end
     end
   end
@@ -92,7 +92,7 @@ describe ObjectStorage do
   describe '#file_cache_storage?' do
     context 'when file storage is used' do
       before do
-        uploader_class.cache_storage(:file)
+        expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::File }
       end
 
       it { expect(uploader).to be_file_cache_storage }
@@ -100,7 +100,7 @@ describe ObjectStorage do
 
     context 'when is remote storage' do
       before do
-        uploader_class.cache_storage(:fog)
+        expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::Fog }
       end
 
       it { expect(uploader).not_to be_file_cache_storage }
@@ -298,7 +298,9 @@ describe ObjectStorage do
     let(:remote_directory) { 'directory' }
 
     before do
-      uploader_class.storage_options double(object_store: double(remote_directory: remote_directory))
+      allow(uploader_class).to receive(:options) do
+        double(object_store: double(remote_directory: remote_directory))
+      end
     end
 
     subject { uploader.fog_directory }
@@ -310,7 +312,9 @@ describe ObjectStorage do
     let(:connection) { Settingslogic.new("provider" => "AWS") }
 
     before do
-      uploader_class.storage_options double(object_store: double(connection: connection))
+      allow(uploader_class).to receive(:options) do
+        double(object_store: double(connection: connection))
+      end
    end
 
     subject { uploader.fog_credentials }
@@ -323,4 +327,304 @@ describe ObjectStorage do
 
     it { is_expected.to eq(false) }
   end
+
+  describe '.workhorse_authorize' do
+    subject { uploader_class.workhorse_authorize }
+
+    before do
+      # ensure that we use regular Fog libraries
+      # other tests might call `Fog.mock!` and
+      # it will make tests to fail
+      Fog.unmock!
+    end
+
+    shared_examples 'uses local storage' do
+      it "returns temporary path" do
+        is_expected.to have_key(:TempPath)
+
+        expect(subject[:TempPath]).to start_with(uploader_class.root)
+        expect(subject[:TempPath]).to include(described_class::TMP_UPLOAD_PATH)
+      end
+
+      it "does not return remote store" do
+        is_expected.not_to have_key('RemoteObject')
+      end
+    end
+
+    shared_examples 'uses remote storage' do
+      it "returns remote store" do
+        is_expected.to have_key(:RemoteObject)
+
+        expect(subject[:RemoteObject]).to have_key(:ID)
+        expect(subject[:RemoteObject]).to have_key(:GetURL)
+        expect(subject[:RemoteObject]).to have_key(:DeleteURL)
+        expect(subject[:RemoteObject]).to have_key(:StoreURL)
+        expect(subject[:RemoteObject][:GetURL]).to include(described_class::TMP_UPLOAD_PATH)
+        expect(subject[:RemoteObject][:DeleteURL]).to include(described_class::TMP_UPLOAD_PATH)
+        expect(subject[:RemoteObject][:StoreURL]).to include(described_class::TMP_UPLOAD_PATH)
+      end
+
+      it "does not return local store" do
+        is_expected.not_to have_key('TempPath')
+      end
+    end
+
+    context 'when object storage is disabled' do
+      before do
+        allow(Gitlab.config.uploads.object_store).to receive(:enabled) { false }
+      end
+
+      it_behaves_like 'uses local storage'
+    end
+
+    context 'when object storage is enabled' do
+      before do
+        allow(Gitlab.config.uploads.object_store).to receive(:enabled) { true }
+      end
+
+      context 'when direct upload is enabled' do
+        before do
+          allow(Gitlab.config.uploads.object_store).to receive(:direct_upload) { true }
+        end
+
+        context 'uses AWS' do
+          before do
+            expect(uploader_class).to receive(:object_store_credentials) do
+              { provider: "AWS",
+                aws_access_key_id: "AWS_ACCESS_KEY_ID",
+                aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
+                region: "eu-central-1" }
+            end
+          end
+
+          it_behaves_like 'uses remote storage' do
+            let(:storage_url) { "https://uploads.s3-eu-central-1.amazonaws.com/" }
+
+            it 'returns links for S3' do
+              expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+              expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+              expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+            end
+          end
+        end
+
+        context 'uses Google' do
+          before do
+            expect(uploader_class).to receive(:object_store_credentials) do
+              { provider: "Google",
+                google_storage_access_key_id: 'ACCESS_KEY_ID',
+                google_storage_secret_access_key: 'SECRET_ACCESS_KEY' }
+            end
+          end
+
+          it_behaves_like 'uses remote storage' do
+            let(:storage_url) { "https://storage.googleapis.com/uploads/" }
+
+            it 'returns links for Google Cloud' do
+              expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+              expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+              expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+            end
+          end
+        end
+
+        context 'uses GDK/minio' do
+          before do
+            expect(uploader_class).to receive(:object_store_credentials) do
+              { provider: "AWS",
+                aws_access_key_id: "AWS_ACCESS_KEY_ID",
+                aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
+                endpoint: 'http://127.0.0.1:9000',
+                path_style: true,
+                region: "gdk" }
+            end
+          end
+
+          it_behaves_like 'uses remote storage' do
+            let(:storage_url) { "http://127.0.0.1:9000/uploads/" }
+
+            it 'returns links for S3' do
+              expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+              expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+              expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+            end
+          end
+        end
+      end
+
+      context 'when direct upload is disabled' do
+        before do
+          allow(Gitlab.config.uploads.object_store).to receive(:direct_upload) { false }
+        end
+
+        it_behaves_like 'uses local storage'
+      end
+    end
+  end
+
+  describe '#store_workhorse_file!' do
+    subject do
+      uploader.store_workhorse_file!(params, :file)
+    end
+
+    context 'when local file is used' do
+      context 'when valid file is used' do
+        let(:target_path) do
+          File.join(uploader_class.root, uploader_class::TMP_UPLOAD_PATH)
+        end
+
+        before do
+          FileUtils.mkdir_p(target_path)
+        end
+
+        context 'when no filename is specified' do
+          let(:params) do
+            { "file.path" => "test/file" }
+          end
+
+          it 'raises an error' do
+            expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing filename/)
+          end
+        end
+
+        context 'when invalid file is specified' do
+          let(:file_path) do
+            File.join(target_path, "..", "test.file")
+          end
+
+          before do
+            FileUtils.touch(file_path)
+          end
+
+          let(:params) do
+            { "file.path" => file_path,
+              "file.name" => "my_file.txt" }
+          end
+
+          it 'raises an error' do
+            expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file path/)
+          end
+        end
+
+        context 'when filename is specified' do
+          let(:params) do
+            { "file.path" => tmp_file,
+              "file.name" => "my_file.txt" }
+          end
+
+          let(:tmp_file) { Tempfile.new('filename', target_path) }
+
+          before do
+            FileUtils.touch(tmp_file)
+          end
+
+          after do
+            FileUtils.rm_f(tmp_file)
+          end
+
+          it 'succeeds' do
+            expect { subject }.not_to raise_error
+
+            expect(uploader).to be_exists
+          end
+
+          it 'proper path is being used' do
+            subject
+
+            expect(uploader.path).to start_with(uploader_class.root)
+            expect(uploader.path).to end_with("my_file.txt")
+          end
+
+          it 'source file to not exist' do
+            subject
+
+            expect(File.exist?(tmp_file.path)).to be_falsey
+          end
+        end
+      end
+    end
+
+    context 'when remote file is used' do
+      let!(:fog_connection) do
+        stub_uploads_object_storage(uploader_class)
+      end
+
+      context 'when valid file is used' do
+        context 'when no filename is specified' do
+          let(:params) do
+            { "file.remote_id" => "test/123123" }
+          end
+
+          it 'raises an error' do
+            expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing filename/)
+          end
+        end
+
+        context 'when invalid file is specified' do
+          let(:params) do
+            { "file.remote_id" => "../test/123123",
+              "file.name" => "my_file.txt" }
+          end
+
+          it 'raises an error' do
+            expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file path/)
+          end
+        end
+
+        context 'when non existing file is specified' do
+          let(:params) do
+            { "file.remote_id" => "test/12312300",
+              "file.name" => "my_file.txt" }
+          end
+
+          it 'raises an error' do
+            expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing file/)
+          end
+        end
+
+        context 'when filename is specified' do
+          let(:params) do
+            { "file.remote_id" => "test/123123",
+              "file.name" => "my_file.txt" }
+          end
+
+          let!(:fog_file) do
+            fog_connection.directories.get('uploads').files.create(
+              key: 'tmp/upload/test/123123',
+              body: 'content'
+            )
+          end
+
+          it 'succeeds' do
+            expect { subject }.not_to raise_error
+
+            expect(uploader).to be_exists
+          end
+
+          it 'path to not be temporary' do
+            subject
+
+            expect(uploader.path).not_to be_nil
+            expect(uploader.path).not_to include('tmp/upload')
+            expect(uploader.url).to include('/my_file.txt')
+          end
+
+          it 'url is used' do
+            subject
+
+            expect(uploader.url).not_to be_nil
+            expect(uploader.url).to include('/my_file.txt')
+          end
+        end
+      end
+    end
+
+    context 'when no file is used' do
+      let(:params) { {} }
+
+      it 'raises an error' do
+        expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file/)
+      end
+    end
+  end
 end