# frozen_string_literal: true

# This class breaks the actual CarrierWave concept.
# Every uploader should use a base_dir that is model agnostic so we can build
# back URLs from base_dir-relative paths saved in the `Upload` model.
#
# As the `.base_dir` is model dependent and **not** saved in the upload model (see #upload_path),
# there is no way to build back the correct file path without the model, which defies
# the CarrierWave way of storing files.
#
class FileUploader < GitlabUploader
  include UploaderHelper
  include RecordsUploads::Concern
  include ObjectStorage::Concern
  prepend ObjectStorage::Extension::RecordsUploads

  MARKDOWN_PATTERN = %r{\!?\[.*?\]\(/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*?)\)}.freeze
  DYNAMIC_PATH_PATTERN = %r{.*/(?<secret>\h{10,32})/(?<identifier>.*)}.freeze
  VALID_SECRET_PATTERN = %r{\A\h{10,32}\z}.freeze
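
  # Illustrative examples (values are assumptions, not from the original source)
  # of what the patterns above capture, using a made-up 32-character hex secret:
  #
  #   MARKDOWN_PATTERN.match("![shot](/uploads/0123456789abcdef0123456789abcdef/shot.png)")
  #   # => secret: "0123456789abcdef0123456789abcdef", file: "shot.png"
  #
  #   DYNAMIC_PATH_PATTERN.match("group/project/0123456789/avatar.png")
  #   # => secret: "0123456789", identifier: "avatar.png"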

  InvalidSecret = Class.new(StandardError)

  after :remove, :prune_store_dir

  # FileUploader does not run in a model transaction, so we can simply
  # enqueue a job after the :store hook.
  after :store, :schedule_background_upload

  def self.root
    File.join(options.storage_path, 'uploads')
  end

  def self.absolute_path(upload)
    File.join(
      absolute_base_dir(upload.model),
      upload.path # already contains the dynamic_segment, see #upload_path
    )
  end
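
  # A rough sketch (assumed values, for illustration only) of how an absolute
  # path is composed on local storage for a legacy-storage project at "group/project":
  #
  #   FileUploader.root                    # => "<storage_path>/uploads"
  #   FileUploader.base_dir(project)       # => "group/project"
  #   upload.path                          # => "<secret>/screenshot.png"
  #   FileUploader.absolute_path(upload)   # => "<storage_path>/uploads/group/project/<secret>/screenshot.png"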

  def self.base_dir(model, store = Store::LOCAL)
    decorated_model = model
    decorated_model = Storage::Hashed.new(model) if store == Store::REMOTE

    model_path_segment(decorated_model)
  end

  # Used in migrations and import/export
  def self.absolute_base_dir(model)
    File.join(root, base_dir(model))
  end

  # Returns the part of `store_dir` that can change based on the model's current
  # path
  #
  # This is used to build Upload paths dynamically based on the model's current
  # namespace and path, allowing us to ignore renames or transfers.
  #
  # model - Object that responds to `full_path` and `disk_path`
  #
  # Returns a String without a trailing slash
  def self.model_path_segment(model)
    case model
    when Storage::Hashed then model.disk_path
    else
      model.hashed_storage?(:attachments) ? model.disk_path : model.full_path
    end
  end
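
  # Assumed examples of the model-dependent segment:
  #
  #   FileUploader.model_path_segment(project)        # => "group/project" (legacy storage)
  #   FileUploader.model_path_segment(hashed_project) # => hashed_project.disk_path (hashed storage)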

  def self.generate_secret
    SecureRandom.hex
  end

  def self.extract_dynamic_path(path)
    DYNAMIC_PATH_PATTERN.match(path)
  end

  def upload_paths(identifier)
    [
      File.join(secret, identifier),
      File.join(base_dir(Store::REMOTE), secret, identifier)
    ]
  end
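
  # Sketch of the two candidate paths this returns (values assumed for illustration):
  #
  #   uploader.upload_paths("screenshot.png")
  #   # => ["<secret>/screenshot.png",
  #   #     "<remote base_dir>/<secret>/screenshot.png"]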

  attr_accessor :model

  def initialize(model, mounted_as = nil, **uploader_context)
    super(model, nil, **uploader_context)

    @model = model
    apply_context!(uploader_context)
  end

  def initialize_copy(from)
    super

    @secret = self.class.generate_secret
    @upload = nil # calling record_upload would delete the old upload if set
  end

  # Enforce the usage of hashed storage when storing to the remote store,
  # as the FileMover doesn't support object storage.
  def base_dir(store = nil)
    self.class.base_dir(@model, store || object_store)
  end

  # We don't need to know the actual path; an uploader instance should be
  # able to yield the file content on demand, so we should build the digest
  def absolute_path
    self.class.absolute_path(@upload)
  end

  def upload_path
    if file_storage?
      # Legacy path relative to project.full_path
      local_storage_path(identifier)
    else
      remote_storage_path(identifier)
    end
  end

  def local_storage_path(file_identifier)
    File.join(dynamic_segment, file_identifier)
  end

  def remote_storage_path(file_identifier)
    File.join(store_dir, file_identifier)
  end

  def store_dirs
    {
      Store::LOCAL => File.join(base_dir, dynamic_segment),
      Store::REMOTE => File.join(base_dir(ObjectStorage::Store::REMOTE), dynamic_segment)
    }
  end
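
  # Illustrative shape of the result (values assumed): the local entry uses the
  # model-dependent base_dir, the remote entry uses the hashed-storage base_dir.
  #
  #   uploader.store_dirs
  #   # => { Store::LOCAL  => "group/project/<secret>",
  #   #      Store::REMOTE => "<hashed disk_path>/<secret>" }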

  def to_h
    {
      alt: markdown_name,
      url: secure_url,
      markdown: markdown_link
    }
  end
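
  # A rough sketch of the hash this yields (the exact alt and markdown values
  # come from helper methods outside this file, so treat them as assumptions):
  #
  #   uploader.to_h
  #   # => { alt: "screenshot",
  #   #      url: "/uploads/<secret>/screenshot.png",
  #   #      markdown: "![screenshot](/uploads/<secret>/screenshot.png)" }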

  def upload=(value)
    super

    return unless value
    return if apply_context!(value.uploader_context)

    # Fall back to the regex-based extraction
    if matches = self.class.extract_dynamic_path(value.path)
      @secret = matches[:secret]
      @identifier = matches[:identifier]
    end
  end

  def secret
    @secret ||= self.class.generate_secret

    raise InvalidSecret unless @secret =~ VALID_SECRET_PATTERN

    @secret
  end
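
  # For reference: SecureRandom.hex defaults to 16 random bytes, so a freshly
  # generated secret is a 32-character hex string, e.g. (made-up value)
  # "4e07408562bedb8b60ce05c1decfe3ad", which satisfies VALID_SECRET_PATTERN.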

  # Returns a new uploader with a copy of the file on another project
  def self.copy_to(uploader, to_project)
    moved = self.new(to_project)
    moved.object_store = uploader.object_store
    moved.filename = uploader.filename

    moved.copy_file(uploader.file)
    moved
  end
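
  # Hypothetical usage (variable names assumed):
  #
  #   new_uploader = FileUploader.copy_to(old_uploader, target_project)
  #   new_uploader.filename  # same as old_uploader.filename
  #   new_uploader.secret    # freshly generated, differs from old_uploader.secret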

  def copy_file(file)
    to_path = if file_storage?
                File.join(self.class.root, store_path)
              else
                store_path
              end

    self.file = file.copy_to(to_path)
    record_upload # after_store is not triggered
  end

  private

  def apply_context!(uploader_context)
    @secret, @identifier = uploader_context.values_at(:secret, :identifier)

    !!(@secret && @identifier)
  end

  def build_upload
    super.tap do |upload|
      upload.secret = secret
    end
  end

  def prune_store_dir
    storage.delete_dir!(store_dir) # only remove when empty
  end

  def identifier
    @identifier ||= filename
  end

  def dynamic_segment
    secret
  end

  def secure_url
    File.join('/uploads', @secret, filename)
  end
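
  # Sketch of the resulting URL path (secret and filename assumed):
  #
  #   secure_url  # => "/uploads/0123456789abcdef0123456789abcdef/screenshot.png"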
end