# rails/activestorage/lib/active_storage/service/disk_service.rb
# frozen_string_literal: true

require "fileutils"
require "pathname"
require "openssl"
require "active_support/core_ext/numeric/bytes"

module ActiveStorage
  # Wraps a local disk path as an Active Storage service. See ActiveStorage::Service for the generic API
  # documentation that applies to all services.
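  #
  # A minimal configuration sketch (the +local+ service name and storage path below
  # are illustrative, not part of this file):
  #
  #   # config/storage.yml
  #   local:
  #     service: Disk
  #     root: <%= Rails.root.join("storage") %>
  #
  #   # config/environments/development.rb
  #   config.active_storage.service = :local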
  class Service::DiskService < Service
    attr_accessor :root

    def initialize(root:, public: false, **options)
      @root = root
      @public = public
    end
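
    # Uploads +io+ to the file identified by +key+, optionally verifying an MD5
    # checksum along the way. A rough usage sketch (+service+, +blob+ and +file+ are
    # hypothetical):
    #
    #   checksum = OpenSSL::Digest::MD5.file(file.path).base64digest
    #   service.upload(blob.key, file, checksum: checksum)
    #
    # If the digest of what was written does not match +checksum+, the file is deleted
    # again and ActiveStorage::IntegrityError is raised.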
    def upload(key, io, checksum: nil, **)
      instrument :upload, key: key, checksum: checksum do
        IO.copy_stream(io, make_path_for(key))
        ensure_integrity_of(key, checksum) if checksum
      end
    end
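
    # Downloads the file at +key+, either yielding it to a block in roughly 5 MB
    # chunks or returning the whole file as a binary string. A rough sketch
    # (+service+, +key+ and +io+ are hypothetical):
    #
    #   data = service.download(key)                       # whole file in memory
    #   service.download(key) { |chunk| io.write(chunk) }  # streamed chunk by chunk
    #
    # Raises ActiveStorage::FileNotFoundError if nothing is stored under +key+.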
    def download(key, &block)
      if block_given?
        instrument :streaming_download, key: key do
          stream key, &block
        end
      else
        instrument :download, key: key do
          File.binread path_for(key)
        rescue Errno::ENOENT
          raise ActiveStorage::FileNotFoundError
        end
      end
    end
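
    # Reads only the bytes of +key+ covered by +range+. Fetching the first kilobyte
    # might look like this (sketch, +service+ and +key+ are hypothetical):
    #
    #   service.download_chunk(key, 0...1024)  # => first 1024 bytes as a binary string
    #
    # Raises ActiveStorage::FileNotFoundError if nothing is stored under +key+.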
    def download_chunk(key, range)
      instrument :download_chunk, key: key, range: range do
        File.open(path_for(key), "rb") do |file|
          file.seek range.begin
          file.read range.size
        end
      rescue Errno::ENOENT
        raise ActiveStorage::FileNotFoundError
      end
    end

    def delete(key)
      instrument :delete, key: key do
        File.delete path_for(key)
      rescue Errno::ENOENT
        # Ignore files already deleted
      end
    end

    def delete_prefixed(prefix)
      instrument :delete_prefixed, prefix: prefix do
        Dir.glob(path_for("#{prefix}*")).each do |path|
          FileUtils.rm_rf(path)
        end
      end
    end

    def exist?(key)
      instrument :exist, key: key do |payload|
        answer = File.exist? path_for(key)
        payload[:exist] = answer
        answer
      end
    end
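
    # Returns a signed URL that a client can PUT file data to; for the Disk service
    # this points back at the application's own disk upload endpoint. The key, content
    # type, length and checksum are baked into the verified token. A hedged sketch of
    # a caller (argument values are illustrative):
    #
    #   service.url_for_direct_upload(
    #     blob.key,
    #     expires_in: 5.minutes,
    #     content_type: "image/png",
    #     content_length: file.size,
    #     checksum: checksum
    #   )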
    def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
      instrument :url, key: key do |payload|
        verified_token_with_expiration = ActiveStorage.verifier.generate(
          {
            key: key,
            content_type: content_type,
            content_length: content_length,
            checksum: checksum,
            service_name: name
          },
          expires_in: expires_in,
          purpose: :blob_token
        )

        generated_url = url_helpers.update_rails_disk_service_url(verified_token_with_expiration, host: current_host)

        payload[:url] = generated_url

        generated_url
      end
    end

    def headers_for_direct_upload(key, content_type:, **)
      { "Content-Type" => content_type }
    end
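
    # Absolute path on disk for +key+, partitioned into two levels of directories
    # derived from the first four characters of the key (see +folder_for+ below).
    # For example, with an illustrative root and key:
    #
    #   service.path_for("6f3e9abc")  # => "storage/6f/3e/6f3e9abc"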
    def path_for(key) #:nodoc:
      File.join root, folder_for(key), key
    end

    private
      def private_url(key, expires_in:, filename:, content_type:, disposition:, **)
        generate_url(key, expires_in: expires_in, filename: filename, content_type: content_type, disposition: disposition)
      end

      def public_url(key, filename:, content_type: nil, disposition: :attachment, **)
        generate_url(key, expires_in: nil, filename: filename, content_type: content_type, disposition: disposition)
      end
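
      # Builds a signed +rails_disk_service_url+ for +key+. Content type and disposition
      # are embedded in the verified token rather than taken from query parameters, so a
      # client cannot tamper with them. ActiveStorage::Current.host must be set first,
      # for example (sketch, typically done in a controller):
      #
      #   ActiveStorage::Current.host = request.base_url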
      def generate_url(key, expires_in:, filename:, content_type:, disposition:)
        content_disposition = content_disposition_with(type: disposition, filename: filename)
        verified_key_with_expiration = ActiveStorage.verifier.generate(
          {
            key: key,
            disposition: content_disposition,
            content_type: content_type,
            service_name: name
          },
          expires_in: expires_in,
          purpose: :blob_key
        )

        if current_host.blank?
          raise ArgumentError, "Cannot generate URL for #{filename} using Disk service, please set ActiveStorage::Current.host."
        end

        current_uri = URI.parse(current_host)

        url_helpers.rails_disk_service_url(verified_key_with_expiration,
          protocol: current_uri.scheme,
          host: current_uri.host,
          port: current_uri.port,
          filename: filename
        )
      end

      def stream(key)
        File.open(path_for(key), "rb") do |file|
          while data = file.read(5.megabytes)
            yield data
          end
        end
      rescue Errno::ENOENT
        raise ActiveStorage::FileNotFoundError
      end

      def folder_for(key)
        [ key[0..1], key[2..3] ].join("/")
      end

      def make_path_for(key)
        path_for(key).tap { |path| FileUtils.mkdir_p File.dirname(path) }
      end

      def ensure_integrity_of(key, checksum)
        unless OpenSSL::Digest::MD5.file(path_for(key)).base64digest == checksum
          delete key
          raise ActiveStorage::IntegrityError
        end
      end

      def url_helpers
        @url_helpers ||= Rails.application.routes.url_helpers
      end

      def current_host
        ActiveStorage::Current.host
      end
  end
end