
Set a public ACL for files uploaded to a public GCS service

Brendan Abbott 2020-04-29 05:44:17 +10:00 committed by GitHub
parent cf7c27f2ff
commit 43503bdfec
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 116 additions and 4 deletions


@@ -1,3 +1,18 @@
* Add support for `upload` options with `GCSService`.
For example, to add `Cache-Control` headers to uploaded files, modify
`config/storage.yml` with the `upload` key and corresponding Hash:
```yml
google:
service: GCS
...
upload:
cache_control: "public, max-age=60"
```
*Brendan Abbott*
* Add `config.active_storage.web_image_content_types` to allow applications
to add content types (like `image/webp`) in which variants can be processed,
instead of letting those images be converted to the fallback PNG format.


@@ -7,9 +7,13 @@ module ActiveStorage
# Wraps the Google Cloud Storage as an Active Storage service. See ActiveStorage::Service for the generic API
# documentation that applies to all services.
class Service::GCSService < Service
def initialize(public: false, **config)
attr_reader :upload_options
def initialize(public: false, upload: {}, **config)
@config = config
@public = public
@upload_options = upload
@upload_options[:acl] = "public_read" if public?
end
def upload(key, io, checksum: nil, content_type: nil, disposition: nil, filename: nil)
@@ -19,7 +23,7 @@
# binary and attachment when the file's content type requires it. The only way to force them is to
# store them as object's metadata.
content_disposition = content_disposition_with(type: disposition, filename: filename) if disposition && filename
bucket.create_file(io, key, md5: checksum, content_type: content_type, content_disposition: content_disposition)
bucket.create_file(io, key, md5: checksum, content_type: content_type, content_disposition: content_disposition, **upload_options)
rescue Google::Cloud::InvalidArgumentError
raise ActiveStorage::IntegrityError
end
@@ -84,7 +88,7 @@ module ActiveStorage
def url_for_direct_upload(key, expires_in:, checksum:, **)
instrument :url, key: key do |payload|
generated_url = bucket.signed_url key, method: "PUT", expires: expires_in, content_md5: checksum
generated_url = bucket.signed_url key, method: "PUT", expires: expires_in, content_md5: checksum, **upload_options
payload[:url] = generated_url
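
With these changes, a GCS service configured with `public: true` uploads every object with a `public_read` ACL, and any extra keys under `upload:` in `config/storage.yml` are forwarded both to server-side uploads (`create_file`) and to direct-upload signed URLs. A minimal sketch of such a configuration; the service name, credentials path, project, and bucket below are placeholders, not part of this commit:

```yaml
# config/storage.yml — hypothetical public GCS service exercising the new options
google_public:
  service: GCS
  credentials: <%= Rails.root.join("path/to/keyfile.json") %>
  project: "my-project"
  bucket: "my-public-bucket"
  public: true  # upload_options[:acl] becomes "public_read"
  upload:
    cache_control: "public, max-age=60"
```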


@@ -9,6 +9,34 @@ if SERVICE_CONFIGURATIONS[:gcs_public]
include ActiveStorage::Service::SharedServiceTests
test "public acl options" do
assert_equal "public_read", @service.upload_options[:acl]
end
test "uploaded file is accessible by all users" do
assert_includes @service.bucket.find_file(@key).acl.readers, "allUsers"
end
test "direct upload file is accessible by all users" do
key = SecureRandom.base58(24)
data = "Something else entirely!"
checksum = Digest::MD5.base64digest(data)
url = @service.url_for_direct_upload(key, expires_in: 5.minutes, content_type: "text/plain", content_length: data.size, checksum: checksum)
uri = URI.parse url
request = Net::HTTP::Put.new uri.request_uri
request.body = data
request.add_field "Content-Type", ""
request.add_field "Content-MD5", checksum
Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
http.request request
end
assert_includes @service.bucket.find_file(key).acl.readers, "allUsers"
ensure
@service.delete key
end
test "public URL generation" do
url = @service.url(@key, filename: ActiveStorage::Filename.new("avatar.png"))


@@ -57,6 +57,33 @@ if SERVICE_CONFIGURATIONS[:gcs]
@service.delete key
end
test "direct upload with custom upload options" do
cache_control = "public, max-age=60"
service = build_service(upload: { cache_control: cache_control })
key = SecureRandom.base58(24)
data = "Something else entirely!"
checksum = Digest::MD5.base64digest(data)
url = service.url_for_direct_upload(key, expires_in: 5.minutes, content_type: "text/plain", content_length: data.size, checksum: checksum)
uri = URI.parse url
request = Net::HTTP::Put.new uri.request_uri
request.body = data
service.headers_for_direct_upload(key, checksum: checksum, filename: ActiveStorage::Filename.new("test.txt"), disposition: :attachment).each do |k, v|
request.add_field k, v
end
request.add_field "Content-Type", ""
Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
http.request request
end
url = service.url(key, expires_in: 2.minutes, disposition: :inline, content_type: "text/html", filename: ActiveStorage::Filename.new("test.html"))
response = Net::HTTP.get_response(URI(url))
assert_equal(cache_control, response["Cache-Control"])
ensure
service.delete key
end
test "upload with content_type and content_disposition" do
key = SecureRandom.base58(24)
data = "Something else entirely!"
@@ -85,6 +112,21 @@ if SERVICE_CONFIGURATIONS[:gcs]
@service.delete key
end
test "upload with custom upload options" do
key = SecureRandom.base58(24)
data = "Something else entirely!"
cache_control = "public, max-age=60"
service = build_service(upload: { cache_control: cache_control })
begin
service.upload(key, StringIO.new(data), checksum: Digest::MD5.base64digest(data), disposition: :attachment, filename: ActiveStorage::Filename.new("test.txt"), content_type: "text/plain")
assert_equal cache_control, service.bucket.find_file(key).cache_control
ensure
service.delete key
end
end
test "update metadata" do
key = SecureRandom.base58(24)
data = "Something else entirely!"
@@ -104,6 +146,11 @@ if SERVICE_CONFIGURATIONS[:gcs]
assert_match(/storage\.googleapis\.com\/.*response-content-disposition=inline.*test\.txt.*response-content-type=text%2Fplain/,
@service.url(@key, expires_in: 2.minutes, disposition: :inline, filename: ActiveStorage::Filename.new("test.txt"), content_type: "text/plain"))
end
private
def build_service(configuration)
ActiveStorage::Service.configure(:gcs, SERVICE_CONFIGURATIONS.deep_merge(gcs: configuration))
end
end
else
puts "Skipping GCS Service tests because no GCS configuration was supplied"
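
Both suites are gated on `SERVICE_CONFIGURATIONS`, which Active Storage's test helper reads from its service configuration file (typically `test/service/configurations.yml`). A hypothetical `gcs_public` entry that would enable the new public-bucket tests; project, keyfile path, and bucket are placeholders:

```yaml
# test/service/configurations.yml — hypothetical entry enabling the gcs_public tests
gcs_public:
  service: GCS
  project: "my-project"
  credentials: "path/to/keyfile.json"
  bucket: "my-public-bucket"
  public: true
```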


@@ -118,6 +118,7 @@ amazon:
secret_access_key: ""
region: ""
bucket: ""
public: false
```
Optionally provide a Hash of upload options:
@@ -129,7 +130,7 @@
secret_access_key: ""
region: ""
bucket: ""
upload:
server_side_encryption: "" # 'aws:kms' or 'AES256'
```
@@ -176,6 +177,7 @@ google:
credentials: <%= Rails.root.join("path/to/keyfile.json") %>
project: ""
bucket: ""
public: false
```
Optionally provide a Hash of credentials instead of a keyfile path:
@@ -198,6 +200,22 @@ google:
bucket: ""
```
Optionally provide a Hash of upload options:
```yaml
google:
service: GCS
credentials: <%= Rails.root.join("path/to/keyfile.json") %>
project: ""
bucket: ""
upload:
acl: "" # will be set to `public_read` on public buckets
cache_control: ""
storage_class: ""
```
The [Google Cloud Storage SDK docs](https://googleapis.dev/ruby/google-cloud-storage/latest/Google/Cloud/Storage/Bucket.html#create_file-instance_method) detail other possible upload options.
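In principle, any other keyword accepted by the SDK's `Bucket#create_file` can be listed under `upload:` as well. A hypothetical example that also attaches custom object metadata; the `metadata` keyword comes from the SDK documentation, not from this commit, so verify it against the `google-cloud-storage` version you use:
```yaml
google:
  service: GCS
  credentials: <%= Rails.root.join("path/to/keyfile.json") %>
  project: ""
  bucket: ""
  upload:
    cache_control: "public, max-age=3600"
    metadata: { "uploaded-by": "active_storage" }
```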
Add the [`google-cloud-storage`](https://github.com/GoogleCloudPlatform/google-cloud-ruby/tree/master/google-cloud-storage) gem to your `Gemfile`:
```ruby