Support default `cache_control` in GCS Active Storage

Marc-Antoine Leblond 2021-06-16 08:38:13 -04:00
parent 25292a3ef4
commit e3fe39285f
No known key found for this signature in database
GPG Key ID: D0F797795E11CB0D
4 changed files with 87 additions and 4 deletions

activestorage/CHANGELOG.md

@@ -1,3 +1,12 @@
* Allow setting a `Cache-Control` on files uploaded to GCS.
```yaml
gcs:
  service: GCS
  ...
  cache_control: "public, max-age=3600"
```
*maleblond*
* The parameters sent to `ffmpeg` for generating a video preview image are now
configurable under `config.active_storage.video_preview_arguments`.

activestorage/lib/active_storage/service/gcs_service.rb

@@ -19,7 +19,7 @@ module ActiveStorage
# binary and attachment when the file's content type requires it. The only way to force them is to
# store them as object's metadata.
content_disposition = content_disposition_with(type: disposition, filename: filename) if disposition && filename
-bucket.create_file(io, key, md5: checksum, content_type: content_type, content_disposition: content_disposition)
+bucket.create_file(io, key, md5: checksum, cache_control: @config[:cache_control], content_type: content_type, content_disposition: content_disposition)
rescue Google::Cloud::InvalidArgumentError
raise ActiveStorage::IntegrityError
end
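With `cache_control` configured, `upload` now bakes the directive into the object's fixed metadata, so GCS serves it on every response for that object. A minimal sketch of the equivalent call made directly through the google-cloud-storage gem (the bucket name and key are placeholders, not part of this change):

```ruby
require "digest"
require "stringio"
require "google/cloud/storage"

storage = Google::Cloud::Storage.new
bucket  = storage.bucket "my-rails-uploads" # placeholder bucket name

# Mirrors what GCSService#upload does when @config[:cache_control] is set:
# the directive is stored as object metadata, not sent as a transient header.
bucket.create_file StringIO.new("Hello"), "some/key",
                   md5: Digest::MD5.base64digest("Hello"),
                   cache_control: "public, max-age=3600",
                   content_type: "text/plain"

bucket.file("some/key").cache_control # => "public, max-age=3600"
```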
@@ -84,8 +84,19 @@ module ActiveStorage
def url_for_direct_upload(key, expires_in:, checksum:, **)
instrument :url, key: key do |payload|
-generated_url = bucket.signed_url key, method: "PUT", expires: expires_in, content_md5: checksum
+headers = {}
+version = :v2
+
+if @config[:cache_control].present?
+  headers["Cache-Control"] = @config[:cache_control]
+  # v2 signing doesn't support non `x-goog-` headers. Only switch to v4 signing
+  # if necessary for back-compat; v4 limits the expiration of the URL to 7 days
+  # whereas v2 has no limit
+  version = :v4
+end
+
+generated_url = bucket.signed_url(key, method: "PUT", expires: expires_in, content_md5: checksum,
+  headers: headers, version: version)
payload[:url] = generated_url
generated_url
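The comment above is the heart of this hunk: v2 signatures can't cover headers outside the `x-goog-` namespace, while v4 can sign arbitrary headers such as `Cache-Control` but caps the URL's lifetime at 7 days, so the service only switches to v4 when `cache_control` is configured. A sketch of the two call shapes against the gem (the key, checksum, and values are placeholders; `bucket` as in the sketch above):

```ruby
require "google/cloud/storage"

bucket = Google::Cloud::Storage.new.bucket "my-rails-uploads" # placeholder

# v2 (kept as the default here): any expiry is accepted, but non x-goog-
# request headers are not bound into the signature.
bucket.signed_url "some/key", method: "PUT", expires: 2_592_000,  # 30 days
                  content_md5: "7Qdih1MuhjZehB6Sv8UNjA==",        # hypothetical
                  version: :v2

# v4: arbitrary headers can be signed, forcing the client to send
# Cache-Control on the PUT, but expires may be at most 7 days (604,800 s).
bucket.signed_url "some/key", method: "PUT", expires: 604_800,
                  content_md5: "7Qdih1MuhjZehB6Sv8UNjA==",        # hypothetical
                  headers: { "Cache-Control" => "public, max-age=3600" },
                  version: :v4
```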
@@ -95,7 +106,13 @@ module ActiveStorage
def headers_for_direct_upload(key, checksum:, filename: nil, disposition: nil, **)
content_disposition = content_disposition_with(type: disposition, filename: filename) if filename
{ "Content-MD5" => checksum, "Content-Disposition" => content_disposition }
headers = { "Content-MD5" => checksum, "Content-Disposition" => content_disposition }
if @config[:cache_control].present?
headers["Cache-Control"] = @config[:cache_control]
end
headers
end
private
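Because the upload itself is performed by the browser during a direct upload, the object only gets the directive if the client sends it, so `headers_for_direct_upload` now advertises `Cache-Control` next to the existing headers. Roughly the shape of the hash a configured service hands back (the key, checksum, and filename are hypothetical, the `Content-Disposition` value is abbreviated, and `service` is assumed to be a `GCSService` configured with `cache_control: "public, max-age=3600"`):

```ruby
# `service` is assumed to be configured with cache_control: "public, max-age=3600".
service.headers_for_direct_upload(
  "some/key",                                          # hypothetical blob key
  checksum: "7Qdih1MuhjZehB6Sv8UNjA==",                # hypothetical MD5 digest
  filename: ActiveStorage::Filename.new("report.pdf"), # hypothetical filename
  disposition: :attachment
)
# => {
#      "Content-MD5"         => "7Qdih1MuhjZehB6Sv8UNjA==",
#      "Content-Disposition" => "attachment; filename=\"report.pdf\"; ...",
#      "Cache-Control"       => "public, max-age=3600"
#    }
```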
@@ -137,7 +154,7 @@ module ActiveStorage
end
def client
-@client ||= Google::Cloud::Storage.new(**config.except(:bucket))
+@client ||= Google::Cloud::Storage.new(**config.except(:bucket, :cache_control))
end
end
end
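The final hunk is needed because `Google::Cloud::Storage.new` only accepts its own keywords (`project_id`, `credentials`, and so on); `bucket` and the new `cache_control` are Active Storage-level options and must be stripped before the client is built. An illustrative config (all values are placeholders):

```ruby
require "google/cloud/storage"

config = {
  project_id:    "my-project",            # placeholder
  credentials:   "path/to/keyfile.json",  # placeholder
  bucket:        "my-rails-uploads",      # consumed by GCSService, not the client
  cache_control: "public, max-age=3600"   # likewise service-level only
}

# Mirrors GCSService#client: drop the service-level keys, forward the rest.
client = Google::Cloud::Storage.new(**config.except(:bucket, :cache_control))
```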

activestorage/test/service/gcs_service_test.rb

@@ -57,6 +57,36 @@ if SERVICE_CONFIGURATIONS[:gcs]
@service.delete key
end
test "direct upload with cache control" do
config_with_cache_control = { gcs: service_config[:gcs].merge({ cache_control: "public, max-age=1800" }) }
service = ActiveStorage::Service.configure(:gcs, config_with_cache_control)
key = SecureRandom.base58(24)
data = "Some text"
checksum = Digest::MD5.base64digest(data)
url = service.url_for_direct_upload(key, expires_in: 5.minutes, content_type: "text/plain", content_length: data.size, checksum: checksum)
uri = URI.parse url
request = Net::HTTP::Put.new uri.request_uri
request.body = data
headers = service.headers_for_direct_upload(key, checksum: checksum, filename: ActiveStorage::Filename.new("test.txt"), disposition: :attachment)
assert_equal(headers["Cache-Control"], "public, max-age=1800")
headers.each do |k, v|
request.add_field k, v
end
request.add_field "Content-Type", ""
Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
http.request request
end
url = service.url(key, expires_in: 2.minutes, disposition: :inline, content_type: "text/html", filename: ActiveStorage::Filename.new("test.html"))
response = Net::HTTP.get_response(URI(url))
assert_equal("public, max-age=1800", response["Cache-Control"])
ensure
service.delete(key)
end
test "upload with content_type and content_disposition" do
key = SecureRandom.base58(24)
data = "Something else entirely!"
@@ -85,6 +115,23 @@ if SERVICE_CONFIGURATIONS[:gcs]
@service.delete key
end
test "upload with cache_control" do
key = SecureRandom.base58(24)
data = "Something else entirely!"
config_with_cache_control = { gcs: service_config[:gcs].merge({ cache_control: "public, max-age=1800" }) }
service = ActiveStorage::Service.configure(:gcs, config_with_cache_control)
service.upload(key, StringIO.new(data), checksum: Digest::MD5.base64digest(data), content_type: "text/plain")
url = service.url(key, expires_in: 2.minutes, disposition: :inline, content_type: "text/html", filename: ActiveStorage::Filename.new("test.html"))
response = Net::HTTP.get_response(URI(url))
assert_equal "public, max-age=1800", response["Cache-Control"]
ensure
service.delete key
end
test "update metadata" do
key = SecureRandom.base58(24)
data = "Something else entirely!"

guides/source/active_storage_overview.md

@@ -235,6 +235,15 @@ google:
bucket: ""
```
Optionally, provide `Cache-Control` metadata to set on uploaded assets:
```yaml
google:
  service: GCS
  ...
  cache_control: "public, max-age=3600"
```
Add the [`google-cloud-storage`](https://github.com/GoogleCloudPlatform/google-cloud-ruby/tree/master/google-cloud-storage) gem to your `Gemfile`:
```ruby
@@ -822,6 +831,7 @@ Take care to allow:
* `Content-Disposition` (except for Azure Storage)
* `x-ms-blob-content-disposition` (for Azure Storage only)
* `x-ms-blob-type` (for Azure Storage only)
* `Cache-Control` (for GCS, only if `cache_control` is set)
No CORS configuration is required for the Disk service since it shares your app's origin.
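For GCS in particular, that means the bucket's CORS rule must list `Cache-Control` among the allowed request headers whenever `cache_control` is configured, or the browser will refuse the direct-upload `PUT`. A sketch using the google-cloud-storage gem's `Bucket#cors` API (the bucket name, origin, and `max_age` are placeholders):

```ruby
require "google/cloud/storage"

storage = Google::Cloud::Storage.new
bucket  = storage.bucket "my-rails-uploads" # placeholder bucket name

bucket.cors do |c|
  c.add_rule ["https://www.example.com"],   # your app's origin
             "PUT",
             headers: [
               "Content-Type",
               "Content-MD5",
               "Content-Disposition",
               "Cache-Control" # only needed when cache_control is set
             ],
             max_age: 3600
end
```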