mirror of https://github.com/fog/fog.git, synced 2022-11-09 13:51:43 -05:00
fix internet archive use of headers, remove acls
parent 8b65626047, commit cb8f86ca6c
10 changed files with 112 additions and 108 deletions
@@ -6,7 +6,9 @@ module Fog
    COMPLIANT_BUCKET_NAMES = /^(?:[a-z]|\d(?!\d{0,2}(?:\.\d{1,3}){3}$))(?:[a-z0-9]|\-(?![\.])){1,61}[a-z0-9]$/

    DOMAIN_NAME = 'us.archive.org'
    DOMAIN_NAME = 'archive.org'

    API_DOMAIN_NAME = 's3.us.' + DOMAIN_NAME

    extend Fog::Provider
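For orientation, a minimal sketch (plain Ruby, values copied from the hunk above) of what the new endpoint constants evaluate to:

    # Sketch only: shows how the two new constants compose.
    DOMAIN_NAME     = 'archive.org'
    API_DOMAIN_NAME = 's3.us.' + DOMAIN_NAME
    puts API_DOMAIN_NAME   # prints "s3.us.archive.org"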
@@ -1,28 +1,34 @@
require 'fog/core/model'
require 'fog/internet_archive/models/storage/files'
require 'fog/internet_archive/models/storage/ia_attributes.rb'

module Fog
  module Storage
    class InternetArchive

      class Directory < Fog::Model
        VALID_ACLS = ['private', 'public-read', 'public-read-write', 'authenticated-read']

        # See http://docs.amazonwebservices.com/AmazonS3/latest/API/RESTBucketPUT.html
        INVALID_LOCATIONS = ['us-east-1']

        attr_reader :acl
        extend Fog::Storage::IAAttributes::ClassMethods
        include Fog::Storage::IAAttributes::InstanceMethods

        identity :key, :aliases => ['Name', 'name']

        attribute :creation_date, :aliases => 'CreationDate'

        # treat these differently
        attribute :collections
        attribute :subjects

        ia_metadata_attribute :ignore_preexisting_bucket
        ia_metadata_attribute :interactive_priority

        # acl for internet archive is always public-read
        def acl
          'public-read'
        end

        def acl=(new_acl)
          unless VALID_ACLS.include?(new_acl)
            raise ArgumentError.new("acl must be one of [#{VALID_ACLS.join(', ')}]")
          else
            @acl = new_acl
          end
          'public-read'
        end

        # See http://archive.org/help/abouts3.txt
@@ -61,21 +67,12 @@ module Fog
        end

        def public=(new_public)
          self.acl = new_public ? 'public-read' : 'private'
          new_public
          'public-read'
        end

        def public_url
          requires :key
          if service.get_bucket_acl(key).body['AccessControlList'].detect {|grant| grant['Grantee']['URI'] == 'http://acs.amazonaws.com/groups/global/AllUsers' && grant['Permission'] == 'READ'}
            if key.to_s =~ Fog::InternetArchive::COMPLIANT_BUCKET_NAMES
              "http://#{key}.s3.#{Fog::InternetArchive::DOMAIN_NAME}"
            else
              "http://s3.#{Fog::InternetArchive::DOMAIN_NAME}/#{key}"
            end
          else
            nil
          end
          "http://#{Fog::InternetArchive::DOMAIN_NAME}/details/#{key}"
        end

        def save
@@ -83,7 +80,11 @@ module Fog

          options = {}

          options['x-amz-acl'] = acl if acl
          options['x-archive-ignore-preexisting-bucket'] = ignore_preexisting_bucket if ignore_preexisting_bucket
          options['x-archive-interactive-priority'] = interactive_priority if interactive_priority

          set_metadata_array_headers(:collections, options)
          set_metadata_array_headers(:subjects, options)

          if location = attributes[:location] || (self.service.region != 'us-east-1' && self.service.region)
            options['LocationConstraint'] = location
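A hedged usage sketch of the Directory change above: acl is now fixed at 'public-read', and public_url no longer inspects bucket ACL grants but always points at the item's details page (the item name below is made up; assumes Internet Archive credentials are already configured for fog):

    require 'fog'

    # Sketch only, not part of the commit.
    dir = Fog::Storage[:internetarchive].directories.new(:key => 'my-example-item')
    dir.acl        # => "public-read" (always)
    dir.public_url # => "http://archive.org/details/my-example-item"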
@@ -1,10 +1,15 @@
require 'fog/core/model'
require 'fog/internet_archive/models/storage/ia_attributes.rb'

module Fog
  module Storage
    class InternetArchive

      class File < Fog::Model

        extend Fog::Storage::IAAttributes::ClassMethods
        include Fog::Storage::IAAttributes::InstanceMethods

        # @see AWS Object docs http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectOps.html

        # @note Chunk size to use for multipart uploads.
@@ -32,29 +37,6 @@ module Fog
        attribute :collections
        attribute :subjects

        # set_metadata_array_headers(:collections, options)
        def set_metadata_array_headers(array_attribute, options={})
          attr_values = Array(self.send(array_attribute))
          opt_values = options.collect do |key,value|
            options.delete(key) if (key.to_s =~ /^x-(amz||archive)-meta(\d*)-#{array_attribute.to_s[0..-2]}/)
          end
          values = (attr_values + opt_values).compact.sort.uniq
          # got the values, now add them back to the options
          if values.size == 1
            options["x-archive-meta-#{array_attribute.to_s[0..-2]}"] = values.first
          elsif values.size > 1
            values[0,99].each_with_index do |value, i|
              options["x-archive-meta#{format("%02d", i+1)}-#{array_attribute.to_s[0..-2]}"] = value
            end
          end

        end

        # IA specific headers, alias to x-amz-[name] and x-archive-[name]
        def self.ia_metadata_attribute(name)
          attribute(name, :aliases=>['amz','archive'].collect{|p|"x-#{p}-#{name.to_s.tr('_','-')}"})
        end

        ia_metadata_attribute :auto_make_bucket
        ia_metadata_attribute :cascade_delete
        ia_metadata_attribute :ignore_preexisting_bucket
@@ -63,27 +45,14 @@ module Fog
        ia_metadata_attribute :queue_derive
        ia_metadata_attribute :size_hint

        # you can add other x-archive-metadata-* values, but these are standard
        IA_STANDARD_METADATA_FIELDS = %q[hidden, title, collection, creator, mediatype, description, date, subject, licenseurl , pick, noindex, notes, rights, contributor, language, coverage, credits]

        # for x-archive-metadata-mediatype, these are the valid values
        IA_VALID_MEDIA_TYPES = %q[audio, data, etree, image, movies, software, texts, web]

        # Set file's access control list (ACL).
        #
        # valid acls: private, public-read, public-read-write, authenticated-read
        #
        # @param [String] new_acl one of valid options
        # @return [String] @acl
        #
        def acl=(new_acl)
          valid_acls = ['private', 'public-read', 'public-read-write', 'authenticated-read']
          unless valid_acls.include?(new_acl)
            raise ArgumentError.new("acl must be one of [#{valid_acls.join(', ')}]")
          end
          @acl = new_acl
        # acl for internet archive is always public-read
        def acl
          'public-read'
        end

        def acl=(new_acl)
          'public-read'
        end

        # Get file's body if exists, else ' '.
        #
@@ -143,11 +112,11 @@ module Fog
        #
        def destroy(options = {})
          requires :directory, :key
          options['x-archive-cascade-delete'] = cascade_delete if cascade_delete
          service.delete_object(directory.key, key, options)
          true
        end


        remove_method :metadata
        def metadata
          attributes.reject {|key, value| !(key.to_s =~ /^x-(amz||archive)-meta/)}
@@ -178,18 +147,11 @@ module Fog
        # @return [String] new_puplic
        #
        def public=(new_public)
          if new_public
            @acl = 'public-read'
          else
            @acl = 'private'
          end
          new_public
          'public-read'
        end


        # Get pubically acessible url via http GET.
        # Checks persmissions before creating.
        # Defaults to s3 subdomain or compliant bucket name
        # Get publicly acessible url via http GET.
        #
        # required attributes: directory, key
        #
@@ -197,15 +159,7 @@ module Fog
        #
        def public_url
          requires :directory, :key
          if service.get_object_acl(directory.key, key).body['AccessControlList'].detect {|grant| grant['Grantee']['URI'] == 'http://acs.amazonaws.com/groups/global/AllUsers' && grant['Permission'] == 'READ'}
            if directory.key.to_s =~ Fog::InternetArchive::COMPLIANT_BUCKET_NAMES
              "http://#{directory.key}.s3.#{Fog::InternetArchive::DOMAIN_NAME}/#{Fog::InternetArchive.escape(key)}".gsub('%2F','/')
            else
              "http://s3.#{Fog::InternetArchive::DOMAIN_NAME}/#{directory.key}/#{Fog::InternetArchive.escape(key)}".gsub('%2F','/')
            end
          else
            nil
          end
          "http://#{Fog::InternetArchive::DOMAIN_NAME}/download/#{directory.key}/#{Fog::InternetArchive.escape(key)}".gsub('%2F','/')
        end

        # Save file with body as contents to directory.key with name key via http PUT
@@ -213,7 +167,6 @@ module Fog
        # required attributes: body, directory, key
        #
        # @param [Hash] options
        # @option options [String] acl sets x-amz-acl HTTP header. Valid values include, private | public-read | public-read-write | authenticated-read | bucket-owner-read | bucket-owner-full-control
        # @option options [String] cache_controle sets Cache-Control header. For example, 'No-cache'
        # @option options [String] content_disposition sets Content-Disposition HTTP header. For exampple, 'attachment; filename=testing.txt'
        # @option options [String] content_encoding sets Content-Encoding HTTP header. For example, 'x-gzip'
@@ -227,9 +180,8 @@ module Fog
        def save(options = {})
          requires :body, :directory, :key
          if options != {}
            Fog::Logger.deprecation("options param is deprecated, use acl= instead [light_black](#{caller.first})[/]")
            Fog::Logger.deprecation("options param is deprecated [light_black](#{caller.first})[/]")
          end
          options['x-amz-acl'] ||= @acl if @acl
          options['Cache-Control'] = cache_control if cache_control
          options['Content-Disposition'] = content_disposition if content_disposition
          options['Content-Encoding'] = content_encoding if content_encoding
@@ -237,12 +189,8 @@ module Fog
          options['Content-Type'] = content_type if content_type
          options['Expires'] = expires if expires
          options.merge!(metadata)
          # options['x-amz-storage-class'] = storage_class if storage_class
          # options['x-amz-server-side-encryption'] = encryption if encryption

          options['x-archive-auto-make-bucket'] = auto_make_bucket if auto_make_bucket
          options['x-archive-cascade-delete'] = cascade_delete if cascade_delete
          options['x-archive-ignore-preexisting-bucket'] = ignore_preexisting_bucket if ignore_preexisting_bucket
          options['x-archive-interactive-priority'] = interactive_priority if interactive_priority
          options['x-archive-keep-old-version'] = keep_old_version if keep_old_version
          options['x-archive-queue-derive'] = queue_derive if queue_derive
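To make the File changes concrete, a rough sketch of how the IA-specific attributes end up as x-archive-* request headers on save, and what public_url now returns (item and file names are invented; assumes a configured Internet Archive connection):

    require 'fog'

    # Sketch only, not part of the commit.
    directory = Fog::Storage[:internetarchive].directories.get('my-example-item')
    file = directory.files.new(
      :key              => 'hello.txt',
      :body             => 'hello from fog',
      :auto_make_bucket => 1,                   # sent as x-archive-auto-make-bucket
      :collections      => ['test_collection']  # sent as x-archive-meta-collection
    )
    file.save
    file.public_url
    # => "http://archive.org/download/my-example-item/hello.txt"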
39  lib/fog/internet_archive/models/storage/ia_attributes.rb  (new file)

@@ -0,0 +1,39 @@
module Fog
  module Storage
    module IAAttributes

      # you can add other x-archive-metadata-* values, but these are standard
      IA_STANDARD_METADATA_FIELDS = %q[hidden, title, collection, creator, mediatype, description, date, subject, licenseurl , pick, noindex, notes, rights, contributor, language, coverage, credits]

      # for x-archive-metadata-mediatype, these are the valid values
      IA_VALID_MEDIA_TYPES = %q[audio, data, etree, image, movies, software, texts, web]

      module ClassMethods
        def ia_metadata_attribute(name)
          attribute(name, :aliases=>['amz','archive'].collect{|p|"x-#{p}-#{name.to_s.tr('_','-')}"})
        end
      end

      module InstanceMethods
        # set_metadata_array_headers(:collections, options)
        def set_metadata_array_headers(array_attribute, options={})
          attr_values = Array(self.send(array_attribute))
          opt_values = options.collect do |key,value|
            options.delete(key) if (key.to_s =~ /^x-(amz||archive)-meta(\d*)-#{array_attribute.to_s[0..-2]}/)
          end
          values = (attr_values + opt_values).compact.sort.uniq
          # got the values, now add them back to the options
          if values.size == 1
            options["x-archive-meta-#{array_attribute.to_s[0..-2]}"] = values.first
          elsif values.size > 1
            values[0,99].each_with_index do |value, i|
              options["x-archive-meta#{format("%02d", i+1)}-#{array_attribute.to_s[0..-2]}"] = value
            end
          end

        end
      end

    end
  end
end
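A short sketch of what the extracted helpers produce; the header names follow directly from the code above, and the expansion of an array attribute into numbered x-archive-meta headers matches the directory tests further down (item name invented; assumes a configured connection):

    require 'fog'

    # ia_metadata_attribute :interactive_priority aliases the attribute to both
    # header spellings: x-amz-interactive-priority and x-archive-interactive-priority.

    # set_metadata_array_headers expands an array attribute into numbered headers:
    dir = Fog::Storage[:internetarchive].directories.new(:key => 'my-example-item')
    dir.collections = ['test_collection', 'opensource']
    options = {}
    dir.set_metadata_array_headers(:collections, options)
    options
    # => { "x-archive-meta01-collection" => "opensource",
    #      "x-archive-meta02-collection" => "test_collection" }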
@@ -4,7 +4,7 @@ module Fog
    class Real

      # Get a hash of hidden fields for form uploading to S3, in the form {:field_name => :field_value}
      # Form should look like: <form action="http://#{bucket_name}.s3.#{Fog::InternetArchive::DOMAIN_NAME}/" method="post" enctype="multipart/form-data">
      # Form should look like: <form action="http://#{bucket_name}.#{Fog::InternetArchive::API_DOMAIN_NAME}/" method="post" enctype="multipart/form-data">
      # These hidden fields should then appear, followed by a field named 'file' which is either a textarea or file input.
      #
      # @param options Hash:
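Worth noting: the effective form action host does not change here, it is only re-expressed through the new constant. A sketch with an invented bucket name:

    require 'fog'

    # Sketch only: both the old and the new comment describe this same URL.
    bucket_name = 'my-example-item'
    "http://#{bucket_name}.#{Fog::InternetArchive::API_DOMAIN_NAME}/"
    # => "http://my-example-item.s3.us.archive.org/"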
@@ -79,6 +79,7 @@ module Fog
        http_url(params, expires)
      end


      private

      def scheme_host_path_query(params, expires)
@@ -198,12 +199,7 @@ module Fog
        require 'mime/types'
        setup_credentials(options)
        options[:region] ||= 'us-east-1'
        @host = options[:host] || case options[:region]
        when 'us-east-1'
          "s3.#{Fog::InternetArchive::DOMAIN_NAME}"
        else
          "s3-#{options[:region]}.#{Fog::InternetArchive::DOMAIN_NAME}"
        end
        @host = options[:host] || Fog::InternetArchive::API_DOMAIN_NAME
        @scheme = options[:scheme] || 'http'
        @region = options[:region]
      end
@@ -270,12 +266,7 @@ module Fog
        else
          options[:region] ||= 'us-east-1'
          @region = options[:region]
          @host = options[:host] || case options[:region]
          when 'us-east-1'
            "s3.#{Fog::InternetArchive::DOMAIN_NAME}"
          else
            "s3-#{options[:region]}.#{Fog::InternetArchive::DOMAIN_NAME}"
          end
          @host = options[:host] || Fog::InternetArchive::API_DOMAIN_NAME
          @path = options[:path] || '/'
          @persistent = options.fetch(:persistent, false)
          @port = options[:port] || 80
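The net effect of the two connection hunks above, sketched: the region-based case statement is gone, and the host always falls back to the single API endpoint unless one is passed in explicitly (the override value below is invented):

    require 'fog'

    # Sketch only: host resolution after this change.
    options = {}
    options[:host] || Fog::InternetArchive::API_DOMAIN_NAME
    # => "s3.us.archive.org"

    options = { :host => 'example.localhost' }   # an explicit :host still wins
    options[:host] || Fog::InternetArchive::API_DOMAIN_NAME
    # => "example.localhost"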
@@ -1,7 +1,8 @@
Shindo.tests("Storage[:internet_archive] | directory", ["internet_archive"]) do

  directory_attributes = {
    :key => "fogdirectorytests-#{rand(65536)}"
    :key => "fogdirectorytests-#{rand(65536)}",
    :collections => ['test_collection']
  }

  tests('success') do
@@ -16,6 +17,27 @@ Shindo.tests("Storage[:internet_archive] | directory", ["internet_archive"]) do
      @instance.save
    end

    tests("#public_url").returns("http://archive.org/details/#{directory_attributes[:key]}") do
      @instance.public_url
    end

  end

  tests("#set_metadata_array_headers") do
    params = directory_attributes

    collection = Fog::Storage[:internetarchive].directories
    @instance = collection.new(params)

    @instance.collections = ['test_collection', 'opensource']
    @options = {}
    @instance.set_metadata_array_headers(:collections, @options)

    tests("#set_metadata_array_headers should set options").returns(true) do
      @options['x-archive-meta01-collection'] == 'opensource' &&
      @options['x-archive-meta02-collection'] == 'test_collection'
    end
  end


end
@@ -9,12 +9,12 @@ Shindo.tests("Storage[:internet_archive] | file", ["internet_archive"]) do
    :body => lorem_file,
    :public => true,
    :auto_make_bucket => 1,
    :collections => ['test_collection']
  }

  directory_attributes = {
    # Add a random suffix to prevent collision
    :key => "fogfilestests-#{rand(65536)}"
    :key => "fogfilestests-#{rand(65536)}",
    :collections => ['test_collection']
  }

  @directory = Fog::Storage[:internetarchive].directories.create(directory_attributes)
@@ -51,6 +51,7 @@ Shindo.tests('Fog::Storage[:internetarchive] | bucket requests', ["internet_arch
    Fog::Storage[:internetarchive].get_service.body
  end

  dirs = Fog::Storage[:internetarchive].directories.get(@ia_bucket_name)
  file = Fog::Storage[:internetarchive].directories.get(@ia_bucket_name).files.create(:body => 'y', :key => 'x')

  tests("#get_bucket('#{@ia_bucket_name}')").formats(@bucket_format) do
@@ -1,6 +1,6 @@
Shindo.tests('Fog::Storage[:internetarchive] | multipart upload requests', ["internet_archive"]) do

  @directory = Fog::Storage[:internetarchive].directories.create(:key => 'fogmultipartuploadtests')
  @directory = Fog::Storage[:internetarchive].directories.create(:key => "fogmultipartuploadtests-#{rand(65536)}")

  tests('success') do