1
0
Fork 0
mirror of https://github.com/fog/fog.git synced 2022-11-09 13:51:43 -05:00

Merge pull request #1743 from pandastream/s3-access-cleanup

[AWS|storage] access cleanup
This commit is contained in:
Wesley Beary 2013-04-17 08:53:43 -07:00
commit 8ca8a059b2
56 changed files with 368 additions and 254 deletions

View file

@ -3,8 +3,6 @@ require 'fog/aws/credential_fetcher'
require 'fog/aws/signaturev4'
module Fog
module AWS
COMPLIANT_BUCKET_NAMES = /^(?:[a-z]|\d(?!\d{0,2}(?:\.\d{1,3}){3}$))(?:[a-z0-9]|\-(?![\.])){1,61}[a-z0-9]$/
extend Fog::Provider
service(:auto_scaling, 'aws/auto_scaling', 'AutoScaling')

View file

@ -22,7 +22,7 @@ module Fog
:prefix => 'prefix'
})
data = service.get_bucket(key, options).body
directory = new(:key => data['Name'])
directory = new(:key => data['Name'], :is_persisted => true)
options = {}
for k, v in data
if ['CommonPrefixes', 'Delimiter', 'IsTruncated', 'Marker', 'MaxKeys', 'Prefix'].include?(k)

View file

@ -9,14 +9,12 @@ module Fog
class Directory < Fog::Model
VALID_ACLS = ['private', 'public-read', 'public-read-write', 'authenticated-read']
# See http://docs.amazonwebservices.com/AmazonS3/latest/API/RESTBucketPUT.html
INVALID_LOCATIONS = ['us-east-1']
attr_reader :acl
identity :key, :aliases => ['Name', 'name']
attribute :creation_date, :aliases => 'CreationDate'
attribute :creation_date, :aliases => 'CreationDate', :type => 'time'
attribute :location, :aliases => 'LocationConstraint', :type => 'string'
def acl=(new_acl)
unless VALID_ACLS.include?(new_acl)
@ -35,16 +33,12 @@ module Fog
end
def location
requires :key
attributes[:location] || bucket_location || self.service.region
@location ||= (bucket_location || self.service.region)
end
# NOTE: you can't change the region once the bucket is created
def location=(new_location)
if INVALID_LOCATIONS.include?(new_location)
raise ArgumentError, "location must not include any of #{INVALID_LOCATIONS.join(', ')}. See http://docs.amazonwebservices.com/AmazonS3/latest/API/RESTBucketPUT.html"
else
@location = new_location
end
@location = new_location
end
def files
@ -86,11 +80,9 @@ module Fog
def public_url
requires :key
if service.get_bucket_acl(key).body['AccessControlList'].detect {|grant| grant['Grantee']['URI'] == 'http://acs.amazonaws.com/groups/global/AllUsers' && grant['Permission'] == 'READ'}
if key.to_s =~ Fog::AWS::COMPLIANT_BUCKET_NAMES
"https://#{key}.s3.amazonaws.com"
else
"https://s3.amazonaws.com/#{key}"
end
service.request_url(
:bucket_name => key
)
else
nil
end
@ -103,18 +95,29 @@ module Fog
options['x-amz-acl'] = acl if acl
if location = attributes[:location] || (self.service.region != 'us-east-1' && self.service.region)
# http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketPUT.html
# Ignore the default region us-east-1
if !persisted? && location != DEFAULT_REGION
options['LocationConstraint'] = location
end
service.put_bucket(key, options)
attributes[:is_persisted] = true
true
end
# Whether this directory (bucket) is known to exist on S3, as opposed to a
# new, not-yet-saved model. Note: returns the raw truthy value of
# attributes[:is_persisted] when set, not a strict boolean.
#
# is_persisted is true in case of directories.get or after #save
# creation_date is set in case of directories.all
def persisted?
attributes[:is_persisted] || !!attributes[:creation_date]
end
private
# Queries S3 for this bucket's region (the LocationConstraint).
# Returns nil when the bucket has not been persisted yet, since there
# is nothing to look up server-side.
def bucket_location
requires :key
return nil unless persisted?
response = service.get_bucket_location(key)
response.body['LocationConstraint']
end

View file

@ -163,11 +163,10 @@ module Fog
def public_url
requires :directory, :key
if service.get_object_acl(directory.key, key).body['AccessControlList'].detect {|grant| grant['Grantee']['URI'] == 'http://acs.amazonaws.com/groups/global/AllUsers' && grant['Permission'] == 'READ'}
if directory.key.to_s =~ Fog::AWS::COMPLIANT_BUCKET_NAMES
"https://#{directory.key}.s3.amazonaws.com/#{Fog::AWS.escape(key)}".gsub('%2F','/')
else
"https://s3.amazonaws.com/#{directory.key}/#{Fog::AWS.escape(key)}".gsub('%2F','/')
end
service.request_url(
:bucket_name => directory.key,
:object_name => key
)
else
nil
end

View file

@ -15,9 +15,9 @@ module Fog
request({
:expects => 204,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:object_name => object_name,
:method => 'DELETE',
:path => CGI.escape(object_name),
:query => {'uploadId' => upload_id}
})
end
@ -25,4 +25,4 @@ module Fog
end # Real
end # Storage
end # AWS
end # Fog
end # Fog

View file

@ -34,10 +34,10 @@ module Fog
:body => data,
:expects => 200,
:headers => { 'Content-Length' => data.length },
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:object_name => object_name,
:method => 'POST',
:parser => Fog::Parsers::Storage::AWS::CompleteMultipartUpload.new,
:path => CGI.escape(object_name),
:query => {'uploadId' => upload_id}
})
end

View file

@ -33,10 +33,10 @@ module Fog
request({
:expects => 200,
:headers => headers,
:host => "#{target_bucket_name}.#{@host}",
:bucket_name => target_bucket_name,
:object_name => target_object_name,
:method => 'PUT',
:parser => Fog::Parsers::Storage::AWS::CopyObject.new,
:path => CGI.escape(target_object_name)
})
end

View file

@ -16,7 +16,7 @@ module Fog
request({
:expects => 204,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:method => 'DELETE'
})
end

View file

@ -16,7 +16,7 @@ module Fog
request({
:expects => 204,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:method => 'DELETE',
:query => {'cors' => nil}
})

View file

@ -16,7 +16,7 @@ module Fog
request({
:expects => 204,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:method => 'DELETE',
:query => {'lifecycle' => nil}
})

View file

@ -16,7 +16,7 @@ module Fog
request({
:expects => 204,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:method => 'DELETE',
:query => {'policy' => nil}
})

View file

@ -16,7 +16,7 @@ module Fog
request({
:expects => 204,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:method => 'DELETE',
:query => {'website' => nil}
})

View file

@ -55,7 +55,7 @@ module Fog
:body => data,
:expects => 200,
:headers => headers,
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:method => 'POST',
:parser => Fog::Parsers::Storage::AWS::DeleteMultipleObjects.new,
:query => {'delete' => nil}

View file

@ -24,7 +24,7 @@ module Fog
request({
:expects => 204,
:headers => headers,
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:idempotent => true,
:method => 'DELETE',
:path => path

View file

@ -44,7 +44,7 @@ module Fog
request({
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:idempotent => true,
:method => 'GET',
:parser => Fog::Parsers::Storage::AWS::GetBucket.new,

View file

@ -33,7 +33,7 @@ module Fog
request({
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:idempotent => true,
:method => 'GET',
:parser => Fog::Parsers::Storage::AWS::AccessControlList.new,

View file

@ -29,7 +29,7 @@ module Fog
request({
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:idempotent => true,
:method => 'GET',
:parser => Fog::Parsers::Storage::AWS::CorsConfiguration.new,

View file

@ -23,7 +23,7 @@ module Fog
request({
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:idempotent => true,
:method => 'GET',
:parser => Fog::Parsers::Storage::AWS::GetBucketLifecycle.new,

View file

@ -19,7 +19,7 @@ module Fog
request({
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:idempotent => true,
:method => 'GET',
:parser => Fog::Parsers::Storage::AWS::GetBucketLocation.new,

View file

@ -33,7 +33,7 @@ module Fog
request({
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:idempotent => true,
:method => 'GET',
:parser => Fog::Parsers::Storage::AWS::GetBucketLogging.new,

View file

@ -55,7 +55,7 @@ module Fog
request({
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:idempotent => true,
:method => 'GET',
:parser => Fog::Parsers::Storage::AWS::GetBucketObjectVersions.new,

View file

@ -19,7 +19,7 @@ module Fog
response = request({
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:idempotent => true,
:method => 'GET',
:query => {'policy' => nil}

View file

@ -23,7 +23,7 @@ module Fog
request({
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:idempotent => true,
:method => 'GET',
:parser => Fog::Parsers::Storage::AWS::GetBucketVersioning.new,

View file

@ -26,7 +26,7 @@ module Fog
request({
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:idempotent => true,
:method => 'GET',
:parser => Fog::Parsers::Storage::AWS::GetBucketWebsite.new,

View file

@ -55,10 +55,10 @@ module Fog
request(params.merge!({
:expects => [ 200, 206 ],
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:object_name => object_name,
:idempotent => true,
:method => 'GET',
:path => CGI.escape(object_name)
}))
end

View file

@ -43,11 +43,11 @@ module Fog
request({
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:object_name => object_name,
:idempotent => true,
:method => 'GET',
:parser => Fog::Parsers::Storage::AWS::AccessControlList.new,
:path => CGI.escape(object_name),
:query => query
})
end

View file

@ -5,25 +5,7 @@ module Fog
module GetObjectHttpUrl
def get_object_http_url(bucket_name, object_name, expires, options = {})
unless bucket_name
raise ArgumentError.new('bucket_name is required')
end
unless object_name
raise ArgumentError.new('object_name is required')
end
host, path = if bucket_name =~ /^(?:[a-z]|\d(?!\d{0,2}(?:\.\d{1,3}){3}$))(?:[a-z0-9]|\.(?![\.\-])|\-(?![\.])){1,61}[a-z0-9]$/
["#{bucket_name}.#{@host}", object_name]
else
[@host, "#{bucket_name}/#{object_name}"]
end
http_url({
:headers => {},
:host => host,
:port => @port,
:method => 'GET',
:path => path,
:query => options[:query]
}, expires)
get_object_url(bucket_name, object_name, expires, options.merge(:scheme => 'http'))
end
end

View file

@ -33,10 +33,10 @@ module Fog
request({
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:object_name => object_name,
:idempotent => true,
:method => 'GET',
:path => CGI.escape(object_name),
:query => {'torrent' => nil}
})
end

View file

@ -11,20 +11,11 @@ module Fog
unless object_name
raise ArgumentError.new('object_name is required')
end
host, path = if bucket_name =~ Fog::AWS::COMPLIANT_BUCKET_NAMES
["#{bucket_name}.#{@host}", object_name]
else
[@host, "#{bucket_name}/#{object_name}"]
end
scheme_host_path_query({
:scheme => options[:scheme],
:headers => {},
:host => host,
:port => @port,
:method => 'GET',
:path => path,
:query => options[:query]
}, expires)
signed_url(options.merge({
:bucket_name => bucket_name,
:object_name => object_name,
:method => 'GET'
}), expires)
end
end

View file

@ -19,7 +19,7 @@ module Fog
request({
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:idempotent => true,
:method => 'GET',
:parser => Fog::Parsers::Storage::AWS::GetRequestPayment.new,

View file

@ -42,10 +42,10 @@ module Fog
request({
:expects => 200,
:headers => headers,
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:object_name => object_name,
:idempotent => true,
:method => 'HEAD',
:path => CGI.escape(object_name),
:query => query
})
end

View file

@ -30,10 +30,10 @@ module Fog
request({
:expects => 200,
:headers => options,
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:object_name => object_name,
:method => 'POST',
:parser => Fog::Parsers::Storage::AWS::InitiateMultipartUpload.new,
:path => CGI.escape(object_name),
:query => {'uploads' => nil}
})
end

View file

@ -41,7 +41,7 @@ module Fog
request({
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:idempotent => true,
:method => 'GET',
:parser => Fog::Parsers::Storage::AWS::ListMultipartUploads.new,

View file

@ -40,11 +40,11 @@ module Fog
request({
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:object_name => object_name,
:idempotent => true,
:method => 'GET',
:parser => Fog::Parsers::Storage::AWS::ListParts.new,
:path => CGI.escape(object_name),
:query => options.merge!({'uploadId' => upload_id})
})
end

View file

@ -31,7 +31,7 @@ DATA
:body => data,
:headers => options,
:idempotent => true,
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:method => 'PUT'
})
end

View file

@ -46,7 +46,7 @@ module Fog
:body => data,
:expects => 200,
:headers => headers,
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:method => 'PUT',
:query => {'acl' => nil}
})

View file

@ -31,7 +31,7 @@ module Fog
:body => data,
:expects => 200,
:headers => headers,
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:method => 'PUT',
:query => {'cors' => nil}
})

View file

@ -66,7 +66,7 @@ module Fog
:expects => 200,
:headers => {'Content-MD5' => Base64.encode64(Digest::MD5.digest(body)).chomp!,
'Content-Type' => 'application/xml'},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:method => 'PUT',
:query => {'lifecycle' => nil}
})

View file

@ -69,7 +69,7 @@ DATA
:body => data,
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:method => 'PUT',
:query => {'logging' => nil}
})

View file

@ -15,7 +15,7 @@ module Fog
:body => Fog::JSON.encode(policy),
:expects => 204,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:method => 'PUT',
:query => {'policy' => nil}
})

View file

@ -22,7 +22,7 @@ DATA
:body => data,
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:method => 'PUT',
:query => {'versioning' => nil}
})

View file

@ -35,7 +35,7 @@ DATA
:body => data,
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:method => 'PUT',
:query => {'website' => nil}
})

View file

@ -33,10 +33,10 @@ module Fog
:body => data[:body],
:expects => 200,
:headers => headers,
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:object_name => object_name,
:idempotent => true,
:method => 'PUT',
:path => CGI.escape(object_name)
})
end

View file

@ -54,9 +54,9 @@ module Fog
:body => data,
:expects => 200,
:headers => headers,
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:object_name => object_name,
:method => 'PUT',
:path => CGI.escape(object_name),
:query => query
})
end

View file

@ -10,14 +10,12 @@ module Fog
unless object_name
raise ArgumentError.new('object_name is required')
end
scheme_host_path_query({
:scheme => options[:scheme],
:headers => headers,
:host => @host,
:port => @port,
signed_url(options.merge({
:bucket_name => bucket_name,
:object_name => object_name,
:method => 'PUT',
:path => "#{bucket_name}/#{object_name}"
}, expires)
:headers => headers,
}), expires)
end
end

View file

@ -21,7 +21,7 @@ DATA
:body => data,
:expects => 200,
:headers => {},
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:method => 'PUT',
:query => {'requestPayment' => nil}
})

View file

@ -28,9 +28,9 @@ module Fog
:expects => 200,
:idempotent => true,
:headers => headers,
:host => "#{bucket_name}.#{@host}",
:bucket_name => bucket_name,
:object_name => object_name,
:method => 'PUT',
:path => CGI.escape(object_name),
:query => {'uploadId' => upload_id, 'partNumber' => part_number}
})
end

View file

@ -6,6 +6,42 @@ module Fog
class AWS < Fog::Service
extend Fog::AWS::CredentialFetcher::ServiceMethods
COMPLIANT_BUCKET_NAMES = /^(?:[a-z]|\d(?!\d{0,2}(?:\.\d{1,3}){3}$))(?:[a-z0-9]|\.(?![\.\-])|\-(?![\.])){1,61}[a-z0-9]$/
DEFAULT_REGION = 'us-east-1'
DEFAULT_SCHEME = 'https'
DEFAULT_SCHEME_PORT = {
'http' => 80,
'https' => 443
}
VALID_QUERY_KEYS = %w[
acl
cors
delete
lifecycle
location
logging
notification
partNumber
policy
requestPayment
response-cache-control
response-content-disposition
response-content-encoding
response-content-language
response-content-type
response-expires
torrent
uploadId
uploads
versionId
versioning
versions
website
]
requires :aws_access_key_id, :aws_secret_access_key
recognizes :endpoint, :region, :host, :path, :port, :scheme, :persistent, :use_iam_profile, :aws_session_token, :aws_credentials_expire_at, :path_style
@ -79,11 +115,11 @@ module Fog
end
def http_url(params, expires)
scheme_host_path_query(params.merge(:scheme => 'http', :port => 80), expires)
signed_url(params.merge(:scheme => 'http'), expires)
end
def https_url(params, expires)
scheme_host_path_query(params.merge(:scheme => 'https', :port => 443), expires)
signed_url(params.merge(:scheme => 'https'), expires)
end
def url(params, expires)
@ -91,32 +127,124 @@ module Fog
https_url(params, expires)
end
# Builds an unsigned URL for the given request params: normalizes them
# (bucket/object/region handling) and renders the result as a URL string.
# No authentication query parameters are appended.
def request_url(params)
params_to_url(request_params(params))
end
# Builds a pre-signed (query-string authenticated) URL for the given
# request params, valid until +expires+ (anything responding to #to_i
# as epoch seconds).
#
# NOTE: the signature is computed from the raw params BEFORE they are
# normalized by request_params — the ordering here is significant.
def signed_url(params, expires)
expires = expires.to_i
sig = signature(params, expires)
params = request_params(params)
auth_query = (params[:query] || {}).merge(
'AWSAccessKeyId' => @aws_access_key_id,
'Signature' => sig,
'Expires' => expires
)
auth_query['x-amz-security-token'] = @aws_session_token if @aws_session_token
params[:query] = auth_query
params_to_url(params)
end
private
def scheme_host_path_query(params, expires)
params[:scheme] ||= @scheme
if params[:port] == 80 && params[:scheme] == 'http'
params.delete(:port)
def region_to_host(region=nil)
case region.to_s
when DEFAULT_REGION, ''
's3.amazonaws.com'
else
"s3-#{region}.amazonaws.com"
end
if params[:port] == 443 && params[:scheme] == 'https'
params.delete(:port)
end
# Builds the URL path component for an object key: URL-escapes the key
# but keeps '/' separators literal, and prefixes a leading slash.
def object_to_path(object_name=nil)
escaped = Fog::AWS.escape(object_name.to_s)
'/' + escaped.gsub('%2F', '/')
end
# Prefixes the (already-escaped) object path with the escaped bucket
# name, for path-style addressing. A nil path yields just the bucket
# segment.
def bucket_to_path(bucket_name, path=nil)
'/' + Fog::AWS.escape(bucket_name.to_s) + path.to_s
end
# Transforms things like bucket_name, object_name, region
#
# Should be idempotent: applying it to its own output yields the same result (f(f(x)) == f(x))
def request_params(params)
headers = params[:headers] || {}
if params[:scheme]
scheme = params[:scheme]
port = params[:port]
else
scheme = @scheme || DEFAULT_SCHEME
port = @port
end
params[:headers] ||= {}
params[:headers]['Date'] = expires.to_i
params[:path] = Fog::AWS.escape(params[:path]).gsub('%2F', '/')
query = []
params[:headers]['x-amz-security-token'] = @aws_session_token if @aws_session_token
if params[:query]
for key, value in params[:query]
query << "#{key}=#{Fog::AWS.escape(value)}"
if DEFAULT_SCHEME_PORT[scheme] == port
port = nil
end
if params[:region]
region = params[:region]
host = params[:host] || region_to_host(region)
else
region = @region || DEFAULT_REGION
host = params[:host] || @host || region_to_host(region)
end
path = params[:path] || object_to_path(params[:object_name])
path = '/' + path if path[0..0] != '/'
if params[:bucket_name]
bucket_name = params[:bucket_name]
path_style = params.fetch(:path_style, @path_style)
if !path_style && COMPLIANT_BUCKET_NAMES !~ bucket_name
Fog::Logger.warning("fog: the specified s3 bucket name(#{bucket_name}) is not a valid dns name, which will negatively impact performance. For details see: http://docs.amazonwebservices.com/AmazonS3/latest/dev/BucketRestrictions.html")
path_style = true
elsif bucket_name.include?('.')
Fog::Logger.warning("fog: the specified s3 bucket name(#{bucket_name}) might fail with https.")
end
if path_style
path = bucket_to_path bucket_name, path
else
host = [bucket_name, host].join('.')
end
end
query << "AWSAccessKeyId=#{@aws_access_key_id}"
query << "Signature=#{Fog::AWS.escape(signature(params))}"
query << "Expires=#{params[:headers]['Date']}"
query << "x-amz-security-token=#{Fog::AWS.escape(@aws_session_token)}" if @aws_session_token
port_part = params[:port] && ":#{params[:port]}"
"#{params[:scheme]}://#{params[:host]}#{port_part}/#{params[:path]}?#{query.join('&')}"
ret = params.merge({
:scheme => scheme,
:host => host,
:port => port,
:path => path,
:headers => headers,
})
#
ret.delete(:path_style)
ret.delete(:bucket_name)
ret.delete(:object_name)
ret.delete(:region)
ret
end
# Renders normalized request params (:scheme, :host, :port, :path and an
# optional :query hash) into a URL string via URI::Generic.
# Query entries with a nil value are emitted as a bare key (e.g. "?acl"),
# matching S3's sub-resource query conventions.
def params_to_url(params)
query_pairs = params[:query]
query_string = if query_pairs
query_pairs.map do |key, value|
value ? "#{key}=#{Fog::AWS.escape(value.to_s)}" : key
end.join('&')
end
URI::Generic.build(
:scheme => params[:scheme],
:host => params[:host],
:port => params[:port],
:path => params[:path],
:query => query_string
).to_s
end
end
@ -210,15 +338,9 @@ module Fog
require 'mime/types'
@use_iam_profile = options[:use_iam_profile]
setup_credentials(options)
options[:region] ||= 'us-east-1'
@host = options[:host] || case options[:region]
when 'us-east-1'
's3.amazonaws.com'
else
"s3-#{options[:region]}.amazonaws.com"
end
@scheme = options[:scheme] || 'https'
@region = options[:region]
@region = options[:region] || DEFAULT_REGION
@host = options[:host] || region_to_host(@region)
@scheme = options[:scheme] || DEFAULT_SCHEME
end
def data
@ -229,7 +351,7 @@ module Fog
self.class.data[@region].delete(@aws_access_key_id)
end
def signature(params)
def signature(params, expires)
"foo"
end
@ -270,6 +392,7 @@ module Fog
@use_iam_profile = options[:use_iam_profile]
setup_credentials(options)
@connection_options = options[:connection_options] || {}
@persistent = options.fetch(:persistent, false)
if @endpoint = options[:endpoint]
endpoint = URI.parse(@endpoint)
@ -279,23 +402,17 @@ module Fog
else
endpoint.path
end
@port = endpoint.port
@scheme = endpoint.scheme
@port = endpoint.port
else
options[:region] ||= 'us-east-1'
@region = options[:region]
@host = options[:host] || case options[:region]
when 'us-east-1'
's3.amazonaws.com'
else
"s3-#{options[:region]}.amazonaws.com"
end
@region = options[:region] || DEFAULT_REGION
@host = options[:host] || region_to_host(@region)
@path = options[:path] || '/'
@persistent = options.fetch(:persistent, false)
@port = options[:port] || 443
@scheme = options[:scheme] || 'https'
@scheme = options[:scheme] || DEFAULT_SCHEME
@port = options[:port] || DEFAULT_SCHEME_PORT[@scheme]
@path_style = options[:path_style] || false
end
@connection = Fog::Connection.new("#{@scheme}://#{@host}:#{@port}#{@path}", @persistent, @connection_options)
end
@ -303,17 +420,19 @@ module Fog
@connection.reset
end
def signature(params)
def signature(params, expires)
headers = params[:headers] || {}
string_to_sign =
<<-DATA
#{params[:method].to_s.upcase}
#{params[:headers]['Content-MD5']}
#{params[:headers]['Content-Type']}
#{params[:headers]['Date']}
#{headers['Content-MD5']}
#{headers['Content-Type']}
#{expires}
DATA
amz_headers, canonical_amz_headers = {}, ''
for key, value in params[:headers]
for key, value in headers
if key[0..5] == 'x-amz-'
amz_headers[key] = value
end
@ -324,57 +443,33 @@ DATA
end
string_to_sign << canonical_amz_headers
subdomain = params[:host].split(".#{@host}").first
valid_dns = !!(subdomain =~ /^(?:[a-z]|\d(?!\d{0,2}(?:\.\d{1,3}){3}$))(?:[a-z0-9]|\.(?![\.\-])|\-(?![\.])){1,61}[a-z0-9]$/)
if !valid_dns || @path_style
Fog::Logger.warning("fog: the specified s3 bucket name(#{subdomain}) is not a valid dns name, which will negatively impact performance. For details see: http://docs.amazonwebservices.com/AmazonS3/latest/dev/BucketRestrictions.html") unless valid_dns
params[:host] = params[:host].split("#{subdomain}.")[-1]
unless subdomain == @host
if params[:path]
params[:path] = "#{subdomain}/#{params[:path]}"
else
params[:path] = subdomain
query_string = ''
if params[:query]
query_args = []
for key in params[:query].keys.sort
if VALID_QUERY_KEYS.include?(key)
value = params[:query][key]
if value
query_args << "#{key}=#{Fog::AWS.escape(value.to_s)}"
else
query_args << key
end
end
end
subdomain = nil
end
canonical_resource = @path.dup
unless subdomain.nil? || subdomain == @host
canonical_resource << "#{Fog::AWS.escape(subdomain).downcase}/"
end
canonical_resource << params[:path].to_s
canonical_resource << '?'
for key in (params[:query] || {}).keys.sort
if %w{
acl
cors
delete
lifecycle
location
logging
notification
partNumber
policy
requestPayment
response-cache-control
response-content-disposition
response-content-encoding
response-content-language
response-content-type
response-expires
torrent
uploadId
uploads
versionId
versioning
versions
website
}.include?(key)
canonical_resource << "#{key}#{"=#{params[:query][key]}" unless params[:query][key].nil?}&"
if query_args.any?
query_string = '?' + query_args.join('&')
end
end
canonical_resource.chop!
canonical_path = (params[:path] || object_to_path(params[:object_name])).to_s
canonical_path = '/' + canonical_path if canonical_path[0..0] != '/'
if params[:bucket_name]
canonical_resource = "/#{params[:bucket_name]}#{canonical_path}"
else
canonical_resource = canonical_path
end
canonical_resource << query_string
string_to_sign << canonical_resource
signed_string = @hmac.sign(string_to_sign)
@ -395,16 +490,23 @@ DATA
def request(params, &block)
refresh_credentials_if_expired
params[:headers]['Date'] = Fog::Time.now.to_date_header
expires = Fog::Time.now.to_date_header
signature = signature(params, expires)
params = request_params(params)
params.delete(:port) unless params[:port]
params[:headers]['Date'] = expires
params[:headers]['x-amz-security-token'] = @aws_session_token if @aws_session_token
params[:headers]['Authorization'] = "AWS #{@aws_access_key_id}:#{signature(params)}"
params[:headers]['Authorization'] = "AWS #{@aws_access_key_id}:#{signature}"
# FIXME: ToHashParser should make this not needed
original_params = params.dup
begin
response = @connection.request(params, &block)
rescue Excon::Errors::TemporaryRedirect => error
uri = URI.parse(error.response.is_a?(Hash) ? error.response[:headers]['Location'] : error.response.headers['Location'])
headers = (error.response.is_a?(Hash) ? error.response[:headers] : error.response.headers)
uri = URI.parse(headers['Location'])
Fog::Logger.warning("fog: followed redirect to #{uri.host}, connecting to the matching region will be more performant")
response = Fog::Connection.new("#{@scheme}://#{uri.host}:#{@port}", false, @connection_options).request(original_params, &block)
end

View file

@ -1,7 +1,43 @@
Shindo.tests("Storage[:aws] | directory", ["aws"]) do
directory_attributes = {
:key => 'fogdirectorytests'
:key => uniq_id('fogdirectorytests')
}
model_tests(Fog::Storage[:aws].directories, directory_attributes, Fog.mocking?) do
tests("#public_url").returns(nil) do
@instance.public_url
end
@instance.acl = 'public-read'
@instance.save
tests("#public_url").returns(true) do
if @instance.public_url =~ %r[\Ahttps://fogdirectorytests-[\da-f]+\.s3\.amazonaws\.com/\z]
true
else
@instance.public_url
end
end
end
directory_attributes = {
:key => uniq_id('different-region'),
:location => 'eu-west-1',
}
model_tests(Fog::Storage[:aws].directories, directory_attributes, Fog.mocking?) do
tests("#location").returns('eu-west-1') do
@instance.location
end
tests("#location").returns('eu-west-1') do
Fog::Storage[:aws].directories.get(@instance.identity).location
end
end
directory_attributes = {
:key => uniq_id('fogdirectorytests')
}
model_tests(Fog::Storage[:aws].directories, directory_attributes, Fog.mocking?) do
@ -13,7 +49,7 @@ Shindo.tests("Storage[:aws] | directory", ["aws"]) do
tests("#versioning=(true) sets versioning to 'Enabled'").returns('Enabled') do
@instance.versioning = true
@instance.connection.get_bucket_versioning(@instance.key).body['VersioningConfiguration']['Status']
@instance.service.get_bucket_versioning(@instance.key).body['VersioningConfiguration']['Status']
end
tests("#versioning=(false)").succeeds do
@ -22,7 +58,7 @@ Shindo.tests("Storage[:aws] | directory", ["aws"]) do
tests("#versioning=(false) sets versioning to 'Suspended'").returns('Suspended') do
@instance.versioning = false
@instance.connection.get_bucket_versioning(@instance.key).body['VersioningConfiguration']['Status']
@instance.service.get_bucket_versioning(@instance.key).body['VersioningConfiguration']['Status']
end
end
@ -36,12 +72,12 @@ Shindo.tests("Storage[:aws] | directory", ["aws"]) do
end
tests("#versioning? true if enabled").returns(true) do
@instance.connection.put_bucket_versioning(@instance.key, 'Enabled')
@instance.service.put_bucket_versioning(@instance.key, 'Enabled')
@instance.versioning?
end
tests("#versioning? false if suspended").returns(false) do
@instance.connection.put_bucket_versioning(@instance.key, 'Suspended')
@instance.service.put_bucket_versioning(@instance.key, 'Suspended')
@instance.versioning?
end
end

View file

@ -9,8 +9,7 @@ Shindo.tests("Storage[:aws] | file", ["aws"]) do
}
directory_attributes = {
# Add a random suffix to prevent collision
:key => "fogfilestests-#{rand(65536)}"
:key => uniq_id("fogfilestests")
}
@directory = Fog::Storage[:aws].directories.create(directory_attributes)

View file

@ -7,7 +7,7 @@ Shindo.tests("Storage[:aws] | files", ["aws"]) do
}
directory_attributes = {
:key => 'fogfilestests'
:key => uniq_id('fogfilestests')
}
@directory = Fog::Storage[:aws].directories.create(directory_attributes)
@ -16,9 +16,9 @@ Shindo.tests("Storage[:aws] | files", ["aws"]) do
model_tests(@directory.files, file_attributes, Fog.mocking?) do
v1 = @instance.version
v2 = @directory.connection.put_object(@directory.key, @instance.key, 'version 2 content').headers['x-amz-version-id']
v3 = @directory.connection.delete_object(@directory.key, @instance.key).headers['x-amz-version-id']
v4 = @directory.connection.put_object(@directory.key, @instance.key, 'version 3 content').headers['x-amz-version-id']
v2 = @directory.service.put_object(@directory.key, @instance.key, 'version 2 content').headers['x-amz-version-id']
v3 = @directory.service.delete_object(@directory.key, @instance.key).headers['x-amz-version-id']
v4 = @directory.service.put_object(@directory.key, @instance.key, 'version 3 content').headers['x-amz-version-id']
tests("#get") do
tests("#get without version fetches the latest version").returns(v4) do
@ -30,6 +30,7 @@ Shindo.tests("Storage[:aws] | files", ["aws"]) do
end
tests("#get with a deleted version returns nil").returns(nil) do
pending # getting 405 Method Not Allowed
@directory.files.get(@instance.key, 'versionId' => v3)
end
end
@ -44,6 +45,7 @@ Shindo.tests("Storage[:aws] | files", ["aws"]) do
end
tests("#head with a deleted version returns nil").returns(nil) do
pending # getting 405 Method Not Allowed
@directory.files.head(@instance.key, 'versionId' => v3)
end
end

View file

@ -14,15 +14,17 @@ Shindo.tests('AWS | url', ["aws"]) do
@file = @storage.directories.new(:key => 'fognonbucket').files.new(:key => 'test.txt')
if Fog.mock?
signature = Fog::Storage::AWS.new.signature(nil)
signature = Fog::Storage::AWS.new.signature(nil, nil)
else
signature = 'tajHIhKHAdFYsigmzybCpaq8N0Q%3D'
end
tests('#url w/ response-cache-control').returns(
"https://fognonbucket.s3.amazonaws.com/test.txt?response-cache-control=No-cache&AWSAccessKeyId=123&Signature=#{signature}&Expires=1356998400"
) do
@file.url(@expires, :query => { 'response-cache-control' => 'No-cache' })
if RUBY_VERSION > '1.8.7' # ruby 1.8.x doesn't provide hash ordering
tests('#url w/ response-cache-control').returns(
"https://fognonbucket.s3.amazonaws.com/test.txt?response-cache-control=No-cache&AWSAccessKeyId=123&Signature=#{signature}&Expires=1356998400"
) do
@file.url(@expires, :query => { 'response-cache-control' => 'No-cache' })
end
end
end

View file

@ -7,7 +7,7 @@ Shindo.tests("Storage[:aws] | version", ["aws"]) do
}
directory_attributes = {
:key => 'fogfilestests'
:key => uniq_id('fogfilestests')
}
@directory = Fog::Storage[:aws].directories.create(directory_attributes)
@ -16,7 +16,7 @@ Shindo.tests("Storage[:aws] | version", ["aws"]) do
model_tests(@directory.files, file_attributes, Fog.mocking?) do
@version_instance = @instance.versions.first
@directory.connection.put_object(@directory.key, @instance.key, 'second version content')
@directory.service.put_object(@directory.key, @instance.key, 'second version content')
tests("#file") do
tests("#file should return the object associated with the version").returns(@version_instance.version) do

View file

@ -7,24 +7,24 @@ Shindo.tests("Storage[:aws] | versions", ["aws"]) do
}
directory_attributes = {
:key => 'fogfilestests'
:key => uniq_id('fogfilestests')
}
model_tests(Fog::Storage[:aws].directories, directory_attributes, Fog.mocking?) do
@instance.versioning = true
versions = []
versions << @instance.connection.put_object(@instance.key, 'one', 'abcde').headers['x-amz-version-id']
versions << @instance.service.put_object(@instance.key, 'one', 'abcde').headers['x-amz-version-id']
puts versions.first
versions << @instance.connection.put_object(@instance.key, 'one', '32423').headers['x-amz-version-id']
versions << @instance.connection.delete_object(@instance.key, 'one').headers['x-amz-version-id']
versions << @instance.service.put_object(@instance.key, 'one', '32423').headers['x-amz-version-id']
versions << @instance.service.delete_object(@instance.key, 'one').headers['x-amz-version-id']
versions.reverse!
puts versions.first
versions << @instance.connection.put_object(@instance.key, 'two', 'aoeu').headers['x-amz-version-id']
versions << @instance.service.put_object(@instance.key, 'two', 'aoeu').headers['x-amz-version-id']
tests('#versions') do
tests('#versions.size includes versions (including DeleteMarkers) for all keys').returns(4) do

View file

@ -1,6 +1,6 @@
Shindo.tests('Fog::Storage[:aws] | multipart upload requests', ["aws"]) do
@directory = Fog::Storage[:aws].directories.create(:key => 'fogmultipartuploadtests')
@directory = Fog::Storage[:aws].directories.create(:key => uniq_id('fogmultipartuploadtests'))
tests('success') do

View file

@ -111,14 +111,16 @@ Shindo.tests('AWS::Storage | object requests', ['aws']) do
end
fognonbucket = uniq_id('fognonbucket')
tests('failure') do
tests("#put_object('fognonbucket', 'fog_non_object', lorem_file)").raises(Excon::Errors::NotFound) do
Fog::Storage[:aws].put_object('fognonbucket', 'fog_non_object', lorem_file)
tests("#put_object('#{fognonbucket}', 'fog_non_object', lorem_file)").raises(Excon::Errors::NotFound) do
Fog::Storage[:aws].put_object(fognonbucket, 'fog_non_object', lorem_file)
end
tests("#copy_object('fognonbucket', 'fog_object', '#{@directory.identity}', 'fog_other_object')").raises(Excon::Errors::NotFound) do
Fog::Storage[:aws].copy_object('fognonbucket', 'fog_object', @directory.identity, 'fog_other_object')
tests("#copy_object('#{fognonbucket}', 'fog_object', '#{@directory.identity}', 'fog_other_object')").raises(Excon::Errors::NotFound) do
Fog::Storage[:aws].copy_object(fognonbucket, 'fog_object', @directory.identity, 'fog_other_object')
end
tests("#copy_object('#{@directory.identity}', 'fog_non_object', '#{@directory.identity}', 'fog_other_object')").raises(Excon::Errors::NotFound) do
@ -126,32 +128,32 @@ Shindo.tests('AWS::Storage | object requests', ['aws']) do
end
tests("#copy_object('#{@directory.identity}', 'fog_object', 'fognonbucket', 'fog_other_object')").raises(Excon::Errors::NotFound) do
Fog::Storage[:aws].copy_object(@directory.identity, 'fog_object', 'fognonbucket', 'fog_other_object')
Fog::Storage[:aws].copy_object(@directory.identity, 'fog_object', fognonbucket, 'fog_other_object')
end
tests("#get_object('fognonbucket', 'fog_non_object')").raises(Excon::Errors::NotFound) do
Fog::Storage[:aws].get_object('fognonbucket', 'fog_non_object')
tests("#get_object('#{fognonbucket}', 'fog_non_object')").raises(Excon::Errors::NotFound) do
Fog::Storage[:aws].get_object(fognonbucket, 'fog_non_object')
end
tests("#get_object('#{@directory.identity}', 'fog_non_object')").raises(Excon::Errors::NotFound) do
Fog::Storage[:aws].get_object(@directory.identity, 'fog_non_object')
end
tests("#head_object('fognonbucket', 'fog_non_object')").raises(Excon::Errors::NotFound) do
Fog::Storage[:aws].head_object('fognonbucket', 'fog_non_object')
tests("#head_object(fognonbucket, 'fog_non_object')").raises(Excon::Errors::NotFound) do
Fog::Storage[:aws].head_object(fognonbucket, 'fog_non_object')
end
tests("#head_object('#{@directory.identity}', 'fog_non_object')").raises(Excon::Errors::NotFound) do
Fog::Storage[:aws].head_object(@directory.identity, 'fog_non_object')
end
tests("#delete_object('fognonbucket', 'fog_non_object')").raises(Excon::Errors::NotFound) do
Fog::Storage[:aws].delete_object('fognonbucket', 'fog_non_object')
tests("#delete_object('#{fognonbucket}', 'fog_non_object')").raises(Excon::Errors::NotFound) do
Fog::Storage[:aws].delete_object(fognonbucket, 'fog_non_object')
end
tests("#delete_multiple_objects('fognonbucket', ['fog_non_object'])").raises(Excon::Errors::NotFound) do
tests("#delete_multiple_objects('#{fognonbucket}', ['fog_non_object'])").raises(Excon::Errors::NotFound) do
pending if Fog.mocking?
Fog::Storage[:aws].delete_multiple_objects('fognonbucket', ['fog_non_object'])
Fog::Storage[:aws].delete_multiple_objects(fognonbucket, ['fog_non_object'])
end
tests("#put_object_acl('#{@directory.identity}', 'fog_object', 'invalid')").raises(Excon::Errors::BadRequest) do