2009-05-19 02:06:49 -04:00
|
|
|
module Fog
|
|
|
|
module AWS
|
|
|
|
class S3
|
|
|
|
|
2009-08-10 11:30:28 -04:00
|
|
|
# Reload the shared connection/parser/response plumbing and every
# S3 parser and request definition from disk. Invoked once at the
# bottom of this file; calling it again picks up edited sources.
def self.reload
  current_directory = File.dirname(__FILE__)

  # Shared infrastructure, loaded first so parsers/requests can use it.
  %w[connection parser response].each do |shared|
    load "#{current_directory}/../#{shared}.rb"
  end

  parsers_directory = "#{current_directory}/parsers/s3"
  %w[copy_object get_bucket get_bucket_location get_request_payment
     get_service].each do |parser|
    load "#{parsers_directory}/#{parser}.rb"
  end

  requests_directory = "#{current_directory}/requests/s3"
  %w[copy_object delete_bucket delete_object get_bucket
     get_bucket_location get_object get_request_payment get_service
     head_object put_bucket put_object put_request_payment].each do |request|
    load "#{requests_directory}/#{request}.rb"
  end
end
2009-08-09 01:40:42 -04:00
|
|
|
if Fog.mocking?
  # In mock mode, expose the in-memory datastore (seeded as
  # { :buckets => {} } in #initialize) so tests can inspect state.
  # NOTE(review): this accessor only exists if Fog.mocking? was true
  # when this file was loaded — toggling mocking later won't add it.
  attr_accessor :data
end
2009-05-19 02:06:49 -04:00
|
|
|
# Initialize connection to S3
#
# ==== Notes
# options parameter must include values for :aws_access_key_id and
# :aws_secret_access_key in order to create a connection
#
# ==== Examples
#   s3 = S3.new(
#     :aws_access_key_id => your_aws_access_key_id,
#     :aws_secret_access_key => your_aws_secret_access_key
#   )
#
# ==== Parameters
# * options<~Hash> - config arguments for connection. Defaults to {}.
#   Recognized keys: :aws_access_key_id, :aws_secret_access_key,
#   :host (default 's3.amazonaws.com'), :port (default 443),
#   :scheme (default 'https').
#
# ==== Returns
# * S3 object with connection to aws.
def initialize(options={})
  @aws_access_key_id = options[:aws_access_key_id]
  @aws_secret_access_key = options[:aws_secret_access_key]
  # Shared HMAC-SHA1 signer reused by #request to sign each request.
  @hmac = HMAC::SHA1.new(@aws_secret_access_key)
  @host = options[:host] || 's3.amazonaws.com'
  @port = options[:port] || 443
  @scheme = options[:scheme] || 'https'
  # NOTE(review): the real connection is built even when mocking —
  # presumably mocked requests never touch it; confirm.
  @connection = Fog::Connection.new("#{@scheme}://#{@host}:#{@port}")
  if Fog.mocking?
    # Seed the in-memory datastore used by mocked requests
    # (exposed via attr_accessor :data above).
    @data = { :buckets => {} }
  end
end
|
private
|
|
|
|
|
2009-06-07 18:42:51 -04:00
|
|
|
# Build upload metadata for +file+: returns a Hash with :body (the
# file's full contents, read into memory) and :headers containing
# Content-Type (inferred from the filename extension, when known),
# Content-Length and Content-MD5.
def parse_file(file)
  headers = {}

  # Infer Content-Type from the extension; omit the header when the
  # extension is unknown to MIME::Types.
  mime_types = MIME::Types.of(File.basename(file.path))
  headers['Content-Type'] = mime_types.first.content_type unless mime_types.empty?

  body = file.read
  # NOTE(review): String#size counts characters on Ruby >= 1.9;
  # Content-Length should be a byte count — confirm target Ruby.
  headers['Content-Length'] = body.size.to_s
  headers['Content-MD5'] = Base64.encode64(Digest::MD5.digest(body)).strip

  { :body => body, :headers => headers }
end
2009-06-25 21:02:00 -04:00
|
|
|
# Sign +params+ with the account credentials and send the request
# over @connection. Builds the S3-style "AWS access_key:signature"
# Authorization header from an HMAC-SHA1 of the canonical
# string-to-sign (verb, standard headers, x-amz-* headers, resource).
#
# ==== Parameters
# * params<~Hash> - :method, :host, :path, :query, :headers, :body,
#   :expects and :parser; forwarded to Fog::Connection#request.
#
# ==== Returns
# * response from @connection.request
def request(params)
  # Date header is required by S3 and is part of the signature.
  params[:headers]['Date'] = Time.now.utc.strftime("%a, %d %b %Y %H:%M:%S +0000")

  # Verb and standard headers, one per line (blank line when absent).
  string_to_sign =
<<-DATA
#{params[:method]}
#{params[:headers]['Content-MD5']}
#{params[:headers]['Content-Type']}
#{params[:headers]['Date']}
DATA

  # Canonicalize amz headers: collect x-amz-* headers, sort by name,
  # append as "name:value\n" pairs.
  amz_headers, canonical_amz_headers = {}, ''
  for key, value in params[:headers]
    if key[0..5] == 'x-amz-'
      amz_headers[key] = value
    end
  end
  amz_headers = amz_headers.sort {|x, y| x[0] <=> y[0]}
  for pair in amz_headers
    canonical_amz_headers << "#{pair[0]}:#{pair[1]}\n"
  end
  string_to_sign << "#{canonical_amz_headers}"

  # Canonical resource: "/<bucket>/<path>[?<query>]". The bucket name
  # is recovered from the virtual-hosted-style host, e.g.
  # "bucket.s3.amazonaws.com" -> "bucket"; when the host is the bare
  # endpoint, no bucket segment is added.
  canonical_resource = "/"
  subdomain = params[:host].split(".#{@host}").first
  unless subdomain == @host
    canonical_resource << "#{subdomain}/"
  end
  canonical_resource << "#{params[:path]}"
  if params[:query] && !params[:query].empty?
    canonical_resource << "?#{params[:query]}"
  end
  string_to_sign << "#{canonical_resource}"

  # NOTE(review): @hmac is a shared instance reused across requests;
  # assumes HMAC::SHA1#update replaces rather than accumulates state
  # — confirm against the hmac library in use.
  hmac = @hmac.update(string_to_sign)
  # chomp! strips the trailing newline Base64.encode64 appends.
  signature = Base64.encode64(hmac.digest).chomp!
  params[:headers]['Authorization'] = "AWS #{@aws_access_key_id}:#{signature}"

  response = @connection.request({
    :body => params[:body],
    :expects => params[:expects],
    :headers => params[:headers],
    :host => params[:host],
    :method => params[:method],
    :parser => params[:parser],
    :path => params[:path],
    :query => params[:query]
  })

  response
end
2009-05-19 02:06:49 -04:00
|
|
|
end
|
|
|
|
end
|
2009-08-17 18:11:53 -04:00
|
|
|
end
|
|
|
|
|
|
|
|
# Eagerly load all S3 connection plumbing, parsers and requests.
Fog::AWS::S3.reload