more fleshing out of the models, getting close to feature complete for s3

Wesley Beary 2009-09-01 21:41:52 -07:00
parent d4d8e8684c
commit 0a80fbf01c
9 changed files with 169 additions and 88 deletions

View File

@@ -33,11 +33,13 @@ module Fog
         end
 
         def objects
-          Fog::AWS::S3::Objects.new(
-            :bucket => self,
-            :connection => connection
-          )
+          @objects ||= begin
+            Fog::AWS::S3::Objects.new(
+              :bucket => self,
+              :connection => connection
+            )
+          end
         end
 
         def payer
           @payer ||= begin
@@ -52,7 +54,8 @@ module Fog
         end
 
         def reload
-          buckets.get(name)
+          new_attributes = buckets.get(name).attributes
+          merge_attributes(new_attributes)
         end
 
         def save
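
Taken together, the Bucket changes above memoize `objects` in `@objects` and turn `reload` into an in-place refresh. A rough usage sketch, not from the commit: it assumes a connection built with `Fog::AWS::S3.new` and placeholder credentials, roughly what the specs' `s3` helper does, and a hypothetical bucket name.

  # Sketch only: connection options and bucket name are placeholders.
  require 'fog'
  s3 = Fog::AWS::S3.new(
    :aws_access_key_id     => 'ACCESS_KEY_ID',
    :aws_secret_access_key => 'SECRET_ACCESS_KEY'
  )
  bucket = s3.buckets.create(:name => 'fogbucketname')

  # objects is now memoized, so repeated calls return the same collection
  bucket.objects.equal?(bucket.objects)   # => true

  # reload merges the attributes of buckets.get(name) into this instance
  # instead of just returning a freshly fetched bucket
  bucket.reload

  bucket.destroy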

View File

@@ -11,15 +11,15 @@ module Fog
         def all
           data = connection.get_service.body
           owner = Fog::AWS::S3::Owner.new(data.delete('Owner').merge!(:connection => connection))
-          buckets = Fog::AWS::S3::Buckets.new(:connection => connection)
+          self.delete_if {true}
           data['Buckets'].each do |bucket|
-            buckets[bucket['Name']] = Fog::AWS::S3::Bucket.new({
-              :buckets => buckets,
+            self[bucket['Name']] = Fog::AWS::S3::Bucket.new({
+              :buckets => self,
               :connection => connection,
               :owner => owner
             }.merge!(bucket))
           end
-          buckets
+          self
         end
 
         def create(attributes = {})
@@ -29,7 +29,6 @@ module Fog
         end
 
         def get(name, options = {})
-          self[name] ||= begin
           remap_attributes(options, {
             :is_truncated => 'IsTruncated',
             :marker => 'Marker',
@@ -49,13 +48,10 @@ module Fog
               objects_data[key] = value
             end
           end
-          objects = Fog::AWS::S3::Objects.new({
-            :bucket => bucket,
-            :connection => connection
-          }.merge!(objects_data))
+          bucket.objects.merge_attributes(objects_data)
           data['Contents'].each do |object|
             owner = Fog::AWS::S3::Owner.new(object.delete('Owner').merge!(:connection => connection))
-            objects[object['key']] = Fog::AWS::S3::Object.new({
+            bucket.objects[object['key']] = Fog::AWS::S3::Object.new({
               :bucket => bucket,
               :connection => connection,
               :objects => self,
@@ -66,7 +62,6 @@ module Fog
         rescue Fog::Errors::NotFound
           nil
         end
-        end
 
         def new(attributes = {})
           Fog::AWS::S3::Bucket.new(
@@ -77,6 +72,10 @@ module Fog
           )
         end
 
+        def reload
+          all
+        end
+
       end
     end
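
With these Buckets changes, `all` clears the collection with `delete_if {true}`, reinserts each bucket keyed by name, and returns `self`; `get` drops its `self[name] ||=` caching; and `reload` simply calls `all`. A hedged sketch of the resulting behavior, reusing the `s3` connection from the previous sketch:

  buckets = s3.buckets

  # all rebuilds this same collection in place and returns self,
  # so the variable keeps pointing at a current view of the account
  buckets.all.equal?(buckets)   # => true

  # reload just delegates to all
  buckets.reload

  # get always round-trips to S3 now (no self[name] caching) and
  # returns nil when the bucket does not exist
  buckets.get('fogbucketname')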

View File

@ -7,9 +7,10 @@ module Fog
attribute :body attribute :body
attribute :content_length, 'Content-Length' attribute :content_length, 'Content-Length'
attribute :content_type, 'Content-Type' attribute :content_type, 'Content-Type'
attribute :etag, 'Etag' attribute :etag, ['Etag', 'ETag']
attribute :key, 'Key' attribute :key, 'Key'
attribute :last_modified, ['Last-Modified', 'LastModified'] attribute :last_modified, ['Last-Modified', 'LastModified']
attribute :owner
attribute :size, 'Size' attribute :size, 'Size'
attribute :storage_class, 'StorageClass' attribute :storage_class, 'StorageClass'
@ -35,7 +36,7 @@ module Fog
end end
def destroy def destroy
connection.delete_object(bucket, key) connection.delete_object(bucket.name, key)
objects.delete(key) objects.delete(key)
true true
rescue Fog::Errors::NotFound rescue Fog::Errors::NotFound
@ -43,7 +44,8 @@ module Fog
end end
def reload def reload
objects.get(key) new_attributes = objects.get(key).attributes
merge_attributes(new_attributes)
end end
def save(options = {}) def save(options = {})
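
Beyond `destroy` passing `bucket.name` and `reload` merging fresh attributes like Bucket does, the widened alias list on `etag` means either header spelling the service returns ('Etag' or 'ETag') lands in the same attribute, alongside the new bare `owner` attribute. A sketch, relying only on the model being constructible from parser-style attributes the way the bucket spec in this commit does; the etag value is a placeholder:

  object = Fog::AWS::S3::Object.new('ETag' => 'd41d8cd98f00b204e9800998ecf8427e')
  object.etag   # => "d41d8cd98f00b204e9800998ecf8427e"

  object = Fog::AWS::S3::Object.new('Etag' => 'd41d8cd98f00b204e9800998ecf8427e')
  object.etag   # => same value, via the other alias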

View File

@@ -15,7 +15,12 @@ module Fog
         def all(options = {})
           merge_attributes(options)
-          bucket.buckets.get(bucket.name, attributes).objects
+          self.delete_if {true}
+          objects = bucket.buckets.get(bucket.name, attributes).objects
+          objects.keys.each do |key|
+            self[key] = objects[key]
+          end
+          self
         end
 
         def bucket
@@ -29,10 +34,6 @@ module Fog
         end
 
         def get(key, options = {})
-          if self[key] && self[key].body
-            self[key]
-          else
-            self[key] ||= begin
           data = connection.get_object(bucket.name, key, options)
           object_data = { :body => data.body}
           for key, value in data.headers
@@ -48,8 +49,6 @@
         rescue Fog::Errors::NotFound
           nil
         end
-          end
-        end
 
         def head(key, options = {})
           self[key] ||= begin
@@ -78,6 +77,10 @@
           }.merge!(attributes))
         end
 
+        def reload
+          all
+        end
+
         private
 
         def bucket=(new_bucket)
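
Objects mirrors the Buckets changes: `all` clears the collection, copies in the objects fetched via `buckets.get(bucket.name, attributes)`, and returns `self`; `get` no longer serves a cached entry, so the body is always fetched fresh; and `reload` delegates to `all`. A sketch under the same assumptions as the earlier examples, with a hypothetical key:

  bucket  = s3.buckets.create(:name => 'fogbucketname')
  objects = bucket.objects

  # all repopulates this same collection and returns self
  objects.all.equal?(objects)   # => true

  # get always fetches key and body from S3, or returns nil if missing;
  # stale cached bodies are no longer possible
  objects.get('fogobjectname')

  # reload is a thin wrapper around all, matching Buckets#reload
  objects.reload

  bucket.destroy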

View File

@@ -23,14 +23,6 @@ module Fog
       merge_attributes(new_attributes)
     end
 
-    def attributes
-      attributes = {}
-      for attribute in self.class.attributes
-        attributes[attribute] = send(:"#{attribute}")
-      end
-      attributes
-    end
-
     def inspect
       data = "#<#{self.class.name}"
       for attribute in self.class.attributes
@@ -39,6 +31,14 @@
       data << ">"
     end
 
+    def attributes
+      attributes = {}
+      for attribute in self.class.attributes
+        attributes[attribute] = send(:"#{attribute}")
+      end
+      attributes
+    end
+
     def merge_attributes(new_attributes = {})
       for key, value in new_attributes
         if aliased_key = self.class.aliases[key]
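
The `attributes` reader being moved here is what the new `Bucket#reload` and `Object#reload` lean on: it walks `self.class.attributes` and returns a hash of attribute name to current value, which `merge_attributes` can then copy into another instance. A small sketch of that round trip; the values are hypothetical:

  object = Fog::AWS::S3::Object.new('Key' => 'fogobjectname', 'Size' => 10)
  object.attributes   # => hash including :key => "fogobjectname", :size => 10

  # reload boils down to fetching a fresh model and copying its hash over:
  #   merge_attributes(objects.get(key).attributes)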

View File

@@ -4,13 +4,9 @@ describe 'Fog::AWS::S3::Bucket' do
 
   describe "#initialize" do
 
-    it "should return a Fog:AWS::S3::Bucket" do
-      s3.buckets.new.should be_an(Fog::AWS::S3::Bucket)
-    end
-
     it "should remap attributes from parser" do
       now = Time.now
-      bucket = s3.buckets.new(
+      bucket = Fog::AWS::S3::Bucket.new(
         'CreationDate' => now,
         'Name' => 'bucketname'
       )
@@ -26,6 +22,11 @@ describe 'Fog::AWS::S3::Bucket' do
       s3.buckets.new.buckets.should be_a(Fog::AWS::S3::Buckets)
     end
 
+    it "should be the buckets the bucket is related to" do
+      buckets = s3.buckets
+      buckets.new.buckets.should == buckets
+    end
+
   end
 
   describe "#destroy" do

View File

@@ -8,6 +8,12 @@ describe 'Fog::AWS::S3::Buckets' do
       s3.buckets.all.should be_a(Fog::AWS::S3::Buckets)
     end
 
+    it "should include persisted buckets" do
+      bucket = s3.buckets.create(:name => 'fogbucketname')
+      s3.buckets.all.keys.should include('fogbucketname')
+      bucket.destroy
+    end
+
   end
 
   describe "#create" do
@@ -62,4 +68,12 @@
 
   end
 
+  describe "#reload" do
+
+    it "should return a Fog::AWS::S3::Buckets" do
+      s3.buckets.all.should be_a(Fog::AWS::S3::Buckets)
+    end
+
+  end
+
 end

View File

@@ -4,7 +4,7 @@ describe 'S3::Object' do
 
   describe "#initialize" do
 
-    it "should return an S3::Object"
+    it "should remap attributes from parser"
 
   end
@@ -32,6 +32,12 @@ describe 'S3::Object' do
 
   end
 
+  describe "#reload" do
+
+    it "should reload from s3"
+
+  end
+
   describe "#save" do
 
     it "should return the success value"

View File

@@ -1,46 +1,99 @@
 require File.dirname(__FILE__) + '/../../../spec_helper'
 
-describe 'S3::Objects' do
+describe 'Fog::AWS::S3::Objects' do
+
+  before(:each) do
+    @bucket = s3.buckets.create(:name => 'fogbucketname')
+  end
+
+  after(:each) do
+    @bucket.destroy
+  end
 
   describe "#initialize" do
 
-    it "should return an S3::Objects"
+    it "should remap attributes from parser" do
+      objects = Fog::AWS::S3::Objects.new(
+        'IsTruncated' => true,
+        'Marker' => 'marker',
+        'MaxKeys' => 1,
+        'Prefix' => 'prefix'
+      )
+      objects.is_truncated.should == true
+      objects.marker.should == 'marker'
+      objects.max_keys.should == 1
+      objects.prefix.should == 'prefix'
+    end
 
   end
 
   describe "#all" do
 
-    it "should return an S3::Objects"
+    it "should return a Fog::AWS::S3::Objects" do
+      @bucket.objects.all.should be_a(Fog::AWS::S3::Objects)
+    end
+
+    it "should include persisted objects" do
+      file = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
+      object = @bucket.objects.create(:key => 'fogobjectname', :body => file)
+      @bucket.objects.keys.should include('fogobjectname')
+      object.destroy
+    end
 
   end
 
   describe "#bucket" do
 
-    it "should return an S3::Bucket"
+    it "should return a Fog::AWS::S3::Bucket" do
+      @bucket.objects.bucket.should be_a(Fog::AWS::S3::Bucket)
+    end
+
+    it "should be the bucket the object is related to" do
+      @bucket.objects.bucket.should == @bucket
+    end
 
   end
 
   describe "#create" do
 
-    it "should return an S3::Object that has been persisted to s3"
+    it "should return a Fog::AWS::S3::Object" do
+      file = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
+      object = @bucket.objects.create(:key => 'fogobjectname', :body => file)
+      object.should be_a(Fog::AWS::S3::Object)
+      object.destroy
+    end
+
+    it "should exist on s3" do
+      file = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
+      object = @bucket.objects.create(:key => 'fogobjectname', :body => file)
+      @bucket.objects.get('fogobjectname').should_not be_nil
+      object.destroy
+    end
 
   end
 
   describe "#get" do
 
-    it "should return an S3::Object with metadata and data"
+    it "should return a Fog::AWS::S3::Object with metadata and data"
 
   end
 
   describe "#head" do
 
-    it "should return an S3::Object with metadata"
+    it "should return a Fog::AWS::S3::Object with metadata"
 
   end
 
   describe "#new" do
 
-    it "should return an S3::Object"
+    it "should return a Fog::AWS::S3::Object" do
+      @bucket.objects.new.should be_a(Fog::AWS::S3::Object)
+    end
 
   end
 
+  describe "#reload" do
+
+    it "should reload from s3"
+
+  end