Mirror of https://github.com/fog/fog.git
more streaming fixes
* Allow models to pass a block to s3.objects.get and receive the stream.
* Clean up the connection so that, in the event of an error, it skips the blocks.
Parent: 797b98e07b
Commit: cc38eb74ff
3 changed files with 15 additions and 4 deletions
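As a caller-side illustration of the first bullet, here is a minimal usage sketch (not part of the commit; the bucket and key names mirror the spec added below, and it assumes that when a block is given the chunks are yielded rather than buffered into data.body):

    # With a block: each body chunk is yielded as it is read.
    streamed = ''
    @bucket.objects.get('fogobjectname') do |chunk|
      streamed << chunk
    end

    # Without a block: behaviour is unchanged and the whole body is buffered.
    object = @bucket.objects.get('fogobjectname')
    object.body  # => the complete payload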
@@ -24,8 +24,8 @@ module Fog
         object
       end

-      def get(key, options = {})
-        data = connection.get_object(bucket.name, key, options)
+      def get(key, options = {}, &block)
+        data = connection.get_object(bucket.name, key, options, &block)
         object_data = {
           :body => data.body,
           :key => key
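The &block captured by the model's get is handed straight to connection.get_object; the connection hunks below then look for it under params[:block], so presumably get_object places the block into the request params on the way down. A rough sketch of that hand-off (the request/params plumbing is not part of this diff, so names other than params[:block] are assumptions):

    # Hypothetical hand-off, for illustration only.
    def get_object(bucket_name, object_name, options = {}, &block)
      request(
        :method => 'GET',
        :path   => "#{bucket_name}/#{object_name}",  # illustrative path
        :block  => block                             # surfaces later as params[:block]
      )
    end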
@@ -88,7 +88,7 @@ unless Fog.mocking?

         unless params[:method] == 'HEAD'
           if response.headers['Content-Length']
-            unless params[:block]
+            if error || !params[:block]
               body << @connection.read(response.headers['Content-Length'].to_i)
             else
               remaining = response.headers['Content-Length'].to_i
@@ -102,7 +102,7 @@ unless Fog.mocking?
             # 2 == "/r/n".length
             chunk_size = @connection.readline.chomp!.to_i(16) + 2
             chunk = @connection.read(chunk_size)[0...-2]
-            unless params[:block]
+            if error || !params[:block]
               body << chunk
             else
               params[:block].call(chunk)
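Both connection hunks make the same change: chunks are streamed to the caller's block only when there is no error, so an error body stays buffered where the connection's own error handling can still read it. A standalone sketch of that dispatch (error, body and block stand in for the variables in the hunks):

    # Simplified chunk dispatch matching the pattern in the two hunks above.
    def handle_chunk(chunk, body, error, block)
      if error || block.nil?
        body << chunk       # buffer: no block given, or this is an error body
      else
        block.call(chunk)   # happy path with a block: stream straight to the caller
      end
      body
    end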
@@ -77,6 +77,17 @@ describe 'Fog::AWS::S3::Objects' do
       object.destroy
     end

+    it "should return chunked data if given a block" do
+      file = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
+      object = @bucket.objects.create(:key => 'fogobjectname', :body => file)
+      data = ''
+      @bucket.objects.get('fogobjectname') do |chunk|
+        data << chunk
+      end
+      data.should == File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r').read
+      object.destroy
+    end
+
   end

   describe "#head" do
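The new spec collects the chunks back into a string only so it can compare them against the source file; a more typical use of the block form is streaming straight to disk. A hedged usage sketch along the same lines (the local file name is illustrative):

    # Stream an S3 object to a local file without holding the whole body in memory.
    File.open('lorem_copy.txt', 'wb') do |local|
      @bucket.objects.get('fogobjectname') do |chunk|
        local.write(chunk)
      end
    end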