
added option to allow for streaming large files without loading them into memory

Will Bryant 2014-11-03 12:53:06 -08:00
parent a028de4298
commit 672cdae73a
4 changed files with 13 additions and 4 deletions
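
A minimal usage sketch of the new stream_body option, assuming you want to write a large download straight to disk; the URL, file path, and block body here are illustrative and not part of the commit:

require 'httparty'

# With stream_body: true, each fragment is yielded to the block as it arrives
# instead of being buffered into the response body, so memory use stays flat.
File.open("/tmp/large_download.zip", "wb") do |file|
  HTTParty.get("http://example.com/large_download.zip", stream_body: true) do |fragment|
    file.write(fragment)
  end
end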


@@ -100,7 +100,7 @@ module HTTParty
       chunks = []
       http_response.read_body do |fragment|
-        chunks << fragment
+        chunks << fragment unless options[:stream_body]
         block.call(fragment)
       end
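
With options[:stream_body] set, fragments are still handed to the caller's block but are no longer appended to chunks, so the full response body is never assembled in memory; the spec added below accordingly expects the block form of HTTParty.get to return a nil body.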


@@ -389,7 +389,6 @@ describe HTTParty::Request do
       end
     end
   describe 'with non-200 responses' do
     context "3xx responses" do
       it 'returns a valid object for 304 not modified' do


@@ -699,6 +699,16 @@ describe HTTParty do
       end.should == chunks.join
     end
+    it "should return an empty body if stream_body option is turned on" do
+      chunks = ["Chunk1", "Chunk2", "Chunk3", "Chunk4"]
+      options = {stream_body: true, format: 'html'}
+      stub_chunked_http_response_with(chunks, options)
+      HTTParty.get('http://www.google.com', options) do |fragment|
+        chunks.should include(fragment)
+      end.should == nil
+    end
     it "should be able parse response type json automatically" do
       stub_http_response_with('twitter.json')
       tweets = HTTParty.get('http://twitter.com/statuses/public_timeline.json')


@@ -13,14 +13,14 @@ module HTTParty
     HTTParty::Request.should_receive(:new).and_return(http_request)
   end
-  def stub_chunked_http_response_with(chunks)
+  def stub_chunked_http_response_with(chunks, options={format: "html"})
     response = Net::HTTPResponse.new("1.1", 200, nil)
     response.stub(:chunked_data).and_return(chunks)
     def response.read_body(&block)
      @body || chunked_data.each(&block)
     end
-    http_request = HTTParty::Request.new(Net::HTTP::Get, 'http://localhost', format: "html")
+    http_request = HTTParty::Request.new(Net::HTTP::Get, 'http://localhost', options)
     http_request.stub_chain(:http, :request).and_yield(response).and_return(response)
     HTTParty::Request.should_receive(:new).and_return(http_request)