# Load the shared spec environment (RSpec config, FakeWeb stubs, fixture helpers).
require File.expand_path(File.join(File.dirname(__FILE__), 'spec_helper'))
# Minimal stand-in parser used by the "parser" specs below: whatever body it
# is handed, it returns a fixed, recognizable hash so tests can verify that
# HTTParty routed the response through the custom parser.
class CustomParser
  # body - the raw response body (ignored).
  # Returns the sentinel hash {:sexy => true}.
  def self.parse(body)
    {:sexy => true}
  end
end
describe HTTParty do
  before(:each) do
    # Fresh anonymous class per example so class-level options (base_uri,
    # headers, cookies, ...) never leak between examples.
    @klass = Class.new
    @klass.instance_eval { include HTTParty }
  end

  describe "base uri" do
    before(:each) do
      @klass.base_uri('api.foo.com/v1')
    end

    it "should have reader" do
      @klass.base_uri.should == 'http://api.foo.com/v1'
    end

    it 'should have writer' do
      @klass.base_uri('http://api.foobar.com')
      @klass.base_uri.should == 'http://api.foobar.com'
    end

    it 'should not modify the parameter during assignment' do
      uri = 'http://api.foobar.com'
      @klass.base_uri(uri)
      uri.should == 'http://api.foobar.com'
    end
  end

  describe "#normalize_base_uri" do
    it "should add http if not present for non ssl requests" do
      uri = HTTParty.normalize_base_uri('api.foobar.com')
      uri.should == 'http://api.foobar.com'
    end

    it "should add https if not present for ssl requests" do
      uri = HTTParty.normalize_base_uri('api.foo.com/v1:443')
      uri.should == 'https://api.foo.com/v1:443'
    end

    it "should not remove https for ssl requests" do
      uri = HTTParty.normalize_base_uri('https://api.foo.com/v1:443')
      uri.should == 'https://api.foo.com/v1:443'
    end

    it 'should not modify the parameter' do
      uri = 'http://api.foobar.com'
      HTTParty.normalize_base_uri(uri)
      uri.should == 'http://api.foobar.com'
    end
  end

  describe "headers" do
    # Expects the next HTTParty request to be built with exactly these headers
    # (matched via hash_including), stubbing out the actual HTTP call.
    def expect_headers(header={})
      HTTParty::Request.should_receive(:new) \
        .with(anything, anything, hash_including({ :headers => header })) \
        .and_return(mock("mock response", :perform => nil))
    end

    it "should default to empty hash" do
      @klass.headers.should == {}
    end

    it "should be able to be updated" do
      init_headers = {:foo => 'bar', :baz => 'spax'}
      @klass.headers init_headers
      @klass.headers.should == init_headers
    end

    it "uses the class headers when sending a request" do
      expect_headers(:foo => 'bar')
      @klass.headers(:foo => 'bar')
      @klass.get('')
    end

    it "overwrites class headers when passing in headers" do
      expect_headers(:baz => 'spax')
      @klass.headers(:foo => 'bar')
      @klass.get('', :headers => {:baz => 'spax'})
    end

    context "with cookies" do
      it 'utilizes the class-level cookies' do
        expect_headers(:foo => 'bar', 'cookie' => 'type=snickerdoodle')
        @klass.headers(:foo => 'bar')
        @klass.cookies(:type => 'snickerdoodle')
        @klass.get('')
      end

      it 'adds cookies to the headers' do
        expect_headers(:foo => 'bar', 'cookie' => 'type=snickerdoodle')
        @klass.headers(:foo => 'bar')
        @klass.get('', :cookies => {:type => 'snickerdoodle'})
      end

      it 'adds optional cookies to the optional headers' do
        expect_headers(:baz => 'spax', 'cookie' => 'type=snickerdoodle')
        @klass.get('', :cookies => {:type => 'snickerdoodle'}, :headers => {:baz => 'spax'})
      end
    end
  end

  describe "cookies" do
    # Expects the next request to carry exactly this Cookie header string,
    # stubbing out the actual HTTP call.
    def expect_cookie_header(s)
      HTTParty::Request.should_receive(:new) \
        .with(anything, anything, hash_including({ :headers => { "cookie" => s } })) \
        .and_return(mock("mock response", :perform => nil))
    end

    it "should not be in the headers by default" do
      HTTParty::Request.stub!(:new).and_return(stub(nil, :perform => nil))
      @klass.get("")
      @klass.headers.keys.should_not include("cookie")
    end

    it "should raise an ArgumentError if passed a non-Hash" do
      lambda do
        @klass.cookies("nonsense")
      end.should raise_error(ArgumentError)
    end

    it "should allow a cookie to be specified with a one-off request" do
      expect_cookie_header "type=snickerdoodle"
      @klass.get("", :cookies => { :type => "snickerdoodle" })
    end

    describe "when a cookie is set at the class level" do
      before(:each) do
        @klass.cookies({ :type => "snickerdoodle" })
      end

      it "should include that cookie in the request" do
        expect_cookie_header "type=snickerdoodle"
        @klass.get("")
      end

      it "should pass the proper cookies when requested multiple times" do
        2.times do
          expect_cookie_header "type=snickerdoodle"
          @klass.get("")
        end
      end

      it "should allow the class defaults to be overridden" do
        expect_cookie_header "type=chocolate_chip"

        @klass.get("", :cookies => { :type => "chocolate_chip" })
      end
    end

    describe "in a class with multiple methods that use different cookies" do
      before(:each) do
        @klass.instance_eval do
          def first_method
            get("first_method", :cookies => { :first_method_cookie => "foo" })
          end

          def second_method
            get("second_method", :cookies => { :second_method_cookie => "foo" })
          end
        end
      end

      it "should not allow cookies used in one method to carry over into other methods" do
        expect_cookie_header "first_method_cookie=foo"
        @klass.first_method

        expect_cookie_header "second_method_cookie=foo"
        @klass.second_method
      end
    end
  end

  describe "default params" do
    it "should default to empty hash" do
      @klass.default_params.should == {}
    end

    it "should be able to be updated" do
      new_defaults = {:foo => 'bar', :baz => 'spax'}
      @klass.default_params new_defaults
      @klass.default_params.should == new_defaults
    end
  end

  describe "basic http authentication" do
    it "should work" do
      @klass.basic_auth 'foobar', 'secret'
      @klass.default_options[:basic_auth].should == {:username => 'foobar', :password => 'secret'}
    end
  end

  describe "parser" do
    before(:each) do
      # CustomParser (defined at the top of this file) returns a sentinel hash.
      @parser = Proc.new{ |data| CustomParser.parse(data) }
      @klass.parser @parser
    end

    it "should set parser options" do
      @klass.default_options[:parser].should == @parser
    end

    it "should be able parse response with custom parser" do
      FakeWeb.register_uri(:get, 'http://twitter.com/statuses/public_timeline.xml', :body => 'tweets')
      custom_parsed_response = @klass.get('http://twitter.com/statuses/public_timeline.xml')
      custom_parsed_response[:sexy].should == true
    end
  end

  describe "format" do
    it "should allow xml" do
      @klass.format :xml
      @klass.default_options[:format].should == :xml
    end

    it "should allow json" do
      @klass.format :json
      @klass.default_options[:format].should == :json
    end

    it "should allow yaml" do
      @klass.format :yaml
      @klass.default_options[:format].should == :yaml
    end

    it "should allow plain" do
      @klass.format :plain
      @klass.default_options[:format].should == :plain
    end

    it 'should not allow funky format' do
      lambda do
        @klass.format :foobar
      end.should raise_error(HTTParty::UnsupportedFormat)
    end

    it 'should only print each format once with an exception' do
      lambda do
        @klass.format :foobar
      end.should raise_error(HTTParty::UnsupportedFormat, "Must be one of: html, json, plain, xml, yaml")
    end
  end

  describe "with explicit override of automatic redirect handling" do
    it "should fail with redirected GET" do
      lambda do
        @klass.get('/foo', :no_follow => true)
      end.should raise_error(HTTParty::RedirectionTooDeep)
    end

    it "should fail with redirected POST" do
      lambda do
        @klass.post('/foo', :no_follow => true)
      end.should raise_error(HTTParty::RedirectionTooDeep)
    end

    it "should fail with redirected DELETE" do
      lambda do
        @klass.delete('/foo', :no_follow => true)
      end.should raise_error(HTTParty::RedirectionTooDeep)
    end

    it "should fail with redirected PUT" do
      lambda do
        @klass.put('/foo', :no_follow => true)
      end.should raise_error(HTTParty::RedirectionTooDeep)
    end

    it "should fail with redirected HEAD" do
      lambda do
        @klass.head('/foo', :no_follow => true)
      end.should raise_error(HTTParty::RedirectionTooDeep)
    end

    it "should fail with redirected OPTIONS" do
      lambda do
        @klass.options('/foo', :no_follow => true)
      end.should raise_error(HTTParty::RedirectionTooDeep)
    end
  end

  describe "with multiple class definitions" do
    before(:each) do
      @klass.instance_eval do
        base_uri "http://first.com"
        default_params :one => 1
      end

      @additional_klass = Class.new
      @additional_klass.instance_eval do
        include HTTParty
        base_uri "http://second.com"
        default_params :two => 2
      end
    end

    it "should not run over each others options" do
      @klass.default_options.should == { :base_uri => 'http://first.com', :default_params => { :one => 1 } }
      @additional_klass.default_options.should == { :base_uri => 'http://second.com', :default_params => { :two => 2 } }
    end
  end

  describe "#get" do
    it "should be able to get html" do
      stub_http_response_with('google.html')
      HTTParty.get('http://www.google.com').should == file_fixture('google.html')
    end

    it "should be able parse response type json automatically" do
      stub_http_response_with('twitter.json')
      tweets = HTTParty.get('http://twitter.com/statuses/public_timeline.json')
      tweets.size.should == 20
      tweets.first['user'].should == {
        "name" => "Pyk",
        "url" => nil,
        "id" => "7694602",
        "description" => nil,
        "protected" => false,
        "screen_name" => "Pyk",
        "followers_count" => 1,
        "location" => "Opera Plaza, California",
        "profile_image_url" => "http://static.twitter.com/images/default_profile_normal.png"
      }
    end

    it "should be able parse response type xml automatically" do
      stub_http_response_with('twitter.xml')
      tweets = HTTParty.get('http://twitter.com/statuses/public_timeline.xml')
      tweets['statuses'].size.should == 20
      tweets['statuses'].first['user'].should == {
        "name" => "Magic 8 Bot",
        "url" => nil,
        "id" => "17656026",
        "description" => "ask me a question",
        "protected" => "false",
        "screen_name" => "magic8bot",
        "followers_count" => "90",
        "profile_image_url" => "http://s3.amazonaws.com/twitter_production/profile_images/65565851/8ball_large_normal.jpg",
        "location" => nil
      }
    end

    it "should not get undefined method add_node for nil class for the following xml" do
      stub_http_response_with('undefined_method_add_node_for_nil.xml')
      result = HTTParty.get('http://foobar.com')
      result.should == {"Entities"=>{"href"=>"https://s3-sandbox.parature.com/api/v1/5578/5633/Account", "results"=>"0", "total"=>"0", "page_size"=>"25", "page"=>"1"}}
    end

    it "should parse empty response fine" do
      stub_http_response_with('empty.xml')
      result = HTTParty.get('http://foobar.com')
      result.should == nil
    end

    it "should accept http URIs" do
      stub_http_response_with('google.html')
      lambda do
        HTTParty.get('http://google.com')
      end.should_not raise_error(HTTParty::UnsupportedURIScheme)
    end

    it "should accept https URIs" do
      stub_http_response_with('google.html')
      lambda do
        HTTParty.get('https://google.com')
      end.should_not raise_error(HTTParty::UnsupportedURIScheme)
    end

    it "should raise an ArgumentError on URIs that are not http or https" do
      lambda do
        HTTParty.get("file:///there_is_no_party_on/my/filesystem")
      end.should raise_error(HTTParty::UnsupportedURIScheme)
    end

    it "should raise an InvalidURIError on URIs that can't be parsed at all" do
      lambda do
        HTTParty.get("It's the one that says 'Bad URI'")
      end.should raise_error(URI::InvalidURIError)
    end
  end
end