# Copyright (c) 2011 Evan Phoenix
# Copyright (c) 2005 Zed A. Shaw
require_relative "helper"
require "digest"
require "puma/puma_http11"
# Tests for Puma's native HTTP/1.1 parser (Puma::HttpParser), covering
# request-line parsing, query strings, fragments, semicolons in paths,
# header-value trimming, and rejection of oversized or malformed input.
class Http11ParserTest < Minitest::Test

  parallelize_me!

  def test_parse_simple
    parser = Puma::HttpParser.new
    req = {}
    http = "GET /?a=1 HTTP/1.1\r\n\r\n"
    nread = parser.execute(req, http, 0)

    assert_equal http.length, nread, "Failed to parse the full HTTP request"
    assert parser.finished?, "Parser didn't finish"
    refute parser.error?, "Parser had error"
    assert_equal parser.nread, nread, "Number read returned from execute does not match"

    assert_equal '/', req['REQUEST_PATH']
    assert_equal 'HTTP/1.1', req['HTTP_VERSION']
    assert_equal '/?a=1', req['REQUEST_URI']
    assert_equal 'GET', req['REQUEST_METHOD']
    assert_nil req['FRAGMENT']
    assert_equal "a=1", req['QUERY_STRING']

    parser.reset
    assert_equal 0, parser.nread, "Number read after reset should be 0"
  end

  # Percent signs that are not valid escapes must pass through untouched.
  def test_parse_escaping_in_query
    parser = Puma::HttpParser.new
    req = {}
    http = "GET /admin/users?search=%27%%27 HTTP/1.1\r\n\r\n"
    nread = parser.execute(req, http, 0)

    assert_equal http.length, nread, "Failed to parse the full HTTP request"
    assert parser.finished?, "Parser didn't finish"
    refute parser.error?, "Parser had error"
    assert_equal parser.nread, nread, "Number read returned from execute does not match"

    assert_equal '/admin/users?search=%27%%27', req['REQUEST_URI']
    assert_equal "search=%27%%27", req['QUERY_STRING']

    parser.reset
    assert_equal 0, parser.nread, "Number read after reset should be 0"
  end

  # An absolute-form request target goes into REQUEST_URI only; the parser
  # does not split out REQUEST_PATH, FRAGMENT, or QUERY_STRING for it.
  def test_parse_absolute_uri
    parser = Puma::HttpParser.new
    req = {}
    http = "GET http://192.168.1.96:3000/api/v1/matches/test?1=1 HTTP/1.1\r\n\r\n"
    nread = parser.execute(req, http, 0)

    assert_equal http.length, nread, "Failed to parse the full HTTP request"
    assert parser.finished?, "Parser didn't finish"
    refute parser.error?, "Parser had error"
    assert_equal parser.nread, nread, "Number read returned from execute does not match"

    assert_equal "GET", req['REQUEST_METHOD']
    assert_equal 'http://192.168.1.96:3000/api/v1/matches/test?1=1', req['REQUEST_URI']
    assert_equal 'HTTP/1.1', req['HTTP_VERSION']

    assert_nil req['REQUEST_PATH']
    assert_nil req['FRAGMENT']
    assert_nil req['QUERY_STRING']

    parser.reset
    assert_equal 0, parser.nread, "Number read after reset should be 0"
  end

  # Unusual but legal header names/values must still parse.
  def test_parse_dumbfuck_headers
    parser = Puma::HttpParser.new
    req = {}
    should_be_good = "GET / HTTP/1.1\r\naaaaaaaaaaaaa:++++++++++\r\n\r\n"
    nread = parser.execute(req, should_be_good, 0)

    assert_equal should_be_good.length, nread
    assert parser.finished?
    refute parser.error?
  end

  # A malformed request line must raise Puma::HttpParserError and leave the
  # parser in an error state.
  def test_parse_error
    parser = Puma::HttpParser.new
    req = {}
    bad_http = "GET / SsUTF/1.1"

    assert_raises Puma::HttpParserError do
      parser.execute(req, bad_http, 0)
    end

    refute parser.finished?, "Parser shouldn't be finished"
    assert parser.error?, "Parser SHOULD have error"
  end

  # A fragment in the request target is split off into FRAGMENT.
  def test_fragment_in_uri
    parser = Puma::HttpParser.new
    req = {}
    get = "GET /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n\r\n"

    parser.execute(req, get, 0)

    assert parser.finished?
    assert_equal '/forums/1/topics/2375?page=1', req['REQUEST_URI']
    assert_equal 'posts-17408', req['FRAGMENT']
  end

  # Semicolons are part of the path, not a delimiter.
  def test_semicolon_in_path
    skip_on :jruby # Not yet supported on JRuby, see https://github.com/puma/puma/issues/1978
    parser = Puma::HttpParser.new
    req = {}
    get = "GET /forums/1/path;stillpath/2375?page=1 HTTP/1.1\r\n\r\n"

    parser.execute(req, get, 0)

    assert parser.finished?
    assert_equal '/forums/1/path;stillpath/2375?page=1', req['REQUEST_URI']
    assert_equal '/forums/1/path;stillpath/2375', req['REQUEST_PATH']
  end

  # lame random garbage maker
  #
  # Builds a pseudo-random string of roughly +min+ to +min + max*10+ bytes,
  # prefixed with its target length and a slash. When +readable+ is false the
  # payload uses raw SHA1 digest bytes (binary) instead of hex characters.
  def rand_data(min, max, readable=true)
    count = min + ((rand(max)+1) *10).to_i
    res = count.to_s + "/"

    if readable
      res << Digest::SHA1.hexdigest(rand(count * 100).to_s) * (count / 40)
    else
      res << Digest::SHA1.digest(rand(count * 100).to_s) * (count / 20)
    end

    res
  end

  def test_max_uri_path_length
    parser = Puma::HttpParser.new
    req = {}

    # Support URI path length to a max of 8192
    path = "/" + rand_data(7000, 100)
    http = "GET #{path} HTTP/1.1\r\n\r\n"
    parser.execute(req, http, 0)
    assert_equal path, req['REQUEST_PATH']
    parser.reset

    # Raise exception if URI path length > 8192
    path = "/" + rand_data(9000, 100)
    http = "GET #{path} HTTP/1.1\r\n\r\n"
    assert_raises Puma::HttpParserError do
      parser.execute(req, http, 0)
      parser.reset
    end
  end

  def test_horrible_queries
    parser = Puma::HttpParser.new

    # then that large header names are caught
    10.times do |c|
      get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-#{rand_data(1024, 1024+(c*1024))}: Test\r\n\r\n"
      assert_raises Puma::HttpParserError do
        parser.execute({}, get, 0)
        parser.reset
      end
    end

    # then that large mangled field values are caught
    10.times do |c|
      get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-Test: #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
      assert_raises Puma::HttpParserError do
        parser.execute({}, get, 0)
        parser.reset
      end
    end

    # then large headers are rejected too
    get = "GET /#{rand_data(10,120)} HTTP/1.1\r\n"
    get += "X-Test: test\r\n" * (80 * 1024)
    assert_raises Puma::HttpParserError do
      parser.execute({}, get, 0)
      parser.reset
    end

    # finally just that random garbage gets blocked all the time
    10.times do |c|
      get = "GET #{rand_data(1024, 1024+(c*1024), false)} #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
      assert_raises Puma::HttpParserError do
        parser.execute({}, get, 0)
        parser.reset
      end
    end
  end

  # Trailing whitespace on a header value is stripped before it is stored.
  def test_trims_whitespace_from_headers
    parser = Puma::HttpParser.new
    req = {}
    http = "GET / HTTP/1.1\r\nX-Strip-Me: Strip This \r\n\r\n"

    parser.execute(req, http, 0)

    assert_equal "Strip This", req["HTTP_X_STRIP_ME"]
  end
end
|