Mirror of https://github.com/ruby/ruby.git (synced 2022-11-09 12:17:21 -05:00)
webrick/httpproxy: stream request and response bodies
Reading entire request or response bodies into memory can lead to trivial
denial-of-service attacks. Introduce Fibers in both cases to allow streaming.

WEBrick::HTTPRequest gains a new body_reader method to prepare itself as a
source for IO.copy_stream. This allows the WEBrick::HTTPRequest object to be
used as the Net::HTTPGenericRequest#body_stream= arg for Net::HTTP.

For HTTP proxy response bodies, we also use a Fiber to make the HTTP request
and read the response body.

* lib/webrick/httprequest.rb (body_reader): new method
  (readpartial): ditto
* lib/webrick/httpproxy.rb (perform_proxy_request): use Fiber to stream
  response body
  (do_GET, do_HEAD): adjust call
  (do_POST): adjust call and supply body_reader
* test/webrick/test_httprequest.rb (test_chunked): test for IO.copy_stream
  compatibility
* test/webrick/test_httpproxy.rb (test_big_bodies): new test

git-svn-id: svn+ssh://ci.ruby-lang.org/ruby/trunk@62966 b2dd03c8-39d4-4d8f-98ff-823fe69b080e
Parent: 32e277acbf
Commit: 706c028909
4 changed files with 172 additions and 32 deletions
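Before the diff, a short illustration of the two consumer-side uses of the new
WEBrick::HTTPRequest#body_reader named in the commit message: as the
Net::HTTPGenericRequest#body_stream= argument and as a source for
IO.copy_stream. This is a minimal, hypothetical servlet sketch; the servlet
name, upstream host, and paths are invented for illustration and are not part
of the patch. A request body can only be consumed once, so a real handler
would pick one of the two.

require 'webrick'
require 'net/http'

# Hypothetical servlet showing both advertised uses of req.body_reader.
class ForwardServlet < WEBrick::HTTPServlet::AbstractServlet
  def do_POST(req, res)
    # 1) As the body_stream of an outgoing Net::HTTP request, so the upload
    #    is relayed without being buffered in memory (body_reader responds
    #    to #readpartial, which is all body_stream= needs).
    post = Net::HTTP::Post.new('/upload', 'Transfer-Encoding' => 'chunked')
    post.body_stream = req.body_reader
    Net::HTTP.start('upstream.example', 8080) { |http| http.request(post) }

    # 2) Alternatively, as a source for IO.copy_stream:
    #    IO.copy_stream(req.body_reader, File.open('upload.bin', 'wb'))

    res.body = "forwarded\n"
  end
end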
lib/webrick/httpproxy.rb

@@ -211,21 +211,15 @@ module WEBrick
     end

     def do_GET(req, res)
-      perform_proxy_request(req, res) do |http, path, header|
-        http.get(path, header)
-      end
+      perform_proxy_request(req, res, Net::HTTP::Get)
     end

     def do_HEAD(req, res)
-      perform_proxy_request(req, res) do |http, path, header|
-        http.head(path, header)
-      end
+      perform_proxy_request(req, res, Net::HTTP::Head)
     end

     def do_POST(req, res)
-      perform_proxy_request(req, res) do |http, path, header|
-        http.post(path, req.body || "", header)
-      end
+      perform_proxy_request(req, res, Net::HTTP::Post, req.body_reader)
     end

     def do_OPTIONS(req, res)
@@ -301,15 +295,16 @@ module WEBrick
       return FakeProxyURI
     end

-    def perform_proxy_request(req, res)
+    def perform_proxy_request(req, res, req_class, body_stream = nil)
       uri = req.request_uri
       path = uri.path.dup
       path << "?" << uri.query if uri.query
       header = setup_proxy_header(req, res)
       upstream = setup_upstream_proxy_authentication(req, res, header)
-      response = nil

+      body_tmp = []
       http = Net::HTTP.new(uri.host, uri.port, upstream.host, upstream.port)
-      http.start do
-        if @config[:ProxyTimeout]
-          ################################## these issues are
+      req_fib = Fiber.new do
+        http.start do
+          if @config[:ProxyTimeout]
+            ################################## these issues are
@@ -317,22 +312,39 @@ module WEBrick
-          http.read_timeout = 60 # secs # Ruby's bug, but why?)
-          ##################################
-        end
-        response = yield(http, path, header)
-      end
-
-      # Persistent connection requirements are mysterious for me.
-      # So I will close the connection in every response.
-      res['proxy-connection'] = "close"
-      res['connection'] = "close"
+            http.read_timeout = 60 # secs # Ruby's bug, but why?)
+            ##################################
+          end
+          if body_stream && req['transfer-encoding'] =~ /\bchunked\b/i
+            header['Transfer-Encoding'] = 'chunked'
+          end
+          http_req = req_class.new(path, header)
+          http_req.body_stream = body_stream if body_stream
+          http.request(http_req) do |response|
+            # Persistent connection requirements are mysterious for me.
+            # So I will close the connection in every response.
+            res['proxy-connection'] = "close"
+            res['connection'] = "close"

-      # Convert Net::HTTP::HTTPResponse to WEBrick::HTTPResponse
-      res.status = response.code.to_i
-      choose_header(response, res)
-      set_cookie(response, res)
-      set_via(res)
-      res.body = response.body
+            # stream Net::HTTP::HTTPResponse to WEBrick::HTTPResponse
+            res.status = response.code.to_i
+            res.chunked = response.chunked?
+            choose_header(response, res)
+            set_cookie(response, res)
+            set_via(res)
+            response.read_body do |buf|
+              body_tmp << buf
+              Fiber.yield # wait for res.body Proc#call
+            end
+          end # http.request
+        end
+      end
+      req_fib.resume # read HTTP response headers and first chunk of the body
+      res.body = ->(socket) do
+        while buf = body_tmp.shift
+          socket.write(buf)
+          buf.clear
+          req_fib.resume # continue response.read_body
+        end
+      end
     end

     # :stopdoc:
   end
 end
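The heart of perform_proxy_request above is a producer/consumer handoff: the
Fiber queues one chunk into body_tmp and parks on Fiber.yield; the res.body
proc writes the chunk out and resumes the Fiber to fetch the next one. A
stripped-down sketch of just that pattern, with made-up literal chunks
standing in for response.read_body (not code from the patch):

# Producer/consumer handoff used by perform_proxy_request, in isolation.
body_tmp = []
producer = Fiber.new do
  ["chunk1\n", "chunk2\n", "chunk3\n"].each do |buf|
    body_tmp << buf
    Fiber.yield            # park until the writer has drained body_tmp
  end
end
producer.resume            # prime: queue the first chunk

writer = ->(socket) do
  while buf = body_tmp.shift
    socket.write(buf)
    buf.clear
    producer.resume        # ask the producer for the next chunk
  end
end

writer.call($stdout)       # prints chunk1, chunk2, chunk3 in order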
lib/webrick/httprequest.rb

@@ -257,6 +257,32 @@ module WEBrick
       @body.empty? ? nil : @body
     end

+    ##
+    # Prepares the HTTPRequest object for use as the
+    # source for IO.copy_stream
+
+    def body_reader
+      @body_tmp = []
+      @body_rd = Fiber.new do
+        body do |buf|
+          @body_tmp << buf
+          Fiber.yield
+        end
+      end
+      @body_rd.resume # grab the first chunk and yield
+      self
+    end
+
+    # for IO.copy_stream. Note: we may return a larger string than +size+
+    # here; but IO.copy_stream does not care.
+    def readpartial(size, buf = ''.b) # :nodoc
+      res = @body_tmp.shift or raise EOFError, 'end of file reached'
+      buf.replace(res)
+      res.clear
+      @body_rd.resume # get more chunks
+      buf
+    end
+
     ##
     # Request query as a Hash
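IO.copy_stream only needs its source to respond to readpartial(maxlen, outbuf),
which is why body_reader can simply return self once @body_rd is primed. A
self-contained sketch of that duck typing (the ChunkSource class is
illustrative, not part of WEBrick):

require 'stringio'

# IO.copy_stream falls back to calling #readpartial(len, buf) on a non-IO
# source, exactly the contract the new WEBrick readpartial implements.
class ChunkSource
  def initialize(chunks)
    @chunks = chunks.dup
  end

  def readpartial(maxlen, buf = ''.b)
    chunk = @chunks.shift or raise EOFError, 'end of file reached'
    buf.replace(chunk) # may be longer than maxlen; IO.copy_stream does not care
    buf
  end
end

dst = StringIO.new
IO.copy_stream(ChunkSource.new(%w[foo bar baz]), dst)
dst.string # => "foobarbaz"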
test/webrick/test_httpproxy.rb

@@ -118,6 +118,100 @@ class TestWEBrickHTTPProxy < Test::Unit::TestCase
     }
   end

+  def test_big_bodies
+    require 'digest/md5'
+    rand_str = File.read(__FILE__)
+    rand_str.freeze
+    nr = 1024 ** 2 / rand_str.size # bigger works, too
+    exp = Digest::MD5.new
+    nr.times { exp.update(rand_str) }
+    exp = exp.hexdigest
+    TestWEBrick.start_httpserver do |o_server, o_addr, o_port, o_log|
+      o_server.mount_proc('/') do |req, res|
+        case req.request_method
+        when 'GET'
+          res['content-type'] = 'application/octet-stream'
+          if req.path == '/length'
+            res['content-length'] = (nr * rand_str.size).to_s
+          else
+            res.chunked = true
+          end
+          res.body = ->(socket) { nr.times { socket.write(rand_str) } }
+        when 'POST'
+          dig = Digest::MD5.new
+          req.body { |buf| dig.update(buf); buf.clear }
+          res['content-type'] = 'text/plain'
+          res['content-length'] = '32'
+          res.body = dig.hexdigest
+        end
+      end
+      http = Net::HTTP.new(o_addr, o_port)
+      IO.pipe do |rd, wr|
+        headers = {
+          'Content-Type' => 'application/octet-stream',
+          'Transfer-Encoding' => 'chunked',
+        }
+        post = Net::HTTP::Post.new('/', headers)
+        th = Thread.new { nr.times { wr.write(rand_str) }; wr.close }
+        post.body_stream = rd
+        http.request(post) do |res|
+          assert_equal 'text/plain', res['content-type']
+          assert_equal 32, res.content_length
+          assert_equal exp, res.body
+        end
+        assert_nil th.value
+      end

+      TestWEBrick.start_httpproxy do |p_server, p_addr, p_port, p_log|
+        http = Net::HTTP.new(o_addr, o_port, p_addr, p_port)
+        http.request_get('/length') do |res|
+          assert_equal(nr * rand_str.size, res.content_length)
+          dig = Digest::MD5.new
+          res.read_body { |buf| dig.update(buf); buf.clear }
+          assert_equal exp, dig.hexdigest
+        end
+        http.request_get('/') do |res|
+          assert_predicate res, :chunked?
+          dig = Digest::MD5.new
+          res.read_body { |buf| dig.update(buf); buf.clear }
+          assert_equal exp, dig.hexdigest
+        end

+        IO.pipe do |rd, wr|
+          headers = {
+            'Content-Type' => 'application/octet-stream',
+            'Content-Length' => (nr * rand_str.size).to_s,
+          }
+          post = Net::HTTP::Post.new('/', headers)
+          th = Thread.new { nr.times { wr.write(rand_str) }; wr.close }
+          post.body_stream = rd
+          http.request(post) do |res|
+            assert_equal 'text/plain', res['content-type']
+            assert_equal 32, res.content_length
+            assert_equal exp, res.body
+          end
+          assert_nil th.value
+        end

+        IO.pipe do |rd, wr|
+          headers = {
+            'Content-Type' => 'application/octet-stream',
+            'Transfer-Encoding' => 'chunked',
+          }
+          post = Net::HTTP::Post.new('/', headers)
+          th = Thread.new { nr.times { wr.write(rand_str) }; wr.close }
+          post.body_stream = rd
+          http.request(post) do |res|
+            assert_equal 'text/plain', res['content-type']
+            assert_equal 32, res.content_length
+            assert_equal exp, res.body
+          end
+          assert_nil th.value
+        end
+      end
+    end
+  end
+
   def make_certificate(key, cn)
     subject = OpenSSL::X509::Name.parse("/DC=org/DC=ruby-lang/CN=#{cn}")
     exts = [
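The test above leans on two pre-existing WEBrick streaming idioms rather than
anything added by this patch: the block form of HTTPRequest#body to consume an
upload chunk by chunk, and a callable assigned to HTTPResponse#body so output
is generated as the socket drains. A condensed, hypothetical handler combining
both (server setup shown only for completeness; path and port are invented):

require 'webrick'

server = WEBrick::HTTPServer.new(Port: 0) # ephemeral port, illustrative only
server.mount_proc('/echo-size') do |req, res|
  bytes = 0
  req.body { |buf| bytes += buf.bytesize; buf.clear }   # stream the upload in
  res['content-type'] = 'text/plain'
  res.chunked = true                                    # stream the reply out
  res.body = ->(socket) { socket.write("#{bytes}\n") }
end
# server.start  # blocks; trap('INT') { server.shutdown } first in real use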
test/webrick/test_httprequest.rb

@@ -237,6 +237,7 @@ GET /
 
   def test_chunked
     crlf = "\x0d\x0a"
+    expect = File.read(__FILE__).freeze
     msg = <<-_end_of_message_
       POST /path HTTP/1.1
       Host: test.ruby-lang.org:8080
@@ -253,7 +254,14 @@ GET /
     msg << "0" << crlf
     req = WEBrick::HTTPRequest.new(WEBrick::Config::HTTP)
     req.parse(StringIO.new(msg))
-    assert_equal(File.read(__FILE__), req.body)
+    assert_equal(expect, req.body)
+
+    # chunked req.body_reader
+    req = WEBrick::HTTPRequest.new(WEBrick::Config::HTTP)
+    req.parse(StringIO.new(msg))
+    dst = StringIO.new
+    IO.copy_stream(req.body_reader, dst)
+    assert_equal(expect, dst.string)
   end
 
   def test_forwarded