require 'uri'
require 'stringio'
require 'time'

module Kernel
  private
  alias open_uri_original_open open # :nodoc:
  class << self
    alias open_uri_original_open open # :nodoc:
  end

  # Allows the opening of various resources including URIs.
  #
  # If the first argument responds to the 'open' method, 'open' is called on
  # it with the rest of the arguments.
  #
  # If the first argument is a string that begins with xxx://, it is parsed by
  # URI.parse. If the parsed object responds to the 'open' method,
  # 'open' is called on it with the rest of the arguments.
  #
  # Otherwise, the original Kernel#open is called.
  #
  # Since open-uri.rb provides URI::HTTP#open, URI::HTTPS#open and
  # URI::FTP#open, Kernel#open and Kernel.open can accept URIs and strings
  # that begin with http://, https:// and ftp://. In these cases, the opened
  # file object is extended by OpenURI::Meta.
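  #
  # For example (a sketch; the local filename is only illustrative):
  #
  #   open("http://www.ruby-lang.org/") {|f| ... }  # handled by URI::HTTP#open
  #   open("local-file.txt") {|f| ... }             # handled by the original Kernel#open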
  def open(name, *rest, &block) # :doc:
    if name.respond_to?(:open)
      name.open(*rest, &block)
    elsif name.respond_to?(:to_str) &&
          %r{\A[A-Za-z][A-Za-z0-9+\-\.]*://} =~ name &&
          (uri = URI.parse(name)).respond_to?(:open)
      uri.open(*rest, &block)
    else
      open_uri_original_open(name, *rest, &block)
    end
  end
  module_function :open
end

# OpenURI is an easy-to-use wrapper for net/http, net/https and net/ftp.
#
#== Example
#
# It is possible to open an http, https or ftp URL as though it were a file:
#
#   open("http://www.ruby-lang.org/") {|f|
#     f.each_line {|line| p line}
#   }
#
# The opened file has several getter methods for its meta-information, as
# follows, since it is extended by OpenURI::Meta.
#
#   open("http://www.ruby-lang.org/en") {|f|
#     f.each_line {|line| p line}
#     p f.base_uri         # <URI::HTTP:0x40e6ef2 URL:http://www.ruby-lang.org/en/>
#     p f.content_type     # "text/html"
#     p f.charset          # "iso-8859-1"
#     p f.content_encoding # []
#     p f.last_modified    # Thu Dec 05 02:45:02 UTC 2002
#   }
#
# Additional header fields can be specified by an optional hash argument.
#
#   open("http://www.ruby-lang.org/en/",
#     "User-Agent" => "Ruby/#{RUBY_VERSION}",
#     "From" => "foo@bar.invalid",
#     "Referer" => "http://www.ruby-lang.org/") {|f|
#     # ...
#   }
#
# The environment variables such as http_proxy, https_proxy and ftp_proxy
# are in effect by default. Passing :proxy => nil disables the proxy.
#
#   open("http://www.ruby-lang.org/en/raa.html", :proxy => nil) {|f|
#     # ...
#   }
#
# URI objects can be opened in a similar way.
#
#   uri = URI.parse("http://www.ruby-lang.org/en/")
#   uri.open {|f|
#     # ...
#   }
#
# URI objects can be read directly. The returned string is also extended by
# OpenURI::Meta.
#
#   str = uri.read
#   p str.base_uri
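#
# An HTTP error response (e.g. 404) raises OpenURI::HTTPError; a minimal
# sketch of handling it (the URL path is only illustrative):
#
#   begin
#     open("http://www.ruby-lang.org/no-such-page") {|f| p f.status }
#   rescue OpenURI::HTTPError => e
#     p e.io.status  # e.g. ["404", "Not Found"]
#   end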
#
# Author:: Tanaka Akira <akr@m17n.org>

module OpenURI
  Options = {
    :proxy => true,
    :proxy_http_basic_authentication => true,
    :progress_proc => true,
    :content_length_proc => true,
    :http_basic_authentication => true,
    :read_timeout => true,
    :ssl_ca_cert => nil,
    :ssl_verify_mode => nil,
    :ftp_active_mode => false,
    :redirect => true,
  }

  def OpenURI.check_options(options) # :nodoc:
    options.each {|k, v|
      next unless Symbol === k
      unless Options.include? k
        raise ArgumentError, "unrecognized option: #{k}"
      end
    }
  end

  def OpenURI.scan_open_optional_arguments(*rest) # :nodoc:
    if !rest.empty? && (String === rest.first || Integer === rest.first)
      mode = rest.shift
      if !rest.empty? && Integer === rest.first
        perm = rest.shift
      end
    end
    return mode, perm, rest
  end

  def OpenURI.open_uri(name, *rest) # :nodoc:
    uri = URI::Generic === name ? name : URI.parse(name)
    mode, _, rest = OpenURI.scan_open_optional_arguments(*rest)
    options = rest.shift if !rest.empty? && Hash === rest.first
    raise ArgumentError.new("extra arguments") if !rest.empty?
    options ||= {}
    OpenURI.check_options(options)

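    # A mode string such as "r", "rb" or "rb:utf-8" is accepted; the part
    # after the colon, if any, selects the encoding of the returned IO
    # ("utf-8" here is only an example).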
    if /\Arb?(?:\Z|:([^:]+))/ =~ mode
      encoding, = $1, Encoding.find($1) if $1
      mode = nil
    end

    unless mode == nil ||
           mode == 'r' || mode == 'rb' ||
           mode == File::RDONLY
      raise ArgumentError.new("invalid access mode #{mode} (#{uri.class} resource is read only.)")
    end

    io = open_loop(uri, options)
    io.set_encoding(encoding) if encoding
    if block_given?
      begin
        yield io
      ensure
        io.close
      end
    else
      io
    end
  end

  def OpenURI.open_loop(uri, options) # :nodoc:
    proxy_opts = []
    proxy_opts << :proxy_http_basic_authentication if options.include? :proxy_http_basic_authentication
    proxy_opts << :proxy if options.include? :proxy
    proxy_opts.compact!
    if 1 < proxy_opts.length
      raise ArgumentError, "multiple proxy options specified"
    end
    case proxy_opts.first
    when :proxy_http_basic_authentication
      opt_proxy, proxy_user, proxy_pass = options.fetch(:proxy_http_basic_authentication)
      proxy_user = proxy_user.to_str
      proxy_pass = proxy_pass.to_str
      if opt_proxy == true
        raise ArgumentError.new("Invalid authenticated proxy option: #{options[:proxy_http_basic_authentication].inspect}")
      end
    when :proxy
      opt_proxy = options.fetch(:proxy)
      proxy_user = nil
      proxy_pass = nil
    when nil
      opt_proxy = true
      proxy_user = nil
      proxy_pass = nil
    end
    case opt_proxy
    when true
      find_proxy = lambda {|u| pxy = u.find_proxy; pxy ? [pxy, nil, nil] : nil}
    when nil, false
      find_proxy = lambda {|u| nil}
    when String
      opt_proxy = URI.parse(opt_proxy)
      find_proxy = lambda {|u| [opt_proxy, proxy_user, proxy_pass]}
    when URI::Generic
      find_proxy = lambda {|u| [opt_proxy, proxy_user, proxy_pass]}
    else
      raise ArgumentError.new("Invalid proxy option: #{opt_proxy}")
    end

    uri_set = {}
    buf = nil
    while true
      redirect = catch(:open_uri_redirect) {
        buf = Buffer.new
        uri.buffer_open(buf, find_proxy.call(uri), options)
        nil
      }
      if redirect
        if redirect.relative?
          # Although it violates RFC2616, the Location: field may contain a
          # relative URI.  It is converted to an absolute URI using uri as
          # the base URI.
          redirect = uri + redirect
        end
        if !options.fetch(:redirect, true)
          raise HTTPRedirect.new(buf.io.status.join(' '), buf.io, redirect)
        end
        unless OpenURI.redirectable?(uri, redirect)
          raise "redirection forbidden: #{uri} -> #{redirect}"
        end
        if options.include? :http_basic_authentication
          # send authentication only for the URI directly specified.
          options = options.dup
          options.delete :http_basic_authentication
        end
        uri = redirect
        raise "HTTP redirection loop: #{uri}" if uri_set.include? uri.to_s
        uri_set[uri.to_s] = true
      else
        break
      end
    end
    io = buf.io
    io.base_uri = uri
    io
  end

  def OpenURI.redirectable?(uri1, uri2) # :nodoc:
    # This test is intended to forbid a redirection from http://... to
    # file:///etc/passwd, file:///dev/zero, etc. CVE-2011-1521
    # https to http redirect is also forbidden intentionally.
    # It avoids sending a secure cookie or referer over non-secure HTTP.
    # (RFC 2109 4.3.1, RFC 2965 3.3, RFC 2616 15.1.3)
    # However this is ad hoc; it should be extensible/configurable.
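    # In short (a descriptive note): same-scheme redirects are allowed, as
    # are redirects between http and ftp; everything else, including
    # https -> http, is rejected here.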
    uri1.scheme.downcase == uri2.scheme.downcase ||
    (/\A(?:http|ftp)\z/i =~ uri1.scheme && /\A(?:http|ftp)\z/i =~ uri2.scheme)
  end

  def OpenURI.open_http(buf, target, proxy, options) # :nodoc:
    if proxy
      proxy_uri, proxy_user, proxy_pass = proxy
      raise "Non-HTTP proxy URI: #{proxy_uri}" if proxy_uri.class != URI::HTTP
    end

    if target.userinfo && "1.9.0" <= RUBY_VERSION
      # don't raise for 1.8, for compatibility.
      raise ArgumentError, "userinfo not supported. [RFC3986]"
    end

    header = {}
    options.each {|k, v| header[k] = v if String === k }

    require 'net/http'
    klass = Net::HTTP
    if URI::HTTP === target
      # HTTP or HTTPS
      if proxy
        if proxy_user && proxy_pass
          klass = Net::HTTP::Proxy(proxy_uri.hostname, proxy_uri.port, proxy_user, proxy_pass)
        else
          klass = Net::HTTP::Proxy(proxy_uri.hostname, proxy_uri.port)
        end
      end
      target_host = target.hostname
      target_port = target.port
      request_uri = target.request_uri
    else
      # FTP over HTTP proxy
      target_host = proxy_uri.hostname
      target_port = proxy_uri.port
      request_uri = target.to_s
      if proxy_user && proxy_pass
        header["Proxy-Authorization"] = 'Basic ' + ["#{proxy_user}:#{proxy_pass}"].pack('m').delete("\r\n")
      end
    end

    http = klass.new(target_host, target_port)
    if target.class == URI::HTTPS
      require 'net/https'
      http.use_ssl = true
      http.verify_mode = options[:ssl_verify_mode] || OpenSSL::SSL::VERIFY_PEER
      store = OpenSSL::X509::Store.new
      if options[:ssl_ca_cert]
        if File.directory? options[:ssl_ca_cert]
          store.add_path options[:ssl_ca_cert]
        else
          store.add_file options[:ssl_ca_cert]
        end
      else
        store.set_default_paths
      end
      http.cert_store = store
    end
    if options.include? :read_timeout
      http.read_timeout = options[:read_timeout]
    end

    resp = nil
    http.start {
      req = Net::HTTP::Get.new(request_uri, header)
      if options.include? :http_basic_authentication
        user, pass = options[:http_basic_authentication]
        req.basic_auth user, pass
      end
      http.request(req) {|response|
        resp = response
        if options[:content_length_proc] && Net::HTTPSuccess === resp
          if resp.key?('Content-Length')
            options[:content_length_proc].call(resp['Content-Length'].to_i)
          else
            options[:content_length_proc].call(nil)
          end
        end
        resp.read_body {|str|
          buf << str
          if options[:progress_proc] && Net::HTTPSuccess === resp
            options[:progress_proc].call(buf.size)
          end
        }
      }
    }
    io = buf.io
    io.rewind
    io.status = [resp.code, resp.message]
    resp.each {|name,value| buf.io.meta_add_field name, value }
    case resp
    when Net::HTTPSuccess
    when Net::HTTPMovedPermanently, # 301
         Net::HTTPFound, # 302
         Net::HTTPSeeOther, # 303
         Net::HTTPTemporaryRedirect # 307
      begin
        loc_uri = URI.parse(resp['location'])
      rescue URI::InvalidURIError
        raise OpenURI::HTTPError.new(io.status.join(' ') + ' (Invalid Location URI)', io)
      end
      throw :open_uri_redirect, loc_uri
    else
      raise OpenURI::HTTPError.new(io.status.join(' '), io)
    end
  end

  class HTTPError < StandardError
    def initialize(message, io)
      super(message)
      @io = io
    end
    attr_reader :io
  end

  class HTTPRedirect < HTTPError
    def initialize(message, io, uri)
      super(message, io)
      @uri = uri
    end
    attr_reader :uri
  end

  class Buffer # :nodoc:
    def initialize
      @io = StringIO.new
      @size = 0
    end
    attr_reader :size

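    # Data is buffered in memory (a StringIO) until it exceeds StringMax
    # bytes; after that it is spilled into a Tempfile (see #<< below).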
    StringMax = 10240
    def <<(str)
      @io << str
      @size += str.length
      if StringIO === @io && StringMax < @size
        require 'tempfile'
        io = Tempfile.new('open-uri')
        io.binmode
        Meta.init io, @io if Meta === @io
        io << @io.string
        @io = io
      end
    end

    def io
      Meta.init @io unless Meta === @io
      @io
    end
  end

  # Mixin for holding meta-information.
  module Meta
    def Meta.init(obj, src=nil) # :nodoc:
      obj.extend Meta
      obj.instance_eval {
        @base_uri = nil
        @meta = {}
      }
      if src
        obj.status = src.status
        obj.base_uri = src.base_uri
        src.meta.each {|name, value|
          obj.meta_add_field(name, value)
        }
      end
    end

    # returns an Array that consists of status code and message.
    attr_accessor :status

    # returns a URI that is the base of relative URIs in the data.
    # It may differ from the URI supplied by a user due to redirection.
    attr_accessor :base_uri

    # returns a Hash that represents header fields.
    # The Hash keys are downcased for canonicalization.
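    #
    # For example (the header value is only illustrative):
    #
    #   f.meta['last-modified']  #=> "Thu, 05 Dec 2002 02:45:02 GMT"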
    attr_reader :meta

    def meta_setup_encoding # :nodoc:
      charset = self.charset
      enc = nil
      if charset
        begin
          enc = Encoding.find(charset)
        rescue ArgumentError
        end
      end
      enc = Encoding::ASCII_8BIT unless enc
      if self.respond_to? :force_encoding
        self.force_encoding(enc)
      elsif self.respond_to? :string
        self.string.force_encoding(enc)
      else # Tempfile
        self.set_encoding enc
      end
    end

    def meta_add_field(name, value) # :nodoc:
      name = name.downcase
      @meta[name] = value
      meta_setup_encoding if name == 'content-type'
    end

    # returns a Time that represents the Last-Modified field.
    def last_modified
      if v = @meta['last-modified']
        Time.httpdate(v)
      else
        nil
      end
    end

    RE_LWS = /[\r\n\t ]+/n
    RE_TOKEN = %r{[^\x00- ()<>@,;:\\"/\[\]?={}\x7f]+}n
    RE_QUOTED_STRING = %r{"(?:[\r\n\t !#-\[\]-~\x80-\xff]|\\[\x00-\x7f])*"}n
    RE_PARAMETERS = %r{(?:;#{RE_LWS}?#{RE_TOKEN}#{RE_LWS}?=#{RE_LWS}?(?:#{RE_TOKEN}|#{RE_QUOTED_STRING})#{RE_LWS}?)*}n

    def content_type_parse # :nodoc:
      v = @meta['content-type']
      # The last (?:;#{RE_LWS}?)? matches an extra ";" which violates RFC 2045.
      if v && %r{\A#{RE_LWS}?(#{RE_TOKEN})#{RE_LWS}?/(#{RE_TOKEN})#{RE_LWS}?(#{RE_PARAMETERS})(?:;#{RE_LWS}?)?\z}no =~ v
        type = $1.downcase
        subtype = $2.downcase
        parameters = []
        $3.scan(/;#{RE_LWS}?(#{RE_TOKEN})#{RE_LWS}?=#{RE_LWS}?(?:(#{RE_TOKEN})|(#{RE_QUOTED_STRING}))/no) {|att, val, qval|
          if qval
            val = qval[1...-1].gsub(/[\r\n\t !#-\[\]-~\x80-\xff]+|(\\[\x00-\x7f])/n) { $1 ? $1[1,1] : $& }
          end
          parameters << [att.downcase, val]
        }
        ["#{type}/#{subtype}", *parameters]
      else
        nil
      end
    end

    # returns "type/subtype" which is MIME Content-Type.
    # It is downcased for canonicalization.
    # Content-Type parameters are stripped.
    def content_type
      type, *_ = content_type_parse
      type || 'application/octet-stream'
    end

    # returns a charset parameter in Content-Type field.
    # It is downcased for canonicalization.
    #
    # If the charset parameter is not given but a block is given,
    # the block is called and its result is returned.
    # It can be used to guess the charset.
    #
    # If neither the charset parameter nor a block is given,
    # nil is returned except for text types in HTTP.
    # In that case, "iso-8859-1" is returned as defined by RFC2616 3.7.1.
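    #
    # For example, a hedged sketch of guessing a charset when the server does
    # not send one (the fallback value is only an example):
    #
    #   charset = f.charset { "utf-8" }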
    def charset
      type, *parameters = content_type_parse
      if pair = parameters.assoc('charset')
        pair.last.downcase
      elsif block_given?
        yield
      elsif type && %r{\Atext/} =~ type &&
            @base_uri && /\Ahttp\z/i =~ @base_uri.scheme
        "iso-8859-1" # RFC2616 3.7.1
      else
        nil
      end
    end

    # returns a list of encodings in Content-Encoding field
    # as an Array of String.
    # The encodings are downcased for canonicalization.
    def content_encoding
      v = @meta['content-encoding']
      if v && %r{\A#{RE_LWS}?#{RE_TOKEN}#{RE_LWS}?(?:,#{RE_LWS}?#{RE_TOKEN}#{RE_LWS}?)*}o =~ v
        v.scan(RE_TOKEN).map {|content_coding| content_coding.downcase}
      else
        []
      end
    end
  end

  # Mixin for HTTP and FTP URIs.
  module OpenRead
    # OpenURI::OpenRead#open provides `open' for URI::HTTP and URI::FTP.
    #
    # OpenURI::OpenRead#open takes 3 optional arguments:
    #
    #   OpenURI::OpenRead#open([mode [, perm]] [, options]) [{|io| ... }]
    #
    # OpenURI::OpenRead#open returns an IO-like object if a block is not given.
    # Otherwise it yields the IO object and returns the value of the block.
    # The IO object is extended with OpenURI::Meta.
    #
    # +mode+ and +perm+ are the same as Kernel#open.
    #
    # However, +mode+ must be read mode because OpenURI::OpenRead#open doesn't
    # support write mode (yet).
    # Also +perm+ is ignored because it is meaningful only for file creation.
    #
    # +options+ must be a hash.
    #
    # Each option with a string key specifies an extra header field for HTTP.
    # I.e., it is ignored for FTP without an HTTP proxy.
    #
    # The hash may include other options, where keys are symbols:
    #
    # [:proxy]
    #  Synopsis:
    #    :proxy => "http://proxy.foo.com:8000/"
    #    :proxy => URI.parse("http://proxy.foo.com:8000/")
    #    :proxy => true
    #    :proxy => false
    #    :proxy => nil
    #
    #  If the :proxy option is specified, the value should be String, URI,
    #  boolean or nil.
    #
    #  When String or URI is given, it is treated as the proxy URI.
    #
    #  When true is given or the option itself is not specified,
    #  environment variable `scheme_proxy' is examined.
    #  `scheme' is replaced by `http', `https' or `ftp'.
    #
    #  When false or nil is given, the environment variables are ignored and
    #  connection will be made to a server directly.
    #
    # [:proxy_http_basic_authentication]
    #  Synopsis:
    #    :proxy_http_basic_authentication =>
    #      ["http://proxy.foo.com:8000/", "proxy-user", "proxy-password"]
    #    :proxy_http_basic_authentication =>
    #      [URI.parse("http://proxy.foo.com:8000/"),
    #       "proxy-user", "proxy-password"]
    #
    #  If the :proxy_http_basic_authentication option is specified, the value
    #  should be an Array with 3 elements.  It should contain a proxy URI, a
    #  proxy user name and a proxy password.  The proxy URI should be a
    #  String, an URI or nil.  The proxy user name and password should be a
    #  String.
    #
    #  If nil is given for the proxy URI, this option is just ignored.
    #
    #  If both :proxy and :proxy_http_basic_authentication are specified,
    #  ArgumentError is raised.
    #
    # [:http_basic_authentication]
    #  Synopsis:
    #    :http_basic_authentication=>[user, password]
    #
    #  If :http_basic_authentication is specified,
    #  the value should be an array which contains 2 strings:
    #  username and password.
    #  It is used for HTTP Basic authentication defined by RFC 2617.
    #  (A combined example follows this list.)
    #
    # [:content_length_proc]
    #  Synopsis:
    #    :content_length_proc => lambda {|content_length| ... }
    #
    #  If the :content_length_proc option is specified, the option value
    #  procedure is called before the actual transfer is started.
    #  It takes one argument, which is the expected content length in bytes.
    #
    #  If two or more transfers are done by HTTP redirection, the procedure
    #  is called only once, for the last transfer.
    #
    #  When the expected content length is unknown, the procedure is called
    #  with nil.  This happens when the HTTP response has no Content-Length
    #  header.
    #
    # [:progress_proc]
    #  Synopsis:
    #    :progress_proc => lambda {|size| ...}
    #
    #  If the :progress_proc option is specified, the proc is called with one
    #  argument each time `open' gets a content fragment from the network.
    #  The argument +size+ is the accumulated transferred size in bytes.
    #
    #  If two or more transfers are done by HTTP redirection, the procedure
    #  is called only once, for the last transfer.
    #
    #  :progress_proc and :content_length_proc are intended to be used for a
    #  progress bar.
    #  For example, it can be implemented as follows using Ruby/ProgressBar.
    #
    #    pbar = nil
    #    open("http://...",
    #      :content_length_proc => lambda {|t|
    #        if t && 0 < t
    #          pbar = ProgressBar.new("...", t)
    #          pbar.file_transfer_mode
    #        end
    #      },
    #      :progress_proc => lambda {|s|
    #        pbar.set s if pbar
    #      }) {|f| ... }
    #
    # [:read_timeout]
    #  Synopsis:
    #    :read_timeout=>nil     (no timeout)
    #    :read_timeout=>10      (10 seconds)
    #
    #  The :read_timeout option specifies the read timeout for HTTP
    #  connections.
    #
    # [:ssl_ca_cert]
    #  Synopsis:
    #    :ssl_ca_cert=>filename
    #
    #  :ssl_ca_cert is used to specify the CA certificate for SSL.
    #  If it is given, default certificates are not used.
    #
    # [:ssl_verify_mode]
    #  Synopsis:
    #    :ssl_verify_mode=>mode
    #
    #  :ssl_verify_mode is used to specify the OpenSSL verify mode.
    #
    # [:ftp_active_mode]
    #  Synopsis:
    #    :ftp_active_mode=>bool
    #
    #  <tt>:ftp_active_mode => true</tt> is used to make ftp use active mode.
    #  Ruby 1.9 uses passive mode by default.
    #  Note that active mode is the default in Ruby 1.8 or prior.
    #
    # [:redirect]
    #  Synopsis:
    #    :redirect=>bool
    #
    #  +:redirect+ is true by default. <tt>:redirect => false</tt> is used to
    #  disable all HTTP redirects.
    #
    #  An OpenURI::HTTPRedirect exception is raised on redirection.
    #  Using +true+ also means that redirections between http and ftp are
    #  permitted.
    #
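    # A combined example, as mentioned under :http_basic_authentication
    # (a sketch; the URL, credentials and timeout are only illustrative):
    #
    #   URI.parse("http://www.ruby-lang.org/").open(
    #     :http_basic_authentication => ["user", "password"],
    #     :read_timeout => 10) {|f|
    #     # ...
    #   }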
    def open(*rest, &block)
      OpenURI.open_uri(self, *rest, &block)
    end

    # OpenURI::OpenRead#read([options]) reads the content referenced by self
    # and returns the content as a string.
    # The string is extended with OpenURI::Meta.
    # The argument +options+ is the same as for OpenURI::OpenRead#open.
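    #
    # For example (a sketch; the header value is only illustrative):
    #
    #   str = URI.parse("http://www.ruby-lang.org/").read("User-Agent" => "Ruby")
    #   p str.content_type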
    def read(options={})
      self.open(options) {|f|
        str = f.read
        Meta.init str, f
        str
      }
    end
  end
end

module URI
  class Generic
    # returns a proxy URI.
    # The proxy URI is obtained from environment variables such as http_proxy,
    # ftp_proxy, no_proxy, etc.
    # If there is no proper proxy, nil is returned.
    #
    # Note that capitalized variables (HTTP_PROXY, FTP_PROXY, NO_PROXY, etc.)
    # are examined too.
    #
    # But http_proxy and HTTP_PROXY are treated specially in a CGI environment.
    # This is because HTTP_PROXY may be set by the Proxy: header.
    # So HTTP_PROXY is not used.
    # http_proxy is not used either if the variable name is case insensitive.
    # CGI_HTTP_PROXY can be used instead.
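    #
    # For example (a sketch; the proxy host is only illustrative):
    #
    #   ENV['http_proxy'] = "http://proxy.example.org:8080/"
    #   URI.parse("http://www.ruby-lang.org/").find_proxy
    #   #=> URI for http://proxy.example.org:8080/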
    def find_proxy
      name = self.scheme.downcase + '_proxy'
      proxy_uri = nil
      if name == 'http_proxy' && ENV.include?('REQUEST_METHOD') # CGI?
        # HTTP_PROXY conflicts with *_proxy for proxy settings and
        # HTTP_* for header information in CGI.
        # So it must be used carefully.
        pairs = ENV.reject {|k, v| /\Ahttp_proxy\z/i !~ k }
        case pairs.length
        when 0 # no proxy setting anyway.
          proxy_uri = nil
        when 1
          k, _ = pairs.shift
          if k == 'http_proxy' && ENV[k.upcase] == nil
            # http_proxy is safe to use because ENV is case sensitive.
            proxy_uri = ENV[name]
          else
            proxy_uri = nil
          end
        else # http_proxy is safe to use because ENV is case sensitive.
          proxy_uri = ENV.to_hash[name]
        end
        if !proxy_uri
          # Use CGI_HTTP_PROXY. cf. libwww-perl.
          proxy_uri = ENV["CGI_#{name.upcase}"]
        end
      elsif name == 'http_proxy'
        unless proxy_uri = ENV[name]
          if proxy_uri = ENV[name.upcase]
            warn 'The environment variable HTTP_PROXY is discouraged. Use http_proxy.'
          end
        end
      else
        proxy_uri = ENV[name] || ENV[name.upcase]
      end

      if proxy_uri && self.hostname
        require 'socket'
        begin
          addr = IPSocket.getaddress(self.hostname)
          proxy_uri = nil if /\A127\.|\A::1\z/ =~ addr
        rescue SocketError
        end
      end

      if proxy_uri
        proxy_uri = URI.parse(proxy_uri)
        name = 'no_proxy'
        if no_proxy = ENV[name] || ENV[name.upcase]
          no_proxy.scan(/([^:,]*)(?::(\d+))?/) {|host, port|
            if /(\A|\.)#{Regexp.quote host}\z/i =~ self.host &&
               (!port || self.port == port.to_i)
              proxy_uri = nil
              break
            end
          }
        end
        proxy_uri
      else
        nil
      end
    end
  end

  class HTTP
    def buffer_open(buf, proxy, options) # :nodoc:
      OpenURI.open_http(buf, self, proxy, options)
    end

    include OpenURI::OpenRead
  end

  class FTP
    def buffer_open(buf, proxy, options) # :nodoc:
      if proxy
        OpenURI.open_http(buf, self, proxy, options)
        return
      end
      require 'net/ftp'

      path = self.path
      path = path.sub(%r{\A/}, '%2F') # re-encode the beginning slash because uri library decodes it.
      directories = path.split(%r{/}, -1)
      directories.each {|d|
        d.gsub!(/%([0-9A-Fa-f][0-9A-Fa-f])/) { [$1].pack("H2") }
      }
      unless filename = directories.pop
        raise ArgumentError, "no filename: #{self.inspect}"
      end
      directories.each {|d|
        if /[\r\n]/ =~ d
          raise ArgumentError, "invalid directory: #{d.inspect}"
        end
      }
      if /[\r\n]/ =~ filename
        raise ArgumentError, "invalid filename: #{filename.inspect}"
      end
      typecode = self.typecode
      if typecode && /\A[aid]\z/ !~ typecode
        raise ArgumentError, "invalid typecode: #{typecode.inspect}"
      end

      # The access sequence is defined by RFC 1738
      ftp = Net::FTP.new
      ftp.connect(self.hostname, self.port)
      ftp.passive = true if !options[:ftp_active_mode]
      # todo: extract user/passwd from .netrc.
      user = 'anonymous'
      passwd = nil
      user, passwd = self.userinfo.split(/:/) if self.userinfo
      ftp.login(user, passwd)
      directories.each {|cwd|
        ftp.voidcmd("CWD #{cwd}")
      }
      if typecode
        # xxx: typecode D is not handled.
        ftp.voidcmd("TYPE #{typecode.upcase}")
      end
      if options[:content_length_proc]
        options[:content_length_proc].call(ftp.size(filename))
      end
      ftp.retrbinary("RETR #{filename}", 4096) { |str|
        buf << str
        options[:progress_proc].call(buf.size) if options[:progress_proc]
      }
      ftp.close
      buf.io.rewind
    end

    include OpenURI::OpenRead
  end
end