mirror of
https://github.com/ruby/ruby.git
synced 2022-11-09 12:17:21 -05:00
f61ee674d8
- In one of the cases, filenames were checked for ending with "gz" - this is changed to check for ending with ".gz" - The change was made to make it even easier to read the code, and to match only from the start of the input (as opposed to start of the line) https://github.com/rubygems/rubygems/commit/aac4290271
345 lines
9.2 KiB
Ruby
345 lines
9.2 KiB
Ruby
# frozen_string_literal: true
|
|
require 'rubygems'
|
|
require 'rubygems/request'
|
|
require 'rubygems/request/connection_pools'
|
|
require 'rubygems/s3_uri_signer'
|
|
require 'rubygems/uri_formatter'
|
|
require 'rubygems/uri_parsing'
|
|
require 'rubygems/user_interaction'
|
|
require 'resolv'
|
|
|
|
##
# RemoteFetcher handles the details of fetching gems and gem information from
# a remote source.

class Gem::RemoteFetcher
  # Verbose output helpers (e.g. #verbose used while downloading).
  include Gem::UserInteraction
  # Provides #parse_uri, used to normalize incoming URI arguments.
  include Gem::UriParsing
|
|
|
|
##
# A FetchError exception wraps up the various possible IO and HTTP failures
# that could happen while downloading from the internet.

class FetchError < Gem::Exception
  include Gem::UriParsing

  ##
  # The URI which was being accessed when the exception happened.

  attr_accessor :uri

  def initialize(message, uri)
    super message

    parsed = parse_uri(uri)

    # Strip credentials so they never leak into logs or error output.
    parsed.password = 'REDACTED' if parsed.respond_to?(:password) && parsed.password

    @uri = parsed.to_s
  end

  def to_s # :nodoc:
    "#{super} (#{uri})"
  end
end
|
|
|
|
##
# A FetchError that indicates that the reason for not being
# able to fetch data was that the host could not be contacted
# (raised from fetch_path on timeouts and getaddrinfo/DNS failures).

class UnknownHostError < FetchError
end
|
|
|
|
# Cache slot for the singleton instance built by ::fetcher.
@fetcher = nil

##
# Cached RemoteFetcher instance.

def self.fetcher
  @fetcher ||= new(Gem.configuration[:http_proxy])
end

# Extra HTTP headers sent with every request made by this fetcher.
attr_accessor :headers
|
|
|
|
##
# Initialize a remote fetcher using the source URI and possible proxy
# information.
#
# +proxy+
# * [String]: explicit specification of proxy; overrides any environment
#   variable setting
# * nil: respect environment variables (HTTP_PROXY, HTTP_PROXY_USER,
#   HTTP_PROXY_PASS)
# * <tt>:no_proxy</tt>: ignore environment variables and _don't_ use a proxy
#
# +headers+: A set of additional HTTP headers to be sent to the server when
# fetching the gem.

def initialize(proxy=nil, dns=nil, headers={})
  # Heavyweight stdlib is required here, on first instantiation, rather
  # than at file load time.
  require 'net/http'
  require 'stringio'
  require 'time'
  require 'uri'

  Socket.do_not_reverse_lookup = true

  @proxy = proxy
  @headers = headers

  # Connection pools are created lazily per proxy; see #pools_for.
  @pools = {}
  @pool_lock = Mutex.new
  @cert_files = Gem::Request.get_cert_files
end
|
|
|
|
##
# Given a name and requirement, downloads this gem into cache and returns the
# filename. Returns nil if the gem cannot be located.
#--
# Should probably be integrated with #download below, but that will be a
# larger, more encompassing effort. -erikh

def download_to_cache(dependency)
  matches, _errors = Gem::SpecFetcher.fetcher.spec_for_dependency dependency

  return if matches.empty?

  # Of all specs satisfying the dependency, take the highest version.
  newest_spec, newest_source = matches.max_by {|(spec, _)| spec.version }

  download newest_spec, newest_source.uri
end
|
|
|
|
##
# Moves the gem +spec+ from +source_uri+ to the cache dir unless it is
# already there. If the source_uri is local the gem cache dir copy is
# always replaced. Returns the path of the local copy of the gem.

def download(spec, source_uri, install_dir = Gem.dir)
  # Pick the cache directory: the CWD (fetch command case), the install
  # dir's cache when writable, otherwise the per-user cache.
  cache_dir =
    if Dir.pwd == install_dir # see fetch_command
      install_dir
    elsif File.writable? install_dir
      File.join install_dir, "cache"
    else
      File.join Gem.user_dir, "cache"
    end

  gem_file_name = File.basename spec.cache_file
  local_gem_path = File.join cache_dir, gem_file_name

  # Best-effort: ignore mkdir failures here; the copy/fetch below will
  # surface any real permission problem.
  FileUtils.mkdir_p cache_dir rescue nil unless File.exist? cache_dir

  source_uri = parse_uri(source_uri)

  scheme = source_uri.scheme

  # URI.parse gets confused by MS Windows paths with forward slashes.
  scheme = nil if scheme =~ /^[a-z]$/i

  # REFACTOR: split this up and dispatch on scheme (eg download_http)
  # REFACTOR: be sure to clean up fake fetcher when you do this... cleaner
  case scheme
  when 'http', 'https', 's3' then
    unless File.exist? local_gem_path
      begin
        verbose "Downloading gem #{gem_file_name}"

        remote_gem_path = source_uri + "gems/#{gem_file_name}"

        self.cache_update_path remote_gem_path, local_gem_path
      rescue FetchError
        raise if spec.original_platform == spec.platform

        # Retry under the spec's original (pre-normalization) file name.
        alternate_name = "#{spec.original_name}.gem"

        verbose "Failed, downloading gem #{alternate_name}"

        remote_gem_path = source_uri + "gems/#{alternate_name}"

        self.cache_update_path remote_gem_path, local_gem_path
      end
    end
  when 'file' then
    begin
      path = source_uri.path
      path = File.dirname(path) if File.extname(path) == '.gem'

      remote_gem_path = Gem::Util.correct_for_windows_path(File.join(path, 'gems', gem_file_name))

      FileUtils.cp(remote_gem_path, local_gem_path)
    rescue Errno::EACCES
      # Cache dir is not writable; fall back to serving the gem from its
      # source location.
      local_gem_path = source_uri.to_s
    end

    verbose "Using local gem #{local_gem_path}"
  when nil then # TODO test for local overriding cache
    # A nil scheme means a plain filesystem path, including Windows
    # drive-letter paths whose one-letter "scheme" was cleared above.
    source_path = if Gem.win_platform? && source_uri.scheme &&
                     !source_uri.path.include?(':')
                    "#{source_uri.scheme}:#{source_uri.path}"
                  else
                    source_uri.path
                  end

    source_path = Gem::UriFormatter.new(source_path).unescape

    begin
      FileUtils.cp source_path, local_gem_path unless
        File.identical?(source_path, local_gem_path)
    rescue Errno::EACCES
      local_gem_path = source_uri.to_s
    end

    verbose "Using local gem #{local_gem_path}"
  else
    raise ArgumentError, "unsupported URI scheme #{source_uri.scheme}"
  end

  local_gem_path
end
|
|
|
|
##
# File Fetcher. Dispatched by +fetch_path+. Use it instead.

def fetch_file(uri, *_)
  # Extra args (mtime/head) are accepted for dispatch compatibility but
  # have no meaning for local files.
  path = Gem::Util.correct_for_windows_path(uri.path)
  Gem.read_binary(path)
end
|
|
|
|
##
# HTTP Fetcher. Dispatched by +fetch_path+. Use it instead.
#
# +uri+:: the URI to fetch
# +last_modified+:: forwarded to the request as the last-modified time
# +head+:: when true, performs a HEAD request and returns the response
#          object instead of the body
# +depth+:: current redirect depth; aborts after 10 redirects

def fetch_http(uri, last_modified = nil, head = false, depth = 0)
  fetch_type = head ? Net::HTTP::Head : Net::HTTP::Get
  response = request uri, fetch_type, last_modified do |req|
    headers.each {|k,v| req.add_field(k,v) }
  end

  case response
  when Net::HTTPOK, Net::HTTPNotModified then
    response.uri = uri if response.respond_to? :uri
    head ? response : response.body
  # Fix: 308 Permanent Redirect previously fell through to the "bad
  # response" branch; follow it like the other redirect statuses.
  when Net::HTTPMovedPermanently, Net::HTTPFound, Net::HTTPSeeOther,
       Net::HTTPTemporaryRedirect, Net::HTTPPermanentRedirect then
    raise FetchError.new('too many redirects', uri) if depth > 10

    unless location = response['Location']
      raise FetchError.new("redirecting but no redirect location was given", uri)
    end
    location = parse_uri location

    # Never silently downgrade from https to http while redirecting.
    if https?(uri) && !https?(location)
      raise FetchError.new("redirecting to non-https resource: #{location}", uri)
    end

    fetch_http(location, last_modified, head, depth + 1)
  else
    raise FetchError.new("bad response #{response.message} #{response.code}", uri)
  end
end

alias :fetch_https :fetch_http
|
|
|
|
##
# Downloads +uri+ and returns it as a String.
#
# +mtime+ is forwarded to the scheme-specific fetcher as the
# last-modified time. When +head+ is true the response object is
# returned instead of the body. Payloads from ".gz" URIs are
# transparently gunzipped.
#
# Raises UnknownHostError on timeouts and DNS failures, FetchError on
# other IO/HTTP problems, ArgumentError when +uri+ has no scheme.

def fetch_path(uri, mtime = nil, head = false)
  uri = parse_uri uri

  unless uri.scheme
    raise ArgumentError, "uri scheme is invalid: #{uri.scheme.inspect}"
  end

  # Dispatch by scheme to fetch_http / fetch_https / fetch_file / fetch_s3.
  data = send "fetch_#{uri.scheme}", uri, mtime, head

  if data and !head and uri.to_s.end_with?(".gz")
    begin
      data = Gem::Util.gunzip data
    rescue Zlib::GzipFile::Error
      raise FetchError.new("server did not return a valid file", uri)
    end
  end

  data
rescue Timeout::Error
  raise UnknownHostError.new('timed out', uri)
rescue IOError, SocketError, SystemCallError,
       *(OpenSSL::SSL::SSLError if defined?(OpenSSL)) => e
  # getaddrinfo failures indicate the host name could not be resolved.
  if e.message =~ /getaddrinfo/
    raise UnknownHostError.new('no such name', uri)
  else
    raise FetchError.new("#{e.class}: #{e}", uri)
  end
end
|
|
|
|
# S3 Fetcher. Dispatched by +fetch_path+. Signs the S3 URI, then fetches
# the signed URL over HTTPS. Signing-configuration problems are surfaced
# as FetchError (with credentials omitted from the reported URI).
def fetch_s3(uri, mtime = nil, head = false)
  begin
    signed_uri = s3_uri_signer(uri).sign
  rescue Gem::S3URISigner::ConfigurationError, Gem::S3URISigner::InstanceProfileError => e
    raise FetchError.new(e.message, "s3://#{uri.host}")
  end
  fetch_https signed_uri, mtime, head
end
|
|
|
|
# we have our own signing code here to avoid a dependency on the aws-sdk gem
# Builds the signer that #fetch_s3 uses to sign +uri+.
def s3_uri_signer(uri)
  Gem::S3URISigner.new(uri)
end
|
|
|
|
##
# Downloads +uri+ to +path+ if necessary. If no path is given, it just
# passes the data.

def cache_update_path(uri, path = nil, update = true)
  # Best-effort: a nil path or a missing/unreadable file simply means
  # "no known mtime".
  mtime = path && File.stat(path).mtime rescue nil

  data = fetch_path(uri, mtime)

  # A nil body indicates the server returned 304 Not Modified, so the
  # copy already on disk is still current.
  return Gem.read_binary(path) if data.nil?

  Gem.write_binary(path, data) if update && path

  data
end
|
|
|
|
##
# Performs a Net::HTTP request of type +request_class+ on +uri+ returning
# a Net::HTTP response object. request maintains a table of persistent
# connections to reduce connect overhead.

def request(uri, request_class, last_modified = nil)
  proxy = proxy_for @proxy, uri
  pool = pools_for(proxy).pool_for uri

  # The caller's block (if any) gets a chance to decorate the request,
  # e.g. with extra headers, before it is sent.
  Gem::Request.new(uri, request_class, last_modified, pool).fetch do |req|
    yield req if block_given?
  end
end
|
|
|
|
# True when +uri+ uses the https scheme (compared case-insensitively).
def https?(uri)
  'https' == uri.scheme.downcase
end
|
|
|
|
# Closes every pooled connection across all proxies.
def close_all
  @pools.each_value(&:close_all)
end
|
|
|
|
private

# Resolves the proxy for +uri+: an explicitly supplied +proxy+ wins,
# otherwise the environment is consulted for +uri+'s scheme.
def proxy_for(proxy, uri)
  chosen = proxy || Gem::Request.get_proxy_from_env(uri.scheme)
  Gem::Request.proxy_uri(chosen)
end
|
|
|
|
# Returns the Gem::Request::ConnectionPools for +proxy+, creating it on
# first use. Creation is guarded by @pool_lock so concurrent fetches
# share a single pools object per proxy.
def pools_for(proxy)
  @pool_lock.synchronize do
    @pools[proxy] ||= Gem::Request::ConnectionPools.new proxy, @cert_files
  end
end
|
|
|
|
end
|