
downloader cache

* tool/downloader.rb (Downloader.download): manage download cache.

git-svn-id: svn+ssh://ci.ruby-lang.org/ruby/trunk@58833 b2dd03c8-39d4-4d8f-98ff-823fe69b080e
Author: nobu
Date:   2017-05-21 16:45:35 +00:00
Parent: 2ee57d5ff1
Commit: 48070cef94
4 changed files with 100 additions and 34 deletions
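
What the change amounts to for callers: Downloader.download now honors a
:cache_dir option (falling back to the CACHE_DIR environment variable and then
to ".downloaded-cache"), reuses a previously fetched copy via a link when one
exists, and returns the path of the downloaded file instead of a bare
true/false. A rough sketch of the new calling convention, with a made-up URL
and paths, assuming a small script at the top of the source tree:

    require_relative 'tool/downloader'

    # Fetches into .downloaded-cache/example.tar.gz on the first run and
    # reuses that copy on later runs.  `since = nil` means an already
    # existing file is kept as-is.
    path = Downloader.download(
      "https://cache.ruby-lang.org/pub/ruby/example.tar.gz",  # illustrative URL
      nil,   # name: derived from the URL when omitted
      nil,   # destination directory
      nil,   # since
      :cache_dir => ".downloaded-cache"
    )
    puts path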

@@ -1096,20 +1096,19 @@ update-mspec:
 update-rubyspec:
 
 update-config_files: PHONY
-	$(Q) $(BASERUBY) -C "$(srcdir)/tool" \
-	    ../tool/downloader.rb -e gnu \
+	$(Q) $(BASERUBY) -C "$(srcdir)" tool/downloader.rb -d tool -e gnu \
 	    config.guess config.sub
 
 update-gems: PHONY
 	$(ECHO) Downloading bundled gem files...
-	$(Q) $(BASERUBY) -C "$(srcdir)/gems" \
-	    -I../tool -rdownloader -answ \
+	$(Q) $(BASERUBY) -C "$(srcdir)" \
+	    -I./tool -rdownloader -answ \
 	    -e 'gem, ver = *$$F' \
 	    -e 'old = Dir.glob("#{gem}-*.gem")' \
 	    -e 'gem = "#{gem}-#{ver}.gem"' \
-	    -e 'Downloader::RubyGems.download(gem, nil, nil) and' \
+	    -e 'Downloader::RubyGems.download(gem, "gems", nil) and' \
 	    -e 'File.unlink(*(old-[gem]))' \
-	    bundled_gems
+	    gems/bundled_gems
 
 extract-gems: PHONY
 	$(ECHO) Extracting bundled gem files...

@@ -52,7 +52,7 @@ if test "$HAVE_BASERUBY" = yes -a "`RUBYOPT=- $BASERUBY -e 'print 42' 2>/dev/nul
     if test "`RUBYOPT=- $BASERUBY --disable=gems -e 'print 42' 2>/dev/null`" = 42; then
         BASERUBY="$BASERUBY --disable=gems"
     fi
-    $BASERUBY -C "$srcdir/tool" downloader.rb -e gnu config.guess config.sub
+    $BASERUBY -C "$srcdir" tool/downloader.rb -d tool -e gnu config.guess config.sub
 else
     BASERUBY="echo executable host ruby is required. use --with-baseruby option.; false"
     HAVE_BASERUBY=no

@@ -1,7 +1,9 @@
 # Used by configure and make to download or update mirrored Ruby and GCC
 # files. This will use HTTPS if possible, falling back to HTTP.
 
+require 'fileutils'
 require 'open-uri'
+require 'pathname'
 begin
   require 'net/https'
 rescue LoadError
@@ -120,24 +122,35 @@ class Downloader
   #   'UnicodeData.txt', 'enc/unicode/data'
   def self.download(url, name, dir = nil, since = true, options = {})
     options = options.dup
-    file = under(dir, name)
+    url = URI(url)
     dryrun = options.delete(:dryrun)
-    if since.nil? and File.exist?(file)
+    if name
+      file = Pathname.new(under(dir, name))
+    else
+      name = File.basename(url.path)
+    end
+    cache = cache_file(url, name, options.delete(:cache_dir))
+    file ||= cache
+    if since.nil? and file.exist?
       if $VERBOSE
         $stdout.puts "#{file} already exists"
         $stdout.flush
       end
-      return true
+      save_cache(cache, file, name)
+      return file.to_path
     end
     if dryrun
       puts "Download #{url} into #{file}"
-      return false
+      return
     end
-    if !https? and url.start_with?("https:")
+    if link_cache(cache, file, name, $VERBOSE)
+      return file.to_path
+    end
+    if !https? and URI::HTTPS === url
       warn "*** using http instead of https ***"
-      url = url.sub(/\Ahttps/, 'http')
+      url.scheme = 'http'
+      url = URI(url.to_s)
     end
-    url = URI(url)
     if $VERBOSE
       $stdout.print "downloading #{name} ... "
       $stdout.flush
@@ -150,37 +163,40 @@ class Downloader
           $stdout.puts "#{name} not modified"
           $stdout.flush
         end
-        return true
+        return file.to_path
       end
       raise
     rescue Timeout::Error
-      if since.nil? and File.exist?(file)
+      if since.nil? and file.exist?
         puts "Request for #{url} timed out, using old version."
-        return true
+        return file.to_path
       end
       raise
     rescue SocketError
-      if since.nil? and File.exist?(file)
+      if since.nil? and file.exist?
         puts "No network connection, unable to download #{url}, using old version."
-        return true
+        return file.to_path
       end
       raise
     end
     mtime = nil
-    open(file, "wb", 0600) do |f|
+    dest = (cache && !cache.exist? ? cache : file)
+    dest.parent.mkpath
+    dest.open("wb", 0600) do |f|
       f.write(data)
       f.chmod(mode_for(data))
       mtime = data.meta["last-modified"]
     end
     if mtime
       mtime = Time.httpdate(mtime)
-      File.utime(mtime, mtime, file)
+      dest.utime(mtime, mtime)
     end
     if $VERBOSE
       $stdout.puts "done"
       $stdout.flush
     end
-    true
+    save_cache(cache, file, name)
+    return file.to_path
   rescue => e
     raise "failed to download #{name}\n#{e.message}: #{url}"
   end
@@ -188,6 +204,55 @@ class Downloader
   def self.under(dir, name)
     dir ? File.join(dir, File.basename(name)) : name
   end
+
+  def self.cache_file(url, name, cache_dir = nil)
+    case cache_dir
+    when false
+      return nil
+    when nil
+      cache_dir = ENV['CACHE_DIR']
+      if !cache_dir or cache_dir.empty?
+        cache_dir = ".downloaded-cache"
+      end
+    end
+    Pathname.new(cache_dir) + (name || File.basename(URI(url).path))
+  end
+
+  def self.link_cache(cache, file, name, verbose = false)
+    return false unless cache and cache.exist?
+    return true if cache.eql?(file)
+    begin
+      file.make_symlink(cache.relative_path_from(file.parent))
+    rescue SystemCallError
+    else
+      if verbose
+        $stdout.puts "made symlink #{name} to #{cache}"
+        $stdout.flush
+      end
+      return true
+    end
+    begin
+      file.make_link(cache)
+    rescue SystemCallError
+    else
+      if verbose
+        $stdout.puts "made link #{name} to #{cache}"
+        $stdout.flush
+      end
+      return true
+    end
+  end
+
+  def self.save_cache(cache, file, name)
+    if cache and !cache.eql?(file) and !cache.exist?
+      begin
+        file.rename(cache)
+      rescue
+      else
+        link_cache(cache, file, name)
+      end
+    end
+  end
 end
 
 Downloader.https = https.freeze
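
The three new class methods are what the ExtLibs changes below build on:
cache_file picks where a cached copy lives, link_cache exposes an existing
cached copy at the requested destination (a relative symlink first, a hard
link as fallback), and save_cache moves a freshly downloaded file into the
cache and links it back into place. As a quick illustration of how cache_file
resolves its target (the URLs and directories here are made up):

    require_relative 'downloader'   # assuming this snippet lives next to downloader.rb in tool/

    Downloader.cache_file("https://example.com/pkg-1.0.tar.gz", nil)
    #=> #<Pathname:.downloaded-cache/pkg-1.0.tar.gz>  (or $CACHE_DIR if set)

    Downloader.cache_file("https://example.com/pkg-1.0.tar.gz", "pkg.tgz", "tmp/cache")
    #=> #<Pathname:tmp/cache/pkg.tgz>

    Downloader.cache_file("https://example.com/pkg-1.0.tar.gz", nil, false)
    #=> nil   # caching disabled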

@@ -3,13 +3,16 @@
 # Used to download, extract and patch extension libraries (extlibs)
 # for Ruby. See common.mk for Ruby's usage.
 
 require 'fileutils'
 require 'digest'
 require_relative 'downloader'
 
 class ExtLibs
-  def do_download(url, base, cache_dir)
-    Downloader.download(url, base, cache_dir, nil)
+  def cache_file(url, cache_dir)
+    Downloader.cache_file(url, nil, :cache_dir => cache_dir)
+  end
+  def do_download(url, cache_dir)
+    Downloader.download(url, nil, nil, nil, :cache_dir => cache_dir)
   end
 
   def do_checksum(cache, chksums)
@@ -77,22 +80,23 @@ class ExtLibs
   end
 
   def do_command(mode, dest, url, cache_dir, chksums)
-    base = File.basename(url)
-    cache = File.join(cache_dir, base)
-    target = File.join(dest, base[/.*(?=\.tar(?:\.\w+)?\z)/])
     extracted = false
+    base = /.*(?=\.tar(?:\.\w+)?\z)/
     case mode
     when :download
-      do_download(url, base, cache_dir)
+      cache = do_download(url, cache_dir)
       do_checksum(cache, chksums)
     when :extract
+      cache = cache_file(url, cache_dir)
+      target = File.join(dest, File.basename(cache)[base])
       unless File.directory?(target)
         do_checksum(cache, chksums)
         extracted = do_extract(cache, dest)
       end
     when :all
-      do_download(url, base, cache_dir)
+      cache = do_download(url, cache_dir)
+      target = File.join(dest, File.basename(cache)[base])
       unless File.directory?(target)
         do_checksum(cache, chksums)
         extracted = do_extract(cache, dest)
@@ -102,7 +106,7 @@ class ExtLibs
   end
 
   def run(argv)
-    cache_dir = ENV['CACHE_DIR'] || ".downloaded-cache"
+    cache_dir = nil
     mode = :all
     until argv.empty?
       case argv[0]
@@ -131,8 +135,6 @@ class ExtLibs
       argv.shift
     end
 
-    FileUtils.mkdir_p(cache_dir)
-
     success = true
     argv.each do |dir|
       Dir.glob("#{dir}/**/extlibs") do |list|