2013-06-22 16:51:08 -05:00
|
|
|
require 'utils/json'
|
2013-02-07 12:43:22 -06:00
|
|
|
|
2009-08-21 20:30:13 +01:00
|
|
|
# Base class for all download strategies. Holds the resource being
# downloaded and its URL, and defines the interface subclasses implement.
class AbstractDownloadStrategy
  attr_reader :name, :resource

  def initialize name, resource
    @name = name
    @resource = resource
    @url = resource.url
  end

  # Rewrites an argument list for quiet operation unless the user asked for
  # verbose output. A Hash element is treated as an option spec: in quiet
  # mode it is replaced by its :quiet_flag value, in verbose mode it is
  # dropped; either way the list is returned immediately. When no Hash is
  # present, a plain "-q" is inserted in quiet mode.
  def expand_safe_system_args args
    args = args.dup
    args.each_with_index do |arg, idx|
      next unless arg.is_a? Hash
      if ARGV.verbose?
        args.delete_at idx
      else
        args[idx] = arg[:quiet_flag]
      end
      return args
    end
    # Insert at index 2 because commands are e.g. "svn up", "git pull".
    args.insert(2, '-q') unless ARGV.verbose?
    args
  end

  # Runs safe_system after applying the quiet-mode argument expansion.
  def quiet_safe_system *args
    safe_system(*expand_safe_system_args(args))
  end

  # All download strategies are expected to implement these methods.
  def fetch; end
  def stage; end
  def cached_location; end
  def clear_cache; end
end
|
|
|
|
|
2013-10-09 21:41:15 -05:00
|
|
|
# Common behaviour for version-control-system download strategies:
# a persistent clone in HOMEBREW_CACHE that is updated on re-fetch.
class VCSDownloadStrategy < AbstractDownloadStrategy
  # Spec keys that may carry a checkout reference.
  REF_TYPES = [:branch, :revision, :revisions, :tag].freeze

  def initialize name, resource
    super
    @ref_type, @ref = extract_ref(resource.specs)
    @clone = HOMEBREW_CACHE.join(cache_filename)
  end

  # Clones the repository on first fetch; afterwards updates the cached
  # copy, discarding it first when it is no longer a valid checkout.
  def fetch
    ohai "Cloning #{@url}"

    if cached_location.exist?
      if repo_valid?
        puts "Updating #{cached_location}"
        update
      else
        puts "Removing invalid repository from cache"
        clear_cache
        clone_repo
      end
    else
      clone_repo
    end
  end

  def stage
    ohai "Checking out #{@ref_type} #{@ref}" if @ref_type && @ref
  end

  def cached_location
    @clone
  end

  def clear_cache
    cached_location.rmtree if cached_location.exist?
  end

  # True when the requested version is a HEAD (VCS tip) build.
  def head?
    resource.version.head?
  end

  private

  # Subclasses override this with a VCS-specific cache suffix.
  def cache_tag
    "__UNKNOWN__"
  end

  def cache_filename
    "#{name}--#{cache_tag}"
  end

  # Hooks for subclasses to implement VCS-specific behaviour.
  def repo_valid?
    true
  end

  def clone_repo
  end

  def update
  end

  # Returns the first matching ref type present in the spec hash together
  # with its value; [nil, nil] when no ref was requested.
  def extract_ref(specs)
    found = REF_TYPES.find { |type| specs.key?(type) }
    [found, specs[found]]
  end
end
|
|
|
|
|
2011-03-28 19:53:43 -07:00
|
|
|
# Plain-HTTP(S) download strategy backed by curl. Downloads into a
# ".incomplete" temporary file so partial downloads can be resumed, then
# renames into place; stage unpacks based on the detected compression type.
class CurlDownloadStrategy < AbstractDownloadStrategy
  attr_reader :mirrors, :tarball_path, :temporary_path

  def initialize(name, resource)
    super
    @mirrors = resource.mirrors.dup
    @tarball_path = HOMEBREW_CACHE.join("#{name}-#{resource.version}#{ext}")
    @temporary_path = Pathname.new("#{tarball_path}.incomplete")
  end

  def cached_location
    tarball_path
  end

  def clear_cache
    [cached_location, temporary_path].each { |f| f.unlink if f.exist? }
  end

  # Bytes already present in the in-progress download (0 when none),
  # used as curl's resume offset.
  def downloaded_size
    temporary_path.size? || 0
  end

  # Private method, can be overridden if needed.
  def _fetch
    curl @url, '-C', downloaded_size, '-o', temporary_path
  end

  def fetch
    ohai "Downloading #{@url}"
    if tarball_path.exist?
      puts "Already downloaded: #{tarball_path}"
    else
      had_incomplete_download = temporary_path.exist?
      begin
        _fetch
      rescue ErrorDuringExecution
        # curl exits 33 when the server doesn't support ranges; wipe the
        # incomplete download and retry once from scratch.
        if $?.exitstatus == 33 && had_incomplete_download
          ohai "Trying a full download"
          temporary_path.unlink
          had_incomplete_download = false
          retry
        else
          msg = if @url =~ %r[^file://]
            "File does not exist: #{@url.sub(%r[^file://], "")}"
          else
            "Download failed: #{@url}"
          end
          raise CurlDownloadStrategyError, msg
        end
      end
      ignore_interrupts { temporary_path.rename(tarball_path) }
    end
  rescue CurlDownloadStrategyError
    # Fall back to the next mirror, if any, and restart the fetch.
    raise if mirrors.empty?
    puts "Trying a mirror..."
    @url = mirrors.shift
    retry
  end

  # gunzip and bunzip2 write the output file in the same directory as the input
  # file regardless of the current working directory, so we need to write it to
  # the correct location ourselves.
  def buffered_write(tool)
    target = File.basename(basename_without_params, tarball_path.extname)

    Utils.popen_read(tool, "-f", tarball_path.to_s, "-c") do |pipe|
      File.open(target, "wb") do |f|
        chunk = ""
        f.write(chunk) while pipe.read(1024, chunk)
      end
    end
  end

  # Unpacks the download according to its detected compression type,
  # descending into a sole top-level directory when one exists.
  def stage
    case tarball_path.compression_type
    when :zip
      with_system_path { quiet_safe_system "unzip", { :quiet_flag => "-qq" }, tarball_path }
      chdir
    when :gzip_only
      with_system_path { buffered_write("gunzip") }
    when :bzip2_only
      with_system_path { buffered_write("bunzip2") }
    when :gzip, :bzip2, :compress, :tar
      # Assume these are also tarred
      with_system_path { safe_system "tar", "xf", tarball_path }
      chdir
    when :xz
      with_system_path { safe_system "#{xzpath} -dc \"#{tarball_path}\" | tar xf -" }
      chdir
    when :lzip
      with_system_path { safe_system "#{lzippath} -dc \"#{tarball_path}\" | tar xf -" }
      chdir
    when :xar
      safe_system "/usr/bin/xar", "-xf", tarball_path
    when :rar
      quiet_safe_system "unrar", "x", { :quiet_flag => "-inul" }, tarball_path
    when :p7zip
      safe_system "7zr", "x", tarball_path
    else
      # Not an archive we recognise; just copy it into place.
      FileUtils.cp tarball_path, basename_without_params
    end
  end

  private

  def curl(*args)
    # Fail fast when mirrors are available to fall back to.
    args << "--connect-timeout" << "5" unless mirrors.empty?
    super
  end

  def xzpath
    "#{HOMEBREW_PREFIX}/opt/xz/bin/xz"
  end

  def lzippath
    "#{HOMEBREW_PREFIX}/opt/lzip/bin/lzip"
  end

  def chdir
    entries = Dir['*']
    case entries.length
    when 0 then raise "Empty archive"
    when 1 then Dir.chdir entries.first rescue nil
    end
  end

  def basename_without_params
    # Strip any ?thing=wad out of .c?thing=wad style extensions
    File.basename(@url)[/[^?]+/]
  end

  def ext
    # We need a Pathname because we've monkeypatched extname to support double
    # extensions (e.g. tar.gz).
    # We can't use basename_without_params, because given a URL like
    #   http://example.com/download.php?file=foo-1.0.tar.gz
    # the extension we want is ".tar.gz", not ".php".
    Pathname.new(@url).extname[/[^?]+/]
  end
end
|
|
|
|
|
2011-07-20 15:37:33 +02:00
|
|
|
# Detect and download from Apache Mirror
class CurlApacheMirrorDownloadStrategy < CurlDownloadStrategy
  # Asks the Apache mirror CGI for its JSON mirror list and returns the
  # raw response body, captured from a forked curl via a pipe.
  def apache_mirrors
    reader, writer = IO.pipe
    output = ""

    pid = fork do
      reader.close
      $stdout.reopen(writer)
      $stderr.reopen(writer)
      curl "#{@url}&asjson=1"
    end
    writer.close

    reader.readline if ARGV.verbose? # Remove Homebrew output
    output << reader.read until reader.eof?
    reader.close
    Process.wait(pid)
    output
  end

  # On the first attempt, resolves the preferred mirror and rewrites @url
  # before delegating to the normal curl fetch; subsequent attempts (e.g.
  # after a retry) go straight to super.
  def _fetch
    return super if @tried_apache_mirror
    @tried_apache_mirror = true

    mirrors = Utils::JSON.load(apache_mirrors)
    @url = mirrors.fetch('preferred') + mirrors.fetch('path_info')

    ohai "Best Mirror #{@url}"
    super
  rescue IndexError, Utils::JSON::Error
    raise CurlDownloadStrategyError, "Couldn't determine mirror, try again later."
  end
end
|
|
|
|
|
2010-06-25 19:13:20 -07:00
|
|
|
# Download via an HTTP POST.
# Query parameters on the URL are converted into POST parameters
class CurlPostDownloadStrategy < CurlDownloadStrategy
  def _fetch
    # Everything after "?" becomes the POST body.
    base_url, query = @url.split('?')
    curl base_url, '-d', query, '-C', downloaded_size, '-o', temporary_path
  end
end
|
|
|
|
|
2013-08-05 07:04:15 +01:00
|
|
|
# Download from an SSL3-only host.
class CurlSSL3DownloadStrategy < CurlDownloadStrategy
  def _fetch
    # "-3" forces curl to negotiate SSLv3.
    curl @url, '-3', '-C', downloaded_size, '-o', temporary_path
  end
end
|
|
|
|
|
2009-12-01 12:01:05 -08:00
|
|
|
# Use this strategy to download but not unzip a file.
# Useful for installing jars.
class NoUnzipCurlDownloadStrategy < CurlDownloadStrategy
  def stage
    # Copy the download verbatim instead of extracting it.
    FileUtils.cp tarball_path, basename_without_params
  end
end
|
|
|
|
|
2013-06-08 10:27:47 -07:00
|
|
|
# This strategy is provided for use with sites that only provide HTTPS and
# also have a broken cert. Try not to need this, as we probably won't accept
# the formula.
class CurlUnsafeDownloadStrategy < CurlDownloadStrategy
  def _fetch
    # "--insecure" disables certificate verification.
    curl @url, '--insecure', '-C', downloaded_size, '-o', temporary_path
  end
end
|
|
|
|
|
2010-11-24 09:40:37 +00:00
|
|
|
# This strategy extracts our binary packages.
class CurlBottleDownloadStrategy < CurlDownloadStrategy
  def curl(*args)
    # Honour a user-selected SourceForge mirror when one is configured.
    mirror = ENV["HOMEBREW_SOURCEFORGE_MIRROR"]
    args << "-G" << "-d" << "use_mirror=#{mirror}" if mirror
    super
  end

  def stage
    ohai "Pouring #{tarball_path.basename}"
    super
  end
end
|
|
|
|
|
2013-06-08 16:41:23 +01:00
|
|
|
# This strategy extracts local binary packages.
class LocalBottleDownloadStrategy < CurlDownloadStrategy
  def initialize formula
    super formula.name, formula.active_spec
    # Point at the bottle already on disk rather than a cache download.
    @tarball_path = formula.local_bottle_path
  end

  def stage
    ohai "Pouring #{tarball_path.basename}"
    super
  end
end
|
|
|
|
|
2013-09-24 01:34:28 -07:00
|
|
|
# S3DownloadStrategy downloads tarballs from AWS S3.
# To use it, add ":using => S3DownloadStrategy" to the URL section of your
# formula. This download strategy uses AWS access tokens (in the
# environment variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY)
# to sign the request. This strategy is good in a corporate setting,
# because it lets you use a private S3 bucket as a repo for internal
# distribution. (It will work for public buckets as well.)
class S3DownloadStrategy < CurlDownloadStrategy
  def _fetch
    # Put the aws gem requirement here (vs top of file) so it's only
    # a dependency of S3 users, not all Homebrew users
    require 'rubygems'
    begin
      require 'aws-sdk'
    rescue LoadError
      onoe "Install the aws-sdk gem into the gem repo used by brew."
      raise
    end

    # Escape the dots in the hostname: previously each "." matched any
    # character, so non-S3 hosts like "foo-s3-amazonaws.com" slipped through.
    if @url !~ %r[^https?://+([^.]+)\.s3\.amazonaws\.com/+(.+)$]
      raise "Bad S3 URL: " + @url
    end
    bucket, key = $1, $2

    obj = AWS::S3.new().buckets[bucket].objects[key]
    begin
      # Signed URL using the credentials from the environment.
      s3url = obj.url_for(:get)
    rescue AWS::Errors::MissingCredentialsError
      ohai "AWS credentials missing, trying public URL instead."
      s3url = obj.public_url
    end

    curl s3url, '-C', downloaded_size, '-o', temporary_path
  end
end
|
|
|
|
|
2013-10-09 21:41:15 -05:00
|
|
|
# Subversion checkout strategy, including svn:externals support for
# multi-revision checkouts.
class SubversionDownloadStrategy < VCSDownloadStrategy
  def initialize(name, resource)
    super
    # Strip the "svn+" scheme prefix when present.
    @url = @url.sub(/^svn\+/, "") if @url.start_with?("svn+http://")
  end

  def fetch
    # If the cached checkout points at a different URL and cannot be
    # switched to the new one, start over from a clean cache.
    unless @url.chomp("/") == repo_url || quiet_system("svn", "switch", @url, cached_location)
      clear_cache
    end
    super
  end

  def stage
    super
    quiet_safe_system "svn", "export", "--force", cached_location, Dir.pwd
  end

  private

  # URL the cached working copy was checked out from (nil when unknown).
  def repo_url
    `svn info '#{cached_location}' 2>/dev/null`.strip[/^URL: (.+)$/, 1]
  end

  def shell_quote str
    # Oh god escaping shell args.
    # See http://notetoself.vrensk.com/2008/08/escaping-single-quotes-in-ruby-harder-than-expected/
    str.gsub(/\\|'/) { |c| "\\#{c}" }
  end

  # Yields each [name, url] pair declared in the svn:externals property.
  def get_externals
    `svn propget svn:externals '#{shell_quote(@url)}'`.chomp.each_line do |line|
      name, url = line.split(/\s+/)
      yield name, url
    end
  end

  # Extra svn arguments; overridden by subclasses.
  def fetch_args
    []
  end

  def fetch_repo target, url, revision=nil, ignore_externals=false
    # Use "svn up" when the repository already exists locally.
    # This saves on bandwidth and will have a similar effect to verifying the
    # cache as it will make any changes to get the right revision.
    svncommand = target.directory? ? 'up' : 'checkout'
    args = ['svn', svncommand] + fetch_args
    # SVN shipped with XCode 3.1.4 can't force a checkout.
    args << '--force' unless MacOS.version == :leopard
    args << url unless target.directory?
    args << target
    args << '-r' << revision if revision
    args << '--ignore-externals' if ignore_externals
    quiet_safe_system(*args)
  end

  def cache_tag
    head? ? "svn-HEAD" : "svn"
  end

  def repo_valid?
    cached_location.join(".svn").directory?
  end

  def clone_repo
    case @ref_type
    when :revision
      fetch_repo cached_location, @url, @ref
    when :revisions
      # nil is OK for main_revision, as fetch_repo will then get latest
      main_revision = @ref[:trunk]
      fetch_repo cached_location, @url, main_revision, true

      get_externals do |external_name, external_url|
        fetch_repo cached_location+external_name, external_url, @ref[external_name], true
      end
    else
      fetch_repo cached_location, @url
    end
  end
  alias_method :update, :clone_repo
end
|
|
|
|
|
2014-10-05 20:15:18 -05:00
|
|
|
# Alias kept so formulae referencing the "strict" variant keep working;
# it is now identical to SubversionDownloadStrategy.
StrictSubversionDownloadStrategy = SubversionDownloadStrategy
|
2009-08-21 20:30:13 +01:00
|
|
|
|
2011-09-06 19:57:00 -05:00
|
|
|
# Download from SVN servers with invalid or self-signed certs
class UnsafeSubversionDownloadStrategy < SubversionDownloadStrategy
  # Extra svn flags that accept the server certificate without prompting.
  def fetch_args
    %w[--non-interactive --trust-server-cert]
  end
  private :fetch_args
end
|
|
|
|
|
2013-10-09 21:41:15 -05:00
|
|
|
# Git clone strategy. Keeps a persistent clone in the cache and checks the
# requested branch/tag/revision out into the staging directory.
class GitDownloadStrategy < VCSDownloadStrategy
  # Hosts known to support shallow (--depth 1) clones.
  # Frozen for consistency with VCSDownloadStrategy::REF_TYPES.
  SHALLOW_CLONE_WHITELIST = [
    %r{git://},
    %r{https://github\.com},
    %r{http://git\.sv\.gnu\.org},
    %r{http://llvm\.org},
  ].freeze

  def initialize name, resource
    super
    # Default to the tip of "master" when no ref was requested.
    @ref_type ||= :branch
    @ref ||= "master"
    @shallow = resource.specs.fetch(:shallow) { true }
  end

  def stage
    super

    dst = Dir.getwd
    cached_location.cd do
      # http://stackoverflow.com/questions/160608/how-to-do-a-git-export-like-svn-export
      safe_system 'git', 'checkout-index', '-a', '-f', "--prefix=#{dst}/"
      checkout_submodules(dst) if submodules?
    end
  end

  private

  def cache_tag
    "git"
  end

  def update
    cached_location.cd do
      config_repo
      update_repo
      checkout
      reset
      update_submodules if submodules?
    end
  end

  def shallow_clone?
    @shallow && support_depth?
  end

  # Shallow clones of a specific revision are impossible; otherwise only
  # whitelisted hosts are trusted to support --depth.
  def support_depth?
    @ref_type != :revision && SHALLOW_CLONE_WHITELIST.any? { |rx| rx === @url }
  end

  def git_dir
    cached_location.join(".git")
  end

  # True when @ref resolves to a commit in the cached repository.
  def has_ref?
    quiet_system 'git', '--git-dir', git_dir, 'rev-parse', '-q', '--verify', "#{@ref}^{commit}"
  end

  def repo_valid?
    quiet_system "git", "--git-dir", git_dir, "status", "-s"
  end

  def submodules?
    cached_location.join(".gitmodules").exist?
  end

  def clone_args
    args = %w{clone}
    args << '--depth' << '1' if shallow_clone?

    case @ref_type
    when :branch, :tag then args << '--branch' << @ref
    end

    args << @url << cached_location
  end

  # Fetch refspec matching the requested ref type.
  def refspec
    case @ref_type
    when :branch then "+refs/heads/#{@ref}:refs/remotes/origin/#{@ref}"
    when :tag then "+refs/tags/#{@ref}:refs/tags/#{@ref}"
    else "+refs/heads/master:refs/remotes/origin/master"
    end
  end

  # Re-point the cached clone at the (possibly changed) URL and refspec.
  def config_repo
    safe_system 'git', 'config', 'remote.origin.url', @url
    safe_system 'git', 'config', 'remote.origin.fetch', refspec
  end

  # Branches always need a fetch; fixed refs only when not already present.
  def update_repo
    if @ref_type == :branch || !has_ref?
      quiet_safe_system 'git', 'fetch', 'origin'
    end
  end

  def clone_repo
    safe_system 'git', *clone_args
    cached_location.cd { update_submodules } if submodules?
  end

  def checkout
    quiet_safe_system "git", "checkout", "-f", @ref, "--"
  end

  def reset_args
    ref = case @ref_type
    when :branch then "origin/#{@ref}"
    when :revision, :tag then @ref
    end

    %W{reset --hard #{ref}}
  end

  def reset
    quiet_safe_system 'git', *reset_args
  end

  def update_submodules
    quiet_safe_system "git", "submodule", "update", "--init", "--recursive"
  end

  # Export each submodule's index into the matching subdirectory of dst.
  def checkout_submodules(dst)
    escaped_clone_path = cached_location.to_s.gsub(/\//, '\/')
    sub_cmd = "git checkout-index -a -f --prefix=#{dst}/${toplevel/#{escaped_clone_path}/}/$path/"
    quiet_safe_system "git", "submodule", "foreach", "--recursive", sub_cmd
  end
end
|
2009-09-27 19:30:39 -04:00
|
|
|
|
2013-10-09 21:41:15 -05:00
|
|
|
# CVS checkout strategy.
class CVSDownloadStrategy < VCSDownloadStrategy
  def stage
    FileUtils.cp_r Dir[cached_location+"{.}"], Dir.pwd
  end

  private

  def cache_tag
    "cvs"
  end

  def repo_valid?
    cached_location.join("CVS").directory?
  end

  def clone_repo
    # URL of cvs cvs://:pserver:anoncvs@www.gccxml.org:/cvsroot/GCC_XML:gccxml
    # will become:
    # cvs -d :pserver:anoncvs@www.gccxml.org:/cvsroot/GCC_XML login
    # cvs -d :pserver:anoncvs@www.gccxml.org:/cvsroot/GCC_XML co gccxml
    mod, url = split_url(@url)

    HOMEBREW_CACHE.cd do
      safe_system cvspath, "-d", url, "login"
      safe_system cvspath, "-d", url, "checkout", "-d", cache_filename, mod
    end
  end

  def update
    cached_location.cd { quiet_safe_system cvspath, { :quiet_flag => "-Q" }, "up" }
  end

  # Splits a cvs:// URL into [module, cvsroot]: the module is the final
  # colon-separated component, the cvsroot everything before it.
  def split_url(in_url)
    parts = in_url.sub(%r[^cvs://], '').split(/:/)
    mod = parts.pop
    url = parts.join(':')
    [mod, url]
  end

  # First available cvs executable, memoized.
  def cvspath
    @path ||= %W[
      /usr/bin/cvs
      #{HOMEBREW_PREFIX}/bin/cvs
      #{HOMEBREW_PREFIX}/opt/cvs/bin/cvs
      #{which("cvs")}
    ].find { |p| File.executable? p }
  end
end
|
2012-09-25 10:11:13 -04:00
|
|
|
|
2014-12-06 12:29:15 -05:00
|
|
|
# Mercurial clone strategy.
class MercurialDownloadStrategy < VCSDownloadStrategy
  def stage
    super

    dst = Dir.getwd
    cached_location.cd do
      # Archive at the requested ref when one was given, otherwise the tip.
      if @ref_type and @ref
        safe_system hgpath, 'archive', '--subrepos', '-y', '-r', @ref, '-t', 'files', dst
      else
        safe_system hgpath, 'archive', '--subrepos', '-y', '-t', 'files', dst
      end
    end
  end

  private

  def cache_tag
    "hg"
  end

  def repo_valid?
    cached_location.join(".hg").directory?
  end

  def clone_repo
    url = @url.sub(%r[^hg://], "")
    safe_system hgpath, "clone", url, cached_location
  end

  def update
    cached_location.cd { quiet_safe_system hgpath, "pull", "--update" }
  end

  # First available hg executable, memoized.
  def hgpath
    @path ||= %W[
      #{which("hg")}
      #{HOMEBREW_PREFIX}/bin/hg
      #{HOMEBREW_PREFIX}/opt/mercurial/bin/hg
    ].find { |p| File.executable? p }
  end
end
|
2012-09-28 21:04:03 -05:00
|
|
|
|
2014-12-06 12:29:15 -05:00
|
|
|
# Bazaar checkout strategy.
class BazaarDownloadStrategy < VCSDownloadStrategy
  def stage
    # The export command doesn't work on checkouts
    # See https://bugs.launchpad.net/bzr/+bug/897511
    FileUtils.cp_r Dir[cached_location+"{.}"], Dir.pwd
    FileUtils.rm_r ".bzr"
  end

  private

  def cache_tag
    "bzr"
  end

  def repo_valid?
    cached_location.join(".bzr").directory?
  end

  def clone_repo
    url = @url.sub(%r[^bzr://], "")
    # "lightweight" means history-less
    safe_system bzrpath, "checkout", "--lightweight", url, cached_location
  end

  def update
    cached_location.cd { quiet_safe_system bzrpath, "update" }
  end

  # First available bzr executable, memoized.
  def bzrpath
    @path ||= %W[
      #{which("bzr")}
      #{HOMEBREW_PREFIX}/bin/bzr
    ].find { |p| File.executable? p }
  end
end
|
2012-09-28 21:04:03 -05:00
|
|
|
|
2014-12-06 12:29:15 -05:00
|
|
|
# Fossil clone strategy.
class FossilDownloadStrategy < VCSDownloadStrategy
  def stage
    super
    # "fossil open" checks the repository out into the current directory,
    # optionally at the requested ref.
    args = [fossilpath, "open", cached_location]
    args << @ref if @ref_type && @ref
    safe_system(*args)
  end

  private

  def cache_tag
    "fossil"
  end

  def clone_repo
    url = @url.sub(%r[^fossil://], "")
    safe_system fossilpath, "clone", url, cached_location
  end

  def update
    safe_system fossilpath, "pull", "-R", cached_location
  end

  # First available fossil executable, memoized.
  def fossilpath
    @path ||= %W[
      #{which("fossil")}
      #{HOMEBREW_PREFIX}/bin/fossil
    ].find { |p| File.executable? p }
  end
end
|
|
|
|
|
2012-06-25 21:39:28 -05:00
|
|
|
# Maps a URL or an explicit strategy specification to the download
# strategy class that should handle it.
class DownloadStrategyDetector
  # Returns the strategy class for +url+. +strategy+ may be nil (detect
  # from the URL), an AbstractDownloadStrategy subclass (used as-is), or a
  # Symbol naming a strategy. Raises TypeError for anything else.
  def self.detect(url, strategy=nil)
    if strategy.nil?
      detect_from_url(url)
    elsif Class === strategy && strategy < AbstractDownloadStrategy
      strategy
    elsif Symbol === strategy
      detect_from_symbol(strategy)
    else
      raise TypeError,
        "Unknown download strategy specification #{strategy.inspect}"
    end
  end

  def self.detect_from_url(url)
    case url
    when %r[^https?://.+\.git$], %r[^git://]
      GitDownloadStrategy
    when %r[^http://www\.apache\.org/dyn/closer\.cgi]
      CurlApacheMirrorDownloadStrategy
    when %r[^https?://(.+?\.)?googlecode\.com/svn], %r[^https?://svn\.], %r[^svn://], %r[^https?://(.+?\.)?sourceforge\.net/svnroot/]
      SubversionDownloadStrategy
    when %r[^cvs://]
      CVSDownloadStrategy
    when %r[^https?://(.+?\.)?googlecode\.com/hg]
      MercurialDownloadStrategy
    when %r[^hg://]
      MercurialDownloadStrategy
    when %r[^bzr://]
      BazaarDownloadStrategy
    when %r[^fossil://]
      FossilDownloadStrategy
    when %r[^http://svn\.apache\.org/repos/], %r[^svn\+http://]
      SubversionDownloadStrategy
    when %r[^https?://(.+?\.)?sourceforge\.net/hgweb/]
      MercurialDownloadStrategy
    else
      CurlDownloadStrategy
    end
  end

  def self.detect_from_symbol(symbol)
    case symbol
    when :hg then MercurialDownloadStrategy
    when :nounzip then NoUnzipCurlDownloadStrategy
    when :git then GitDownloadStrategy
    when :bzr then BazaarDownloadStrategy
    when :svn then SubversionDownloadStrategy
    when :curl then CurlDownloadStrategy
    when :ssl3 then CurlSSL3DownloadStrategy
    when :cvs then CVSDownloadStrategy
    when :post then CurlPostDownloadStrategy
    else
      # Bug fix: previously interpolated the undefined local "strategy",
      # so this error path itself raised NameError instead of reporting
      # the unknown symbol.
      raise "Unknown download strategy #{symbol} was requested."
    end
  end
end
|