2013-02-07 12:43:22 -06:00
|
|
|
require 'open-uri'
|
2013-06-22 16:51:08 -05:00
|
|
|
require 'utils/json'
|
2013-02-07 12:43:22 -06:00
|
|
|
|
2009-08-21 20:30:13 +01:00
|
|
|
class AbstractDownloadStrategy
  # Path set when a local bottle file is being poured directly.
  attr_accessor :local_bottle_path

  def initialize name, package
    @url = package.url
    specs = package.specs
    # Take the first spec/ref pair (e.g. :branch => 'develop') if any were given.
    @spec, @ref = specs.dup.shift unless specs.empty?
  end

  # Rewrite a safe_system argument list: a Hash placeholder of the form
  # {:quiet_flag => '-x'} is replaced by the tool's quiet flag (or removed
  # entirely when running verbosely).
  def expand_safe_system_args args
    args = args.dup
    args.each_with_index do |arg, ii|
      next unless arg.is_a? Hash
      if ARGV.verbose?
        args.delete_at ii
      else
        args[ii] = arg[:quiet_flag]
      end
      # Only the first Hash placeholder is expanded; stop immediately.
      return args
    end
    # No placeholder supplied: insert a generic '-q'.
    # 2 as default because commands are eg. svn up, git pull
    args.insert(2, '-q') unless ARGV.verbose?
    args
  end

  # Run safe_system with quiet flags expanded as above.
  def quiet_safe_system *args
    safe_system(*expand_safe_system_args(args))
  end

  # All download strategies are expected to implement these methods
  def fetch; end
  def stage; end
  def cached_location; end
end
|
|
|
|
|
2011-03-28 19:53:43 -07:00
|
|
|
# Downloads @url into HOMEBREW_CACHE with curl, supporting resume of
# partial downloads and fallback to mirrors, then unpacks the result.
class CurlDownloadStrategy < AbstractDownloadStrategy
  def initialize name, package
    super
    if name.to_s.empty? || name == '__UNKNOWN__'
      # No usable name: derive the cache filename from the URL itself.
      @tarball_path = Pathname.new("#{HOMEBREW_CACHE}/#{basename_without_params}")
    else
      @tarball_path = Pathname.new("#{HOMEBREW_CACHE}/#{name}-#{package.version}#{ext}")
    end
    @mirrors = package.mirrors
    # In-progress downloads carry a ".incomplete" suffix until renamed into place.
    @temporary_path = Pathname.new("#@tarball_path.incomplete")
  end

  def cached_location
    @tarball_path
  end

  # Bytes already present in the partial download (0 when none);
  # passed to curl's "-C" resume option.
  def downloaded_size
    @temporary_path.size? or 0
  end

  # Private method, can be overridden if needed.
  def _fetch
    curl @url, '-C', downloaded_size, '-o', @temporary_path
  end

  # Download @url to the cache, resuming an incomplete download when
  # possible and retrying via mirrors on failure. Returns @tarball_path.
  def fetch
    ohai "Downloading #{@url}"
    unless @tarball_path.exist?
      had_incomplete_download = @temporary_path.exist?
      begin
        _fetch
      rescue ErrorDuringExecution
        # 33 == range not supported
        # try wiping the incomplete download and retrying once
        if $?.exitstatus == 33 && had_incomplete_download
          ohai "Trying a full download"
          @temporary_path.unlink
          had_incomplete_download = false
          retry
        else
          raise CurlDownloadStrategyError, "Download failed: #{@url}"
        end
      end
      # Only move the file into its final place once fully downloaded.
      ignore_interrupts { @temporary_path.rename(@tarball_path) }
    else
      puts "Already downloaded: #{@tarball_path}"
    end
  rescue CurlDownloadStrategyError
    # A mirror may succeed where the primary URL failed.
    raise if @mirrors.empty?
    puts "Trying a mirror..."
    @url = @mirrors.shift
    retry
  else
    @tarball_path
  end

  # Unpack the downloaded file into the current directory, dispatching
  # on the detected compression type of the cached file.
  def stage
    ohai "Pouring #{File.basename(@tarball_path)}" if @tarball_path.to_s.match bottle_regex

    case @tarball_path.compression_type
    when :zip
      with_system_path { quiet_safe_system 'unzip', {:quiet_flag => '-qq'}, @tarball_path }
      chdir
    when :gzip_only
      # gunzip writes the compressed data in the location of the original,
      # regardless of the current working directory; the only way to
      # write elsewhere is to use the stdout
      with_system_path do
        target = File.basename(basename_without_params, ".gz")

        IO.popen("gunzip -f '#{@tarball_path}' -c") do |pipe|
          File.open(target, "w") do |f|
            buf = ""
            f.write(buf) while pipe.read(1024, buf)
          end
        end
      end
    when :gzip, :bzip2, :compress, :tar
      # Assume these are also tarred
      # TODO check if it's really a tar archive
      with_system_path { safe_system 'tar', 'xf', @tarball_path }
      chdir
    when :xz
      raise "You must install XZutils: brew install xz" unless File.executable? xzpath
      with_system_path { safe_system "#{xzpath} -dc \"#{@tarball_path}\" | tar xf -" }
      chdir
    when :pkg
      safe_system '/usr/sbin/pkgutil', '--expand', @tarball_path, basename_without_params
      chdir
    when :rar
      raise "You must install unrar: brew install unrar" unless which "unrar"
      quiet_safe_system 'unrar', 'x', {:quiet_flag => '-inul'}, @tarball_path
    when :p7zip
      raise "You must install 7zip: brew install p7zip" unless which "7zr"
      safe_system '7zr', 'x', @tarball_path
    else
      # Not a recognised archive; just copy the file into the build dir.
      FileUtils.cp @tarball_path, basename_without_params
    end
  end

  private

  # Add a connect timeout when mirrors exist, so a dead primary host
  # fails over quickly instead of hanging.
  def curl(*args)
    args << '--connect-timeout' << '5' unless @mirrors.empty?
    super
  end

  # Location of the xz binary installed by the xz formula.
  def xzpath
    "#{HOMEBREW_PREFIX}/opt/xz/bin/xz"
  end

  # If the archive unpacked into exactly one directory, cd into it.
  def chdir
    entries=Dir['*']
    case entries.length
    when 0 then raise "Empty archive"
    when 1 then Dir.chdir entries.first rescue nil
    end
  end

  def basename_without_params
    # Strip any ?thing=wad out of .c?thing=wad style extensions
    File.basename(@url)[/[^?]+/]
  end

  # Guess the cache-file extension from the URL.
  def ext
    # GitHub uses odd URLs for zip files, so check for those
    rx=%r[https?://(www\.)?github\.com/.*/(zip|tar)ball/]
    if rx.match @url
      if $2 == 'zip'
        '.zip'
      else
        '.tgz'
      end
    else
      # Strip any ?thing=wad out of .c?thing=wad style extensions
      (Pathname.new(@url).extname)[/[^?]+/]
    end
  end
end
|
|
|
|
|
2011-07-20 15:37:33 +02:00
|
|
|
# Detect and download from Apache Mirror
class CurlApacheMirrorDownloadStrategy < CurlDownloadStrategy
  # Ask the Apache closer.cgi for a JSON description of the best mirror,
  # then download from it. Raises when the mirror list can't be parsed.
  def _fetch
    mirror_info = Utils::JSON.load(open("#{@url}&asjson=1").read)
    best_url = mirror_info.fetch('preferred') + mirror_info.fetch('path_info')

    ohai "Best Mirror #{best_url}"
    curl best_url, '-C', downloaded_size, '-o', @temporary_path
  rescue IndexError, Utils::JSON::Error
    raise "Couldn't determine mirror. Try again later."
  end
end
|
|
|
|
|
2010-06-25 19:13:20 -07:00
|
|
|
# Download via an HTTP POST.
# Query parameters on the URL are converted into POST parameters
class CurlPostDownloadStrategy < CurlDownloadStrategy
  def _fetch
    # Everything after '?' becomes the POST body.
    post_url, form_data = @url.split('?')
    curl(post_url, '-d', form_data, '-C', downloaded_size, '-o', @temporary_path)
  end
end
|
|
|
|
|
2013-08-05 07:04:15 +01:00
|
|
|
# Download from an SSL3-only host.
class CurlSSL3DownloadStrategy < CurlDownloadStrategy
  # Identical to the default curl fetch, but forces SSLv3 via curl's -3 flag.
  def _fetch
    curl(@url, '-3', '-C', downloaded_size, '-o', @temporary_path)
  end
end
|
|
|
|
|
2009-12-01 12:01:05 -08:00
|
|
|
# Use this strategy to download but not unzip a file.
# Useful for installing jars.
class NoUnzipCurlDownloadStrategy < CurlDownloadStrategy
  def stage
    # Copy the cached download into the build directory untouched.
    FileUtils.cp(@tarball_path, basename_without_params)
  end
end
|
|
|
|
|
2013-06-08 10:27:47 -07:00
|
|
|
# This strategy is provided for use with sites that only provide HTTPS and
# also have a broken cert. Try not to need this, as we probably won't accept
# the formula.
class CurlUnsafeDownloadStrategy < CurlDownloadStrategy
  # Same as the standard curl fetch, but skips certificate verification.
  def _fetch
    curl(@url, '--insecure', '-C', downloaded_size, '-o', @temporary_path)
  end
end
|
|
|
|
|
2010-11-24 09:40:37 +00:00
|
|
|
# This strategy extracts our binary packages.
class CurlBottleDownloadStrategy < CurlDownloadStrategy
  def initialize name, package
    super
    @tarball_path = HOMEBREW_CACHE/"#{name}-#{package.version}#{ext}"

    # Respect a user-chosen SourceForge mirror when one is configured.
    sf_mirror = ENV['HOMEBREW_SOURCEFORGE_MIRROR']
    @url = "#{@url}?use_mirror=#{sf_mirror}" if sf_mirror
  end
end
|
|
|
|
|
2013-06-08 16:41:23 +01:00
|
|
|
# This strategy extracts local binary packages.
class LocalBottleDownloadStrategy < CurlDownloadStrategy
  def initialize formula, local_bottle_path
    super formula.name, formula.active_spec
    # Point the "cached" path at the pre-existing local bottle file,
    # so fetch is a no-op and stage pours it directly.
    @tarball_path = local_bottle_path
  end
end
|
|
|
|
|
2012-01-23 19:29:55 -08:00
|
|
|
# Checks a Subversion repository out into HOMEBREW_CACHE and exports it
# into the build directory.
class SubversionDownloadStrategy < AbstractDownloadStrategy
  def initialize name, package
    super
    # Default the shared svn binary unless something set it earlier.
    @@svn ||= 'svn'

    if name.to_s.empty? || name == '__UNKNOWN__'
      raise NotImplementedError, "strategy requires a name parameter"
    else
      @co = Pathname.new("#{HOMEBREW_CACHE}/#{name}--svn")
    end

    # HEAD builds get their own cache directory.
    @co = Pathname.new(@co.to_s + '-HEAD') if ARGV.build_head?
  end

  def cached_location
    @co
  end

  def fetch
    # svn+http:// URLs are checked out over plain http.
    @url.sub!(/^svn\+/, '') if @url =~ %r[^svn\+http://]
    ohai "Checking out #{@url}"
    if @spec == :revision
      fetch_repo @co, @url, @ref
    elsif @spec == :revisions
      # nil is OK for main_revision, as fetch_repo will then get latest
      main_revision = @ref.delete :trunk
      fetch_repo @co, @url, main_revision, true

      # Pin each svn:externals entry to its requested revision.
      get_externals do |external_name, external_url|
        fetch_repo @co+external_name, external_url, @ref[external_name], true
      end
    else
      fetch_repo @co, @url
    end
  end

  # Export the cached checkout (without .svn metadata) into the cwd.
  def stage
    quiet_safe_system @@svn, 'export', '--force', @co, Dir.pwd
  end

  def shell_quote str
    # Oh god escaping shell args.
    # See http://notetoself.vrensk.com/2008/08/escaping-single-quotes-in-ruby-harder-than-expected/
    str.gsub(/\\|'/) { |c| "\\#{c}" }
  end

  # Yields [name, url] for each svn:externals entry on @url.
  def get_externals
    `'#{shell_quote(@@svn)}' propget svn:externals '#{shell_quote(@url)}'`.chomp.each_line do |line|
      name, url = line.split(/\s+/)
      yield name, url
    end
  end

  # Check out (or update) `url` into `target`, optionally at `revision`.
  def fetch_repo target, url, revision=nil, ignore_externals=false
    # Use "svn up" when the repository already exists locally.
    # This saves on bandwidth and will have a similar effect to verifying the
    # cache as it will make any changes to get the right revision.
    svncommand = target.exist? ? 'up' : 'checkout'
    args = [@@svn, svncommand]
    # SVN shipped with XCode 3.1.4 can't force a checkout.
    args << '--force' unless MacOS.version == :leopard and @@svn == '/usr/bin/svn'
    args << url if !target.exist?
    args << target
    args << '-r' << revision if revision
    args << '--ignore-externals' if ignore_externals
    quiet_safe_system(*args)
  end
end
|
|
|
|
|
|
|
|
# Require a newer version of Subversion than 1.4.x (Leopard-provided version)
class StrictSubversionDownloadStrategy < SubversionDownloadStrategy
  # Locate an svn binary and warn when it is too old (1.4.x) to export
  # svn:externals correctly. Returns the path to the executable (a String;
  # empty when no svn was found on PATH).
  def find_svn
    # FIX: was `svn -print-path`, which is not a valid Subversion
    # invocation and always yielded an empty string, breaking the
    # version probe below. Resolve the binary from PATH instead.
    exe = which('svn').to_s
    `#{exe} --version` =~ /version (\d+\.\d+(\.\d+)*)/
    svn_version = $1
    # `to_s` guards against a nil match (e.g. svn missing); the age check
    # below then simply doesn't fire.
    version_tuple = svn_version.to_s.split(".").collect { |v| Integer(v) }

    if version_tuple[0] == 1 and version_tuple[1] <= 4
      onoe "Detected Subversion (#{exe}, version #{svn_version}) is too old."
      puts "Subversion 1.4.x will not export externals correctly for this formula."
      puts "You must either `brew install subversion` or set HOMEBREW_SVN to the path"
      puts "of a newer svn binary."
    end
    return exe
  end
end
|
|
|
|
|
2011-09-06 19:57:00 -05:00
|
|
|
# Download from SVN servers with invalid or self-signed certs
class UnsafeSubversionDownloadStrategy < SubversionDownloadStrategy
  # Like the parent's fetch_repo, but passes --non-interactive and
  # --trust-server-cert so svn accepts a bad certificate.
  def fetch_repo target, url, revision=nil, ignore_externals=false
    # Use "svn up" when the repository already exists locally.
    # This saves on bandwidth and will have a similar effect to verifying the
    # cache as it will make any changes to get the right revision.
    command = target.exist? ? 'up' : 'checkout'
    args = [@@svn, command, '--non-interactive', '--trust-server-cert', '--force']
    args << url unless target.exist?
    args << target
    args << '-r' << revision if revision
    args << '--ignore-externals' if ignore_externals
    quiet_safe_system(*args)
  end
end
|
|
|
|
|
2011-03-28 19:53:43 -07:00
|
|
|
# Clones a git repository into HOMEBREW_CACHE and exports a working tree
# (plus submodules) into the build directory.
class GitDownloadStrategy < AbstractDownloadStrategy
  def initialize name, package
    super
    # Default the shared git binary unless something set it earlier.
    @@git ||= 'git'

    if name.to_s.empty? || name == '__UNKNOWN__'
      raise NotImplementedError, "strategy requires a name parameter"
    else
      @clone = Pathname.new("#{HOMEBREW_CACHE}/#{name}--git")
    end
  end

  def cached_location
    @clone
  end

  def fetch
    raise "You must: brew install git" unless which "git"

    ohai "Cloning #@url"

    if @clone.exist? && repo_valid?
      # Refresh the cached clone rather than re-cloning.
      puts "Updating #@clone"
      Dir.chdir(@clone) do
        config_repo
        update_repo
        checkout
        reset
        update_submodules if submodules?
      end
    elsif @clone.exist?
      # Cache dir exists but isn't a usable repo; start over.
      puts "Removing invalid .git repo from cache"
      FileUtils.rm_rf @clone
      clone_repo
    else
      clone_repo
    end
  end

  # Export the requested ref's tree (and submodule trees) into the cwd.
  def stage
    dst = Dir.getwd
    Dir.chdir @clone do
      if @spec and @ref
        ohai "Checking out #@spec #@ref"
      else
        reset
      end
      # http://stackoverflow.com/questions/160608/how-to-do-a-git-export-like-svn-export
      safe_system @@git, 'checkout-index', '-a', '-f', "--prefix=#{dst}/"
      checkout_submodules(dst) if submodules?
    end
  end

  private

  def git_dir
    @clone.join(".git")
  end

  # True when @ref already resolves in the cached repo.
  def has_ref?
    quiet_system @@git, '--git-dir', git_dir, 'rev-parse', '-q', '--verify', @ref
  end

  # A shallow clone works unless a specific revision is requested
  # (and the host is known to support --depth).
  def support_depth?
    @spec != :revision and host_supports_depth?
  end

  def host_supports_depth?
    @url =~ %r{git://} or @url =~ %r{https://github.com/}
  end

  # Uses a cheap `git status` to decide whether the cache dir is a
  # functioning repository.
  def repo_valid?
    quiet_system @@git, "--git-dir", git_dir, "status", "-s"
  end

  def submodules?
    @clone.join(".gitmodules").exist?
  end

  # Arguments for the initial `git clone` of @url into @clone.
  def clone_args
    args = %w{clone}
    args << '--depth' << '1' if support_depth?

    case @spec
    when :branch, :tag then args << '--branch' << @ref
    end

    args << @url << @clone
  end

  # Fetch refspec matching the requested spec (branch/tag/default master).
  def refspec
    case @spec
    when :branch then "+refs/heads/#@ref:refs/remotes/origin/#@ref"
    when :tag then "+refs/tags/#@ref:refs/tags/#@ref"
    else "+refs/heads/master:refs/remotes/origin/master"
    end
  end

  # Re-point the cached repo at the current URL/refspec, in case the
  # formula changed since the clone was made.
  def config_repo
    safe_system @@git, 'config', 'remote.origin.url', @url
    safe_system @@git, 'config', 'remote.origin.fetch', refspec
  end

  def update_repo
    # A tag that is already present never changes; skip the fetch.
    unless @spec == :tag && has_ref?
      quiet_safe_system @@git, 'fetch', 'origin'
    end
  end

  def clone_repo
    safe_system @@git, *clone_args
    @clone.cd { update_submodules } if submodules?
  end

  # Arguments for checking out the requested ref; defaults to the
  # remote HEAD's branch when no spec was given.
  def checkout_args
    ref = case @spec
    when :branch, :tag, :revision then @ref
    else `git symbolic-ref refs/remotes/origin/HEAD`.strip.split("/").last
    end

    args = %w{checkout -f}
    args << { :quiet_flag => '-q' }
    args << ref
  end

  def checkout
    nostdout { quiet_safe_system @@git, *checkout_args }
  end

  # Arguments for hard-resetting the work tree to the requested ref.
  def reset_args
    ref = case @spec
    when :branch then "origin/#@ref"
    when :revision, :tag then @ref
    else "origin/HEAD"
    end

    args = %w{reset}
    args << { :quiet_flag => "-q" }
    args << "--hard" << ref
  end

  def reset
    quiet_safe_system @@git, *reset_args
  end

  def update_submodules
    safe_system @@git, 'submodule', 'update', '--init'
  end

  # Export each submodule's tree under dst, mirroring stage's
  # checkout-index export for the main repo.
  def checkout_submodules(dst)
    sub_cmd = %W{#@@git checkout-index -a -f --prefix=#{dst}/$path/}
    safe_system @@git, 'submodule', '--quiet', 'foreach', '--recursive', *sub_cmd
  end
end
|
2009-09-27 19:30:39 -04:00
|
|
|
|
2011-03-28 19:53:43 -07:00
|
|
|
# Checks a module out of a CVS repository into HOMEBREW_CACHE and copies
# it (minus CVS metadata) into the build directory.
class CVSDownloadStrategy < AbstractDownloadStrategy
  def initialize name, package
    super

    if name.to_s.empty? || name == '__UNKNOWN__'
      raise NotImplementedError, "strategy requires a name parameter"
    else
      @unique_token = "#{name}--cvs"
      @co = Pathname.new("#{HOMEBREW_CACHE}/#{@unique_token}")
    end
  end

  def cached_location; @co; end

  def fetch
    ohai "Checking out #{@url}"

    # URL of cvs cvs://:pserver:anoncvs@www.gccxml.org:/cvsroot/GCC_XML:gccxml
    # will become:
    # cvs -d :pserver:anoncvs@www.gccxml.org:/cvsroot/GCC_XML login
    # cvs -d :pserver:anoncvs@www.gccxml.org:/cvsroot/GCC_XML co gccxml
    mod, url = split_url(@url)

    unless @co.exist?
      Dir.chdir HOMEBREW_CACHE do
        safe_system '/usr/bin/cvs', '-d', url, 'login'
        safe_system '/usr/bin/cvs', '-d', url, 'checkout', '-d', @unique_token, mod
      end
    else
      puts "Updating #{@co}"
      Dir.chdir(@co) { safe_system '/usr/bin/cvs', 'up' }
    end
  end

  def stage
    FileUtils.cp_r Dir[@co+"{.}"], Dir.pwd

    require 'find'
    Find.find(Dir.pwd) do |path|
      if FileTest.directory?(path) && File.basename(path) == "CVS"
        # FIX: the removal used to come after Find.prune (which throws),
        # so it was unreachable — and it misspelled FileUtils as FileUtil.
        # Delete the metadata directory first, then prune so Find does
        # not descend into the path we just removed.
        FileUtils.rm_r path, :force => true
        Find.prune
      end
    end
  end

  private

  # Split a cvs:// URL into [module, cvsroot].
  def split_url(in_url)
    parts = in_url.sub(%r[^cvs://], '').split(/:/)
    mod = parts.pop
    url = parts.join(':')
    [ mod, url ]
  end
end
|
|
|
|
|
2011-03-28 19:53:43 -07:00
|
|
|
# Clones a Mercurial repository into HOMEBREW_CACHE and archives the
# requested revision into the build directory.
class MercurialDownloadStrategy < AbstractDownloadStrategy
  def initialize name, package
    super

    if name.to_s.empty? || name == '__UNKNOWN__'
      raise NotImplementedError, "strategy requires a name parameter"
    else
      @clone = Pathname.new("#{HOMEBREW_CACHE}/#{name}--hg")
    end
  end

  def cached_location; @clone; end

  # First executable hg found among PATH, the Homebrew prefix, the
  # mercurial formula's opt prefix, and the deprecated python location.
  def hgpath
    # #{HOMEBREW_PREFIX}/share/python/hg is deprecated, but we leave it in for a while
    @path ||= %W[
      #{which("hg")}
      #{HOMEBREW_PREFIX}/bin/hg
      #{Formula.factory('mercurial').opt_prefix}/bin/hg
      #{HOMEBREW_PREFIX}/share/python/hg
    ].find { |p| File.executable? p }
  end

  def fetch
    raise "You must: brew install mercurial" unless hgpath

    ohai "Cloning #{@url}"

    unless @clone.exist?
      # Strip the hg:// scheme marker before handing the URL to hg.
      url=@url.sub(%r[^hg://], '')
      safe_system hgpath, 'clone', url, @clone
    else
      puts "Updating #{@clone}"
      Dir.chdir(@clone) do
        safe_system hgpath, 'pull'
        safe_system hgpath, 'update'
      end
    end
  end

  # Archive the clone (at @ref when given) into the cwd as plain files.
  def stage
    dst=Dir.getwd
    Dir.chdir @clone do
      if @spec and @ref
        ohai "Checking out #{@spec} #{@ref}"
        safe_system hgpath, 'archive', '--subrepos', '-y', '-r', @ref, '-t', 'files', dst
      else
        safe_system hgpath, 'archive', '--subrepos', '-y', '-t', 'files', dst
      end
    end
  end
end
|
2010-02-02 13:43:44 +01:00
|
|
|
|
2011-03-28 19:53:43 -07:00
|
|
|
# Checks a Bazaar branch out into HOMEBREW_CACHE and copies it (minus
# .bzr metadata) into the build directory.
class BazaarDownloadStrategy < AbstractDownloadStrategy
  def initialize name, package
    super

    if name.to_s.empty? || name == '__UNKNOWN__'
      raise NotImplementedError, "strategy requires a name parameter"
    else
      @clone = Pathname.new("#{HOMEBREW_CACHE}/#{name}--bzr")
    end
  end

  def cached_location; @clone; end

  # First executable bzr found on PATH or under the Homebrew prefix.
  def bzrpath
    @path ||= %W[
      #{which("bzr")}
      #{HOMEBREW_PREFIX}/bin/bzr
    ].find { |p| File.executable? p }
  end

  def fetch
    raise "You must: brew install bazaar" unless bzrpath

    ohai "Cloning #{@url}"
    unless @clone.exist?
      # Strip the bzr:// scheme marker before handing the URL to bzr.
      url=@url.sub(%r[^bzr://], '')
      # 'lightweight' means history-less
      safe_system bzrpath, 'checkout', '--lightweight', url, @clone
    else
      puts "Updating #{@clone}"
      Dir.chdir(@clone) { safe_system bzrpath, 'update' }
    end
  end

  def stage
    # FIXME: The export command doesn't work on checkouts
    # See https://bugs.launchpad.net/bzr/+bug/897511
    FileUtils.cp_r Dir[@clone+"{.}"], Dir.pwd
    FileUtils.rm_r Dir[Dir.pwd+"/.bzr"]

    #dst=Dir.getwd
    #Dir.chdir @clone do
    #  if @spec and @ref
    #    ohai "Checking out #{@spec} #{@ref}"
    #    Dir.chdir @clone do
    #      safe_system bzrpath, 'export', '-r', @ref, dst
    #    end
    #  else
    #    safe_system bzrpath, 'export', dst
    #  end
    #end
  end
end
|
2010-03-22 21:19:20 -07:00
|
|
|
|
2010-09-29 20:22:34 +08:00
|
|
|
# Clones a Fossil repository into HOMEBREW_CACHE and opens it into the
# build directory.
class FossilDownloadStrategy < AbstractDownloadStrategy
  def initialize name, package
    super
    raise NotImplementedError, "strategy requires a name parameter" if name.to_s.empty? || name == '__UNKNOWN__'
    @clone = Pathname.new("#{HOMEBREW_CACHE}/#{name}--fossil")
  end

  def cached_location; @clone; end

  # First executable fossil found on PATH or under the Homebrew prefix.
  def fossilpath
    @path ||= %W[
      #{which("fossil")}
      #{HOMEBREW_PREFIX}/bin/fossil
    ].find { |candidate| File.executable? candidate }
  end

  def fetch
    raise "You must: brew install fossil" unless fossilpath

    ohai "Cloning #{@url}"
    if @clone.exist?
      puts "Updating #{@clone}"
      safe_system fossilpath, 'pull', '-R', @clone
    else
      # Strip the fossil:// scheme marker before handing the URL to fossil.
      safe_system fossilpath, 'clone', @url.sub(%r[^fossil://], ''), @clone
    end
  end

  def stage
    # TODO: The 'open' and 'checkout' commands are very noisy and have no '-q' option.
    safe_system fossilpath, 'open', @clone
    return unless @spec and @ref
    ohai "Checking out #{@spec} #{@ref}"
    safe_system fossilpath, 'checkout', @ref
  end
end
|
|
|
|
|
2012-06-25 21:39:28 -05:00
|
|
|
# Maps a URL (and an optional user-supplied override) to the download
# strategy class that should handle it.
class DownloadStrategyDetector
  # `strategy` may be a strategy class (used as-is), a Symbol shorthand,
  # or nil (detect from the URL).
  def self.detect(url, strategy=nil)
    if strategy.is_a? Class and strategy.ancestors.include? AbstractDownloadStrategy
      strategy
    elsif strategy.is_a? Symbol
      detect_from_symbol(strategy)
    else
      detect_from_url(url)
    end
  end

  def self.detect_from_url(url)
    case url
    # We use a special URL pattern for cvs
    when %r[^cvs://] then CVSDownloadStrategy
    # Standard URLs
    when %r[^bzr://] then BazaarDownloadStrategy
    when %r[^git://] then GitDownloadStrategy
    when %r[^https?://.+\.git$] then GitDownloadStrategy
    when %r[^hg://] then MercurialDownloadStrategy
    when %r[^svn://] then SubversionDownloadStrategy
    when %r[^svn\+http://] then SubversionDownloadStrategy
    when %r[^fossil://] then FossilDownloadStrategy
    # Some well-known source hosts
    when %r[^https?://(.+?\.)?googlecode\.com/hg] then MercurialDownloadStrategy
    when %r[^https?://(.+?\.)?googlecode\.com/svn] then SubversionDownloadStrategy
    when %r[^https?://(.+?\.)?sourceforge\.net/svnroot/] then SubversionDownloadStrategy
    when %r[^https?://(.+?\.)?sourceforge\.net/hgweb/] then MercurialDownloadStrategy
    when %r[^http://svn.apache.org/repos/] then SubversionDownloadStrategy
    when %r[^http://www.apache.org/dyn/closer.cgi] then CurlApacheMirrorDownloadStrategy
    # Common URL patterns
    when %r[^https?://svn\.] then SubversionDownloadStrategy
    when bottle_native_regex, bottle_regex
      CurlBottleDownloadStrategy
    # Otherwise just try to download
    else CurlDownloadStrategy
    end
  end

  def self.detect_from_symbol(symbol)
    case symbol
    when :bzr then BazaarDownloadStrategy
    when :curl then CurlDownloadStrategy
    when :cvs then CVSDownloadStrategy
    when :git then GitDownloadStrategy
    when :hg then MercurialDownloadStrategy
    when :nounzip then NoUnzipCurlDownloadStrategy
    when :post then CurlPostDownloadStrategy
    when :ssl3 then CurlSSL3DownloadStrategy
    when :svn then SubversionDownloadStrategy
    else
      # FIX: the message interpolated `strategy`, which is not defined in
      # this method (the parameter is `symbol`), so an unknown symbol
      # raised NameError instead of this diagnostic.
      raise "Unknown download strategy #{symbol} was requested."
    end
  end
end
|