2009-08-21 20:30:13 +01:00
|
|
|
# Base class for all download strategies. Records the resource URL, an
# optional {spec => ref} pair (e.g. {:tag => 'v1.0'}), and — when the
# formula name/version are known — a unique token that subclasses use to
# name their cache entries under HOMEBREW_CACHE.
class AbstractDownloadStrategy
  # url     - location to fetch from
  # name    - formula name (may be empty or '__UNKNOWN__')
  # version - formula version
  # specs   - optional Hash of {spec => ref}; only the first pair is used
  def initialize url, name, version, specs
    @url=url
    case specs
    when Hash
      @spec = specs.keys.first # only use first spec
      @ref = specs.values.first
    end
    @unique_token="#{name}-#{version}" unless name.to_s.empty? || name == '__UNKNOWN__'
  end

  # Expand a safe_system argument list in place. A Hash element marks the
  # slot for a command-specific quiet flag: when not verbose it is
  # replaced by arg[:quiet_flag], when verbose it is removed; in either
  # case the args are returned immediately. With no Hash element, a
  # generic '-q' is inserted at index 2 when not verbose.
  def expand_safe_system_args args
    args.each_with_index do |arg, ii|
      if arg.is_a? Hash
        if ARGV.verbose?
          args.delete_at ii
        else
          args[ii] = arg[:quiet_flag]
        end
        return args
      end
    end
    # 2 as default because commands are eg. svn up, git pull
    args.insert(2, '-q') unless ARGV.verbose?
    return args
  end

  # Run safe_system with quiet flags expanded via expand_safe_system_args.
  def quiet_safe_system *args
    safe_system *expand_safe_system_args(args)
  end
end
|
|
|
|
|
2009-10-02 18:36:58 +01:00
|
|
|
# Downloads a single file over HTTP(S)/FTP via curl and unpacks it by
# sniffing the archive's magic bytes.
class CurlDownloadStrategy <AbstractDownloadStrategy
  attr_reader :tarball_path

  def initialize url, name, version, specs
    super
    # Cache under the unique token when name/version are known;
    # otherwise fall back to the URL's basename.
    if @unique_token
      @tarball_path=HOMEBREW_CACHE+(@unique_token+ext)
    else
      @tarball_path=HOMEBREW_CACHE+File.basename(@url)
    end
  end

  def cached_location
    @tarball_path
  end

  # Download @url to @tarball_path unless already cached. Returns the
  # tarball path so the caller can verify its checksum.
  def fetch
    ohai "Downloading #{@url}"
    unless @tarball_path.exist?
      begin
        curl @url, '-o', @tarball_path
      rescue Exception
        # Exception (not StandardError) on purpose: remove the partial
        # download even on interrupt, then re-raise the original error.
        ignore_interrupts { @tarball_path.unlink if @tarball_path.exist? }
        raise
      end
    else
      puts "File already downloaded and cached to #{HOMEBREW_CACHE}"
    end
    return @tarball_path # thus performs checksum verification
  end

  # Unpack the download into the current (staging) directory, detecting
  # the archive type from its magic bytes rather than its extension.
  def stage
    # Hoisted before the conditional: the original first assigned
    # magic_bytes inside one branch, relying on parse-order scoping so
    # the File.open block wrote the method-local variable.
    magic_bytes = nil
    unless @tarball_path.extname == '.jar'
      # get the first four bytes
      File.open(@tarball_path) { |f| magic_bytes = f.read(4) }
    end

    # magic numbers stolen from /usr/share/file/magic/
    case magic_bytes
    when /^PK\003\004/ # .zip archive
      quiet_safe_system '/usr/bin/unzip', {:quiet_flag => '-qq'}, @tarball_path
      chdir
    when /^\037\213/, /^BZh/, /^\037\235/ # gzip/bz2/compress compressed
      # TODO check if it's really a tar archive
      safe_system '/usr/bin/tar', 'xf', @tarball_path
      chdir
    when 'Rar!'
      quiet_safe_system 'unrar', 'x', {:quiet_flag => '-inul'}, @tarball_path
    else
      # we are assuming it is not an archive, use original filename
      # this behaviour is due to ScriptFileFormula expectations
      # So I guess we should cp, but we mv, for this historic reason
      # HOWEVER if this breaks some expectation you had we *will* change the
      # behaviour, just open an issue at github
      # We also do this for jar files, as they are in fact zip files, but
      # we don't want to unzip them
      FileUtils.mv @tarball_path, File.basename(@url)
    end
  end

private
  # If the archive unpacked to exactly one entry, descend into it.
  def chdir
    entries=Dir['*']
    case entries.length
    when 0 then raise "Empty archive"
    # Best effort: the single entry may be a plain file, in which case
    # chdir fails and we deliberately stay put.
    when 1 then Dir.chdir entries.first rescue nil
    end
  end

  # Pick a file extension for the cache entry.
  def ext
    # GitHub uses odd URLs for zip files, so check for those
    rx=%r[http://(www\.)?github\.com/.*/(zip|tar)ball/]
    if rx.match @url
      if $2 == 'zip'
        '.zip'
      else
        '.tgz'
      end
    else
      Pathname.new(@url).extname
    end
  end
end
|
|
|
|
|
2009-12-01 12:01:05 -08:00
|
|
|
# Use this strategy to download but not unzip a file.
# Useful for installing jars.
class NoUnzipCurlDownloadStrategy <CurlDownloadStrategy
  # Skip extraction entirely: move the cached download into the staging
  # directory under its original URL basename.
  def stage
    target = File.basename(@url)
    FileUtils.mv @tarball_path, target
  end
end
|
|
|
|
|
2009-08-21 20:30:13 +01:00
|
|
|
# Checks a Subversion repository out into the cache (with optional
# pinned revisions and svn:externals handling) and stages via svn export.
class SubversionDownloadStrategy <AbstractDownloadStrategy
  def initialize url, name, version, specs
    super
    @co=HOMEBREW_CACHE+@unique_token
  end

  def cached_location
    @co
  end

  # Check out (or update) the repository. :revision pins the whole tree;
  # :revisions maps :trunk and each external's name to its own revision.
  def fetch
    ohai "Checking out #{@url}"
    if @spec == :revision
      fetch_repo @co, @url, @ref
    elsif @spec == :revisions
      # nil is OK for main_revision, as fetch_repo will then get latest
      main_revision = @ref.delete :trunk
      fetch_repo @co, @url, main_revision, true

      get_externals do |external_name, external_url|
        fetch_repo @co+external_name, external_url, @ref[external_name], true
      end
    else
      fetch_repo @co, @url
    end
  end

  # Export the cached working copy into the current staging directory.
  def stage
    quiet_safe_system svn, 'export', '--force', @co, Dir.pwd
  end

  # Quote str for interpolation inside a *single-quoted* shell string.
  # Within single quotes the shell treats backslash literally, so the
  # only character needing treatment is the single quote itself, written
  # as '\'' (close quote, backslash-escaped quote, reopen quote). The
  # previous backslash-prefixing scheme broke the quoting instead.
  def shell_quote str
    # Oh god escaping shell args.
    # See http://notetoself.vrensk.com/2008/08/escaping-single-quotes-in-ruby-harder-than-expected/
    str.gsub("'") { "'\\''" }
  end

  # Yield [name, url] for each svn:externals entry of @url.
  def get_externals
    `'#{shell_quote(svn)}' propget svn:externals '#{shell_quote(@url)}'`.chomp.each_line do |line|
      name, url = line.split /\s+/
      yield name, url
    end
  end

  # Checkout or update `target` from `url`, optionally pinned to
  # `revision` and optionally skipping externals.
  def fetch_repo target, url, revision=nil, ignore_externals=false
    # Use "svn up" when the repository already exists locally.
    # This saves on bandwidth and will have a similar effect to verifying the
    # cache as it will make any changes to get the right revision.
    svncommand = target.exist? ? 'up' : 'checkout'
    args = [svn, svncommand, '--force', url, target]
    args << '-r' << revision if revision
    args << '--ignore-externals' if ignore_externals
    quiet_safe_system *args
  end

  # Override this method in a DownloadStrategy to force the use of a non-
  # system svn binary. mplayer.rb uses this to require a svn new enough to
  # understand its externals.
  def svn
    '/usr/bin/svn'
  end
end
|
|
|
|
|
|
|
|
# Clones a git repository into the cache and stages it via
# `git checkout-index` (a working-tree export without .git metadata).
class GitDownloadStrategy <AbstractDownloadStrategy
  def initialize url, name, version, specs
    super
    @clone=HOMEBREW_CACHE+@unique_token
  end

  def cached_location
    @clone
  end

  # Clone on first fetch; afterwards only `git fetch` the remote.
  # NOTE(review): `git fetch @url` updates FETCH_HEAD but not local
  # branches — stage's "origin/#{@ref}" checkout presumably relies on
  # refs from the original clone; confirm before changing.
  def fetch
    ohai "Cloning #{@url}"
    unless @clone.exist?
      safe_system 'git', 'clone', @url, @clone # indeed, leave it verbose
    else
      puts "Updating #{@clone}"
      Dir.chdir(@clone) { quiet_safe_system 'git', 'fetch', @url }
    end
  end

  # Export the cached clone (optionally at a pinned branch/tag) into the
  # current staging directory, including any submodules.
  def stage
    dst = Dir.getwd
    Dir.chdir @clone do
      if @spec and @ref
        ohai "Checking out #{@spec} #{@ref}"
        case @spec
        when :branch
          nostdout { quiet_safe_system 'git', 'checkout', "origin/#{@ref}" }
        when :tag
          nostdout { quiet_safe_system 'git', 'checkout', @ref }
        end
      end
      # checkout-index copies the index to dst without .git metadata:
      # http://stackoverflow.com/questions/160608/how-to-do-a-git-export-like-svn-export
      safe_system 'git', 'checkout-index', '-a', '-f', "--prefix=#{dst}/"
      # check for submodules
      if File.exist?('.gitmodules')
        safe_system 'git', 'submodule', 'init'
        safe_system 'git', 'submodule', 'update'
        # $path is expanded by `git submodule foreach`, not by Ruby —
        # hence the escaped inner quotes in the command string.
        sub_cmd = "git checkout-index -a -f \"--prefix=#{dst}/$path/\""
        safe_system 'git', 'submodule', '--quiet', 'foreach', '--recursive', sub_cmd
      end
    end
  end
end
|
2009-09-27 19:30:39 -04:00
|
|
|
|
|
|
|
# Checks a CVS module out into the cache and stages a copy of it with
# the CVS metadata directories removed.
class CVSDownloadStrategy <AbstractDownloadStrategy
  def initialize url, name, version, specs
    super
    @co=HOMEBREW_CACHE+@unique_token
  end

  def cached_location; @co; end

  # Check out the module on first fetch; `cvs up` the cache afterwards.
  def fetch
    ohai "Checking out #{@url}"

    # URL of cvs cvs://:pserver:anoncvs@www.gccxml.org:/cvsroot/GCC_XML:gccxml
    # will become:
    # cvs -d :pserver:anoncvs@www.gccxml.org:/cvsroot/GCC_XML login
    # cvs -d :pserver:anoncvs@www.gccxml.org:/cvsroot/GCC_XML co gccxml
    mod, url = split_url(@url)

    unless @co.exist?
      Dir.chdir HOMEBREW_CACHE do
        safe_system '/usr/bin/cvs', '-d', url, 'login'
        safe_system '/usr/bin/cvs', '-d', url, 'checkout', '-d', @unique_token, mod
      end
    else
      puts "Updating #{@co}"
      Dir.chdir(@co) { safe_system '/usr/bin/cvs', 'up' }
    end
  end

  # Copy the checkout into the staging directory, then strip the CVS
  # metadata directories from the copy.
  def stage
    FileUtils.cp_r Dir[@co+"*"], Dir.pwd

    require 'find'
    Find.find(Dir.pwd) do |path|
      if FileTest.directory?(path) && File.basename(path) == "CVS"
        # Remove BEFORE pruning: Find.prune throws, so any statement
        # after it is unreachable. (The original pruned first — the
        # removal never ran — and referenced the non-existent FileUtil
        # constant instead of FileUtils.)
        FileUtils.rm_r path, :force => true
        Find.prune
      end
    end
  end

private
  # Split "cvs://ROOT:module" into [module, ROOT].
  def split_url(in_url)
    parts=in_url.sub(%r[^cvs://], '').split(/:/)
    mod=parts.pop
    url=parts.join(':')
    [ mod, url ]
  end
end
|
|
|
|
|
|
|
|
# Clones a Mercurial repository into the cache and stages it via
# `hg archive`. Requires a working `hg` on the PATH.
class MercurialDownloadStrategy <AbstractDownloadStrategy
  def initialize url, name, version, specs
    super
    @clone=HOMEBREW_CACHE+@unique_token
  end

  def cached_location; @clone; end

  # Clone on first fetch; `hg pull -u` the cache afterwards.
  def fetch
    raise "You must install mercurial, there are two options:\n\n"+
          " brew install pip && pip install mercurial\n"+
          " easy_install mercurial\n\n"+
          "Homebrew recommends pip over the OS X provided easy_install." \
          unless system "/usr/bin/which hg"

    ohai "Cloning #{@url}"

    unless @clone.exist?
      # strip the hg:// pseudo-scheme before handing the URL to hg
      url=@url.sub(%r[^hg://], '')
      safe_system 'hg', 'clone', url, @clone
    else
      puts "Updating #{@clone}"
      Dir.chdir(@clone) { safe_system 'hg', 'pull', '-u' }
    end
  end

  # Archive the cached clone into the staging directory, optionally at
  # the pinned @ref. (Removed the original's redundant nested
  # Dir.chdir @clone — we are already inside it.)
  def stage
    dst=Dir.getwd
    Dir.chdir @clone do
      if @spec and @ref
        ohai "Checking out #{@spec} #{@ref}"
        safe_system 'hg', 'archive', '-y', '-r', @ref, '-t', 'files', dst
      else
        safe_system 'hg', 'archive', '-y', '-t', 'files', dst
      end
    end
  end
end
|
2010-02-02 13:43:44 +01:00
|
|
|
|
|
|
|
# Checks a Bazaar branch out into the cache and stages it via
# `bzr export`. Requires a working `bzr` on the PATH.
class BazaarDownloadStrategy <AbstractDownloadStrategy
  def initialize url, name, version, specs
    super
    @clone=HOMEBREW_CACHE+@unique_token
  end

  def cached_location; @clone; end

  # Check out on first fetch; `bzr update` the cache afterwards.
  def fetch
    raise "You must install bazaar first" \
      unless system "/usr/bin/which bzr"

    ohai "Cloning #{@url}"
    unless @clone.exist?
      # strip the bzr:// pseudo-scheme before handing the URL to bzr
      url=@url.sub(%r[^bzr://], '')
      # 'lightweight' means history-less
      safe_system 'bzr', 'checkout', '--lightweight', url, @clone
    else
      puts "Updating #{@clone}"
      Dir.chdir(@clone) { safe_system 'bzr', 'update' }
    end
  end

  # Export the cached checkout into the staging directory, optionally at
  # the pinned @ref. (Removed the original's redundant nested
  # Dir.chdir @clone — we are already inside it.)
  def stage
    dst=Dir.getwd
    Dir.chdir @clone do
      if @spec and @ref
        ohai "Checking out #{@spec} #{@ref}"
        safe_system 'bzr', 'export', '-r', @ref, dst
      else
        safe_system 'bzr', 'export', dst
      end
    end
  end
end
|
2010-03-22 21:19:20 -07:00
|
|
|
|
|
|
|
# Map a URL to the DownloadStrategy class that can fetch it.
# Falls back to CurlDownloadStrategy for plain downloads.
def detect_download_strategy url
  case url
  when %r[^cvs://] then CVSDownloadStrategy
  when %r[^hg://] then MercurialDownloadStrategy
  when %r[^svn://] then SubversionDownloadStrategy
  # The '+' must be escaped: unescaped it is a regex quantifier, so the
  # old pattern could never match a literal "svn+http://" URL.
  when %r[^svn\+http://] then SubversionDownloadStrategy
  when %r[^git://] then GitDownloadStrategy
  when %r[^bzr://] then BazaarDownloadStrategy
  when %r[^https?://(.+?\.)?googlecode\.com/hg] then MercurialDownloadStrategy
  when %r[^https?://(.+?\.)?googlecode\.com/svn] then SubversionDownloadStrategy
  when %r[^https?://(.+?\.)?sourceforge\.net/svnroot/] then SubversionDownloadStrategy
  when %r[^http://svn\.apache\.org/repos/] then SubversionDownloadStrategy
  else CurlDownloadStrategy
  end
end
|