2009-08-21 20:30:13 +01:00
|
|
|
# Base class for all download strategies. Subclasses implement
# +fetch+ and +stage+; this class holds the URL/spec state and provides
# helpers for running external commands quietly.
class AbstractDownloadStrategy
  # url::     source URL to download from
  # name::    formula name (used by subclasses to build cache paths)
  # version:: formula version
  # specs::   optional Hash such as { :tag => 'v1.0' }
  def initialize url, name, version, specs
    @url=url
    case specs when Hash
      @spec = specs.keys.first # only use first spec
      @ref = specs.values.first
    end
  end

  # Rewrites +args+ so the wrapped command runs quietly unless the user
  # passed --verbose. A Hash element marks the position of a
  # command-specific quiet flag (:quiet_flag): in quiet mode it is
  # replaced by that flag, in verbose mode it is removed. When no Hash
  # placeholder is present, a plain '-q' is inserted at index 2.
  def expand_safe_system_args args
    args.each_with_index do |arg, ii|
      if arg.is_a? Hash
        # Inverted from the historical `unless…else` for readability;
        # behavior is unchanged.
        if ARGV.verbose?
          args.delete_at ii
        else
          args[ii] = arg[:quiet_flag]
        end
        # Only the first Hash placeholder is honoured.
        return args
      end
    end
    # 2 as default because commands are eg. svn up, git pull
    args.insert(2, '-q') unless ARGV.verbose?
    return args
  end

  # Runs a command through safe_system after quiet-flag expansion.
  def quiet_safe_system *args
    safe_system(*expand_safe_system_args(args))
  end
end
|
|
|
|
|
2011-03-28 19:53:43 -07:00
|
|
|
# Downloads a file with curl into HOMEBREW_CACHE and knows how to
# unpack the common archive formats during stage.
class CurlDownloadStrategy < AbstractDownloadStrategy
  attr_reader :tarball_path

  # Computes the cache path: "<name>-<version><ext>" when the formula
  # name is usable, otherwise the URL's basename verbatim.
  def initialize url, name, version, specs
    super
    @unique_token="#{name}-#{version}" unless name.to_s.empty? or name == '__UNKNOWN__'
    if @unique_token
      @tarball_path=HOMEBREW_CACHE+(@unique_token+ext)
    else
      @tarball_path=HOMEBREW_CACHE+File.basename(@url)
    end
  end

  def cached_location
    @tarball_path
  end

  # Private method, can be overridden if needed.
  def _fetch
    curl @url, '-o', @tarball_path
  end

  # Downloads @url into @tarball_path unless already cached. On failure
  # the partial file is removed before re-raising so a later run starts
  # clean. Returns the path so the caller can verify the checksum.
  def fetch
    ohai "Downloading #{@url}"
    unless @tarball_path.exist?
      begin
        _fetch
      rescue Exception => e
        # Drop any half-written file; ignore_interrupts stops Ctrl-C
        # from leaving the partial download behind.
        ignore_interrupts { @tarball_path.unlink if @tarball_path.exist? }
        if e.kind_of? ErrorDuringExecution
          raise CurlDownloadStrategyError, "Download failed: #{@url}"
        else
          raise
        end
      end
    else
      puts "File already downloaded in #{File.dirname(@tarball_path)}"
    end
    return @tarball_path # thus performs checksum verification
  end

  # Unpacks the download into the current directory, choosing the tool
  # by magic bytes rather than file extension (.jar and .pkg are
  # special-cased before the sniff).
  def stage
    if @tarball_path.extname == '.jar'
      # nil matches no `when` below, so jars fall through to the
      # plain-file branch (jars are zips, but must not be unzipped).
      magic_bytes = nil
    elsif @tarball_path.extname == '.pkg'
      # Use more than 4 characters to not clash with magicbytes
      magic_bytes = "____pkg"
    else
      # get the first four bytes
      File.open(@tarball_path) { |f| magic_bytes = f.read(4) }
    end

    # magic numbers stolen from /usr/share/file/magic/
    case magic_bytes
    when /^PK\003\004/ # .zip archive
      quiet_safe_system '/usr/bin/unzip', {:quiet_flag => '-qq'}, @tarball_path
      chdir
    when /^\037\213/, /^BZh/, /^\037\235/ # gzip/bz2/compress compressed
      # TODO check if it's really a tar archive
      safe_system '/usr/bin/tar', 'xf', @tarball_path
      chdir
    when '____pkg'
      safe_system '/usr/sbin/pkgutil', '--expand', @tarball_path, File.basename(@url)
      chdir
    when 'Rar!'
      quiet_safe_system 'unrar', 'x', {:quiet_flag => '-inul'}, @tarball_path
    else
      # we are assuming it is not an archive, use original filename
      # this behaviour is due to ScriptFileFormula expectations
      # So I guess we should cp, but we mv, for this historic reason
      # HOWEVER if this breaks some expectation you had we *will* change the
      # behaviour, just open an issue at github
      # We also do this for jar files, as they are in fact zip files, but
      # we don't want to unzip them
      FileUtils.mv @tarball_path, File.basename(@url)
    end
  end

  private

  # If the archive expanded to exactly one directory, cd into it so the
  # build sees the source tree at the top level.
  def chdir
    entries=Dir['*']
    case entries.length
    when 0 then raise "Empty archive"
    when 1 then Dir.chdir entries.first rescue nil
    end
  end

  # Picks the filename extension used for the cached download.
  def ext
    # GitHub uses odd URLs for zip files, so check for those
    rx=%r[https?://(www\.)?github\.com/.*/(zip|tar)ball/]
    if rx.match @url
      if $2 == 'zip'
        '.zip'
      else
        '.tgz'
      end
    else
      Pathname.new(@url).extname
    end
  end
end
|
|
|
|
|
2011-07-20 15:37:33 +02:00
|
|
|
# Detect and download from Apache Mirror
class CurlApacheMirrorDownloadStrategy < CurlDownloadStrategy
  # Asks Apache's mirror-chooser page for the closest mirror, then
  # downloads the tarball from that mirror instead of @url itself.
  def _fetch
    # Fetch mirror list site
    require 'open-uri'
    mirror_page = open(@url).read

    # Parse out suggested mirror
    # Yep, this is ghetto, grep the first <strong></strong> element content
    suggested = mirror_page[/<strong>([^<]+)/, 1]

    raise "Couldn't determine mirror. Try again later." if suggested.nil?

    ohai "Best Mirror #{suggested}"
    # Start download from that mirror
    curl suggested, '-o', @tarball_path
  end
end
|
|
|
|
|
2010-06-25 19:13:20 -07:00
|
|
|
# Download via an HTTP POST.
# Query parameters on the URL are converted into POST parameters
class CurlPostDownloadStrategy < CurlDownloadStrategy
  def _fetch
    # Everything after the '?' becomes the POST body (-d).
    post_url, post_data = @url.split('?')
    curl post_url, '-d', post_data, '-o', @tarball_path
  end
end
|
|
|
|
|
2009-12-01 12:01:05 -08:00
|
|
|
# Use this strategy to download but not unzip a file.
# Useful for installing jars.
class NoUnzipCurlDownloadStrategy < CurlDownloadStrategy
  # Copy the cached download into the build directory untouched.
  def stage
    target = File.basename(@url)
    FileUtils.cp(@tarball_path, target)
  end
end
|
|
|
|
|
2011-05-19 07:37:24 -07:00
|
|
|
# Normal strategy tries to untar as well
class GzipOnlyDownloadStrategy < CurlDownloadStrategy
  # Move the download into the build dir and gunzip it in place,
  # without attempting to untar the result.
  def stage
    basename = File.basename(@url)
    FileUtils.mv(@tarball_path, basename)
    safe_system '/usr/bin/gunzip', '-f', basename
  end
end
|
|
|
|
|
2010-07-10 11:59:43 -07:00
|
|
|
# This Download Strategy is provided for use with sites that
# only provide HTTPS and also have a broken cert.
# Try not to need this, as we probably won't accept the formula.
class CurlUnsafeDownloadStrategy < CurlDownloadStrategy
  # Identical to the parent fetch, except curl is told to skip TLS
  # certificate verification (--insecure).
  def _fetch
    curl(@url, '--insecure', '-o', @tarball_path)
  end
end
|
|
|
|
|
2010-11-24 09:40:37 +00:00
|
|
|
# This strategy extracts our binary packages.
class CurlBottleDownloadStrategy <CurlDownloadStrategy
  def initialize url, name, version, specs
    super
    # Bottles get a distinct cache name so they never collide with the
    # source tarball cached for the same formula.
    @tarball_path = HOMEBREW_CACHE/"#{name}-#{version}.bottle#{ext}"
  end

  # Announce the pour, then unpack like any other curl download.
  def stage
    bottle = File.basename(@tarball_path)
    ohai "Pouring #{bottle}"
    super
  end
end
|
|
|
|
|
2009-08-21 20:30:13 +01:00
|
|
|
# Checks a Subversion repository out into the cache, then exports it
# into the build directory.
class SubversionDownloadStrategy <AbstractDownloadStrategy
  def initialize url, name, version, specs
    super
    @unique_token="#{name}--svn" unless name.to_s.empty? or name == '__UNKNOWN__'
    # NOTE(review): if @unique_token is nil here (empty or unknown
    # name), the += below raises NoMethodError — presumably svn
    # formulae always supply a usable name; confirm before relying on
    # the nil path.
    @unique_token += "-HEAD" if ARGV.include? '--HEAD'
    @co=HOMEBREW_CACHE+@unique_token
  end

  def cached_location
    @co
  end

  # Checks out @url (or updates the cached checkout), honouring
  # :revision and :revisions specs; :revisions also pins each
  # svn:externals entry to its own revision.
  def fetch
    # Strip the svn+ marker used to force this strategy over http.
    @url.sub!(/^svn\+/, '') if @url =~ %r[^svn\+http://]
    ohai "Checking out #{@url}"
    if @spec == :revision
      fetch_repo @co, @url, @ref
    elsif @spec == :revisions
      # nil is OK for main_revision, as fetch_repo will then get latest
      main_revision = @ref.delete :trunk
      fetch_repo @co, @url, main_revision, true

      get_externals do |external_name, external_url|
        fetch_repo @co+external_name, external_url, @ref[external_name], true
      end
    else
      fetch_repo @co, @url
    end
  end

  # Exports the checkout (without .svn metadata) into the build dir.
  def stage
    quiet_safe_system svn, 'export', '--force', @co, Dir.pwd
  end

  # Escapes backslashes and single quotes for the backtick command
  # built in get_externals.
  def shell_quote str
    # Oh god escaping shell args.
    # See http://notetoself.vrensk.com/2008/08/escaping-single-quotes-in-ruby-harder-than-expected/
    str.gsub(/\\|'/) { |c| "\\#{c}" }
  end

  # Yields (name, url) for each svn:externals entry of @url.
  def get_externals
    `'#{shell_quote(svn)}' propget svn:externals '#{shell_quote(@url)}'`.chomp.each_line do |line|
      name, url = line.split(/\s+/)
      yield name, url
    end
  end

  # Checks out or updates one repository (or external) at the given
  # revision, optionally ignoring its externals.
  def fetch_repo target, url, revision=nil, ignore_externals=false
    # Use "svn up" when the repository already exists locally.
    # This saves on bandwidth and will have a similar effect to verifying the
    # cache as it will make any changes to get the right revision.
    svncommand = target.exist? ? 'up' : 'checkout'
    args = [svn, svncommand, '--force']
    args << url if !target.exist?
    args << target
    args << '-r' << revision if revision
    args << '--ignore-externals' if ignore_externals
    quiet_safe_system(*args)
  end

  # Try HOMEBREW_SVN, a Homebrew-built svn, and finally the OS X system svn.
  # Not all features are available in the 10.5 system-provided svn.
  def svn
    return ENV['HOMEBREW_SVN'] if ENV['HOMEBREW_SVN']
    return "#{HOMEBREW_PREFIX}/bin/svn" if File.exist? "#{HOMEBREW_PREFIX}/bin/svn"
    return '/usr/bin/svn'
  end
end
|
|
|
|
|
|
|
|
# Require a newer version of Subversion than 1.4.x (Leopard-provided version)
class StrictSubversionDownloadStrategy < SubversionDownloadStrategy
  # Returns the svn executable chosen by the parent class, warning if
  # it is a 1.4.x release (which mishandles svn:externals on export).
  def svn
    exe = super
    `#{exe} --version` =~ /version (\d+\.\d+(\.\d+)*)/
    svn_version = $1
    if svn_version.nil?
      # Previously `svn_version.split` crashed with NoMethodError when
      # the version output was unparseable (e.g. exe missing); now we
      # just skip the version check.
      opoo "Could not determine the version of #{exe}."
    else
      version_tuple = svn_version.split(".").collect { |v| Integer(v) }

      if version_tuple[0] == 1 and version_tuple[1] <= 4
        onoe "Detected Subversion (#{exe}, version #{svn_version}) is too old."
        puts "Subversion 1.4.x will not export externals correctly for this formula."
        puts "You must either `brew install subversion` or set HOMEBREW_SVN to the path"
        puts "of a newer svn binary."
      end
    end
    return exe
  end
end
|
|
|
|
|
2011-03-28 19:53:43 -07:00
|
|
|
# Clones a git repository into the cache and exports the tree (without
# the .git directory) into the build directory.
class GitDownloadStrategy < AbstractDownloadStrategy
  def initialize url, name, version, specs
    super
    @unique_token="#{name}--git" unless name.to_s.empty? or name == '__UNKNOWN__'
    @clone=HOMEBREW_CACHE+@unique_token
  end

  def cached_location
    @clone
  end

  # Shallow clones are only used when history is not needed (no :sha
  # spec) and the host is known to support --depth.
  def support_depth?
    !commit_history_required? and depth_supported_host?
  end

  def commit_history_required?
    @spec == :sha
  end

  def depth_supported_host?
    @url =~ %r(git://) or @url =~ %r(https://github.com/)
  end

  # Clone on first fetch; afterwards re-point origin at @url and fetch.
  def fetch
    raise "You must install Git: brew install git" unless system "/usr/bin/which -s git"

    ohai "Cloning #{@url}"

    if @clone.exist?
      Dir.chdir(@clone) do
        # Check for interupted clone from a previous install
        unless system 'git', 'status', '-s'
          puts "Removing invalid .git repo from cache"
          FileUtils.rm_rf @clone
        end
      end
    end

    unless @clone.exist?
      # Note: first-time checkouts are always done verbosely
      git_args = %w(git clone)
      git_args << "--depth" << "1" if support_depth?
      git_args << @url << @clone
      safe_system *git_args
    else
      puts "Updating #{@clone}"
      Dir.chdir(@clone) do
        # set-url handles formulae whose upstream URL changed since the
        # cached clone was made.
        safe_system 'git', 'remote', 'set-url', 'origin', @url
        quiet_safe_system 'git', 'fetch', 'origin'
        quiet_safe_system 'git', 'fetch', '--tags' if @spec == :tag
      end
    end
  end

  # Checks out the requested branch/tag (or resets to origin/master)
  # and exports the tree, including submodules, into the build dir.
  def stage
    dst = Dir.getwd
    Dir.chdir @clone do
      if @spec and @ref
        ohai "Checking out #{@spec} #{@ref}"
        case @spec
        when :branch
          nostdout { quiet_safe_system 'git', 'checkout', "origin/#{@ref}" }
        when :tag
          nostdout { quiet_safe_system 'git', 'checkout', @ref }
        end
      else
        # otherwise the checkout-index won't checkout HEAD
        # https://github.com/mxcl/homebrew/issues/7124
        # must specify origin/master, otherwise it resets to the current local HEAD
        quiet_safe_system "git", "reset", "--hard", "origin/master"
      end
      # http://stackoverflow.com/questions/160608/how-to-do-a-git-export-like-svn-export
      safe_system 'git', 'checkout-index', '-a', '-f', "--prefix=#{dst}/"
      # check for submodules
      if File.exist?('.gitmodules')
        safe_system 'git', 'submodule', 'init'
        safe_system 'git', 'submodule', 'update'
        # $path is expanded by `git submodule foreach`, not by Ruby.
        sub_cmd = "git checkout-index -a -f \"--prefix=#{dst}/$path/\""
        safe_system 'git', 'submodule', '--quiet', 'foreach', '--recursive', sub_cmd
      end
    end
  end
end
|
2009-09-27 19:30:39 -04:00
|
|
|
|
2011-03-28 19:53:43 -07:00
|
|
|
# Checks a CVS module out into the cache, then copies it (minus CVS
# metadata directories) into the build directory.
class CVSDownloadStrategy < AbstractDownloadStrategy
  def initialize url, name, version, specs
    super
    @unique_token="#{name}--cvs" unless name.to_s.empty? or name == '__UNKNOWN__'
    @co=HOMEBREW_CACHE+@unique_token
  end

  def cached_location; @co; end

  # Log in and check out the module on first fetch; `cvs up` after.
  def fetch
    ohai "Checking out #{@url}"

    # URL of cvs cvs://:pserver:anoncvs@www.gccxml.org:/cvsroot/GCC_XML:gccxml
    # will become:
    # cvs -d :pserver:anoncvs@www.gccxml.org:/cvsroot/GCC_XML login
    # cvs -d :pserver:anoncvs@www.gccxml.org:/cvsroot/GCC_XML co gccxml
    mod, url = split_url(@url)

    unless @co.exist?
      Dir.chdir HOMEBREW_CACHE do
        safe_system '/usr/bin/cvs', '-d', url, 'login'
        safe_system '/usr/bin/cvs', '-d', url, 'checkout', '-d', @unique_token, mod
      end
    else
      puts "Updating #{@co}"
      Dir.chdir(@co) { safe_system '/usr/bin/cvs', 'up' }
    end
  end

  # Copies the checkout into the build dir and strips CVS directories.
  def stage
    FileUtils.cp_r Dir[@co+"*"], Dir.pwd

    require 'find'
    Find.find(Dir.pwd) do |path|
      if FileTest.directory?(path) && File.basename(path) == "CVS"
        # Remove the directory *before* pruning: Find.prune throws, so
        # nothing placed after it ever runs. (The old code pruned first
        # and then called the non-existent FileUtil module — the CVS
        # dirs were silently never removed.)
        FileUtils.rm_r path, :force => true
        Find.prune
      end
    end
  end

  private

  # Splits "cvs://<root>:<module>" into [module, root].
  def split_url(in_url)
    parts=in_url.sub(%r[^cvs://], '').split(/:/)
    mod=parts.pop
    url=parts.join(':')
    [ mod, url ]
  end
end
|
|
|
|
|
2011-03-28 19:53:43 -07:00
|
|
|
# Clones a Mercurial repository into the cache and archives it into
# the build directory.
class MercurialDownloadStrategy < AbstractDownloadStrategy
  def initialize url, name, version, specs
    super
    @unique_token="#{name}--hg" unless name.to_s.empty? or name == '__UNKNOWN__'
    @clone=HOMEBREW_CACHE+@unique_token
  end

  def cached_location; @clone; end

  # Clone on first fetch; pull + update the cached clone afterwards.
  def fetch
    raise "You must `easy_install mercurial'" unless system "/usr/bin/which hg"

    ohai "Cloning #{@url}"

    if @clone.exist?
      puts "Updating #{@clone}"
      Dir.chdir(@clone) do
        safe_system 'hg', 'pull'
        safe_system 'hg', 'update'
      end
    else
      repo = @url.sub(%r[^hg://], '')
      safe_system 'hg', 'clone', repo, @clone
    end
  end

  # Archive the working copy (at @ref when given) into the build dir.
  def stage
    destination = Dir.getwd
    Dir.chdir @clone do
      if @spec and @ref
        ohai "Checking out #{@spec} #{@ref}"
        Dir.chdir @clone do
          safe_system 'hg', 'archive', '-y', '-r', @ref, '-t', 'files', destination
        end
      else
        safe_system 'hg', 'archive', '-y', '-t', 'files', destination
      end
    end
  end
end
|
2010-02-02 13:43:44 +01:00
|
|
|
|
2011-03-28 19:53:43 -07:00
|
|
|
# Checks a Bazaar branch out into the cache and exports it into the
# build directory.
class BazaarDownloadStrategy < AbstractDownloadStrategy
  def initialize url, name, version, specs
    super
    @unique_token="#{name}--bzr" unless name.to_s.empty? or name == '__UNKNOWN__'
    @clone=HOMEBREW_CACHE+@unique_token
  end

  def cached_location; @clone; end

  # Check the branch out on first fetch; update the cached copy after.
  def fetch
    raise "You must install bazaar first" \
      unless system "/usr/bin/which bzr"

    ohai "Cloning #{@url}"
    if @clone.exist?
      puts "Updating #{@clone}"
      Dir.chdir(@clone) { safe_system 'bzr', 'update' }
    else
      branch = @url.sub(%r[^bzr://], '')
      # 'lightweight' means history-less
      safe_system 'bzr', 'checkout', '--lightweight', branch, @clone
    end
  end

  # Export the tree (at revision @ref when given) into the build dir.
  def stage
    destination = Dir.getwd
    Dir.chdir @clone do
      if @spec and @ref
        ohai "Checking out #{@spec} #{@ref}"
        Dir.chdir @clone do
          safe_system 'bzr', 'export', '-r', @ref, destination
        end
      else
        safe_system 'bzr', 'export', destination
      end
    end
  end
end
|
2010-03-22 21:19:20 -07:00
|
|
|
|
2010-09-29 20:22:34 +08:00
|
|
|
# Clones a Fossil repository into the cache and opens it in the build
# directory.
class FossilDownloadStrategy < AbstractDownloadStrategy
  def initialize url, name, version, specs
    super
    @unique_token="#{name}--fossil" unless name.to_s.empty? or name == '__UNKNOWN__'
    @clone=HOMEBREW_CACHE+@unique_token
  end

  def cached_location; @clone; end

  # Clone the repository file on first fetch; pull into it afterwards.
  def fetch
    raise "You must install fossil first" \
      unless system "/usr/bin/which fossil"

    ohai "Cloning #{@url}"
    if @clone.exist?
      puts "Updating #{@clone}"
      safe_system 'fossil', 'pull', '-R', @clone
    else
      repo = @url.sub(%r[^fossil://], '')
      safe_system 'fossil', 'clone', repo, @clone
    end
  end

  # Open the cached repository in the build dir, at @ref when given.
  def stage
    # TODO: The 'open' and 'checkout' commands are very noisy and have no '-q' option.
    safe_system 'fossil', 'open', @clone
    if @spec and @ref
      ohai "Checking out #{@spec} #{@ref}"
      safe_system 'fossil', 'checkout', @ref
    end
  end
end
|
|
|
|
|
2010-03-22 21:19:20 -07:00
|
|
|
# Maps a download URL to the strategy class that handles it. Custom
# scheme prefixes win first, then a few well-known hosting patterns;
# anything unrecognised is fetched with plain curl. Order matters:
# the first matching pattern is used.
def detect_download_strategy url
  # We use a special URL pattern for cvs
  if url =~ %r[^cvs://] then CVSDownloadStrategy
  # Standard URLs
  elsif url =~ %r[^bzr://] then BazaarDownloadStrategy
  elsif url =~ %r[^git://] then GitDownloadStrategy
  elsif url =~ %r[^hg://] then MercurialDownloadStrategy
  elsif url =~ %r[^svn://] then SubversionDownloadStrategy
  elsif url =~ %r[^svn\+http://] then SubversionDownloadStrategy
  elsif url =~ %r[^fossil://] then FossilDownloadStrategy
  # Some well-known source hosts
  elsif url =~ %r[^https?://github\.com/.+\.git$] then GitDownloadStrategy
  elsif url =~ %r[^https?://(.+?\.)?googlecode\.com/hg] then MercurialDownloadStrategy
  elsif url =~ %r[^https?://(.+?\.)?googlecode\.com/svn] then SubversionDownloadStrategy
  elsif url =~ %r[^https?://(.+?\.)?sourceforge\.net/svnroot/] then SubversionDownloadStrategy
  elsif url =~ %r[^http://svn.apache.org/repos/] then SubversionDownloadStrategy
  elsif url =~ %r[^http://www.apache.org/dyn/closer.cgi] then CurlApacheMirrorDownloadStrategy
  # Otherwise just try to download
  else CurlDownloadStrategy
  end
end
|