2015-08-03 13:09:07 +01:00
|
|
|
require "utils/json"
|
2013-02-07 12:43:22 -06:00
|
|
|
|
2009-08-21 20:30:13 +01:00
|
|
|
class AbstractDownloadStrategy
  include FileUtils

  # meta     - the resource's specs hash (strategy-specific options)
  # name     - the formula/resource name
  # version  - the resource's version
  # resource - the Resource being downloaded
  attr_reader :meta, :name, :version, :resource

  def initialize(name, resource)
    @name = name
    @resource = resource
    @url = resource.url
    @version = resource.version
    @meta = resource.specs
  end

  # Download and cache the resource as {#cached_location}.
  # Subclasses override this; the base implementation is a no-op.
  def fetch
  end

  # Unpack {#cached_location} into the current working directory.
  def stage
  end

  # @!attribute [r] cached_location
  # The path to the cached file or directory associated with the resource.
  def cached_location
  end

  # Remove {#cached_location} and any other files associated with the resource
  # from the cache.
  def clear_cache
    rm_rf(cached_location)
  end

  # Replace a Hash placeholder in args with its :quiet_flag value, or drop it
  # entirely when running verbosely; when no placeholder is present, insert a
  # plain "-q" (unless verbose).
  def expand_safe_system_args(args)
    args = args.dup
    args.each_with_index do |arg, ii|
      if arg.is_a? Hash
        # Fixed idiom: was `unless ... else`, which the style guide forbids;
        # branches are swapped accordingly, behavior is unchanged.
        if ARGV.verbose?
          args.delete_at ii
        else
          args[ii] = arg[:quiet_flag]
        end
        return args
      end
    end
    # 2 as default because commands are eg. svn up, git pull
    args.insert(2, "-q") unless ARGV.verbose?
    args
  end

  # Run safe_system with quiet flags expanded/stripped per verbosity.
  def quiet_safe_system(*args)
    safe_system(*expand_safe_system_args(args))
  end

  private

  # Decompression tools installed via Homebrew (fixed opt paths).
  def xzpath
    "#{HOMEBREW_PREFIX}/opt/xz/bin/xz"
  end

  def lzippath
    "#{HOMEBREW_PREFIX}/opt/lzip/bin/lzip"
  end

  def lhapath
    "#{HOMEBREW_PREFIX}/opt/lha/bin/lha"
  end

  # VCS binaries: first executable candidate wins; result is memoized.
  def cvspath
    @cvspath ||= %W[
      /usr/bin/cvs
      #{HOMEBREW_PREFIX}/bin/cvs
      #{HOMEBREW_PREFIX}/opt/cvs/bin/cvs
      #{which("cvs")}
    ].find { |p| File.executable? p }
  end

  def hgpath
    @hgpath ||= %W[
      #{which("hg")}
      #{HOMEBREW_PREFIX}/bin/hg
      #{HOMEBREW_PREFIX}/opt/mercurial/bin/hg
    ].find { |p| File.executable? p }
  end

  def bzrpath
    @bzrpath ||= %W[
      #{which("bzr")}
      #{HOMEBREW_PREFIX}/bin/bzr
      #{HOMEBREW_PREFIX}/opt/bazaar/bin/bzr
    ].find { |p| File.executable? p }
  end

  def fossilpath
    @fossilpath ||= %W[
      #{which("fossil")}
      #{HOMEBREW_PREFIX}/bin/fossil
      #{HOMEBREW_PREFIX}/opt/fossil/bin/fossil
    ].find { |p| File.executable? p }
  end
end
|
|
|
|
|
2013-10-09 21:41:15 -05:00
|
|
|
class VCSDownloadStrategy < AbstractDownloadStrategy
  # Ref kinds recognised in a resource's specs, in priority order: the first
  # key present in the specs hash wins (see #extract_ref).
  REF_TYPES = [:tag, :branch, :revisions, :revision].freeze

  def initialize(name, resource)
    super
    @ref_type, @ref = extract_ref(meta)
    @revision = meta[:revision]
    # Per-strategy cache directory under HOMEBREW_CACHE, e.g. "name--git".
    @clone = HOMEBREW_CACHE.join(cache_filename)
  end

  # Clone or update the cached repository; then, for tag refs with a pinned
  # :revision, verify the tag still points at the expected commit.
  def fetch
    ohai "Cloning #{@url}"

    if cached_location.exist? && repo_valid?
      puts "Updating #{cached_location}"
      update
    elsif cached_location.exist?
      # Cache directory exists but is not a valid repo (e.g. an interrupted
      # or corrupted clone): start over.
      puts "Removing invalid repository from cache"
      clear_cache
      clone_repo
    else
      clone_repo
    end

    if @ref_type == :tag && @revision && current_revision
      unless current_revision == @revision
        raise <<-EOS.undent
          #{@ref} tag should be #{@revision}
          but is actually #{current_revision}
        EOS
      end
    end
  end

  def cached_location
    @clone
  end

  # True when the requested version is a HEAD (latest upstream) build.
  def head?
    version.head?
  end

  private

  # Subclasses override this with their VCS name ("git", "svn", ...); it is
  # embedded in the cache directory name.
  def cache_tag
    "__UNKNOWN__"
  end

  def cache_filename
    "#{name}--#{cache_tag}"
  end

  # Subclass hooks below: the base implementations are deliberate no-ops /
  # optimistic defaults.
  def repo_valid?
    true
  end

  def clone_repo
  end

  def update
  end

  # Subclasses may return the currently checked-out revision identifier;
  # nil disables the tag-revision consistency check in #fetch.
  def current_revision
  end

  # Returns [ref_type, ref] for the first REF_TYPES key present in specs,
  # or [nil, nil] when none is given.
  def extract_ref(specs)
    key = REF_TYPES.find { |type| specs.key?(type) }
    [key, specs[key]]
  end
end
|
|
|
|
|
2015-01-04 15:33:25 -05:00
|
|
|
class AbstractFileDownloadStrategy < AbstractDownloadStrategy
  # Unpack the cached archive into the current working directory, choosing
  # the extraction tool from the file's detected compression type.
  def stage
    case cached_location.compression_type
    when :zip
      with_system_path { quiet_safe_system "unzip", { :quiet_flag => "-qq" }, cached_location }
      chdir
    when :gzip_only
      with_system_path { buffered_write("gunzip") }
    when :bzip2_only
      with_system_path { buffered_write("bunzip2") }
    when :gzip, :bzip2, :compress, :tar
      # Assume these are also tarred
      with_system_path { safe_system "tar", "xf", cached_location }
      chdir
    when :xz
      with_system_path { pipe_to_tar(xzpath) }
      chdir
    when :lzip
      with_system_path { pipe_to_tar(lzippath) }
      chdir
    when :lha
      safe_system lhapath, "x", cached_location
    when :xar
      safe_system "/usr/bin/xar", "-xf", cached_location
    when :rar
      quiet_safe_system "unrar", "x", { :quiet_flag => "-inul" }, cached_location
    when :p7zip
      safe_system "7zr", "x", cached_location
    else
      # Not an archive (or unknown type): just copy the file into place.
      cp cached_location, basename_without_params
    end
  end

  private

  # If the archive contained a single top-level entry, cd into it so staging
  # continues from the unpacked source root.
  def chdir
    entries = Dir["*"]
    case entries.length
    when 0 then raise "Empty archive"
    # rescue nil is deliberate: the single entry may be a regular file
    # rather than a directory, in which case we stay where we are.
    when 1 then Dir.chdir entries.first rescue nil
    end
  end

  # Decompress with `tool` and stream the output through `tar xf -`,
  # copying in 16 KiB chunks.
  def pipe_to_tar(tool)
    Utils.popen_read(tool, "-dc", cached_location.to_s) do |rd|
      Utils.popen_write("tar", "xf", "-") do |wr|
        buf = ""
        wr.write(buf) while rd.read(16384, buf)
      end
    end
  end

  # gunzip and bunzip2 write the output file in the same directory as the input
  # file regardless of the current working directory, so we need to write it to
  # the correct location ourselves.
  def buffered_write(tool)
    # Output filename: the download's basename minus its compression extension.
    target = File.basename(basename_without_params, cached_location.extname)

    Utils.popen_read(tool, "-f", cached_location.to_s, "-c") do |pipe|
      File.open(target, "wb") do |f|
        buf = ""
        f.write(buf) while pipe.read(16384, buf)
      end
    end
  end

  def basename_without_params
    # Strip any ?thing=wad out of .c?thing=wad style extensions
    File.basename(@url)[/[^?]+/]
  end

  def ext
    # We need a Pathname because we've monkeypatched extname to support double
    # extensions (e.g. tar.gz).
    # We can't use basename_without_params, because given a URL like
    #   https://example.com/download.php?file=foo-1.0.tar.gz
    # the extension we want is ".tar.gz", not ".php".
    Pathname.new(@url).extname[/[^?]+/]
  end
end
|
|
|
|
|
2015-01-04 15:33:25 -05:00
|
|
|
class CurlDownloadStrategy < AbstractFileDownloadStrategy
  attr_reader :mirrors, :tarball_path, :temporary_path

  def initialize(name, resource)
    super
    @mirrors = resource.mirrors.dup
    @tarball_path = HOMEBREW_CACHE.join("#{name}-#{version}#{ext}")
    # Partial downloads land here and are renamed into place on success,
    # so an interrupted fetch never poisons the cache.
    @temporary_path = Pathname.new("#{cached_location}.incomplete")
  end

  # Download @url into the cache, resuming partial downloads when possible
  # and falling back to mirrors on failure.
  def fetch
    ohai "Downloading #{@url}"

    unless cached_location.exist?
      urls = actual_urls
      unless urls.empty?
        ohai "Downloading from #{urls.last}"
        # Refuse HTTPS->HTTP redirects when the user opted out of them.
        if !ENV["HOMEBREW_NO_INSECURE_REDIRECT"].nil? && @url.start_with?("https://") &&
           urls.any? { |u| !u.start_with? "https://" }
          puts "HTTPS to HTTP redirect detected & HOMEBREW_NO_INSECURE_REDIRECT is set."
          raise CurlDownloadStrategyError.new(@url)
        end
        # Fetch directly from the final redirect target.
        @url = urls.last
      end

      had_incomplete_download = temporary_path.exist?
      begin
        _fetch
      rescue ErrorDuringExecution
        # 33 == range not supported
        # try wiping the incomplete download and retrying once
        if $?.exitstatus == 33 && had_incomplete_download
          ohai "Trying a full download"
          temporary_path.unlink
          had_incomplete_download = false
          retry
        else
          raise CurlDownloadStrategyError.new(@url)
        end
      end
      # Atomic-ish publish; ignore_interrupts keeps the rename from being
      # cut in half by Ctrl-C.
      ignore_interrupts { temporary_path.rename(cached_location) }
    else
      puts "Already downloaded: #{cached_location}"
    end
  rescue CurlDownloadStrategyError
    raise if mirrors.empty?
    puts "Trying a mirror..."
    @url = mirrors.shift
    retry
  end

  def cached_location
    tarball_path
  end

  def clear_cache
    super
    rm_rf(temporary_path)
  end

  private

  # Private method, can be overridden if needed.
  def _fetch
    curl @url, "-C", downloaded_size, "-o", temporary_path
  end

  # Curl options to be always passed to curl,
  # with raw head calls (`curl -I`) or with actual `fetch`.
  def _curl_opts
    copts = []
    copts << "--user" << meta.fetch(:user) if meta.key?(:user)
    copts
  end

  # Follow redirects with a HEAD request, collecting every Location header
  # and resolving each relative to its predecessor; returns the chain of
  # absolute URLs (possibly empty).
  def actual_urls
    urls = []
    curl_args = _curl_opts << "-I" << "-L" << @url
    Utils.popen_read("curl", *curl_args).scan(/^Location: (.+)$/).map do |m|
      urls << URI.join(urls.last || @url, m.first.chomp).to_s
    end
    urls
  end

  # Bytes already present in the partial download (0 when none); used for
  # curl's -C resume option.
  def downloaded_size
    temporary_path.size? || 0
  end

  def curl(*args)
    args.concat _curl_opts
    # Fail fast when mirrors are available to fall back on.
    args << "--connect-timeout" << "5" unless mirrors.empty?
    super
  end
end
|
|
|
|
|
2011-07-20 15:37:33 +02:00
|
|
|
# Detect and download from Apache Mirror
class CurlApacheMirrorDownloadStrategy < CurlDownloadStrategy
  # Query the Apache mirror CGI (with &asjson=1) in a child process and
  # return its raw JSON output as a string.
  def apache_mirrors
    rd, wr = IO.pipe
    buf = ""

    pid = fork do
      # Verbose curl output would corrupt the captured JSON.
      ENV.delete "HOMEBREW_CURL_VERBOSE"
      rd.close
      $stdout.reopen(wr)
      $stderr.reopen(wr)
      curl "#{@url}&asjson=1"
    end
    wr.close

    rd.readline if ARGV.verbose? # Remove Homebrew output
    buf << rd.read until rd.eof?
    rd.close
    Process.wait(pid)
    buf
  end

  def _fetch
    # Only resolve the mirror once; retries go straight to curl.
    return super if @tried_apache_mirror
    @tried_apache_mirror = true

    mirrors = Utils::JSON.load(apache_mirrors)
    path_info = mirrors.fetch("path_info")
    @url = mirrors.fetch("preferred") + path_info
    # Keep the canonical archive server as a fallback mirror.
    @mirrors |= %W[https://archive.apache.org/dist/#{path_info}]

    ohai "Best Mirror #{@url}"
    super
  rescue IndexError, Utils::JSON::Error
    # IndexError covers Hash#fetch misses; JSON::Error covers a bad response.
    raise CurlDownloadStrategyError, "Couldn't determine mirror, try again later."
  end
end
|
|
|
|
|
2010-06-25 19:13:20 -07:00
|
|
|
# Download via an HTTP POST.
# Query parameters on the URL are converted into POST parameters
class CurlPostDownloadStrategy < CurlDownloadStrategy
  # Split the URL at "?" and send everything after it as the POST body.
  def _fetch
    pieces = @url.split("?")
    endpoint = pieces[0]
    payload = pieces[1]
    curl endpoint, "-d", payload, "-C", downloaded_size, "-o", temporary_path
  end
end
|
|
|
|
|
2009-12-01 12:01:05 -08:00
|
|
|
# Use this strategy to download but not unzip a file.
# Useful for installing jars.
class NoUnzipCurlDownloadStrategy < CurlDownloadStrategy
  # Copy the download into the staging directory verbatim instead of
  # extracting it.
  def stage
    destination = basename_without_params
    cp cached_location, destination
  end
end
|
|
|
|
|
2010-11-24 09:40:37 +00:00
|
|
|
# This strategy extracts our binary packages.
class CurlBottleDownloadStrategy < CurlDownloadStrategy
  # Announce the pour, then extract via the normal curl staging path.
  def stage
    bottle = cached_location.basename
    ohai "Pouring #{bottle}"
    super
  end
end
|
|
|
|
|
2013-06-08 16:41:23 +01:00
|
|
|
# This strategy extracts local binary packages.
class LocalBottleDownloadStrategy < AbstractFileDownloadStrategy
  attr_reader :cached_location

  # path - a bottle already present on disk; no download happens here.
  def initialize(path)
    @cached_location = path
  end

  # Announce the pour, then extract via the shared file-staging path.
  def stage
    bottle = cached_location.basename
    ohai "Pouring #{bottle}"
    super
  end
end
|
|
|
|
|
2013-09-24 01:34:28 -07:00
|
|
|
# S3DownloadStrategy downloads tarballs from AWS S3.
# To use it, add ":using => S3DownloadStrategy" to the URL section of your
# formula. This download strategy uses AWS access tokens (in the
# environment variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY)
# to sign the request. This strategy is good in a corporate setting,
# because it lets you use a private S3 bucket as a repo for internal
# distribution. (It will work for public buckets as well.)
class S3DownloadStrategy < CurlDownloadStrategy
  def _fetch
    # Put the aws gem requirement here (vs top of file) so it's only
    # a dependency of S3 users, not all Homebrew users
    require "rubygems"
    begin
      require "aws-sdk-v1"
    rescue LoadError
      onoe "Install the aws-sdk gem into the gem repo used by brew."
      raise
    end

    # Dots in the hostname are escaped so "bucket.s3.amazonaws.com" must
    # match literally (previously "." matched any character, accepting
    # lookalike hosts).
    if @url !~ %r{^https?://+([^.]+)\.s3\.amazonaws\.com/+(.+)$}
      raise "Bad S3 URL: " + @url
    end
    bucket = $1
    key = $2

    obj = AWS::S3.new.buckets[bucket].objects[key]
    begin
      # Prefer a signed URL (works for private buckets)...
      s3url = obj.url_for(:get)
    rescue AWS::Errors::MissingCredentialsError
      # ...but fall back to the public URL when no credentials are set.
      ohai "AWS credentials missing, trying public URL instead."
      s3url = obj.public_url
    end

    curl s3url, "-C", downloaded_size, "-o", temporary_path
  end
end
|
|
|
|
|
2013-10-09 21:41:15 -05:00
|
|
|
class SubversionDownloadStrategy < VCSDownloadStrategy
  def initialize(name, resource)
    super
    # Strip the svn+http:// pseudo-scheme; svn expects a plain http URL.
    @url = @url.sub("svn+http://", "")
  end

  # If the cached checkout points at a different repository URL, try
  # `svn switch` first; wipe the cache when that fails, then defer to the
  # shared VCS fetch flow.
  def fetch
    clear_cache unless @url.chomp("/") == repo_url || quiet_system("svn", "switch", @url, cached_location)
    super
  end

  def stage
    super
    quiet_safe_system "svn", "export", "--force", cached_location, Dir.pwd
  end

  private

  # Repository root URL recorded in the cached checkout (nil if unknown).
  def repo_url
    Utils.popen_read("svn", "info", cached_location.to_s).strip[/^URL: (.+)$/, 1]
  end

  # Yields [name, url] for each svn:externals entry of the repository.
  def get_externals
    Utils.popen_read("svn", "propget", "svn:externals", @url).chomp.each_line do |line|
      name, url = line.split(/\s+/)
      yield name, url
    end
  end

  def fetch_repo(target, url, revision = nil, ignore_externals = false)
    # Use "svn up" when the repository already exists locally.
    # This saves on bandwidth and will have a similar effect to verifying the
    # cache as it will make any changes to get the right revision.
    svncommand = target.directory? ? "up" : "checkout"
    args = ["svn", svncommand]
    args << url unless target.directory?
    args << target
    if revision
      # Fixed: this interpolated the undefined local `ref` (NameError);
      # the parameter is `revision`.
      ohai "Checking out #{revision}"
      args << "-r" << revision
    end
    args << "--ignore-externals" if ignore_externals
    quiet_safe_system(*args)
  end

  def cache_tag
    head? ? "svn-HEAD" : "svn"
  end

  def repo_valid?
    cached_location.join(".svn").directory?
  end

  def clone_repo
    case @ref_type
    when :revision
      fetch_repo cached_location, @url, @ref
    when :revisions
      # nil is OK for main_revision, as fetch_repo will then get latest
      main_revision = @ref[:trunk]
      fetch_repo cached_location, @url, main_revision, true

      # Fetch each external at its pinned revision, externals-of-externals
      # excluded (ignore_externals = true).
      get_externals do |external_name, external_url|
        fetch_repo cached_location+external_name, external_url, @ref[external_name], true
      end
    else
      fetch_repo cached_location, @url
    end
  end

  alias_method :update, :clone_repo
end
|
|
|
|
|
2013-10-09 21:41:15 -05:00
|
|
|
class GitDownloadStrategy < VCSDownloadStrategy
  # Hosts known to support `git clone --depth` (shallow clones).
  SHALLOW_CLONE_WHITELIST = [
    %r{git://},
    %r{https://github\.com},
    %r{http://git\.sv\.gnu\.org},
    %r{http://llvm\.org}
  ]

  def initialize(name, resource)
    super
    # Default to tracking the master branch when no ref was specified.
    @ref_type ||= :branch
    @ref ||= "master"
    # Shallow clones are on by default; formulae opt out via :shallow => false.
    @shallow = meta.fetch(:shallow) { true }
  end

  def stage
    super
    # Copy the whole work tree (including .git) into the staging directory.
    cp_r File.join(cached_location, "."), Dir.pwd
  end

  private

  def cache_tag
    "git"
  end

  # Stored in the clone's git config as homebrew.cacheversion; bumping it
  # would let us invalidate previously cached clones.
  def cache_version
    0
  end

  def update
    cached_location.cd do
      config_repo
      update_repo
      checkout
      reset
      update_submodules if submodules?
    end
  end

  # Whether we want a shallow clone (requested and supported by the host).
  def shallow_clone?
    @shallow && support_depth?
  end

  # Whether the existing cached clone is actually shallow on disk.
  def is_shallow_clone?
    git_dir.join("shallow").exist?
  end

  # Pinned revisions can't be fetched shallowly; otherwise defer to the
  # host whitelist.
  def support_depth?
    @ref_type != :revision && SHALLOW_CLONE_WHITELIST.any? { |rx| rx === @url }
  end

  def git_dir
    cached_location.join(".git")
  end

  # True when @ref resolves to a commit in the cached clone.
  def has_ref?
    quiet_system "git", "--git-dir", git_dir, "rev-parse", "-q", "--verify", "#{@ref}^{commit}"
  end

  def current_revision
    Utils.popen_read("git", "--git-dir", git_dir, "rev-parse", "-q", "--verify", "HEAD").strip
  end

  def repo_valid?
    quiet_system "git", "--git-dir", git_dir, "status", "-s"
  end

  def submodules?
    cached_location.join(".gitmodules").exist?
  end

  def clone_args
    args = %w[clone]
    args << "--depth" << "1" if shallow_clone?

    case @ref_type
    when :branch, :tag then args << "--branch" << @ref
    end

    args << @url << cached_location
  end

  # Fetch refspec matching the requested ref; defaults to tracking master.
  def refspec
    case @ref_type
    when :branch then "+refs/heads/#{@ref}:refs/remotes/origin/#{@ref}"
    when :tag then "+refs/tags/#{@ref}:refs/tags/#{@ref}"
    else "+refs/heads/master:refs/remotes/origin/master"
    end
  end

  # Re-point origin at the current URL/refspec in case the formula changed.
  def config_repo
    safe_system "git", "config", "remote.origin.url", @url
    safe_system "git", "config", "remote.origin.fetch", refspec
  end

  def update_repo
    # Skip fetching when a non-branch ref is already present locally.
    if @ref_type == :branch || !has_ref?
      if !shallow_clone? && is_shallow_clone?
        # A full clone is now wanted but the cache is shallow: unshallow it.
        quiet_safe_system "git", "fetch", "origin", "--unshallow"
      else
        quiet_safe_system "git", "fetch", "origin"
      end
    end
  end

  def clone_repo
    safe_system "git", *clone_args
    cached_location.cd do
      safe_system "git", "config", "homebrew.cacheversion", cache_version
      checkout
      update_submodules if submodules?
    end
  end

  def checkout
    ohai "Checking out #{@ref_type} #{@ref}" if @ref_type && @ref
    quiet_safe_system "git", "checkout", "-f", @ref, "--"
  end

  # Target for `git reset --hard`: the remote-tracking branch for branch
  # refs, the ref itself for tags/revisions.
  def reset_args
    ref = case @ref_type
    when :branch then "origin/#{@ref}"
    when :revision, :tag then @ref
    end

    %W[reset --hard #{ref}]
  end

  def reset
    quiet_safe_system "git", *reset_args
  end

  def update_submodules
    quiet_safe_system "git", "submodule", "foreach", "--recursive", "git submodule sync"
    quiet_safe_system "git", "submodule", "update", "--init", "--recursive"
  end
end
|
2009-09-27 19:30:39 -04:00
|
|
|
|
2013-10-09 21:41:15 -05:00
|
|
|
class CVSDownloadStrategy < VCSDownloadStrategy
  def initialize(name, resource)
    super
    @url = @url.sub(%r{^cvs://}, "")

    # The CVS module defaults to the formula name unless given explicitly
    # via :module or embedded in the URL as a trailing ":module" component.
    if meta.key?(:module)
      @module = meta.fetch(:module)
    elsif @url !~ %r{:[^/]+$}
      @module = name
    else
      @module, @url = split_url(@url)
    end
  end

  def stage
    cp_r File.join(cached_location, "."), Dir.pwd
  end

  private

  def cache_tag
    "cvs"
  end

  def repo_valid?
    cached_location.join("CVS").directory?
  end

  def clone_repo
    HOMEBREW_CACHE.cd do
      # Login is only needed (and allowed) with pserver; skip for anoncvs.
      quiet_safe_system cvspath, { :quiet_flag => "-Q" }, "-d", @url, "login" if @url.include? "pserver"
      quiet_safe_system cvspath, { :quiet_flag => "-Q" }, "-d", @url, "checkout", "-d", cache_filename, @module
    end
  end

  def update
    cached_location.cd { quiet_safe_system cvspath, { :quiet_flag => "-Q" }, "up" }
  end

  # Split a "root:module" CVS URL into [module, root]: the last colon-
  # separated component is the module, the rest is rejoined as the root.
  def split_url(in_url)
    parts = in_url.split(/:/)
    mod = parts.pop
    url = parts.join(":")
    [mod, url]
  end
end
|
2012-09-25 10:11:13 -04:00
|
|
|
|
2014-12-06 12:29:15 -05:00
|
|
|
class MercurialDownloadStrategy < VCSDownloadStrategy
  def initialize(name, resource)
    super
    # Strip the hg:// pseudo-scheme; hg itself expects the bare URL.
    @url = @url.sub(%r{^hg://}, "")
  end

  # Export the cached repository (and subrepos) into the staging directory,
  # at the requested ref when one was given.
  def stage
    super

    dst = Dir.getwd
    cached_location.cd do
      if @ref_type && @ref
        # Fixed: the redundant `if @ref_type && @ref` modifier on this line
        # (already inside the identical guard) has been dropped.
        ohai "Checking out #{@ref_type} #{@ref}"
        safe_system hgpath, "archive", "--subrepos", "-y", "-r", @ref, "-t", "files", dst
      else
        safe_system hgpath, "archive", "--subrepos", "-y", "-t", "files", dst
      end
    end
  end

  private

  def cache_tag
    "hg"
  end

  def repo_valid?
    cached_location.join(".hg").directory?
  end

  def clone_repo
    safe_system hgpath, "clone", @url, cached_location
  end

  def update
    cached_location.cd { quiet_safe_system hgpath, "pull", "--update" }
  end
end
|
2012-09-28 21:04:03 -05:00
|
|
|
|
2014-12-06 12:29:15 -05:00
|
|
|
class BazaarDownloadStrategy < VCSDownloadStrategy
  def initialize(name, resource)
    super
    # Drop the bzr:// scheme marker before handing the URL to bzr.
    @url = @url.sub(%r{^bzr://}, "")
  end

  # Copy the checkout's contents into the build directory, then remove
  # the Bazaar metadata so it doesn't end up in the staged tree.
  def stage
    # The export command doesn't work on checkouts
    # See https://bugs.launchpad.net/bzr/+bug/897511
    source = File.join(cached_location, ".")
    cp_r(source, Dir.pwd)
    rm_r(".bzr")
  end

  private

  def cache_tag
    "bzr"
  end

  # A cached directory is a usable repo if it has Bazaar metadata.
  def repo_valid?
    cached_location.join(".bzr").directory?
  end

  def clone_repo
    # "lightweight" means history-less
    safe_system(bzrpath, "checkout", "--lightweight", @url, cached_location)
  end

  def update
    cached_location.cd do
      quiet_safe_system(bzrpath, "update")
    end
  end
end
|
2012-09-28 21:04:03 -05:00
|
|
|
|
2014-12-06 12:29:15 -05:00
|
|
|
class FossilDownloadStrategy < VCSDownloadStrategy
  def initialize(name, resource)
    super
    # Drop the fossil:// scheme marker before handing the URL to fossil.
    @url = @url.sub(%r{^fossil://}, "")
  end

  # Open the cached fossil repository in the current working directory,
  # checking out the requested ref when one was given.
  def stage
    super
    open_args = [fossilpath, "open", cached_location]
    open_args.push(@ref) if @ref_type && @ref
    safe_system(*open_args)
  end

  private

  def cache_tag
    "fossil"
  end

  def clone_repo
    safe_system(fossilpath, "clone", @url, cached_location)
  end

  def update
    safe_system(fossilpath, "pull", "-R", cached_location)
  end
end
|
|
|
|
|
2012-06-25 21:39:28 -05:00
|
|
|
class DownloadStrategyDetector
  # Resolve a download strategy for +url+.
  # +strategy+ may be nil (detect from the URL), a strategy class
  # (used as-is), or a symbol naming a built-in strategy.
  # Raises TypeError for anything else.
  def self.detect(url, strategy = nil)
    if strategy.nil?
      detect_from_url(url)
    elsif Class === strategy && strategy < AbstractDownloadStrategy
      strategy
    elsif Symbol === strategy
      detect_from_symbol(strategy)
    else
      raise TypeError,
        "Unknown download strategy specification #{strategy.inspect}"
    end
  end

  # Guess the appropriate strategy from the URL's scheme/host/path.
  # Order matters: more specific patterns must come before the
  # CurlDownloadStrategy fallback.
  def self.detect_from_url(url)
    case url
    when %r{^https?://.+\.git$}, %r{^git://}
      GitDownloadStrategy
    when %r{^https?://www\.apache\.org/dyn/closer\.cgi}, %r{^https?://www\.apache\.org/dyn/closer\.lua}
      CurlApacheMirrorDownloadStrategy
    when %r{^https?://(.+?\.)?googlecode\.com/svn}, %r{^https?://svn\.}, %r{^svn://}, %r{^https?://(.+?\.)?sourceforge\.net/svnroot/}
      SubversionDownloadStrategy
    when %r{^cvs://}
      CVSDownloadStrategy
    when %r{^https?://(.+?\.)?googlecode\.com/hg}, %r{^hg://}
      MercurialDownloadStrategy
    when %r{^bzr://}
      BazaarDownloadStrategy
    when %r{^fossil://}
      FossilDownloadStrategy
    when %r{^http://svn\.apache\.org/repos/}, %r{^svn\+http://}
      SubversionDownloadStrategy
    when %r{^https?://(.+?\.)?sourceforge\.net/hgweb/}
      MercurialDownloadStrategy
    else
      CurlDownloadStrategy
    end
  end

  # Map a strategy symbol (e.g. :git, :hg) to its strategy class.
  def self.detect_from_symbol(symbol)
    case symbol
    when :hg then MercurialDownloadStrategy
    when :nounzip then NoUnzipCurlDownloadStrategy
    when :git then GitDownloadStrategy
    when :bzr then BazaarDownloadStrategy
    when :svn then SubversionDownloadStrategy
    when :curl then CurlDownloadStrategy
    when :ssl3 then CurlSSL3DownloadStrategy
    when :cvs then CVSDownloadStrategy
    when :post then CurlPostDownloadStrategy
    when :fossil then FossilDownloadStrategy
    else
      # BUGFIX: previously interpolated the undefined local `strategy`
      # (a NameError at raise time); the parameter here is `symbol`.
      raise "Unknown download strategy #{symbol} was requested."
    end
  end
end
|