require "json"
require "rexml/document"
require "time"
require "unpack_strategy"

class AbstractDownloadStrategy
  extend Forwardable
  include FileUtils

  module Pourable
    def stage
      ohai "Pouring #{cached_location.basename}"
      super
    end
  end

  attr_reader :cached_location
  attr_reader :meta, :name, :version, :shutup
  private :meta, :name, :version, :shutup

  def initialize(url, name, version, **meta)
    @url = url
    @name = name
    @version = version
    @meta = meta
    @shutup = false
    extend Pourable if meta[:bottle]
  end

  # Download and cache the resource as {#cached_location}.
  def fetch; end

  # Suppress output.
  def shutup!
    @shutup = true
  end

  def puts(*args)
    super(*args) unless shutup
  end

  def ohai(*args)
    super(*args) unless shutup
  end

  # Unpack {#cached_location} into the current working directory, and possibly
  # chdir into the newly-unpacked directory.
  # Unlike {Resource#stage}, this does not take a block.
  def stage
    UnpackStrategy.detect(cached_location,
                          extension_only: true,
                          ref_type: @ref_type, ref: @ref)
                  .extract_nestedly(basename: basename_without_params,
                                    extension_only: true,
                                    verbose: ARGV.verbose? && !shutup)
    chdir
  end

  def chdir
    entries = Dir["*"]
    case entries.length
    when 0 then raise "Empty archive"
    when 1 then begin
        Dir.chdir entries.first
      rescue
        nil
      end
    end
  end
  private :chdir

  # @!attribute [r]
  # Returns the most recent modified time of all files in the current working
  # directory after stage.
  def source_modified_time
    Pathname.pwd.to_enum(:find).select(&:file?).map(&:mtime).max
  end

  # Remove {#cached_location} and any other files associated with the resource
  # from the cache.
  def clear_cache
    rm_rf(cached_location)
  end

  def safe_system(*args)
    if shutup
      return if quiet_system(*args)
      raise(ErrorDuringExecution.new(args, status: $CHILD_STATUS))
    else
      super(*args)
    end
  end

  def basename_without_params
    return unless @url

    # Strip any ?thing=wad out of .c?thing=wad style extensions.
    File.basename(@url)[/[^?]+/]
  end
end
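
# A minimal sketch of a direct subclass (illustrative only; the class name and
# cache layout below are hypothetical). Subclasses are expected to assign
# @cached_location and override #fetch to populate it:
#
#   class ExampleDownloadStrategy < AbstractDownloadStrategy
#     def initialize(url, name, version, **meta)
#       super
#       @cached_location = HOMEBREW_CACHE/"#{name}-#{version}"
#     end
#
#     def fetch
#       ohai "Downloading #{@url}"
#       safe_system "curl", "--location", "--output", cached_location.to_s, @url
#     end
#   end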

class VCSDownloadStrategy < AbstractDownloadStrategy
  REF_TYPES = [:tag, :branch, :revisions, :revision].freeze

  def initialize(url, name, version, **meta)
    super
    @ref_type, @ref = extract_ref(meta)
    @revision = meta[:revision]
    @cached_location = HOMEBREW_CACHE/"#{name}--#{cache_tag}"
  end

  def fetch
    ohai "Cloning #{@url}"

    if cached_location.exist? && repo_valid?
      puts "Updating #{cached_location}"
      update
    elsif cached_location.exist?
      puts "Removing invalid repository from cache"
      clear_cache
      clone_repo
    else
      clone_repo
    end

    version.update_commit(last_commit) if head?

    return unless @ref_type == :tag
    return unless @revision && current_revision
    return if current_revision == @revision
    raise <<~EOS
      #{@ref} tag should be #{@revision}
      but is actually #{current_revision}
    EOS
  end

  def fetch_last_commit
    fetch
    last_commit
  end

  def commit_outdated?(commit)
    @last_commit ||= fetch_last_commit
    commit != @last_commit
  end

  def head?
    version.respond_to?(:head?) && version.head?
  end

  # Return the last commit's unique identifier for the repository.
  # Unless overridden, this is the most recent modified timestamp.
  def last_commit
    source_modified_time.to_i.to_s
  end

  private

  def cache_tag
    raise NotImplementedError
  end

  def repo_valid?
    raise NotImplementedError
  end

  def clone_repo; end

  def update; end

  def current_revision; end

  def extract_ref(specs)
    key = REF_TYPES.find { |type| specs.key?(type) }
    [key, specs[key]]
  end
end

class AbstractFileDownloadStrategy < AbstractDownloadStrategy
  attr_reader :temporary_path

  def initialize(url, name, version, **meta)
    super
    @cached_location = HOMEBREW_CACHE/"#{name}-#{version}#{ext}"
    @temporary_path = Pathname.new("#{cached_location}.incomplete")
  end

  def stage
    super
    chdir
  end

  private

  def ext
    # We need a Pathname because we've monkeypatched extname to support double
    # extensions (e.g. tar.gz).
    # We can't use basename_without_params, because given a URL like
    #   https://example.com/download.php?file=foo-1.0.tar.gz
    # the extension we want is ".tar.gz", not ".php".
    Pathname.new(@url).ascend do |path|
      ext = path.extname[/[^?]+/]
      return ext if ext
    end
    nil
  end
end

class CurlDownloadStrategy < AbstractFileDownloadStrategy
  attr_reader :mirrors

  def initialize(url, name, version, **meta)
    super
    @mirrors = meta.fetch(:mirrors, [])
  end

  def fetch
    ohai "Downloading #{@url}"

    if cached_location.exist?
      puts "Already downloaded: #{cached_location}"
    else
      begin
        _fetch
      rescue ErrorDuringExecution
        raise CurlDownloadStrategyError, @url
      end
      ignore_interrupts { temporary_path.rename(cached_location) }
    end
  rescue CurlDownloadStrategyError
    raise if mirrors.empty?
    puts "Trying a mirror..."
    @url = mirrors.shift
    retry
  end

  def clear_cache
    super
    rm_rf(temporary_path)
  end

  private

  # Private method, can be overridden if needed.
  def _fetch
    url = @url

    if ENV["HOMEBREW_ARTIFACT_DOMAIN"]
      url = url.sub(%r{^((ht|f)tps?://)?}, ENV["HOMEBREW_ARTIFACT_DOMAIN"].chomp("/") + "/")
      ohai "Downloading from #{url}"
    end

    temporary_path.dirname.mkpath

    curl_download resolved_url(url), to: temporary_path
  end

  # Curl arguments that are always passed to curl,
  # both for raw head calls (`curl --head`) and for an actual `fetch`.
  def _curl_args
    args = []

    if meta.key?(:cookies)
      escape_cookie = ->(cookie) { URI.encode_www_form([cookie]) }
      args += ["-b", meta.fetch(:cookies).map(&escape_cookie).join(";")]
    end

    args += ["-e", meta.fetch(:referer)] if meta.key?(:referer)

    args += ["--user", meta.fetch(:user)] if meta.key?(:user)

    args
  end

  def _curl_opts
    return { user_agent: meta.fetch(:user_agent) } if meta.key?(:user_agent)
    {}
  end

  def resolved_url(url)
    redirect_url, _, status = curl_output(
      "--silent", "--head",
      "--write-out", "%{redirect_url}",
      "--output", "/dev/null",
      url.to_s
    )

    return url unless status.success?
    return url if redirect_url.empty?

    ohai "Downloading from #{redirect_url}"
    if ENV["HOMEBREW_NO_INSECURE_REDIRECT"] &&
       url.start_with?("https://") && !redirect_url.start_with?("https://")
      puts "HTTPS to HTTP redirect detected & HOMEBREW_NO_INSECURE_REDIRECT is set."
      raise CurlDownloadStrategyError, url
    end

    redirect_url
  end

  def curl_output(*args, **options)
    super(*_curl_args, *args, **_curl_opts, **options)
  end

  def curl(*args, **options)
    args << "--connect-timeout" << "5" unless mirrors.empty?
    super(*_curl_args, *args, **_curl_opts, **options)
  end
end
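
# The metadata consumed by _curl_args and _curl_opts above comes from a
# formula's `url` line. A hedged sketch (values are illustrative; cookies are
# name/value pairs as expected by URI.encode_www_form):
#
#   url "https://example.com/foo-1.0.tar.gz",
#       :cookies    => [["session", "abc123"]],
#       :referer    => "https://example.com/download",
#       :user       => "username:password",
#       :user_agent => "Homebrew"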

# Detect and download from an Apache mirror.
class CurlApacheMirrorDownloadStrategy < CurlDownloadStrategy
  def apache_mirrors
    mirrors, = curl_output("--silent", "--location", "#{@url}&asjson=1")
    JSON.parse(mirrors)
  end

  def _fetch
    return super if @tried_apache_mirror
    @tried_apache_mirror = true

    mirrors = apache_mirrors
    path_info = mirrors.fetch("path_info")
    @url = mirrors.fetch("preferred") + path_info
    @mirrors |= %W[https://archive.apache.org/dist/#{path_info}]

    ohai "Best Mirror #{@url}"
    super
  rescue IndexError, JSON::ParserError
    raise CurlDownloadStrategyError, "Couldn't determine mirror, try again later."
  end
end
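
# This strategy is selected automatically by DownloadStrategyDetector for
# Apache closer.cgi/closer.lua URLs, e.g. (illustrative path):
#
#   url "https://www.apache.org/dyn/closer.lua?path=foo/foo-1.0.tar.gz"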

# Download via an HTTP POST.
# Query parameters on the URL are converted into POST parameters.
class CurlPostDownloadStrategy < CurlDownloadStrategy
  def _fetch
    base_url, data = if meta.key?(:data)
      escape_data = ->(d) { ["-d", URI.encode_www_form([d])] }
      [@url, meta[:data].flat_map(&escape_data)]
    else
      @url.split("?", 2)
    end

    curl_download base_url, "--data", data, to: temporary_path
  end
end
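
# Selected with `:using => :post` (see DownloadStrategyDetector), e.g.
# (illustrative):
#
#   url "https://example.com/download.cgi?file=foo&version=1.0", :using => :post
#
# or with explicit form data:
#
#   url "https://example.com/download.cgi", :using => :post,
#       :data => { "file" => "foo", "version" => "1.0" }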

# Use this strategy to download but not unzip a file.
# Useful for installing jars.
class NoUnzipCurlDownloadStrategy < CurlDownloadStrategy
  def stage
    UnpackStrategy::Uncompressed.new(cached_location)
                                .extract(basename: basename_without_params,
                                         verbose: ARGV.verbose? && !shutup)
  end
end
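
# Selected with `:using => :nounzip` (see DownloadStrategyDetector), e.g.
# (illustrative):
#
#   url "https://example.com/foo-1.0.jar", :using => :nounzip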

# This strategy extracts local binary packages.
class LocalBottleDownloadStrategy < AbstractFileDownloadStrategy
  def initialize(path)
    @cached_location = path
  end
end

# S3DownloadStrategy downloads tarballs from AWS S3.
# To use it, add ":using => S3DownloadStrategy" to the URL section of your
# formula. This download strategy uses AWS access tokens (in the
# environment variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY)
# to sign the request. This strategy is good in a corporate setting,
# because it lets you use a private S3 bucket as a repo for internal
# distribution. (It will work for public buckets as well.)
class S3DownloadStrategy < CurlDownloadStrategy
  def _fetch
    if @url !~ %r{^https?://([^.].*)\.s3\.amazonaws\.com/(.+)$} &&
       @url !~ %r{^s3://([^.].*?)/(.+)$}
      raise "Bad S3 URL: " + @url
    end
    bucket = Regexp.last_match(1)
    key = Regexp.last_match(2)

    ENV["AWS_ACCESS_KEY_ID"] = ENV["HOMEBREW_AWS_ACCESS_KEY_ID"]
    ENV["AWS_SECRET_ACCESS_KEY"] = ENV["HOMEBREW_AWS_SECRET_ACCESS_KEY"]

    begin
      signer = Aws::S3::Presigner.new
      s3url = signer.presigned_url :get_object, bucket: bucket, key: key
    rescue Aws::Sigv4::Errors::MissingCredentialsError
      ohai "AWS credentials missing, trying public URL instead."
      s3url = @url
    end

    curl_download s3url, to: temporary_path
  end
end
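
# Example usage of S3DownloadStrategy above (bucket and key are illustrative).
# An s3:// URL selects this strategy automatically:
#
#   url "s3://my-bucket/foo/foo-1.0.tar.gz"
#
# while an https S3 URL needs the explicit option:
#
#   url "https://my-bucket.s3.amazonaws.com/foo/foo-1.0.tar.gz",
#       :using => S3DownloadStrategy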

# GitHubPrivateRepositoryDownloadStrategy downloads contents from a private
# GitHub repository. To use it, add
# ":using => GitHubPrivateRepositoryDownloadStrategy" to the URL section of
# your formula. This download strategy uses GitHub access tokens (in the
# environment variable HOMEBREW_GITHUB_API_TOKEN) to sign the request. This
# strategy is suitable for corporate use just like S3DownloadStrategy, because
# it lets you use a private GitHub repository for internal distribution. It
# works with public repositories as well, but in that case simply use
# CurlDownloadStrategy.
class GitHubPrivateRepositoryDownloadStrategy < CurlDownloadStrategy
  require "utils/formatter"
  require "utils/github"

  def initialize(url, name, version, **meta)
    super
    parse_url_pattern
    set_github_token
  end

  def parse_url_pattern
    url_pattern = %r{https://github.com/([^/]+)/([^/]+)/(\S+)}
    unless @url =~ url_pattern
      raise CurlDownloadStrategyError, "Invalid URL pattern for GitHub repository."
    end

    _, @owner, @repo, @filepath = *@url.match(url_pattern)
  end

  def download_url
    "https://#{@github_token}@github.com/#{@owner}/#{@repo}/#{@filepath}"
  end

  def _fetch
    curl_download download_url, to: temporary_path
  end

  private

  def set_github_token
    @github_token = ENV["HOMEBREW_GITHUB_API_TOKEN"]
    unless @github_token
      raise CurlDownloadStrategyError, "Environment variable HOMEBREW_GITHUB_API_TOKEN is required."
    end
    validate_github_repository_access!
  end

  def validate_github_repository_access!
    # Test access to the repository.
    GitHub.repository(@owner, @repo)
  rescue GitHub::HTTPNotFoundError
    # We only handle HTTPNotFoundError here,
    # because AuthenticationFailedError is handled within util/github.
    message = <<~EOS
      HOMEBREW_GITHUB_API_TOKEN cannot access the repository: #{@owner}/#{@repo}
      This token may not have permission to access the repository, or the URL of the formula may be incorrect.
    EOS
    raise CurlDownloadStrategyError, message
  end
end
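
# Example usage of GitHubPrivateRepositoryDownloadStrategy above (owner, repo
# and path are illustrative):
#
#   url "https://github.com/owner/repo/raw/v1.0/foo-1.0.tar.gz",
#       :using => GitHubPrivateRepositoryDownloadStrategy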

# GitHubPrivateRepositoryReleaseDownloadStrategy downloads tarballs from GitHub
# Release assets. To use it, add
# ":using => GitHubPrivateRepositoryReleaseDownloadStrategy" to the URL section
# of your formula. This download strategy uses GitHub access tokens (in the
# environment variable HOMEBREW_GITHUB_API_TOKEN) to sign the request.
class GitHubPrivateRepositoryReleaseDownloadStrategy < GitHubPrivateRepositoryDownloadStrategy
  def parse_url_pattern
    url_pattern = %r{https://github.com/([^/]+)/([^/]+)/releases/download/([^/]+)/(\S+)}
    unless @url =~ url_pattern
      raise CurlDownloadStrategyError, "Invalid URL pattern for GitHub release."
    end

    _, @owner, @repo, @tag, @filename = *@url.match(url_pattern)
  end

  def download_url
    "https://#{@github_token}@api.github.com/repos/#{@owner}/#{@repo}/releases/assets/#{asset_id}"
  end

  def _fetch
    # HTTP request header `Accept: application/octet-stream` is required.
    # Without this, the GitHub API will respond with metadata, not binary.
    curl_download download_url, "--header", "Accept: application/octet-stream", to: temporary_path
  end

  private

  def asset_id
    @asset_id ||= resolve_asset_id
  end

  def resolve_asset_id
    release_metadata = fetch_release_metadata
    assets = release_metadata["assets"].select { |a| a["name"] == @filename }
    raise CurlDownloadStrategyError, "Asset file not found." if assets.empty?

    assets.first["id"]
  end

  def fetch_release_metadata
    release_url = "https://api.github.com/repos/#{@owner}/#{@repo}/releases/tags/#{@tag}"
    GitHub.open_api(release_url)
  end
end
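
# Example usage of GitHubPrivateRepositoryReleaseDownloadStrategy above (owner,
# repo, tag and asset name are illustrative):
#
#   url "https://github.com/owner/repo/releases/download/v1.0/foo-1.0.tar.gz",
#       :using => GitHubPrivateRepositoryReleaseDownloadStrategy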

# ScpDownloadStrategy downloads files using ssh via scp. To use it, add
# ":using => ScpDownloadStrategy" to the URL section of your formula or
# provide a URL starting with scp://. This strategy uses ssh credentials for
# authentication. If a public/private keypair is configured, it will not
# prompt for a password.
#
# Usage:
#
#   class Abc < Formula
#     url "scp://example.com/src/abc.1.0.tar.gz"
#     ...
#   end
class ScpDownloadStrategy < AbstractFileDownloadStrategy
  def initialize(url, name, version, **meta)
    super
    parse_url_pattern
  end

  def parse_url_pattern
    url_pattern = %r{scp://([^@]+@)?([^@:/]+)(:\d+)?/(\S+)}
    if @url !~ url_pattern
      raise ScpDownloadStrategyError, "Invalid URL for scp: #{@url}"
    end

    _, @user, @host, @port, @path = *@url.match(url_pattern)
  end

  def fetch
    ohai "Downloading #{@url}"

    if cached_location.exist?
      puts "Already downloaded: #{cached_location}"
    else
      begin
        safe_system "scp", scp_source, temporary_path.to_s
      rescue ErrorDuringExecution
        raise ScpDownloadStrategyError, "Failed to run scp #{scp_source}"
      end

      ignore_interrupts { temporary_path.rename(cached_location) }
    end
  end

  def clear_cache
    super
    rm_rf(temporary_path)
  end

  private

  def scp_source
    path_prefix = "/" unless @path.start_with?("~")
    port_arg = "-P #{@port[1..-1]} " if @port
    "#{port_arg}#{@user}#{@host}:#{path_prefix}#{@path}"
  end
end

class SubversionDownloadStrategy < VCSDownloadStrategy
  def initialize(url, name, version, **meta)
    super
    @url = @url.sub("svn+http://", "")
  end

  def fetch
    clear_cache unless @url.chomp("/") == repo_url || quiet_system("svn", "switch", @url, cached_location)
    super
  end

  def source_modified_time
    info = system_command("svn", args: ["info", "--xml"], chdir: cached_location.to_s).stdout
    xml = REXML::Document.new(info)
    Time.parse REXML::XPath.first(xml, "//date/text()").to_s
  end

  def last_commit
    system_command("svn", args: ["info", "--show-item", "revision"], chdir: cached_location.to_s).stdout.strip
  end

  private

  def repo_url
    system_command("svn", args: ["info"], chdir: cached_location.to_s).stdout.strip[/^URL: (.+)$/, 1]
  end

  def externals
    Utils.popen_read("svn", "propget", "svn:externals", @url).chomp.each_line do |line|
      name, url = line.split(/\s+/)
      yield name, url
    end
  end

  def fetch_repo(target, url, revision = nil, ignore_externals = false)
    # Use "svn update" when the repository already exists locally.
    # This saves on bandwidth and has a similar effect to verifying the
    # cache, as it will apply whatever changes are needed to get the right
    # revision.
    args = []

    if revision
      ohai "Checking out #{@ref}"
      args << "-r" << revision
    end

    args << "--ignore-externals" if ignore_externals

    if meta[:trust_cert] == true
      args << "--trust-server-cert"
      args << "--non-interactive"
    end

    if target.directory?
      system_command("svn", args: ["update", *args], chdir: target.to_s)
    else
      system_command("svn", args: ["checkout", url, target, *args])
    end
  end

  def cache_tag
    head? ? "svn-HEAD" : "svn"
  end

  def repo_valid?
    (cached_location/".svn").directory?
  end

  def clone_repo
    case @ref_type
    when :revision
      fetch_repo cached_location, @url, @ref
    when :revisions
      # nil is OK for main_revision, as fetch_repo will then get latest
      main_revision = @ref[:trunk]
      fetch_repo cached_location, @url, main_revision, true

      externals do |external_name, external_url|
        fetch_repo cached_location/external_name, external_url, @ref[external_name], true
      end
    else
      fetch_repo cached_location, @url
    end
  end
  alias update clone_repo
end
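
# Example usage of SubversionDownloadStrategy above (URL and revision are
# illustrative):
#
#   url "https://svn.example.com/foo/trunk", :using => :svn, :revision => "1234"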

class GitDownloadStrategy < VCSDownloadStrategy
  SHALLOW_CLONE_WHITELIST = [
    %r{git://},
    %r{https://github\.com},
    %r{http://git\.sv\.gnu\.org},
    %r{http://llvm\.org},
  ].freeze

  def initialize(url, name, version, **meta)
    super
    @ref_type ||= :branch
    @ref ||= "master"
    @shallow = meta.fetch(:shallow) { true }
  end

  def source_modified_time
    Time.parse Utils.popen_read("git", "--git-dir", git_dir, "show", "-s", "--format=%cD")
  end

  def last_commit
    Utils.popen_read("git", "--git-dir", git_dir, "rev-parse", "--short=7", "HEAD").chomp
  end

  private

  def cache_tag
    "git"
  end

  def cache_version
    0
  end

  def update
    config_repo
    update_repo
    checkout
    reset
    update_submodules if submodules?
  end

  def shallow_clone?
    @shallow && support_depth?
  end

  def shallow_dir?
    (git_dir/"shallow").exist?
  end

  def support_depth?
    @ref_type != :revision && SHALLOW_CLONE_WHITELIST.any? { |regex| @url =~ regex }
  end

  def git_dir
    cached_location/".git"
  end

  def ref?
    system_command("git",
                   args: ["--git-dir", git_dir, "rev-parse", "-q", "--verify", "#{@ref}^{commit}"],
                   print_stderr: false)
      .success?
  end

  def current_revision
    system_command("git", args: ["--git-dir", git_dir, "rev-parse", "-q", "--verify", "HEAD"])
      .stdout.strip
  end

  def repo_valid?
    system_command("git", args: ["--git-dir", git_dir, "status", "-s"], print_stderr: false)
      .success?
  end

  def submodules?
    (cached_location/".gitmodules").exist?
  end

  def clone_args
    args = %w[clone]
    args << "--depth" << "1" if shallow_clone?

    case @ref_type
    when :branch, :tag
      args << "--branch" << @ref
    end

    args << @url << cached_location
  end

  def refspec
    case @ref_type
    when :branch then "+refs/heads/#{@ref}:refs/remotes/origin/#{@ref}"
    when :tag    then "+refs/tags/#{@ref}:refs/tags/#{@ref}"
    else "+refs/heads/master:refs/remotes/origin/master"
    end
  end

  def config_repo
    system_command! "git",
                    args: ["config", "remote.origin.url", @url],
                    chdir: cached_location
    system_command! "git",
                    args: ["config", "remote.origin.fetch", refspec],
                    chdir: cached_location
  end

  def update_repo
    return unless @ref_type == :branch || !ref?

    if !shallow_clone? && shallow_dir?
      system_command! "git",
                      args: ["fetch", "origin", "--unshallow"],
                      chdir: cached_location
    else
      system_command! "git",
                      args: ["fetch", "origin"],
                      chdir: cached_location
    end
  end

  def clone_repo
    system_command! "git", args: clone_args

    system_command! "git",
                    args: ["config", "homebrew.cacheversion", cache_version],
                    chdir: cached_location
    checkout
    update_submodules if submodules?
  end

  def checkout
    ohai "Checking out #{@ref_type} #{@ref}" if @ref_type && @ref
    system_command! "git", args: ["checkout", "-f", @ref, "--"], chdir: cached_location
  end

  def reset
    ref = case @ref_type
    when :branch
      "origin/#{@ref}"
    when :revision, :tag
      @ref
    end

    system_command! "git",
                    args: ["reset", "--hard", *ref],
                    chdir: cached_location
  end

  def update_submodules
    system_command! "git",
                    args: ["submodule", "foreach", "--recursive", "git submodule sync"],
                    chdir: cached_location
    system_command! "git",
                    args: ["submodule", "update", "--init", "--recursive"],
                    chdir: cached_location
    fix_absolute_submodule_gitdir_references!
  end

  # When checking out Git repositories with recursive submodules, some Git
  # versions create `.git` files with absolute instead of relative `gitdir:`
  # pointers. This works for the cached location, but breaks various Git
  # operations once the affected Git resource is staged, i.e. recursively
  # copied to a new location. (This bug was introduced in Git 2.7.0 and fixed
  # in 2.8.3. Clones created with an affected version remain broken.)
  # See https://github.com/Homebrew/homebrew-core/pull/1520 for an example.
  def fix_absolute_submodule_gitdir_references!
    submodule_dirs = system_command!("git",
                                     args: ["submodule", "--quiet", "foreach", "--recursive", "pwd"],
                                     chdir: cached_location)
                     .stdout

    submodule_dirs.lines.map(&:chomp).each do |submodule_dir|
      work_dir = Pathname.new(submodule_dir)

      # Only check and fix if `.git` is a regular file, not a directory.
      dot_git = work_dir/".git"
      next unless dot_git.file?

      git_dir = dot_git.read.chomp[/^gitdir: (.*)$/, 1]
      if git_dir.nil?
        onoe "Failed to parse '#{dot_git}'." if ARGV.homebrew_developer?
        next
      end

      # Only attempt to fix absolute paths.
      next unless git_dir.start_with?("/")

      # Make the `gitdir:` reference relative to the working directory.
      relative_git_dir = Pathname.new(git_dir).relative_path_from(work_dir)
      dot_git.atomic_write("gitdir: #{relative_git_dir}\n")
    end
  end
end
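
# Example usage of GitDownloadStrategy above (URL, tag and revision are
# illustrative; a :revision alongside a :tag lets VCSDownloadStrategy#fetch
# verify the tag):
#
#   url "https://example.com/foo.git",
#       :using    => :git,
#       :tag      => "v1.0.0",
#       :revision => "abcdef1234567890abcdef1234567890abcdef12",
#       :shallow  => false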

class GitHubGitDownloadStrategy < GitDownloadStrategy
  def initialize(url, name, version, **meta)
    super

    return unless %r{^https?://github\.com/(?<user>[^/]+)/(?<repo>[^/]+)\.git$} =~ @url
    @user = user
    @repo = repo
  end

  def github_last_commit
    return if ENV["HOMEBREW_NO_GITHUB_API"]

    output, _, status = curl_output(
      "--silent", "--head", "--location",
      "-H", "Accept: application/vnd.github.v3.sha",
      "https://api.github.com/repos/#{@user}/#{@repo}/commits/#{@ref}"
    )

    return unless status.success?

    commit = output[/^ETag: \"(\h+)\"/, 1]
    version.update_commit(commit) if commit
    commit
  end

  def multiple_short_commits_exist?(commit)
    return if ENV["HOMEBREW_NO_GITHUB_API"]

    output, _, status = curl_output(
      "--silent", "--head", "--location",
      "-H", "Accept: application/vnd.github.v3.sha",
      "https://api.github.com/repos/#{@user}/#{@repo}/commits/#{commit}"
    )

    !(status.success? && output && output[/^Status: (200)/, 1] == "200")
  end

  def commit_outdated?(commit)
    @last_commit ||= github_last_commit
    if !@last_commit
      super
    else
      return true unless commit
      return true unless @last_commit.start_with?(commit)
      if multiple_short_commits_exist?(commit)
        true
      else
        version.update_commit(commit)
        false
      end
    end
  end
end

class CVSDownloadStrategy < VCSDownloadStrategy
  def initialize(url, name, version, **meta)
    super
    @url = @url.sub(%r{^cvs://}, "")

    if meta.key?(:module)
      @module = meta.fetch(:module)
    elsif @url !~ %r{:[^/]+$}
      @module = name
    else
      @module, @url = split_url(@url)
    end
  end

  def source_modified_time
    # Filter out CVS's own administrative files, because each of their
    # timestamps is the moment of clone.
    max_mtime = Time.at(0)
    cached_location.find do |f|
      Find.prune if f.directory? && f.basename.to_s == "CVS"
      next unless f.file?
      mtime = f.mtime
      max_mtime = mtime if mtime > max_mtime
    end
    max_mtime
  end

  private

  def cache_tag
    "cvs"
  end

  def repo_valid?
    (cached_location/"CVS").directory?
  end

  def quiet_flag
    "-Q" unless ARGV.verbose?
  end

  def clone_repo
    with_cvs_env do
      # Login is only needed (and allowed) with pserver; skip for anoncvs.
      safe_system "cvs", *quiet_flag, "-d", @url, "login" if @url.include? "pserver"
      safe_system "cvs", *quiet_flag, "-d", @url, "checkout", "-d", cached_location.basename, @module,
                  chdir: cached_location.dirname
    end
  end

  def update
    with_cvs_env do
      safe_system "cvs", *quiet_flag, "update", chdir: cached_location
    end
  end

  def split_url(in_url)
    parts = in_url.split(/:/)
    mod = parts.pop
    url = parts.join(":")
    [mod, url]
  end

  def with_cvs_env
    with_env "PATH" => PATH.new("/usr/bin", Formula["cvs"].opt_bin, ENV["PATH"]) do
      yield
    end
  end
end
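
# Example usage of CVSDownloadStrategy above (server and module are
# illustrative; a cvs:// URL selects this strategy automatically):
#
#   url "cvs://:pserver:anonymous@cvs.example.com:/cvsroot/foo",
#       :module => "foo"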

class MercurialDownloadStrategy < VCSDownloadStrategy
  def initialize(url, name, version, **meta)
    super
    @url = @url.sub(%r{^hg://}, "")
  end

  def source_modified_time
    with_hg_env do
      Time.parse Utils.popen_read("hg", "tip", "--template", "{date|isodate}", "-R", cached_location.to_s)
    end
  end

  def last_commit
    with_hg_env do
      Utils.popen_read("hg", "parent", "--template", "{node|short}", "-R", cached_location.to_s)
    end
  end

  private

  def cache_tag
    "hg"
  end

  def repo_valid?
    (cached_location/".hg").directory?
  end

  def clone_repo
    with_hg_env do
      safe_system "hg", "clone", @url, cached_location
    end
  end

  def update
    with_hg_env do
      safe_system "hg", "--cwd", cached_location, "pull", "--update"

      update_args = if @ref_type && @ref
        ohai "Checking out #{@ref_type} #{@ref}"
        [@ref]
      else
        ["--clean"]
      end

      safe_system "hg", "--cwd", cached_location, "update", *update_args
    end
  end

  def with_hg_env
    with_env "PATH" => PATH.new(Formula["mercurial"].opt_bin, ENV["PATH"]) do
      yield
    end
  end
end

class BazaarDownloadStrategy < VCSDownloadStrategy
  def initialize(url, name, version, **meta)
    super
    @url.sub!(%r{^bzr://}, "")
    ENV["BZR_HOME"] = HOMEBREW_TEMP
  end

  def source_modified_time
    timestamp = with_bazaar_env do
      Utils.popen_read("bzr", "log", "-l", "1", "--timezone=utc", cached_location.to_s)[/^timestamp: (.+)$/, 1]
    end
    raise "Could not get any timestamps from bzr!" if timestamp.to_s.empty?
    Time.parse timestamp
  end

  def last_commit
    with_bazaar_env do
      Utils.popen_read("bzr", "revno", cached_location.to_s).chomp
    end
  end

  private

  def cache_tag
    "bzr"
  end

  def repo_valid?
    (cached_location/".bzr").directory?
  end

  def clone_repo
    with_bazaar_env do
      # "lightweight" means history-less
      safe_system "bzr", "checkout", "--lightweight", @url, cached_location
    end
  end

  def update
    with_bazaar_env do
      safe_system "bzr", "update", chdir: cached_location
    end
  end

  def with_bazaar_env
    with_env "PATH" => PATH.new(Formula["bazaar"].opt_bin, ENV["PATH"]) do
      yield
    end
  end
end

class FossilDownloadStrategy < VCSDownloadStrategy
  def initialize(url, name, version, **meta)
    super
    @url = @url.sub(%r{^fossil://}, "")
  end

  def source_modified_time
    with_fossil_env do
      Time.parse Utils.popen_read("fossil", "info", "tip", "-R", cached_location.to_s)[/^uuid: +\h+ (.+)$/, 1]
    end
  end

  def last_commit
    with_fossil_env do
      Utils.popen_read("fossil", "info", "tip", "-R", cached_location.to_s)[/^uuid: +(\h+) .+$/, 1]
    end
  end

  def repo_valid?
    with_fossil_env do
      quiet_system "fossil", "branch", "-R", cached_location
    end
  end

  private

  def cache_tag
    "fossil"
  end

  def clone_repo
    with_fossil_env do
      safe_system "fossil", "clone", @url, cached_location
    end
  end

  def update
    with_fossil_env do
      safe_system "fossil", "pull", "-R", cached_location
    end
  end

  def with_fossil_env
    with_env "PATH" => PATH.new(Formula["fossil"].opt_bin, ENV["PATH"]) do
      yield
    end
  end
end

class DownloadStrategyDetector
  def self.detect(url, strategy = nil)
    if strategy.nil?
      detect_from_url(url)
    elsif strategy == S3DownloadStrategy
      require_aws_sdk
      strategy
    elsif strategy.is_a?(Class) && strategy < AbstractDownloadStrategy
      strategy
    elsif strategy.is_a?(Symbol)
      detect_from_symbol(strategy)
    else
      raise TypeError,
            "Unknown download strategy specification #{strategy.inspect}"
    end
  end

  def self.detect_from_url(url)
    case url
    when %r{^https?://github\.com/[^/]+/[^/]+\.git$}
      GitHubGitDownloadStrategy
    when %r{^https?://.+\.git$}, %r{^git://}
      GitDownloadStrategy
    when %r{^https?://www\.apache\.org/dyn/closer\.cgi}, %r{^https?://www\.apache\.org/dyn/closer\.lua}
      CurlApacheMirrorDownloadStrategy
    when %r{^https?://(.+?\.)?googlecode\.com/svn}, %r{^https?://svn\.}, %r{^svn://}, %r{^https?://(.+?\.)?sourceforge\.net/svnroot/}
      SubversionDownloadStrategy
    when %r{^cvs://}
      CVSDownloadStrategy
    when %r{^hg://}, %r{^https?://(.+?\.)?googlecode\.com/hg}
      MercurialDownloadStrategy
    when %r{^bzr://}
      BazaarDownloadStrategy
    when %r{^fossil://}
      FossilDownloadStrategy
    when %r{^svn\+http://}, %r{^http://svn\.apache\.org/repos/}
      SubversionDownloadStrategy
    when %r{^https?://(.+?\.)?sourceforge\.net/hgweb/}
      MercurialDownloadStrategy
    when %r{^s3://}
      require_aws_sdk
      S3DownloadStrategy
    when %r{^scp://}
      ScpDownloadStrategy
    else
      CurlDownloadStrategy
    end
  end

  def self.detect_from_symbol(symbol)
    case symbol
    when :hg      then MercurialDownloadStrategy
    when :nounzip then NoUnzipCurlDownloadStrategy
    when :git     then GitDownloadStrategy
    when :bzr     then BazaarDownloadStrategy
    when :svn     then SubversionDownloadStrategy
    when :curl    then CurlDownloadStrategy
    when :cvs     then CVSDownloadStrategy
    when :post    then CurlPostDownloadStrategy
    when :fossil  then FossilDownloadStrategy
    else
      raise "Unknown download strategy #{symbol} was requested."
    end
  end

  def self.require_aws_sdk
    Homebrew.install_gem! "aws-sdk-s3", "~> 1.8"
    require "aws-sdk-s3"
  end
end
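
# A quick sketch of how detection behaves (illustrative; return values follow
# the case statements above):
#
#   DownloadStrategyDetector.detect("https://github.com/user/repo.git")
#   # => GitHubGitDownloadStrategy
#   DownloadStrategyDetector.detect("https://example.com/foo-1.0.tar.gz")
#   # => CurlDownloadStrategy
#   DownloadStrategyDetector.detect("https://example.com/foo-1.0.tar.gz", :nounzip)
#   # => NoUnzipCurlDownloadStrategy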