# Library/Homebrew/download_strategy.rb
require "json"
require "rexml/document"
require "time"
require "unpack_strategy"
# Abstract superclass of all download strategies. Subclasses implement
# {#fetch} and {#cached_location}; {#stage} unpacks the cached download
# into the current working directory.
class AbstractDownloadStrategy
  extend Forwardable
  include FileUtils

  # Mixed in (via +extend+ in {#initialize}) for bottle resources so that
  # staging announces the pour.
  module Pourable
    def stage
      ohai "Pouring #{cached_location.basename}"
      super
    end
  end

  attr_reader :meta, :name, :version
  attr_reader :shutup

  # @param name [String] the resource/formula name
  # @param version the resource version
  # @param resource an object responding to +url+ and +specs+
  def initialize(name, version, resource)
    @name = name
    @url = resource.url
    @version = version
    @meta = resource.specs
    @shutup = false
    extend Pourable if meta[:bottle]
  end

  # Download and cache the resource as {#cached_location}.
  def fetch; end

  # Suppress output from {#puts}, {#ohai} and {#safe_system}.
  def shutup!
    @shutup = true
  end

  def puts(*args)
    super(*args) unless shutup
  end

  def ohai(*args)
    super(*args) unless shutup
  end

  # Unpack {#cached_location} into the current working directory, and possibly
  # chdir into the newly-unpacked directory.
  # Unlike {Resource#stage}, this does not take a block.
  def stage
    UnpackStrategy.detect(cached_location,
                          extension_only: true,
                          ref_type: @ref_type, ref: @ref)
                  .extract_nestedly(basename: basename_without_params,
                                    extension_only: true,
                                    verbose: ARGV.verbose? && !shutup)
  end

  # @!attribute [r] cached_location
  # The path to the cached file or directory associated with the resource.
  def cached_location; end

  # @!attribute [r]
  # return most recent modified time for all files in the current working directory after stage.
  def source_modified_time
    Pathname.pwd.to_enum(:find).select(&:file?).map(&:mtime).max
  end

  # Remove {#cached_location} and any other files associated with the resource
  # from the cache.
  def clear_cache
    rm_rf(cached_location)
  end

  # Like +Kernel#safe_system+, but runs quietly (and raises on failure)
  # when output has been suppressed via {#shutup!}.
  def safe_system(*args)
    if shutup
      return if quiet_system(*args)

      raise(ErrorDuringExecution.new(args, status: $CHILD_STATUS))
    else
      super(*args)
    end
  end

  # The basename of the URL with any trailing query string removed.
  def basename_without_params
    return unless @url

    # Strip any ?thing=wad out of .c?thing=wad style extensions
    File.basename(@url)[/[^?]+/]
  end
end
# Abstract superclass for version-control-system strategies (git, svn, hg, ...).
# Subclasses implement +cache_tag+, +repo_valid?+, +clone_repo+ and +update+.
class VCSDownloadStrategy < AbstractDownloadStrategy
  REF_TYPES = [:tag, :branch, :revisions, :revision].freeze

  def initialize(name, version, resource)
    super
    @ref_type, @ref = extract_ref(meta)
    @revision = meta[:revision]
    @clone = HOMEBREW_CACHE/cache_filename
  end

  # Clone (or update) the repository into the cache, then validate that a
  # tagged checkout matches any explicitly-requested revision.
  def fetch
    ohai "Cloning #{@url}"

    if cached_location.exist? && repo_valid?
      puts "Updating #{cached_location}"
      update
    elsif cached_location.exist?
      puts "Removing invalid repository from cache"
      clear_cache
      clone_repo
    else
      clone_repo
    end

    version.update_commit(last_commit) if head?

    return unless @ref_type == :tag
    return unless @revision && current_revision
    return if current_revision == @revision

    raise <<~EOS
      #{@ref} tag should be #{@revision}
      but is actually #{current_revision}
    EOS
  end

  def fetch_last_commit
    fetch
    last_commit
  end

  def commit_outdated?(commit)
    @last_commit ||= fetch_last_commit
    commit != @last_commit
  end

  def cached_location
    @clone
  end

  def head?
    version.respond_to?(:head?) && version.head?
  end

  # Return last commit's unique identifier for the repository.
  # Return most recent modified timestamp unless overridden.
  def last_commit
    source_modified_time.to_i.to_s
  end

  private

  def cache_tag
    raise NotImplementedError
  end

  def cache_filename
    "#{name}--#{cache_tag}"
  end

  def repo_valid?
    raise NotImplementedError
  end

  def clone_repo; end

  def update; end

  def current_revision; end

  # Find the first supported ref type present in the resource specs.
  # @return [Array(Symbol, Object)] the ref type and its value (both may be nil)
  def extract_ref(specs)
    key = REF_TYPES.find { |type| specs.key?(type) }
    [key, specs[key]]
  end
end
# Abstract superclass for strategies that download a single archive file.
class AbstractFileDownloadStrategy < AbstractDownloadStrategy
  def stage
    super
    chdir
  end

  private

  # After extraction, descend into a sole top-level directory (if any);
  # an empty extraction is an error.
  def chdir
    entries = Dir["*"]
    case entries.length
    when 0 then raise "Empty archive"
    when 1 then begin
        Dir.chdir entries.first
      rescue
        nil
      end
    end
  end

  # The (possibly double) extension of the download URL, query string stripped.
  def ext
    # We need a Pathname because we've monkeypatched extname to support double
    # extensions (e.g. tar.gz).
    # We can't use basename_without_params, because given a URL like
    #   https://example.com/download.php?file=foo-1.0.tar.gz
    # the extension we want is ".tar.gz", not ".php".
    Pathname.new(@url).ascend do |path|
      ext = path.extname[/[^?]+/]
      return ext if ext
    end

    nil
  end
end
# Download a file over HTTP(S)/FTP using curl, with mirror fallback,
# HOMEBREW_ARTIFACT_DOMAIN rewriting and redirect resolution.
class CurlDownloadStrategy < AbstractFileDownloadStrategy
  attr_reader :mirrors, :tarball_path, :temporary_path

  def initialize(name, version, resource)
    super
    @mirrors = resource.mirrors.dup
    @tarball_path = HOMEBREW_CACHE/"#{name}-#{version}#{ext}"
    @temporary_path = Pathname.new("#{cached_location}.incomplete")
  end

  # Download to a temporary path, then atomically rename into the cache.
  # On curl failure, retry with the next mirror until none remain.
  def fetch
    ohai "Downloading #{@url}"

    if cached_location.exist?
      puts "Already downloaded: #{cached_location}"
    else
      begin
        _fetch
      rescue ErrorDuringExecution
        raise CurlDownloadStrategyError, @url
      end
      ignore_interrupts { temporary_path.rename(cached_location) }
    end
  rescue CurlDownloadStrategyError
    raise if mirrors.empty?

    puts "Trying a mirror..."
    @url = mirrors.shift
    retry
  end

  def cached_location
    tarball_path
  end

  def clear_cache
    super
    rm_rf(temporary_path)
  end

  private

  # Private method, can be overridden if needed.
  def _fetch
    url = @url

    if ENV["HOMEBREW_ARTIFACT_DOMAIN"]
      url = url.sub(%r{^((ht|f)tps?://)?}, ENV["HOMEBREW_ARTIFACT_DOMAIN"].chomp("/") + "/")
      ohai "Downloading from #{url}"
    end

    temporary_path.dirname.mkpath

    curl_download resolved_url(url), to: temporary_path
  end

  # Curl options to be always passed to curl,
  # with raw head calls (`curl --head`) or with actual `fetch`.
  def _curl_opts
    return ["--user", meta.fetch(:user)] if meta.key?(:user)

    []
  end

  # Follow the URL's redirections with a HEAD request, refusing
  # HTTPS-to-HTTP downgrades when HOMEBREW_NO_INSECURE_REDIRECT is set.
  def resolved_url(url)
    redirect_url, _, status = curl_output(
      *_curl_opts, "--silent", "--head",
      "--write-out", "%{redirect_url}",
      "--output", "/dev/null",
      url.to_s
    )

    return url unless status.success?
    return url if redirect_url.empty?

    ohai "Downloading from #{redirect_url}"
    if ENV["HOMEBREW_NO_INSECURE_REDIRECT"] &&
       url.start_with?("https://") && !redirect_url.start_with?("https://")
      puts "HTTPS to HTTP redirect detected & HOMEBREW_NO_INSECURE_REDIRECT is set."
      raise CurlDownloadStrategyError, url
    end

    redirect_url
  end

  def curl(*args, **options)
    args.concat _curl_opts
    # Fail fast on a slow server when mirrors are available to try.
    args << "--connect-timeout" << "5" unless mirrors.empty?
    super(*args, **options)
  end
end
# Detect and download from Apache Mirror
class CurlApacheMirrorDownloadStrategy < CurlDownloadStrategy
  # Query the Apache mirror CGI for its JSON mirror listing.
  def apache_mirrors
    mirrors, = curl_output(*_curl_opts, "--silent", "--location", "#{@url}&asjson=1")
    JSON.parse(mirrors)
  end

  def _fetch
    # Only consult the mirror list once; subsequent retries use plain curl.
    return super if @tried_apache_mirror

    @tried_apache_mirror = true

    mirrors = apache_mirrors
    path_info = mirrors.fetch("path_info")
    @url = mirrors.fetch("preferred") + path_info
    # The Apache archive always has every release; keep it as a fallback.
    @mirrors |= %W[https://archive.apache.org/dist/#{path_info}]

    ohai "Best Mirror #{@url}"
    super
  rescue IndexError, JSON::ParserError
    raise CurlDownloadStrategyError, "Couldn't determine mirror, try again later."
  end
end
# Download via an HTTP POST.
# Query parameters on the URL are converted into POST parameters
class CurlPostDownloadStrategy < CurlDownloadStrategy
  def _fetch
    base_url, data = @url.split("?")
    curl_download base_url, "--data", data, to: temporary_path
  end
end
# Use this strategy to download but not unzip a file.
# Useful for installing jars.
class NoUnzipCurlDownloadStrategy < CurlDownloadStrategy
  def stage
    UnpackStrategy::Uncompressed.new(cached_location)
                                .extract(basename: basename_without_params)
  end
end
# This strategy extracts local binary packages.
class LocalBottleDownloadStrategy < AbstractFileDownloadStrategy
  attr_reader :cached_location

  # @param path [Pathname] location of the already-downloaded bottle
  def initialize(path)
    @cached_location = path
  end
end
# S3DownloadStrategy downloads tarballs from AWS S3.
# To use it, add ":using => S3DownloadStrategy" to the URL section of your
# formula. This download strategy uses AWS access tokens (in the
# environment variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY)
# to sign the request. This strategy is good in a corporate setting,
# because it lets you use a private S3 bucket as a repo for internal
# distribution. (It will work for public buckets as well.)
class S3DownloadStrategy < CurlDownloadStrategy
  def _fetch
    if @url !~ %r{^https?://([^.].*)\.s3\.amazonaws\.com/(.+)$} &&
       @url !~ %r{^s3://([^.].*?)/(.+)$}
      raise "Bad S3 URL: " + @url
    end

    bucket = Regexp.last_match(1)
    key = Regexp.last_match(2)

    ENV["AWS_ACCESS_KEY_ID"] = ENV["HOMEBREW_AWS_ACCESS_KEY_ID"]
    ENV["AWS_SECRET_ACCESS_KEY"] = ENV["HOMEBREW_AWS_SECRET_ACCESS_KEY"]

    begin
      signer = Aws::S3::Presigner.new
      s3url = signer.presigned_url :get_object, bucket: bucket, key: key
    rescue Aws::Sigv4::Errors::MissingCredentialsError
      # Fall back to the public object URL when no credentials are configured.
      ohai "AWS credentials missing, trying public URL instead."
      s3url = @url
    end

    curl_download s3url, to: temporary_path
  end
end
# GitHubPrivateRepositoryDownloadStrategy downloads contents from GitHub
# Private Repository. To use it, add
# ":using => GitHubPrivateRepositoryDownloadStrategy" to the URL section of
# your formula. This download strategy uses GitHub access tokens (in the
# environment variables HOMEBREW_GITHUB_API_TOKEN) to sign the request. This
# strategy is suitable for corporate use just like S3DownloadStrategy, because
# it lets you use a private GitHub repository for internal distribution. It
# works with public one, but in that case simply use CurlDownloadStrategy.
class GitHubPrivateRepositoryDownloadStrategy < CurlDownloadStrategy
  require "utils/formatter"
  require "utils/github"

  def initialize(name, version, resource)
    super
    parse_url_pattern
    set_github_token
  end

  # Extract owner, repository and file path from the GitHub URL.
  def parse_url_pattern
    url_pattern = %r{https://github.com/([^/]+)/([^/]+)/(\S+)}
    unless @url =~ url_pattern
      raise CurlDownloadStrategyError, "Invalid url pattern for GitHub Repository."
    end

    _, @owner, @repo, @filepath = *@url.match(url_pattern)
  end

  def download_url
    "https://#{@github_token}@github.com/#{@owner}/#{@repo}/#{@filepath}"
  end

  def _fetch
    curl_download download_url, to: temporary_path
  end

  private

  def set_github_token
    @github_token = ENV["HOMEBREW_GITHUB_API_TOKEN"]
    unless @github_token
      raise CurlDownloadStrategyError, "Environmental variable HOMEBREW_GITHUB_API_TOKEN is required."
    end

    validate_github_repository_access!
  end

  def validate_github_repository_access!
    # Test access to the repository
    GitHub.repository(@owner, @repo)
  rescue GitHub::HTTPNotFoundError
    # We only handle HTTPNotFoundError here,
    # becase AuthenticationFailedError is handled within util/github.
    message = <<~EOS
      HOMEBREW_GITHUB_API_TOKEN can not access the repository: #{@owner}/#{@repo}
      This token may not have permission to access the repository or the url of formula may be incorrect.
    EOS
    raise CurlDownloadStrategyError, message
  end
end
# GitHubPrivateRepositoryReleaseDownloadStrategy downloads tarballs from GitHub
# Release assets. To use it, add
# ":using => GitHubPrivateRepositoryReleaseDownloadStrategy" to the URL section
# of your formula. This download strategy uses GitHub access tokens (in the
# environment variables HOMEBREW_GITHUB_API_TOKEN) to sign the request.
class GitHubPrivateRepositoryReleaseDownloadStrategy < GitHubPrivateRepositoryDownloadStrategy
  # Extract owner, repository, tag and asset filename from the release URL.
  def parse_url_pattern
    url_pattern = %r{https://github.com/([^/]+)/([^/]+)/releases/download/([^/]+)/(\S+)}
    unless @url =~ url_pattern
      raise CurlDownloadStrategyError, "Invalid url pattern for GitHub Release."
    end

    _, @owner, @repo, @tag, @filename = *@url.match(url_pattern)
  end

  def download_url
    "https://#{@github_token}@api.github.com/repos/#{@owner}/#{@repo}/releases/assets/#{asset_id}"
  end

  def _fetch
    # HTTP request header `Accept: application/octet-stream` is required.
    # Without this, the GitHub API will respond with metadata, not binary.
    curl_download download_url, "--header", "Accept: application/octet-stream", to: temporary_path
  end

  private

  def asset_id
    @asset_id ||= resolve_asset_id
  end

  # Look up the numeric asset id for @filename in the release metadata.
  def resolve_asset_id
    release_metadata = fetch_release_metadata
    assets = release_metadata["assets"].select { |a| a["name"] == @filename }
    raise CurlDownloadStrategyError, "Asset file not found." if assets.empty?

    assets.first["id"]
  end

  def fetch_release_metadata
    release_url = "https://api.github.com/repos/#{@owner}/#{@repo}/releases/tags/#{@tag}"
    GitHub.open_api(release_url)
  end
end
# ScpDownloadStrategy downloads files using ssh via scp. To use it, add
# ":using => ScpDownloadStrategy" to the URL section of your formula or
# provide a URL starting with scp://. This strategy uses ssh credentials for
# authentication. If a public/private keypair is configured, it will not
# prompt for a password.
#
# Usage:
#
#   class Abc < Formula
#     url "scp://example.com/src/abc.1.0.tar.gz"
#     ...
class ScpDownloadStrategy < AbstractFileDownloadStrategy
  attr_reader :tarball_path, :temporary_path

  def initialize(name, version, resource)
    super
    @tarball_path = HOMEBREW_CACHE/"#{name}-#{version}#{ext}"
    @temporary_path = Pathname.new("#{cached_location}.incomplete")
    parse_url_pattern
  end

  # Extract user, host, port and path from the scp:// URL.
  def parse_url_pattern
    url_pattern = %r{scp://([^@]+@)?([^@:/]+)(:\d+)?/(\S+)}
    if @url !~ url_pattern
      raise ScpDownloadStrategyError, "Invalid URL for scp: #{@url}"
    end

    _, @user, @host, @port, @path = *@url.match(url_pattern)
  end

  def fetch
    ohai "Downloading #{@url}"

    if cached_location.exist?
      puts "Already downloaded: #{cached_location}"
    else
      begin
        safe_system "scp", scp_source, temporary_path.to_s
      rescue ErrorDuringExecution
        raise ScpDownloadStrategyError, "Failed to run scp #{scp_source}"
      end
      ignore_interrupts { temporary_path.rename(cached_location) }
    end
  end

  def cached_location
    tarball_path
  end

  def clear_cache
    super
    rm_rf(temporary_path)
  end

  private

  # NOTE(review): port_arg is embedded in the source string, so scp receives
  # "-P <port> user@host:..." as a single argument — verify scp accepts this,
  # or pass "-P" and the port as separate arguments to safe_system.
  def scp_source
    path_prefix = "/" unless @path.start_with?("~")
    port_arg = "-P #{@port[1..-1]} " if @port
    "#{port_arg}#{@user}#{@host}:#{path_prefix}#{@path}"
  end
end
# Check out and update Subversion repositories, including svn:externals.
class SubversionDownloadStrategy < VCSDownloadStrategy
  def initialize(name, version, resource)
    super
    @url = @url.sub("svn+http://", "")
  end

  def fetch
    # Wipe the cache when the remote URL changed and "svn switch" cannot adapt it.
    clear_cache unless @url.chomp("/") == repo_url || quiet_system("svn", "switch", @url, cached_location)
    super
  end

  def source_modified_time
    info = system_command("svn", args: ["info", "--xml"], chdir: cached_location.to_s).stdout
    xml = REXML::Document.new(info)
    Time.parse REXML::XPath.first(xml, "//date/text()").to_s
  end

  def last_commit
    system_command("svn", args: ["info", "--show-item", "revision"], chdir: cached_location.to_s).stdout.strip
  end

  private

  def repo_url
    system_command("svn", args: ["info"], chdir: cached_location.to_s).stdout.strip[/^URL: (.+)$/, 1]
  end

  # Yield each (name, url) pair declared in the repository's svn:externals.
  def externals
    Utils.popen_read("svn", "propget", "svn:externals", @url).chomp.each_line do |line|
      name, url = line.split(/\s+/)
      yield name, url
    end
  end

  def fetch_repo(target, url, revision = nil, ignore_externals = false)
    # Use "svn update" when the repository already exists locally.
    # This saves on bandwidth and will have a similar effect to verifying the
    # cache as it will make any changes to get the right revision.
    args = []
    if revision
      ohai "Checking out #{@ref}"
      args << "-r" << revision
    end
    args << "--ignore-externals" if ignore_externals
    if target.directory?
      system_command("svn", args: ["update", *args], chdir: target.to_s)
    else
      system_command("svn", args: ["checkout", url, target, *args])
    end
  end

  def cache_tag
    head? ? "svn-HEAD" : "svn"
  end

  def repo_valid?
    (cached_location/".svn").directory?
  end

  def clone_repo
    case @ref_type
    when :revision
      fetch_repo cached_location, @url, @ref
    when :revisions
      # nil is OK for main_revision, as fetch_repo will then get latest
      main_revision = @ref[:trunk]
      fetch_repo cached_location, @url, main_revision, true

      externals do |external_name, external_url|
        fetch_repo cached_location/external_name, external_url, @ref[external_name], true
      end
    else
      fetch_repo cached_location, @url
    end
  end
  alias update clone_repo
end
# Clone and update Git repositories, with shallow clones for whitelisted
# hosts and recursive submodule support.
class GitDownloadStrategy < VCSDownloadStrategy
  # Hosts known to support `git clone --depth`.
  SHALLOW_CLONE_WHITELIST = [
    %r{git://},
    %r{https://github\.com},
    %r{http://git\.sv\.gnu\.org},
    %r{http://llvm\.org},
  ].freeze

  def initialize(name, version, resource)
    super
    @ref_type ||= :branch
    @ref ||= "master"
    @shallow = meta.fetch(:shallow) { true }
  end

  def source_modified_time
    Time.parse Utils.popen_read("git", "--git-dir", git_dir, "show", "-s", "--format=%cD")
  end

  def last_commit
    Utils.popen_read("git", "--git-dir", git_dir, "rev-parse", "--short=7", "HEAD").chomp
  end

  private

  def cache_tag
    "git"
  end

  def cache_version
    0
  end

  def update
    cached_location.cd do
      config_repo
      update_repo
      checkout
      reset
      update_submodules if submodules?
    end
  end

  def shallow_clone?
    @shallow && support_depth?
  end

  def shallow_dir?
    (git_dir/"shallow").exist?
  end

  # Shallow clones of a specific revision are not possible.
  def support_depth?
    @ref_type != :revision && SHALLOW_CLONE_WHITELIST.any? { |regex| @url =~ regex }
  end

  def git_dir
    cached_location/".git"
  end

  # Does @ref resolve to a commit in the local repository?
  def ref?
    quiet_system "git", "--git-dir", git_dir, "rev-parse", "-q", "--verify", "#{@ref}^{commit}"
  end

  def current_revision
    Utils.popen_read("git", "--git-dir", git_dir, "rev-parse", "-q", "--verify", "HEAD").strip
  end

  def repo_valid?
    quiet_system "git", "--git-dir", git_dir, "status", "-s"
  end

  def submodules?
    (cached_location/".gitmodules").exist?
  end

  def clone_args
    args = %w[clone]
    args << "--depth" << "1" if shallow_clone?

    case @ref_type
    when :branch, :tag
      args << "--branch" << @ref
    end

    args << @url << cached_location
  end

  def refspec
    case @ref_type
    when :branch then "+refs/heads/#{@ref}:refs/remotes/origin/#{@ref}"
    when :tag then "+refs/tags/#{@ref}:refs/tags/#{@ref}"
    else "+refs/heads/master:refs/remotes/origin/master"
    end
  end

  def config_repo
    safe_system "git", "config", "remote.origin.url", @url
    safe_system "git", "config", "remote.origin.fetch", refspec
  end

  def update_repo
    # Skip the fetch when a non-branch ref is already present locally.
    return unless @ref_type == :branch || !ref?

    if !shallow_clone? && shallow_dir?
      safe_system "git", "fetch", "origin", "--unshallow"
    else
      safe_system "git", "fetch", "origin"
    end
  end

  def clone_repo
    safe_system "git", *clone_args

    cached_location.cd do
      safe_system "git", "config", "homebrew.cacheversion", cache_version
      checkout
      update_submodules if submodules?
    end
  end

  def checkout
    ohai "Checking out #{@ref_type} #{@ref}" if @ref_type && @ref
    safe_system "git", "checkout", "-f", @ref, "--"
  end

  def reset_args
    ref = case @ref_type
    when :branch
      "origin/#{@ref}"
    when :revision, :tag
      @ref
    end

    %W[reset --hard #{ref}]
  end

  def reset
    safe_system "git", *reset_args
  end

  def update_submodules
    safe_system "git", "submodule", "foreach", "--recursive", "git submodule sync"
    safe_system "git", "submodule", "update", "--init", "--recursive"
    fix_absolute_submodule_gitdir_references!
  end

  def fix_absolute_submodule_gitdir_references!
    # When checking out Git repositories with recursive submodules, some Git
    # versions create `.git` files with absolute instead of relative `gitdir:`
    # pointers. This works for the cached location, but breaks various Git
    # operations once the affected Git resource is staged, i.e. recursively
    # copied to a new location. (This bug was introduced in Git 2.7.0 and fixed
    # in 2.8.3. Clones created with affected version remain broken.)
    # See https://github.com/Homebrew/homebrew-core/pull/1520 for an example.
    submodule_dirs = Utils.popen_read(
      "git", "submodule", "--quiet", "foreach", "--recursive", "pwd"
    )

    submodule_dirs.lines.map(&:chomp).each do |submodule_dir|
      work_dir = Pathname.new(submodule_dir)

      # Only check and fix if `.git` is a regular file, not a directory.
      dot_git = work_dir/".git"
      next unless dot_git.file?

      git_dir = dot_git.read.chomp[/^gitdir: (.*)$/, 1]
      if git_dir.nil?
        onoe "Failed to parse '#{dot_git}'." if ARGV.homebrew_developer?
        next
      end

      # Only attempt to fix absolute paths.
      next unless git_dir.start_with?("/")

      # Make the `gitdir:` reference relative to the working directory.
      relative_git_dir = Pathname.new(git_dir).relative_path_from(work_dir)
      dot_git.atomic_write("gitdir: #{relative_git_dir}\n")
    end
  end
end
# Git strategy specialized for github.com: uses the GitHub API (when
# available) to check the latest commit without fetching.
class GitHubGitDownloadStrategy < GitDownloadStrategy
  def initialize(name, version, resource)
    super

    return unless %r{^https?://github\.com/(?<user>[^/]+)/(?<repo>[^/]+)\.git$} =~ @url

    @user = user
    @repo = repo
  end

  # Latest commit SHA for @ref, from the GitHub API (nil if unavailable).
  def github_last_commit
    return if ENV["HOMEBREW_NO_GITHUB_API"]

    output, _, status = curl_output(
      "--silent", "--head", "--location",
      "-H", "Accept: application/vnd.github.v3.sha",
      "https://api.github.com/repos/#{@user}/#{@repo}/commits/#{@ref}"
    )

    return unless status.success?

    # The SHA is returned in the ETag header for this media type.
    commit = output[/^ETag: \"(\h+)\"/, 1]
    version.update_commit(commit) if commit
    commit
  end

  # True when an abbreviated commit is ambiguous (GitHub returns non-200).
  def multiple_short_commits_exist?(commit)
    return if ENV["HOMEBREW_NO_GITHUB_API"]

    output, _, status = curl_output(
      "--silent", "--head", "--location",
      "-H", "Accept: application/vnd.github.v3.sha",
      "https://api.github.com/repos/#{@user}/#{@repo}/commits/#{commit}"
    )

    !(status.success? && output && output[/^Status: (200)/, 1] == "200")
  end

  def commit_outdated?(commit)
    @last_commit ||= github_last_commit
    if !@last_commit
      super
    else
      return true unless commit
      return true unless @last_commit.start_with?(commit)

      if multiple_short_commits_exist?(commit)
        true
      else
        version.update_commit(commit)
        false
      end
    end
  end
end
# Check out and update CVS repositories.
class CVSDownloadStrategy < VCSDownloadStrategy
  def initialize(name, version, resource)
    super
    @url = @url.sub(%r{^cvs://}, "")

    if meta.key?(:module)
      @module = meta.fetch(:module)
    elsif @url !~ %r{:[^/]+$}
      @module = name
    else
      @module, @url = split_url(@url)
    end
  end

  def source_modified_time
    # Filter CVS's files because the timestamp for each of them is the moment
    # of clone.
    max_mtime = Time.at(0)
    cached_location.find do |f|
      Find.prune if f.directory? && f.basename.to_s == "CVS"
      next unless f.file?

      mtime = f.mtime
      max_mtime = mtime if mtime > max_mtime
    end
    max_mtime
  end

  private

  def cache_tag
    "cvs"
  end

  def repo_valid?
    (cached_location/"CVS").directory?
  end

  def quiet_flag
    "-Q" unless ARGV.verbose?
  end

  def clone_repo
    with_cvs_env do
      # Login is only needed (and allowed) with pserver; skip for anoncvs.
      safe_system "cvs", *quiet_flag, "-d", @url, "login" if @url.include? "pserver"
      safe_system "cvs", *quiet_flag, "-d", @url, "checkout", "-d", cached_location.basename, @module,
                  chdir: cached_location.dirname
    end
  end

  def update
    with_cvs_env do
      safe_system "cvs", *quiet_flag, "update", chdir: cached_location
    end
  end

  # Split "host:path:module" into [module, "host:path"].
  def split_url(in_url)
    parts = in_url.split(/:/)
    mod = parts.pop
    url = parts.join(":")
    [mod, url]
  end

  # Run the block with the cvs formula's binary on PATH.
  def with_cvs_env
    with_env PATH => PATH.new("/usr/bin", Formula["cvs"].opt_bin, ENV["PATH"]) do
      yield
    end
  end
end
# Clone and update Mercurial repositories.
class MercurialDownloadStrategy < VCSDownloadStrategy
  def initialize(name, version, resource)
    super
    @url = @url.sub(%r{^hg://}, "")
  end

  def source_modified_time
    with_hg_env do
      Time.parse Utils.popen_read("hg", "tip", "--template", "{date|isodate}", "-R", cached_location.to_s)
    end
  end

  def last_commit
    with_hg_env do
      Utils.popen_read("hg", "parent", "--template", "{node|short}", "-R", cached_location.to_s)
    end
  end

  private

  def cache_tag
    "hg"
  end

  def repo_valid?
    (cached_location/".hg").directory?
  end

  def clone_repo
    with_hg_env do
      safe_system "hg", "clone", @url, cached_location
    end
  end

  def update
    with_hg_env do
      safe_system "hg", "--cwd", cached_location, "pull", "--update"

      update_args = if @ref_type && @ref
        ohai "Checking out #{@ref_type} #{@ref}"
        [@ref]
      else
        ["--clean"]
      end

      safe_system "hg", "--cwd", cached_location, "update", *update_args
    end
  end

  # Run the block with the mercurial formula's binary on PATH.
  def with_hg_env
    with_env PATH => PATH.new(Formula["mercurial"].opt_bin, ENV["PATH"]) do
      yield
    end
  end
end
# Check out and update Bazaar repositories.
class BazaarDownloadStrategy < VCSDownloadStrategy
  def initialize(name, version, resource)
    super
    @url.sub!(%r{^bzr://}, "")
    ENV["BZR_HOME"] = HOMEBREW_TEMP
  end

  def source_modified_time
    timestamp = with_bazaar_env do
      Utils.popen_read("bzr", "log", "-l", "1", "--timezone=utc", cached_location.to_s)[/^timestamp: (.+)$/, 1]
    end
    raise "Could not get any timestamps from bzr!" if timestamp.to_s.empty?

    Time.parse timestamp
  end

  def last_commit
    with_bazaar_env do
      Utils.popen_read("bzr", "revno", cached_location.to_s).chomp
    end
  end

  private

  def cache_tag
    "bzr"
  end

  def repo_valid?
    (cached_location/".bzr").directory?
  end

  def clone_repo
    with_bazaar_env do
      # "lightweight" means history-less
      safe_system "bzr", "checkout", "--lightweight", @url, cached_location
    end
  end

  def update
    with_bazaar_env do
      safe_system "bzr", "update", chdir: cached_location
    end
  end

  # Run the block with the bazaar formula's binary on PATH.
  def with_bazaar_env
    with_env "PATH" => PATH.new(Formula["bazaar"].opt_bin, ENV["PATH"]) do
      yield
    end
  end
end
# Clone and update Fossil repositories.
class FossilDownloadStrategy < VCSDownloadStrategy
  def initialize(name, version, resource)
    super
    @url = @url.sub(%r{^fossil://}, "")
  end

  def source_modified_time
    with_fossil_env do
      Time.parse Utils.popen_read("fossil", "info", "tip", "-R", cached_location.to_s)[/^uuid: +\h+ (.+)$/, 1]
    end
  end

  def last_commit
    with_fossil_env do
      Utils.popen_read("fossil", "info", "tip", "-R", cached_location.to_s)[/^uuid: +(\h+) .+$/, 1]
    end
  end

  def repo_valid?
    with_fossil_env do
      quiet_system "fossil", "branch", "-R", cached_location
    end
  end

  private

  def cache_tag
    "fossil"
  end

  def clone_repo
    with_fossil_env do
      safe_system "fossil", "clone", @url, cached_location
    end
  end

  def update
    with_fossil_env do
      safe_system "fossil", "pull", "-R", cached_location
    end
  end

  # Run the block with the fossil formula's binary on PATH.
  def with_fossil_env
    with_env "PATH" => PATH.new(Formula["fossil"].opt_bin, ENV["PATH"]) do
      yield
    end
  end
end
# Map a URL and/or an explicit strategy specification to a download
# strategy class.
class DownloadStrategyDetector
  # @param url [String] the resource URL
  # @param strategy [Class, Symbol, nil] an explicit strategy, a symbol
  #   shorthand, or nil to detect from the URL
  def self.detect(url, strategy = nil)
    if strategy.nil?
      detect_from_url(url)
    elsif strategy == S3DownloadStrategy
      require_aws_sdk
      strategy
    elsif strategy.is_a?(Class) && strategy < AbstractDownloadStrategy
      strategy
    elsif strategy.is_a?(Symbol)
      detect_from_symbol(strategy)
    else
      raise TypeError,
            "Unknown download strategy specification #{strategy.inspect}"
    end
  end

  def self.detect_from_url(url)
    case url
    when %r{^https?://github\.com/[^/]+/[^/]+\.git$}
      GitHubGitDownloadStrategy
    when %r{^https?://.+\.git$}, %r{^git://}
      GitDownloadStrategy
    when %r{^https?://www\.apache\.org/dyn/closer\.cgi}, %r{^https?://www\.apache\.org/dyn/closer\.lua}
      CurlApacheMirrorDownloadStrategy
    when %r{^https?://(.+?\.)?googlecode\.com/svn}, %r{^https?://svn\.}, %r{^svn://}, %r{^https?://(.+?\.)?sourceforge\.net/svnroot/}
      SubversionDownloadStrategy
    when %r{^cvs://}
      CVSDownloadStrategy
    when %r{^hg://}, %r{^https?://(.+?\.)?googlecode\.com/hg}
      MercurialDownloadStrategy
    when %r{^bzr://}
      BazaarDownloadStrategy
    when %r{^fossil://}
      FossilDownloadStrategy
    when %r{^svn\+http://}, %r{^http://svn\.apache\.org/repos/}
      SubversionDownloadStrategy
    when %r{^https?://(.+?\.)?sourceforge\.net/hgweb/}
      MercurialDownloadStrategy
    when %r{^s3://}
      require_aws_sdk
      S3DownloadStrategy
    when %r{^scp://}
      ScpDownloadStrategy
    else
      CurlDownloadStrategy
    end
  end

  def self.detect_from_symbol(symbol)
    case symbol
    when :hg then MercurialDownloadStrategy
    when :nounzip then NoUnzipCurlDownloadStrategy
    when :git then GitDownloadStrategy
    when :bzr then BazaarDownloadStrategy
    when :svn then SubversionDownloadStrategy
    when :curl then CurlDownloadStrategy
    when :cvs then CVSDownloadStrategy
    when :post then CurlPostDownloadStrategy
    when :fossil then FossilDownloadStrategy
    else
      raise "Unknown download strategy #{symbol} was requested."
    end
  end

  # The AWS SDK is a lazily-installed gem dependency, only needed for S3.
  def self.require_aws_sdk
    Homebrew.install_gem! "aws-sdk-s3", "~> 1.8"
    require "aws-sdk-s3"
  end
end