2023-03-15 14:29:15 -07:00
|
|
|
# typed: true
|
2019-04-19 15:38:03 +09:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2016-11-20 13:00:01 -05:00
|
|
|
require "json"
|
2016-06-03 13:46:18 +01:00
|
|
|
require "time"
|
2018-07-01 23:35:29 +02:00
|
|
|
require "unpack_strategy"
|
2018-08-10 04:11:54 +02:00
|
|
|
require "lazy_object"
|
|
|
|
require "cgi"
|
2020-08-21 20:31:43 +01:00
|
|
|
require "lock_file"
|
2024-01-26 17:33:55 -08:00
|
|
|
require "system_command"
|
2013-02-07 12:43:22 -06:00
|
|
|
|
2023-04-24 10:32:10 +01:00
|
|
|
# Need to define this before requiring Mechanize to avoid:
|
|
|
|
# uninitialized constant Mechanize
|
|
|
|
# rubocop:disable Lint/EmptyClass
|
|
|
|
class Mechanize; end
|
|
|
|
require "vendor/gems/mechanize/lib/mechanize/http/content_disposition_parser"
|
|
|
|
# rubocop:enable Lint/EmptyClass
|
2019-01-30 04:01:00 +01:00
|
|
|
|
2020-10-10 15:23:03 +02:00
|
|
|
require "utils/curl"
|
2022-11-07 23:12:27 +09:00
|
|
|
require "utils/github"
|
2024-01-29 18:14:31 -08:00
|
|
|
require "utils/timer"
|
2020-10-10 15:23:03 +02:00
|
|
|
|
2021-03-11 17:45:44 +00:00
|
|
|
require "github_packages"
|
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# @abstract Abstract superclass for all download strategies.
|
|
|
|
#
|
|
|
|
# @api private
|
2009-08-21 20:30:13 +01:00
|
|
|
class AbstractDownloadStrategy
|
2017-06-26 07:30:28 +02:00
|
|
|
extend Forwardable
|
2014-12-09 15:55:28 -05:00
|
|
|
include FileUtils
|
2020-08-02 14:32:31 +02:00
|
|
|
include Context
|
2024-01-26 17:33:55 -08:00
|
|
|
include SystemCommand::Mixin
|
2014-12-09 15:55:28 -05:00
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Extension for bottle downloads.
|
|
|
|
#
|
|
|
|
# @api private
|
2018-03-24 10:53:49 +00:00
|
|
|
module Pourable
  # Announce the bottle being poured, then defer to the host class's
  # #stage implementation. Mixed in by #initialize when meta[:bottle] is set.
  def stage
    ohai "Pouring #{basename}"
    super
  end
end
|
|
|
|
|
2021-03-25 12:18:22 +01:00
|
|
|
# The download URL.
|
|
|
|
#
|
|
|
|
# @api public
|
|
|
|
sig { returns(String) }
|
|
|
|
attr_reader :url
|
|
|
|
|
|
|
|
# Location of the cached download.
|
|
|
|
#
|
|
|
|
# @api public
|
|
|
|
sig { returns(Pathname) }
|
|
|
|
attr_reader :cached_location
|
|
|
|
|
|
|
|
attr_reader :cache, :meta, :name, :version
|
2020-05-12 08:32:27 +01:00
|
|
|
|
2020-08-02 14:32:31 +02:00
|
|
|
private :meta, :name, :version
|
2013-09-23 21:39:19 -05:00
|
|
|
|
2018-08-02 10:29:40 +02:00
|
|
|
# @param url the download URL.
# @param name the resource name, used to build cache filenames.
# @param version the resource version.
# @param meta extra options; `:cache` overrides the default download cache,
#   `:bottle` mixes in {Pourable} so #stage announces the pour.
def initialize(url, name, version, **meta)
  @url = url
  @name = name
  @version = version
  # Callers (e.g. tests) may redirect downloads away from the default cache.
  @cache = meta.fetch(:cache, HOMEBREW_CACHE)
  @meta = meta
  @quiet = false
  extend Pourable if meta[:bottle]
end
|
2009-12-15 12:47:12 +00:00
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Download and cache the resource at {#cached_location}.
|
|
|
|
#
|
|
|
|
# @api public
|
2021-03-24 10:55:33 +01:00
|
|
|
# No-op in this abstract base class; concrete subclasses override it to
# perform the actual download, treating `timeout` as a time budget.
def fetch(timeout: nil); end
|
2014-12-23 01:04:44 -05:00
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Disable any output during downloading.
|
|
|
|
#
|
|
|
|
# @api public
|
2020-10-20 12:03:48 +02:00
|
|
|
sig { void }
|
2023-09-01 19:22:25 +01:00
|
|
|
def quiet!
  @quiet = true
end

# Disable any output during downloading.
#
# @deprecated Use {#quiet!} instead.
# @api private
sig { void }
def shutup!
  odeprecated "AbstractDownloadStrategy#shutup!", "AbstractDownloadStrategy#quiet!"
  quiet!
end

# Whether output is suppressed, either globally (via the current Context)
# or for this instance (via #quiet!).
def quiet?
  Context.current.quiet? || @quiet
end
|
|
|
|
|
2021-02-24 01:13:10 +00:00
|
|
|
# Unpack {#cached_location} into the current working directory.
|
|
|
|
#
|
|
|
|
# Additionally, if a block is given, the working directory was previously empty
|
|
|
|
# and a single directory is extracted from the archive, the block will be called
|
|
|
|
# with the working directory changed to that directory. Otherwise this method
|
|
|
|
# will return, or the block will be called, without changing the current working
|
|
|
|
# directory.
|
2020-08-26 10:50:34 +02:00
|
|
|
#
|
|
|
|
# @api public
|
2021-02-12 11:10:18 -05:00
|
|
|
def stage(&block)
  # Detect the archive type at cached_location (preferring the file
  # extension over content sniffing) and unpack it, recursing into nested
  # archives, into the current working directory.
  # NOTE(review): @ref_type/@ref are only assigned by VCSDownloadStrategy;
  # they are presumably nil for plain file downloads.
  UnpackStrategy.detect(cached_location,
                        prioritize_extension: true,
                        ref_type: @ref_type, ref: @ref)
                .extract_nestedly(basename:,
                                  prioritize_extension: true,
                                  verbose: verbose? && !quiet?)
  # Run the caller's block, possibly chdir'd into a sole extracted
  # directory — see #chdir for the exact rules.
  chdir(&block) if block
end
|
|
|
|
|
2021-02-12 11:10:18 -05:00
|
|
|
# Run the given block with the working directory adjusted for staging:
# - empty directory: raise, since the archive produced nothing;
# - exactly one entry and it is a directory: run the block inside it;
# - anything else: run the block in the current directory.
def chdir(&block)
  children = Dir["*"]
  raise "Empty archive" if children.empty?

  unless children.length == 1
    yield
    return
  end

  sole_entry = children.fetch(0)
  if File.directory?(sole_entry)
    Dir.chdir(sole_entry, &block)
  else
    yield
  end
end
private :chdir
|
2014-12-23 01:04:44 -05:00
|
|
|
|
2018-10-18 21:42:43 -04:00
|
|
|
# @!attribute [r] source_modified_time
|
|
|
|
# Returns the most recent modified time for all files in the current working directory after stage.
|
2020-08-26 10:50:34 +02:00
|
|
|
#
|
|
|
|
# @api public
|
2021-03-25 12:18:22 +01:00
|
|
|
sig { returns(Time) }
|
2016-01-14 18:57:31 +08:00
|
|
|
def source_modified_time
|
|
|
|
Pathname.pwd.to_enum(:find).select(&:file?).map(&:mtime).max
|
|
|
|
end
|
|
|
|
|
2014-12-23 01:04:44 -05:00
|
|
|
# Remove {#cached_location} and any other files associated with the resource
|
|
|
|
# from the cache.
|
2020-08-26 10:50:34 +02:00
|
|
|
#
|
|
|
|
# @api public
|
2014-12-23 01:04:44 -05:00
|
|
|
def clear_cache
  # Recursively delete the cached download; a no-op if it does not exist.
  rm_rf(cached_location)
end

# Basename of the cached download file.
def basename
  cached_location.basename
end
|
2018-08-04 11:32:36 +02:00
|
|
|
|
|
|
|
private
|
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Override Kernel#puts so normal output respects quiet mode.
def puts(*args)
  super(*args) unless quiet?
end

# Override ohai so headline output respects quiet mode.
def ohai(*args)
  super(*args) unless quiet?
end
|
|
|
|
|
2020-09-10 23:39:19 +02:00
|
|
|
# Run a command with stderr suppressed and this strategy's environment applied.
def silent_command(*args, **options)
  system_command(*args, print_stderr: false, env:, **options)
end

# Run a command that must succeed, merging the strategy environment and the
# quiet/verbose output settings with any caller-supplied options (caller
# options win).
def command!(*args, **options)
  merged_env = env.merge(options.fetch(:env, {}))
  system_command!(
    *args,
    env: merged_env,
    **command_output_options,
    **options,
  )
end

# Output-related options derived from the current quiet/verbose state.
def command_output_options
  suppress = quiet?
  {
    print_stdout: !suppress,
    print_stderr: !suppress,
    verbose: verbose? && !suppress,
  }
end

# Extra environment variables for spawned commands; subclasses override.
def env
  {}
end
|
2009-08-21 20:30:13 +01:00
|
|
|
end
|
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# @abstract Abstract superclass for all download strategies downloading from a version control system.
|
|
|
|
#
|
|
|
|
# @api private
|
2013-10-09 21:41:15 -05:00
|
|
|
class VCSDownloadStrategy < AbstractDownloadStrategy
|
2015-03-07 14:59:30 +00:00
|
|
|
REF_TYPES = [:tag, :branch, :revisions, :revision].freeze
|
2014-02-14 15:31:29 -05:00
|
|
|
|
2018-08-02 10:29:40 +02:00
|
|
|
def initialize(url, name, version, **meta)
  super
  # Which kind of ref was requested (:tag, :branch, :revisions or
  # :revision) and its value; both nil when none was given.
  @ref_type, @ref = extract_ref(meta)
  @revision = meta[:revision]
  # VCS checkouts are cached as directories named "<name>--<cache_tag>".
  @cached_location = @cache/Utils.safe_filename("#{name}--#{cache_tag}")
end
|
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Download and cache the repository at {#cached_location}.
|
|
|
|
#
|
|
|
|
# @api public
|
2021-03-24 10:55:33 +01:00
|
|
|
def fetch(timeout: nil)
  # Convert the relative timeout into an absolute deadline shared by the
  # clone/update steps below.
  end_time = Time.now + timeout if timeout

  ohai "Cloning #{url}"

  if cached_location.exist? && repo_valid?
    puts "Updating #{cached_location}"
    update(timeout: end_time)
  elsif cached_location.exist?
    # The cache directory exists but is not a valid repository: start over.
    puts "Removing invalid repository from cache"
    clear_cache
    clone_repo(timeout: end_time)
  else
    clone_repo(timeout: end_time)
  end

  # HEAD downloads record the commit that was actually fetched.
  version.update_commit(last_commit) if head?

  # For tag refs pinned to a revision, verify the tag still points at it.
  return if @ref_type != :tag || @revision.blank? || current_revision.blank? || current_revision == @revision

  raise <<~EOS
    #{@ref} tag should be #{@revision}
    but is actually #{current_revision}
  EOS
end
|
|
|
|
|
2016-07-22 12:21:22 +03:00
|
|
|
# Fetch (clone or update) the repository, then report its latest commit.
def fetch_last_commit
  fetch
  last_commit
end

# Whether the given commit differs from the repository's latest commit.
# The latest commit is fetched once and memoized per instance.
def commit_outdated?(commit)
  @last_commit ||= fetch_last_commit
  commit != @last_commit
end

# Whether this download tracks the repository HEAD rather than a fixed ref.
def head?
  version.respond_to?(:head?) && version.head?
end

# @!attribute [r] last_commit
# Return last commit's unique identifier for the repository.
# Return most recent modified timestamp unless overridden.
#
# @api public
sig { returns(String) }
def last_commit
  source_modified_time.to_i.to_s
end
|
|
|
|
|
2014-12-06 12:29:15 -05:00
|
|
|
private
|
|
|
|
|
|
|
|
# Subclass hook: unique suffix used to name this strategy's cache directory.
def cache_tag
  raise NotImplementedError
end

# Subclass hook: whether cached_location contains a valid repository.
def repo_valid?
  raise NotImplementedError
end
|
|
|
|
|
2021-03-24 10:55:33 +01:00
|
|
|
# Subclass hook: perform the initial clone. `timeout` is an absolute deadline.
sig { params(timeout: T.nilable(Time)).void }
def clone_repo(timeout: nil); end

# Subclass hook: update an existing valid checkout.
sig { params(timeout: T.nilable(Time)).void }
def update(timeout: nil); end

# Subclass hook: the revision currently checked out, if determinable.
def current_revision; end
|
2015-03-07 14:59:30 +00:00
|
|
|
|
2014-12-06 12:29:15 -05:00
|
|
|
# Pick the highest-priority ref type present in the spec hash and return
# the pair [type, value]; [nil, nil] when none is given.
def extract_ref(specs)
  found_type = REF_TYPES.find { |candidate| specs.key?(candidate) }
  [found_type, specs[found_type]]
end
|
2013-10-09 21:41:15 -05:00
|
|
|
end
|
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# @abstract Abstract superclass for all download strategies downloading a single file.
|
|
|
|
#
|
|
|
|
# @api private
|
2015-01-04 15:33:25 -05:00
|
|
|
class AbstractFileDownloadStrategy < AbstractDownloadStrategy
|
2020-08-26 10:50:34 +02:00
|
|
|
# Path for storing an incomplete download while the download is still in progress.
|
|
|
|
#
|
|
|
|
# @api public
|
2018-09-01 15:59:25 +02:00
|
|
|
def temporary_path
  # In-progress downloads live next to the final location with an
  # ".incomplete" suffix and are renamed into place on success.
  @temporary_path ||= Pathname.new("#{cached_location}.incomplete")
end

# Path of the symlink (whose name includes the resource name, version and extension)
# pointing to {#cached_location}.
#
# @api public
def symlink_location
  return @symlink_location if defined?(@symlink_location)

  # Reuse the (possibly double, e.g. ".tar.gz") extension of the URL basename.
  ext = Pathname(parse_basename(url)).extname
  @symlink_location = @cache/Utils.safe_filename("#{name}--#{version}#{ext}")
end

# Path for storing the completed download.
#
# @api public
def cached_location
  return @cached_location if defined?(@cached_location)

  # Downloads are keyed by the SHA-256 of the URL so distinct URLs never collide.
  url_sha256 = Digest::SHA256.hexdigest(url)
  downloads = Pathname.glob(HOMEBREW_CACHE/"downloads/#{url_sha256}--*")
              .reject { |path| path.extname.end_with?(".incomplete") }

  @cached_location = if downloads.count == 1
    # A single completed download already exists for this URL: reuse it.
    downloads.first
  else
    HOMEBREW_CACHE/"downloads/#{url_sha256}--#{Utils.safe_filename(resolved_basename)}"
  end
end

# Basename with the "<sha256>--" cache-key prefix stripped.
def basename
  cached_location.basename.sub(/^[\da-f]{64}--/, "")
end
|
|
|
|
|
2013-04-07 00:49:56 -05:00
|
|
|
private
|
|
|
|
|
2018-08-10 04:11:54 +02:00
|
|
|
# The URL after any resolution (the base implementation performs none and
# simply returns the original URL).
def resolved_url
  resolved_url_and_basename.first
end

# The basename derived from the resolved URL.
def resolved_basename
  resolved_url_and_basename.last
end

# Memoized [resolved URL, basename] pair; no network access here.
def resolved_url_and_basename
  @resolved_url_and_basename ||= [url, parse_basename(url)]
end
|
|
|
|
|
2023-04-18 01:40:06 +01:00
|
|
|
sig { params(url: String, search_query: T::Boolean).returns(String) }
# Derive a sensible filename from a URL. Candidate sources, in increasing
# priority when scanned below: path segments, then (optionally) query-string
# values — except an explicit `response-content-disposition` query parameter,
# which wins outright.
def parse_basename(url, search_query: true)
  components = { path: T.let([], T::Array[String]), query: T.let([], T::Array[String]) }

  if url.match?(URI::DEFAULT_PARSER.make_regexp)
    uri = URI(url)

    if uri.query
      query_params = CGI.parse(uri.query)
      # S3-style URLs can carry the real filename in this parameter.
      query_params["response-content-disposition"].each do |param|
        query_basename = param[/attachment;\s*filename=(["']?)(.+)\1/i, 2]
        return File.basename(query_basename) if query_basename
      end
    end

    if (uri_path = uri.path.presence)
      # Decode each path segment, dropping empty ones.
      components[:path] = uri_path.split("/").filter_map do |part|
        URI::DEFAULT_PARSER.unescape(part).presence
      end
    end

    if search_query && (uri_query = uri.query.presence)
      # Collect query values (not keys) as additional filename candidates.
      components[:query] = URI.decode_www_form(uri_query).map { _2 }
    end
  else
    # Not a URL at all: treat the whole string as a path.
    components[:path] = [url]
  end

  # We need a Pathname because we've monkeypatched extname to support double
  # extensions (e.g. tar.gz).
  # Given a URL like https://example.com/download.php?file=foo-1.0.tar.gz
  # the basename we want is "foo-1.0.tar.gz", not "download.php".
  [*components[:path], *components[:query]].reverse_each do |path|
    path = Pathname(path)
    return path.basename.to_s if path.extname.present?
  end

  # No candidate had an extension: fall back to the last path segment.
  filename = components[:path].last
  return "" if filename.blank?

  File.basename(filename)
end
|
|
|
|
end
|
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Strategy for downloading files using `curl`.
|
|
|
|
#
|
|
|
|
# @api public
|
2015-01-04 15:33:25 -05:00
|
|
|
class CurlDownloadStrategy < AbstractFileDownloadStrategy
|
2020-10-10 15:23:03 +02:00
|
|
|
include Utils::Curl
|
|
|
|
|
2018-08-03 15:11:04 +02:00
|
|
|
attr_reader :mirrors
|
2015-01-04 15:33:25 -05:00
|
|
|
|
2018-08-02 10:29:40 +02:00
|
|
|
def initialize(url, name, version, **meta)
  # Partial (resumable) downloads are enabled by default here because this
  # strategy typically fetches large artifacts. Resumability costs an extra
  # HEAD request during download, so other curl_download callers default it
  # off; subclasses may override this flag.
  @try_partial = true
  @mirrors = meta.fetch(:mirrors, [])

  # Merge `:header` with `:headers`.
  if (header = meta.delete(:header))
    meta[:headers] ||= []
    meta[:headers] << header
  end

  super
end
|
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Download and cache the file at {#cached_location}.
|
|
|
|
#
|
|
|
|
# @api public
|
2021-03-24 10:55:33 +01:00
|
|
|
def fetch(timeout: nil)
  # Absolute deadline shared by URL resolution and the download itself.
  end_time = Time.now + timeout if timeout

  # Serialize concurrent fetches of the same file.
  download_lock = LockFile.new(temporary_path.basename)
  download_lock.lock

  # Try the primary URL first, falling back through mirrors on failure.
  urls = [url, *mirrors]

  begin
    url = urls.shift

    # Rewrite GitHub Packages URLs through a configured artifact mirror.
    if (domain = Homebrew::EnvConfig.artifact_domain)
      url = url.sub(%r{^https?://#{GitHubPackages::URL_DOMAIN}/}o, "#{domain.chomp("/")}/")
    end

    ohai "Downloading #{url}"

    use_cached_location = cached_location.exist?
    # "latest" versions can change upstream, so never trust the cache for them.
    use_cached_location = false if version.respond_to?(:latest?) && version.latest?

    resolved_url, _, last_modified, _, is_redirection = begin
      resolve_url_basename_time_file_size(url, timeout: Utils::Timer.remaining!(end_time))
    rescue ErrorDuringExecution
      # Resolution failure is tolerable when we can fall back to the cache.
      raise unless use_cached_location
    end

    # Authorization is no longer valid after redirects
    meta[:headers]&.delete_if { |header| header.start_with?("Authorization") } if is_redirection

    # The cached location is no longer fresh if Last-Modified is after the file's timestamp
    use_cached_location = false if cached_location.exist? && last_modified && last_modified > cached_location.mtime

    if use_cached_location
      puts "Already downloaded: #{cached_location}"
    else
      begin
        _fetch(url:, resolved_url:, timeout: Utils::Timer.remaining!(end_time))
      rescue ErrorDuringExecution
        raise CurlDownloadStrategyError, url
      end
      # Promote the completed temporary file into the cache without allowing
      # an interrupt to leave a half-moved file behind.
      ignore_interrupts do
        cached_location.dirname.mkpath
        temporary_path.rename(cached_location)
        symlink_location.dirname.mkpath
      end
    end

    # Friendly "name--version.ext" symlink pointing at the hashed cache file.
    FileUtils.ln_s cached_location.relative_path_from(symlink_location.dirname), symlink_location, force: true
  rescue CurlDownloadStrategyError
    raise if urls.empty?

    puts "Trying a mirror..."
    retry
  rescue Timeout::Error => e
    raise Timeout::Error, "Timed out downloading #{self.url}: #{e}"
  end
ensure
  download_lock&.unlock
  download_lock&.path&.unlink
end
|
|
|
|
|
|
|
|
def clear_cache
  super
  # Also discard any half-finished download.
  rm_rf(temporary_path)
end

# The Last-Modified time and Content-Length reported for {#url}.
def resolved_time_file_size(timeout: nil)
  _, _, time, file_size = resolve_url_basename_time_file_size(url, timeout:)
  [time, file_size]
end
|
|
|
|
|
2015-01-04 15:33:25 -05:00
|
|
|
private
|
|
|
|
|
2021-03-24 10:55:33 +01:00
|
|
|
# Resolve the final (post-redirect) URL and its basename.
#
# Fix: the `timeout` argument was previously accepted but discarded — a
# literal `nil` was passed through to the resolver, so callers could not
# bound resolution time. It is now forwarded.
def resolved_url_and_basename(timeout: nil)
  resolved_url, basename, = resolve_url_basename_time_file_size(url, timeout:)
  [resolved_url, basename]
end
|
2016-07-13 20:56:12 +08:00
|
|
|
|
2021-03-24 10:55:33 +01:00
|
|
|
# Probe `url` with curl and return
# [final_url, basename, last_modified_time, file_size, redirected?].
# Results are memoized per URL for the lifetime of this instance.
def resolve_url_basename_time_file_size(url, timeout: nil)
  @resolved_info_cache ||= {}
  return @resolved_info_cache[url] if @resolved_info_cache.include?(url)

  begin
    parsed_output = curl_headers(url.to_s, wanted_headers: ["content-disposition"], timeout:)
  rescue ErrorDuringExecution
    # Header probe failed: fall back to guessing everything from the URL.
    return [url, parse_basename(url), nil, nil, false]
  end

  # One headers hash per response in the redirect chain.
  parsed_headers = parsed_output.fetch(:responses).map { |r| r.fetch(:headers) }

  final_url = curl_response_follow_redirections(parsed_output.fetch(:responses), url)

  content_disposition_parser = Mechanize::HTTP::ContentDispositionParser.new

  # Extract a filename from one Content-Disposition header line, or nil.
  parse_content_disposition = lambda do |line|
    next unless (content_disposition = content_disposition_parser.parse(line.sub(/; *$/, ""), true))

    filename = nil

    # RFC 5987 `filename*` takes precedence over plain `filename`.
    if (filename_with_encoding = content_disposition.parameters["filename*"])
      encoding, encoded_filename = filename_with_encoding.split("''", 2)
      # If the `filename*` has incorrectly added double quotes, e.g.
      # content-disposition: attachment; filename="myapp-1.2.3.pkg"; filename*=UTF-8''"myapp-1.2.3.pkg"
      # Then the encoded_filename will come back as the empty string, in which case we should fall back to the
      # `filename` parameter.
      if encoding.present? && encoded_filename.present?
        filename = URI.decode_www_form_component(encoded_filename).encode(encoding)
      end
    end

    filename = content_disposition.filename if filename.blank?
    next if filename.blank?

    # Servers may include '/' in their Content-Disposition filename header. Take only the basename of this, because:
    # - Unpacking code assumes this is a single file - not something living in a subdirectory.
    # - Directory traversal attacks are possible without limiting this to just the basename.
    File.basename(filename)
  end

  filenames = parsed_headers.flat_map do |headers|
    next [] unless (header = headers["content-disposition"])

    [*parse_content_disposition.call("Content-Disposition: #{header}")]
  end

  # Last-Modified may be an epoch integer or an HTTP date; take the last
  # response's value (the final target of any redirect chain).
  time = parsed_headers
         .flat_map { |headers| [*headers["last-modified"]] }
         .map { |t| t.match?(/^\d+$/) ? Time.at(t.to_i) : Time.parse(t) }
         .last

  file_size = parsed_headers
              .flat_map { |headers| [*headers["content-length"]&.to_i] }
              .last

  is_redirection = url != final_url
  # Content-Disposition wins; otherwise parse the final URL (ignoring its
  # query string when we were redirected there).
  basename = filenames.last || parse_basename(final_url, search_query: !is_redirection)

  @resolved_info_cache[url] = [final_url, basename, time, file_size, is_redirection]
end
|
|
|
|
|
2021-03-24 10:55:33 +01:00
|
|
|
def _fetch(url:, resolved_url:, timeout:)
  ohai "Downloading from #{resolved_url}" if url != resolved_url

  # Refuse to silently downgrade to HTTP when the user has opted out.
  if Homebrew::EnvConfig.no_insecure_redirect? &&
     url.start_with?("https://") && !resolved_url.start_with?("https://")
    $stderr.puts "HTTPS to HTTP redirect detected and HOMEBREW_NO_INSECURE_REDIRECT is set."
    raise CurlDownloadStrategyError, url
  end

  _curl_download resolved_url, temporary_path, timeout
end

# Perform the actual curl download of `resolved_url` into `to`
# (normally the temporary path, renamed into the cache on success).
def _curl_download(resolved_url, to, timeout)
  curl_download resolved_url, to:, try_partial: @try_partial, timeout:
end
|
|
|
|
|
2015-07-08 23:53:20 -05:00
|
|
|
# Curl options to be always passed to curl,
|
2017-08-08 18:10:13 +02:00
|
|
|
# with raw head calls (`curl --head`) or with actual `fetch`.
|
2018-08-02 11:16:36 +02:00
|
|
|
# Positional curl arguments derived from meta: cookies, referer,
# credentials, extra headers and (with a warning) --insecure.
def _curl_args
  args = []

  # Cookies are passed as a single "k=v;k2=v2" cookie string.
  args += ["-b", meta.fetch(:cookies).map { |k, v| "#{k}=#{v}" }.join(";")] if meta.key?(:cookies)

  args += ["-e", meta.fetch(:referer)] if meta.key?(:referer)

  args += ["--user", meta.fetch(:user)] if meta.key?(:user)

  args += meta.fetch(:headers, []).flat_map { |h| ["--header", h.strip] }

  if meta[:insecure]
    # Warn once per instance before disabling certificate verification.
    unless @insecure_warning_shown
      opoo DevelopmentTools.insecure_download_warning("an updated certificates file")
      @insecure_warning_shown = true
    end
    args += ["--insecure"]
  end

  args
end

# Keyword curl options derived from meta (currently just the user agent).
def _curl_opts
  return { user_agent: meta.fetch(:user_agent) } if meta.key?(:user_agent)

  {}
end

# Capture curl output, always applying this strategy's args and opts.
def curl_output(*args, **options)
  super(*_curl_args, *args, **_curl_opts, **options)
end

def curl(*args, **options)
  # With mirrors available, fail over quickly rather than hang on a dead host.
  options[:connect_timeout] = 15 unless mirrors.empty?
  super(*_curl_args, *args, **_curl_opts, **command_output_options, **options)
end
|
|
|
|
end
|
|
|
|
|
2021-07-26 12:39:25 +02:00
|
|
|
# Strategy for downloading a file using homebrew's curl.
|
|
|
|
#
|
|
|
|
# @api public
|
|
|
|
class HomebrewCurlDownloadStrategy < CurlDownloadStrategy
  private

  # Download with the brewed curl; the `curl` formula must already be installed.
  def _curl_download(resolved_url, to, timeout)
    raise HomebrewCurlDownloadStrategyError, url unless Formula["curl"].any_version_installed?

    curl_download resolved_url, to:, try_partial: @try_partial, timeout:, use_homebrew_curl: true
  end

  # As in the superclass, but forced onto the brewed curl.
  def curl_output(*args, **options)
    raise HomebrewCurlDownloadStrategyError, url unless Formula["curl"].any_version_installed?

    options[:use_homebrew_curl] = true
    super(*args, **options)
  end
end
|
|
|
|
|
2021-03-11 17:45:44 +00:00
|
|
|
# Strategy for downloading a file from an GitHub Packages URL.
|
|
|
|
#
|
|
|
|
# @api public
|
|
|
|
class CurlGitHubPackagesDownloadStrategy < CurlDownloadStrategy
  # Callers may pre-set the basename to skip URL resolution entirely.
  attr_writer :resolved_basename

  def initialize(url, name, version, **meta)
    meta[:headers] ||= []
    # GitHub Packages authorization header.
    # HOMEBREW_GITHUB_PACKAGES_AUTH set in brew.sh
    meta[:headers] << "Authorization: #{HOMEBREW_GITHUB_PACKAGES_AUTH}"
    super(url, name, version, **meta)
  end

  private

  # When a basename was supplied, skip the network round-trip of the
  # generic resolver and report the URL unchanged.
  def resolve_url_basename_time_file_size(url, timeout: nil)
    return super if @resolved_basename.blank?

    [url, @resolved_basename, nil, nil, false]
  end
end
|
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Strategy for downloading a file from an Apache Mirror URL.
#
# @api public
class CurlApacheMirrorDownloadStrategy < CurlDownloadStrategy
  # The formula's declared mirrors plus the backup mirrors advertised by the
  # Apache mirror-resolution service.
  def mirrors
    combined_mirrors
  end

  private

  # Memoised union of `@mirrors` and the Apache-provided backup mirrors.
  # Uses a `defined?` guard so the lookup runs at most once per instance.
  def combined_mirrors
    return @combined_mirrors if defined?(@combined_mirrors)

    # Each backup mirror is a host prefix; append the path of the requested file.
    backup_mirrors = apache_mirrors.fetch("backup", [])
                                   .map { |mirror| "#{mirror}#{apache_mirrors["path_info"]}" }

    @combined_mirrors = [*@mirrors, *backup_mirrors]
  end

  def resolve_url_basename_time_file_size(url, timeout: nil)
    if url == self.url
      # First resolution: rewrite the original URL to the "preferred" mirror
      # reported by the Apache mirror service.
      super("#{apache_mirrors["preferred"]}#{apache_mirrors["path_info"]}", timeout:)
    else
      # Already rewritten (e.g. a backup mirror): resolve as-is.
      super
    end
  end

  # Query the Apache mirror service (appending `&asjson=1`) and parse the JSON
  # it returns; memoised via the `rescue`-guarded assignment below.
  def apache_mirrors
    return @apache_mirrors if defined?(@apache_mirrors)

    json, = curl_output("--silent", "--location", "#{url}&asjson=1")
    @apache_mirrors = JSON.parse(json)
  rescue JSON::ParserError
    raise CurlDownloadStrategyError, "Couldn't determine mirror, try again later."
  end
end
|
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Strategy for downloading via an HTTP POST request using `curl`.
# Query parameters on the URL are converted into POST parameters.
#
# @api public
class CurlPostDownloadStrategy < CurlDownloadStrategy
  private

  def _fetch(url:, resolved_url:, timeout:)
    args =
      if meta.key?(:data)
        # Each entry of `meta[:data]` becomes its own form-encoded `-d` flag.
        [url] + meta[:data].flat_map { |datum| ["-d", URI.encode_www_form([datum])] }
      else
        base, query = url.split("?", 2)
        if query.nil?
          # No query string: issue an empty POST.
          [base, "-X", "POST"]
        else
          # Move the query string into the request body.
          [base, "-d", query]
        end
      end

    curl_download(*args, to: temporary_path, try_partial: @try_partial, timeout:)
  end
end
|
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Strategy for downloading archives without automatically extracting them.
# (Useful for downloading `.jar` files.)
#
# @api public
class NoUnzipCurlDownloadStrategy < CurlDownloadStrategy
  # Copy the downloaded file into place verbatim instead of unpacking it.
  def stage
    unpacker = UnpackStrategy::Uncompressed.new(cached_location)
    unpacker.extract(basename:, verbose: verbose? && !quiet?)
    yield if block_given?
  end
end
|
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Strategy for extracting local binary packages.
#
# @api private
class LocalBottleDownloadStrategy < AbstractFileDownloadStrategy
  # `path` is the already-downloaded bottle on disk; nothing is fetched,
  # so the VCS/curl initialisation in `super` is deliberately skipped.
  def initialize(path) # rubocop:disable Lint/MissingSuper
    @cached_location = path
    # Pourable prints "Pouring <basename>" when this bottle is staged.
    extend Pourable
  end
end
|
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Strategy for downloading a Subversion repository.
#
# @api public
class SubversionDownloadStrategy < VCSDownloadStrategy
  def initialize(url, name, version, **meta)
    super
    # `svn+http://` is a Homebrew convention; svn itself wants plain `http://`.
    @url = @url.sub("svn+http://", "")
  end

  # Download and cache the repository at {#cached_location}.
  #
  # @api public
  def fetch(timeout: nil)
    # If the cached working copy points at a different URL and cannot be
    # switched to the new one, start over from a clean cache.
    if @url.chomp("/") != repo_url || !silent_command("svn", args: ["switch", @url, cached_location]).success?
      clear_cache
    end
    super
  end

  # @see AbstractDownloadStrategy#source_modified_time
  #
  # @api public
  sig { returns(Time) }
  def source_modified_time
    time = if Version.new(T.must(Utils::Svn.version)) >= Version.new("1.9")
      # svn 1.9+ can print the last-changed date directly.
      out, = silent_command("svn", args: ["info", "--show-item", "last-changed-date"], chdir: cached_location)
      out
    else
      # Older svn: scrape the date out of the full `svn info` output.
      out, = silent_command("svn", args: ["info"], chdir: cached_location)
      out[/^Last Changed Date: (.+)$/, 1]
    end
    Time.parse time
  end

  # @see VCSDownloadStrategy#last_commit
  #
  # @api public
  sig { returns(String) }
  def last_commit
    out, = silent_command("svn", args: ["info", "--show-item", "revision"], chdir: cached_location)
    out.strip
  end

  private

  # The URL the cached working copy was checked out from, per `svn info`.
  def repo_url
    out, = silent_command("svn", args: ["info"], chdir: cached_location)
    out.strip[/^URL: (.+)$/, 1]
  end

  # Yields each `svn:externals` entry of the repository as (name, url).
  def externals
    out, = silent_command("svn", args: ["propget", "svn:externals", @url])
    out.chomp.split("\n").each do |line|
      name, url = line.split(/\s+/)
      yield name, url
    end
  end

  sig {
    params(target: Pathname, url: String, revision: T.nilable(String), ignore_externals: T::Boolean,
           timeout: T.nilable(Time)).void
  }
  def fetch_repo(target, url, revision = nil, ignore_externals: false, timeout: nil)
    # Use "svn update" when the repository already exists locally.
    # This saves on bandwidth and will have a similar effect to verifying the
    # cache as it will make any changes to get the right revision.
    args = []
    args << "--quiet" unless verbose?

    if revision
      ohai "Checking out #{@ref}"
      args << "-r" << revision
    end

    args << "--ignore-externals" if ignore_externals

    # Allow self-signed/invalid certificates only when the formula opted in.
    args.concat Utils::Svn.invalid_cert_flags if meta[:trust_cert] == true

    if target.directory?
      command! "svn", args: ["update", *args], chdir: target.to_s, timeout: Utils::Timer.remaining(timeout)
    else
      command! "svn", args: ["checkout", url, target, *args], timeout: Utils::Timer.remaining(timeout)
    end
  end

  sig { returns(String) }
  def cache_tag
    head? ? "svn-HEAD" : "svn"
  end

  def repo_valid?
    (cached_location/".svn").directory?
  end

  sig { params(timeout: T.nilable(Time)).void }
  def clone_repo(timeout: nil)
    case @ref_type
    when :revision
      fetch_repo cached_location, @url, @ref, timeout:
    when :revisions
      # nil is OK for main_revision, as fetch_repo will then get latest
      main_revision = @ref[:trunk]
      fetch_repo(cached_location, @url, main_revision, ignore_externals: true, timeout:)

      # Fetch each external at its pinned revision (if any) into a subdirectory.
      externals do |external_name, external_url|
        fetch_repo cached_location/external_name, external_url, @ref[external_name], ignore_externals: true,
                   timeout:
      end
    else
      fetch_repo cached_location, @url, timeout:
    end
  end
  # Updating is the same operation: fetch_repo switches to `svn update`
  # when the target directory already exists.
  alias update clone_repo
end
|
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Strategy for downloading a Git repository.
#
# @api public
class GitDownloadStrategy < VCSDownloadStrategy
  def initialize(url, name, version, **meta)
    # Needs to be before the call to `super`, as the VCSDownloadStrategy's
    # constructor calls `cache_tag` and sets the cache path.
    @only_path = meta[:only_path]

    if @only_path.present?
      # "Cone" mode of sparse checkout requires patterns to be directories
      @only_path = "/#{@only_path}" unless @only_path.start_with?("/")
      @only_path = "#{@only_path}/" unless @only_path.end_with?("/")
    end

    super
    @ref_type ||= :branch
    @ref ||= "master"
  end

  # @see AbstractDownloadStrategy#source_modified_time
  #
  # @api public
  sig { returns(Time) }
  def source_modified_time
    # Committer date of HEAD in RFC2822 format.
    out, = silent_command("git", args: ["--git-dir", git_dir, "show", "-s", "--format=%cD"])
    Time.parse(out)
  end

  # @see VCSDownloadStrategy#last_commit
  #
  # @api public
  sig { returns(String) }
  def last_commit
    out, = silent_command("git", args: ["--git-dir", git_dir, "rev-parse", "--short=7", "HEAD"])
    out.chomp
  end

  private

  sig { returns(String) }
  def cache_tag
    # Sparse checkouts get their own cache directory so they never mix with
    # full clones of the same repository.
    if partial_clone_sparse_checkout?
      "git-sparse"
    else
      "git"
    end
  end

  sig { returns(Integer) }
  def cache_version
    0
  end

  sig { params(timeout: T.nilable(Time)).void }
  def update(timeout: nil)
    config_repo
    update_repo(timeout:)
    checkout(timeout:)
    reset
    update_submodules(timeout:) if submodules?
  end

  # Whether the cached clone is a shallow clone.
  def shallow_dir?
    (git_dir/"shallow").exist?
  end

  def git_dir
    cached_location/".git"
  end

  # Whether `@ref` resolves to a commit in the cached clone.
  def ref?
    silent_command("git",
                   args: ["--git-dir", git_dir, "rev-parse", "-q", "--verify", "#{@ref}^{commit}"])
      .success?
  end

  def current_revision
    out, = silent_command("git", args: ["--git-dir", git_dir, "rev-parse", "-q", "--verify", "HEAD"])
    out.strip
  end

  def repo_valid?
    silent_command("git", args: ["--git-dir", git_dir, "status", "-s"]).success?
  end

  def submodules?
    (cached_location/".gitmodules").exist?
  end

  # Sparse checkout is used only when a path was requested AND the installed
  # git supports partial clone + sparse checkout.
  def partial_clone_sparse_checkout?
    return false if @only_path.blank?

    Utils::Git.supports_partial_clone_sparse_checkout?
  end

  sig { returns(T::Array[String]) }
  def clone_args
    args = %w[clone]

    case @ref_type
    when :branch, :tag
      args << "--branch" << @ref
    end

    # Blobless partial clone; files are fetched lazily at checkout time.
    args << "--no-checkout" << "--filter=blob:none" if partial_clone_sparse_checkout?

    args << "--config" << "advice.detachedHead=false" # silences detached head warning
    args << "--config" << "core.fsmonitor=false" # prevent fsmonitor from watching this repo
    args << @url << cached_location.to_s
  end

  sig { returns(String) }
  def refspec
    case @ref_type
    when :branch then "+refs/heads/#{@ref}:refs/remotes/origin/#{@ref}"
    when :tag then "+refs/tags/#{@ref}:refs/tags/#{@ref}"
    else default_refspec
    end
  end

  sig { returns(String) }
  def default_refspec
    # https://git-scm.com/book/en/v2/Git-Internals-The-Refspec
    "+refs/heads/*:refs/remotes/origin/*"
  end

  # (Re-)apply the repository configuration to the cached clone so a cache
  # created with older settings (or a changed URL/ref) behaves consistently.
  sig { void }
  def config_repo
    command! "git",
             args: ["config", "remote.origin.url", @url],
             chdir: cached_location
    command! "git",
             args: ["config", "remote.origin.fetch", refspec],
             chdir: cached_location
    command! "git",
             args: ["config", "remote.origin.tagOpt", "--no-tags"],
             chdir: cached_location
    command! "git",
             args: ["config", "advice.detachedHead", "false"],
             chdir: cached_location
    command! "git",
             args: ["config", "core.fsmonitor", "false"],
             chdir: cached_location

    return unless partial_clone_sparse_checkout?

    command! "git",
             args: ["config", "origin.partialclonefilter", "blob:none"],
             chdir: cached_location
    configure_sparse_checkout
  end

  sig { params(timeout: T.nilable(Time)).void }
  def update_repo(timeout: nil)
    # A non-branch ref that already resolves locally needs no fetch.
    return if @ref_type != :branch && ref?

    # Convert any shallow clone to full clone
    if shallow_dir?
      command! "git",
               args: ["fetch", "origin", "--unshallow"],
               chdir: cached_location,
               timeout: Utils::Timer.remaining(timeout)
    else
      command! "git",
               args: ["fetch", "origin"],
               chdir: cached_location,
               timeout: Utils::Timer.remaining(timeout)
    end
  end

  sig { params(timeout: T.nilable(Time)).void }
  def clone_repo(timeout: nil)
    command! "git", args: clone_args, timeout: Utils::Timer.remaining(timeout)

    # Record the cache layout version inside the clone itself.
    command! "git",
             args: ["config", "homebrew.cacheversion", cache_version],
             chdir: cached_location,
             timeout: Utils::Timer.remaining(timeout)

    configure_sparse_checkout if partial_clone_sparse_checkout?

    checkout(timeout:)
    update_submodules(timeout:) if submodules?
  end

  sig { params(timeout: T.nilable(Time)).void }
  def checkout(timeout: nil)
    ohai "Checking out #{@ref_type} #{@ref}" if @ref_type && @ref
    command! "git", args: ["checkout", "-f", @ref, "--"], chdir: cached_location,
                    timeout: Utils::Timer.remaining(timeout)
  end

  # Hard-reset the working tree to the requested ref (for branches, the
  # remote-tracking ref so local state never diverges from origin).
  sig { void }
  def reset
    ref = case @ref_type
    when :branch
      "origin/#{@ref}"
    when :revision, :tag
      @ref
    end

    command! "git",
             args: ["reset", "--hard", *ref, "--"],
             chdir: cached_location
  end

  sig { params(timeout: T.nilable(Time)).void }
  def update_submodules(timeout: nil)
    command! "git",
             args: ["submodule", "foreach", "--recursive", "git submodule sync"],
             chdir: cached_location,
             timeout: Utils::Timer.remaining(timeout)
    command! "git",
             args: ["submodule", "update", "--init", "--recursive"],
             chdir: cached_location,
             timeout: Utils::Timer.remaining(timeout)
    fix_absolute_submodule_gitdir_references!
  end

  # When checking out Git repositories with recursive submodules, some Git
  # versions create `.git` files with absolute instead of relative `gitdir:`
  # pointers. This works for the cached location, but breaks various Git
  # operations once the affected Git resource is staged, i.e. recursively
  # copied to a new location. (This bug was introduced in Git 2.7.0 and fixed
  # in 2.8.3. Clones created with affected version remain broken.)
  # See https://github.com/Homebrew/homebrew-core/pull/1520 for an example.
  def fix_absolute_submodule_gitdir_references!
    submodule_dirs = command!("git",
                              args: ["submodule", "--quiet", "foreach", "--recursive", "pwd"],
                              chdir: cached_location).stdout

    submodule_dirs.lines.map(&:chomp).each do |submodule_dir|
      work_dir = Pathname.new(submodule_dir)

      # Only check and fix if `.git` is a regular file, not a directory.
      dot_git = work_dir/".git"
      next unless dot_git.file?

      git_dir = dot_git.read.chomp[/^gitdir: (.*)$/, 1]
      if git_dir.nil?
        onoe "Failed to parse '#{dot_git}'." if Homebrew::EnvConfig.developer?
        next
      end

      # Only attempt to fix absolute paths.
      next unless git_dir.start_with?("/")

      # Make the `gitdir:` reference relative to the working directory.
      relative_git_dir = Pathname.new(git_dir).relative_path_from(work_dir)
      dot_git.atomic_write("gitdir: #{relative_git_dir}\n")
    end
  end

  # Enable cone-mode sparse checkout and write the requested path pattern.
  def configure_sparse_checkout
    command! "git",
             args: ["config", "core.sparseCheckout", "true"],
             chdir: cached_location
    command! "git",
             args: ["config", "core.sparseCheckoutCone", "true"],
             chdir: cached_location

    (git_dir/"info").mkpath
    (git_dir/"info/sparse-checkout").atomic_write("#{@only_path}\n")
  end
end
|
2009-09-27 19:30:39 -04:00
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Strategy for downloading a Git repository from GitHub.
#
# @api public
class GitHubGitDownloadStrategy < GitDownloadStrategy
  def initialize(url, name, version, **meta)
    super

    # Extract owner/repository from a `https://github.com/user/repo.git` URL;
    # other URL shapes simply leave @user/@repo unset.
    match_data = %r{^https?://github\.com/(?<user>[^/]+)/(?<repo>[^/]+)\.git$}.match(@url)
    return unless match_data

    @user = match_data[:user]
    @repo = match_data[:repo]
  end

  # Whether `commit` is behind the tip of the tracked ref, using the GitHub
  # API to avoid a local fetch where possible.
  def commit_outdated?(commit)
    @last_commit ||= GitHub.last_commit(@user, @repo, @ref, version)
    if @last_commit
      return true unless commit
      return true unless @last_commit.start_with?(commit)

      # A short prefix match is only conclusive when it is unambiguous.
      if GitHub.multiple_short_commits_exist?(@user, @repo, commit)
        true
      else
        version.update_commit(commit)
        false
      end
    else
      # API lookup failed: fall back to GitDownloadStrategy's local check.
      super
    end
  end

  sig { returns(String) }
  def default_refspec
    # Track only the repository's default branch when it can be determined.
    if default_branch
      "+refs/heads/#{default_branch}:refs/remotes/origin/#{default_branch}"
    else
      super
    end
  end

  # The remote's default branch name (memoised; nil when undeterminable).
  sig { returns(T.nilable(String)) }
  def default_branch
    return @default_branch if defined?(@default_branch)

    # Ask git to (re)detect where origin/HEAD points.
    command! "git",
             args: ["remote", "set-head", "origin", "--auto"],
             chdir: cached_location

    result = command! "git",
                      args: ["symbolic-ref", "refs/remotes/origin/HEAD"],
                      chdir: cached_location

    @default_branch = result.stdout[%r{^refs/remotes/origin/(.*)$}, 1]
  end
end
|
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Strategy for downloading a CVS repository.
#
# @api public
class CVSDownloadStrategy < VCSDownloadStrategy
  def initialize(url, name, version, **meta)
    super
    @url = @url.sub(%r{^cvs://}, "")

    # Determine the CVS module: explicit meta option, the formula name when
    # the URL carries no trailing `:module` component, or split it off the URL.
    if meta.key?(:module)
      @module = meta.fetch(:module)
    elsif !@url.match?(%r{:[^/]+$})
      @module = name
    else
      @module, @url = split_url(@url)
    end
  end

  # @see AbstractDownloadStrategy#source_modified_time
  #
  # @api public
  sig { returns(Time) }
  def source_modified_time
    # Filter CVS's files because the timestamp for each of them is the moment
    # of clone.
    max_mtime = Time.at(0)
    cached_location.find do |f|
      Find.prune if f.directory? && f.basename.to_s == "CVS"
      next unless f.file?

      mtime = f.mtime
      max_mtime = mtime if mtime > max_mtime
    end
    max_mtime
  end

  private

  # Prefer a brewed `cvs` (if installed) while keeping /usr/bin available.
  def env
    { "PATH" => PATH.new("/usr/bin", Formula["cvs"].opt_bin, ENV.fetch("PATH")) }
  end

  sig { returns(String) }
  def cache_tag
    "cvs"
  end

  def repo_valid?
    (cached_location/"CVS").directory?
  end

  # `-Q` (really quiet) unless the user asked for verbose output.
  def quiet_flag
    "-Q" unless verbose?
  end

  sig { params(timeout: T.nilable(Time)).void }
  def clone_repo(timeout: nil)
    # Login is only needed (and allowed) with pserver; skip for anoncvs.
    if @url.include? "pserver"
      command! "cvs", args: [*quiet_flag, "-d", @url, "login"],
                      timeout: Utils::Timer.remaining(timeout)
    end

    command! "cvs",
             args: [*quiet_flag, "-d", @url, "checkout", "-d", cached_location.basename, @module],
             chdir: cached_location.dirname,
             timeout: Utils::Timer.remaining(timeout)
  end

  sig { params(timeout: T.nilable(Time)).void }
  def update(timeout: nil)
    command! "cvs",
             args: [*quiet_flag, "update"],
             chdir: cached_location,
             timeout: Utils::Timer.remaining(timeout)
  end

  # Split `host:path:module` into [module, "host:path"]; the module is the
  # final colon-separated component.
  def split_url(in_url)
    parts = in_url.split(":")
    mod = parts.pop
    url = parts.join(":")
    [mod, url]
  end
end
|
2012-09-25 10:11:13 -04:00
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Strategy for downloading a Mercurial repository.
#
# @api public
class MercurialDownloadStrategy < VCSDownloadStrategy
  def initialize(url, name, version, **meta)
    super
    # `hg://` is a Homebrew convention; strip it before handing the URL to hg.
    @url = @url.sub(%r{^hg://}, "")
  end

  # @see AbstractDownloadStrategy#source_modified_time
  #
  # @api public
  sig { returns(Time) }
  def source_modified_time
    out, = silent_command("hg",
                          args: ["tip", "--template", "{date|isodate}", "-R", cached_location])

    Time.parse(out)
  end

  # @see VCSDownloadStrategy#last_commit
  #
  # @api public
  sig { returns(String) }
  def last_commit
    out, = silent_command("hg", args: ["parent", "--template", "{node|short}", "-R", cached_location])
    out.chomp
  end

  private

  # Run hg from the brewed `mercurial` formula.
  def env
    { "PATH" => PATH.new(Formula["mercurial"].opt_bin, ENV.fetch("PATH")) }
  end

  sig { returns(String) }
  def cache_tag
    "hg"
  end

  def repo_valid?
    (cached_location/".hg").directory?
  end

  sig { params(timeout: T.nilable(Time)).void }
  def clone_repo(timeout: nil)
    clone_args = %w[clone]

    # Restrict the clone to the requested branch or revision/tag, if any.
    case @ref_type
    when :branch
      clone_args << "--branch" << @ref
    when :revision, :tag
      clone_args << "--rev" << @ref
    end

    clone_args << @url << cached_location.to_s
    command! "hg", args: clone_args, timeout: Utils::Timer.remaining(timeout)
  end

  sig { params(timeout: T.nilable(Time)).void }
  def update(timeout: nil)
    pull_args = %w[pull]

    case @ref_type
    when :branch
      pull_args << "--branch" << @ref
    when :revision, :tag
      pull_args << "--rev" << @ref
    end

    command! "hg", args: ["--cwd", cached_location, *pull_args], timeout: Utils::Timer.remaining(timeout)

    # `--clean` discards local modifications; fall back to the `default`
    # branch when no explicit ref was requested.
    update_args = %w[update --clean]
    update_args << if @ref_type && @ref
      ohai "Checking out #{@ref_type} #{@ref}"
      @ref
    else
      "default"
    end

    command! "hg", args: ["--cwd", cached_location, *update_args], timeout: Utils::Timer.remaining(timeout)
  end

  def current_revision
    out, = silent_command("hg", args: ["--cwd", cached_location, "identify", "--id"])
    out.strip
  end
end
|
2012-09-28 21:04:03 -05:00
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Strategy for downloading a Bazaar repository.
|
|
|
|
#
|
|
|
|
# @api public
|
2014-12-06 12:29:15 -05:00
|
|
|
class BazaarDownloadStrategy < VCSDownloadStrategy
|
2018-08-02 10:29:40 +02:00
|
|
|
def initialize(url, name, version, **meta)
|
2014-12-18 13:06:05 -05:00
|
|
|
super
|
2023-05-01 10:45:13 -04:00
|
|
|
@url = @url.sub(%r{^bzr://}, "")
|
2014-12-18 13:06:05 -05:00
|
|
|
end
|
|
|
|
|
2021-03-25 12:18:22 +01:00
|
|
|
# @see AbstractDownloadStrategy#source_modified_time
|
|
|
|
# @api public
|
2020-10-20 12:03:48 +02:00
|
|
|
sig { returns(Time) }
|
2016-05-01 00:21:08 +03:00
|
|
|
def source_modified_time
|
2020-09-10 23:39:19 +02:00
|
|
|
out, = silent_command("bzr", args: ["log", "-l", "1", "--timezone=utc", cached_location])
|
2018-08-04 11:32:36 +02:00
|
|
|
timestamp = out.chomp
|
2018-09-14 17:02:19 +01:00
|
|
|
raise "Could not get any timestamps from bzr!" if timestamp.blank?
|
2018-09-17 02:45:00 +02:00
|
|
|
|
2018-08-04 11:32:36 +02:00
|
|
|
Time.parse(timestamp)
|
2016-05-01 00:21:08 +03:00
|
|
|
end
|
|
|
|
|
2021-03-25 12:18:22 +01:00
|
|
|
# @see VCSDownloadStrategy#last_commit
|
|
|
|
# @api public
|
|
|
|
sig { returns(String) }
|
2016-05-24 23:00:29 +03:00
|
|
|
def last_commit
|
2020-09-10 23:39:19 +02:00
|
|
|
out, = silent_command("bzr", args: ["revno", cached_location])
|
2018-08-04 11:32:36 +02:00
|
|
|
out.chomp
|
2016-05-24 23:00:29 +03:00
|
|
|
end
|
|
|
|
|
2014-12-06 12:29:15 -05:00
|
|
|
private
|
|
|
|
|
2018-08-04 11:32:36 +02:00
|
|
|
def env
|
|
|
|
{
|
2022-07-29 17:02:39 +08:00
|
|
|
"PATH" => PATH.new(Formula["breezy"].opt_bin, ENV.fetch("PATH")),
|
2018-08-04 11:32:36 +02:00
|
|
|
"BZR_HOME" => HOMEBREW_TEMP,
|
|
|
|
}
|
|
|
|
end
|
|
|
|
|
2020-10-20 12:03:48 +02:00
|
|
|
sig { returns(String) }
|
2014-12-06 12:29:15 -05:00
|
|
|
def cache_tag
|
|
|
|
"bzr"
|
|
|
|
end
|
2010-03-22 21:19:20 -07:00
|
|
|
|
2014-12-06 12:29:15 -05:00
|
|
|
def repo_valid?
|
2017-06-01 16:06:51 +02:00
|
|
|
(cached_location/".bzr").directory?
|
2014-12-06 12:29:15 -05:00
|
|
|
end
|
|
|
|
|
2021-03-24 10:55:33 +01:00
|
|
|
sig { params(timeout: T.nilable(Time)).void }
|
|
|
|
def clone_repo(timeout: nil)
|
2018-08-04 11:32:36 +02:00
|
|
|
# "lightweight" means history-less
|
2020-09-10 23:39:19 +02:00
|
|
|
command! "bzr",
|
2021-03-24 10:55:33 +01:00
|
|
|
args: ["checkout", "--lightweight", @url, cached_location],
|
2024-01-29 18:14:31 -08:00
|
|
|
timeout: Utils::Timer.remaining(timeout)
|
2014-12-06 12:29:15 -05:00
|
|
|
end
|
|
|
|
|
2021-03-24 10:55:33 +01:00
|
|
|
sig { params(timeout: T.nilable(Time)).void }
|
|
|
|
def update(timeout: nil)
|
2020-09-10 23:39:19 +02:00
|
|
|
command! "bzr",
|
2021-03-24 10:55:33 +01:00
|
|
|
args: ["update"],
|
|
|
|
chdir: cached_location,
|
2024-01-29 18:14:31 -08:00
|
|
|
timeout: Utils::Timer.remaining(timeout)
|
2014-12-06 12:29:15 -05:00
|
|
|
end
|
2014-12-06 12:29:15 -05:00
|
|
|
end
|
2012-09-28 21:04:03 -05:00
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Strategy for downloading a Fossil repository.
|
|
|
|
#
|
|
|
|
# @api public
|
2014-12-06 12:29:15 -05:00
|
|
|
class FossilDownloadStrategy < VCSDownloadStrategy
|
2018-08-02 10:29:40 +02:00
|
|
|
def initialize(url, name, version, **meta)
|
2014-12-18 13:06:05 -05:00
|
|
|
super
|
2015-08-03 13:09:07 +01:00
|
|
|
@url = @url.sub(%r{^fossil://}, "")
|
2014-12-18 13:06:05 -05:00
|
|
|
end
|
|
|
|
|
2021-03-25 12:18:22 +01:00
|
|
|
# @see AbstractDownloadStrategy#source_modified_time
|
|
|
|
# @api public
|
2020-10-20 12:03:48 +02:00
|
|
|
sig { returns(Time) }
|
2016-05-01 00:21:58 +03:00
|
|
|
def source_modified_time
|
2020-09-10 23:39:19 +02:00
|
|
|
out, = silent_command("fossil", args: ["info", "tip", "-R", cached_location])
|
2018-08-04 11:32:36 +02:00
|
|
|
Time.parse(out[/^uuid: +\h+ (.+)$/, 1])
|
2016-05-01 00:21:58 +03:00
|
|
|
end
|
|
|
|
|
2021-03-25 12:18:22 +01:00
|
|
|
# @see VCSDownloadStrategy#last_commit
|
|
|
|
# @api public
|
|
|
|
sig { returns(String) }
|
2016-05-24 23:00:29 +03:00
|
|
|
def last_commit
|
2020-09-10 23:39:19 +02:00
|
|
|
out, = silent_command("fossil", args: ["info", "tip", "-R", cached_location])
|
2018-08-04 11:32:36 +02:00
|
|
|
out[/^uuid: +(\h+) .+$/, 1]
|
2018-07-01 23:35:29 +02:00
|
|
|
end
|
|
|
|
|
|
|
|
def repo_valid?
|
2020-09-10 23:39:19 +02:00
|
|
|
silent_command("fossil", args: ["branch", "-R", cached_location]).success?
|
2016-05-24 23:00:29 +03:00
|
|
|
end
|
|
|
|
|
2014-12-06 12:29:15 -05:00
|
|
|
private
|
|
|
|
|
2018-08-04 11:32:36 +02:00
|
|
|
def env
|
2022-06-15 05:40:43 +01:00
|
|
|
{ "PATH" => PATH.new(Formula["fossil"].opt_bin, ENV.fetch("PATH")) }
|
2018-08-04 11:32:36 +02:00
|
|
|
end
|
|
|
|
|
2020-10-20 12:03:48 +02:00
|
|
|
sig { returns(String) }
|
2014-12-06 12:29:15 -05:00
|
|
|
def cache_tag
|
|
|
|
"fossil"
|
|
|
|
end
|
2014-12-06 12:29:15 -05:00
|
|
|
|
2021-03-24 10:55:33 +01:00
|
|
|
sig { params(timeout: T.nilable(Time)).void }
|
|
|
|
def clone_repo(timeout: nil)
|
2024-01-29 18:14:31 -08:00
|
|
|
command! "fossil", args: ["clone", @url, cached_location], timeout: Utils::Timer.remaining(timeout)
|
2014-12-06 12:29:15 -05:00
|
|
|
end
|
|
|
|
|
2021-03-24 10:55:33 +01:00
|
|
|
sig { params(timeout: T.nilable(Time)).void }
|
|
|
|
def update(timeout: nil)
|
2024-01-29 18:14:31 -08:00
|
|
|
command! "fossil", args: ["pull", "-R", cached_location], timeout: Utils::Timer.remaining(timeout)
|
2014-12-06 12:29:15 -05:00
|
|
|
end
|
2010-09-29 20:22:34 +08:00
|
|
|
end
|
|
|
|
|
2020-08-26 10:50:34 +02:00
|
|
|
# Helper class for detecting a download strategy from a URL.
|
|
|
|
#
|
|
|
|
# @api private
|
2012-06-25 21:39:28 -05:00
|
|
|
class DownloadStrategyDetector
|
2018-08-03 11:55:37 +02:00
|
|
|
def self.detect(url, using = nil)
|
2020-07-07 11:29:33 +01:00
|
|
|
if using.nil?
|
2013-09-26 16:59:45 -05:00
|
|
|
detect_from_url(url)
|
2018-08-03 11:55:37 +02:00
|
|
|
elsif using.is_a?(Class) && using < AbstractDownloadStrategy
|
|
|
|
using
|
|
|
|
elsif using.is_a?(Symbol)
|
|
|
|
detect_from_symbol(using)
|
2012-10-15 01:19:31 -05:00
|
|
|
else
|
2013-09-26 16:59:45 -05:00
|
|
|
raise TypeError,
|
2020-07-07 11:29:33 +01:00
|
|
|
"Unknown download strategy specification #{using.inspect}"
|
2012-06-25 21:39:28 -05:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2012-10-15 01:19:31 -05:00
|
|
|
def self.detect_from_url(url)
|
|
|
|
case url
|
2021-03-11 17:45:44 +00:00
|
|
|
when GitHubPackages::URL_REGEX
|
|
|
|
CurlGitHubPackagesDownloadStrategy
|
2016-07-22 12:21:22 +03:00
|
|
|
when %r{^https?://github\.com/[^/]+/[^/]+\.git$}
|
|
|
|
GitHubGitDownloadStrategy
|
2018-09-02 16:15:09 +01:00
|
|
|
when %r{^https?://.+\.git$},
|
2020-11-07 03:19:26 +01:00
|
|
|
%r{^git://},
|
|
|
|
%r{^https?://git\.sr\.ht/[^/]+/[^/]+$}
|
2014-03-18 15:28:21 -05:00
|
|
|
GitDownloadStrategy
|
2018-09-02 16:15:09 +01:00
|
|
|
when %r{^https?://www\.apache\.org/dyn/closer\.cgi},
|
|
|
|
%r{^https?://www\.apache\.org/dyn/closer\.lua}
|
2014-03-18 15:28:21 -05:00
|
|
|
CurlApacheMirrorDownloadStrategy
|
2022-04-15 16:23:54 +01:00
|
|
|
when %r{^https?://([A-Za-z0-9\-.]+\.)?googlecode\.com/svn},
|
2018-09-02 16:15:09 +01:00
|
|
|
%r{^https?://svn\.},
|
|
|
|
%r{^svn://},
|
2020-11-13 10:07:02 -05:00
|
|
|
%r{^svn\+http://},
|
2020-11-13 17:21:51 +01:00
|
|
|
%r{^http://svn\.apache\.org/repos/},
|
2022-04-15 16:23:54 +01:00
|
|
|
%r{^https?://([A-Za-z0-9\-.]+\.)?sourceforge\.net/svnroot/}
|
2014-03-18 15:28:21 -05:00
|
|
|
SubversionDownloadStrategy
|
2015-08-03 13:09:07 +01:00
|
|
|
when %r{^cvs://}
|
2014-03-18 15:28:21 -05:00
|
|
|
CVSDownloadStrategy
|
2018-09-02 16:15:09 +01:00
|
|
|
when %r{^hg://},
|
2022-04-15 16:23:54 +01:00
|
|
|
%r{^https?://([A-Za-z0-9\-.]+\.)?googlecode\.com/hg},
|
|
|
|
%r{^https?://([A-Za-z0-9\-.]+\.)?sourceforge\.net/hgweb/}
|
2014-03-18 15:28:21 -05:00
|
|
|
MercurialDownloadStrategy
|
2015-08-03 13:09:07 +01:00
|
|
|
when %r{^bzr://}
|
2014-03-18 15:28:21 -05:00
|
|
|
BazaarDownloadStrategy
|
2015-08-03 13:09:07 +01:00
|
|
|
when %r{^fossil://}
|
2014-03-18 15:28:21 -05:00
|
|
|
FossilDownloadStrategy
|
|
|
|
else
|
|
|
|
CurlDownloadStrategy
|
2012-06-25 21:39:28 -05:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2012-10-15 01:19:31 -05:00
|
|
|
def self.detect_from_symbol(symbol)
|
|
|
|
case symbol
|
2018-08-03 11:55:37 +02:00
|
|
|
when :hg then MercurialDownloadStrategy
|
|
|
|
when :nounzip then NoUnzipCurlDownloadStrategy
|
|
|
|
when :git then GitDownloadStrategy
|
|
|
|
when :bzr then BazaarDownloadStrategy
|
|
|
|
when :svn then SubversionDownloadStrategy
|
|
|
|
when :curl then CurlDownloadStrategy
|
2021-07-26 12:39:25 +02:00
|
|
|
when :homebrew_curl then HomebrewCurlDownloadStrategy
|
2018-08-03 11:55:37 +02:00
|
|
|
when :cvs then CVSDownloadStrategy
|
|
|
|
when :post then CurlPostDownloadStrategy
|
|
|
|
when :fossil then FossilDownloadStrategy
|
2012-06-25 21:39:28 -05:00
|
|
|
else
|
2019-12-16 11:34:31 +00:00
|
|
|
raise TypeError, "Unknown download strategy #{symbol} was requested."
|
2012-06-25 21:39:28 -05:00
|
|
|
end
|
2010-03-22 21:19:20 -07:00
|
|
|
end
|
|
|
|
end
|