# typed: true # rubocop:todo Sorbet/StrictSigil
# frozen_string_literal: true

require "url"
require "checksum"
require "download_strategy"

# A mixin for objects that can be fetched from a URL (with optional
# mirrors) into the Homebrew cache and verified against a checksum.
#
# Concrete includers must implement `#name`; they may override the
# private `determine_url`/`determine_url_mirrors`/`cache` hooks to
# customise where the download comes from and where it is stored.
module Downloadable
  include Context
  extend T::Helpers

  abstract!

  # The primary URL this object is downloaded from, if one was set.
  sig { overridable.returns(T.nilable(URL)) }
  attr_reader :url

  # The expected checksum of the download, if one was provided.
  sig { overridable.returns(T.nilable(Checksum)) }
  attr_reader :checksum

  # Fallback URLs tried when the primary URL fails.
  sig { overridable.returns(T::Array[String]) }
  attr_reader :mirrors

  sig { void }
  def initialize
    @mirrors = T.let([], T::Array[String])
  end

  # Deep-copy mutable state so a `dup`ed instance does not share its
  # checksum/mirrors/version with the original.
  sig { overridable.params(other: Object).returns(T.self_type) }
  def initialize_dup(other)
    super
    @checksum = @checksum.dup
    @mirrors = @mirrors.dup
    @version = @version.dup
    self
  end

  # Freeze the mutable state alongside the instance itself.
  sig { overridable.returns(T.self_type) }
  def freeze
    @checksum.freeze
    @mirrors.freeze
    @version.freeze
    super
  end

  # The name of this download (e.g. a formula or cask token).
  sig { abstract.returns(String) }
  def name; end

  # A human-readable label derived from the class name,
  # e.g. `Foo::BarBaz` => "bar baz".
  sig { returns(String) }
  def download_type
    T.must(self.class.name&.split("::")&.last).gsub(/([[:lower:]])([[:upper:]])/, '\1 \2').downcase
  end

  # Whether the download is already present in the cache.
  sig(:final) { returns(T::Boolean) }
  def downloaded?
    cached_download.exist?
  end

  # The location this download is (or would be) cached at.
  sig { overridable.returns(Pathname) }
  def cached_download
    downloader.cached_location
  end

  # Remove any cached copy of this download.
  sig { overridable.void }
  def clear_cache
    downloader.clear_cache
  end

  # The version of this download, either set explicitly or inferred
  # from the URL. Returns `nil` rather than a null version.
  sig { overridable.returns(T.nilable(Version)) }
  def version
    return @version if @version && !@version.null?

    version = determine_url&.version
    version unless version&.null?
  end

  # The strategy class used to fetch this download (memoised).
  sig { overridable.returns(T.class_of(AbstractDownloadStrategy)) }
  def download_strategy
    @download_strategy ||= determine_url&.download_strategy
  end

  # The strategy instance used to fetch this download (memoised).
  #
  # @raise [ArgumentError] if no URL was set.
  sig { overridable.returns(AbstractDownloadStrategy) }
  def downloader
    @downloader ||= begin
      primary_url, *mirrors = determine_url_mirrors
      raise ArgumentError, "attempted to use a `Downloadable` without a URL!" if primary_url.blank?

      download_strategy.new(primary_url, download_name, version,
                            mirrors:, cache:, **T.must(@url).specs)
    end
  end

  # Fetch the download into the cache, optionally verifying its checksum.
  #
  # @return [Pathname] the cached download.
  # @raise [DownloadError] wrapping any strategy-level failure.
  sig {
    overridable.params(
      verify_download_integrity: T::Boolean,
      timeout:                   T.nilable(T.any(Integer, Float)),
      quiet:                     T::Boolean,
    ).returns(Pathname)
  }
  def fetch(verify_download_integrity: true, timeout: nil, quiet: false)
    cache.mkpath

    begin
      downloader.quiet! if quiet
      downloader.fetch(timeout:)
    rescue ErrorDuringExecution, CurlDownloadStrategyError => e
      raise DownloadError.new(self, e)
    end

    download = cached_download
    verify_download_integrity(download) if verify_download_integrity
    download
  end

  # Verify `filename` against the expected checksum, warning (rather
  # than failing) when no checksum was provided.
  sig { overridable.params(filename: Pathname).void }
  def verify_download_integrity(filename)
    if filename.file?
      ohai "Verifying checksum for '#{filename.basename}'" if verbose?
      filename.verify_checksum(checksum)
    end
  rescue ChecksumMissingError
    opoo <<~EOS
      Cannot verify integrity of '#{filename.basename}'.
      No checksum was provided.
      For your reference, the checksum is:
        sha256 "#{filename.sha256}"
    EOS
  end

  # The basename used for the cached file (memoised).
  sig { overridable.returns(String) }
  def download_name
    @download_name ||= File.basename(determine_url.to_s)
  end

  private

  # Hook: the URL to download from. Overridable by includers.
  sig { overridable.returns(T.nilable(URL)) }
  def determine_url
    @url
  end

  # Hook: the full, de-duplicated list of URLs to try, primary first.
  sig { overridable.returns(T::Array[String]) }
  def determine_url_mirrors
    [determine_url.to_s, *mirrors].uniq
  end

  # Hook: the directory downloads are cached in.
  sig { overridable.returns(Pathname) }
  def cache
    HOMEBREW_CACHE
  end
end
|