# typed: false
# frozen_string_literal: true

require "cask/denylist"
require "cask/download"
require "digest"
require "livecheck/livecheck"
require "utils/curl"
require "utils/git"
require "utils/shared_audits"

module Cask
  # Audit a cask for various problems.
  #
  # @api private
  class Audit
    extend T::Sig

    extend Predicable

    attr_reader :cask, :download

    attr_predicate :appcast?, :new_cask?, :strict?, :signing?, :online?, :token_conflicts?

    def initialize(cask, appcast: nil, download: nil, quarantine: nil,
                   token_conflicts: nil, online: nil, strict: nil, signing: nil,
                   new_cask: nil, only: [], except: [])
      # `new_cask` implies `online`, `token_conflicts`, `strict` and `signing`
      online = new_cask if online.nil?
      strict = new_cask if strict.nil?
      signing = new_cask if signing.nil?
      token_conflicts = new_cask if token_conflicts.nil?

      # `online` implies `appcast` and `download`
      appcast = online if appcast.nil?
      download = online if download.nil?

      # `signing` implies `download`
      download = signing if download.nil?

      @cask = cask
      @appcast = appcast
      @download = Download.new(cask, quarantine: quarantine) if download
      @online = online
      @strict = strict
      @signing = signing
      @new_cask = new_cask
      @token_conflicts = token_conflicts
      @only = only
      @except = except
    end
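
    # Rough usage sketch (assumes `cask` is an already-loaded Cask instance):
    # `new_cask: true` cascades through the defaults above, so it also enables
    # the online, strict, signing and token_conflicts checks. `only`/`except`
    # take the names of the private `check_*` methods without their prefix.
    #
    #   audit = Audit.new(cask, new_cask: true, except: ["download"])
    #   audit.run!
    #   puts audit.summary unless audit.success?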
    def run!
      only_audits = @only
      except_audits = @except

      private_methods.map(&:to_s).grep(/^check_/).each do |audit_method_name|
        name = audit_method_name.delete_prefix("check_")
        next if !only_audits.empty? && only_audits&.exclude?(name)
        next if except_audits&.include?(name)

        send(audit_method_name)
      end

      self
    rescue => e
      odebug e, e.backtrace
      add_error "exception while auditing #{cask}: #{e.message}"
      self
    end

    def errors
      @errors ||= []
    end

    def warnings
      @warnings ||= []
    end

    sig { returns(T::Boolean) }
    def errors?
      errors.any?
    end

    sig { returns(T::Boolean) }
    def warnings?
      warnings.any?
    end

    sig { returns(T::Boolean) }
    def success?
      !(errors? || warnings?)
    end

    sig { params(message: T.nilable(String), location: T.nilable(String)).void }
    def add_error(message, location: nil)
      errors << ({ message: message, location: location })
    end

    sig { params(message: T.nilable(String), location: T.nilable(String)).void }
    def add_warning(message, location: nil)
      if strict?
        add_error message, location: location
      else
        warnings << ({ message: message, location: location })
      end
    end

    def result
      if errors?
        Formatter.error("failed")
      elsif warnings?
        Formatter.warning("warning")
      else
        Formatter.success("passed")
      end
    end
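
    # A rough sketch of the text `summary` builds (assuming a cask token
    # "example" with one failed check); the exact colouring comes from
    # Formatter:
    #
    #   audit for example: failed
    #    - cask token contains non-ascii characters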
    sig { params(include_passed: T::Boolean, include_warnings: T::Boolean).returns(T.nilable(String)) }
    def summary(include_passed: false, include_warnings: true)
      return if success? && !include_passed
      return if warnings? && !errors? && !include_warnings

      summary = ["audit for #{cask}: #{result}"]

      errors.each do |error|
        summary << " #{Formatter.error("-")} #{error[:message]}"
      end

      if include_warnings
        warnings.each do |warning|
          summary << " #{Formatter.warning("-")} #{warning[:message]}"
        end
      end

      summary.join("\n")
    end

    private

    sig { void }
    def check_untrusted_pkg
      odebug "Auditing pkg stanza: allow_untrusted"

      return if @cask.sourcefile_path.nil?

      tap = @cask.tap
      return if tap.nil?
      return if tap.user != "Homebrew"

      return if cask.artifacts.none? { |k| k.is_a?(Artifact::Pkg) && k.stanza_options.key?(:allow_untrusted) }

      add_error "allow_untrusted is not permitted in official Homebrew Cask taps"
    end

    sig { void }
    def check_stanza_requires_uninstall
      odebug "Auditing stanzas which require an uninstall"

      return if cask.artifacts.none? { |k| k.is_a?(Artifact::Pkg) || k.is_a?(Artifact::Installer) }
      return if cask.artifacts.any?(Artifact::Uninstall)

      add_error "installer and pkg stanzas require an uninstall stanza"
    end

    sig { void }
    def check_single_pre_postflight
      odebug "Auditing preflight and postflight stanzas"

      if cask.artifacts.count { |k| k.is_a?(Artifact::PreflightBlock) && k.directives.key?(:preflight) } > 1
        add_error "only a single preflight stanza is allowed"
      end

      count = cask.artifacts.count do |k|
        k.is_a?(Artifact::PostflightBlock) &&
          k.directives.key?(:postflight)
      end
      return unless count > 1

      add_error "only a single postflight stanza is allowed"
    end

    sig { void }
    def check_single_uninstall_zap
      odebug "Auditing single uninstall_* and zap stanzas"

      if cask.artifacts.count { |k| k.is_a?(Artifact::Uninstall) } > 1
        add_error "only a single uninstall stanza is allowed"
      end

      count = cask.artifacts.count do |k|
        k.is_a?(Artifact::PreflightBlock) &&
          k.directives.key?(:uninstall_preflight)
      end

      add_error "only a single uninstall_preflight stanza is allowed" if count > 1

      count = cask.artifacts.count do |k|
        k.is_a?(Artifact::PostflightBlock) &&
          k.directives.key?(:uninstall_postflight)
      end

      add_error "only a single uninstall_postflight stanza is allowed" if count > 1

      return unless cask.artifacts.count { |k| k.is_a?(Artifact::Zap) } > 1

      add_error "only a single zap stanza is allowed"
    end

    sig { void }
    def check_required_stanzas
      odebug "Auditing required stanzas"
      [:version, :sha256, :url, :homepage].each do |sym|
        add_error "a #{sym} stanza is required" unless cask.send(sym)
      end
      add_error "at least one name stanza is required" if cask.name.empty?
      # TODO: specific DSL knowledge should not be spread around in various files like this
      rejected_artifacts = [:uninstall, :zap]
      installable_artifacts = cask.artifacts.reject { |k| rejected_artifacts.include?(k) }
      add_error "at least one activatable artifact stanza is required" if installable_artifacts.empty?
    end

    sig { void }
    def check_description_present
      # Fonts seldom benefit from descriptions and requiring them disproportionately increases the maintenance burden
      return if cask.tap == "homebrew/cask-fonts"

      add_warning "Cask should have a description. Please add a `desc` stanza." if cask.desc.blank?
    end

    sig { void }
    def check_no_string_version_latest
      return unless cask.version

      odebug "Auditing version :latest does not appear as a string ('latest')"
      return unless cask.version.raw_version == "latest"

      add_error "you should use version :latest instead of version 'latest'"
    end

    sig { void }
    def check_sha256_no_check_if_latest
      return unless cask.sha256

      odebug "Auditing sha256 :no_check with version :latest"
      return unless cask.version.latest?
      return if cask.sha256 == :no_check

      add_error "you should use sha256 :no_check when version is :latest"
    end

    sig { void }
    def check_sha256_no_check_if_unversioned
      return unless cask.sha256
      return if cask.sha256 == :no_check

      add_error "Use `sha256 :no_check` when URL is unversioned." if cask.url&.unversioned?
    end

    sig { void }
    def check_sha256_actually_256
      return unless cask.sha256

      odebug "Auditing sha256 string is a legal SHA-256 digest"
      return unless cask.sha256.is_a?(Checksum)
      return if cask.sha256.length == 64 && cask.sha256[/^[0-9a-f]+$/i]

      add_error "sha256 string must be of 64 hexadecimal characters"
    end

    sig { void }
    def check_sha256_invalid
      return unless cask.sha256

      odebug "Auditing sha256 is not a known invalid value"
      empty_sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
      return unless cask.sha256 == empty_sha256

      add_error "cannot use the sha256 for an empty string: #{empty_sha256}"
    end

    sig { void }
    def check_appcast_and_livecheck
      return unless cask.appcast

      if cask.livecheckable?
        add_error "Cask has a `livecheck`, the `appcast` should be removed."
      elsif new_cask?
        add_error "New casks should use a `livecheck` instead of an `appcast`."
      end
    end

    sig { void }
    def check_latest_with_appcast_or_livecheck
      return unless cask.version.latest?

      add_error "Casks with an `appcast` should not use `version :latest`." if cask.appcast
      add_error "Casks with a `livecheck` should not use `version :latest`." if cask.livecheckable?
    end

    sig { void }
    def check_latest_with_auto_updates
      return unless cask.version.latest?
      return unless cask.auto_updates

      add_error "Casks with `version :latest` should not use `auto_updates`."
    end

    LIVECHECK_REFERENCE_URL = "https://docs.brew.sh/Cask-Cookbook#stanza-livecheck"

    sig { params(livecheck_result: T.nilable(T.any(T::Boolean, Symbol))).void }
    def check_hosting_with_livecheck(livecheck_result: check_livecheck_version)
      return if cask.discontinued? || cask.version.latest?
      return if block_url_offline? || cask.appcast || cask.livecheckable?
      return if livecheck_result == :auto_detected

      add_livecheck = "please add a livecheck. See #{Formatter.url(LIVECHECK_REFERENCE_URL)}"

      case cask.url.to_s
      when %r{sourceforge.net/(\S+)}
        return unless online?

        add_error "Download is hosted on SourceForge, #{add_livecheck}"
      when %r{dl.devmate.com/(\S+)}
        add_error "Download is hosted on DevMate, #{add_livecheck}"
      when %r{rink.hockeyapp.net/(\S+)}
        add_error "Download is hosted on HockeyApp, #{add_livecheck}"
      end
    end

    SOURCEFORGE_OSDN_REFERENCE_URL = "https://docs.brew.sh/Cask-Cookbook#sourceforgeosdn-urls"

    sig { void }
    def check_download_url_format
      return unless cask.url

      odebug "Auditing URL format"
      if bad_sourceforge_url?
        add_error "SourceForge URL format incorrect. See #{Formatter.url(SOURCEFORGE_OSDN_REFERENCE_URL)}"
      elsif bad_osdn_url?
        add_error "OSDN URL format incorrect. See #{Formatter.url(SOURCEFORGE_OSDN_REFERENCE_URL)}"
      end
    end

    VERIFIED_URL_REFERENCE_URL = "https://docs.brew.sh/Cask-Cookbook#when-url-and-homepage-domains-differ-add-verified"

    sig { void }
    def check_unnecessary_verified
      return if block_url_offline?
      return unless verified_present?
      return unless url_match_homepage?
      return unless verified_matches_url?

      add_error "The URL's domain #{Formatter.url(domain)} matches the homepage domain " \
                "#{Formatter.url(homepage)}, the 'verified' parameter of the 'url' stanza is unnecessary. " \
                "See #{Formatter.url(VERIFIED_URL_REFERENCE_URL)}"
    end

    sig { void }
    def check_missing_verified
      return if block_url_offline?
      return if file_url?
      return if url_match_homepage?
      return if verified_present?

      add_error "The URL's domain #{Formatter.url(domain)} does not match the homepage domain " \
                "#{Formatter.url(homepage)}, a 'verified' parameter has to be added to the 'url' stanza. " \
                "See #{Formatter.url(VERIFIED_URL_REFERENCE_URL)}"
    end

    sig { void }
    def check_no_match
      return if block_url_offline?
      return unless verified_present?
      return if verified_matches_url?

      add_error "Verified URL #{Formatter.url(url_from_verified)} does not match URL " \
                "#{Formatter.url(strip_url_scheme(cask.url.to_s))}. " \
                "See #{Formatter.url(VERIFIED_URL_REFERENCE_URL)}"
    end

    sig { void }
    def check_generic_artifacts
      cask.artifacts.select { |a| a.is_a?(Artifact::Artifact) }.each do |artifact|
        unless artifact.target.absolute?
          add_error "target must be absolute path for #{artifact.class.english_name} #{artifact.source}"
        end
      end
    end

    sig { void }
    def check_languages
      @cask.languages.each do |language|
        Locale.parse(language)
      rescue Locale::ParserError
        add_error "Locale '#{language}' is invalid."
      end
    end

    sig { void }
    def check_token_conflicts
      return unless token_conflicts?
      return unless core_formula_names.include?(cask.token)

      add_warning "possible duplicate, cask token conflicts with Homebrew core formula: " \
                  "#{Formatter.url(core_formula_url)}"
    end
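
    # An illustration of the token rules enforced below, using a hypothetical
    # app name: "Foo@Home 2" would be expected to use the token "foo-at-home-2"
    # (lowercase, "@" spelled out as "-at-", whitespace replaced by hyphens,
    # no leading, trailing or doubled hyphens).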
    sig { void }
    def check_token_valid
      add_error "cask token contains non-ascii characters" unless cask.token.ascii_only?
      add_error "cask token + should be replaced by -plus-" if cask.token.include? "+"
      add_error "cask token whitespace should be replaced by hyphens" if cask.token.include? " "
      add_error "cask token @ should be replaced by -at-" if cask.token.include? "@"
      add_error "cask token underscores should be replaced by hyphens" if cask.token.include? "_"
      add_error "cask token should not contain double hyphens" if cask.token.include? "--"

      if cask.token.match?(/[^a-z0-9\-]/)
        add_error "cask token should only contain lowercase alphanumeric characters and hyphens"
      end

      return if !cask.token.start_with?("-") && !cask.token.end_with?("-")

      add_error "cask token should not have leading or trailing hyphens"
    end

    sig { void }
    def check_token_bad_words
      return unless new_cask?

      token = cask.token

      add_error "cask token contains .app" if token.end_with? ".app"

      if /-(?<designation>alpha|beta|rc|release-candidate)$/ =~ cask.token &&
         cask.tap&.official? &&
         cask.tap != "homebrew/cask-versions"
        add_error "cask token contains version designation '#{designation}'"
      end

      add_warning "cask token mentions launcher" if token.end_with? "launcher"
      add_warning "cask token mentions desktop" if token.end_with? "desktop"
      add_warning "cask token mentions platform" if token.end_with? "mac", "osx", "macos"
      add_warning "cask token mentions architecture" if token.end_with? "x86", "32_bit", "x86_64", "64_bit"

      frameworks = %w[cocoa qt gtk wx java]
      return if frameworks.include?(token) || !token.end_with?(*frameworks)

      add_warning "cask token mentions framework"
    end

    sig { void }
    def check_download
      return if download.blank? || cask.url.blank?

      odebug "Auditing download"
      download.fetch
    rescue => e
      add_error "download not possible: #{e}"
    end

    sig { void }
    def check_signing
      return if !signing? || download.blank? || cask.url.blank?

      odebug "Auditing signing"
      artifacts = cask.artifacts.select { |k| k.is_a?(Artifact::Pkg) || k.is_a?(Artifact::App) }

      return if artifacts.empty?

      downloaded_path = download.fetch
      primary_container = UnpackStrategy.detect(downloaded_path, type: @cask.container&.type, merge_xattrs: true)

      return if primary_container.nil?

      Dir.mktmpdir do |tmpdir|
        tmpdir = Pathname(tmpdir)
        primary_container.extract_nestedly(to: tmpdir, basename: downloaded_path.basename, verbose: false)
        artifacts.each do |artifact|
          path = case artifact
          when Artifact::Moved
            tmpdir/artifact.source.basename
          when Artifact::Pkg
            artifact.path
          end
          next unless path.exist?

          result = system_command("codesign", args: ["--verify", path], print_stderr: false)

          next if result.success?

          message = "Signature verification failed:\n#{result.merged_output}\nmacOS on ARM requires applications " \
                    "to be signed. Please contact the upstream developer to let them know they should "

          message += if result.stderr.include?("not signed at all")
            "sign their app."
          else
            "fix the signature of their app."
          end

          add_warning message
        end
      end
    end
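
    # Return values of the check below, as read from its body (used by
    # check_hosting_with_livecheck above):
    #   nil            - the appcast/livecheck checks are not enabled
    #   :skip          - a livecheck skip condition applies
    #   :auto_detected - livecheck found the cask's current version on its own
    #   :appcast       - the version differs but the cask only has an appcast
    #   false          - the livecheck version differs (an error is added)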
    sig { returns(T.nilable(T.any(T::Boolean, Symbol))) }
    def check_livecheck_version
      return unless appcast?

      referenced_cask, = Homebrew::Livecheck.resolve_livecheck_reference(cask)

      # Respect skip conditions for a referenced cask
      if referenced_cask
        skip_info = Homebrew::Livecheck::SkipConditions.referenced_skip_information(
          referenced_cask,
          Homebrew::Livecheck.cask_name(cask),
        )
      end

      # Respect cask skip conditions (e.g. discontinued, latest, unversioned)
      skip_info ||= Homebrew::Livecheck::SkipConditions.skip_information(cask)
      return :skip if skip_info.present?

      latest_version = Homebrew::Livecheck.latest_version(
        cask,
        referenced_formula_or_cask: referenced_cask,
      )&.fetch(:latest)
      if cask.version.to_s == latest_version.to_s
        if cask.appcast
          add_error "Version '#{latest_version}' was automatically detected by livecheck; " \
                    "the appcast should be removed."
        end

        return :auto_detected
      end

      return :appcast if cask.appcast && !cask.livecheckable?

      add_error "Version '#{cask.version}' differs from '#{latest_version}' retrieved by livecheck."

      false
    end

    sig { void }
    def check_appcast_contains_version
      return unless appcast?
      return if cask.appcast.to_s.empty?
      return if cask.appcast.must_contain == :no_check

      appcast_url = cask.appcast.to_s
      begin
        details = curl_http_content_headers_and_checksum(appcast_url, user_agent: HOMEBREW_USER_AGENT_FAKE_SAFARI)
        appcast_contents = details[:file]
      rescue
        add_error "appcast at URL '#{Formatter.url(appcast_url)}' offline or looping"
        return
      end

      version_stanza = cask.version.to_s
      adjusted_version_stanza = cask.appcast.must_contain.presence || version_stanza.match(/^[[:alnum:].]+/)[0]
      return if appcast_contents.blank?
      return if appcast_contents.include?(adjusted_version_stanza)

      add_error <<~EOS.chomp
        appcast at URL '#{Formatter.url(appcast_url)}' does not contain \
        the version number '#{adjusted_version_stanza}':
        #{appcast_contents}
      EOS
    end

    sig { void }
    def check_github_prerelease_version
      return if cask.tap == "homebrew/cask-versions"

      odebug "Auditing GitHub prerelease"
      user, repo = get_repo_data(%r{https?://github\.com/([^/]+)/([^/]+)/?.*}) if online?
      return if user.nil?

      tag = SharedAudits.github_tag_from_url(cask.url)
      tag ||= cask.version
      error = SharedAudits.github_release(user, repo, tag, cask: cask)
      add_error error if error
    end

    sig { void }
    def check_gitlab_prerelease_version
      return if cask.tap == "homebrew/cask-versions"

      user, repo = get_repo_data(%r{https?://gitlab\.com/([^/]+)/([^/]+)/?.*}) if online?
      return if user.nil?

      odebug "Auditing GitLab prerelease"

      tag = SharedAudits.gitlab_tag_from_url(cask.url)
      tag ||= cask.version
      error = SharedAudits.gitlab_release(user, repo, tag, cask: cask)
      add_error error if error
    end

    sig { void }
    def check_github_repository_archived
      user, repo = get_repo_data(%r{https?://github\.com/([^/]+)/([^/]+)/?.*}) if online?
      return if user.nil?

      odebug "Auditing GitHub repo archived"

      metadata = SharedAudits.github_repo_data(user, repo)
      return if metadata.nil?

      return unless metadata["archived"]

      message = "GitHub repo is archived"

      if cask.discontinued?
        add_warning message
      else
        add_error message
      end
    end

    sig { void }
    def check_gitlab_repository_archived
      user, repo = get_repo_data(%r{https?://gitlab\.com/([^/]+)/([^/]+)/?.*}) if online?
      return if user.nil?

      odebug "Auditing GitLab repo archived"

      metadata = SharedAudits.gitlab_repo_data(user, repo)
      return if metadata.nil?

      return unless metadata["archived"]

      message = "GitLab repo is archived"

      if cask.discontinued?
        add_warning message
      else
        add_error message
      end
    end

    sig { void }
    def check_github_repository
      return unless new_cask?

      user, repo = get_repo_data(%r{https?://github\.com/([^/]+)/([^/]+)/?.*})
      return if user.nil?

      odebug "Auditing GitHub repo"

      error = SharedAudits.github(user, repo)
      add_error error if error
    end

    sig { void }
    def check_gitlab_repository
      return unless new_cask?

      user, repo = get_repo_data(%r{https?://gitlab\.com/([^/]+)/([^/]+)/?.*})
      return if user.nil?

      odebug "Auditing GitLab repo"

      error = SharedAudits.gitlab(user, repo)
      add_error error if error
    end

    sig { void }
    def check_bitbucket_repository
      return unless new_cask?

      user, repo = get_repo_data(%r{https?://bitbucket\.org/([^/]+)/([^/]+)/?.*})
      return if user.nil?

      odebug "Auditing Bitbucket repo"

      error = SharedAudits.bitbucket(user, repo)
      add_error error if error
    end

    sig { void }
    def check_denylist
      return unless cask.tap
      return unless cask.tap.official?
      return unless (reason = Denylist.reason(cask.token))

      add_error "#{cask.token} is not allowed: #{reason}"
    end

    sig { void }
    def check_reverse_migration
      return unless new_cask?
      return unless cask.tap
      return unless cask.tap.official?
      return unless cask.tap.tap_migrations.key?(cask.token)

      add_error "#{cask.token} is listed in tap_migrations.json"
    end

    sig { void }
    def check_https_availability
      return unless download

      if cask.url && !cask.url.using
        validate_url_for_https_availability(cask.url, "binary URL", cask.token, cask.tap,
                                            user_agents: [cask.url.user_agent])
      end

      if cask.appcast && appcast?
        validate_url_for_https_availability(cask.appcast, "appcast URL", cask.token, cask.tap, check_content: true)
      end

      return unless cask.homepage

      validate_url_for_https_availability(cask.homepage, "homepage URL", cask.token, cask.tap,
                                          user_agents:   [:browser, :default],
                                          check_content: true,
                                          strict:        strict?)
    end

    # sig {
    #   params(url_to_check: T.any(String, URL), url_type: String, cask_token: String, tap: Tap,
    #          options: T.untyped).void
    # }
    def validate_url_for_https_availability(url_to_check, url_type, cask_token, tap, **options)
      problem = curl_check_http_content(url_to_check.to_s, url_type, **options)
      exception = tap&.audit_exception(:secure_connection_audit_skiplist, cask_token, url_to_check.to_s)

      if problem
        add_error problem unless exception
      elsif exception
        add_error "#{url_to_check} is in the secure connection audit skiplist but does not need to be skipped"
      end
    end

    sig { params(regex: T.any(String, Regexp)).returns(T.nilable(T::Array[String])) }
    def get_repo_data(regex)
      return unless online?

      _, user, repo = *regex.match(cask.url.to_s)
      _, user, repo = *regex.match(cask.homepage) unless user
      _, user, repo = *regex.match(cask.appcast.to_s) unless user
      return if !user || !repo

      repo.gsub!(/.git$/, "")

      [user, repo]
    end

    sig {
      params(regex: T.any(String, Regexp), valid_formats_array: T::Array[T.any(String, Regexp)]).returns(T::Boolean)
    }
    def bad_url_format?(regex, valid_formats_array)
      return false unless cask.url.to_s.match?(regex)

      valid_formats_array.none? { |format| cask.url.to_s =~ format }
    end
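
    # For illustration (hypothetical project name "foo"): a URL such as
    # "https://sourceforge.net/projects/foo/files/latest/download" passes the
    # check below, while "https://sourceforge.net/projects/foo/files/foo.dmg"
    # would be flagged as an incorrect SourceForge URL format.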
    sig { returns(T::Boolean) }
    def bad_sourceforge_url?
      bad_url_format?(/sourceforge/,
                      [
                        %r{\Ahttps://sourceforge\.net/projects/[^/]+/files/latest/download\Z},
                        %r{\Ahttps://downloads\.sourceforge\.net/(?!(project|sourceforge)/)},
                      ])
    end

    sig { returns(T::Boolean) }
    def bad_osdn_url?
      bad_url_format?(/osd/, [%r{\Ahttps?://([^/]+.)?dl\.osdn\.jp/}])
    end

    # sig { returns(String) }
    def homepage
      URI(cask.homepage.to_s).host
    end

    # sig { returns(String) }
    def domain
      URI(cask.url.to_s).host
    end
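
    # A rough example of the comparison below (hypothetical domains): a url of
    # "https://downloads.example.com/foo.dmg" and a homepage of
    # "https://www.example.com/" both reduce to "example.com", so they are
    # treated as matching.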
    sig { returns(T::Boolean) }
    def url_match_homepage?
      host = cask.url.to_s
      host_uri = URI(host)
      host = if host.match?(/:\d/) && host_uri.port != 80
        "#{host_uri.host}:#{host_uri.port}"
      else
        host_uri.host
      end
      home = homepage.downcase
      if (split_host = host.split(".")).length >= 3
        host = split_host[-2..].join(".")
      end
      if (split_home = homepage.split(".")).length >= 3
        home = split_home[-2..].join(".")
      end
      host == home
    end

    # sig { params(url: String).returns(String) }
    def strip_url_scheme(url)
      url.sub(%r{^[^:/]+://(www\.)?}, "")
    end

    # sig { returns(String) }
    def url_from_verified
      strip_url_scheme(cask.url.verified)
    end
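
    # A sketch of the matching below, with hypothetical values: for
    #   url "https://github.com/foo/bar/releases/download/v1.0/bar.dmg",
    #       verified: "github.com/foo/bar/"
    # the verified domain and path prefix both match the download URL,
    # so verified_matches_url? returns true.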
    sig { returns(T::Boolean) }
    def verified_matches_url?
      url_domain, url_path = strip_url_scheme(cask.url.to_s).split("/", 2)
      verified_domain, verified_path = url_from_verified.split("/", 2)

      (url_domain == verified_domain || (verified_domain && url_domain&.end_with?(".#{verified_domain}"))) &&
        (!verified_path || url_path&.start_with?(verified_path))
    end

    sig { returns(T::Boolean) }
    def verified_present?
      cask.url.verified.present?
    end

    sig { returns(T::Boolean) }
    def file_url?
      URI(cask.url.to_s).scheme == "file"
    end

    sig { returns(T::Boolean) }
    def block_url_offline?
      return false if online?

      cask.url.from_block?
    end

    sig { returns(Tap) }
    def core_tap
      @core_tap ||= CoreTap.instance
    end

    # sig { returns(T::Array[String]) }
    def core_formula_names
      core_tap.formula_names
    end

    sig { returns(String) }
    def core_formula_url
      "#{core_tap.default_remote}/blob/HEAD/Formula/#{cask.token}.rb"
    end
  end
end