2019-04-19 15:38:03 +09:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2016-07-12 19:46:29 +01:00
|
|
|
require "open3"
|
2016-06-03 13:05:18 +01:00
|
|
|
|
2017-08-08 18:10:13 +02:00
|
|
|
# Locates a usable `curl` binary, preferring (in order) `HOMEBREW_CURL`,
# whatever `which` finds on PATH, and finally the system `/usr/bin/curl`.
# The result is memoized in `@curl`.
#
# @return [Pathname] path to an executable curl
# @raise [RuntimeError] when none of the candidates is executable
def curl_executable
  @curl ||= begin
    candidates = [ENV["HOMEBREW_CURL"], which("curl"), "/usr/bin/curl"]
    candidates.compact
              .map { |candidate| Pathname(candidate) }
              .detect(&:executable?)
  end
  raise "no executable curl was found" if @curl.nil?

  @curl
end
|
2016-06-03 13:05:18 +01:00
|
|
|
|
2017-08-08 18:10:13 +02:00
|
|
|
# Builds the common argument list passed to every curl invocation.
#
# @param extra_args caller-supplied arguments appended at the end
# @param show_output [Boolean] when true, omit the progress/verbosity flags
#   so the raw output can be captured
# @param user_agent [Symbol, String] `:default`, `:browser`/`:fake`, or a
#   literal user-agent string
# @return [Array<String>] the assembled curl arguments
def curl_args(*extra_args, show_output: false, user_agent: :default)
  agent = case user_agent
  when :browser, :fake then HOMEBREW_USER_AGENT_FAKE_SAFARI
  when :default then HOMEBREW_USER_AGENT_CURL
  else user_agent
  end

  args = []

  # Keep `.curlrc` disabled unless explicitly requested; `-q` only works as
  # the very first argument.
  args.push("-q") if ENV["HOMEBREW_CURLRC"].nil?

  args.push("--globoff", "--show-error", "--user-agent", agent)

  unless show_output
    args.push("--fail")
    args.push("--progress-bar") unless ARGV.verbose?
    args.push("--verbose") if ENV["HOMEBREW_CURL_VERBOSE"]
    args.push("--silent") unless $stdout.tty?
  end

  retries = ENV["HOMEBREW_CURL_RETRIES"]
  args.push("--retry", retries) if retries

  args + extra_args
end
|
|
|
|
|
2019-07-13 23:31:56 +08:00
|
|
|
# Runs curl via `system_command!`, streaming stdout and raising on failure.
#
# @param args positional curl arguments (URLs, flags)
# @param secrets [Array] values to redact from logged output
# @param options forwarded to {#curl_args}
def curl(*args, secrets: [], **options)
  # A stray SSL_CERT_FILE (set incorrectly by users or portable-ruby) can
  # break SSL downloads, so it is explicitly unset for the child process.
  command_options = {
    args:         curl_args(*args, **options),
    print_stdout: true,
    env:          { "SSL_CERT_FILE" => nil },
    secrets:      secrets,
  }
  system_command!(curl_executable, **command_options)
end
|
|
|
|
|
2018-09-06 16:25:08 +02:00
|
|
|
# Downloads a URL (passed via `args`) to the path given by `to:`, resuming a
# partial download when the server supports byte ranges, and retrying once
# with HTTP/1.1 to work around a known curl HTTP/2 bug.
#
# NOTE(review): `to:` is effectively required — `Pathname(nil)` raises
# TypeError despite the `nil` default; confirm callers always pass it.
def curl_download(*args, to: nil, **options)
  destination = Pathname(to)
  destination.dirname.mkpath

  # Probe with a 2-byte range request; `--write-out` appends the HTTP status
  # code after the dumped headers so both can be read from stdout.
  range_stdout = curl_output("--location", "--range", "0-1",
                             "--dump-header", "-",
                             "--write-out", "%\{http_code}",
                             "--output", "/dev/null", *args, **options).stdout
  headers, _, http_status = range_stdout.partition("\r\n\r\n")

  supports_partial_download = http_status.to_i == 206 # Partial Content
  # If the file on disk already has the full length advertised by
  # `Content-Range`, there is nothing left to fetch.
  if supports_partial_download &&
     destination.exist? &&
     destination.size == %r{^.*Content-Range: bytes \d+-\d+/(\d+)\r\n.*$}m.match(headers)&.[](1)&.to_i
    return # We've already downloaded all the bytes
  end

  # `--continue-at -` tells curl to derive the resume offset from the
  # existing file's size; `0` forces a download from the start.
  continue_at = if destination.exist? && supports_partial_download
    "-"
  else
    0
  end

  curl("--location", "--remote-time", "--continue-at", continue_at.to_s, "--output", destination, *args, **options)
rescue ErrorDuringExecution => e
  # This is a workaround for https://github.com/curl/curl/issues/1618.
  raise unless e.status.exitstatus == 56 # Unexpected EOF

  # Already retried with HTTP/1.1, so the failure has some other cause.
  raise if args.include?("--http1.1")

  out = curl_output("-V").stdout

  # If `curl` doesn't support HTTP2, the exception is unrelated to this bug.
  raise unless out.include?("HTTP2")

  # The bug is fixed in `curl` >= 7.60.0.
  curl_version = out[/curl (\d+(\.\d+)+)/, 1]
  raise if Gem::Version.new(curl_version) >= Gem::Version.new("7.60.0")

  args << "--http1.1"
  retry
end
|
|
|
|
|
2019-07-13 23:31:56 +08:00
|
|
|
# Runs curl and captures its output instead of streaming it, suppressing
# stderr printing; does not raise on non-zero exit (uses `system_command`,
# not `system_command!`).
#
# @param args positional curl arguments
# @param secrets [Array] values to redact from logged output
# @param options forwarded to {#curl_args}
def curl_output(*args, secrets: [], **options)
  curl_arguments = curl_args(*args, show_output: true, **options)
  system_command(curl_executable,
                 args:         curl_arguments,
                 print_stderr: false,
                 secrets:      secrets)
end
|
2017-12-03 14:02:55 +01:00
|
|
|
|
2019-01-28 08:10:37 +01:00
|
|
|
# Audits a URL: checks it is reachable and, for `http:` URLs, whether an
# equivalent `https:` URL serves the same content and should be used instead.
#
# @param url [String] URL to check; non-HTTP(S) URLs are ignored
# @param user_agents [Array] user agents to try in order until one succeeds
# @param check_content [Boolean] also compare page contents (not just
#   ETag/length/hash) between the HTTP and HTTPS variants
# @param strict [Boolean] additionally flag near-matches after normalization
# @return [String, nil] a problem description, or nil when no problem found
def curl_check_http_content(url, user_agents: [:default], check_content: false, strict: false)
  return unless url.start_with? "http"

  details = nil
  user_agent = nil
  # A content hash is only needed for `http:` URLs, to compare against the
  # `https:` variant fetched below.
  hash_needed = url.start_with?("http:")
  # Try each user agent until one yields an OK status; keep the last details.
  user_agents.each do |ua|
    details = curl_http_content_headers_and_checksum(url, hash_needed: hash_needed, user_agent: ua)
    user_agent = ua
    break if http_status_ok?(details[:status])
  end

  unless details[:status]
    # Hack around https://github.com/Homebrew/brew/issues/3199
    return if MacOS.version == :el_capitan

    return "The URL #{url} is not reachable"
  end

  unless http_status_ok?(details[:status])
    return "The URL #{url} is not reachable (HTTP status code #{details[:status]})"
  end

  if url.start_with?("https://") && ENV["HOMEBREW_NO_INSECURE_REDIRECT"] &&
     !details[:final_url].start_with?("https://")
    return "The URL #{url} redirects back to HTTP"
  end

  return unless hash_needed

  # Build the HTTPS twin of the URL (replaces the first "http" occurrence,
  # i.e. the scheme) and fetch it with the user agent that worked above.
  secure_url = url.sub "http", "https"
  secure_details =
    curl_http_content_headers_and_checksum(secure_url, hash_needed: true, user_agent: user_agent)

  if !http_status_ok?(details[:status]) ||
     !http_status_ok?(secure_details[:status])
    return
  end

  # Any one of these matching is treated as "same content" evidence.
  etag_match = details[:etag] &&
               details[:etag] == secure_details[:etag]
  content_length_match =
    details[:content_length] &&
    details[:content_length] == secure_details[:content_length]
  file_match = details[:file_hash] == secure_details[:file_hash]

  if (etag_match || content_length_match || file_match) &&
     secure_details[:final_url].start_with?("https://") &&
     url.start_with?("http://")
    return "The URL #{url} should use HTTPS rather than HTTP"
  end

  return unless check_content

  # Matches `http://`/`https://` (optionally backslash-escaped, as in
  # embedded JSON) so scheme differences are ignored when comparing bodies.
  no_protocol_file_contents = %r{https?:\\?/\\?/}
  details[:file] = details[:file].gsub(no_protocol_file_contents, "/")
  secure_details[:file] = secure_details[:file].gsub(no_protocol_file_contents, "/")

  # Check for the same content after removing all protocols
  if (details[:file] == secure_details[:file]) &&
     secure_details[:final_url].start_with?("https://") &&
     url.start_with?("http://")
    return "The URL #{url} should use HTTPS rather than HTTP"
  end

  return unless strict

  # Same size, different content after normalization
  # (typical causes: Generated ID, Timestamp, Unix time)
  if details[:file].length == secure_details[:file].length
    return "The URL #{url} may be able to use HTTPS rather than HTTP. Please verify it in a browser."
  end

  # Within ±10% length is considered "probably the same page".
  lenratio = (100 * secure_details[:file].length / details[:file].length).to_i
  return unless (90..110).cover?(lenratio)

  "The URL #{url} may be able to use HTTPS rather than HTTP. Please verify it in a browser."
end
|
|
|
|
|
|
|
|
# Fetches `url` (following redirects) and returns a Hash describing the
# response: `:url`, `:final_url` (last redirect target, or `url` when there
# was no redirect), `:status`, `:etag`, `:content_length`, `:file` (body
# text left on stdout), and `:file_hash` (SHA-256 of the body when
# `hash_needed` is true).
#
# Fix: previously `final_url` was reassigned on every loop iteration, so the
# final non-3xx response (which has no `Location` header) overwrote the
# parsed redirect target with nil, and `:final_url` always fell back to the
# original `url`. We now only record non-nil `Location` values.
def curl_http_content_headers_and_checksum(url, hash_needed: false, user_agent: :default)
  file = Tempfile.new.tap(&:close)

  # Hashing requires downloading the whole body, so allow far more time.
  max_time = hash_needed ? "600" : "25"
  output, = curl_output(
    "--dump-header", "-", "--output", file.path, "--include", "--location",
    "--connect-timeout", "15", "--max-time", max_time, url,
    user_agent: user_agent
  )

  # Peel one header block per response off stdout until a non-redirect
  # (non-3xx) status is seen; what remains in `output` is body text.
  status_code = :unknown
  final_url = nil
  while status_code == :unknown || status_code.to_s.start_with?("3")
    headers, _, output = output.partition("\r\n\r\n")
    status_code = headers[%r{HTTP\/.* (\d+)}, 1]
    location = headers[/^Location:\s*(.*)$/i, 1]&.chomp
    # Keep the most recent redirect target; the final response has none.
    final_url = location if location
  end

  output_hash = Digest::SHA256.file(file.path) if hash_needed

  # No redirect was seen: the final URL is the one we requested.
  final_url ||= url

  {
    url:            url,
    final_url:      final_url,
    status:         status_code,
    etag:           headers[%r{ETag: ([wW]\/)?"(([^"]|\\")*)"}, 2],
    content_length: headers[/Content-Length: (\d+)/, 1],
    file_hash:      output_hash,
    file:           output,
  }
ensure
  file.unlink
end
|
2019-09-18 10:32:13 +01:00
|
|
|
|
|
|
|
# True when `status` coerces to an informational (1xx) or successful (2xx)
# HTTP status code; nil coerces to 0 and is therefore not OK.
def http_status_ok?(status)
  status.to_i.between?(100, 299)
end
|