require "open3"

def curl_executable
  @curl ||= [
    ENV["HOMEBREW_CURL"],
    which("curl"),
    "/usr/bin/curl",
  ].compact.map { |c| Pathname(c) }.find(&:executable?)
  raise "no executable curl was found" unless @curl

  @curl
end
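
# Illustrative sketch (not part of the original file): the resolved Pathname can
# be invoked like any other executable. This assumes HOMEBREW_CURL is unset, so
# the lookup falls back to `which("curl")` and finally to /usr/bin/curl.
#
#   system curl_executable.to_s, "--version"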

def curl_args(*extra_args, show_output: false, user_agent: :default)
  args = []

  # do not load .curlrc unless requested (must be the first argument)
  args << "-q" unless ENV["HOMEBREW_CURLRC"]

  args << "--show-error"

  args << "--user-agent" << case user_agent
  when :browser, :fake
    HOMEBREW_USER_AGENT_FAKE_SAFARI
  when :default
    HOMEBREW_USER_AGENT_CURL
  else
    user_agent
  end

  unless show_output
    args << "--fail"
    args << "--progress-bar" unless ARGV.verbose?
    args << "--verbose" if ENV["HOMEBREW_CURL_VERBOSE"]
    args << "--silent" unless $stdout.tty?
  end

  args + extra_args
end
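
# Illustrative sketch (assumed environment, not part of the original file): with
# HOMEBREW_CURLRC and HOMEBREW_CURL_VERBOSE unset, a non-verbose run and a
# non-TTY stdout, a call such as
#
#   curl_args("--head", "https://example.com", user_agent: :browser)
#
# builds roughly:
#
#   ["-q", "--show-error",
#    "--user-agent", HOMEBREW_USER_AGENT_FAKE_SAFARI,
#    "--fail", "--progress-bar", "--silent",
#    "--head", "https://example.com"]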

def curl(*args)
  # SSL_CERT_FILE can be incorrectly set by users or portable-ruby and screw
  # with SSL downloads so unset it here.
  system_command! curl_executable,
                  args:         curl_args(*args),
                  print_stdout: true,
                  env:          { "SSL_CERT_FILE" => nil }
end
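
# Illustrative sketch (not part of the original file): because `curl` uses
# system_command!, it raises on a non-zero exit, so a fetch that must succeed
# can be written as below. The URL and output path are placeholders.
#
#   curl "--location", "--output", "/tmp/example.html", "https://example.com"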

def curl_download(*args, to: nil, **options)
  destination = Pathname(to)
  destination.dirname.mkpath

  range_stdout = curl_output("--location", "--range", "0-1",
                             "--dump-header", "-",
                             "--write-out", "%{http_code}",
                             "--output", "/dev/null", *args, **options).stdout
  headers, _, http_status = range_stdout.partition("\r\n\r\n")

  supports_partial_download = http_status.to_i == 206 # Partial Content
  if supports_partial_download &&
     destination.exist? &&
     destination.size == %r{^.*Content-Range: bytes \d+-\d+/(\d+)\r\n.*$}m.match(headers)&.[](1)&.to_i
    return # We've already downloaded all the bytes
  end

  continue_at = if destination.exist? && supports_partial_download
    "-"
  else
    0
  end

  curl("--location", "--remote-time", "--continue-at", continue_at.to_s, "--output", destination, *args, **options)
end
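
# Illustrative sketch (not part of the original file): `to:` may be a String or
# Pathname; the helper creates the parent directory, skips the transfer when the
# ranged probe shows the file is already complete, and otherwise resumes with
# `--continue-at -` when the server answered 206. The URL and path below are
# placeholders.
#
#   curl_download "https://example.com/archive.tar.gz",
#                 to: "/tmp/archive.tar.gz"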

def curl_output(*args, **options)
  system_command(curl_executable,
                 args:         curl_args(*args, show_output: true, **options),
                 print_stderr: false)
end
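
# Illustrative sketch (not part of the original file): unlike `curl`, this does
# not raise on failure and captures output instead of printing it, so callers
# can inspect the result, mirroring the `.stdout` and destructuring usage seen
# elsewhere in this file.
#
#   result = curl_output("--head", "--location", "https://example.com")
#   puts result.stdout
#
#   output, = curl_output("--head", "--location", "https://example.com")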

def curl_check_http_content(url, user_agents: [:default], check_content: false, strict: false)
  return unless url.start_with? "http"

  details = nil
  user_agent = nil
  hash_needed = url.start_with?("http:")
  user_agents.each do |ua|
    details = curl_http_content_headers_and_checksum(url, hash_needed: hash_needed, user_agent: ua)
    user_agent = ua
    break if details[:status].to_s.start_with?("2")
  end

  unless details[:status]
    # Hack around https://github.com/Homebrew/brew/issues/3199
    return if MacOS.version == :el_capitan

    return "The URL #{url} is not reachable"
  end

  unless details[:status].start_with? "2"
    return "The URL #{url} is not reachable (HTTP status code #{details[:status]})"
  end

  if url.start_with?("https://") && ENV["HOMEBREW_NO_INSECURE_REDIRECT"] &&
     !details[:final_url].start_with?("https://")
    return "The URL #{url} redirects back to HTTP"
  end

  return unless hash_needed

  secure_url = url.sub "http", "https"
  secure_details =
    curl_http_content_headers_and_checksum(secure_url, hash_needed: true, user_agent: user_agent)

  if !details[:status].to_s.start_with?("2") ||
     !secure_details[:status].to_s.start_with?("2")
    return
  end

  etag_match = details[:etag] &&
               details[:etag] == secure_details[:etag]
  content_length_match =
    details[:content_length] &&
    details[:content_length] == secure_details[:content_length]
  file_match = details[:file_hash] == secure_details[:file_hash]

  if (etag_match || content_length_match || file_match) &&
     secure_details[:final_url].start_with?("https://") &&
     url.start_with?("http://")
    return "The URL #{url} should use HTTPS rather than HTTP"
  end

  return unless check_content

  no_protocol_file_contents = %r{https?:\\?/\\?/}
  details[:file] = details[:file].gsub(no_protocol_file_contents, "/")
  secure_details[:file] = secure_details[:file].gsub(no_protocol_file_contents, "/")

  # Check for the same content after removing all protocols
  if (details[:file] == secure_details[:file]) &&
     secure_details[:final_url].start_with?("https://") &&
     url.start_with?("http://")
    return "The URL #{url} should use HTTPS rather than HTTP"
  end

  return unless strict

  # Same size, different content after normalization
  # (typical causes: Generated ID, Timestamp, Unix time)
  if details[:file].length == secure_details[:file].length
    return "The URL #{url} may be able to use HTTPS rather than HTTP. Please verify it in a browser."
  end

  lenratio = (100 * secure_details[:file].length / details[:file].length).to_i
  return unless (90..110).cover?(lenratio)

  "The URL #{url} may be able to use HTTPS rather than HTTP. Please verify it in a browser."
end
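
# Illustrative sketch (not part of the original file): an audit-style caller
# treats a nil return as "no problem found" and a String as a user-facing
# warning. The URL is a placeholder.
#
#   problem = curl_check_http_content("http://example.com",
#                                     user_agents:   [:browser, :default],
#                                     check_content: true)
#   puts problem if problem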

def curl_http_content_headers_and_checksum(url, hash_needed: false, user_agent: :default)
  max_time = hash_needed ? "600" : "25"
  output, = curl_output(
    "--connect-timeout", "15", "--include", "--max-time", max_time, "--location", url,
    user_agent: user_agent
  )

  status_code = :unknown
  while status_code == :unknown || status_code.to_s.start_with?("3")
    headers, _, output = output.partition("\r\n\r\n")
    status_code = headers[%r{HTTP\/.* (\d+)}, 1]
    final_url = headers[/^Location:\s*(.*)$/i, 1]&.chomp
  end

  output_hash = Digest::SHA256.digest(output) if hash_needed

  final_url ||= url

  {
    url:            url,
    final_url:      final_url,
    status:         status_code,
    etag:           headers[%r{ETag: ([wW]\/)?"(([^"]|\\")*)"}, 2],
    content_length: headers[/Content-Length: (\d+)/, 1],
    file_hash:      output_hash,
    file:           output,
  }
end
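
# Illustrative sketch (not part of the original file): the returned Hash can be
# inspected directly; :file_hash is only populated when hash_needed is true.
# The URL is a placeholder.
#
#   details = curl_http_content_headers_and_checksum("https://example.com",
#                                                    hash_needed: true)
#   puts details[:status]      # e.g. "200"
#   puts details[:final_url]
#   puts details[:file_hash]   # SHA-256 digest of the body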