mirror of https://github.com/Homebrew/brew.git

commit 5b3bbb76c9 (parent: 4ffc8b137b)

    Separate staging from download.
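At a glance, the commit removes the per-format unpacking code from the download strategies (and the Pathname#compression_type sniffing helper) and replaces both with a new UnpackStrategy class hierarchy: each strategy declares can_extract? against a fixed-length magic-number prefix (or, for VCS checkouts, a marker directory such as .git), UnpackStrategy.detect picks the first match, and AbstractDownloadStrategy#stage now just calls detect(cached_location).extract(...). The sketch below is a minimal, self-contained illustration of that dispatch pattern, not Homebrew's code; the Sketch* class names, sketch_detect, and the sample paths are invented for the example.

    require "fileutils"
    require "pathname"

    # Illustrative only: a stripped-down version of the detect/extract dispatch
    # introduced by this commit. Strategy classes declare can_extract? against a
    # fixed-length magic-number prefix; detection falls back to a plain copy.
    class SketchStrategy
      attr_reader :path

      def initialize(path)
        @path = Pathname(path).expand_path
      end

      def extract(to: Dir.pwd)
        unpack_dir = Pathname(to).expand_path
        unpack_dir.mkpath
        extract_to_dir(unpack_dir)
      end
    end

    class SketchTarStrategy < SketchStrategy
      def self.can_extract?(magic_number)
        magic_number.match?(/\A.{257}ustar/n) # POSIX tar magic sits at offset 257
      end

      def extract_to_dir(unpack_dir)
        system("tar", "xf", path.to_s, "-C", unpack_dir.to_s) || raise("tar failed")
      end
    end

    class SketchCopyStrategy < SketchStrategy
      def self.can_extract?(_magic_number)
        false # never auto-detected; used only as the fallback
      end

      def extract_to_dir(unpack_dir)
        FileUtils.cp path, unpack_dir/path.basename, preserve: true
      end
    end

    def sketch_detect(path)
      # 262 bytes is enough for the longest magic number checked here (tar).
      magic_number = File.binread(path, 262).to_s
      strategy = [SketchTarStrategy].detect { |s| s.can_extract?(magic_number) }
      (strategy || SketchCopyStrategy).new(path)
    end

    # e.g. sketch_detect("archive.tar").extract(to: "unpacked")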
@@ -1,6 +1,7 @@
 require "json"
 require "rexml/document"
 require "time"
+require "unpack_strategy"
 
 class AbstractDownloadStrategy
   extend Forwardable
@@ -45,7 +46,10 @@ class AbstractDownloadStrategy
   # Unpack {#cached_location} into the current working directory, and possibly
   # chdir into the newly-unpacked directory.
   # Unlike {Resource#stage}, this does not take a block.
-  def stage; end
+  def stage
+    UnpackStrategy.detect(cached_location)
+                  .extract(basename: basename_without_params)
+  end
 
   # @!attribute [r] cached_location
   # The path to the cached file or directory associated with the resource.
@@ -63,22 +67,6 @@ class AbstractDownloadStrategy
     rm_rf(cached_location)
   end
 
-  def expand_safe_system_args(args)
-    args = args.dup
-    args.each_with_index do |arg, ii|
-      next unless arg.is_a? Hash
-      if ARGV.verbose?
-        args.delete_at ii
-      else
-        args[ii] = arg[:quiet_flag]
-      end
-      return args
-    end
-    # 2 as default because commands are eg. svn up, git pull
-    args.insert(2, "-q") unless ARGV.verbose?
-    args
-  end
-
   def safe_system(*args)
     if @shutup
       quiet_system(*args) || raise(ErrorDuringExecution.new(args.shift, args))
@@ -87,8 +75,9 @@ class AbstractDownloadStrategy
     end
   end
 
-  def quiet_safe_system(*args)
-    safe_system(*expand_safe_system_args(args))
+  def basename_without_params
+    # Strip any ?thing=wad out of .c?thing=wad style extensions
+    File.basename(@url)[/[^?]+/]
   end
 end
 
@@ -152,7 +141,7 @@ class VCSDownloadStrategy < AbstractDownloadStrategy
   private
 
   def cache_tag
-    "__UNKNOWN__"
+    raise NotImplementedError
   end
 
   def cache_filename
@@ -160,7 +149,7 @@ class VCSDownloadStrategy < AbstractDownloadStrategy
   end
 
   def repo_valid?
-    true
+    raise NotImplementedError
   end
 
   def clone_repo; end
@@ -177,40 +166,8 @@ end
 
 class AbstractFileDownloadStrategy < AbstractDownloadStrategy
   def stage
-    path = cached_location
-    unpack_dir = Pathname.pwd
-
-    case type = path.compression_type
-    when :zip
-      safe_system "unzip", "-qq", path, "-d", unpack_dir
-      chdir
-    when :gzip_only
-      FileUtils.cp path, unpack_dir, preserve: true
-      safe_system "gunzip", "-q", "-N", unpack_dir/path.basename
-    when :bzip2_only
-      FileUtils.cp path, unpack_dir, preserve: true
-      safe_system "bunzip2", "-q", unpack_dir/path.basename
-    when :gzip, :bzip2, :xz, :compress, :tar
-      if type == :xz && DependencyCollector.tar_needs_xz_dependency?
-        pipe_to_tar "#{HOMEBREW_PREFIX}/opt/xz/bin/xz", unpack_dir
-      else
-        safe_system "tar", "xf", path, "-C", unpack_dir
-      end
-      chdir
-    when :lzip
-      pipe_to_tar "#{HOMEBREW_PREFIX}/opt/lzip/bin/lzip", unpack_dir
-      chdir
-    when :lha
-      safe_system "#{HOMEBREW_PREFIX}/opt/lha/bin/lha", "xq2w=#{unpack_dir}", path
-    when :xar
-      safe_system "xar", "-x", "-f", path, "-C", unpack_dir
-    when :rar
-      safe_system "unrar", "x", "-inul", path, unpack_dir
-    when :p7zip
-      safe_system "7zr", "x", "-y", "-bd", "-bso0", path, "-o#{unpack_dir}"
-    else
-      cp path, unpack_dir/basename_without_params, preserve: true
-    end
+    super
+    chdir
   end
 
   private
@@ -227,22 +184,6 @@ class AbstractFileDownloadStrategy < AbstractDownloadStrategy
     end
   end
 
-  def pipe_to_tar(tool, unpack_dir)
-    path = cached_location
-
-    Utils.popen_read(tool, "-dc", path) do |rd|
-      Utils.popen_write("tar", "xf", "-", "-C", unpack_dir) do |wr|
-        buf = ""
-        wr.write(buf) while rd.read(16384, buf)
-      end
-    end
-  end
-
-  def basename_without_params
-    # Strip any ?thing=wad out of .c?thing=wad style extensions
-    File.basename(@url)[/[^?]+/]
-  end
-
   def ext
     # We need a Pathname because we've monkeypatched extname to support double
     # extensions (e.g. tar.gz).
@@ -384,7 +325,8 @@ end
 # Useful for installing jars.
 class NoUnzipCurlDownloadStrategy < CurlDownloadStrategy
   def stage
-    cp cached_location, basename_without_params, preserve: true
+    UncompressedUnpackStrategy.new(cached_location)
+                              .extract(basename: basename_without_params)
   end
 end
 
@@ -607,11 +549,6 @@ class SubversionDownloadStrategy < VCSDownloadStrategy
     super
   end
 
-  def stage
-    super
-    safe_system "svn", "export", "--force", cached_location, Dir.pwd
-  end
-
   def source_modified_time
     xml = REXML::Document.new(Utils.popen_read("svn", "info", "--xml", cached_location.to_s))
     Time.parse REXML::XPath.first(xml, "//date/text()").to_s
@@ -647,7 +584,7 @@ class SubversionDownloadStrategy < VCSDownloadStrategy
       args << "-r" << revision
     end
     args << "--ignore-externals" if ignore_externals
-    quiet_safe_system(*args)
+    safe_system(*args)
   end
 
   def cache_tag
@@ -692,11 +629,6 @@ class GitDownloadStrategy < VCSDownloadStrategy
     @shallow = meta.fetch(:shallow) { true }
   end
 
-  def stage
-    super
-    cp_r File.join(cached_location, "."), Dir.pwd, preserve: true
-  end
-
   def source_modified_time
     Time.parse Utils.popen_read("git", "--git-dir", git_dir, "show", "-s", "--format=%cD")
   end
@@ -929,10 +861,6 @@ class CVSDownloadStrategy < VCSDownloadStrategy
     end
   end
 
-  def cvspath
-    @cvspath ||= which("cvs", PATH.new("/usr/bin", Formula["cvs"].opt_bin, ENV["PATH"]))
-  end
-
   def source_modified_time
     # Filter CVS's files because the timestamp for each of them is the moment
     # of clone.
@@ -946,10 +874,6 @@ class CVSDownloadStrategy < VCSDownloadStrategy
     max_mtime
   end
 
-  def stage
-    cp_r File.join(cached_location, "."), Dir.pwd, preserve: true
-  end
-
   private
 
   def cache_tag
@@ -960,16 +884,23 @@ class CVSDownloadStrategy < VCSDownloadStrategy
     (cached_location/"CVS").directory?
   end
 
+  def quiet_flag
+    "-Q" unless ARGV.verbose?
+  end
+
   def clone_repo
-    HOMEBREW_CACHE.cd do
+    with_cvs_env do
       # Login is only needed (and allowed) with pserver; skip for anoncvs.
-      quiet_safe_system cvspath, { quiet_flag: "-Q" }, "-d", @url, "login" if @url.include? "pserver"
-      quiet_safe_system cvspath, { quiet_flag: "-Q" }, "-d", @url, "checkout", "-d", cache_filename, @module
+      safe_system "cvs", *quiet_flag, "-d", @url, "login" if @url.include? "pserver"
+      safe_system "cvs", *quiet_flag, "-d", @url, "checkout", "-d", cached_location.basename, @module,
+                  chdir: cached_location.dirname
     end
   end
 
   def update
-    cached_location.cd { quiet_safe_system cvspath, { quiet_flag: "-Q" }, "up" }
+    with_cvs_env do
+      safe_system "cvs", *quiet_flag, "update", chdir: cached_location
+    end
   end
 
   def split_url(in_url)
@@ -978,6 +909,12 @@ class CVSDownloadStrategy < VCSDownloadStrategy
     url = parts.join(":")
     [mod, url]
   end
 
+  def with_cvs_env
+    with_env PATH => PATH.new("/usr/bin", Formula["cvs"].opt_bin, ENV["PATH"]) do
+      yield
+    end
+  end
+
 end
 
 class MercurialDownloadStrategy < VCSDownloadStrategy
@@ -986,30 +923,16 @@ class MercurialDownloadStrategy < VCSDownloadStrategy
     @url = @url.sub(%r{^hg://}, "")
   end
 
-  def hgpath
-    @hgpath ||= which("hg", PATH.new(Formula["mercurial"].opt_bin, ENV["PATH"]))
-  end
-
-  def stage
-    super
-
-    dst = Dir.getwd
-    cached_location.cd do
-      if @ref_type && @ref
-        ohai "Checking out #{@ref_type} #{@ref}" if @ref_type && @ref
-        safe_system hgpath, "archive", "--subrepos", "-y", "-r", @ref, "-t", "files", dst
-      else
-        safe_system hgpath, "archive", "--subrepos", "-y", "-t", "files", dst
-      end
-    end
-  end
-
   def source_modified_time
-    Time.parse Utils.popen_read(hgpath, "tip", "--template", "{date|isodate}", "-R", cached_location.to_s)
+    with_hg_env do
+      Time.parse Utils.popen_read("hg", "tip", "--template", "{date|isodate}", "-R", cached_location.to_s)
+    end
   end
 
   def last_commit
-    Utils.popen_read(hgpath, "parent", "--template", "{node|short}", "-R", cached_location.to_s)
+    with_hg_env do
+      Utils.popen_read("hg", "parent", "--template", "{node|short}", "-R", cached_location.to_s)
+    end
   end
 
   private
@@ -1023,12 +946,29 @@ class MercurialDownloadStrategy < VCSDownloadStrategy
   end
 
   def clone_repo
-    safe_system hgpath, "clone", @url, cached_location
+    with_hg_env do
+      safe_system "hg", "clone", @url, cached_location
+    end
   end
 
   def update
-    cached_location.cd do
-      safe_system hgpath, "pull", "--update"
+    with_hg_env do
+      safe_system "hg", "--cwd", cached_location, "pull", "--update"
+
+      update_args = if @ref_type && @ref
+        ohai "Checking out #{@ref_type} #{@ref}"
+        [@ref]
+      else
+        ["--clean"]
+      end
+
+      safe_system "hg", "--cwd", cached_location, "update", *update_args
     end
   end
+
+  def with_hg_env
+    with_env PATH => PATH.new(Formula["mercurial"].opt_bin, ENV["PATH"]) do
+      yield
+    end
+  end
 end
@@ -1040,25 +980,18 @@ class BazaarDownloadStrategy < VCSDownloadStrategy
     ENV["BZR_HOME"] = HOMEBREW_TEMP
   end
 
-  def bzrpath
-    @bzrpath ||= which("bzr", PATH.new(Formula["bazaar"].opt_bin, ENV["PATH"]))
-  end
-
-  def stage
-    # The export command doesn't work on checkouts
-    # See https://bugs.launchpad.net/bzr/+bug/897511
-    cp_r File.join(cached_location, "."), Dir.pwd, preserve: true
-    rm_r ".bzr"
-  end
-
   def source_modified_time
-    timestamp = Utils.popen_read(bzrpath, "log", "-l", "1", "--timezone=utc", cached_location.to_s)[/^timestamp: (.+)$/, 1]
+    timestamp = with_bazaar_env do
+      Utils.popen_read("bzr", "log", "-l", "1", "--timezone=utc", cached_location.to_s)[/^timestamp: (.+)$/, 1]
+    end
     raise "Could not get any timestamps from bzr!" if timestamp.to_s.empty?
     Time.parse timestamp
   end
 
   def last_commit
-    Utils.popen_read(bzrpath, "revno", cached_location.to_s).chomp
+    with_bazaar_env do
+      Utils.popen_read("bzr", "revno", cached_location.to_s).chomp
+    end
   end
 
   private
@@ -1072,13 +1005,21 @@ class BazaarDownloadStrategy < VCSDownloadStrategy
   end
 
   def clone_repo
-    # "lightweight" means history-less
-    safe_system bzrpath, "checkout", "--lightweight", @url, cached_location
+    with_bazaar_env do
+      # "lightweight" means history-less
+      safe_system "bzr", "checkout", "--lightweight", @url, cached_location
+    end
   end
 
   def update
-    cached_location.cd do
-      safe_system bzrpath, "update"
+    with_bazaar_env do
+      safe_system "bzr", "update", chdir: cached_location
     end
   end
+
+  def with_bazaar_env
+    with_env "PATH" => PATH.new(Formula["bazaar"].opt_bin, ENV["PATH"]) do
+      yield
+    end
+  end
 end
@@ -1089,23 +1030,22 @@ class FossilDownloadStrategy < VCSDownloadStrategy
     @url = @url.sub(%r{^fossil://}, "")
   end
 
-  def fossilpath
-    @fossilpath ||= which("fossil", PATH.new(Formula["fossil"].opt_bin, ENV["PATH"]))
-  end
-
-  def stage
-    super
-    args = [fossilpath, "open", cached_location]
-    args << @ref if @ref_type && @ref
-    safe_system(*args)
-  end
-
   def source_modified_time
-    Time.parse Utils.popen_read(fossilpath, "info", "tip", "-R", cached_location.to_s)[/^uuid: +\h+ (.+)$/, 1]
+    with_fossil_env do
+      Time.parse Utils.popen_read("fossil", "info", "tip", "-R", cached_location.to_s)[/^uuid: +\h+ (.+)$/, 1]
+    end
   end
 
   def last_commit
-    Utils.popen_read(fossilpath, "info", "tip", "-R", cached_location.to_s)[/^uuid: +(\h+) .+$/, 1]
+    with_fossil_env do
+      Utils.popen_read("fossil", "info", "tip", "-R", cached_location.to_s)[/^uuid: +(\h+) .+$/, 1]
+    end
+  end
+
+  def repo_valid?
+    with_fossil_env do
+      quiet_system "fossil", "branch", "-R", cached_location
+    end
   end
 
   private
@@ -1115,11 +1055,21 @@ class FossilDownloadStrategy < VCSDownloadStrategy
   end
 
   def clone_repo
-    safe_system fossilpath, "clone", @url, cached_location
+    with_fossil_env do
+      safe_system "fossil", "clone", @url, cached_location
+    end
   end
 
   def update
-    safe_system fossilpath, "pull", "-R", cached_location
+    with_fossil_env do
+      safe_system "fossil", "pull", "-R", cached_location
+    end
+  end
+
+  def with_fossil_env
+    with_env "PATH" => PATH.new(Formula["fossil"].opt_bin, ENV["PATH"]) do
+      yield
+    end
   end
 end
 
@@ -263,47 +263,6 @@ class Pathname
     Version.parse(basename)
   end
 
-  # @private
-  def compression_type
-    case extname
-    when ".jar", ".war"
-      # Don't treat jars or wars as compressed
-      return
-    when ".gz"
-      # If the filename ends with .gz not preceded by .tar
-      # then we want to gunzip but not tar
-      return :gzip_only
-    when ".bz2"
-      return :bzip2_only
-    when ".lha", ".lzh"
-      return :lha
-    end
-
-    # Get enough of the file to detect common file types
-    # POSIX tar magic has a 257 byte offset
-    # magic numbers stolen from /usr/share/file/magic/
-    case open("rb") { |f| f.read(262) }
-    when /^PK\003\004/n then :zip
-    when /^\037\213/n then :gzip
-    when /^BZh/n then :bzip2
-    when /^\037\235/n then :compress
-    when /^.{257}ustar/n then :tar
-    when /^\xFD7zXZ\x00/n then :xz
-    when /^LZIP/n then :lzip
-    when /^Rar!/n then :rar
-    when /^7z\xBC\xAF\x27\x1C/n then :p7zip
-    when /^xar!/n then :xar
-    when /^\xed\xab\xee\xdb/n then :rpm
-    else
-      # This code so that bad-tarballs and zips produce good error messages
-      # when they don't unarchive properly.
-      case extname
-      when ".tar.gz", ".tgz", ".tar.bz2", ".tbz" then :tar
-      when ".zip" then :zip
-      end
-    end
-  end
-
   # @private
   def text_executable?
     /^#!\s*\S+/ =~ open("r") { |f| f.read(1024) }
@@ -728,6 +728,8 @@ class FormulaInstaller
         sandbox.allow_write_path(ENV["HOME"]) if ARGV.interactive?
         sandbox.allow_write_temp_and_cache
         sandbox.allow_write_log(formula)
+        sandbox.allow_cvs
+        sandbox.allow_fossil
         sandbox.allow_write_xcode
         sandbox.allow_write_cellar(formula)
         sandbox.exec(*args)
@@ -54,6 +54,15 @@ class Sandbox
     allow_write_path HOMEBREW_CACHE
   end
 
+  def allow_cvs
+    allow_write_path "/Users/#{ENV["USER"]}/.cvspass"
+  end
+
+  def allow_fossil
+    allow_write_path "/Users/#{ENV["USER"]}/.fossil"
+    allow_write_path "/Users/#{ENV["USER"]}/.fossil-journal"
+  end
+
   def allow_write_cellar(formula)
     allow_write_path formula.rack
     allow_write_path formula.etc
@@ -9,25 +9,6 @@ describe AbstractDownloadStrategy do
   let(:resource) { double(Resource, url: url, mirrors: [], specs: specs, version: nil) }
   let(:args) { %w[foo bar baz] }
 
-  describe "#expand_safe_system_args" do
-    it "works with an explicit quiet flag" do
-      args << { quiet_flag: "--flag" }
-      expanded_args = subject.expand_safe_system_args(args)
-      expect(expanded_args).to eq(%w[foo bar baz --flag])
-    end
-
-    it "adds an implicit quiet flag" do
-      expanded_args = subject.expand_safe_system_args(args)
-      expect(expanded_args).to eq(%w[foo bar -q baz])
-    end
-
-    it "does not mutate the arguments" do
-      result = subject.expand_safe_system_args(args)
-      expect(args).to eq(%w[foo bar baz])
-      expect(result).not_to be args
-    end
-  end
-
   specify "#source_modified_time" do
     FileUtils.mktemp "mtime" do
       FileUtils.touch "foo", mtime: Time.now - 10
New binary fixtures (contents not shown):

  Library/Homebrew/test/support/fixtures/test.jar
  Library/Homebrew/test/support/fixtures/test.lha
  Library/Homebrew/test/support/fixtures/test.lz
Library/Homebrew/test/unpack_strategy_spec.rb (new file, 127 lines)

require "unpack_strategy"

describe UnpackStrategy, :focus do
  matcher :be_detected_as_a do |klass|
    match do |expected|
      @detected = described_class.detect(expected)
      @detected.is_a?(klass)
    end

    failure_message do
      <<~EOS
        expected: #{klass}
        detected: #{@detected}
      EOS
    end
  end

  describe "::detect" do
    it "correctly detects JAR files" do
      expect(TEST_FIXTURE_DIR/"test.jar").to be_detected_as_a UncompressedUnpackStrategy
    end

    it "correctly detects ZIP files" do
      expect(TEST_FIXTURE_DIR/"cask/MyFancyApp.zip").to be_detected_as_a ZipUnpackStrategy
    end

    it "correctly detects BZIP2 files" do
      expect(TEST_FIXTURE_DIR/"cask/container.bz2").to be_detected_as_a Bzip2UnpackStrategy
    end

    it "correctly detects GZIP files" do
      expect(TEST_FIXTURE_DIR/"cask/container.gz").to be_detected_as_a GzipUnpackStrategy
    end

    it "correctly detects compressed TAR files" do
      expect(TEST_FIXTURE_DIR/"cask/container.tar.gz").to be_detected_as_a TarUnpackStrategy
    end

    it "correctly detects 7-ZIP files" do
      expect(TEST_FIXTURE_DIR/"cask/container.7z").to be_detected_as_a P7ZipUnpackStrategy
    end

    it "correctly detects XAR files" do
      expect(TEST_FIXTURE_DIR/"cask/container.xar").to be_detected_as_a XarUnpackStrategy
    end

    it "correctly detects XZ files" do
      expect(TEST_FIXTURE_DIR/"cask/container.xz").to be_detected_as_a XzUnpackStrategy
    end

    it "correctly detects RAR files" do
      expect(TEST_FIXTURE_DIR/"cask/container.rar").to be_detected_as_a RarUnpackStrategy
    end

    it "correctly detects LZIP files" do
      expect(TEST_FIXTURE_DIR/"test.lz").to be_detected_as_a LzipUnpackStrategy
    end

    it "correctly detects LHA files" do
      expect(TEST_FIXTURE_DIR/"test.lha").to be_detected_as_a LhaUnpackStrategy
    end

    it "correctly detects Git repositories" do
      mktmpdir do |repo|
        system "git", "-C", repo, "init"

        expect(repo).to be_detected_as_a GitUnpackStrategy
      end
    end

    it "correctly detects Subversion repositories" do
      mktmpdir do |path|
        repo = path/"repo"
        working_copy = path/"working_copy"

        system "svnadmin", "create", repo
        system "svn", "checkout", "file://#{repo}", working_copy

        expect(working_copy).to be_detected_as_a SubversionUnpackStrategy
      end
    end
  end
end

describe GitUnpackStrategy do
  describe "#extract" do
    it "correctly extracts a Subversion repository" do
      mktmpdir do |path|
        repo = path/"repo"

        repo.mkpath

        system "git", "-C", repo, "init"

        FileUtils.touch repo/"test"
        system "git", "-C", repo, "add", "test"
        system "git", "-C", repo, "commit", "-m", "Add `test` file."

        unpack_dir = path/"unpack_dir"
        GitUnpackStrategy.new(repo).extract(to: unpack_dir)
        expect(unpack_dir.children(false).map(&:to_s)).to match_array [".git", "test"]
      end
    end
  end
end

describe SubversionUnpackStrategy do
  describe "#extract" do
    it "correctly extracts a Subversion repository" do
      mktmpdir do |path|
        repo = path/"repo"
        working_copy = path/"working_copy"

        system "svnadmin", "create", repo
        system "svn", "checkout", "file://#{repo}", working_copy

        FileUtils.touch working_copy/"test"
        system "svn", "add", working_copy/"test"
        system "svn", "commit", working_copy, "-m", "Add `test` file."

        unpack_dir = path/"unpack_dir"
        SubversionUnpackStrategy.new(working_copy).extract(to: unpack_dir)
        expect(unpack_dir.children(false).map(&:to_s)).to match_array ["test"]
      end
    end
  end
end
Library/Homebrew/unpack_strategy.rb (new file, 344 lines)

class UnpackStrategy
  # length of the longest regex (currently TarUnpackStrategy)
  MAX_MAGIC_NUMBER_LENGTH = 262
  private_constant :MAX_MAGIC_NUMBER_LENGTH

  def self.strategies
    @strategies ||= [
      JarUnpackStrategy,
      ZipUnpackStrategy,
      XarUnpackStrategy,
      CompressUnpackStrategy,
      TarUnpackStrategy,
      GzipUnpackStrategy,
      Bzip2UnpackStrategy,
      XzUnpackStrategy,
      LzipUnpackStrategy,
      GitUnpackStrategy,
      MercurialUnpackStrategy,
      SubversionUnpackStrategy,
      CvsUnpackStrategy,
      FossilUnpackStrategy,
      BazaarUnpackStrategy,
      P7ZipUnpackStrategy,
      RarUnpackStrategy,
      LhaUnpackStrategy,
    ].freeze
  end
  private_class_method :strategies

  def self.detect(path)
    magic_number = if path.directory?
      ""
    else
      File.binread(path, MAX_MAGIC_NUMBER_LENGTH)
    end

    strategy = strategies.detect do |s|
      s.can_extract?(path: path, magic_number: magic_number)
    end

    # This is so that bad files produce good error messages.
    strategy ||= case path.extname
    when ".tar.gz", ".tgz", ".tar.bz2", ".tbz", ".tar.xz", ".txz"
      TarUnpackStrategy
    when ".zip"
      ZipUnpackStrategy
    else
      UncompressedUnpackStrategy
    end

    strategy.new(path)
  end

  attr_reader :path

  def initialize(path)
    @path = Pathname(path).expand_path
  end

  def extract(to: nil, basename: nil)
    unpack_dir = Pathname(to || Dir.pwd).expand_path
    unpack_dir.mkpath
    extract_to_dir(unpack_dir, basename: basename)
  end

  private

  def extract_to_dir(_unpack_dir, basename:)
    raise NotImplementedError
  end
end

class DirectoryUnpackStrategy < UnpackStrategy
  def self.can_extract?(path:, magic_number:)
    path.directory?
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    FileUtils.cp_r path.children, unpack_dir, preserve: true
  end
end

class UncompressedUnpackStrategy < UnpackStrategy
  def self.can_extract?(path:, magic_number:)
    false
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    FileUtils.cp path, unpack_dir/basename, preserve: true
  end
end

class JarUnpackStrategy < UncompressedUnpackStrategy
  def self.can_extract?(path:, magic_number:)
    return false unless ZipUnpackStrategy.can_extract?(path: path, magic_number: magic_number)

    # Check further if the ZIP is a JAR/WAR.
    Open3.popen3("unzip", "-l", path) do |stdin, stdout, stderr, wait_thr|
      stdin.close_write
      stderr.close_read

      begin
        return stdout.each_line.any? { |l| l.match?(%r{\s+META-INF/MANIFEST.MF$}) }
      ensure
        stdout.close_read
        wait_thr.kill
      end
    end
  end
end

class P7ZipUnpackStrategy < UnpackStrategy
  def self.can_extract?(path:, magic_number:)
    magic_number.match?(/\A7z\xBC\xAF\x27\x1C/n)
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    safe_system "7zr", "x", "-y", "-bd", "-bso0", path, "-o#{unpack_dir}"
  end
end

class ZipUnpackStrategy < UnpackStrategy
  def self.can_extract?(path:, magic_number:)
    magic_number.match?(/\APK(\003\004|\005\006)/n)
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    safe_system "unzip", "-qq", path, "-d", unpack_dir
  end
end

class TarUnpackStrategy < UnpackStrategy
  def self.can_extract?(path:, magic_number:)
    return true if magic_number.match?(/\A.{257}ustar/n)

    # Check if `tar` can list the contents, then it can also extract it.
    IO.popen(["tar", "tf", path], err: File::NULL) do |stdout|
      !stdout.read(1).nil?
    end
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    safe_system "tar", "xf", path, "-C", unpack_dir
  end
end

class CompressUnpackStrategy < TarUnpackStrategy
  def self.can_extract?(path:, magic_number:)
    magic_number.match?(/\A\037\235/n)
  end
end

class XzUnpackStrategy < TarUnpackStrategy
  def self.can_extract?(path:, magic_number:)
    magic_number.match?(/\A\xFD7zXZ\x00/n)
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    if DependencyCollector.tar_needs_xz_dependency?
      unpack_path = unpack_dir/path.basename
      FileUtils.cp path, unpack_path, preserve: true

      safe_system Formula["xz"].opt_bin/"xz", "-d", "-q", "-T0", unpack_path
    else
      super
    end
  end
end

class Bzip2UnpackStrategy < UnpackStrategy
  def self.can_extract?(path:, magic_number:)
    magic_number.match?(/\ABZh/n)
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    unpack_path = unpack_dir/path.basename
    FileUtils.cp path, unpack_path, preserve: true

    safe_system "bunzip2", "-q", unpack_path
  end
end

class GzipUnpackStrategy < UnpackStrategy
  def self.can_extract?(path:, magic_number:)
    magic_number.match?(/\A\037\213/n)
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    unpack_path = unpack_dir/path.basename
    FileUtils.cp path, unpack_path, preserve: true

    safe_system "gunzip", "-q", "-N", unpack_path
  end
end

class LzipUnpackStrategy < UnpackStrategy
  def self.can_extract?(path:, magic_number:)
    magic_number.match?(/\ALZIP/n)
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    unpack_path = unpack_dir/path.basename
    FileUtils.cp path, unpack_path, preserve: true

    safe_system Formula["lzip"].opt_bin/"lzip", "-d", "-q", unpack_path
  end
end

class XarUnpackStrategy < UnpackStrategy
  def self.can_extract?(path:, magic_number:)
    magic_number.match?(/\Axar!/n)
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    safe_system "xar", "-x", "-f", path, "-C", unpack_dir
  end
end

class RarUnpackStrategy < UnpackStrategy
  def self.can_extract?(path:, magic_number:)
    magic_number.match?(/\ARar!/n)
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    safe_system "unrar", "x", "-inul", path, unpack_dir
  end
end

class LhaUnpackStrategy < UnpackStrategy
  def self.can_extract?(path:, magic_number:)
    magic_number.match?(/\A..-(lh0|lh1|lz4|lz5|lzs|lh\\40|lhd|lh2|lh3|lh4|lh5)-/n)
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    safe_system Formula["lha"].opt_bin/"lha", "xq2w=#{unpack_dir}", path
  end
end

class GitUnpackStrategy < DirectoryUnpackStrategy
  def self.can_extract?(path:, magic_number:)
    super && (path/".git").directory?
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    FileUtils.cp_r path.children, unpack_dir, preserve: true
  end
end

class SubversionUnpackStrategy < DirectoryUnpackStrategy
  def self.can_extract?(path:, magic_number:)
    super && (path/".svn").directory?
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    safe_system "svn", "export", "--force", path, unpack_dir
  end
end

class CvsUnpackStrategy < DirectoryUnpackStrategy
  def self.can_extract?(path:, magic_number:)
    super && (path/"CVS").directory?
  end
end

class MercurialUnpackStrategy < DirectoryUnpackStrategy
  def self.can_extract?(path:, magic_number:)
    super && (path/".hg").directory?
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    with_env "PATH" => PATH.new(Formula["mercurial"].opt_bin, ENV["PATH"]) do
      safe_system "hg", "--cwd", path, "archive", "--subrepos", "-y", "-t", "files", unpack_dir
    end
  end
end

class FossilUnpackStrategy < UnpackStrategy
  def self.can_extract?(path:, magic_number:)
    return false unless magic_number.match?(/\ASQLite format 3\000/n)

    # Fossil database is made up of artifacts, so the `artifact` table must exist.
    query = "select count(*) from sqlite_master where type = 'view' and name = 'artifact'"
    Utils.popen_read("sqlite3", path, query).to_i == 1
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    args = if @ref_type && @ref
      [@ref]
    else
      []
    end

    with_env "PATH" => PATH.new(Formula["fossil"].opt_bin, ENV["PATH"]) do
      safe_system "fossil", "open", path, *args, chdir: unpack_dir
    end
  end
end

class BazaarUnpackStrategy < DirectoryUnpackStrategy
  def self.can_extract?(path:, magic_number:)
    super && (path/".bzr").directory?
  end

  private

  def extract_to_dir(unpack_dir, basename:)
    super

    # The export command doesn't work on checkouts (see https://bugs.launchpad.net/bzr/+bug/897511).
    FileUtils.rm_r unpack_dir/".bzr"
  end
end
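One detail worth noting in the new file: TarUnpackStrategy first checks the POSIX `ustar` magic at offset 257 and, failing that, asks `tar` itself whether it can list the file, since anything `tar` can list it can also extract. A standalone equivalent of that probe (the helper name `tar_readable?` is mine; it assumes a `tar` binary on PATH):

    # Mirrors the fallback in TarUnpackStrategy.can_extract? above.
    def tar_readable?(path)
      IO.popen(["tar", "tf", path.to_s], err: File::NULL) do |stdout|
        !stdout.read(1).nil? # any listed entry means tar can also extract it
      end
    end

    # e.g. tar_readable?("archive.tar.gz") #=> true for formats the system tar understands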