require 'utils/json'

class AbstractDownloadStrategy
  include FileUtils

  attr_reader :meta, :name, :version, :resource

  def initialize(name, resource)
    @name = name
    @resource = resource
    @url = resource.url
    @version = resource.version
    @meta = resource.specs
  end

  # Download and cache the resource at {#cached_location}.
  def fetch
  end

  # Unpack {#cached_location} into the current working directory.
  def stage
  end

  # The path to the cached file or directory associated with the resource.
  def cached_location
  end

  # Remove {#cached_location} and any other files associated with the resource
  # from the cache.
  def clear_cache
    rm_rf(cached_location)
  end
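
  # Illustrative sketch (not part of the original file): callers drive a
  # strategy roughly as
  #
  #   strategy   = DownloadStrategyDetector.detect(url, using)
  #   downloader = strategy.new(name, resource)
  #   downloader.fetch   # populate the cache
  #   downloader.stage   # unpack into the build directory
  #
  # where url, using, name and resource stand in for a formula's values.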

  def expand_safe_system_args args
    args = args.dup
    args.each_with_index do |arg, ii|
      if arg.is_a? Hash
        if ARGV.verbose?
          args.delete_at ii
        else
          args[ii] = arg[:quiet_flag]
        end
        return args
      end
    end
    # 2 as default because commands are e.g. "svn up", "git pull"
    args.insert(2, '-q') unless ARGV.verbose?
    args
  end
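
  # For example (illustrative): with ARGV.verbose? false,
  #
  #   expand_safe_system_args ['unzip', {:quiet_flag => '-qq'}, 'foo.zip']
  #   #=> ['unzip', '-qq', 'foo.zip']
  #
  # while in verbose mode the Hash placeholder is deleted so the tool runs
  # with its normal output.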

  def quiet_safe_system *args
    safe_system(*expand_safe_system_args(args))
  end

  private

  def xzpath
    "#{HOMEBREW_PREFIX}/opt/xz/bin/xz"
  end

  def lzippath
    "#{HOMEBREW_PREFIX}/opt/lzip/bin/lzip"
  end

  def cvspath
    @cvspath ||= %W[
      /usr/bin/cvs
      #{HOMEBREW_PREFIX}/bin/cvs
      #{HOMEBREW_PREFIX}/opt/cvs/bin/cvs
      #{which("cvs")}
    ].find { |p| File.executable? p }
  end

  def hgpath
    @hgpath ||= %W[
      #{which("hg")}
      #{HOMEBREW_PREFIX}/bin/hg
      #{HOMEBREW_PREFIX}/opt/mercurial/bin/hg
    ].find { |p| File.executable? p }
  end

  def bzrpath
    @bzrpath ||= %W[
      #{which("bzr")}
      #{HOMEBREW_PREFIX}/bin/bzr
      #{HOMEBREW_PREFIX}/opt/bazaar/bin/bzr
    ].find { |p| File.executable? p }
  end

  def fossilpath
    @fossilpath ||= %W[
      #{which("fossil")}
      #{HOMEBREW_PREFIX}/bin/fossil
      #{HOMEBREW_PREFIX}/opt/fossil/bin/fossil
    ].find { |p| File.executable? p }
  end
end
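
# Note (illustrative): each *path helper above returns the first candidate
# that is executable, so e.g. a system CVS at /usr/bin/cvs wins over a
# Homebrew-installed one; `which` fills in anything else found on PATH.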

class VCSDownloadStrategy < AbstractDownloadStrategy
  REF_TYPES = [:branch, :revision, :revisions, :tag].freeze

  def initialize name, resource
    super
    @ref_type, @ref = extract_ref(meta)
    @clone = HOMEBREW_CACHE.join(cache_filename)
  end

  def fetch
    ohai "Cloning #{@url}"

    if cached_location.exist? && repo_valid?
      puts "Updating #{cached_location}"
      update
    elsif cached_location.exist?
      puts "Removing invalid repository from cache"
      clear_cache
      clone_repo
    else
      clone_repo
    end
  end

  def stage
    ohai "Checking out #{@ref_type} #{@ref}" if @ref_type && @ref
  end

  def cached_location
    @clone
  end

  private

  def cache_tag
    "__UNKNOWN__"
  end

  def cache_filename
    "#{name}--#{cache_tag}"
  end

  def repo_valid?
    true
  end

  def extract_ref(specs)
    key = REF_TYPES.find { |type| specs.key?(type) }
    return key, specs[key]
  end
end
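
# For example (illustrative): a resource declared with specs
# {:tag => "v1.2.3", :shallow => false} yields
# extract_ref(...) #=> [:tag, "v1.2.3"], since :tag is the first REF_TYPES
# entry present in the specs hash.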

class AbstractFileDownloadStrategy < AbstractDownloadStrategy
  def stage
    case cached_location.compression_type
    when :zip
      with_system_path { quiet_safe_system 'unzip', {:quiet_flag => '-qq'}, cached_location }
      chdir
    when :gzip_only
      with_system_path { buffered_write("gunzip") }
    when :bzip2_only
      with_system_path { buffered_write("bunzip2") }
    when :gzip, :bzip2, :compress, :tar
      with_system_path { safe_system 'tar', 'xf', cached_location }
      chdir
    when :xz
      with_system_path { pipe_to_tar(xzpath) }
      chdir
    when :lzip
      with_system_path { pipe_to_tar(lzippath) }
      chdir
    when :xar
      safe_system "/usr/bin/xar", "-xf", cached_location
    when :rar
      quiet_safe_system 'unrar', 'x', {:quiet_flag => '-inul'}, cached_location
    when :p7zip
      safe_system '7zr', 'x', cached_location
    else
      cp cached_location, basename_without_params
    end
  end

  private

  # If the archive unpacked into a single directory, descend into it.
  def chdir
    entries = Dir['*']
    case entries.length
    when 0 then raise "Empty archive"
    when 1 then Dir.chdir entries.first rescue nil
    end
  end

  def pipe_to_tar(tool)
    Utils.popen_read(tool, "-dc", cached_location.to_s) do |rd|
      Utils.popen_write("tar", "xf", "-") do |wr|
        buf = ""
        wr.write(buf) while rd.read(16384, buf)
      end
    end
  end
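
  # pipe_to_tar is the in-process equivalent of a shell pipeline such as
  # (illustrative)
  #
  #   xz -dc foo.tar.xz | tar xf -
  #
  # copying 16 KB at a time without involving a shell.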

  # gunzip and bunzip2 write the output file in the same directory as the input
  # file regardless of the current working directory, so we need to write it to
  # the correct location ourselves.
  def buffered_write(tool)
    target = File.basename(basename_without_params, cached_location.extname)

    Utils.popen_read(tool, "-f", cached_location.to_s, "-c") do |pipe|
      File.open(target, "wb") do |f|
        buf = ""
        f.write(buf) while pipe.read(16384, buf)
      end
    end
  end

  def basename_without_params
    # Strip any ?thing=wad out of .c?thing=wad style extensions
    File.basename(@url)[/[^?]+/]
  end
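
  # For example (illustrative): with @url = "https://example.com/foo.zip?token=abc",
  # basename_without_params returns "foo.zip".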

  # We need a Pathname because we've monkeypatched extname to support double
  # extensions (e.g. tar.gz).
  # We can't use basename_without_params, because given a URL like
  #   https://example.com/download.php?file=foo-1.0.tar.gz
  # the extension we want is ".tar.gz", not ".php".
  def ext
    Pathname.new(@url).extname[/[^?]+/]
  end
end

class CurlDownloadStrategy < AbstractFileDownloadStrategy
  attr_reader :mirrors, :tarball_path, :temporary_path

  def initialize(name, resource)
    super
    @mirrors = resource.mirrors.dup
    @tarball_path = HOMEBREW_CACHE.join("#{name}-#{version}#{ext}")
    @temporary_path = Pathname.new("#{cached_location}.incomplete")
  end

  def fetch
    ohai "Downloading #{@url}"

    unless cached_location.exist?
      had_incomplete_download = temporary_path.exist?
      begin
        _fetch
      rescue ErrorDuringExecution
        # 33 == range not supported;
        # try wiping the incomplete download and retrying once
        if $?.exitstatus == 33 && had_incomplete_download
          ohai "Trying a full download"
          temporary_path.unlink
          had_incomplete_download = false
          retry
        else
          raise CurlDownloadStrategyError.new(@url)
        end
      end
      ignore_interrupts { temporary_path.rename(cached_location) }
    else
      puts "Already downloaded: #{cached_location}"
    end
  rescue CurlDownloadStrategyError
    raise if mirrors.empty?
    puts "Trying a mirror..."
    @url = mirrors.shift
    retry
  end

  def cached_location
    tarball_path
  end

  def clear_cache
    super
    rm_rf(temporary_path)
  end

  private

  # Private method, can be overridden if needed.
  def _fetch
    curl @url, "-C", downloaded_size, "-o", temporary_path
  end

  def downloaded_size
    temporary_path.size? || 0
  end

  def curl(*args)
    args << '--connect-timeout' << '5' unless mirrors.empty?
    args << "--user" << meta.fetch(:user) if meta.key?(:user)
    super
  end
end
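
# Illustrative sketch: with 1234 bytes already in foo.tar.gz.incomplete,
# _fetch resumes via roughly
#
#   curl https://example.com/foo.tar.gz -C 1234 -o foo.tar.gz.incomplete
#
# and fetch falls back to a fresh full download if the server rejects
# ranged requests (curl exit status 33).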

# Detect and download from Apache Mirror
class CurlApacheMirrorDownloadStrategy < CurlDownloadStrategy
  def apache_mirrors
    rd, wr = IO.pipe
    buf = ""

    pid = fork do
      rd.close
      $stdout.reopen(wr)
      $stderr.reopen(wr)
      curl "#{@url}&asjson=1"
    end
    wr.close

    rd.readline if ARGV.verbose? # Remove Homebrew output
    buf << rd.read until rd.eof?
    rd.close
    Process.wait(pid)
    buf
  end

  def _fetch
    return super if @tried_apache_mirror
    @tried_apache_mirror = true

    mirrors = Utils::JSON.load(apache_mirrors)
    @url = mirrors.fetch('preferred') + mirrors.fetch('path_info')

    ohai "Best Mirror #{@url}"
    super
  rescue IndexError, Utils::JSON::Error
    raise CurlDownloadStrategyError, "Couldn't determine mirror, try again later."
  end
end
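
# Hypothetical closer.cgi?asjson=1 response (shape inferred from the fetch
# calls above):
#
#   {"preferred" => "http://mirror.example.org/pub/",
#    "path_info" => "foo/foo-1.0.tar.gz"}
#
# which yields @url = "http://mirror.example.org/pub/foo/foo-1.0.tar.gz".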

# Download via an HTTP POST.
# Query parameters on the URL are converted into POST parameters.
class CurlPostDownloadStrategy < CurlDownloadStrategy
  def _fetch
    base_url, data = @url.split('?')
    curl base_url, '-d', data, '-C', downloaded_size, '-o', temporary_path
  end
end
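
# Hypothetical formula usage: a URL such as
#
#   url "https://example.com/dl.cgi?name=foo&version=1.0", :using => :post
#
# is requested as POST data "name=foo&version=1.0" against
# "https://example.com/dl.cgi".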

# Download from an SSL3-only host.
class CurlSSL3DownloadStrategy < CurlDownloadStrategy
  def _fetch
    curl @url, '-3', '-C', downloaded_size, '-o', temporary_path
  end
end

# Use this strategy to download but not unzip a file.
# Useful for installing jars.
class NoUnzipCurlDownloadStrategy < CurlDownloadStrategy
  def stage
    cp cached_location, basename_without_params
  end
end
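
# Hypothetical formula usage:
#
#   url "https://example.com/tool.jar", :using => :nounzip
#
# stage then copies tool.jar into the build directory instead of extracting it.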

class CurlUnsafeDownloadStrategy < CurlDownloadStrategy
  def _fetch
    curl @url, '--insecure', '-C', downloaded_size, '-o', temporary_path
  end
end

# This strategy extracts our binary packages.
class CurlBottleDownloadStrategy < CurlDownloadStrategy
  def curl(*args)
    mirror = ENV["HOMEBREW_SOURCEFORGE_MIRROR"]
    args << "-G" << "-d" << "use_mirror=#{mirror}" if mirror
    super
  end

  def stage
    ohai "Pouring #{cached_location.basename}"
    super
  end
end

# This strategy extracts local binary packages.
class LocalBottleDownloadStrategy < AbstractFileDownloadStrategy
  attr_reader :cached_location

  def initialize(formula)
    @cached_location = formula.local_bottle_path
  end

  def stage
    ohai "Pouring #{cached_location.basename}"
    super
  end
end

# S3DownloadStrategy downloads tarballs from AWS S3.
# To use it, add ":using => S3DownloadStrategy" to the URL section of your
# formula. This download strategy uses AWS access tokens (in the
# environment variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY)
# to sign the request. This strategy is good in a corporate setting,
# because it lets you use a private S3 bucket as a repo for internal
# distribution. (It will work for public buckets as well.)
class S3DownloadStrategy < CurlDownloadStrategy
  def _fetch
    # Put the aws gem requirement here (vs top of file) so it's only
    # a dependency of S3 users, not all Homebrew users
    require 'rubygems'
    begin
      require 'aws-sdk'
    rescue LoadError
      onoe "Install the aws-sdk gem into the gem repo used by brew."
      raise
    end

    if @url !~ %r[^https?://+([^.]+)\.s3\.amazonaws\.com/+(.+)$]
      raise "Bad S3 URL: " + @url
    end
    bucket, key = $1, $2

    obj = AWS::S3.new.buckets[bucket].objects[key]
    begin
      s3url = obj.url_for(:get)
    rescue AWS::Errors::MissingCredentialsError
      ohai "AWS credentials missing, trying public URL instead."
      s3url = obj.public_url
    end

    curl s3url, '-C', downloaded_size, '-o', temporary_path
  end
end
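
# Hypothetical formula usage:
#
#   url "https://my-bucket.s3.amazonaws.com/foo-1.0.tar.gz",
#       :using => S3DownloadStrategy
#
# with AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY exported when the bucket
# is private.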

class SubversionDownloadStrategy < VCSDownloadStrategy
  def initialize(name, resource)
    super
    @url = @url.sub(/^svn\+/, "") if @url.start_with?("svn+http://")
  end

  def fetch
    clear_cache unless @url.chomp("/") == repo_url or quiet_system "svn", "switch", @url, cached_location
    super
  end

  def stage
    super
    quiet_safe_system "svn", "export", "--force", cached_location, Dir.pwd
  end

  private

  def repo_url
    Utils.popen_read("svn", "info", cached_location.to_s).strip[/^URL: (.+)$/, 1]
  end

  def get_externals
    Utils.popen_read("svn", "propget", "svn:externals", @url).chomp.each_line do |line|
      name, url = line.split(/\s+/)
      yield name, url
    end
  end

  def fetch_args
    []
  end

  def fetch_repo target, url, revision=nil, ignore_externals=false
    # Use "svn up" when the repository already exists locally.
    # This saves on bandwidth and will have a similar effect to verifying the
    # cache as it will make any changes to get the right revision.
    svncommand = target.directory? ? 'up' : 'checkout'
    args = ['svn', svncommand] + fetch_args
    # SVN shipped with Xcode 3.1.4 can't force a checkout.
    args << '--force' unless MacOS.version == :leopard
    args << url unless target.directory?
    args << target.to_s
    args << '-r' << revision if revision
    args << '--ignore-externals' if ignore_externals
    quiet_safe_system(*args)
  end

  def cache_tag
    head? ? "svn-HEAD" : "svn"
  end

  def repo_valid?
    cached_location.join(".svn").directory?
  end

  def clone_repo
    case @ref_type
    when :revision
      fetch_repo cached_location, @url, @ref
    when :revisions
      # nil is OK for main_revision, as fetch_repo will then get latest
      main_revision = @ref[:trunk]
      fetch_repo cached_location, @url, main_revision, true

      get_externals do |external_name, external_url|
        fetch_repo cached_location+external_name, external_url, @ref[external_name], true
      end
    else
      fetch_repo cached_location, @url
    end
  end
  alias_method :update, :clone_repo
end

# Deprecated.
StrictSubversionDownloadStrategy = SubversionDownloadStrategy

class UnsafeSubversionDownloadStrategy < SubversionDownloadStrategy
  def fetch_args
    %w[--non-interactive --trust-server-cert]
  end
end

class GitDownloadStrategy < VCSDownloadStrategy
  SHALLOW_CLONE_WHITELIST = [
    %r{git://},
    %r{https://github\.com},
    %r{http://git\.sv\.gnu\.org},
    %r{http://llvm\.org},
  ]
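
  # For whitelisted hosts a shallow clone is performed, i.e. roughly
  # (illustrative)
  #
  #   git clone --depth 1 --branch master <url> <cache>
  #
  # other hosts, and :revision refs, always get a full clone so that
  # arbitrary commits can be resolved.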

  def initialize name, resource
    super
    @ref_type ||= :branch
    @ref ||= "master"
    @shallow = meta.fetch(:shallow) { true }
  end

  def stage
    super
    cp_r File.join(cached_location, "."), Dir.pwd
  end

  private

  def cache_tag
    "git"
  end

  def cache_version
    0
  end

  def update
    cached_location.cd do
      config_repo
      update_repo
      checkout
      reset
      update_submodules if submodules?
    end
  end

  def shallow_clone?
    @shallow && support_depth?
  end

  def support_depth?
    @ref_type != :revision && SHALLOW_CLONE_WHITELIST.any? { |rx| rx === @url }
  end
cached_location.join(".git")
quiet_system 'git', '--git-dir', git_dir, 'rev-parse', '-q', '--verify', "#{@ref}^{commit}"
quiet_system "git", "--git-dir", git_dir, "status", "-s"
def submodules?
cached_location.join(".gitmodules").exist?
args = %w{clone}
args << '--depth' << '1' if shallow_clone?
when :branch, :tag then args << '--branch' << @ref
end
args << @url << cached_location
end

  def refspec
    case @ref_type
    when :branch then "+refs/heads/#@ref:refs/remotes/origin/#@ref"
    when :tag    then "+refs/tags/#@ref:refs/tags/#@ref"
    else              "+refs/heads/master:refs/remotes/origin/master"
    end
  end
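
  # For example (illustrative): with @ref_type == :branch and @ref == "develop",
  # refspec is "+refs/heads/develop:refs/remotes/origin/develop", so fetches
  # update only that branch in the cached repository.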

  def config_repo
    safe_system 'git', 'config', 'remote.origin.url', @url
    safe_system 'git', 'config', 'remote.origin.fetch', refspec
  end

  def update_repo
    if @ref_type == :branch || !has_ref?
      quiet_safe_system 'git', 'fetch', 'origin'
    end
  end

  def clone_repo
    safe_system 'git', *clone_args
    cached_location.cd do
      safe_system "git", "config", "homebrew.cacheversion", cache_version
      update_submodules if submodules?
    end
  end

  def checkout
    quiet_safe_system "git", "checkout", "-f", @ref, "--"
  end

  def reset_args
    ref = case @ref_type
          when :branch then "origin/#@ref"
          when :revision, :tag then @ref
          end

    %W{reset --hard #{ref}}
  end

  def reset
    quiet_safe_system 'git', *reset_args
  end

  def update_submodules
    quiet_safe_system "git", "submodule", "foreach", "--recursive", "git submodule sync"
    quiet_safe_system "git", "submodule", "update", "--init", "--recursive"
  end
end

class CVSDownloadStrategy < VCSDownloadStrategy
  def initialize(name, resource)
    super
    @url = @url.sub(%r[^cvs://], "")

    if meta.key?(:module)
      @module = meta.fetch(:module)
    elsif @url !~ %r[:[^/]+$]
      @module = name
    else
      @module, @url = split_url(@url)
    end
  end

  def stage
    cp_r File.join(cached_location, "."), Dir.pwd
  end

  private

  def cache_tag
    "cvs"
  end

  def repo_valid?
    cached_location.join("CVS").directory?
  end

  def clone_repo
    HOMEBREW_CACHE.cd do
      quiet_safe_system cvspath, { :quiet_flag => "-Q" }, "-d", @url, "login"
      quiet_safe_system cvspath, { :quiet_flag => "-Q" }, "-d", @url, "checkout", "-d", cache_filename, @module
    end
  end

  def update
    cached_location.cd { quiet_safe_system cvspath, { :quiet_flag => "-Q" }, "up" }
  end

  def split_url(in_url)
    parts = in_url.split(/:/)
    mod = parts.pop
    url = parts.join(':')
    [ mod, url ]
  end
end
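
# For example (illustrative):
#
#   split_url(":pserver:anonymous@cvs.example.org:/root:mymodule")
#   #=> ["mymodule", ":pserver:anonymous@cvs.example.org:/root"]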

class MercurialDownloadStrategy < VCSDownloadStrategy
  def initialize(name, resource)
    super
    @url = @url.sub(%r[^hg://], "")
  end

  def stage
    super
    dst = Dir.getwd
    cached_location.cd do
      if @ref_type && @ref
        safe_system hgpath, 'archive', '--subrepos', '-y', '-r', @ref, '-t', 'files', dst
      else
        safe_system hgpath, 'archive', '--subrepos', '-y', '-t', 'files', dst
      end
    end
  end

  private

  def cache_tag
    "hg"
  end

  def repo_valid?
    cached_location.join(".hg").directory?
  end

  def clone_repo
    safe_system hgpath, "clone", @url, cached_location
  end

  def update
    cached_location.cd { quiet_safe_system hgpath, "pull", "--update" }
  end
end

class BazaarDownloadStrategy < VCSDownloadStrategy
  def initialize(name, resource)
    super
    @url = @url.sub(%r[^bzr://], "")
  end

  def stage
    # The export command doesn't work on checkouts
    # See https://bugs.launchpad.net/bzr/+bug/897511
    cp_r File.join(cached_location, "."), Dir.pwd
    rm_r ".bzr"
  end

  private

  def cache_tag
    "bzr"
  end

  def repo_valid?
    cached_location.join(".bzr").directory?
  end

  def clone_repo
    # "lightweight" means history-less
    safe_system bzrpath, "checkout", "--lightweight", @url, cached_location
  end

  def update
    cached_location.cd { quiet_safe_system bzrpath, "update" }
  end
end

class FossilDownloadStrategy < VCSDownloadStrategy
  def initialize(name, resource)
    super
    @url = @url.sub(%r[^fossil://], "")
  end

  def stage
    super
    args = [fossilpath, "open", cached_location]
    args << @ref if @ref_type && @ref
    safe_system(*args)
  end

  private

  def cache_tag
    "fossil"
  end

  def clone_repo
    safe_system fossilpath, "clone", @url, cached_location
  end

  def update
    safe_system fossilpath, "pull", "-R", cached_location
  end
end

class DownloadStrategyDetector
  def self.detect(url, strategy=nil)
    if strategy.nil?
      detect_from_url(url)
    elsif Class === strategy && strategy < AbstractDownloadStrategy
      strategy
    elsif Symbol === strategy
      detect_from_symbol(strategy)
    else
      raise TypeError,
        "Unknown download strategy specification #{strategy.inspect}"
    end
  end

  def self.detect_from_url(url)
    case url
    when %r[^https?://.+\.git$], %r[^git://]
      GitDownloadStrategy
    when %r[^http://www\.apache\.org/dyn/closer\.cgi]
      CurlApacheMirrorDownloadStrategy
    when %r[^https?://(.+?\.)?googlecode\.com/svn],
         %r[^https?://svn\.],
         %r[^svn://],
         %r[^svn\+http://],
         %r[^http://svn\.apache\.org/repos/],
         %r[^https?://(.+?\.)?sourceforge\.net/svnroot/]
      SubversionDownloadStrategy
    when %r[^cvs://]
      CVSDownloadStrategy
    when %r[^https?://(.+?\.)?googlecode\.com/hg],
         %r[^hg://],
         %r[^https?://(.+?\.)?sourceforge\.net/hgweb/]
      MercurialDownloadStrategy
    when %r[^bzr://]
      BazaarDownloadStrategy
    when %r[^fossil://]
      FossilDownloadStrategy
    else
      CurlDownloadStrategy
    end
  end

  def self.detect_from_symbol(symbol)
    case symbol
    when :hg      then MercurialDownloadStrategy
    when :nounzip then NoUnzipCurlDownloadStrategy
    when :git     then GitDownloadStrategy
    when :bzr     then BazaarDownloadStrategy
    when :svn     then SubversionDownloadStrategy
    when :curl    then CurlDownloadStrategy
    when :ssl3    then CurlSSL3DownloadStrategy
    when :cvs     then CVSDownloadStrategy
    when :post    then CurlPostDownloadStrategy
    else
      raise "Unknown download strategy #{symbol} was requested."
    end
  end
end