# Download each of +urls+ to a local file via a single Curl::Multi handle.
# +easy_options+ apply to each request, +multi_options+ to the multi handle, and
# +download_paths+ optionally names the local file for each URL. An optional
# block receives each completed Curl::Easy handle and its download path.
def download(urls, easy_options = {}, multi_options = {}, download_paths = nil, &blk)
  errors = []                 # exceptions collected while closing files
  procs = []                  # on_body writers, one per download
  files = []                  # open file handles, closed in the ensure block
  urls_with_config = []       # request hashes passed to Curl::Multi.http
  url_to_download_paths = {}  # url => {:path => ..., :file => ...}
  urls.each_with_index do |urlcfg, i|
    # each entry may be a plain URL string or a hash of Curl::Easy options
    if urlcfg.is_a?(Hash)
      url = urlcfg[:url]
    else
      url = urlcfg
    end

    # use the caller-supplied download path if given, else the URL's basename
    if download_paths and download_paths[i]
      download_path = download_paths[i]
    else
      download_path = File.basename(url)
    end
    # open the destination file and register an on_body handler that streams
    # each response chunk into it, returning the number of bytes written
    file = lambda do |dp|
      file = File.open(dp, "wb")
      procs << (lambda { |data| file.write data; data.size })
      files << file
      file
    end.call(download_path)
    if urlcfg.is_a?(Hash)
      urls_with_config << urlcfg.merge({:on_body => procs.last}.merge(easy_options))
    else
      urls_with_config << {:url => url, :on_body => procs.last, :method => :get}.merge(easy_options)
    end
    url_to_download_paths[url] = {:path => download_path, :file => file}
  end
  if blk
    # with a block, close each file as soon as its transfer completes, then
    # yield the Curl::Easy handle and the path the response was written to
    Curl::Multi.http(urls_with_config, multi_options) do |c, code, method|
      info = url_to_download_paths[c.url]
      begin
        file = info[:file]
        files.reject! { |f| f == file }
        file.close
      rescue => e
        errors << e
      end
      blk.call(c, info[:path])
    end
  else
    Curl::Multi.http(urls_with_config, multi_options)
  end
ensure
  # close any files still open, collecting close errors rather than raising mid-cleanup
  files.each do |f|
    begin
      f.close
    rescue => e
      errors << e
    end
  end
  # surface the first error collected while closing files
  raise errors.first unless errors.empty?
end
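
# A minimal usage sketch, assuming this method is exposed as Curl::Multi.download
# (as in the curb gem) and that the option hashes are forwarded to Curl::Multi.http
# as shown above; the URLs and local paths below are hypothetical placeholders.
#
#   Curl::Multi.download(
#     ['http://example.com/a.tar.gz', 'http://example.com/b.tar.gz'],
#     { :follow_location => true },       # applied to each Curl::Easy handle
#     {},                                 # options for the Curl::Multi handle
#     ['/tmp/a.tar.gz', '/tmp/b.tar.gz']  # one download path per URL
#   ) do |easy, path|
#     puts "#{easy.response_code} #{easy.url} -> #{path}"
#   end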