class Gem::RemoteFetcher

def self.fetcher
  @fetcher ||= self.new Gem.configuration[:http_proxy]
end
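
# The memoized instance is shared process-wide, so repeated calls return the
# same object and reuse its connection pools. A minimal sketch:
#
#   fetcher = Gem::RemoteFetcher.fetcher
#   fetcher.equal?(Gem::RemoteFetcher.fetcher) # => true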

def self.fetcher=(fetcher)
  @fetcher = fetcher
end
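
# The writer swaps the shared instance, which is mainly useful for injecting a
# test double; my_stub below is a hypothetical stand-in, not part of RubyGems.
#
#   original = Gem::RemoteFetcher.fetcher
#   Gem::RemoteFetcher.fetcher = my_stub
#   # ... exercise code that calls Gem::RemoteFetcher.fetcher ...
#   Gem::RemoteFetcher.fetcher = original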

def cache_update_path(uri, path = nil, update = true)
  mtime = path && File.stat(path).mtime rescue nil
  data = fetch_path(uri, mtime)
  if data == nil # indicates the server returned 304 Not Modified
    return Gem.read_binary(path)
  end
  if update and path
    Gem.write_binary(path, data)
  end
  data
end
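
# Sketch of the write-through cache behaviour, assuming an illustrative URI and
# cache path: a 304 Not Modified answer returns the bytes already on disk,
# while a fresh body is written back to path before being returned.
#
#   fetcher = Gem::RemoteFetcher.fetcher
#   uri     = URI.parse("https://gems.example.com/versions")
#   path    = File.join(Gem.user_dir, "cache", "versions")
#   data    = fetcher.cache_update_path(uri, path)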

def close_all
  @pools.each_value {|pool| pool.close_all }
end

def download(spec, source_uri, install_dir = Gem.dir)
  install_cache_dir = File.join install_dir, "cache"
  cache_dir =
    if Dir.pwd == install_dir # see fetch_command
      install_dir
    elsif File.writable?(install_cache_dir) || (File.writable?(install_dir) && (not File.exist?(install_cache_dir)))
      install_cache_dir
    else
      File.join Gem.user_dir, "cache"
    end
  gem_file_name = File.basename spec.cache_file
  local_gem_path = File.join cache_dir, gem_file_name
  require "fileutils"
  FileUtils.mkdir_p cache_dir rescue nil unless File.exist? cache_dir
  source_uri = parse_uri(source_uri)
  scheme = source_uri.scheme
  # URI.parse gets confused by MS Windows paths with forward slashes.
  scheme = nil if scheme =~ /^[a-z]$/i
  # REFACTOR: split this up and dispatch on scheme (eg download_http)
  # REFACTOR: be sure to clean up fake fetcher when you do this... cleaner
  case scheme
  when 'http', 'https', 's3' then
    unless File.exist? local_gem_path
      begin
        verbose "Downloading gem #{gem_file_name}"
        remote_gem_path = source_uri + "gems/#{gem_file_name}"
        self.cache_update_path remote_gem_path, local_gem_path
      rescue FetchError
        raise if spec.original_platform == spec.platform
        alternate_name = "#{spec.original_name}.gem"
        verbose "Failed, downloading gem #{alternate_name}"
        remote_gem_path = source_uri + "gems/#{alternate_name}"
        self.cache_update_path remote_gem_path, local_gem_path
      end
    end
  when 'file' then
    begin
      path = source_uri.path
      path = File.dirname(path) if File.extname(path) == '.gem'
      remote_gem_path = Gem::Util.correct_for_windows_path(File.join(path, 'gems', gem_file_name))
      FileUtils.cp(remote_gem_path, local_gem_path)
    rescue Errno::EACCES
      local_gem_path = source_uri.to_s
    end
    verbose "Using local gem #{local_gem_path}"
  when nil then # TODO test for local overriding cache
    source_path = if Gem.win_platform? && source_uri.scheme &&
                     !source_uri.path.include?(':')
                    "#{source_uri.scheme}:#{source_uri.path}"
                  else
                    source_uri.path
                  end
    source_path = Gem::UriFormatter.new(source_path).unescape
    begin
      FileUtils.cp source_path, local_gem_path unless
        File.identical?(source_path, local_gem_path)
    rescue Errno::EACCES
      local_gem_path = source_uri.to_s
    end
    verbose "Using local gem #{local_gem_path}"
  else
    raise ArgumentError, "unsupported URI scheme #{source_uri.scheme}"
  end
  local_gem_path
end
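
# Usage sketch: resolve a specification for a dependency (here "rake", purely
# illustrative), then download its .gem file; the return value is the path of
# the cached gem on disk.
#
#   dep          = Gem::Dependency.new("rake")
#   found, _     = Gem::SpecFetcher.fetcher.spec_for_dependency(dep)
#   spec, source = found.max_by {|(s, _)| s.version }
#   Gem::RemoteFetcher.fetcher.download(spec, source.uri) if spec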

def download_to_cache(dependency)
  found, _ = Gem::SpecFetcher.fetcher.spec_for_dependency dependency
  return if found.empty?
  spec, source = found.max_by {|(s,_)| s.version }
  download spec, source.uri
end
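
# The same flow as the sketch above, collapsed into one call; returns nil when
# no specification matches the dependency.
#
#   Gem::RemoteFetcher.fetcher.download_to_cache(Gem::Dependency.new("rake"))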

def fetch_file(uri, *_)
  Gem.read_binary Gem::Util.correct_for_windows_path uri.path
end
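
# Sketch with an illustrative file:// URI; the file is read straight from disk
# and no network request is made.
#
#   Gem::RemoteFetcher.fetcher.fetch_file(URI.parse("file:///var/cache/gems/versions"))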

def fetch_http(uri, last_modified = nil, head = false, depth = 0)
  fetch_type = head ? Net::HTTP::Head : Net::HTTP::Get
  response   = request uri, fetch_type, last_modified do |req|
    headers.each {|k,v| req.add_field(k,v) }
  end
  case response
  when Net::HTTPOK, Net::HTTPNotModified then
    response.uri = uri
    head ? response : response.body
  when Net::HTTPMovedPermanently, Net::HTTPFound, Net::HTTPSeeOther,
       Net::HTTPTemporaryRedirect then
    raise FetchError.new('too many redirects', uri) if depth > 10
    unless location = response['Location']
      raise FetchError.new("redirecting but no redirect location was given", uri)
    end
    location = parse_uri location
    if https?(uri) && !https?(location)
      raise FetchError.new("redirecting to non-https resource: #{location}", uri)
    end
    fetch_http(location, last_modified, head, depth + 1)
  else
    raise FetchError.new("bad response #{response.message} #{response.code}", uri)
  end
end
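
# Sketch against an illustrative host: a GET returns the response body, while a
# HEAD request (head = true) returns the Net::HTTPResponse itself; redirects
# are followed until the depth exceeds 10, and an https-to-http downgrade
# raises FetchError.
#
#   fetcher = Gem::RemoteFetcher.fetcher
#   uri     = URI.parse("https://gems.example.com/specs.4.8.gz")
#   body    = fetcher.fetch_http(uri)              # response body as a String
#   head    = fetcher.fetch_http(uri, nil, true)   # Net::HTTPResponse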

def fetch_path(uri, mtime = nil, head = false)
  uri = parse_uri uri
  unless uri.scheme
    raise ArgumentError, "uri scheme is invalid: #{uri.scheme.inspect}"
  end
  data = send "fetch_#{uri.scheme}", uri, mtime, head
  if data and !head and uri.to_s.end_with?(".gz")
    begin
      data = Gem::Util.gunzip data
    rescue Zlib::GzipFile::Error
      raise FetchError.new("server did not return a valid file", uri)
    end
  end
  data
rescue Timeout::Error
  raise UnknownHostError.new('timed out', uri)
rescue IOError, SocketError, SystemCallError,
       *(OpenSSL::SSL::SSLError if Gem::HAVE_OPENSSL) => e
  if e.message =~ /getaddrinfo/
    raise UnknownHostError.new('no such name', uri)
  else
    raise FetchError.new("#{e.class}: #{e}", uri)
  end
end
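
# fetch_path dispatches on the URI scheme (fetch_http, fetch_https, fetch_file,
# fetch_s3) and transparently gunzips bodies fetched from ".gz" URIs. Host and
# path below are illustrative.
#
#   uri  = URI.parse("https://gems.example.com/latest_specs.4.8.gz")
#   data = Gem::RemoteFetcher.fetcher.fetch_path(uri)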

def fetch_s3(uri, mtime = nil, head = false)
  begin
    public_uri = s3_uri_signer(uri).sign
  rescue Gem::S3URISigner::ConfigurationError, Gem::S3URISigner::InstanceProfileError => e
    raise FetchError.new(e.message, "s3://#{uri.host}")
  end
  fetch_https public_uri, mtime, head
end

def https?(uri)
  uri.scheme.downcase == 'https'
end

def initialize(proxy=nil, dns=nil, headers={})
  require 'net/http'
  require 'stringio'
  require 'uri'
  Socket.do_not_reverse_lookup = true
  @proxy = proxy
  @pools = {}
  @pool_lock = Mutex.new
  @cert_files = Gem::Request.get_cert_files
  @headers = headers
end
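
# A standalone instance can be built instead of the shared singleton, for
# example with an explicit proxy and extra request headers (both values below
# are illustrative); the dns argument is not used by this initializer and can
# stay nil.
#
#   fetcher = Gem::RemoteFetcher.new("http://proxy.example.com:3128", nil,
#                                    { "X-Request-Id" => "debug" })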

def pools_for(proxy)
  @pool_lock.synchronize do
    @pools[proxy] ||= Gem::Request::ConnectionPools.new proxy, @cert_files
  end
end

def proxy_for(proxy, uri)
  Gem::Request.proxy_uri(proxy || Gem::Request.get_proxy_from_env(uri.scheme))
end

def request(uri, request_class, last_modified = nil)
  proxy = proxy_for @proxy, uri
  pool  = pools_for(proxy).pool_for uri
  request = Gem::Request.new uri, request_class, last_modified, pool
  request.fetch do |req|
    yield req if block_given?
  end
end
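
# The block receives the Net::HTTP request object before it is sent, which is
# how fetch_http injects the instance-level headers; the extra header below is
# illustrative.
#
#   response = Gem::RemoteFetcher.fetcher.request(URI.parse("https://gems.example.com/"), Net::HTTP::Get) do |req|
#     req.add_field("Accept", "application/octet-stream")
#   end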

# we have our own signing code here to avoid a dependency on the aws-sdk gem
def s3_uri_signer(uri)
  Gem::S3URISigner.new(uri)
end