require 'faraday'

require 'faraday/http_cache/storage'
require 'faraday/http_cache/request'
require 'faraday/http_cache/response'

# Public: The middleware responsible for caching and serving responses.
# The middleware uses the provided configuration options to establish a
# 'Faraday::HttpCache::Storage' to cache responses retrieved by the stack
# adapter. If a stored response can be served again for a subsequent
# request, the middleware will return the response instead of issuing a new
# request to its server. This middleware should be the last handler attached
# to your stack, so it will be closest to the inner app, avoiding issues
# with other middlewares on your stack.
#
# Examples:
#
#   # Using the middleware with a simple client:
#   client = Faraday.new do |builder|
#     builder.use :http_cache, store: my_store_backend
#     builder.adapter Faraday.default_adapter
#   end
#
#   # Attach a Logger to the middleware.
#   client = Faraday.new do |builder|
#     builder.use :http_cache, logger: my_logger_instance, store: my_store_backend
#     builder.adapter Faraday.default_adapter
#   end
#
#   # Provide an existing CacheStore (for instance, from a Rails app).
#   client = Faraday.new do |builder|
#     builder.use :http_cache, store: Rails.cache
#   end
#
#   # Use Marshal for serialization.
#   client = Faraday.new do |builder|
#     builder.use :http_cache, store: Rails.cache, serializer: Marshal
#   end
#
#   # Instrument events using ActiveSupport::Notifications.
#   client = Faraday.new do |builder|
#     builder.use :http_cache, store: Rails.cache, instrumenter: ActiveSupport::Notifications
#   end
class Faraday::HttpCache < Faraday::Middleware
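  # NOTE: The methods below reference a few class-level constants that are not
  # part of this excerpt. The definitions here are a minimal sketch inferred
  # from how they are used in this class ('cover?' implies a Range, the cache
  # statuses mirror the 'trace' calls); the exact values in the gem may differ.

  # Unsafe HTTP methods whose successful execution should invalidate any
  # stored entries for the same resource.
  UNSAFE_METHODS = %i[post put delete patch].freeze

  # Client error statuses; a failed unsafe request does not invalidate the cache.
  ERROR_STATUSES = (400..499).freeze

  # Default instrumentation event name (assumed to be 'http_cache.faraday').
  EVENT_NAME = 'http_cache.faraday'

  # Trace symbols that are reported as the cache status of a request.
  CACHE_STATUSES = %i[unacceptable miss fresh valid invalid uncacheable delete].freeze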
  # Public: Process the request into a duplicate of this instance to
  # ensure that the internal state is preserved.
  def call(env)
    dup.call!(env)
  end
  # Internal: Process the stack request to try to serve a cache response.
  # On a cacheable request, the middleware will attempt to locate a
  # valid stored response to serve. On a cache miss, the middleware will
  # forward the request and try to store the response for future requests.
  # If the request can't be cached, it will be delegated directly to the
  # underlying app and the response will not be touched.
  # The processed steps will be recorded to be logged once the whole
  # process is finished.
  def call!(env)
    @trace = []
    @request = create_request(env)

    response = nil

    if @request.cacheable?
      response = process(env)
    else
      trace :unacceptable
      response = @app.call(env)
    end

    response.on_complete do
      delete(@request, response) if should_delete?(response.status, @request.method)
      log_request
      response.env[:http_cache_trace] = @trace
      instrument(response.env)
    end
  end
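  # The recorded trace is also exposed on the response environment, so callers
  # can inspect how a request was handled. Illustrative usage, assuming a
  # 'client' built with this middleware:
  #
  #   response = client.get('http://example.com/users')
  #   response.env[:http_cache_trace] # => e.g. [:miss, :store]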
  # Internal: Builds a 'Faraday::HttpCache::Request' from the Faraday
  # environment 'Hash'.
  def create_request(env)
    Request.from_env(env)
  end
  # Internal: Creates a new 'Hash' containing the response information.
  #
  # env - the environment 'Hash' from the Faraday stack.
  #
  # Returns a 'Hash' containing the ':status', ':body' and ':response_headers'
  # entries.
  def create_response(env)
    hash = env.to_hash

    {
      status: hash[:status],
      body: hash[:body],
      response_headers: hash[:response_headers]
    }
  end
  # Internal: Removes from the storage the entries for the request URL and for
  # any URLs present in the 'Location' and 'Content-Location' response headers.
  def delete(request, response)
    headers = %w(Location Content-Location)

    headers.each do |header|
      url = response.headers[header]
      @storage.delete(url) if url
    end

    @storage.delete(request.url)
    trace :delete
  end
  # Internal: Extracts the cache status from a trace.
  #
  # Returns the cache status Symbol, or nil if none was recorded.
  def extract_status(trace)
    CACHE_STATUSES.find { |status| trace.include?(status) }
  end
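  # For example, assuming ':miss' is listed in CACHE_STATUSES:
  #
  #   extract_status([:miss, :store]) # => :miss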
  # Internal: Fetches the response from the Faraday stack and stores it.
  #
  # env - the environment 'Hash' from the Faraday stack.
  def fetch(env)
    trace :miss
    @app.call(env).on_complete do |fresh_env|
      response = Response.new(create_response(fresh_env))
      store(response)
    end
  end
  # Public: Initializes a new HttpCache middleware.
  #
  # app - the next endpoint on the 'Faraday' stack.
  # :store - A cache store that should respond to 'read', 'write', and 'delete'.
  # :serializer - A serializer that should respond to 'dump' and 'load'.
  # :shared_cache - A flag to mark the middleware as a shared cache or not.
  # :instrumenter - An instrumentation object that should respond to 'instrument'.
  # :instrument_name - The String name of the instrument being reported on (optional).
  # :logger - A logger object.
  #
  # Examples:
  #
  #   # Initialize the middleware with a logger.
  #   Faraday::HttpCache.new(app, logger: my_logger)
  #
  #   # Initialize the middleware with a logger and Marshal as a serializer.
  #   Faraday::HttpCache.new(app, logger: my_logger, serializer: Marshal)
  #
  #   # Initialize the middleware with a FileStore at the 'tmp' dir.
  #   store = ActiveSupport::Cache.lookup_store(:file_store, ['tmp'])
  #   Faraday::HttpCache.new(app, store: store)
  #
  #   # Initialize the middleware with a MemoryStore and logger.
  #   store = ActiveSupport::Cache.lookup_store
  #   Faraday::HttpCache.new(app, store: store, logger: my_logger)
  def initialize(app, store: nil, serializer: nil, shared_cache: true, instrumenter: nil, instrument_name: EVENT_NAME, logger: nil) # rubocop:disable Metrics/ParameterLists
    super(app)

    @logger = logger
    @shared_cache = shared_cache
    @instrumenter = instrumenter
    @instrument_name = instrument_name
    @storage = Storage.new(store: store, serializer: serializer, logger: logger)
  end
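  # Because the ':store' option only needs to respond to 'read', 'write', and
  # 'delete', a plain Ruby object can stand in for an 'ActiveSupport::Cache'
  # store. A minimal sketch ('MyMemoryStore' is illustrative, not part of this
  # library):
  #
  #   class MyMemoryStore
  #     def initialize
  #       @data = {}
  #     end
  #
  #     def read(key)
  #       @data[key]
  #     end
  #
  #     def write(key, value, *)
  #       @data[key] = value
  #     end
  #
  #     def delete(key)
  #       @data.delete(key)
  #     end
  #   end
  #
  #   Faraday::HttpCache.new(app, store: MyMemoryStore.new)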
  # Internal: Instruments the request processing.
  def instrument(env)
    return unless @instrumenter

    payload = {
      env: env,
      cache_status: extract_status(env[:http_cache_trace])
    }

    @instrumenter.instrument(@instrument_name, payload)
    # DEPRECATED: Event name from the 1.1.1 release that isn't compatible
    # with the `ActiveSupport::LogSubscriber` API.
    @instrumenter.instrument('process_request.http_cache.faraday', payload)
  end
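  # When ':instrumenter' is set to 'ActiveSupport::Notifications', the payload
  # can be consumed with a regular subscriber. A sketch, assuming the default
  # instrument name 'http_cache.faraday' is in use:
  #
  #   ActiveSupport::Notifications.subscribe('http_cache.faraday') do |*args|
  #     event = ActiveSupport::Notifications::Event.new(*args)
  #     cache_status = event.payload[:cache_status] # e.g. :fresh, :miss or :invalid
  #     Rails.logger.info("HTTP cache status: #{cache_status}")
  #   end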
  # Internal: Logs the trace info about the incoming request
  # and how the middleware handled it.
  # This method does nothing if there isn't a logger present.
  def log_request
    return unless @logger

    method = @request.method.to_s.upcase
    path = @request.url.request_uri
    @logger.debug { "HTTP Cache: [#{method} #{path}] #{@trace.join(', ')}" }
  end
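  # For instance, a request that missed the cache and was then stored would be
  # logged roughly as (the path is illustrative):
  #
  #   HTTP Cache: [GET /users] miss, store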
  # Internal: Tries to locate a valid response or forwards the call to the stack.
  # * If no entry is present on the storage, the 'fetch' method will forward
  #   the call to the remaining stack and return the new response.
  # * If a fresh response is found, the middleware will abort the remaining
  #   stack calls and return the stored response back to the client.
  # * If a response is found but isn't fresh anymore, the middleware will
  #   revalidate the response back to the server.
  #
  # env - the environment 'Hash' provided from the 'Faraday' stack.
  def process(env)
    entry = @storage.read(@request)
    return fetch(env) if entry.nil?

    if entry.fresh? && !@request.no_cache?
      response = entry.to_response(env)
      trace :fresh
    else
      trace :must_revalidate
      response = validate(entry, env)
    end

    response
  end
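  # The recorded trace reflects which branch was taken. Illustrative sequences,
  # derived from the 'trace' calls in this class:
  #
  #   [:fresh]                              # served straight from the cache
  #   [:miss, :store]                       # nothing cached yet, new response stored
  #   [:must_revalidate, :valid, :store]    # stale entry revalidated with a 304
  #   [:must_revalidate, :invalid, :store]  # stale entry replaced by a fresh response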
  # Internal: Should this cache instance act like a "shared cache" according
  # to the HTTP caching rules (RFC 2616)?
  def shared_cache?
    @shared_cache
  end
  # Internal: Checks if the current request method should remove any existing
  # cache entries for the same resource.
  def should_delete?(status, method)
    UNSAFE_METHODS.include?(method) && !ERROR_STATUSES.cover?(status)
  end
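  # Illustrative results, assuming the UNSAFE_METHODS and ERROR_STATUSES
  # sketched near the top of the class:
  #
  #   should_delete?(200, :post) # => true  (successful unsafe request)
  #   should_delete?(422, :post) # => false (the write failed, keep the cache)
  #   should_delete?(200, :get)  # => false (safe method, nothing to invalidate)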
  # Internal: Stores the response into the storage.
  # If the response isn't cacheable, a trace action 'uncacheable' will be
  # recorded for logging purposes.
  #
  # response - a 'Faraday::HttpCache::Response' instance to be stored.
  def store(response)
    if shared_cache? ? response.cacheable_in_shared_cache? : response.cacheable_in_private_cache?
      trace :store
      @storage.write(@request, response)
    else
      trace :uncacheable
    end
  end
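  # For instance, under the usual HTTP caching rules a response carrying
  # 'Cache-Control: private' may be stored by a private cache but not by a
  # shared one, so with 'shared_cache: true' it would be traced as
  # 'uncacheable'. The exact checks live in 'Faraday::HttpCache::Response'.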
  # Internal: Records a traced action to be used by the logger once the
  # request/response phase is finished.
  #
  # operation - the name of the performed action, a String or Symbol.
  def trace(operation)
    @trace << operation
  end
  # Internal: Tries to validate a stored entry against its origin server
  # using the 'If-Modified-Since' and 'If-None-Match' headers with the
  # existing 'Last-Modified' and 'ETag' headers. If the new response
  # is marked as 'Not Modified', the previous stored response will be used
  # and forwarded against the Faraday stack. Otherwise, the freshly fetched
  # response will be stored (replacing the old one) and used.
  #
  # entry - a stale 'Faraday::HttpCache::Response' retrieved from the cache.
  # env - the environment 'Hash' to perform the request.
  def validate(entry, env)
    headers = env[:request_headers]
    headers['If-Modified-Since'] = entry.last_modified if entry.last_modified
    headers['If-None-Match'] = entry.etag if entry.etag

    @app.call(env).on_complete do |requested_env|
      response = Response.new(requested_env)
      if response.not_modified?
        trace :valid
        updated_response_headers = response.payload[:response_headers]

        # These headers are not allowed in 304 responses, yet some proxy
        # servers add them in. Don't override the values from the original
        # response.
        updated_response_headers.delete('Content-Type')
        updated_response_headers.delete('Content-Length')

        updated_payload = entry.payload
        updated_payload[:response_headers].update(updated_response_headers)
        requested_env.update(updated_payload)
        response = Response.new(updated_payload)
      else
        trace :invalid
      end

      store(response)
    end
  end
end