class ActiveSupport::Cache::MemCacheStore

A cache store implementation which stores data in Memcached:
https://memcached.org

This is currently the most popular cache store for production websites.

Special features:
- Clustering and load balancing. One can specify multiple memcached servers,
  and MemCacheStore will load balance between all available servers. If a
  server goes down, then MemCacheStore will ignore it until it comes back up.

MemCacheStore implements the Strategy::LocalCache strategy which implements
an in-memory cache inside of a block.
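For orientation, a minimal usage sketch (the host names are placeholders and a
reachable memcached instance plus the dalli gem are assumed); in a Rails app the
store is usually selected through the cache_store setting:

# config/environments/production.rb (illustrative hosts)
config.cache_store = :mem_cache_store, "cache-1.example.com", "cache-2.example.com"

# Or instantiated directly:
cache = ActiveSupport::Cache::MemCacheStore.new("cache-1.example.com", "cache-2.example.com")
cache.write("greeting", "hello")
cache.read("greeting") # => "hello"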

def self.build_mem_cache(*addresses) # :nodoc:

Creates a new Dalli::Client instance with specified addresses and options.

If no addresses are provided, we give nil to Dalli::Client, so it uses its fallbacks:
- ENV["MEMCACHE_SERVERS"] (if defined)
- "127.0.0.1:11211" (otherwise)

ActiveSupport::Cache::MemCacheStore.build_mem_cache
# => #<Dalli::Client ...>
ActiveSupport::Cache::MemCacheStore.build_mem_cache('localhost:10290')
# => #<Dalli::Client ...>
def self.build_mem_cache(*addresses) # :nodoc:
  addresses = addresses.flatten
  options = addresses.extract_options!
  addresses = nil if addresses.compact.empty?
  pool_options = retrieve_pool_options(options)
  if pool_options.empty?
    Dalli::Client.new(addresses, options)
  else
    ensure_connection_pool_added!
    ConnectionPool.new(pool_options) { Dalli::Client.new(addresses, options.merge(threadsafe: false)) }
  end
end
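A brief sketch of the fallback behavior described above (the server list below is
hypothetical):

# With no addresses, Dalli::Client receives nil and falls back to
# ENV["MEMCACHE_SERVERS"], or to "127.0.0.1:11211" if that is unset.
ENV["MEMCACHE_SERVERS"] = "cache-1.internal:11211,cache-2.internal:11211"
ActiveSupport::Cache::MemCacheStore.build_mem_cache
# => a Dalli::Client pointed at the two servers above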

def self.supports_cache_versioning?

Advertise cache versioning support.
def self.supports_cache_versioning?
  true
end
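Because versioning is supported, a version mismatch is treated as a cache miss rather
than an error; a brief sketch (assumes a reachable local memcached):

cache = ActiveSupport::Cache::MemCacheStore.new("localhost:11211")
cache.write("post/1", "draft", version: 1)
cache.read("post/1", version: 1) # => "draft"
cache.read("post/1", version: 2) # => nil (mismatched version reads as a miss)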

def clear(options = nil)

Clear the entire cache on all memcached servers. This method should
be used with care when a shared cache is being used.
def clear(options = nil)
  rescue_error_with(nil) { @data.with { |c| c.flush_all } }
end

def decrement(name, amount = 1, options = nil)

Decrement a cached value. This method uses the memcached decr atomic
operator and can only be used on values written with the +:raw+ option.
Calling it on a value not stored with +:raw+ will initialize that value
to zero.
def decrement(name, amount = 1, options = nil)
  options = merged_options(options)
  instrument(:decrement, name, amount: amount) do
    rescue_error_with nil do
      @data.with { |c| c.decr(normalize_key(name, options), amount, options[:expires_in]) }
    end
  end
end
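A usage sketch (assumes a reachable local memcached; the key name is arbitrary):

cache = ActiveSupport::Cache::MemCacheStore.new("localhost:11211")
cache.write("slots_left", 10, raw: true) # decr only works on raw values
cache.decrement("slots_left", 3)         # => 7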

def default_coder

def default_coder
  Coders[Cache.format_version]
end

def delete_entry(key, **options)

Delete an entry from the cache.
def delete_entry(key, **options)
  rescue_error_with(false) { @data.with { |c| c.delete(key) } }
end

def deserialize_entry(payload, raw: false, **)

def deserialize_entry(payload, raw: false, **)
  if payload && raw
    Entry.new(payload)
  else
    super(payload)
  end
end

def increment(name, amount = 1, options = nil)

Increment a cached value. This method uses the memcached incr atomic
operator and can only be used on values written with the +:raw+ option.
Calling it on a value not stored with +:raw+ will initialize that value
to zero.
def increment(name, amount = 1, options = nil)
  options = merged_options(options)
  instrument(:increment, name, amount: amount) do
    rescue_error_with nil do
      @data.with { |c| c.incr(normalize_key(name, options), amount, options[:expires_in]) }
    end
  end
end
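A usage sketch mirroring decrement (assumes a reachable local memcached):

cache = ActiveSupport::Cache::MemCacheStore.new("localhost:11211")
cache.write("page_views", 0, raw: true) # incr requires a raw value
cache.increment("page_views")           # => 1
cache.increment("page_views", 5)        # => 6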

def initialize(*addresses)

Creates a new MemCacheStore object, with the given memcached server
addresses. Each address is either a host name, or a host-with-port string
in the form of "host_name:port". For example:

ActiveSupport::Cache::MemCacheStore.new("localhost", "server-downstairs.localnetwork:8229")

If no addresses are provided, but ENV['MEMCACHE_SERVERS'] is defined, it will be used instead. Otherwise,
MemCacheStore will connect to localhost:11211 (the default memcached port).
def initialize(*addresses)
  addresses = addresses.flatten
  options = addresses.extract_options!
  if options.key?(:cache_nils)
    options[:skip_nil] = !options.delete(:cache_nils)
  end
  super(options)
  unless [String, Dalli::Client, NilClass].include?(addresses.first.class)
    raise ArgumentError, "First argument must be an empty array, an array of hosts or a Dalli::Client instance."
  end
  if addresses.first.is_a?(Dalli::Client)
    @data = addresses.first
  else
    mem_cache_options = options.dup
    # The value "compress: false" prevents duplicate compression within Dalli.
    mem_cache_options[:compress] = false
    (UNIVERSAL_OPTIONS - %i(compress)).each { |name| mem_cache_options.delete(name) }
    @data = self.class.build_mem_cache(*(addresses + [mem_cache_options]))
  end
end
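A sketch of the cache_nils option handled above; it is inverted into the store's
skip_nil option, so nil block results are cached instead of being recomputed:

cache = ActiveSupport::Cache::MemCacheStore.new("localhost:11211", cache_nils: true)
cache.fetch("missing_record") { nil }                # the nil result is stored
cache.fetch("missing_record") { raise "not re-run" } # => nil, block is not executed again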

def normalize_key(key, options)

Memcache keys are binaries. So we need to force their encoding to binary
before applying the regular expression to ensure we are escaping all
characters properly.
def normalize_key(key, options)
  key = super
  if key
    key = key.dup.force_encoding(Encoding::ASCII_8BIT)
    key = key.gsub(ESCAPE_KEY_CHARS) { |match| "%#{match.getbyte(0).to_s(16).upcase}" }
    key = "#{key[0, 212]}:hash:#{ActiveSupport::Digest.hexdigest(key)}" if key.size > 250
  end
  key
end

def read_entry(key, **options)

Read an entry from the cache.
def read_entry(key, **options)
  deserialize_entry(read_serialized_entry(key, **options), **options)
end

def read_multi_entries(names, **options)

Reads multiple entries from the cache implementation.
def read_multi_entries(names, **options)
  keys_to_names = names.index_by { |name| normalize_key(name, options) }
  raw_values = @data.with { |c| c.get_multi(keys_to_names.keys) }
  values = {}
  raw_values.each do |key, value|
    entry = deserialize_entry(value, raw: options[:raw])
    unless entry.expired? || entry.mismatched?(normalize_version(keys_to_names[key], options))
      values[keys_to_names[key]] = entry.value
    end
  end
  values
end
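The public read_multi API builds on this and fetches several keys in a single
get_multi round trip; a brief sketch:

cache = ActiveSupport::Cache::MemCacheStore.new("localhost:11211")
cache.write("color", "red")
cache.write("shape", "circle")
cache.read_multi("color", "shape") # => { "color" => "red", "shape" => "circle" }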

def read_serialized_entry(key, **options)

def read_serialized_entry(key, **options)
  rescue_error_with(nil) do
    @data.with { |c| c.get(key, options) }
  end
end

def rescue_error_with(fallback)

def rescue_error_with(fallback)
  yield
rescue Dalli::DalliError => error
  ActiveSupport.error_reporter&.report(error, handled: true, severity: :warning)
  logger.error("DalliError (#{error}): #{error.message}") if logger
  fallback
end

def serialize_entry(entry, raw: false, **options)

def serialize_entry(entry, raw: false, **options)
  if raw
    entry.value.to_s
  else
    super(entry, raw: raw, **options)
  end
end

def stats

Get the statistics from the memcached servers.
def stats
  @data.with { |c| c.stats }
end
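A brief sketch; Dalli returns a hash keyed by server with raw stat strings (the
exact fields depend on the memcached version):

cache = ActiveSupport::Cache::MemCacheStore.new("localhost:11211")
cache.stats
# => { "localhost:11211" => { "curr_items" => "0", "uptime" => "42", ... } }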

def write_entry(key, entry, **options)

Write an entry to the cache.
def write_entry(key, entry, **options)
  write_serialized_entry(key, serialize_entry(entry, **options), **options)
end

def write_serialized_entry(key, payload, **options)

def write_serialized_entry(key, payload, **options)
  method = options[:unless_exist] ? :add : :set
  expires_in = options[:expires_in].to_i
  if options[:race_condition_ttl] && expires_in > 0 && !options[:raw]
    # Set the memcache expire a few minutes in the future to support race condition ttls on read
    expires_in += 5.minutes
  end
  rescue_error_with false do
    # Don't pass compress option to Dalli since we are already dealing with compression.
    options.delete(:compress)
    @data.with { |c| c.send(method, key, payload, expires_in, **options) }
  end
end
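A usage sketch of the options handled above; unless_exist maps to memcached's add
command and expires_in to the memcached TTL:

cache = ActiveSupport::Cache::MemCacheStore.new("localhost:11211")
cache.write("session_token", "abc123", expires_in: 300, unless_exist: true) # written via add
cache.write("session_token", "xyz789", unless_exist: true)                  # not stored: key exists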