class ActiveSupport::Cache::MemCacheStore
# A cache store implementation which stores data in Memcached:
#
# This is currently the most popular cache store for production websites.
#
# Special features:
# - Clustering and load balancing. One can specify multiple memcached servers,
#   and MemCacheStore will load balance between all available servers. If a
#   server goes down, then MemCacheStore will ignore it until it comes back up.
#
# MemCacheStore implements the Strategy::LocalCache strategy which implements
# an in-memory cache inside of a block.
# Creates a new Dalli::Client instance with specified addresses and options.
# By default address is equal localhost:11211.
#
#   ActiveSupport::Cache::MemCacheStore.build_mem_cache
#     # => #<Dalli::Client ...>
#   ActiveSupport::Cache::MemCacheStore.build_mem_cache('localhost:10290')
#     # => #<Dalli::Client ...>
def self.build_mem_cache(*addresses) # :nodoc:
  servers = addresses.flatten
  options = servers.extract_options!
  servers = ["localhost:11211"] if servers.empty?

  pool_options = retrieve_pool_options(options)
  return Dalli::Client.new(servers, options) if pool_options.empty?

  ensure_connection_pool_added!
  # Dalli's own locking is redundant inside a ConnectionPool, so disable it.
  ConnectionPool.new(pool_options) do
    Dalli::Client.new(servers, options.merge(threadsafe: false))
  end
end
# Advertise cache versioning support so callers can embed a version in
# cache entries instead of in the key.
def self.supports_cache_versioning?
  true
end
# Clear the entire cache on all memcached servers. This method should
# be used with care when shared cache is being used. +options+ is accepted
# for interface compatibility and otherwise ignored.
def clear(options = nil)
  rescue_error_with(nil) do
    @data.with { |connection| connection.flush_all }
  end
end
# Decrement a cached value. This method uses the memcached decr atomic
# operator and can only be used on values written with the :raw option.
# Calling it on a value not stored with :raw will initialize that value
# to zero.
def decrement(name, amount = 1, options = nil)
  options = merged_options(options)
  instrument(:decrement, name, amount: amount) do
    rescue_error_with(nil) do
      key = normalize_key(name, options)
      @data.with { |connection| connection.decr(key, amount, options[:expires_in]) }
    end
  end
end
# Deletes +key+ from memcached, returning false when the server errors out.
def delete_entry(key, options)
  rescue_error_with(false) do
    @data.with { |connection| connection.delete(key) }
  end
end
# Converts a raw memcached payload back into an Entry. Values written with
# the :raw option were stored as bare strings, so anything that fails to
# unmarshal is wrapped in a fresh Entry as-is. Returns nil for a nil payload.
#
# NOTE(security): Marshal.load must never be fed data from an untrusted
# source — this assumes the memcached servers themselves are trusted.
def deserialize_entry(raw_value)
  return unless raw_value

  # Explicit rescue instead of the inline `rescue` modifier, which silently
  # swallowed every StandardError; behavior is preserved but intent is visible.
  entry =
    begin
      Marshal.load(raw_value)
    rescue StandardError
      raw_value
    end
  entry.is_a?(Entry) ? entry : Entry.new(entry)
end
# Increment a cached value. This method uses the memcached incr atomic
# operator and can only be used on values written with the :raw option.
# Calling it on a value not stored with :raw will initialize that value
# to zero.
def increment(name, amount = 1, options = nil)
  options = merged_options(options)
  instrument(:increment, name, amount: amount) do
    rescue_error_with(nil) do
      key = normalize_key(name, options)
      @data.with { |connection| connection.incr(key, amount, options[:expires_in]) }
    end
  end
end
# Creates a new MemCacheStore object, with the given memcached server
# addresses. Each address is either a host name, or a host-with-port string
# in the form of "host_name:port". For example:
#
#   ActiveSupport::Cache::MemCacheStore.new("localhost", "server-downstairs.localnetwork:8229")
#
# If no addresses are specified, then MemCacheStore will connect to
# localhost port 11211 (the default memcached port). An already-built
# Dalli::Client may also be passed as the sole argument.
def initialize(*addresses)
  addresses = addresses.flatten
  options = addresses.extract_options!
  super(options)

  client_or_host = addresses.first
  unless [String, Dalli::Client, NilClass].include?(client_or_host.class)
    raise ArgumentError, "First argument must be an empty array, an array of hosts or a Dalli::Client instance."
  end

  @data =
    if client_or_host.is_a?(Dalli::Client)
      client_or_host
    else
      # Strip cache-framework options before handing the rest to Dalli.
      mem_cache_options = options.dup
      UNIVERSAL_OPTIONS.each { |name| mem_cache_options.delete(name) }
      self.class.build_mem_cache(*(addresses + [mem_cache_options]))
    end
end
# Memcache keys are binaries. So we need to force their encoding to binary
# before applying the regular expression to ensure we are escaping all
# characters properly. Keys longer than 250 bytes are truncated and
# suffixed with an MD5 digest to stay within memcached's key-size limit.
def normalize_key(key, options)
  binary_key = super.dup.force_encoding(Encoding::ASCII_8BIT)
  escaped = binary_key.gsub(ESCAPE_KEY_CHARS) { |match| "%#{match.getbyte(0).to_s(16).upcase}" }
  if escaped.size > 250
    "#{escaped[0, 213]}:md5:#{ActiveSupport::Digest.hexdigest(escaped)}"
  else
    escaped
  end
end
# Fetches +key+ from memcached and deserializes it into an Entry.
# Returns nil when the server errors out.
def read_entry(key, options)
  rescue_error_with(nil) do
    raw_value = @data.with { |connection| connection.get(key, options) }
    deserialize_entry(raw_value)
  end
end
# Reads several entries at once via get_multi. Returns a hash mapping each
# requested name to its current value; expired or version-mismatched
# entries are omitted.
def read_multi_entries(names, options)
  keys_to_names = names.each_with_object({}) do |name, mapping|
    mapping[normalize_key(name, options)] = name
  end
  raw_values = @data.with { |connection| connection.get_multi(keys_to_names.keys) }

  raw_values.each_with_object({}) do |(key, raw_value), values|
    entry = deserialize_entry(raw_value)
    name = keys_to_names[key]
    next if entry.expired? || entry.mismatched?(normalize_version(name, options))
    values[name] = entry.value
  end
end
# Runs the block, returning +fallback+ (after logging) when the memcached
# client raises a Dalli::DalliError. All other errors propagate.
def rescue_error_with(fallback)
  yield
rescue Dalli::DalliError => error
  logger.error("DalliError (#{error}): #{error.message}") if logger
  fallback
end
# Returns the per-server statistics reported by memcached.
def stats
  @data.with(&:stats)
end
# Writes +entry+ under +key+. With :unless_exist, uses memcached's add so
# an existing key is left untouched; with :raw, stores the bare string
# value instead of a marshaled Entry. Returns false when the server
# errors out.
def write_entry(key, entry, options)
  # Normalize nil options up front: the original guarded only the first
  # read (`options && options[:unless_exist]`) and would raise
  # NoMethodError on the later `options[:raw]` / `options[:expires_in]`.
  options ||= {}
  method = options[:unless_exist] ? :add : :set
  value = options[:raw] ? entry.value.to_s : entry
  expires_in = options[:expires_in].to_i
  if expires_in > 0 && !options[:raw]
    # Set the memcache expire a few minutes in the future to support race condition ttls on read
    expires_in += 5.minutes
  end
  rescue_error_with(false) do
    @data.with { |connection| connection.send(method, key, value, expires_in, options) }
  end
end