class ActiveRecord::ConnectionAdapters::SchemaCache
  def self._load_from(filename) # :nodoc:
    return unless File.file?(filename)

    read(filename) do |file|
      if filename.include?(".dump")
        Marshal.load(file)
      else
        if YAML.respond_to?(:unsafe_load)
          YAML.unsafe_load(file)
        else
          YAML.load(file)
        end
      end
    end
  end
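  # A minimal loading sketch, assuming a cache previously written with
  # #dump_to (the path is hypothetical):
  #
  #   cache = ActiveRecord::ConnectionAdapters::SchemaCache._load_from("db/schema_cache.yml.gz")
  #   # => a SchemaCache instance, or nil when the file does not exist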
  def self.read(filename, &block)
    if File.extname(filename) == ".gz"
      Zlib::GzipReader.open(filename) { |gz| yield gz.read }
    else
      yield File.read(filename)
    end
  end
  def add(pool, table_name)
    pool.with_connection do
      if data_source_exists?(pool, table_name)
        primary_keys(pool, table_name)
        columns(pool, table_name)
        columns_hash(pool, table_name)
        indexes(pool, table_name)
      end
    end
  end
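  # A warming sketch, assuming a booted Rails app ("users" is a
  # hypothetical table name):
  #
  #   cache = ActiveRecord::ConnectionAdapters::SchemaCache.new
  #   pool  = ActiveRecord::Base.connection_pool
  #
  #   cache.add(pool, "users") # fills primary key, columns, columns_hash, and indexes
  #   cache.cached?("users")   # => true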
  def add_all(pool) # :nodoc:
    pool.with_connection do
      tables_to_cache(pool).each do |table|
        add(pool, table)
      end

      version(pool)
    end
  end
  def cached?(table_name)
    @columns.key?(table_name)
  end
  def clear_data_source_cache!(_connection, name)
    @columns.delete name
    @columns_hash.delete name
    @primary_keys.delete name
    @data_sources.delete name
    @indexes.delete name
  end
  def columns(pool, table_name)
    if ignored_table?(table_name)
      raise ActiveRecord::StatementInvalid.new("Table '#{table_name}' doesn't exist", connection_pool: pool)
    end

    @columns.fetch(table_name) do
      pool.with_connection do |connection|
        @columns[deep_deduplicate(table_name)] = deep_deduplicate(connection.columns(table_name))
      end
    end
  end
  # Get the columns for a table as a hash, keyed by column name.
  def columns_hash(pool, table_name)
    @columns_hash.fetch(table_name) do
      @columns_hash[deep_deduplicate(table_name)] = columns(pool, table_name).index_by(&:name).freeze
    end
  end
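  # A lookup sketch ("users" and "id" are hypothetical names):
  #
  #   hash = cache.columns_hash(pool, "users")
  #   hash["id"]   # => the Column object for users.id
  #   hash.frozen? # => true, cached hashes are frozen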
  # Checks whether the columns hash is already cached for a table.
  def columns_hash?(_pool, table_name)
    @columns_hash.key?(table_name)
  end
  def data_source_exists?(pool, name)
    return if ignored_table?(name)

    if @data_sources.empty?
      tables_to_cache(pool).each do |source|
        @data_sources[source] = true
      end
    end

    return @data_sources[name] if @data_sources.key? name

    @data_sources[deep_deduplicate(name)] = pool.with_connection do |connection|
      connection.data_source_exists?(name)
    end
  end
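  # A sketch of the memoization above (table names are hypothetical):
  #
  #   cache.data_source_exists?(pool, "users")   # first call bulk-loads every table name
  #   cache.data_source_exists?(pool, "missing") # => false, checked individually and memoized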
  def deep_deduplicate(value)
    case value
    when Hash
      value.transform_keys { |k| deep_deduplicate(k) }.transform_values { |v| deep_deduplicate(v) }
    when Array
      value.map { |i| deep_deduplicate(i) }
    when String, Deduplicable
      -value
    else
      value
    end
  end
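  # The unary minus is what deduplicates: for a String, -value returns a
  # frozen copy interned in Ruby's fstring table, and Active Record's
  # Deduplicable module defines -@ with the same contract. For example:
  #
  #   a = "users".dup
  #   b = "users".dup
  #   a.equal?(b)     # => false, two separate heap objects
  #   (-a).equal?(-b) # => true, both resolve to one frozen string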
  def derive_columns_hash_and_deduplicate_values
    @columns = deep_deduplicate(@columns)
    @columns_hash = @columns.transform_values { |columns| columns.index_by(&:name) }
    @primary_keys = deep_deduplicate(@primary_keys)
    @data_sources = deep_deduplicate(@data_sources)
    @indexes = deep_deduplicate(@indexes)
  end
  def dump_to(filename)
    open(filename) { |f|
      if filename.include?(".dump")
        f.write(Marshal.dump(self))
      else
        f.write(YAML.dump(self))
      end
    }
  end
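  # The file name picks the serialization format (paths are hypothetical):
  #
  #   cache.dump_to("db/schema_cache.yml")     # YAML
  #   cache.dump_to("db/schema_cache.yml.gz")  # YAML, gzip-compressed by #open
  #   cache.dump_to("db/schema_cache.dump")    # Marshal binary format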
  def encode_with(coder) # :nodoc:
    coder["columns"] = @columns.sort.to_h
    coder["primary_keys"] = @primary_keys.sort.to_h
    coder["data_sources"] = @data_sources.sort.to_h
    coder["indexes"] = @indexes.sort.to_h
    coder["version"] = @version
  end
  def ignored_table?(table_name)
    ActiveRecord.schema_cache_ignored_table?(table_name)
  end
  def indexes(pool, table_name)
    @indexes.fetch(table_name) do
      pool.with_connection do |connection|
        if data_source_exists?(pool, table_name)
          @indexes[deep_deduplicate(table_name)] = deep_deduplicate(connection.indexes(table_name))
        else
          []
        end
      end
    end
  end
  def init_with(coder) # :nodoc:
    @columns = coder["columns"]
    @columns_hash = coder["columns_hash"]
    @primary_keys = coder["primary_keys"]
    @data_sources = coder["data_sources"]
    @indexes = coder["indexes"] || {}
    @version = coder["version"]

    unless coder["deduplicated"]
      derive_columns_hash_and_deduplicate_values
    end
  end
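  # Psych drives the YAML round trip through these two hooks:
  #
  #   yaml = YAML.dump(cache)       # calls #encode_with
  #   copy = YAML.unsafe_load(yaml) # calls #init_with on a fresh instance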
  def initialize # :nodoc:
    @columns = {}
    @columns_hash = {}
    @primary_keys = {}
    @data_sources = {}
    @indexes = {}
    @version = nil
  end
  def initialize_dup(other) # :nodoc:
    super
    @columns = @columns.dup
    @columns_hash = @columns_hash.dup
    @primary_keys = @primary_keys.dup
    @data_sources = @data_sources.dup
    @indexes = @indexes.dup
  end
  def marshal_dump # :nodoc:
    # The empty hash stands in for @columns_hash, which #marshal_load
    # rederives rather than deserializes.
    [@version, @columns, {}, @primary_keys, @data_sources, @indexes]
  end
  def marshal_load(array) # :nodoc:
    @version, @columns, _columns_hash, @primary_keys, @data_sources, @indexes, _database_version = array
    @indexes ||= {}

    derive_columns_hash_and_deduplicate_values
  end
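  # A Marshal round trip rederives @columns_hash instead of reading it back:
  #
  #   blob = Marshal.dump(cache)
  #   copy = Marshal.load(blob) # runs #marshal_load, then rebuilds columns_hash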
  def open(filename)
    FileUtils.mkdir_p(File.dirname(filename))

    File.atomic_write(filename) do |file|
      if File.extname(filename) == ".gz"
        zipper = Zlib::GzipWriter.new file
        zipper.mtime = 0 # a fixed mtime keeps the gzip output byte-identical across dumps
        yield zipper
        zipper.flush
        zipper.close
      else
        yield file
      end
    end
  end
  def primary_keys(pool, table_name)
    @primary_keys.fetch(table_name) do
      pool.with_connection do |connection|
        if data_source_exists?(pool, table_name)
          @primary_keys[deep_deduplicate(table_name)] = deep_deduplicate(connection.primary_key(table_name))
        end
      end
    end
  end
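  # Like #columns and #indexes, this memoizes through the pool on first
  # use ("users" is a hypothetical table whose primary key is "id"):
  #
  #   cache.primary_keys(pool, "users") # => "id", hits the database once
  #   cache.primary_keys(pool, "users") # served from @primary_keys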
  def schema_version
    @version
  end
  def size
    [@columns, @columns_hash, @primary_keys, @data_sources].sum(&:size)
  end
  def tables_to_cache(pool)
    pool.with_connection do |connection|
      connection.data_sources.reject do |table|
        ignored_table?(table)
      end
    end
  end
  # Lazily reads (and memoizes) the schema version through the pool;
  # #schema_version returns the memoized value without touching the database.
  def version(pool)
    @version ||= pool.with_connection(&:schema_version)
  end
end