lib/litedb/connection.rb

# frozen_string_literal: true

require "sqlite3"
require "litescheduler"

module Litedb
  class Connection < SQLite3::Database
    DEFAULT_FILE = ":memory:"
    DEFAULT_OPTIONS = {
      synchronous: :NORMAL,
      mmap_size: 32 * 1024, # 32 kilobytes
      journal_size_limit: 64 * 1024 * 1024 # 64 megabytes
    }.freeze

    attr_reader :config, :statements

    def initialize(file = nil, options = {}, zvfs = nil)
      @scheduler = Litescheduler.instance
      file ||= DEFAULT_FILE
      options = options.transform_keys { |key|
        begin
          key.to_sym
        rescue
          key
        end
      }
      @config = DEFAULT_OPTIONS.merge(options)

      super(file, @config, zvfs)

      set_pragmas
      run_migrations
      prepare_statements
    end
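
    # A minimal usage sketch (the database file name, table, and SQL below are
    # illustrative, not part of this class):
    #
    #   db = Litedb::Connection.new(
    #     "app.sqlite3",
    #     synchronous: :NORMAL,
    #     migrations: ["CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY, name TEXT)"],
    #     statements: {insert_user: "INSERT INTO users (name) VALUES (?)"}
    #   )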

    def run_statement(statement, *args)
      statements[statement].execute!(*args)
    end
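
    # For example, assuming the hypothetical :insert_user statement prepared via
    # the `statements:` option above:
    #
    #   db.run_statement(:insert_user, "Ada")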

    private

    def set_pragmas
      # set a custom busy handler instead of relying on `busy_timeout`
      # when the database is locked, either switch to another execution context or sleep briefly and retry
      # https://www.sqlite.org/pragma.html#pragma_busy_timeout
      busy_handler { @scheduler.switch || sleep(rand * 0.001) }
      # Journal mode WAL allows for greater concurrency (many readers + one writer)
      # https://www.sqlite.org/pragma.html#pragma_journal_mode
      self.journal_mode = :WAL
      # level of database durability
      # 2 = "FULL" (sync on every commit)
      # 1 = "NORMAL" (sync only at critical moments; in WAL mode, at checkpoints)
      # 0 = "OFF" (don't sync, leave it to the OS)
      # https://www.sqlite.org/pragma.html#pragma_synchronous
      self.synchronous = @config[:synchronous]
      # memory-map the database file so connections across processes share pages through the OS page cache
      # https://www.sqlite.org/pragma.html#pragma_mmap_size
      # https://www.sqlite.org/mmap.html
      self.mmap_size = @config[:mmap_size]
      # cap the WAL file size so it cannot grow without bound (an oversized WAL also degrades read performance)
      # https://www.sqlite.org/pragma.html#pragma_journal_size_limit
      self.journal_size_limit = @config[:journal_size_limit]
    end
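
    # All pragma values above come from @config, so the defaults can be overridden
    # per connection, e.g. (values are illustrative only):
    #
    #   Litedb::Connection.new("app.sqlite3", synchronous: :FULL, mmap_size: 128 * 1024 * 1024)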

    def run_migrations
      return if @config[:migrations].nil?

      migrations = if @config[:migrations].is_a?(Hash)
        @config[:migrations].values
      elsif @config[:migrations].is_a?(Array)
        @config[:migrations]
      else
        raise Error.new("`migrations` option must be either a Hash or an Array")
      end

      transaction(:immediate) do
        migrations.each do |sql|
          execute(clean_sql(sql))
        end
      end
    end
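
    # The `migrations:` option accepts either a Hash (only the values, SQL strings,
    # are executed) or an Array of SQL strings, e.g. (schema is illustrative only):
    #
    #   migrations: {
    #     create_users: "CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY, name TEXT)"
    #   }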

    def prepare_statements
      @statements = {}

      return if @config[:statements].nil?

      @config[:statements].each do |key, sql|
        statements[key.to_sym] = prepare(clean_sql(sql))
      end
    end

    def clean_sql(sql)
      sql.gsub(/[[:space:]]+/, " ").strip
    end
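
    # For example, a multi-line SQL string is collapsed to a single line:
    #
    #   clean_sql("SELECT *\n  FROM users\n")  # => "SELECT * FROM users"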
  end
end