class GraphQL::Dataloader::Source
def self.batch_key_for(*batch_args, **batch_kwargs)

Returns:
- (Object)

Parameters:
- batch_args (Array)
- batch_kwargs (Hash)

def self.batch_key_for(*batch_args, **batch_kwargs)
  [*batch_args, **batch_kwargs]
end
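By default, each distinct set of constructor arguments produces its own batch key, and therefore its own source instance and batch. A subclass can override this hook to merge batches. A minimal sketch, assuming a hypothetical RecordSource whose logger: option shouldn't affect batching (the class name and keyword are illustrative, not part of this library):

class RecordSource < GraphQL::Dataloader::Source
  def initialize(model_class, logger: nil)
    @model_class = model_class
    @logger = logger
  end

  # Only the model class determines the batch, so callers passing
  # different loggers still share one fetch.
  def self.batch_key_for(model_class, logger: nil)
    [model_class]
  end

  # fetch(ids) would be implemented as in any other source (see the sketch below).
end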
def fetch(keys)

Returns:
- (Array) -- A loaded value for each of `keys`. The array must match one-for-one to the list of `keys`.

Parameters:
- keys (Array) -- keys passed to {#load}, {#load_all}, {#request}, or {#request_all}

def fetch(keys)
  # somehow retrieve these from the backend
  raise "Implement `#{self.class}#fetch(#{keys.inspect}) to return a record for each of the keys"
end
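Subclasses override `fetch` to do the actual batched lookup. A minimal sketch, assuming a hypothetical UserSource backed by an ActiveRecord-style `User.where` query (both names are illustrative, not part of this library):

class UserSource < GraphQL::Dataloader::Source
  def fetch(ids)
    records = User.where(id: ids)
    # Return one value per key, in the same order as `ids`; nil for misses.
    ids.map { |id| records.find { |record| record.id == id } }
  end
end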
def load(key)

Returns:
- (Object) -- The result from {#fetch} for `key`. If `key` hasn't been loaded yet, the Fiber will yield until it's loaded.

Parameters:
- key (Object) -- A loading key which will be passed to {#fetch} if it isn't already in the internal cache.

def load(key)
  if @results.key?(key)
    result_for(key)
  else
    @pending_keys << key
    sync
    result_for(key)
  end
end
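In a resolver, `load` is typically reached through `context.dataloader.with(...)`; the Fiber pauses here until the batch containing this key has been fetched. A sketch reusing the hypothetical UserSource above (the `author_id` field is assumed):

# Inside a field resolver:
def author
  context.dataloader.with(UserSource).load(object.author_id)
end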
def load_all(keys)

Returns:
- (Object) -- The result from {#fetch} for `keys`. If `keys` haven't been loaded yet, the Fiber will yield until they're loaded.

Parameters:
- keys (Array) -- Loading keys which will be passed to `#fetch` (or read from the internal cache).

def load_all(keys)
  if keys.any? { |k| !@results.key?(k) }
    pending_keys = keys.select { |k| !@results.key?(k) }
    @pending_keys.concat(pending_keys)
    sync
  end
  keys.map { |k| result_for(k) }
end
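`load_all` is the same pattern for a list of keys; the results come back in the same order as the keys. Continuing the hypothetical example (`member_ids` is assumed):

# Inside a field resolver:
def team_members
  context.dataloader.with(UserSource).load_all(object.member_ids)
end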
def pending?

Returns:
- (Boolean) -- True if this source has any pending requests for data.

def pending?
  !@pending_keys.empty?
end
def request(key)

Returns:
- (Dataloader::Request) -- a pending request for a value from `key`. Call `.load` on that object to wait for the result.

def request(key)
  if !@results.key?(key)
    @pending_keys << key
  end
  Dataloader::Request.new(self, key)
end
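Unlike `load`, `request` registers the key without pausing the Fiber, so several lookups can be enqueued before waiting on any of them. A sketch with two hypothetical sources, given a `dataloader` such as `context.dataloader` and assumed `user_id` / `post_id` values:

user_request = dataloader.with(UserSource).request(user_id)
post_request = dataloader.with(PostSource).request(post_id)

user = user_request.load # the Fiber yields here until the batch is fetched
post = post_request.load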
def request_all(keys)

Returns:
- (Dataloader::RequestAll) -- a pending request for the values of `keys`. Call `.load` on that object to wait for the results.

def request_all(keys)
  pending_keys = keys.select { |k| !@results.key?(k) }
  @pending_keys.concat(pending_keys)
  Dataloader::RequestAll.new(self, keys)
end
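`request_all` does the same for a whole list, returning a single object to wait on later. Continuing the hypothetical example (`CommentSource` and `comment_ids` are assumed):

comments_request = dataloader.with(CommentSource).request_all(comment_ids)
# ... enqueue other requests here ...
comments = comments_request.load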
def result_for(key)

- Api: private

Returns:
- (Object) -- The result from {#fetch} for `key`.

Parameters:
- key (Object) -- key passed to {#load} or {#load_all}

def result_for(key)
  if !@results.key?(key)
    raise GraphQL::InvariantError, <<-ERR
Fetching result for a key on #{self.class} that hasn't been loaded yet (#{key.inspect}, loaded: #{@results.keys})

This key should have been loaded already. This is a bug in GraphQL::Dataloader, please report it on GitHub: https://github.com/rmosolgo/graphql-ruby/issues/new.
ERR
  end
  result = @results[key]
  raise result if result.class <= StandardError
  result
end
def run_pending_keys

Returns:
- (void)

Other tags:
- Api: private

def run_pending_keys
  if !@fetching_keys.empty?
    @pending_keys -= @fetching_keys
  end
  return if @pending_keys.empty?
  fetch_keys = @pending_keys.uniq
  @fetching_keys.concat(fetch_keys)
  @pending_keys = []
  results = fetch(fetch_keys)
  fetch_keys.each_with_index do |key, idx|
    @results[key] = results[idx]
  end
  nil
rescue StandardError => error
  fetch_keys.each { |key| @results[key] = error }
ensure
  if fetch_keys
    @fetching_keys -= fetch_keys
  end
end
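Together, `pending?` and `run_pending_keys` are what the dataloader uses to flush batches between Fiber resumptions. A simplified, hypothetical flush step (not the actual GraphQL::Dataloader run loop) might look like:

# Simplified, hypothetical flush step (not the real implementation):
def flush_sources(sources)
  # Fetching one source may enqueue keys on another, so loop until quiet.
  while sources.any?(&:pending?)
    sources.each { |source| source.run_pending_keys if source.pending? }
  end
end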
def setup(dataloader)

- Api: private

def setup(dataloader)
  # These keys have been requested but haven't been fetched yet
  @pending_keys = []
  # These keys have been passed to `fetch` but haven't been finished yet
  @fetching_keys = []
  # { key => result }
  @results = {}
  @dataloader = dataloader
end
def sync

Returns:
- (void)

def sync
  pending_keys = @pending_keys.dup
  @dataloader.yield
  iterations = 0
  while pending_keys.any? { |k| !@results.key?(k) }
    iterations += 1
    if iterations > 1000
      raise "#{self.class}#sync tried 1000 times to load pending keys (#{pending_keys}), but they still weren't loaded. There is likely a circular dependency."
    end
    @dataloader.yield
  end
  nil
end