class GraphQL::Dataloader::Source
def self.batch_key_for(*batch_args, **batch_kwargs)

Returns:
- (Object) -- A key for grouping loads into batches. Sources obtained via `dataloader.with(source_class, *batch_args, **batch_kwargs)` share one instance (and its cache) when their batch keys are equal.

Parameters:
- batch_args (Array)
- batch_kwargs (Hash)

def self.batch_key_for(*batch_args, **batch_kwargs)
  [*batch_args, **batch_kwargs]
end
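By default, every batch argument becomes part of the key, so differing arguments produce separate batches. As a sketch (the `Sources::Record` source and its `logger:` option are hypothetical, not part of the library), a subclass could override this so an auxiliary option doesn't split batching:

class Sources::Record < GraphQL::Dataloader::Source
  def initialize(model_class, logger: nil)
    @model_class = model_class
    @logger = logger
  end

  # Group batches by model class only; passing a different `logger:`
  # shouldn't create a separate source instance (and a separate fetch).
  def self.batch_key_for(*batch_args, **_batch_kwargs)
    batch_args
  end
end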
def clear_cache

Returns:
- (void)

def clear_cache
  @results.clear
  nil
end
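Clearing the cache can be useful after a mutation changes data that was already loaded. A sketch, reusing the hypothetical `Sources::Record` source and an ActiveRecord-style model:

def resolve(post_id:)
  source = dataloader.with(Sources::Record, Post)
  post = source.load(post_id)
  post.update!(title: post.title.upcase)
  # Drop stale cached results so later loads in this operation refetch
  source.clear_cache
  { post: source.load(post_id) }
end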
def fetch(keys)

Returns:
- (Array) -- A loaded value for each of `keys`. The array must match one-for-one to the list of `keys`.

Parameters:
- keys (Array) -- keys passed to {#load}, {#load_all}, {#request}, or {#request_all}

def fetch(keys)
  # somehow retrieve these from the backend
  raise "Implement `#{self.class}#fetch(#{keys.inspect})` to return a record for each of the keys"
end
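Continuing the hypothetical `Sources::Record` sketch from above, an ActiveRecord-backed implementation might look like this:

class Sources::Record < GraphQL::Dataloader::Source
  def initialize(model_class)
    @model_class = model_class
  end

  def fetch(ids)
    records = @model_class.where(id: ids)
    # The returned array must line up with `ids` one-for-one;
    # use `nil` for any id that wasn't found.
    ids.map { |id| records.find { |record| record.id == id } }
  end
end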
def load(value)

Returns:
- (Object) -- The result from {#fetch} for `value`. If `value` hasn't been loaded yet, the Fiber will yield until it's loaded.

Parameters:
- value (Object) -- A loading value which will be passed to {#fetch} if it isn't already in the internal cache.

def load(value)
  result_key = result_key_for(value)
  if @results.key?(result_key)
    result_for(result_key)
  else
    @pending[result_key] ||= value
    sync([result_key])
    result_for(result_key)
  end
end
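In a resolver, `load` is usually reached through `dataloader.with(...)`. A sketch, assuming the hypothetical `Sources::Record` source and field types from above:

field :author, Types::User, null: true

def author
  # Pauses this Fiber; `fetch` runs once for all author ids
  # requested anywhere in the operation.
  dataloader.with(Sources::Record, User).load(object.author_id)
end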
def load_all(values)

Returns:
- (Array) -- The results from {#fetch} for `values`. If `values` haven't been loaded yet, the Fiber will yield until they're loaded.

Parameters:
- values (Array) -- Loading values which will be passed to {#fetch} (or read from the internal cache).

def load_all(values)
  result_keys = []
  pending_keys = []
  values.each { |v|
    k = result_key_for(v)
    result_keys << k
    if !@results.key?(k)
      @pending[k] ||= v
      pending_keys << k
    end
  }
  if !pending_keys.empty?
    sync(pending_keys)
  end
  result_keys.map { |k| result_for(k) }
end
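For example, with the hypothetical `Sources::Record` source:

def liked_by
  # One fetch for all of this post's liker ids (minus any already cached)
  dataloader.with(Sources::Record, User).load_all(object.liker_ids)
end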
def merge(new_results)

Returns:
- (void)

Parameters:
- new_results (Hash) -- key-value pairs to cache in this source

def merge(new_results)
  new_results.each do |new_k, new_v|
    key = result_key_for(new_k)
    @results[key] = new_v
  end
  nil
end
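Merging can prime the cache with records that were loaded some other way. A sketch inside a resolver, again using the hypothetical `Sources::Record` source:

featured = Post.where(featured: true).to_a
source = dataloader.with(Sources::Record, Post)
# Cache each post under its id so later `load(id)` calls skip the fetch
source.merge(featured.to_h { |post| [post.id, post] })
source.load(featured.first.id) # served from cache, no new fetch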
def pending?

Returns:
- (Boolean) -- True if this source has any pending requests for data.

def pending?
  !@pending.empty?
end
def request(value)

Returns:
- (Dataloader::Request) -- a pending request for the result of `value`. Call `.load` on that object to wait for the result.

def request(value)
  res_key = result_key_for(value)
  if !@results.key?(res_key)
    @pending[res_key] ||= value
  end
  Dataloader::Request.new(self, value)
end
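Unlike {#load}, `request` registers the value without pausing the Fiber, so several requests can be queued before any of them blocks. A sketch with the hypothetical `Sources::Record` source:

source = dataloader.with(Sources::Record, User)
req1 = source.request(1)
req2 = source.request(2)
user1 = req1.load # a single fetch([1, 2]) runs here
user2 = req2.load # already loaded; served from the cache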
def request_all(values)

Returns:
- (Dataloader::RequestAll) -- a pending request for the values in `values`. Call `.load` on that object to wait for the results.

def request_all(values)
  values.each do |v|
    res_key = result_key_for(v)
    if !@results.key?(res_key)
      @pending[res_key] ||= v
    end
  end
  Dataloader::RequestAll.new(self, values)
end
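For example (hypothetical source as above):

req = dataloader.with(Sources::Record, User).request_all([1, 2, 3])
# ...register other loads here without blocking...
users = req.load # waits on fetch([1, 2, 3]) unless already cached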
def result_for(key)

Other tags:
- Api: private

Returns:
- (Object) -- The result from {#fetch} for `key`.

Parameters:
- key (Object) -- key passed to {#load} or {#load_all}

def result_for(key)
  if !@results.key?(key)
    raise GraphQL::InvariantError, <<-ERR
Fetching result for a key on #{self.class} that hasn't been loaded yet (#{key.inspect}, loaded: #{@results.keys})

This key should have been loaded already. This is a bug in GraphQL::Dataloader, please report it on GitHub: https://github.com/rmosolgo/graphql-ruby/issues/new
ERR
  end
  result = @results[key]
  if result.is_a?(StandardError)
    # Dup it because the rescuer may modify it.
    # (This happens for GraphQL::ExecutionErrors, at least)
    raise result.dup
  end
  result
end
def result_key_for(value)

Returns:
- (Object) -- The key for tracking this pending data

Parameters:
- value (Object) -- A value passed to `.request` or `.load`, for which a value will be loaded

def result_key_for(value)
  value
end
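By default, the value is its own key. A hypothetical override could normalize equivalent values so they share one cache entry and one slot in the batch:

def result_key_for(value)
  # Treat "5" and 5 as the same record id
  value.to_i
end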
def run_pending_keys

Returns:
- (void)

Other tags:
- Api: private

def run_pending_keys
  if !@fetching.empty?
    @fetching.each_key { |k| @pending.delete(k) }
  end
  return if @pending.empty?
  fetch_h = @pending
  @pending = {}
  @fetching.merge!(fetch_h)
  results = fetch(fetch_h.values)
  fetch_h.each_with_index do |(key, _value), idx|
    @results[key] = results[idx]
  end
  nil
rescue StandardError => error
  fetch_h.each_key { |key| @results[key] = error }
ensure
  fetch_h && fetch_h.each_key { |k| @fetching.delete(k) }
end
def setup(dataloader)

Other tags:
- Api: private

def setup(dataloader)
  # These keys have been requested but haven't been fetched yet
  @pending = {}
  # These keys have been passed to `fetch` but haven't been finished yet
  @fetching = {}
  # { key => result }
  @results = {}
  @dataloader = dataloader
end
def sync(pending_result_keys)

Returns:
- (void)

def sync(pending_result_keys)
  @dataloader.yield(self)
  iterations = 0
  while pending_result_keys.any? { |key| !@results.key?(key) }
    iterations += 1
    if iterations > MAX_ITERATIONS
      raise "#{self.class}#sync tried #{MAX_ITERATIONS} times to load pending keys (#{pending_result_keys}), but they still weren't loaded. There is likely a circular dependency#{@dataloader.fiber_limit ? " or `fiber_limit: #{@dataloader.fiber_limit}` is set too low" : ""}."
    end
    @dataloader.yield(self)
  end
  nil
end