Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 37 additions & 3 deletions lib/graphql/dataloader.rb
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
# frozen_string_literal: true

require "graphql/dataloader/flat_dataloader"
require "graphql/dataloader/null_dataloader"
require "graphql/dataloader/request"
require "graphql/dataloader/request_all"
Expand Down Expand Up @@ -64,8 +65,14 @@ def initialize(nonblocking: self.class.default_nonblocking, fiber_limit: self.cl
@nonblocking = nonblocking
end
@fiber_limit = fiber_limit
@steps_to_rerun_after_lazy = []
@lazies_at_depth = nil
end

attr_accessor :lazies_at_depth

attr_reader :steps_to_rerun_after_lazy

# @return [Integer, nil]
attr_reader :fiber_limit

Expand Down Expand Up @@ -140,10 +147,10 @@ def yield(source = Fiber[:__graphql_current_dataloader_source])
end

# @api private Nothing to see here
def append_job(&job)
def append_job(callable = nil, &job)
# Given a block, queue it up to be worked through when `#run` is called.
# (If the dataloader is already running, than a Fiber will pick this up later.)
@pending_jobs.push(job)
@pending_jobs.push(callable || job)
nil
end

Expand Down Expand Up @@ -189,6 +196,8 @@ def run_isolated
end

def run
# TODO unify the initialization lazies_at_depth
@lazies_at_depth ||= Hash.new { |h, k| h[k] = [] }
trace = Fiber[:__graphql_current_multiplex]&.current_trace
jobs_fiber_limit, total_fiber_limit = calculate_fiber_limit
job_fibers = []
Expand Down Expand Up @@ -222,6 +231,31 @@ def run
end
join_queues(source_fibers, next_source_fibers)
end

if @lazies_at_depth.any?
smallest_depth = nil
@lazies_at_depth.each_key do |depth_key|
smallest_depth ||= depth_key
if depth_key < smallest_depth
smallest_depth = depth_key
end
end

if smallest_depth
lazies = @lazies_at_depth.delete(smallest_depth)
if !lazies.empty?
append_job {
lazies.each(&:value) # resolve these Lazy instances
}
job_fibers << spawn_job_fiber(trace)
end
end
elsif @steps_to_rerun_after_lazy.any?
@pending_jobs.concat(@steps_to_rerun_after_lazy)
f = spawn_job_fiber(trace)
job_fibers << f
@steps_to_rerun_after_lazy.clear
end
end

trace&.end_dataloader(self)
Expand Down Expand Up @@ -331,4 +365,4 @@ def spawn_source_fiber(trace)
end
end

require "graphql/dataloader/async_dataloader"
require "graphql/dataloader/async_dataloader"
75 changes: 75 additions & 0 deletions lib/graphql/dataloader/flat_dataloader.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
# frozen_string_literal: true
module GraphQL
  class Dataloader
    # A synchronous, fiber-free Dataloader implementation.
    #
    # Jobs are held in a flat queue and executed in order on the current
    # thread. After the queue drains, pending `Lazy` instances are resolved
    # breadth-first (shallowest depth first), then any steps deferred until
    # after lazy resolution are re-run. Source batching via `yield` is not
    # supported here — calling it raises, same as `NullDataloader`.
    class FlatDataloader < Dataloader
      # Accept and ignore any positional or keyword arguments so this class
      # can be constructed anywhere another Dataloader subclass would be
      # (e.g. `schema.dataloader_class.new(nonblocking: ...)`).
      def initialize(*, **)
        # TODO unify the initialization lazies_at_depth
        @lazies_at_depth = Hash.new { |h, k| h[k] = [] }
        @steps_to_rerun_after_lazy = []
        @queue = []
      end

      # Work through all pending work synchronously:
      # queued jobs first, then lazies (breadth-first by depth), then
      # deferred steps; repeat until nothing remains.
      #
      # Unlike a plain queue drain, the loop condition also checks
      # `@lazies_at_depth` and `@steps_to_rerun_after_lazy`, so pending
      # lazies are resolved even when no jobs were queued.
      # @return [void]
      def run
        while @queue.any? || @lazies_at_depth&.any? || @steps_to_rerun_after_lazy.any?
          while (step = @queue.shift)
            step.call
          end

          # Resolve shallowest lazies first; resolving one may enqueue
          # deeper ones, so re-check the smallest depth each pass.
          while @lazies_at_depth&.any?
            smallest_depth = @lazies_at_depth.keys.min
            lazies = @lazies_at_depth.delete(smallest_depth)
            lazies.each(&:value) # resolve these Lazy instances
          end

          if @steps_to_rerun_after_lazy.any?
            @steps_to_rerun_after_lazy.each(&:call)
            @steps_to_rerun_after_lazy.clear
          end
        end
      end

      # Run the given block with a private queue and lazy state,
      # restoring the previous state afterward.
      # @return [Object] the block's return value
      def run_isolated
        prev_queue = @queue
        prev_steps_to_rerun = @steps_to_rerun_after_lazy
        prev_lazies_at_depth = @lazies_at_depth
        @queue = []
        @steps_to_rerun_after_lazy = []
        @lazies_at_depth = Hash.new { |h, k| h[k] = [] }
        result = nil
        append_job {
          result = yield
        }
        run
        result
      ensure
        @queue = prev_queue
        @steps_to_rerun_after_lazy = prev_steps_to_rerun
        @lazies_at_depth = prev_lazies_at_depth
      end

      # No source caches exist in this implementation; nothing to clear.
      def clear_cache; end

      # Sources require a fiber-based dataloader; this one can't pause a step.
      def yield(_source)
        raise GraphQL::Error, "GraphQL::Dataloader is not running -- add `use GraphQL::Dataloader` to your schema to use Dataloader sources."
      end

      # Enqueue a step to run on the next {#run}.
      # @param callable [#call, nil] a pre-built step; when nil, the block is used
      # @return [nil]
      def append_job(callable = nil, &block)
        @queue << (callable || block)
        nil
      end

      # Sources require a fiber-based dataloader; refuse to build one.
      def with(*)
        raise GraphQL::Error, "GraphQL::Dataloader is not running -- add `use GraphQL::Dataloader` to your schema to use Dataloader sources."
      end
    end
  end
end
4 changes: 2 additions & 2 deletions lib/graphql/dataloader/null_dataloader.rb
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@ def yield(_source)
raise GraphQL::Error, "GraphQL::Dataloader is not running -- add `use GraphQL::Dataloader` to your schema to use Dataloader sources."
end

def append_job
yield
# Run the given step immediately — the null dataloader never defers work.
# @param callable [#call, nil] a step to invoke; when nil, the block is invoked instead
# @return [nil]
def append_job(callable = nil)
  if callable
    callable.call
  else
    yield
  end
  nil
end

Expand Down
11 changes: 1 addition & 10 deletions lib/graphql/execution/interpreter.rb
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
require "graphql/execution/interpreter/arguments_cache"
require "graphql/execution/interpreter/execution_errors"
require "graphql/execution/interpreter/runtime"
require "graphql/execution/interpreter/resolve"
require "graphql/execution/interpreter/handles_raw_value"

module GraphQL
Expand Down Expand Up @@ -43,6 +42,7 @@ def run_all(schema, query_options, context: {}, max_complexity: schema.max_compl
schema = multiplex.schema
queries = multiplex.queries
lazies_at_depth = Hash.new { |h, k| h[k] = [] }
multiplex.dataloader.lazies_at_depth = lazies_at_depth
multiplex_analyzers = schema.multiplex_analyzers
if multiplex.max_complexity
multiplex_analyzers += [GraphQL::Analysis::MaxQueryComplexity]
Expand Down Expand Up @@ -90,15 +90,6 @@ def run_all(schema, query_options, context: {}, max_complexity: schema.max_compl

multiplex.dataloader.run

# Then, work through lazy results in a breadth-first way
multiplex.dataloader.append_job {
query = multiplex.queries.length == 1 ? multiplex.queries[0] : nil
multiplex.current_trace.execute_query_lazy(multiplex: multiplex, query: query) do
Interpreter::Resolve.resolve_each_depth(lazies_at_depth, multiplex.dataloader)
end
}
multiplex.dataloader.run

# Then, find all errors and assign the result to the query object
results.each_with_index do |data_result, idx|
query = queries[idx]
Expand Down
100 changes: 0 additions & 100 deletions lib/graphql/execution/interpreter/resolve.rb

This file was deleted.

3 changes: 2 additions & 1 deletion lib/graphql/execution/interpreter/runtime.rb
Original file line number Diff line number Diff line change
Expand Up @@ -488,7 +488,8 @@ def evaluate_selection_with_resolved_keyword_args(kwarg_arguments, resolved_argu
# all of its child fields before moving on to the next root mutation field.
# (Subselections of this mutation will still be resolved level-by-level.)
if selection_result.graphql_is_eager
Interpreter::Resolve.resolve_all([field_result], @dataloader)
# TODO what to do with this
# Interpreter::Resolve.resolve_all([field_result], @dataloader)
end
end

Expand Down
2 changes: 1 addition & 1 deletion lib/graphql/schema.rb
Original file line number Diff line number Diff line change
Expand Up @@ -671,7 +671,7 @@ def union_memberships(type = nil)
# @api private
# @see GraphQL::Dataloader
def dataloader_class
@dataloader_class || GraphQL::Dataloader::NullDataloader
@dataloader_class || GraphQL::Dataloader::FlatDataloader
end

attr_writer :dataloader_class
Expand Down
Loading