diff --git a/lib/graphql/dataloader.rb b/lib/graphql/dataloader.rb index 92ddac4f6c..ad88bffb97 100644 --- a/lib/graphql/dataloader.rb +++ b/lib/graphql/dataloader.rb @@ -1,5 +1,6 @@ # frozen_string_literal: true +require "graphql/dataloader/flat_dataloader" require "graphql/dataloader/null_dataloader" require "graphql/dataloader/request" require "graphql/dataloader/request_all" @@ -64,8 +65,14 @@ def initialize(nonblocking: self.class.default_nonblocking, fiber_limit: self.cl @nonblocking = nonblocking end @fiber_limit = fiber_limit + @steps_to_rerun_after_lazy = [] + @lazies_at_depth = nil end + attr_accessor :lazies_at_depth + + attr_reader :steps_to_rerun_after_lazy + # @return [Integer, nil] attr_reader :fiber_limit @@ -140,10 +147,10 @@ def yield(source = Fiber[:__graphql_current_dataloader_source]) end # @api private - def append_job(&job) + def append_job(callable = nil, &job) # Given a block, queue it up to be worked through when `#run` is called. # (If the dataloader is already running, than a Fiber will pick this up later.) - @pending_jobs.push(job) + @pending_jobs.push(callable || job) nil end @@ -189,6 +196,8 @@ def run_isolated end def run + # TODO unify the initialization of lazies_at_depth + @lazies_at_depth ||= Hash.new { |h, k| h[k] = [] } trace = Fiber[:__graphql_current_multiplex]&.current_trace jobs_fiber_limit, total_fiber_limit = calculate_fiber_limit job_fibers = [] @@ -222,6 +231,31 @@ def run end join_queues(source_fibers, next_source_fibers) end + + if @lazies_at_depth.any? + smallest_depth = nil + @lazies_at_depth.each_key do |depth_key| + smallest_depth ||= depth_key + if depth_key < smallest_depth + smallest_depth = depth_key + end + end + + if smallest_depth + lazies = @lazies_at_depth.delete(smallest_depth) + if !lazies.empty? + append_job { + lazies.each(&:value) # resolve these Lazy instances + } + job_fibers << spawn_job_fiber(trace) + end + end + elsif @steps_to_rerun_after_lazy.any? 
+ @pending_jobs.concat(@steps_to_rerun_after_lazy) + f = spawn_job_fiber(trace) + job_fibers << f + @steps_to_rerun_after_lazy.clear + end end trace&.end_dataloader(self) @@ -331,4 +365,4 @@ def spawn_source_fiber(trace) end end -require "graphql/dataloader/async_dataloader" +require "graphql/dataloader/async_dataloader" \ No newline at end of file diff --git a/lib/graphql/dataloader/flat_dataloader.rb b/lib/graphql/dataloader/flat_dataloader.rb new file mode 100644 index 0000000000..310785c1ad --- /dev/null +++ b/lib/graphql/dataloader/flat_dataloader.rb @@ -0,0 +1,75 @@ +# frozen_string_literal: true +module GraphQL + class Dataloader + class FlatDataloader < Dataloader + def initialize(*) + # TODO unify the initialization lazies_at_depth + @lazies_at_depth ||= Hash.new { |h, k| h[k] = [] } + @steps_to_rerun_after_lazy = [] + @queue = [] + end + + def run + while @queue.any? + while (step = @queue.shift) + step.call + end + + while @lazies_at_depth&.any? + smallest_depth = nil + @lazies_at_depth.each_key do |depth_key| + smallest_depth ||= depth_key + if depth_key < smallest_depth + smallest_depth = depth_key + end + end + + if smallest_depth + lazies = @lazies_at_depth.delete(smallest_depth) + lazies.each(&:value) # resolve these Lazy instances + end + end + + if @steps_to_rerun_after_lazy.any? + @steps_to_rerun_after_lazy.each(&:call) + @steps_to_rerun_after_lazy.clear + end + end + end + + def run_isolated + prev_queue = @queue + prev_stral = @steps_to_rerun_after_lazy + prev_lad = @lazies_at_depth + @steps_to_rerun_after_lazy = [] + @queue = [] + @lazies_at_depth = @lazies_at_depth.dup&.clear + res = nil + append_job { + res = yield + } + run + res + ensure + @queue = prev_queue + @steps_to_rerun_after_lazy = prev_stral + @lazies_at_depth = prev_lad + end + + def clear_cache; end + + def yield(_source) + raise GraphQL::Error, "GraphQL::Dataloader is not running -- add `use GraphQL::Dataloader` to your schema to use Dataloader sources." 
+ end + + def append_job(callable = nil, &block) + @queue << (callable || block) + nil + end + + def with(*) + raise GraphQL::Error, "GraphQL::Dataloader is not running -- add `use GraphQL::Dataloader` to your schema to use Dataloader sources." + end + end + end +end \ No newline at end of file diff --git a/lib/graphql/dataloader/null_dataloader.rb b/lib/graphql/dataloader/null_dataloader.rb index 7fd222d7fe..3e431c1086 100644 --- a/lib/graphql/dataloader/null_dataloader.rb +++ b/lib/graphql/dataloader/null_dataloader.rb @@ -18,8 +18,8 @@ def yield(_source) raise GraphQL::Error, "GraphQL::Dataloader is not running -- add `use GraphQL::Dataloader` to your schema to use Dataloader sources." end - def append_job - yield + def append_job(callable = nil) + callable ? callable.call : yield nil end diff --git a/lib/graphql/execution/interpreter.rb b/lib/graphql/execution/interpreter.rb index ba0b94b2aa..c2015f1aac 100644 --- a/lib/graphql/execution/interpreter.rb +++ b/lib/graphql/execution/interpreter.rb @@ -5,7 +5,6 @@ require "graphql/execution/interpreter/arguments_cache" require "graphql/execution/interpreter/execution_errors" require "graphql/execution/interpreter/runtime" -require "graphql/execution/interpreter/resolve" require "graphql/execution/interpreter/handles_raw_value" module GraphQL @@ -43,6 +42,7 @@ def run_all(schema, query_options, context: {}, max_complexity: schema.max_compl schema = multiplex.schema queries = multiplex.queries lazies_at_depth = Hash.new { |h, k| h[k] = [] } + multiplex.dataloader.lazies_at_depth = lazies_at_depth multiplex_analyzers = schema.multiplex_analyzers if multiplex.max_complexity multiplex_analyzers += [GraphQL::Analysis::MaxQueryComplexity] @@ -90,15 +90,6 @@ def run_all(schema, query_options, context: {}, max_complexity: schema.max_compl multiplex.dataloader.run - # Then, work through lazy results in a breadth-first way - multiplex.dataloader.append_job { - query = multiplex.queries.length == 1 ? 
multiplex.queries[0] : nil - multiplex.current_trace.execute_query_lazy(multiplex: multiplex, query: query) do - Interpreter::Resolve.resolve_each_depth(lazies_at_depth, multiplex.dataloader) - end - } - multiplex.dataloader.run - # Then, find all errors and assign the result to the query object results.each_with_index do |data_result, idx| query = queries[idx] diff --git a/lib/graphql/execution/interpreter/resolve.rb b/lib/graphql/execution/interpreter/resolve.rb deleted file mode 100644 index 570ab48e9d..0000000000 --- a/lib/graphql/execution/interpreter/resolve.rb +++ /dev/null @@ -1,100 +0,0 @@ -# frozen_string_literal: true - -module GraphQL - module Execution - class Interpreter - module Resolve - # Continue field results in `results` until there's nothing else to continue. - # @return [void] - def self.resolve_all(results, dataloader) - dataloader.append_job { resolve(results, dataloader) } - nil - end - - def self.resolve_each_depth(lazies_at_depth, dataloader) - smallest_depth = nil - lazies_at_depth.each_key do |depth_key| - smallest_depth ||= depth_key - if depth_key < smallest_depth - smallest_depth = depth_key - end - end - - if smallest_depth - lazies = lazies_at_depth.delete(smallest_depth) - if !lazies.empty? - lazies.each do |l| - dataloader.append_job { l.value } - end - # Run lazies _and_ dataloader, see if more are enqueued - dataloader.run - resolve_each_depth(lazies_at_depth, dataloader) - end - end - nil - end - - # After getting `results` back from an interpreter evaluation, - # continue it until you get a response-ready Ruby value. - # - # `results` is one level of _depth_ of a query or multiplex. - # - # Resolve all lazy values in that depth before moving on - # to the next level. - # - # It's assumed that the lazies will - # return {Lazy} instances if there's more work to be done, - # or return {Hash}/{Array} if the query should be continued. 
- # - # @return [void] - def self.resolve(results, dataloader) - # There might be pending jobs here that _will_ write lazies - # into the result hash. We should run them out, so we - # can be sure that all lazies will be present in the result hashes. - # A better implementation would somehow interleave (or unify) - # these approaches. - dataloader.run - next_results = [] - while !results.empty? - result_value = results.shift - if result_value.is_a?(Runtime::GraphQLResultHash) || result_value.is_a?(Hash) - results.concat(result_value.values) - next - elsif result_value.is_a?(Runtime::GraphQLResultArray) - results.concat(result_value.values) - next - elsif result_value.is_a?(Array) - results.concat(result_value) - next - elsif result_value.is_a?(Lazy) - loaded_value = result_value.value - if loaded_value.is_a?(Lazy) - # Since this field returned another lazy, - # add it to the same queue - results << loaded_value - elsif loaded_value.is_a?(Runtime::GraphQLResultHash) || loaded_value.is_a?(Runtime::GraphQLResultArray) || - loaded_value.is_a?(Hash) || loaded_value.is_a?(Array) - # Add these values in wholesale -- - # they might be modified by later work in the dataloader. - next_results << loaded_value - end - end - end - - if !next_results.empty? - # Any pending data loader jobs may populate the - # resutl arrays or result hashes accumulated in - # `next_results``. Run those **to completion** - # before continuing to resolve `next_results`. - # (Just `.append_job` doesn't work if any pending - # jobs require multiple passes.) 
- dataloader.run - dataloader.append_job { resolve(next_results, dataloader) } - end - - nil - end - end - end - end -end diff --git a/lib/graphql/execution/interpreter/runtime.rb b/lib/graphql/execution/interpreter/runtime.rb index a2d0f389f5..900979fe54 100644 --- a/lib/graphql/execution/interpreter/runtime.rb +++ b/lib/graphql/execution/interpreter/runtime.rb @@ -488,7 +488,8 @@ def evaluate_selection_with_resolved_keyword_args(kwarg_arguments, resolved_argu # all of its child fields before moving on to the next root mutation field. # (Subselections of this mutation will still be resolved level-by-level.) if selection_result.graphql_is_eager - Interpreter::Resolve.resolve_all([field_result], @dataloader) + # TODO what to do with this + # Interpreter::Resolve.resolve_all([field_result], @dataloader) end end diff --git a/lib/graphql/schema.rb b/lib/graphql/schema.rb index be3d20b729..1f0094f828 100644 --- a/lib/graphql/schema.rb +++ b/lib/graphql/schema.rb @@ -671,7 +671,7 @@ def union_memberships(type = nil) # @api private # @see GraphQL::Dataloader def dataloader_class - @dataloader_class || GraphQL::Dataloader::NullDataloader + @dataloader_class || GraphQL::Dataloader::FlatDataloader end attr_writer :dataloader_class