From 0caf2eb0c1dde92d691be5a78029fb0783f63023 Mon Sep 17 00:00:00 2001
From: Greg MacWilliam
Date: Fri, 30 May 2025 23:31:05 -0400
Subject: [PATCH] claude cleanups.

---
 lib/graphql/stitching/client.rb               |  4 +-
 lib/graphql/stitching/composer.rb             |  2 +-
 .../composer/type_resolver_config.rb          |  2 +-
 lib/graphql/stitching/executor.rb             |  2 +-
 lib/graphql/stitching/executor/root_source.rb | 26 ++++---
 .../executor/type_resolver_source.rb          | 74 +++++++++++--------
 lib/graphql/stitching/http_executable.rb      |  3 +-
 lib/graphql/stitching/plan.rb                 | 59 +++++++++++----
 lib/graphql/stitching/planner.rb              | 53 ++++++-------
 lib/graphql/stitching/request.rb              |  2 +-
 lib/graphql/stitching/request/skip_include.rb |  4 +-
 lib/graphql/stitching/supergraph.rb           | 22 ++++--
 lib/graphql/stitching/util.rb                 | 25 ++++++-
 13 files changed, 179 insertions(+), 99 deletions(-)

diff --git a/lib/graphql/stitching/client.rb b/lib/graphql/stitching/client.rb
index 93ae1fcf..f57d702d 100644
--- a/lib/graphql/stitching/client.rb
+++ b/lib/graphql/stitching/client.rb
@@ -25,7 +25,7 @@ def initialize(locations: nil, supergraph: nil, composer_options: {})
           raise ArgumentError, "Cannot provide both locations and a supergraph."
         elsif supergraph && !supergraph.is_a?(Supergraph)
           raise ArgumentError, "Provided supergraph must be a GraphQL::Stitching::Supergraph instance."
-        elsif supergraph && composer_options.any?
+        elsif supergraph && !composer_options.empty?
           raise ArgumentError, "Cannot provide composer options with a pre-built supergraph."
         elsif supergraph
           supergraph
@@ -50,7 +50,7 @@ def execute(raw_query = nil, query: nil, variables: nil, operation_name: nil, co
 
         if validate
           validation_errors = request.validate
-          return error_result(request, validation_errors) if validation_errors.any?
+          return error_result(request, validation_errors) unless validation_errors.empty?
         end
 
         load_plan(request)
diff --git a/lib/graphql/stitching/composer.rb b/lib/graphql/stitching/composer.rb
index 34ad03c5..00fc2989 100644
--- a/lib/graphql/stitching/composer.rb
+++ b/lib/graphql/stitching/composer.rb
@@ -423,7 +423,7 @@ def build_merged_arguments(type_name, members_by_location, owner, field_name: ni
           memo[location] = argument.default_value
         end
 
-        if default_values_by_location.any?
+        unless default_values_by_location.empty?
           kwargs[:default_value] = @default_value_merger.call(default_values_by_location, {
             type_name: type_name,
             field_name: field_name,
diff --git a/lib/graphql/stitching/composer/type_resolver_config.rb b/lib/graphql/stitching/composer/type_resolver_config.rb
index 67b0cf21..a36062ea 100644
--- a/lib/graphql/stitching/composer/type_resolver_config.rb
+++ b/lib/graphql/stitching/composer/type_resolver_config.rb
@@ -8,7 +8,7 @@ class TypeResolverConfig
 
         class << self
           def extract_directive_assignments(schema, location, assignments)
-            return EMPTY_OBJECT unless assignments && assignments.any?
+            return EMPTY_OBJECT unless assignments && !assignments.empty?
 
             assignments.each_with_object({}) do |kwargs, memo|
               type = kwargs[:parent_type_name] ? schema.get_type(kwargs[:parent_type_name]) : schema.query
diff --git a/lib/graphql/stitching/executor.rb b/lib/graphql/stitching/executor.rb
index 02c2ee56..17f3131a 100644
--- a/lib/graphql/stitching/executor.rb
+++ b/lib/graphql/stitching/executor.rb
@@ -79,7 +79,7 @@ def exec!(next_steps)
 
       def exec_task(task)
         next_steps = task.load.tap(&:compact!)
-        exec!(next_steps) if next_steps.any?
+        exec!(next_steps) unless next_steps.empty?
       end
     end
   end
diff --git a/lib/graphql/stitching/executor/root_source.rb b/lib/graphql/stitching/executor/root_source.rb
index d39672d4..0f35ca3e 100644
--- a/lib/graphql/stitching/executor/root_source.rb
+++ b/lib/graphql/stitching/executor/root_source.rb
@@ -21,7 +21,7 @@ def fetch(ops)
           @executor.query_count += 1
           if result["data"]
-            if op.path.any?
+            unless op.path.empty?
               # Nested root scopes must expand their pathed origin set
               origin_set = op.path.reduce([@executor.data]) do |set, ns|
                 set.flat_map { |obj| obj && obj[ns] }.tap(&:compact!)
               end
@@ -44,24 +44,28 @@ def fetch(ops)
         # Builds root source documents
         # "query MyOperation_1($var:VarType) { rootSelections ... }"
         def build_document(op, operation_name = nil, operation_directives = nil)
-          doc = String.new
-          doc << op.operation_type
+          doc_buffer = String.new
+          doc_buffer << op.operation_type
 
           if operation_name
-            doc << " #{operation_name}_#{op.step}"
+            doc_buffer << " " << operation_name << "_" << op.step.to_s
           end
 
-          if op.variables.any?
-            variable_defs = op.variables.map { |k, v| "$#{k}:#{v}" }.join(",")
-            doc << "(#{variable_defs})"
+          unless op.variables.empty?
+            doc_buffer << "("
+            op.variables.each_with_index do |(k, v), i|
+              doc_buffer << "," unless i.zero?
+              doc_buffer << "$" << k << ":" << v
+            end
+            doc_buffer << ")"
           end
 
           if operation_directives
-            doc << " #{operation_directives} "
+            doc_buffer << " " << operation_directives << " "
           end
 
-          doc << op.selections
-          doc
+          doc_buffer << op.selections
+          doc_buffer
         end
 
         # Format response errors without a document location (because it won't match the request doc),
@@ -69,7 +73,7 @@ def build_document(op, operation_name = nil, operation_directives = nil)
         def format_errors!(errors, path)
           errors.each do |err|
             err.delete("locations")
-            err["path"].unshift(*path) if err["path"] && path.any?
+            err["path"].unshift(*path) if err["path"] && !path.empty?
           end
           errors
         end
diff --git a/lib/graphql/stitching/executor/type_resolver_source.rb b/lib/graphql/stitching/executor/type_resolver_source.rb
index 2699718b..dc1498d7 100644
--- a/lib/graphql/stitching/executor/type_resolver_source.rb
+++ b/lib/graphql/stitching/executor/type_resolver_source.rb
@@ -20,10 +20,10 @@ def fetch(ops)
               origin_set.select! { _1[TypeResolver::TYPENAME_EXPORT_NODE.alias] == op.if_type }
             end
 
-            memo[op] = origin_set if origin_set.any?
+            memo[op] = origin_set unless origin_set.empty?
           end
 
-          if origin_sets_by_operation.any?
+          unless origin_sets_by_operation.empty?
             query_document, variable_names = build_document(
               origin_sets_by_operation,
               @executor.request.operation_name,
@@ -51,61 +51,76 @@
         # }"
         def build_document(origin_sets_by_operation, operation_name = nil, operation_directives = nil)
           variable_defs = {}
-          query_fields = origin_sets_by_operation.map.with_index do |(op, origin_set), batch_index|
+          fields_buffer = String.new
+
+          origin_sets_by_operation.each_with_index do |(op, origin_set), batch_index|
             variable_defs.merge!(op.variables)
             resolver = @executor.request.supergraph.resolvers_by_version[op.resolver]
+            fields_buffer << " " unless batch_index.zero?
 
             if resolver.list?
-              arguments = resolver.arguments.map.with_index do |arg, i|
+              fields_buffer << "_" << batch_index.to_s << "_result: " << resolver.field << "("
+
+              resolver.arguments.each_with_index do |arg, i|
+                fields_buffer << "," unless i.zero?
                 if arg.key?
                   variable_name = "_#{batch_index}_key_#{i}".freeze
                   @variables[variable_name] = origin_set.map { arg.build(_1) }
                   variable_defs[variable_name] = arg.to_type_signature
-                  "#{arg.name}:$#{variable_name}"
+                  fields_buffer << arg.name << ":$" << variable_name
                 else
-                  "#{arg.name}:#{arg.value.print}"
+                  fields_buffer << arg.name << ":" << arg.value.print
                 end
               end
 
-              "_#{batch_index}_result: #{resolver.field}(#{arguments.join(",")}) #{op.selections}"
+              fields_buffer << ") " << op.selections
             else
-              origin_set.map.with_index do |origin_obj, index|
-                arguments = resolver.arguments.map.with_index do |arg, i|
+              origin_set.each_with_index do |origin_obj, index|
+                fields_buffer << " " unless index.zero?
+                fields_buffer << "_" << batch_index.to_s << "_" << index.to_s << "_result: " << resolver.field << "("
+
+                resolver.arguments.each_with_index do |arg, i|
+                  fields_buffer << "," unless i.zero?
                   if arg.key?
                     variable_name = "_#{batch_index}_#{index}_key_#{i}".freeze
                     @variables[variable_name] = arg.build(origin_obj)
                     variable_defs[variable_name] = arg.to_type_signature
-                    "#{arg.name}:$#{variable_name}"
+                    fields_buffer << arg.name << ":$" << variable_name
                   else
-                    "#{arg.name}:#{arg.value.print}"
+                    fields_buffer << arg.name << ":" << arg.value.print
                   end
                 end
 
-                "_#{batch_index}_#{index}_result: #{resolver.field}(#{arguments.join(",")}) #{op.selections}"
+                fields_buffer << ") " << op.selections
              end
            end
          end
 
-          doc = String.new(QUERY_OP) # << resolver fulfillment always uses query
+          doc_buffer = String.new(QUERY_OP) # << resolver fulfillment always uses query
 
           if operation_name
-            doc << " #{operation_name}"
+            doc_buffer << " " << operation_name
             origin_sets_by_operation.each_key do |op|
-              doc << "_#{op.step}"
+              doc_buffer << "_" << op.step.to_s
             end
           end
 
-          if variable_defs.any?
-            doc << "(#{variable_defs.map { |k, v| "$#{k}:#{v}" }.join(",")})"
+          unless variable_defs.empty?
+            doc_buffer << "("
+            variable_defs.each_with_index do |(k, v), i|
+              doc_buffer << "," unless i.zero?
+              doc_buffer << "$" << k << ":" << v
+            end
+            doc_buffer << ")"
           end
 
           if operation_directives
-            doc << " #{operation_directives} "
+            doc_buffer << " " << operation_directives << " "
           end
 
-          doc << "{ #{query_fields.join(" ")} }"
+          doc_buffer << "{ " << fields_buffer << " }"
 
-          return doc, variable_defs.keys.tap do |names|
+          return doc_buffer, variable_defs.keys.tap do |names|
             names.reject! { @variables.key?(_1) }
           end
         end
@@ -120,10 +135,11 @@ def merge_results!(origin_sets_by_operation, raw_result)
             origin_set.map.with_index { |_, index| raw_result["_#{batch_index}_#{index}_result"] }
           end
 
-          next unless results&.any?
+          next if results.nil? || results.empty?
 
           origin_set.each_with_index do |origin_obj, index|
-            origin_obj.merge!(results[index]) if results[index]
+            result = results[index]
+            origin_obj.merge!(result) if result
           end
         end
       end
@@ -132,7 +148,7 @@ def merge_results!(origin_sets_by_operation, raw_result)
       def extract_errors!(origin_sets_by_operation, errors)
         ops = origin_sets_by_operation.keys
         origin_sets = origin_sets_by_operation.values
-        pathed_errors_by_op_index_and_object_id = {}
+        pathed_errors_by_op_index_and_object_id = Hash.new { |h, k| h[k] = {} }
 
         errors_result = errors.each_with_object([]) do |err, memo|
           err.delete("locations")
@@ -151,8 +167,8 @@ def extract_errors!(origin_sets_by_operation, errors)
               end
 
               if origin_obj
-                by_op_index = pathed_errors_by_op_index_and_object_id[result_alias[1].to_i] ||= {}
-                by_object_id = by_op_index[origin_obj.object_id] ||= []
+                pathed_errors_by_op_index = pathed_errors_by_op_index_and_object_id[result_alias[1].to_i]
+                by_object_id = pathed_errors_by_op_index[origin_obj.object_id] ||= []
                 by_object_id << err
                 next
               end
@@ -162,9 +178,9 @@ def extract_errors!(origin_sets_by_operation, errors)
           memo << err
         end
 
-        if pathed_errors_by_op_index_and_object_id.any?
+        unless pathed_errors_by_op_index_and_object_id.empty?
          pathed_errors_by_op_index_and_object_id.each do |op_index, pathed_errors_by_object_id|
-            repath_errors!(pathed_errors_by_object_id, ops.dig(op_index, "path"))
+            repath_errors!(pathed_errors_by_object_id, ops[op_index].path)
            errors_result.push(*pathed_errors_by_object_id.each_value)
          end
        end
@@ -180,7 +196,7 @@ def repath_errors!(pathed_errors_by_object_id, forward_path, current_path=[], ro
         current_path.push(forward_path.shift)
         scope = root[current_path.last]
 
-        if forward_path.any? && scope.is_a?(Array)
+        if !forward_path.empty? && scope.is_a?(Array)
           scope.each_with_index do |element, index|
             inner_elements = element.is_a?(Array) ? element.flatten : [element]
             inner_elements.each do |inner_element|
@@ -190,7 +206,7 @@ def repath_errors!(pathed_errors_by_object_id, forward_path, current_path=[], ro
             end
           end
 
-        elsif forward_path.any?
+        elsif !forward_path.empty?
           repath_errors!(pathed_errors_by_object_id, forward_path, current_path, scope)
 
        elsif scope.is_a?(Array)
diff --git a/lib/graphql/stitching/http_executable.rb b/lib/graphql/stitching/http_executable.rb
index d2e66c1f..f6cc71af 100644
--- a/lib/graphql/stitching/http_executable.rb
+++ b/lib/graphql/stitching/http_executable.rb
@@ -80,7 +80,7 @@ def extract_multipart_form(request, document, variables)
 
       return if files_by_path.none?
 
-      map = {}
+      map = Hash.new { |h, k| h[k] = [] }
      files = files_by_path.values.tap(&:uniq!)
      variables_copy = variables.dup
 
@@ -90,7 +90,6 @@ def extract_multipart_form(request, document, variables)
        path.each_with_index do |key, i|
          if i == path.length - 1
            file_index = files.index(copy[key]).to_s
-            map[file_index] ||= []
            map[file_index] << "variables.#{path.join(".")}"
            copy[key] = nil
          elsif orig[key].object_id == copy[key].object_id
diff --git a/lib/graphql/stitching/plan.rb b/lib/graphql/stitching/plan.rb
index ef48f3a1..9e07adf2 100644
--- a/lib/graphql/stitching/plan.rb
+++ b/lib/graphql/stitching/plan.rb
@@ -2,21 +2,42 @@
 
 module GraphQL
   module Stitching
-    # Immutable-ish structures representing a query plan.
+    # Immutable structures representing a query plan.
     # May serialize to/from JSON.
     class Plan
-      Op = Struct.new(
-        :step,
-        :after,
-        :location,
-        :operation_type,
-        :selections,
-        :variables,
-        :path,
-        :if_type,
-        :resolver,
-        keyword_init: true
-      ) do
+      class Op
+        attr_reader :step
+        attr_reader :after
+        attr_reader :location
+        attr_reader :operation_type
+        attr_reader :selections
+        attr_reader :variables
+        attr_reader :path
+        attr_reader :if_type
+        attr_reader :resolver
+
+        def initialize(
+          step:,
+          after:,
+          location:,
+          operation_type:,
+          selections:,
+          variables: nil,
+          path: nil,
+          if_type: nil,
+          resolver: nil
+        )
+          @step = step
+          @after = after
+          @location = location
+          @operation_type = operation_type
+          @selections = selections
+          @variables = variables
+          @path = path
+          @if_type = if_type
+          @resolver = resolver
+        end
+
         def as_json
           {
             step: step,
@@ -30,6 +51,18 @@ def as_json
             resolver: resolver
           }.tap(&:compact!)
         end
+
+        def ==(other)
+          step == other.step &&
+          after == other.after &&
+          location == other.location &&
+          operation_type == other.operation_type &&
+          selections == other.selections &&
+          variables == other.variables &&
+          path == other.path &&
+          if_type == other.if_type &&
+          resolver == other.resolver
+        end
       end
 
       class << self
diff --git a/lib/graphql/stitching/planner.rb b/lib/graphql/stitching/planner.rb
index 3a8fa9b2..2a4dc288 100644
--- a/lib/graphql/stitching/planner.rb
+++ b/lib/graphql/stitching/planner.rb
@@ -10,6 +10,15 @@ class Planner
       SUPERGRAPH_LOCATIONS = [Supergraph::SUPERGRAPH_LOCATION].freeze
       ROOT_INDEX = 0
 
+      class ScopePartition
+        attr_reader :location, :selections
+
+        def initialize(location:, selections:)
+          @location = location
+          @selections = selections
+        end
+      end
+
       def initialize(request)
         @request = request
         @supergraph = request.supergraph
@@ -76,11 +85,15 @@ def add_step(
         resolver: nil
       )
         # coalesce repeat parameters into a single entrypoint
-        entrypoint = [parent_index, location, parent_type.graphql_name, resolver&.key&.to_definition, "#", *path].join("/")
+        entrypoint = String.new
+        entrypoint << parent_index.to_s << "/" << location << "/" << parent_type.graphql_name
+        entrypoint << "/" << (resolver&.key&.to_s || "") << "/#"
+        path.each { entrypoint << "/" << _1 }
+
         step = @steps_by_entrypoint[entrypoint]
         next_index = step ? parent_index : @planning_index += 1
 
-        if selections.any?
+        unless selections.empty?
           selections = extract_locale_selections(location, parent_type, next_index, selections, path, variables)
         end
 
@@ -102,8 +115,6 @@ def add_step(
         end
       end
 
-      ScopePartition = Struct.new(:location, :selections, keyword_init: true)
-
       # A) Group all root selections by their preferred entrypoint locations.
       def build_root_entrypoints
         parent_type = @request.query.root_type_for_operation(@request.operation.operation_type)
@@ -111,10 +122,9 @@ def build_root_entrypoints
         case @request.operation.operation_type
         when QUERY_OP
           # A.1) Group query fields by location for parallel execution.
-          selections_by_location = {}
+          selections_by_location = Hash.new { |h, k| h[k] = [] }
           each_field_in_scope(parent_type, @request.operation.selections) do |node|
             locations = @supergraph.locations_by_type_and_field[parent_type.graphql_name][node.name] || SUPERGRAPH_LOCATIONS
-            selections_by_location[locations.first] ||= []
             selections_by_location[locations.first] << node
           end
 
@@ -229,7 +239,8 @@ def extract_locale_selections(
 
           # B.3) Collect all variable definitions used within the filtered selection.
           extract_node_variables(node, locale_variables)
-          field_type = @supergraph.memoized_schema_fields(parent_type.graphql_name)[node.name].type.unwrap
+          schema_fields = @supergraph.memoized_schema_fields(parent_type.graphql_name)
+          field_type = schema_fields[node.name].type.unwrap
 
           if Util.is_leaf_type?(field_type)
             locale_selections << node
@@ -377,9 +388,11 @@ def delegate_remote_selections(parent_type, remote_selections)
         end
 
         # C.2) Distribute non-unique fields among locations that were added during C.1.
-        if selections_by_location.any? && remote_selections.any?
+        if !selections_by_location.empty? && !remote_selections.empty?
+          available_locations = Set.new(selections_by_location.each_key)
+
           remote_selections.reject! do |node|
-            used_location = possible_locations_by_field[node.name].find { selections_by_location[_1] }
+            used_location = possible_locations_by_field[node.name].find { available_locations.include?(_1) }
             if used_location
               selections_by_location[used_location] << node
               true
@@ -388,29 +401,17 @@ def delegate_remote_selections(parent_type, remote_selections)
         end
 
         # C.3) Distribute remaining fields among locations weighted by greatest availability.
-        if remote_selections.any?
-          field_count_by_location = remote_selections.each_with_object({}) do |node, memo|
+        if !remote_selections.empty?
+          field_count_by_location = Hash.new(0)
+          remote_selections.each do |node|
             possible_locations_by_field[node.name].each do |location|
-              memo[location] ||= 0
-              memo[location] += 1
+              field_count_by_location[location] += 1
             end
           end
 
           remote_selections.each do |node|
             possible_locations = possible_locations_by_field[node.name]
-            preferred_location = possible_locations.first
-
-            possible_locations.reduce(0) do |max_availability, possible_location|
-              availability = field_count_by_location.fetch(possible_location, 0)
-
-              if availability > max_availability
-                preferred_location = possible_location
-                availability
-              else
-                max_availability
-              end
-            end
-
+            preferred_location = possible_locations.max_by { field_count_by_location[_1] } || possible_locations.first
             selections_by_location[preferred_location] ||= []
             selections_by_location[preferred_location] << node
           end
diff --git a/lib/graphql/stitching/request.rb b/lib/graphql/stitching/request.rb
index 5e87efaf..5ebc4db6 100644
--- a/lib/graphql/stitching/request.rb
+++ b/lib/graphql/stitching/request.rb
@@ -101,7 +101,7 @@ def operation_name
 
      # @return [String] A string of directives applied to the root operation. These are passed through in all subgraph requests.
      def operation_directives
-        @operation_directives ||= if operation.directives.any?
+        @operation_directives ||= unless operation.directives.empty?
          printer = GraphQL::Language::Printer.new
          operation.directives.map { printer.print(_1) }.join(" ")
        end
diff --git a/lib/graphql/stitching/request/skip_include.rb b/lib/graphql/stitching/request/skip_include.rb
index 935548b9..d5b0196c 100644
--- a/lib/graphql/stitching/request/skip_include.rb
+++ b/lib/graphql/stitching/request/skip_include.rb
@@ -34,7 +34,7 @@ def render_node(parent_node, variables)
              next nil
            end
 
-            node = render_node(node, variables) if node.selections.any?
+            node = render_node(node, variables) unless node.selections.empty?
            changed ||= node.object_id != original_node.object_id
            node
          end
@@ -51,7 +51,7 @@ def render_node(parent_node, variables)
        end
 
        def prune_node(node, variables)
-          return node unless node.directives.any?
+          return node if node.directives.empty?
 
          delete_node = false
          filtered_directives = node.directives.reject do |directive|
diff --git a/lib/graphql/stitching/supergraph.rb b/lib/graphql/stitching/supergraph.rb
index 74abec44..a25d1d67 100644
--- a/lib/graphql/stitching/supergraph.rb
+++ b/lib/graphql/stitching/supergraph.rb
@@ -50,11 +50,11 @@ def initialize(schema:, fields: {}, resolvers: {}, visibility_profiles: [], exec
           end
         end.freeze
 
-        if visibility_profiles.any?
+        if visibility_profiles.empty?
+          @schema.use(GraphQL::Schema::AlwaysVisible)
+        else
           profiles = visibility_profiles.each_with_object({ nil => {} }) { |p, m| m[p.to_s] = {} }
           @schema.use(GraphQL::Schema::Visibility, profiles: profiles)
-        else
-          @schema.use(GraphQL::Schema::AlwaysVisible)
         end
       end
 
@@ -187,7 +187,17 @@ def route_type_to_locations(type_name, start_location, goal_locations)
 
      private
 
-      PathNode = Struct.new(:location, :key, :cost, :resolver, keyword_init: true)
+      class PathNode
+        attr_reader :location, :key, :resolver
+        attr_accessor :cost
+
+        def initialize(location:, key:, resolver: nil, cost: 0)
+          @location = location
+          @key = key
+          @resolver = resolver
+          @cost = cost
+        end
+      end
 
      # tunes A* search to favor paths with fewest joining locations, ie:
      # favor longer paths through target locations over shorter paths with additional locations.
@@ -196,10 +206,10 @@ def route_type_to_locations_via_search(type_name, start_location, goal_locations
        costs = {}
 
        paths = possible_keys_for_type_and_location(type_name, start_location).map do |possible_key|
-          [PathNode.new(location: start_location, key: possible_key, cost: 0)]
+          [PathNode.new(location: start_location, key: possible_key)]
        end
 
-        while paths.any?
+        while !paths.empty?
          path = paths.pop
          current_location = path.last.location
          current_key = path.last.key
diff --git a/lib/graphql/stitching/util.rb b/lib/graphql/stitching/util.rb
index 2771b902..3b98f73f 100644
--- a/lib/graphql/stitching/util.rb
+++ b/lib/graphql/stitching/util.rb
@@ -4,12 +4,29 @@
 module GraphQL
   module Stitching
     # General utilities to aid with stitching.
     class Util
-      TypeStructure = Struct.new(:list, :null, :name, keyword_init: true) do
-        alias_method :list?, :list
-        alias_method :null?, :null
+      class TypeStructure
+        attr_reader :name
+
+        def initialize(list:, null:, name:)
+          @list = list
+          @null = null
+          @name = name
+        end
+
+        def list?
+          @list
+        end
+
+        def null?
+          @null
+        end
 
         def non_null?
-          !null
+          !@null
+        end
+
+        def ==(other)
+          @list == other.list? && @null == other.null? && @name == other.name
         end
       end