Rename "boundary" code concept to "resolver" (#136)
gmac authored Jun 2, 2024
1 parent afc4691 commit 05c284a
Showing 33 changed files with 309 additions and 305 deletions.
2 changes: 1 addition & 1 deletion docs/mechanics.md
@@ -345,4 +345,4 @@ type Query {
}
```

In this graph, `Widget` is a merged type without a resolver query in location C. This works because all of its fields are resolvable in other locations; that means location C can provide outbound representations of this type without ever needing to resolve inbound requests for it. Outbound types do still require a key field (such as `id` above) that allows them to join with data in other resolver locations (such as `price` above).
In this graph, `Widget` is a merged type without a resolver query in location C. This works because all of its fields are resolvable in other locations; that means location C can provide outbound representations of this type without ever needing to resolve inbound requests for it. Outbound types do still require a shared key field (such as `id` above) that allows them to join with data in other resolver locations (such as `price` above).
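
To make that paragraph concrete, here is a minimal sketch of such a graph (the location labels and the `Order.widget` field are illustrative additions; `Widget`, `id`, and `price` follow the example above, and `@stitch(key: ...)` is the library's resolver-query annotation):

```graphql
# Location A: defines a resolver query for Widget and contributes `price`.
type Widget {
  id: ID!
  price: Float
}

type Query {
  widget(id: ID!): Widget @stitch(key: "id")
}

# Location C: no resolver query for Widget. It only returns outbound
# representations (via Order.widget) carrying the shared `id` key,
# which lets other locations resolve Widget's remaining fields.
type Order {
  id: ID!
  widget: Widget
}

type Widget {
  id: ID!
}

type Query {
  order(id: ID!): Order
}
```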
2 changes: 1 addition & 1 deletion lib/graphql/stitching.rb
@@ -24,7 +24,7 @@ def stitching_directive_names
end

require_relative "stitching/supergraph"
require_relative "stitching/boundary"
require_relative "stitching/resolver"
require_relative "stitching/client"
require_relative "stitching/composer"
require_relative "stitching/executor"
75 changes: 38 additions & 37 deletions lib/graphql/stitching/composer.rb
@@ -2,8 +2,8 @@

require_relative "./composer/base_validator"
require_relative "./composer/validate_interfaces"
require_relative "./composer/validate_boundaries"
require_relative "./composer/boundary_config"
require_relative "./composer/validate_resolvers"
require_relative "./composer/resolver_config"

module GraphQL
module Stitching
@@ -31,7 +31,7 @@ class T < GraphQL::Schema::Object
# @api private
VALIDATORS = [
"ValidateInterfaces",
"ValidateBoundaries",
"ValidateResolvers",
].freeze

# @return [String] name of the Query type in the composed schema.
@@ -62,10 +62,10 @@ def initialize(
@default_value_merger = default_value_merger || BASIC_VALUE_MERGER
@directive_kwarg_merger = directive_kwarg_merger || BASIC_VALUE_MERGER
@root_field_location_selector = root_field_location_selector || BASIC_ROOT_FIELD_LOCATION_SELECTOR
@boundary_configs = {}
@resolver_configs = {}

@field_map = nil
@boundary_map = nil
@resolver_map = nil
@mapped_type_names = nil
@candidate_directives_by_name_and_location = nil
@candidate_types_by_name_and_location = nil
@@ -125,7 +125,7 @@ def perform(locations_input)
raise ComposerError, "Cannot merge different kinds for `#{type_name}`. Found: #{kinds.join(", ")}."
end

extract_boundaries(type_name, types_by_location) if type_name == @query_name
extract_resolvers(type_name, types_by_location) if type_name == @query_name

memo[type_name] = case kinds.first
when "SCALAR"
@@ -157,12 +157,12 @@ def perform(locations_input)
end

select_root_field_locations(schema)
expand_abstract_boundaries(schema)
expand_abstract_resolvers(schema)

supergraph = Supergraph.new(
schema: schema,
fields: @field_map,
boundaries: @boundary_map,
resolvers: @resolver_map,
executables: executables,
)

@@ -189,8 +189,8 @@ def prepare_locations_input(locations_input)
raise ComposerError, "The schema for `#{location}` location must be a GraphQL::Schema class."
end

@boundary_configs.merge!(BoundaryConfig.extract_directive_assignments(schema, location, input[:stitch]))
@boundary_configs.merge!(BoundaryConfig.extract_federation_entities(schema, location))
@resolver_configs.merge!(ResolverConfig.extract_directive_assignments(schema, location, input[:stitch]))
@resolver_configs.merge!(ResolverConfig.extract_federation_entities(schema, location))

schemas[location.to_s] = schema
executables[location.to_s] = input[:executable] || schema
@@ -527,23 +527,23 @@ def merge_deprecations(type_name, members_by_location, field_name: nil, argument

# @!scope class
# @!visibility private
def extract_boundaries(type_name, types_by_location)
def extract_resolvers(type_name, types_by_location)
types_by_location.each do |location, type_candidate|
type_candidate.fields.each do |field_name, field_candidate|
boundary_type = field_candidate.type.unwrap
boundary_structure = Util.flatten_type_structure(field_candidate.type)
boundary_configs = @boundary_configs.fetch("#{location}.#{field_name}", [])
resolver_type = field_candidate.type.unwrap
resolver_structure = Util.flatten_type_structure(field_candidate.type)
resolver_configs = @resolver_configs.fetch("#{location}.#{field_name}", [])

field_candidate.directives.each do |directive|
next unless directive.graphql_name == GraphQL::Stitching.stitch_directive
boundary_configs << BoundaryConfig.from_kwargs(directive.arguments.keyword_arguments)
resolver_configs << ResolverConfig.from_kwargs(directive.arguments.keyword_arguments)
end

boundary_configs.each do |config|
resolver_configs.each do |config|
key_selections = GraphQL.parse("{ #{config.key} }").definitions[0].selections

if key_selections.length != 1
raise ComposerError, "Boundary key at #{type_name}.#{field_name} must specify exactly one key."
raise ComposerError, "Resolver key at #{type_name}.#{field_name} must specify exactly one key."
end

argument_name = key_selections[0].alias
@@ -555,34 +555,35 @@ def extract_boundaries(type_name, types_by_location)

argument = field_candidate.arguments[argument_name]
unless argument
# contextualize this... "boundaries with multiple args need mapping aliases."
raise ComposerError, "Invalid boundary argument `#{argument_name}` for #{type_name}.#{field_name}."
raise ComposerError, "No resolver argument matched for #{type_name}.#{field_name}. " \
"Add an alias to the key that specifies its intended argument, ex: `arg:key`"
end

argument_structure = Util.flatten_type_structure(argument.type)
if argument_structure.length != boundary_structure.length
raise ComposerError, "Mismatched input/output for #{type_name}.#{field_name}.#{argument_name} boundary. Arguments must map directly to results."
if argument_structure.length != resolver_structure.length
raise ComposerError, "Mismatched input/output for #{type_name}.#{field_name}.#{argument_name} resolver. " \
"Arguments must map directly to results."
end

boundary_type_name = if config.type_name
if !boundary_type.kind.abstract?
resolver_type_name = if config.type_name
if !resolver_type.kind.abstract?
raise ComposerError, "Resolver config may only specify a type name for abstract resolvers."
elsif !boundary_type.possible_types.find { _1.graphql_name == config.type_name }
elsif !resolver_type.possible_types.find { _1.graphql_name == config.type_name }
raise ComposerError, "Type `#{config.type_name}` is not a possible return type for query `#{field_name}`."
end
config.type_name
else
boundary_type.graphql_name
resolver_type.graphql_name
end

@boundary_map[boundary_type_name] ||= []
@boundary_map[boundary_type_name] << Boundary.new(
@resolver_map[resolver_type_name] ||= []
@resolver_map[resolver_type_name] << Resolver.new(
location: location,
type_name: boundary_type_name,
type_name: resolver_type_name,
key: key_selections[0].name,
field: field_candidate.name,
arg: argument_name,
list: boundary_structure.first.list?,
list: resolver_structure.first.list?,
federation: config.federation,
)
end
@@ -612,15 +613,15 @@ def select_root_field_locations(schema)

# @!scope class
# @!visibility private
def expand_abstract_boundaries(schema)
@boundary_map.keys.each do |type_name|
boundary_type = schema.types[type_name]
next unless boundary_type.kind.abstract?
def expand_abstract_resolvers(schema)
@resolver_map.keys.each do |type_name|
resolver_type = schema.types[type_name]
next unless resolver_type.kind.abstract?

expanded_types = Util.expand_abstract_type(schema, boundary_type)
expanded_types = Util.expand_abstract_type(schema, resolver_type)
expanded_types.select { @candidate_types_by_name_and_location[_1.graphql_name].length > 1 }.each do |expanded_type|
@boundary_map[expanded_type.graphql_name] ||= []
@boundary_map[expanded_type.graphql_name].push(*@boundary_map[type_name])
@resolver_map[expanded_type.graphql_name] ||= []
@resolver_map[expanded_type.graphql_name].push(*@resolver_map[type_name])
end
end
end
@@ -670,7 +671,7 @@ def build_enum_usage_map(schemas)

def reset!
@field_map = {}
@boundary_map = {}
@resolver_map = {}
@mapped_type_names = {}
@candidate_directives_by_name_and_location = nil
@schema_directives = nil
lib/graphql/stitching/composer/{boundary_config.rb → resolver_config.rb}
@@ -2,7 +2,7 @@

module GraphQL::Stitching
class Composer
class BoundaryConfig
class ResolverConfig
ENTITY_TYPENAME = "_Entity"
ENTITIES_QUERY = "_entities"

lib/graphql/stitching/composer/{validate_boundaries.rb → validate_resolvers.rb}
@@ -2,7 +2,7 @@

module GraphQL::Stitching
class Composer
class ValidateBoundaries < BaseValidator
class ValidateResolvers < BaseValidator

def perform(supergraph, composer)
supergraph.schema.types.each do |type_name, type|
@@ -15,9 +15,9 @@ def perform(supergraph, composer)
candidate_types_by_location = composer.candidate_types_by_name_and_location[type_name]
next unless candidate_types_by_location.length > 1

boundaries = supergraph.boundaries[type_name]
if boundaries&.any?
validate_as_boundary(supergraph, type, candidate_types_by_location, boundaries)
resolvers = supergraph.resolvers[type_name]
if resolvers&.any?
validate_as_resolver(supergraph, type, candidate_types_by_location, resolvers)
elsif type.kind.object?
validate_as_shared(supergraph, type, candidate_types_by_location)
end
@@ -26,48 +26,48 @@

private

def validate_as_boundary(supergraph, type, candidate_types_by_location, boundaries)
# abstract boundaries are expanded with their concrete implementations, which each get validated. Ignore the abstract itself.
def validate_as_resolver(supergraph, type, candidate_types_by_location, resolvers)
# abstract resolvers are expanded with their concrete implementations, which each get validated. Ignore the abstract itself.
return if type.kind.abstract?

# only one boundary allowed per type/location/key
boundaries_by_location_and_key = boundaries.each_with_object({}) do |boundary, memo|
if memo.dig(boundary.location, boundary.key)
raise Composer::ValidationError, "Multiple boundary queries for `#{type.graphql_name}.#{boundary.key}` "\
"found in #{boundary.location}. Limit one boundary query per type and key in each location. "\
"Abstract boundaries provide all possible types."
# only one resolver allowed per type/location/key
resolvers_by_location_and_key = resolvers.each_with_object({}) do |resolver, memo|
if memo.dig(resolver.location, resolver.key)
raise Composer::ValidationError, "Multiple resolver queries for `#{type.graphql_name}.#{resolver.key}` "\
"found in #{resolver.location}. Limit one resolver query per type and key in each location. "\
"Abstract resolvers provide all possible types."
end
memo[boundary.location] ||= {}
memo[boundary.location][boundary.key] = boundary
memo[resolver.location] ||= {}
memo[resolver.location][resolver.key] = resolver
end

boundary_keys = boundaries.map(&:key).to_set
resolver_keys = resolvers.map(&:key).to_set

# All non-key fields must be resolvable in at least one boundary location
# All non-key fields must be resolvable in at least one resolver location
supergraph.locations_by_type_and_field[type.graphql_name].each do |field_name, locations|
next if boundary_keys.include?(field_name)
next if resolver_keys.include?(field_name)

if locations.none? { boundaries_by_location_and_key[_1] }
if locations.none? { resolvers_by_location_and_key[_1] }
where = locations.length > 1 ? "one of #{locations.join(", ")} locations" : locations.first
raise Composer::ValidationError, "A boundary query is required for `#{type.graphql_name}` in #{where} to resolve field `#{field_name}`."
raise Composer::ValidationError, "A resolver query is required for `#{type.graphql_name}` in #{where} to resolve field `#{field_name}`."
end
end

# All locations of a boundary type must include at least one key field
# All locations of a resolver type must include at least one key field
supergraph.fields_by_type_and_location[type.graphql_name].each do |location, field_names|
if field_names.none? { boundary_keys.include?(_1) }
raise Composer::ValidationError, "A boundary key is required for `#{type.graphql_name}` in #{location} to join with other locations."
if field_names.none? { resolver_keys.include?(_1) }
raise Composer::ValidationError, "A resolver key is required for `#{type.graphql_name}` in #{location} to join with other locations."
end
end

# verify that all outbound locations can access all inbound locations
resolver_locations = boundaries_by_location_and_key.keys
resolver_locations = resolvers_by_location_and_key.keys
candidate_types_by_location.each_key do |location|
remote_locations = resolver_locations.reject { _1 == location }
paths = supergraph.route_type_to_locations(type.graphql_name, location, remote_locations)
if paths.length != remote_locations.length || paths.any? { |_loc, path| path.nil? }
raise Composer::ValidationError, "Cannot route `#{type.graphql_name}` boundaries in #{location} to all other locations. "\
"All locations must provide a boundary accessor that uses a conjoining key."
raise Composer::ValidationError, "Cannot route `#{type.graphql_name}` resolvers in #{location} to all other locations. "\
"All locations must provide a resolver query with a joining key."
end
end
end
@@ -87,7 +87,7 @@ def validate_as_shared(supergraph, type, candidate_types_by_location)
candidate_types_by_location.each do |location, candidate_type|
if candidate_type.fields.keys.sort != expected_fields
raise Composer::ValidationError, "Shared type `#{type.graphql_name}` must have consistent fields across locations, "\
"or else define boundary queries so that its unique fields may be accessed remotely."
"or else define resolver queries so that its unique fields may be accessed remotely."
end
end
end
6 changes: 3 additions & 3 deletions lib/graphql/stitching/executor.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true

require "json"
require_relative "./executor/boundary_source"
require_relative "./executor/resolver_source"
require_relative "./executor/root_source"

module GraphQL
@@ -55,9 +55,9 @@ def exec!(next_steps = [0])
tasks = @request.plan
.ops
.select { next_steps.include?(_1.after) }
.group_by { [_1.location, _1.boundary.nil?] }
.group_by { [_1.location, _1.resolver.nil?] }
.map do |(location, root_source), ops|
source_type = root_source ? RootSource : BoundarySource
source_type = root_source ? RootSource : ResolverSource
@dataloader.with(source_type, self, location).request_all(ops)
end

lib/graphql/stitching/executor/{boundary_source.rb → resolver_source.rb}
@@ -2,7 +2,7 @@

module GraphQL::Stitching
class Executor
class BoundarySource < GraphQL::Dataloader::Source
class ResolverSource < GraphQL::Dataloader::Source
def initialize(executor, location)
@executor = executor
@location = location
@@ -41,36 +41,36 @@ def fetch(ops)
ops.map { origin_sets_by_operation[_1] ? _1.step : nil }
end

# Builds batched boundary queries
# Builds batched resolver queries
# "query MyOperation_2_3($var:VarType) {
# _0_result: list(keys:["a","b","c"]) { boundarySelections... }
# _1_0_result: item(key:"x") { boundarySelections... }
# _1_1_result: item(key:"y") { boundarySelections... }
# _1_2_result: item(key:"z") { boundarySelections... }
# _0_result: list(keys:["a","b","c"]) { resolverSelections... }
# _1_0_result: item(key:"x") { resolverSelections... }
# _1_1_result: item(key:"y") { resolverSelections... }
# _1_2_result: item(key:"z") { resolverSelections... }
# }"
def build_document(origin_sets_by_operation, operation_name = nil, operation_directives = nil)
variable_defs = {}
query_fields = origin_sets_by_operation.map.with_index do |(op, origin_set), batch_index|
variable_defs.merge!(op.variables)
boundary = op.boundary
resolver = op.resolver

if boundary.list
if resolver.list?
input = origin_set.each_with_index.reduce(String.new) do |memo, (origin_obj, index)|
memo << "," if index > 0
memo << build_key(boundary.key, origin_obj, federation: boundary.federation)
memo << build_key(resolver.key, origin_obj, federation: resolver.federation)
memo
end

"_#{batch_index}_result: #{boundary.field}(#{boundary.arg}:[#{input}]) #{op.selections}"
"_#{batch_index}_result: #{resolver.field}(#{resolver.arg}:[#{input}]) #{op.selections}"
else
origin_set.map.with_index do |origin_obj, index|
input = build_key(boundary.key, origin_obj, federation: boundary.federation)
"_#{batch_index}_#{index}_result: #{boundary.field}(#{boundary.arg}:#{input}) #{op.selections}"
input = build_key(resolver.key, origin_obj, federation: resolver.federation?)
"_#{batch_index}_#{index}_result: #{resolver.field}(#{resolver.arg}:#{input}) #{op.selections}"
end
end
end

doc = String.new("query") # << boundary fulfillment always uses query
doc = String.new("query") # << resolver fulfillment always uses query

if operation_name
doc << " #{operation_name}"
@@ -106,7 +106,7 @@ def merge_results!(origin_sets_by_operation, raw_result)
return unless raw_result

origin_sets_by_operation.each_with_index do |(op, origin_set), batch_index|
results = if op.dig("boundary", "list")
results = if op.resolver.list?
raw_result["_#{batch_index}_result"]
else
origin_set.map.with_index { |_, index| raw_result["_#{batch_index}_#{index}_result"] }