lib/graphql/stitching/executor.rb in graphql-stitching-0.3.4 vs lib/graphql/stitching/executor.rb in graphql-stitching-0.3.6
- line removed (present only in 0.3.4, the old version)
+ line added (present only in 0.3.6, the new version)
@@ -24,26 +24,26 @@
if result["errors"]&.any?
result["errors"].each { _1.delete("locations") }
@executor.errors.concat(result["errors"])
end
- ops.map { op["key"] }
+ ops.map { op["order"] }
end
# Builds root source documents
# "query MyOperation_1($var:VarType) { rootSelections ... }"
def build_document(op, operation_name = nil)
doc = String.new
doc << op["operation_type"]
if operation_name
- doc << " " << operation_name << "_" << op["key"].to_s
+ doc << " #{operation_name}_#{op["order"]}"
end
if op["variables"].any?
variable_defs = op["variables"].map { |k, v| "$#{k}:#{v}" }.join(",")
- doc << "(" << variable_defs << ")"
+ doc << "(#{variable_defs})"
end
doc << op["selections"]
doc
end
@@ -55,17 +55,17 @@
@location = location
end
def fetch(ops)
origin_sets_by_operation = ops.each_with_object({}) do |op, memo|
- origin_set = op["insertion_path"].reduce([@executor.data]) do |set, path_segment|
+ origin_set = op["path"].reduce([@executor.data]) do |set, path_segment|
set.flat_map { |obj| obj && obj[path_segment] }.tap(&:compact!)
end
- if op["type_condition"]
+ if op["if_type"]
# operations planned around unused fragment conditions should not trigger requests
- origin_set.select! { _1["_STITCH_typename"] == op["type_condition"] }
+ origin_set.select! { _1["_STITCH_typename"] == op["if_type"] }
end
memo[op] = origin_set if origin_set.any?
end
@@ -79,11 +79,11 @@
errors = raw_result.dig("errors")
@executor.errors.concat(extract_errors!(origin_sets_by_operation, errors)) if errors&.any?
end
- ops.map { origin_sets_by_operation[_1] ? _1["key"] : nil }
+ ops.map { origin_sets_by_operation[_1] ? _1["order"] : nil }
end
# Builds batched boundary queries
# "query MyOperation_2_3($var:VarType) {
# _0_result: list(keys:["a","b","c"]) { boundarySelections... }
@@ -94,11 +94,11 @@
def build_document(origin_sets_by_operation, operation_name = nil)
variable_defs = {}
query_fields = origin_sets_by_operation.map.with_index do |(op, origin_set), batch_index|
variable_defs.merge!(op["variables"])
boundary = op["boundary"]
- key_selection = "_STITCH_#{boundary["selection"]}"
+ key_selection = "_STITCH_#{boundary["key"]}"
if boundary["list"]
input = JSON.generate(origin_set.map { _1[key_selection] })
"_#{batch_index}_result: #{boundary["field"]}(#{boundary["arg"]}:#{input}) #{op["selections"]}"
else
@@ -111,22 +111,22 @@
doc = String.new
doc << "query" # << boundary fulfillment always uses query
if operation_name
- doc << " " << operation_name
+ doc << " #{operation_name}"
origin_sets_by_operation.each_key do |op|
- doc << "_" << op["key"].to_s
+ doc << "_#{op["order"]}"
end
end
if variable_defs.any?
variable_str = variable_defs.map { |k, v| "$#{k}:#{v}" }.join(",")
- doc << "(" << variable_str << ")"
+ doc << "(#{variable_str})"
end
- doc << "{ " << query_fields.join(" ") << " }"
+ doc << "{ #{query_fields.join(" ")} }"
return doc, variable_defs.keys
end
def merge_results!(origin_sets_by_operation, raw_result)
@@ -181,11 +181,11 @@
memo << err
end
if pathed_errors_by_op_index_and_object_id.any?
pathed_errors_by_op_index_and_object_id.each do |op_index, pathed_errors_by_object_id|
- repath_errors!(pathed_errors_by_object_id, ops.dig(op_index, "insertion_path"))
+ repath_errors!(pathed_errors_by_object_id, ops.dig(op_index, "path"))
errors_result.concat(pathed_errors_by_object_id.values)
end
end
errors_result.flatten!
end
@@ -263,19 +263,19 @@
result
end
private
- def exec!(after_keys = [0])
+ def exec!(next_ordinals = [0])
if @exec_cycles > @queue.length
# sanity check... if we've exceeded queue size, then something went wrong.
raise StitchingError, "Too many execution requests attempted."
end
@dataloader.append_job do
tasks = @queue
- .select { after_keys.include?(_1["after_key"]) }
+ .select { next_ordinals.include?(_1["after"]) }
.group_by { [_1["location"], _1["boundary"].nil?] }
.map do |(location, root_source), ops|
if root_source
@dataloader.with(RootSource, self, location).request_all(ops)
else
@@ -289,12 +289,11 @@
@exec_cycles += 1
@dataloader.run
end
def exec_task(task)
- next_keys = task.load
- next_keys.compact!
- exec!(next_keys) if next_keys.any?
+ next_ordinals = task.load.tap(&:compact!)
+ exec!(next_ordinals) if next_ordinals.any?
end
end
end
end