graphql.nadel.engine.execution.HydrationInputResolver (nadel-engine)
Nadel is a Java library that combines multiple GraphQL services together into one API.
package graphql.nadel.engine.execution;
import graphql.Internal;
import graphql.execution.Async;
import graphql.execution.ExecutionContext;
import graphql.execution.ExecutionId;
import graphql.execution.ResultPath;
import graphql.language.Argument;
import graphql.language.ArrayValue;
import graphql.language.Field;
import graphql.language.FieldDefinition;
import graphql.language.NullValue;
import graphql.language.SelectionSet;
import graphql.language.StringValue;
import graphql.language.Value;
import graphql.nadel.OperationKind;
import graphql.nadel.Service;
import graphql.nadel.ServiceExecutionHydrationDetails;
import graphql.nadel.dsl.ExtendedFieldDefinition;
import graphql.nadel.dsl.NodeId;
import graphql.nadel.dsl.RemoteArgumentDefinition;
import graphql.nadel.dsl.RemoteArgumentSource;
import graphql.nadel.dsl.UnderlyingServiceHydration;
import graphql.nadel.engine.NadelContext;
import graphql.nadel.engine.execution.transformation.FieldTransformation;
import graphql.nadel.engine.execution.transformation.HydrationTransformation;
import graphql.nadel.engine.result.ElapsedTime;
import graphql.nadel.engine.result.ExecutionResultNode;
import graphql.nadel.engine.result.LeafExecutionResultNode;
import graphql.nadel.engine.result.ListExecutionResultNode;
import graphql.nadel.engine.result.ObjectExecutionResultNode;
import graphql.nadel.engine.result.ResultComplexityAggregator;
import graphql.nadel.engine.result.RootExecutionResultNode;
import graphql.nadel.hooks.ServiceExecutionHooks;
import graphql.nadel.normalized.NormalizedQueryField;
import graphql.nadel.util.FpKit;
import graphql.schema.GraphQLCompositeType;
import graphql.schema.GraphQLFieldDefinition;
import graphql.schema.GraphQLObjectType;
import graphql.schema.GraphQLSchema;
import graphql.util.NodeMultiZipper;
import graphql.util.NodeZipper;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import static graphql.Assert.assertNotNull;
import static graphql.Assert.assertTrue;
import static graphql.language.Field.newField;
import static graphql.language.SelectionSet.newSelectionSet;
import static graphql.nadel.engine.execution.StrategyUtil.changeFieldIdsInResultNode;
import static graphql.nadel.engine.execution.StrategyUtil.copyFieldInformation;
import static graphql.nadel.engine.execution.StrategyUtil.getHydrationInputNodes;
import static graphql.nadel.engine.execution.StrategyUtil.groupNodesIntoBatchesByField;
import static graphql.nadel.engine.result.ResultNodeAdapter.RESULT_NODE_ADAPTER;
import static graphql.nadel.util.FpKit.filter;
import static graphql.nadel.util.FpKit.findOneOrNull;
import static graphql.nadel.util.FpKit.flatList;
import static graphql.nadel.util.FpKit.map;
import static graphql.schema.GraphQLTypeUtil.isList;
import static graphql.schema.GraphQLTypeUtil.unwrapAll;
import static graphql.schema.GraphQLTypeUtil.unwrapNonNull;
import static java.lang.String.format;
import static java.util.Collections.singletonList;
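// HydrationInputResolver walks an overall result tree, finds the hydration input nodes produced by
// hydrated fields, calls back into the owning underlying services (one call per input, or batched
// where the hydration allows it) and splices the converted service results back into the tree.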
@Internal
public class HydrationInputResolver {
private final OverallQueryTransformer queryTransformer = new OverallQueryTransformer();
private final ServiceResultNodesToOverallResult serviceResultNodesToOverallResult = new ServiceResultNodesToOverallResult();
private final List<Service> services;
private final GraphQLSchema overallSchema;
private final ServiceExecutor serviceExecutor;
private final ServiceExecutionHooks serviceExecutionHooks;
private final Set<ResultPath> hydrationInputPaths;
public HydrationInputResolver(List<Service> services,
GraphQLSchema overallSchema,
ServiceExecutor serviceExecutor,
ServiceExecutionHooks serviceExecutionHooks,
Set<ResultPath> hydrationInputPaths) {
this.services = services;
this.overallSchema = overallSchema;
this.serviceExecutor = serviceExecutor;
this.serviceExecutionHooks = serviceExecutionHooks;
this.hydrationInputPaths = hydrationInputPaths;
}
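// Recursively resolves every hydration input under the given node: each pass collects the current
// hydration input zippers, resolves them (batched or one by one), zips the results back into the
// tree and recurses, so hydrations that themselves produce hydration inputs are also resolved.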
public CompletableFuture<ExecutionResultNode> resolveAllHydrationInputs(ExecutionContext context,
ExecutionResultNode node,
Map<Service, Object> serviceContexts,
ResultComplexityAggregator resultComplexityAggregator) {
Set<NodeZipper<ExecutionResultNode>> hydrationInputZippers = getHydrationInputNodes(node, hydrationInputPaths);
if (hydrationInputZippers.isEmpty()) {
return CompletableFuture.completedFuture(node);
}
List<NodeMultiZipper<ExecutionResultNode>> hydrationInputBatches = groupNodesIntoBatchesByField(hydrationInputZippers, node);
List<CompletableFuture<List<NodeZipper<ExecutionResultNode>>>> resolvedNodeCFs = new ArrayList<>();
for (NodeMultiZipper<ExecutionResultNode> batch : hydrationInputBatches) {
if (isBatchHydrationField((HydrationInputNode) batch.getZippers().get(0).getCurNode())) {
resolveInputNodesAsBatch(context, resolvedNodeCFs, batch, serviceContexts, resultComplexityAggregator);
} else {
resolveInputNodes(context, resolvedNodeCFs, batch, serviceContexts, resultComplexityAggregator);
}
}
return Async
.each(resolvedNodeCFs)
.thenCompose(resolvedNodes -> {
NodeMultiZipper<ExecutionResultNode> multiZipper = new NodeMultiZipper<>(node, flatList(resolvedNodes), RESULT_NODE_ADAPTER);
ExecutionResultNode newRoot = multiZipper.toRootNode();
return resolveAllHydrationInputs(context, newRoot, serviceContexts, resultComplexityAggregator);
})
.whenComplete(this::possiblyLogException);
}
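// Resolves each hydration input in the batch with its own service call.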
private void resolveInputNodes(ExecutionContext context,
List<CompletableFuture<List<NodeZipper<ExecutionResultNode>>>> resolvedNodeCFs,
NodeMultiZipper<ExecutionResultNode> batch, Map<Service, Object> serviceContexts,
ResultComplexityAggregator resultComplexityAggregator) {
for (NodeZipper<ExecutionResultNode> hydrationInputNodeZipper : batch.getZippers()) {
HydrationInputNode hydrationInputNode = (HydrationInputNode) hydrationInputNodeZipper.getCurNode();
CompletableFuture<ExecutionResultNode> executionResultNodeCompletableFuture = resolveSingleHydrationInput(context, hydrationInputNode, serviceContexts, resultComplexityAggregator);
resolvedNodeCFs.add(executionResultNodeCompletableFuture.thenApply(newNode -> singletonList(hydrationInputNodeZipper.withNewNode(newNode))));
}
}
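// Splits the batch into chunks that honour the configured batch size and resolves each chunk with a single service call.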
private void resolveInputNodesAsBatch(ExecutionContext context,
List<CompletableFuture<List<NodeZipper<ExecutionResultNode>>>> resolvedNodeCFs,
NodeMultiZipper<ExecutionResultNode> batch,
Map<Service, Object> serviceContexts,
ResultComplexityAggregator resultComplexityAggregator) {
List<NodeMultiZipper<ExecutionResultNode>> batchesWithCorrectSize = groupIntoCorrectBatchSizes(batch);
for (NodeMultiZipper<ExecutionResultNode> oneBatch : batchesWithCorrectSize) {
List<HydrationInputNode> batchedNodes = map(oneBatch.getZippers(), zipper -> (HydrationInputNode) zipper.getCurNode());
CompletableFuture<List<ExecutionResultNode>> executionResultNodeCompletableFuture = resolveHydrationInputBatch(context, batchedNodes, serviceContexts, resultComplexityAggregator);
resolvedNodeCFs.add(replaceNodesInZipper(oneBatch, executionResultNodeCompletableFuture));
}
}
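// Reads the default batch size from the (extended) field definition of the hydration's top level field, if one is declared.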
private Integer getDefaultBatchSize(UnderlyingServiceHydration underlyingServiceHydration) {
GraphQLFieldDefinition graphQLFieldDefinition = null;
String topLevelField = underlyingServiceHydration.getTopLevelField();
if (underlyingServiceHydration.getSyntheticField() != null) {
GraphQLFieldDefinition syntheticFieldDefinition = overallSchema.getQueryType().getFieldDefinition(underlyingServiceHydration.getSyntheticField());
if (syntheticFieldDefinition == null) {
return null;
}
GraphQLObjectType syntheticFieldDefinitionType = (GraphQLObjectType) syntheticFieldDefinition.getType();
graphQLFieldDefinition = syntheticFieldDefinitionType.getFieldDefinition(underlyingServiceHydration.getTopLevelField());
} else {
graphQLFieldDefinition = overallSchema.getQueryType().getFieldDefinition(topLevelField);
}
// the field we use to hydrate doesn't need to be exposed, therefore can be null
if (graphQLFieldDefinition == null) {
return null;
}
FieldDefinition fieldDefinition = graphQLFieldDefinition.getDefinition();
if (!(fieldDefinition instanceof ExtendedFieldDefinition)) {
return null;
}
return ((ExtendedFieldDefinition) fieldDefinition).getDefaultBatchSize();
}
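// Re-groups the zippers of one batch into sub-batches of at most the configured batch size; without a batch size the batch is used as is.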
private List<NodeMultiZipper<ExecutionResultNode>> groupIntoCorrectBatchSizes(NodeMultiZipper<ExecutionResultNode> batch) {
HydrationInputNode node = (HydrationInputNode) batch.getZippers().get(0).getCurNode();
Integer batchSize = node.getHydrationTransformation().getUnderlyingServiceHydration().getBatchSize();
if (batchSize == null) {
batchSize = getDefaultBatchSize(node.getHydrationTransformation().getUnderlyingServiceHydration());
}
if (batchSize == null) {
return singletonList(batch);
}
List<NodeMultiZipper<ExecutionResultNode>> result = new ArrayList<>();
int counter = 0;
List<NodeZipper<ExecutionResultNode>> currentBatch = new ArrayList<>();
for (NodeZipper<ExecutionResultNode> zipper : batch.getZippers()) {
currentBatch.add(zipper);
counter++;
if (counter == batchSize) {
result.add(new NodeMultiZipper<>(batch.getCommonRoot(), currentBatch, RESULT_NODE_ADAPTER));
counter = 0;
currentBatch = new ArrayList<>();
}
}
if (currentBatch.size() > 0) {
result.add(new NodeMultiZipper<>(batch.getCommonRoot(), currentBatch, RESULT_NODE_ADAPTER));
}
return result;
}
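// A hydration is batched when the top level field used for hydration returns a list in the underlying schema.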
private boolean isBatchHydrationField(HydrationInputNode hydrationInputNode) {
HydrationTransformation hydrationTransformation = hydrationInputNode.getHydrationTransformation();
Service service = getService(hydrationTransformation.getUnderlyingServiceHydration());
String syntheticFieldName = hydrationTransformation.getUnderlyingServiceHydration().getSyntheticField();
String topLevelFieldName = hydrationTransformation.getUnderlyingServiceHydration().getTopLevelField();
GraphQLFieldDefinition topLevelFieldDefinition;
if (syntheticFieldName == null) {
topLevelFieldDefinition = service.getUnderlyingSchema().getQueryType().getFieldDefinition(topLevelFieldName);
} else {
topLevelFieldDefinition = ((GraphQLObjectType) service.getUnderlyingSchema().getQueryType().getFieldDefinition(syntheticFieldName).getType()).getFieldDefinition(topLevelFieldName);
}
assertNotNull(topLevelFieldDefinition, () -> String.format("hydration field '%s' does not exist in underlying schema in service '%s'", topLevelFieldName, service.getName()));
return isList(unwrapNonNull(topLevelFieldDefinition.getType()));
}
private CompletableFuture<List<NodeZipper<ExecutionResultNode>>> replaceNodesInZipper(NodeMultiZipper<ExecutionResultNode> batch,
CompletableFuture<List<ExecutionResultNode>> executionResultNodeCompletableFuture) {
return executionResultNodeCompletableFuture.thenApply(executionResultNodes -> {
List<NodeZipper<ExecutionResultNode>> newZippers = new ArrayList<>();
List<NodeZipper<ExecutionResultNode>> zippers = batch.getZippers();
for (int i = 0; i < executionResultNodes.size(); i++) {
NodeZipper<ExecutionResultNode> zipper = zippers.get(i);
NodeZipper<ExecutionResultNode> newZipper = zipper.withNewNode(executionResultNodes.get(i));
newZippers.add(newZipper);
}
return newZippers;
});
}
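// Builds a hydration query for one input node, executes it against the owning service and converts the result back into an overall result node.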
private CompletableFuture<ExecutionResultNode> resolveSingleHydrationInput(ExecutionContext executionContext,
HydrationInputNode hydrationInputNode,
Map<Service, Object> serviceContexts,
ResultComplexityAggregator resultComplexityAggregator) {
HydrationTransformation hydrationTransformation = hydrationInputNode.getHydrationTransformation();
Field originalField = hydrationTransformation.getOriginalField();
UnderlyingServiceHydration underlyingServiceHydration = hydrationTransformation.getUnderlyingServiceHydration();
ServiceExecutionHydrationDetails hydrationDetails = new ServiceExecutionHydrationDetails(underlyingServiceHydration.getTimeout(),
underlyingServiceHydration.getBatchSize(),
null,
null);
String topLevelFieldName = underlyingServiceHydration.getTopLevelField();
Service service = getService(underlyingServiceHydration);
Field topLevelField = createSingleHydrationTopLevelField(hydrationInputNode,
hydrationInputNode.getSelectionSet(),
underlyingServiceHydration,
topLevelFieldName,
underlyingServiceHydration.getSyntheticField(),
originalField);
GraphQLCompositeType topLevelFieldType = (GraphQLCompositeType) unwrapAll(hydrationTransformation.getOriginalFieldType());
OperationKind operationKind = OperationKind.QUERY;
String operationName = buildOperationName(service, executionContext);
boolean isSyntheticHydration = underlyingServiceHydration.getSyntheticField() != null;
CompletableFuture<QueryTransformationResult> queryTransformationResultCF = queryTransformer
.transformHydratedTopLevelField(
executionContext,
service.getUnderlyingSchema(),
operationName,
operationKind,
topLevelField,
topLevelFieldType,
serviceExecutionHooks,
service,
serviceContexts.get(service),
isSyntheticHydration
);
return queryTransformationResultCF.thenCompose(queryTransformationResult -> {
CompletableFuture<RootExecutionResultNode> serviceResult = serviceExecutor
.execute(executionContext, queryTransformationResult, service, operationKind,
serviceContexts.get(service), service.getUnderlyingSchema(), hydrationDetails);
return serviceResult
.thenApply(resultNode -> convertSingleHydrationResultIntoOverallResult(executionContext.getExecutionId(),
hydrationInputNode,
hydrationTransformation,
resultNode,
hydrationInputNode.getNormalizedField(),
queryTransformationResult,
getNadelContext(executionContext),
resultComplexityAggregator
))
.whenComplete(this::possiblyLogException);
});
}
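// Builds the top level field for a single hydration call, wrapping it in the configured synthetic field when one is present.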
private Field createSingleHydrationTopLevelField(HydrationInputNode hydrationInputNode,
SelectionSet selectionSet,
UnderlyingServiceHydration underlyingServiceHydration,
String topLevelFieldName,
String syntheticFieldName,
Field originalField) {
List<Argument> allArguments = getArguments(hydrationInputNode, underlyingServiceHydration, originalField);
Field topLevelField = newField(topLevelFieldName)
.selectionSet(selectionSet)
.arguments(allArguments)
.additionalData(NodeId.ID, UUID.randomUUID().toString())
.build();
if (syntheticFieldName == null) {
return topLevelField;
}
Field syntheticField = newField(syntheticFieldName)
.selectionSet(newSelectionSet().selection(topLevelField).build())
.additionalData(NodeId.ID, UUID.randomUUID().toString())
.build();
return syntheticField;
}
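// Builds the argument list for a single hydration call: arguments sourced from the parent object are read
// from the completed value of the input node; declared field arguments are copied from the original field.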
private List<Argument> getArguments(HydrationInputNode hydrationInputNode, UnderlyingServiceHydration underlyingServiceHydration, Field originalField) {
List<RemoteArgumentDefinition> arguments = underlyingServiceHydration.getArguments();
List<RemoteArgumentDefinition> argumentDefinitionsFromSourceObjects = filter(arguments, argument -> argument.getRemoteArgumentSource().getSourceType() == RemoteArgumentSource.SourceType.OBJECT_FIELD);
List<Argument> allArguments = new ArrayList<>();
for (RemoteArgumentDefinition definition : argumentDefinitionsFromSourceObjects) {
List<String> sourcePath = definition.getRemoteArgumentSource().getPath();
Object definitionValue = getDefinitionValue(sourcePath, hydrationInputNode.getCompletedValue());
Value argumentValue = (definitionValue != null) ? new StringValue(definitionValue.toString()) : NullValue.newNullValue().build();
Argument argumentAstFromSourceObject = Argument.newArgument()
.name(definition.getName())
.value(argumentValue)
.build();
allArguments.add(argumentAstFromSourceObject);
}
addExtraFieldArguments(originalField, arguments, allArguments);
return allArguments;
}
private Object getDefinitionValue(List<String> sourcePath, Object value) {
for (String path : sourcePath) {
value = ((Map) value).get(path);
}
return value;
}
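// Converts the service result of a single hydration call into an overall result node, records complexity
// metrics for the hydrating service and re-attaches the original field ids and any errors.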
private ExecutionResultNode convertSingleHydrationResultIntoOverallResult(ExecutionId executionId,
HydrationInputNode hydrationInputNode,
HydrationTransformation hydrationTransformation,
RootExecutionResultNode rootResultNode,
NormalizedQueryField rootNormalizedField,
QueryTransformationResult queryTransformationResult,
NadelContext nadelContext,
ResultComplexityAggregator resultComplexityAggregator
) {
Map<String, FieldTransformation> transformationByTransformationId = queryTransformationResult.getTransformations().getTransformationIdToTransformation();
Map<FieldTransformation, String> transformationToFieldId = queryTransformationResult.getTransformations().getTransformationToFieldId();
Map<String, String> typeRenameMappings = queryTransformationResult.getTransformations().getTypeRenameMappings();
assertTrue(rootResultNode.getChildren().size() == 1, () -> "expected rootResultNode to only have 1 child.");
ExecutionResultNode root = rootResultNode.getChildren().get(0);
if (hydrationTransformation.getUnderlyingServiceHydration().getSyntheticField() != null && root.getChildren().size() > 0) {
assertTrue(root.getChildren().size() == 1, () -> "expected synthetic field to only have 1 topLevelField child.");
root = root.getChildren().get(0);
}
ExecutionResultNode firstTopLevelResultNode = serviceResultNodesToOverallResult
.convertChildren(executionId,
root,
rootNormalizedField,
overallSchema,
hydrationInputNode,
true,
false,
transformationByTransformationId,
transformationToFieldId,
typeRenameMappings,
nadelContext,
queryTransformationResult.getRemovedFieldMap(),
hydrationInputPaths);
String serviceName = hydrationTransformation.getUnderlyingServiceHydration().getServiceName();
resultComplexityAggregator.incrementServiceNodeCount(serviceName, firstTopLevelResultNode.getTotalNodeCount());
resultComplexityAggregator.incrementTypeRenameCount(firstTopLevelResultNode.getTotalTypeRenameCount());
resultComplexityAggregator.incrementFieldRenameCount(firstTopLevelResultNode.getTotalFieldRenameCount());
firstTopLevelResultNode = firstTopLevelResultNode.withNewErrors(rootResultNode.getErrors());
firstTopLevelResultNode = StrategyUtil.copyFieldInformation(hydrationInputNode, firstTopLevelResultNode);
return changeFieldIdsInResultNode(firstTopLevelResultNode, NodeId.getId(hydrationTransformation.getOriginalField()));
}
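// Builds one batched hydration query for a list of input nodes, executes it against the owning service
// and fans the returned objects back out to the individual inputs.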
private CompletableFuture<List<ExecutionResultNode>> resolveHydrationInputBatch(ExecutionContext executionContext,
List<HydrationInputNode> hydrationInputs,
Map<Service, Object> serviceContexts,
ResultComplexityAggregator resultComplexityAggregator) {
List<HydrationTransformation> hydrationTransformations = map(hydrationInputs, HydrationInputNode::getHydrationTransformation);
HydrationTransformation hydrationTransformation = hydrationTransformations.get(0);
Field originalField = hydrationTransformation.getOriginalField();
UnderlyingServiceHydration underlyingServiceHydration = hydrationTransformation.getUnderlyingServiceHydration();
ServiceExecutionHydrationDetails hydrationDetails = new ServiceExecutionHydrationDetails(underlyingServiceHydration.getTimeout(),
underlyingServiceHydration.getBatchSize(),
null,
null);
Service service = getService(underlyingServiceHydration);
Field topLevelField = createBatchHydrationTopLevelField(executionContext,
hydrationInputs,
originalField,
underlyingServiceHydration);
GraphQLCompositeType topLevelFieldType = (GraphQLCompositeType) unwrapAll(hydrationTransformation.getOriginalFieldType());
OperationKind operationKind = OperationKind.QUERY;
String operationName = buildOperationName(service, executionContext);
boolean isSyntheticHydration = underlyingServiceHydration.getSyntheticField() != null;
CompletableFuture<QueryTransformationResult> queryTransformationResultCF = queryTransformer
.transformHydratedTopLevelField(
executionContext,
service.getUnderlyingSchema(),
operationName, operationKind,
topLevelField,
topLevelFieldType,
serviceExecutionHooks,
service,
serviceContexts.get(service),
isSyntheticHydration
);
return queryTransformationResultCF.thenCompose(queryTransformationResult -> {
return serviceExecutor
.execute(executionContext, queryTransformationResult, service, operationKind, serviceContexts.get(service), service.getUnderlyingSchema(), hydrationDetails)
.thenApply(resultNode -> convertHydrationBatchResultIntoOverallResult(executionContext, hydrationInputs, resultNode, queryTransformationResult, resultComplexityAggregator))
.whenComplete(this::possiblyLogException);
});
}
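// Builds the top level field for a batched hydration call, adding the object identifier field unless the hydration matches results by index.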
private Field createBatchHydrationTopLevelField(ExecutionContext executionContext,
List<HydrationInputNode> hydrationInputs,
Field originalField,
UnderlyingServiceHydration underlyingServiceHydration) {
String topLevelFieldName = underlyingServiceHydration.getTopLevelField();
String syntheticFieldName = underlyingServiceHydration.getSyntheticField();
List<Argument> allArguments = getBatchArguments(hydrationInputs, originalField, underlyingServiceHydration);
Field topLevelField = newField(topLevelFieldName)
.selectionSet(hydrationInputs.get(0).getSelectionSet())
.additionalData(NodeId.ID, UUID.randomUUID().toString())
.arguments(allArguments)
.build();
if (!underlyingServiceHydration.isObjectMatchByIndex()) {
topLevelField = ArtificialFieldUtils.addObjectIdentifier(getNadelContext(executionContext), topLevelField, underlyingServiceHydration.getObjectIdentifier());
}
if (syntheticFieldName == null) {
return topLevelField;
}
Field syntheticField = newField(syntheticFieldName)
.selectionSet(newSelectionSet().selection(topLevelField).build())
.additionalData(NodeId.ID, UUID.randomUUID().toString())
.build();
return syntheticField;
}
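// Builds the argument list for a batched call: object-field sourced arguments become an ArrayValue with one
// entry per input node; declared field arguments are copied from the original field.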
private List<Argument> getBatchArguments(List<HydrationInputNode> hydrationInputs,
Field originalField,
UnderlyingServiceHydration underlyingServiceHydration) {
List<RemoteArgumentDefinition> arguments = underlyingServiceHydration.getArguments();
List<RemoteArgumentDefinition> argumentDefinitionsFromSourceObjects = filter(arguments, argument -> argument.getRemoteArgumentSource().getSourceType() == RemoteArgumentSource.SourceType.OBJECT_FIELD);
List<Argument> allArguments = new ArrayList<>();
for (RemoteArgumentDefinition definition : argumentDefinitionsFromSourceObjects) {
List<Value> values = new ArrayList<>();
List<String> sourcePath = definition.getRemoteArgumentSource().getPath();
for (ExecutionResultNode hydrationInputNode : hydrationInputs) {
Object definitionValue = getDefinitionValue(sourcePath, hydrationInputNode.getCompletedValue());
Value argumentValue = (definitionValue != null) ? new StringValue(definitionValue.toString()) : NullValue.newNullValue().build();
values.add(argumentValue);
}
Argument argumentAstFromSourceObject = Argument.newArgument().name(definition.getName()).value(new ArrayValue(values)).build();
allArguments.add(argumentAstFromSourceObject);
}
addExtraFieldArguments(originalField, arguments, allArguments);
return allArguments;
}
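// Copies those arguments of the original field that the hydration declares as FIELD_ARGUMENT sources onto the hydration top level field.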
private void addExtraFieldArguments(Field originalField, List<RemoteArgumentDefinition> arguments, List<Argument> allArguments) {
List<RemoteArgumentDefinition> extraArguments = filter(arguments, argument -> argument.getRemoteArgumentSource().getSourceType() == RemoteArgumentSource.SourceType.FIELD_ARGUMENT);
Map<String, Argument> originalArgumentsByName = FpKit.getByName(originalField.getArguments(), Argument::getName);
for (RemoteArgumentDefinition argumentDefinition : extraArguments) {
if (originalArgumentsByName.containsKey(argumentDefinition.getName())) {
allArguments.add(originalArgumentsByName.get(argumentDefinition.getName()));
}
}
}
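// Matches each returned object to its input node (by index or by object identifier), converts it into an
// overall result node and attaches the service errors to the first result node.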
private List<ExecutionResultNode> convertHydrationBatchResultIntoOverallResult(ExecutionContext executionContext,
List<HydrationInputNode> hydrationInputNodes,
RootExecutionResultNode rootResultNode,
QueryTransformationResult queryTransformationResult,
ResultComplexityAggregator resultComplexityAggregator) {
UnderlyingServiceHydration serviceHydration = hydrationInputNodes.get(0).getHydrationTransformation().getUnderlyingServiceHydration();
boolean isSyntheticHydration = serviceHydration.getSyntheticField() != null;
boolean isResolveByIndex = serviceHydration.isObjectMatchByIndex();
ExecutionResultNode root = rootResultNode.getChildren().get(0);
if (!(root instanceof LeafExecutionResultNode) && isSyntheticHydration) {
root = root.getChildren().get(0);
}
if (root instanceof LeafExecutionResultNode) {
// we only expect a null value here
assertTrue(root.isNullValue());
List<ExecutionResultNode> result = new ArrayList<>();
boolean first = true;
for (HydrationInputNode hydrationInputNode : hydrationInputNodes) {
ExecutionResultNode resultNode = createNullValue(hydrationInputNode);
if (first) {
resultNode = resultNode.withNewErrors(rootResultNode.getErrors());
first = false;
}
result.add(resultNode);
}
return result;
}
assertTrue(root instanceof ListExecutionResultNode, () -> "expect a list result from the underlying service for batched hydration");
ListExecutionResultNode listResultNode = (ListExecutionResultNode) root;
List<ExecutionResultNode> resolvedNodes = listResultNode.getChildren();
if (isResolveByIndex) {
assertTrue(resolvedNodes.size() == hydrationInputNodes.size(), () -> String.format(
"If you use indexed hydration then you MUST follow a contract where the resolved nodes matches the size of the input arguments. We expected %d returned nodes but only got %d",
hydrationInputNodes.size(),
resolvedNodes.size()
));
}
List<ExecutionResultNode> result = new ArrayList<>();
Map<String, FieldTransformation> transformationByResultField = queryTransformationResult.getTransformations().getTransformationIdToTransformation();
Map<FieldTransformation, String> transformationToFieldId = queryTransformationResult.getTransformations().getTransformationToFieldId();
Map<String, String> typeRenameMappings = queryTransformationResult.getTransformations().getTypeRenameMappings();
boolean first = true;
for (int i = 0; i < hydrationInputNodes.size(); i++) {
HydrationInputNode hydrationInputNode = hydrationInputNodes.get(i);
ExecutionResultNode matchingResolvedNode;
if (isResolveByIndex) {
matchingResolvedNode = resolvedNodes.get(i);
} else {
// the first source object defined in the nadel schema is the idDefinition
RemoteArgumentDefinition idDefinition = findOneOrNull(serviceHydration.getArguments(), argument -> (argument.getRemoteArgumentSource().getSourceType() == RemoteArgumentSource.SourceType.OBJECT_FIELD));
matchingResolvedNode = findMatchingResolvedNode(executionContext,
hydrationInputNode,
resolvedNodes,
idDefinition.getRemoteArgumentSource().getPath());
}
ExecutionResultNode resultNode;
if (matchingResolvedNode != null) {
ExecutionResultNode overallResultNode = serviceResultNodesToOverallResult.convertChildren(
executionContext.getExecutionId(),
matchingResolvedNode,
hydrationInputNode.getNormalizedField(),
overallSchema,
hydrationInputNode,
true,
true,
transformationByResultField,
transformationToFieldId,
typeRenameMappings,
getNadelContext(executionContext),
queryTransformationResult.getRemovedFieldMap(),
hydrationInputPaths);
String serviceName = hydrationInputNode.getHydrationTransformation().getUnderlyingServiceHydration().getServiceName();
int nodeCount = overallResultNode.getTotalNodeCount();
resultComplexityAggregator.incrementServiceNodeCount(serviceName, nodeCount);
resultComplexityAggregator.incrementTypeRenameCount(overallResultNode.getTotalTypeRenameCount());
resultComplexityAggregator.incrementFieldRenameCount(overallResultNode.getTotalFieldRenameCount());
resultNode = copyFieldInformation(hydrationInputNode, overallResultNode);
} else {
resultNode = createNullValue(hydrationInputNode);
}
if (first) {
resultNode = resultNode.withNewErrors(rootResultNode.getErrors());
first = false;
}
result.add(resultNode);
}
return result;
}
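// Creates a null leaf node that keeps the field information of the input node, used when no matching object was returned.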
private LeafExecutionResultNode createNullValue(HydrationInputNode inputNode) {
ElapsedTime elapsedTime = inputNode.getElapsedTime();
return LeafExecutionResultNode.newLeafExecutionResultNode()
.objectType(inputNode.getObjectType())
.alias(inputNode.getAlias())
.fieldIds(inputNode.getFieldIds())
.resultPath(inputNode.getResultPath())
.fieldDefinition(inputNode.getFieldDefinition())
.completedValue(null)
.elapsedTime(elapsedTime)
.build();
}
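// Finds the resolved node whose object identifier value equals the value read from the input node's source path.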
private ExecutionResultNode findMatchingResolvedNode(ExecutionContext executionContext,
HydrationInputNode inputNode,
List<ExecutionResultNode> resolvedNodes,
List<String> sourcePath) {
NadelContext nadelContext = getNadelContext(executionContext);
String objectIdentifier = nadelContext.getObjectIdentifierAlias();
String inputNodeId = (String) getDefinitionValue(sourcePath, inputNode.getCompletedValue());
for (ExecutionResultNode resolvedNode : resolvedNodes) {
LeafExecutionResultNode idNode = getFieldByResultKey((ObjectExecutionResultNode) resolvedNode, objectIdentifier);
assertNotNull(idNode, () -> String.format("no value found for object identifier: %s", objectIdentifier));
Object id = idNode.getCompletedValue();
assertNotNull(id, () -> "object identifier is null");
if (id.equals(inputNodeId)) {
return resolvedNode;
}
}
return null;
}
private LeafExecutionResultNode getFieldByResultKey(ObjectExecutionResultNode node, String resultKey) {
return (LeafExecutionResultNode) findOneOrNull(node.getChildren(), child -> child.getResultKey().equals(resultKey));
}
@SuppressWarnings("unused")
private <T> void possiblyLogException(T result, Throwable exception) {
if (exception != null) {
exception.printStackTrace();
}
}
@SuppressWarnings("OptionalGetWithoutIsPresent")
private Service getService(UnderlyingServiceHydration underlyingServiceHydration) {
return FpKit.findOne(services, service -> service.getName().equals(underlyingServiceHydration.getServiceName())).get();
}
private String buildOperationName(Service service, ExecutionContext executionContext) {
// to help with downstream debugging we put our name and their name in the operation
NadelContext nadelContext = executionContext.getContext();
if (nadelContext.getOriginalOperationName() != null) {
return format("nadel_2_%s_%s", service.getName(), nadelContext.getOriginalOperationName());
} else {
return format("nadel_2_%s", service.getName());
}
}
private NadelContext getNadelContext(ExecutionContext executionContext) {
return (NadelContext) executionContext.getContext();
}
}