All Downloads are FREE. Search and download functionalities are using the official Maven repository.

graphql.normalized.ExecutableNormalizedOperationFactory Maven / Gradle / Ivy

There is a newer version: 230521-nf-execution
Show newest version
package graphql.normalized;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import graphql.Assert;
import graphql.ExperimentalApi;
import graphql.GraphQLContext;
import graphql.PublicApi;
import graphql.collect.ImmutableKit;
import graphql.execution.AbortExecutionException;
import graphql.execution.CoercedVariables;
import graphql.execution.MergedField;
import graphql.execution.RawVariables;
import graphql.execution.ValuesResolver;
import graphql.execution.conditional.ConditionalNodes;
import graphql.execution.directives.QueryDirectives;
import graphql.execution.directives.QueryDirectivesImpl;
import graphql.execution.incremental.IncrementalUtils;
import graphql.introspection.Introspection;
import graphql.language.Directive;
import graphql.language.Document;
import graphql.language.Field;
import graphql.language.FragmentDefinition;
import graphql.language.FragmentSpread;
import graphql.language.InlineFragment;
import graphql.language.NodeUtil;
import graphql.language.OperationDefinition;
import graphql.language.Selection;
import graphql.language.SelectionSet;
import graphql.language.VariableDefinition;
import graphql.normalized.incremental.NormalizedDeferredExecution;
import graphql.schema.FieldCoordinates;
import graphql.schema.GraphQLCompositeType;
import graphql.schema.GraphQLFieldDefinition;
import graphql.schema.GraphQLInterfaceType;
import graphql.schema.GraphQLNamedOutputType;
import graphql.schema.GraphQLObjectType;
import graphql.schema.GraphQLSchema;
import graphql.schema.GraphQLType;
import graphql.schema.GraphQLUnionType;
import graphql.schema.GraphQLUnmodifiedType;
import graphql.schema.impl.SchemaUtil;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;

import static graphql.Assert.assertNotNull;
import static graphql.Assert.assertShouldNeverHappen;
import static graphql.collect.ImmutableKit.map;
import static graphql.schema.GraphQLTypeUtil.unwrapAll;
import static graphql.util.FpKit.filterSet;
import static graphql.util.FpKit.groupingBy;
import static graphql.util.FpKit.intersection;
import static java.util.Collections.singleton;
import static java.util.Collections.singletonList;
import static java.util.stream.Collectors.toCollection;
import static java.util.stream.Collectors.toSet;

/**
 * This factory can create a {@link ExecutableNormalizedOperation} which represents what would be executed
 * during a given graphql operation.
 */
@PublicApi
public class ExecutableNormalizedOperationFactory {

    public static class Options {


        private final GraphQLContext graphQLContext;
        private final Locale locale;
        private final int maxChildrenDepth;
        private final int maxFieldsCount;

        private final boolean deferSupport;

        /**
         * The default max fields count is 100,000.
         * This is big enough for even very large queries, but
         * can be changed via {#setDefaultOptions
         */
        public static final int DEFAULT_MAX_FIELDS_COUNT = 100_000;
        private static Options defaultOptions = new Options(GraphQLContext.getDefault(),
                Locale.getDefault(),
                Integer.MAX_VALUE,
                DEFAULT_MAX_FIELDS_COUNT,
                false);

        private Options(GraphQLContext graphQLContext,
                        Locale locale,
                        int maxChildrenDepth,
                        int maxFieldsCount,
                        boolean deferSupport) {
            this.graphQLContext = graphQLContext;
            this.locale = locale;
            this.maxChildrenDepth = maxChildrenDepth;
            this.deferSupport = deferSupport;
            this.maxFieldsCount = maxFieldsCount;
        }

        /**
         * Sets new default Options used when creating instances of {@link ExecutableNormalizedOperation}.
         *
         * @param options new default options
         */
        public static void setDefaultOptions(Options options) {
            defaultOptions = Assert.assertNotNull(options);
        }


        /**
         * Returns the default options used when creating instances of {@link ExecutableNormalizedOperation}.
         *
         * @return the default options
         */
        public static Options defaultOptions() {
            return defaultOptions;
        }

        /**
         * Locale to use when parsing the query.
         * 

* e.g. can be passed to {@link graphql.schema.Coercing} for parsing. * * @param locale the locale to use * * @return new options object to use */ public Options locale(Locale locale) { return new Options(this.graphQLContext, locale, this.maxChildrenDepth, this.maxFieldsCount, this.deferSupport); } /** * Context object to use when parsing the operation. *

* Can be used to intercept input values e.g. using {@link graphql.execution.values.InputInterceptor}. * * @param graphQLContext the context to use * * @return new options object to use */ public Options graphQLContext(GraphQLContext graphQLContext) { return new Options(graphQLContext, this.locale, this.maxChildrenDepth, this.maxFieldsCount, this.deferSupport); } /** * Controls the maximum depth of the operation. Can be used to prevent * against malicious operations. * * @param maxChildrenDepth the max depth * * @return new options object to use */ public Options maxChildrenDepth(int maxChildrenDepth) { return new Options(this.graphQLContext, this.locale, maxChildrenDepth, this.maxFieldsCount, this.deferSupport); } /** * Controls the maximum number of ENFs created. Can be used to prevent * against malicious operations. * * @param maxFieldsCount the max number of ENFs created * * @return new options object to use */ public Options maxFieldsCount(int maxFieldsCount) { return new Options(this.graphQLContext, this.locale, this.maxChildrenDepth, maxFieldsCount, this.deferSupport); } /** * Controls whether defer execution is supported when creating instances of {@link ExecutableNormalizedOperation}. * * @param deferSupport true to enable support for defer * * @return new options object to use */ @ExperimentalApi public Options deferSupport(boolean deferSupport) { return new Options(this.graphQLContext, this.locale, this.maxChildrenDepth, this.maxFieldsCount, deferSupport); } /** * @return context to use during operation parsing * * @see #graphQLContext(GraphQLContext) */ public GraphQLContext getGraphQLContext() { return graphQLContext; } /** * @return locale to use during operation parsing * * @see #locale(Locale) */ public Locale getLocale() { return locale; } /** * @return maximum children depth before aborting parsing * * @see #maxChildrenDepth(int) */ public int getMaxChildrenDepth() { return maxChildrenDepth; } public int getMaxFieldsCount() { return maxFieldsCount; } /** * @return whether support for defer is enabled * * @see #deferSupport(boolean) */ @ExperimentalApi public boolean getDeferSupport() { return deferSupport; } } private static final ConditionalNodes conditionalNodes = new ConditionalNodes(); private ExecutableNormalizedOperationFactory() { } /** * This will create a runtime representation of the graphql operation that would be executed * in a runtime sense. * * @param graphQLSchema the schema to be used * @param document the {@link Document} holding the operation text * @param operationName the operation name to use * @param coercedVariableValues the coerced variables to use * * @return a runtime representation of the graphql operation. */ public static ExecutableNormalizedOperation createExecutableNormalizedOperation( GraphQLSchema graphQLSchema, Document document, String operationName, CoercedVariables coercedVariableValues ) { return createExecutableNormalizedOperation( graphQLSchema, document, operationName, coercedVariableValues, Options.defaultOptions()); } /** * This will create a runtime representation of the graphql operation that would be executed * in a runtime sense. * * @param graphQLSchema the schema to be used * @param document the {@link Document} holding the operation text * @param operationName the operation name to use * @param coercedVariableValues the coerced variables to use * @param options the {@link Options} to use for parsing * * @return a runtime representation of the graphql operation. */ public static ExecutableNormalizedOperation createExecutableNormalizedOperation( GraphQLSchema graphQLSchema, Document document, String operationName, CoercedVariables coercedVariableValues, Options options ) { NodeUtil.GetOperationResult getOperationResult = NodeUtil.getOperation(document, operationName); return new ExecutableNormalizedOperationFactoryImpl( graphQLSchema, getOperationResult.operationDefinition, getOperationResult.fragmentsByName, coercedVariableValues, null, options ).createNormalizedQueryImpl(); } /** * This will create a runtime representation of the graphql operation that would be executed * in a runtime sense. * * @param graphQLSchema the schema to be used * @param operationDefinition the operation to be executed * @param fragments a set of fragments associated with the operation * @param coercedVariableValues the coerced variables to use * * @return a runtime representation of the graphql operation. */ public static ExecutableNormalizedOperation createExecutableNormalizedOperation(GraphQLSchema graphQLSchema, OperationDefinition operationDefinition, Map fragments, CoercedVariables coercedVariableValues) { return createExecutableNormalizedOperation(graphQLSchema, operationDefinition, fragments, coercedVariableValues, Options.defaultOptions()); } /** * This will create a runtime representation of the graphql operation that would be executed * in a runtime sense. * * @param graphQLSchema the schema to be used * @param operationDefinition the operation to be executed * @param fragments a set of fragments associated with the operation * @param coercedVariableValues the coerced variables to use * @param options the options to use * * @return a runtime representation of the graphql operation. */ public static ExecutableNormalizedOperation createExecutableNormalizedOperation(GraphQLSchema graphQLSchema, OperationDefinition operationDefinition, Map fragments, CoercedVariables coercedVariableValues, Options options) { return new ExecutableNormalizedOperationFactoryImpl( graphQLSchema, operationDefinition, fragments, coercedVariableValues, null, options ).createNormalizedQueryImpl(); } /** * This will create a runtime representation of the graphql operation that would be executed * in a runtime sense. * * @param graphQLSchema the schema to be used * @param document the {@link Document} holding the operation text * @param operationName the operation name to use * @param rawVariables the raw variables to be coerced * * @return a runtime representation of the graphql operation. */ public static ExecutableNormalizedOperation createExecutableNormalizedOperationWithRawVariables(GraphQLSchema graphQLSchema, Document document, String operationName, RawVariables rawVariables) { return createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, operationName, rawVariables, Options.defaultOptions()); } /** * This will create a runtime representation of the graphql operation that would be executed * in a runtime sense. * * @param graphQLSchema the schema to be used * @param document the {@link Document} holding the operation text * @param operationName the operation name to use * @param rawVariables the raw variables that have not yet been coerced * @param locale the {@link Locale} to use during coercion * @param graphQLContext the {@link GraphQLContext} to use during coercion * * @return a runtime representation of the graphql operation. */ public static ExecutableNormalizedOperation createExecutableNormalizedOperationWithRawVariables( GraphQLSchema graphQLSchema, Document document, String operationName, RawVariables rawVariables, GraphQLContext graphQLContext, Locale locale ) { return createExecutableNormalizedOperationWithRawVariables( graphQLSchema, document, operationName, rawVariables, Options.defaultOptions().graphQLContext(graphQLContext).locale(locale)); } /** * This will create a runtime representation of the graphql operation that would be executed * in a runtime sense. * * @param graphQLSchema the schema to be used * @param document the {@link Document} holding the operation text * @param operationName the operation name to use * @param rawVariables the raw variables that have not yet been coerced * @param options the {@link Options} to use for parsing * * @return a runtime representation of the graphql operation. */ public static ExecutableNormalizedOperation createExecutableNormalizedOperationWithRawVariables(GraphQLSchema graphQLSchema, Document document, String operationName, RawVariables rawVariables, Options options) { NodeUtil.GetOperationResult getOperationResult = NodeUtil.getOperation(document, operationName); OperationDefinition operationDefinition = getOperationResult.operationDefinition; List variableDefinitions = operationDefinition.getVariableDefinitions(); CoercedVariables coercedVariableValues = ValuesResolver.coerceVariableValues(graphQLSchema, variableDefinitions, rawVariables, options.getGraphQLContext(), options.getLocale()); Map normalizedVariableValues = ValuesResolver.getNormalizedVariableValues(graphQLSchema, variableDefinitions, rawVariables, options.getGraphQLContext(), options.getLocale()); return new ExecutableNormalizedOperationFactoryImpl( graphQLSchema, operationDefinition, getOperationResult.fragmentsByName, coercedVariableValues, normalizedVariableValues, options ).createNormalizedQueryImpl(); } private static class ExecutableNormalizedOperationFactoryImpl { private final GraphQLSchema graphQLSchema; private final OperationDefinition operationDefinition; private final Map fragments; private final CoercedVariables coercedVariableValues; private final @Nullable Map normalizedVariableValues; private final Options options; private final List possibleMergerList = new ArrayList<>(); private final ImmutableListMultimap.Builder fieldToNormalizedField = ImmutableListMultimap.builder(); private final ImmutableMap.Builder normalizedFieldToMergedField = ImmutableMap.builder(); private final ImmutableMap.Builder normalizedFieldToQueryDirectives = ImmutableMap.builder(); private final ImmutableListMultimap.Builder coordinatesToNormalizedFields = ImmutableListMultimap.builder(); private int fieldCount = 0; private int maxDepthSeen = 0; private ExecutableNormalizedOperationFactoryImpl( GraphQLSchema graphQLSchema, OperationDefinition operationDefinition, Map fragments, CoercedVariables coercedVariableValues, @Nullable Map normalizedVariableValues, Options options ) { this.graphQLSchema = graphQLSchema; this.operationDefinition = operationDefinition; this.fragments = fragments; this.coercedVariableValues = coercedVariableValues; this.normalizedVariableValues = normalizedVariableValues; this.options = options; } /** * Creates a new ExecutableNormalizedOperation for the provided query */ private ExecutableNormalizedOperation createNormalizedQueryImpl() { GraphQLObjectType rootType = SchemaUtil.getOperationRootType(graphQLSchema, operationDefinition); CollectNFResult collectFromOperationResult = collectFromOperation(rootType); for (ExecutableNormalizedField topLevel : collectFromOperationResult.children) { ImmutableList fieldAndAstParents = collectFromOperationResult.normalizedFieldToAstFields.get(topLevel); MergedField mergedField = newMergedField(fieldAndAstParents); captureMergedField(topLevel, mergedField); updateFieldToNFMap(topLevel, fieldAndAstParents); updateCoordinatedToNFMap(topLevel); int depthSeen = buildFieldWithChildren( topLevel, fieldAndAstParents, 1); maxDepthSeen = Math.max(maxDepthSeen, depthSeen); } // getPossibleMergerList for (PossibleMerger possibleMerger : possibleMergerList) { List childrenWithSameResultKey = possibleMerger.parent.getChildrenWithSameResultKey(possibleMerger.resultKey); ENFMerger.merge(possibleMerger.parent, childrenWithSameResultKey, graphQLSchema, options.deferSupport); } return new ExecutableNormalizedOperation( operationDefinition.getOperation(), operationDefinition.getName(), new ArrayList<>(collectFromOperationResult.children), fieldToNormalizedField.build(), normalizedFieldToMergedField.build(), normalizedFieldToQueryDirectives.build(), coordinatesToNormalizedFields.build(), fieldCount, maxDepthSeen ); } private void captureMergedField(ExecutableNormalizedField enf, MergedField mergedFld) { // QueryDirectivesImpl is a lazy object and only computes itself when asked for QueryDirectives queryDirectives = new QueryDirectivesImpl(mergedFld, graphQLSchema, coercedVariableValues.toMap(), options.getGraphQLContext(), options.getLocale()); normalizedFieldToQueryDirectives.put(enf, queryDirectives); normalizedFieldToMergedField.put(enf, mergedFld); } private int buildFieldWithChildren(ExecutableNormalizedField executableNormalizedField, ImmutableList fieldAndAstParents, int curLevel) { checkMaxDepthExceeded(curLevel); CollectNFResult nextLevel = collectFromMergedField(executableNormalizedField, fieldAndAstParents, curLevel + 1); int maxDepthSeen = curLevel; for (ExecutableNormalizedField childENF : nextLevel.children) { executableNormalizedField.addChild(childENF); ImmutableList childFieldAndAstParents = nextLevel.normalizedFieldToAstFields.get(childENF); MergedField mergedField = newMergedField(childFieldAndAstParents); captureMergedField(childENF, mergedField); updateFieldToNFMap(childENF, childFieldAndAstParents); updateCoordinatedToNFMap(childENF); int depthSeen = buildFieldWithChildren(childENF, childFieldAndAstParents, curLevel + 1); maxDepthSeen = Math.max(maxDepthSeen, depthSeen); checkMaxDepthExceeded(maxDepthSeen); } return maxDepthSeen; } private void checkMaxDepthExceeded(int depthSeen) { if (depthSeen > this.options.getMaxChildrenDepth()) { throw new AbortExecutionException("Maximum query depth exceeded. " + depthSeen + " > " + this.options.getMaxChildrenDepth()); } } private static MergedField newMergedField(ImmutableList fieldAndAstParents) { return MergedField.newMergedField(map(fieldAndAstParents, fieldAndAstParent -> fieldAndAstParent.field)).build(); } private void updateFieldToNFMap(ExecutableNormalizedField executableNormalizedField, ImmutableList mergedField) { for (FieldAndAstParent astField : mergedField) { fieldToNormalizedField.put(astField.field, executableNormalizedField); } } private void updateCoordinatedToNFMap(ExecutableNormalizedField topLevel) { for (String objectType : topLevel.getObjectTypeNames()) { FieldCoordinates coordinates = FieldCoordinates.coordinates(objectType, topLevel.getFieldName()); coordinatesToNormalizedFields.put(coordinates, topLevel); } } public CollectNFResult collectFromMergedField(ExecutableNormalizedField executableNormalizedField, ImmutableList mergedField, int level) { List fieldDefs = executableNormalizedField.getFieldDefinitions(graphQLSchema); Set possibleObjects = resolvePossibleObjects(fieldDefs); if (possibleObjects.isEmpty()) { return new CollectNFResult(ImmutableKit.emptyList(), ImmutableListMultimap.of()); } List collectedFields = new ArrayList<>(); for (FieldAndAstParent fieldAndAstParent : mergedField) { if (fieldAndAstParent.field.getSelectionSet() == null) { continue; } GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDef(graphQLSchema, fieldAndAstParent.astParentType, fieldAndAstParent.field.getName()); GraphQLUnmodifiedType astParentType = unwrapAll(fieldDefinition.getType()); this.collectFromSelectionSet(fieldAndAstParent.field.getSelectionSet(), collectedFields, (GraphQLCompositeType) astParentType, possibleObjects, null ); } Map> fieldsByName = fieldsByResultKey(collectedFields); ImmutableList.Builder resultNFs = ImmutableList.builder(); ImmutableListMultimap.Builder normalizedFieldToAstFields = ImmutableListMultimap.builder(); createNFs(resultNFs, fieldsByName, normalizedFieldToAstFields, level, executableNormalizedField); return new CollectNFResult(resultNFs.build(), normalizedFieldToAstFields.build()); } private Map> fieldsByResultKey(List collectedFields) { Map> fieldsByName = new LinkedHashMap<>(); for (CollectedField collectedField : collectedFields) { fieldsByName.computeIfAbsent(collectedField.field.getResultKey(), ignored -> new ArrayList<>()).add(collectedField); } return fieldsByName; } public CollectNFResult collectFromOperation(GraphQLObjectType rootType) { Set possibleObjects = ImmutableSet.of(rootType); List collectedFields = new ArrayList<>(); collectFromSelectionSet(operationDefinition.getSelectionSet(), collectedFields, rootType, possibleObjects, null); // group by result key Map> fieldsByName = fieldsByResultKey(collectedFields); ImmutableList.Builder resultNFs = ImmutableList.builder(); ImmutableListMultimap.Builder normalizedFieldToAstFields = ImmutableListMultimap.builder(); createNFs(resultNFs, fieldsByName, normalizedFieldToAstFields, 1, null); return new CollectNFResult(resultNFs.build(), normalizedFieldToAstFields.build()); } private void createNFs(ImmutableList.Builder nfListBuilder, Map> fieldsByName, ImmutableListMultimap.Builder normalizedFieldToAstFields, int level, ExecutableNormalizedField parent) { for (String resultKey : fieldsByName.keySet()) { List fieldsWithSameResultKey = fieldsByName.get(resultKey); List commonParentsGroups = groupByCommonParents(fieldsWithSameResultKey); for (CollectedFieldGroup fieldGroup : commonParentsGroups) { ExecutableNormalizedField nf = createNF(fieldGroup, level, parent); if (nf == null) { continue; } for (CollectedField collectedField : fieldGroup.fields) { normalizedFieldToAstFields.put(nf, new FieldAndAstParent(collectedField.field, collectedField.astTypeCondition)); } nfListBuilder.add(nf); if (this.options.deferSupport) { nf.addDeferredExecutions(fieldGroup.deferredExecutions); } } if (commonParentsGroups.size() > 1) { possibleMergerList.add(new PossibleMerger(parent, resultKey)); } } } private ExecutableNormalizedField createNF(CollectedFieldGroup collectedFieldGroup, int level, ExecutableNormalizedField parent) { this.fieldCount++; if (this.fieldCount > this.options.getMaxFieldsCount()) { throw new AbortExecutionException("Maximum field count exceeded. " + this.fieldCount + " > " + this.options.getMaxFieldsCount()); } Field field; Set objectTypes = collectedFieldGroup.objectTypes; field = collectedFieldGroup.fields.iterator().next().field; String fieldName = field.getName(); GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDefinition(graphQLSchema, objectTypes.iterator().next(), fieldName); Map argumentValues = ValuesResolver.getArgumentValues(fieldDefinition.getArguments(), field.getArguments(), CoercedVariables.of(this.coercedVariableValues.toMap()), this.options.graphQLContext, this.options.locale); Map normalizedArgumentValues = null; if (this.normalizedVariableValues != null) { normalizedArgumentValues = ValuesResolver.getNormalizedArgumentValues(fieldDefinition.getArguments(), field.getArguments(), this.normalizedVariableValues); } ImmutableList objectTypeNames = map(objectTypes, GraphQLObjectType::getName); return ExecutableNormalizedField.newNormalizedField() .alias(field.getAlias()) .resolvedArguments(argumentValues) .normalizedArguments(normalizedArgumentValues) .astArguments(field.getArguments()) .objectTypeNames(objectTypeNames) .fieldName(fieldName) .level(level) .parent(parent) .build(); } private List groupByCommonParents(Collection fields) { if (this.options.deferSupport) { return groupByCommonParentsWithDeferSupport(fields); } else { return groupByCommonParentsNoDeferSupport(fields); } } private List groupByCommonParentsNoDeferSupport(Collection fields) { ImmutableSet.Builder objectTypes = ImmutableSet.builder(); for (CollectedField collectedField : fields) { objectTypes.addAll(collectedField.objectTypes); } Set allRelevantObjects = objectTypes.build(); Map> groupByAstParent = groupingBy(fields, fieldAndType -> fieldAndType.astTypeCondition); if (groupByAstParent.size() == 1) { return singletonList(new CollectedFieldGroup(ImmutableSet.copyOf(fields), allRelevantObjects, null)); } ImmutableList.Builder result = ImmutableList.builder(); for (GraphQLObjectType objectType : allRelevantObjects) { Set relevantFields = filterSet(fields, field -> field.objectTypes.contains(objectType)); result.add(new CollectedFieldGroup(relevantFields, singleton(objectType), null)); } return result.build(); } private List groupByCommonParentsWithDeferSupport(Collection fields) { ImmutableSet.Builder objectTypes = ImmutableSet.builder(); ImmutableSet.Builder deferredExecutionsBuilder = ImmutableSet.builder(); for (CollectedField collectedField : fields) { objectTypes.addAll(collectedField.objectTypes); NormalizedDeferredExecution collectedDeferredExecution = collectedField.deferredExecution; if (collectedDeferredExecution != null) { deferredExecutionsBuilder.add(collectedDeferredExecution); } } Set allRelevantObjects = objectTypes.build(); Set deferredExecutions = deferredExecutionsBuilder.build(); Set duplicatedLabels = listDuplicatedLabels(deferredExecutions); if (!duplicatedLabels.isEmpty()) { // Query validation should pick this up Assert.assertShouldNeverHappen("Duplicated @defer labels are not allowed: [%s]", String.join(",", duplicatedLabels)); } Map> groupByAstParent = groupingBy(fields, fieldAndType -> fieldAndType.astTypeCondition); if (groupByAstParent.size() == 1) { return singletonList(new CollectedFieldGroup(ImmutableSet.copyOf(fields), allRelevantObjects, deferredExecutions)); } ImmutableList.Builder result = ImmutableList.builder(); for (GraphQLObjectType objectType : allRelevantObjects) { Set relevantFields = filterSet(fields, field -> field.objectTypes.contains(objectType)); Set filteredDeferredExecutions = deferredExecutions.stream() .filter(filterExecutionsFromType(objectType)) .collect(toCollection(LinkedHashSet::new)); result.add(new CollectedFieldGroup(relevantFields, singleton(objectType), filteredDeferredExecutions)); } return result.build(); } private static Predicate filterExecutionsFromType(GraphQLObjectType objectType) { String objectTypeName = objectType.getName(); return deferredExecution -> deferredExecution.getPossibleTypes() .stream() .map(GraphQLObjectType::getName) .anyMatch(objectTypeName::equals); } private Set listDuplicatedLabels(Collection deferredExecutions) { return deferredExecutions.stream() .map(NormalizedDeferredExecution::getLabel) .filter(Objects::nonNull) .collect(Collectors.groupingBy(Function.identity(), Collectors.counting())) .entrySet() .stream() .filter(entry -> entry.getValue() > 1) .map(Map.Entry::getKey) .collect(toSet()); } private void collectFromSelectionSet(SelectionSet selectionSet, List result, GraphQLCompositeType astTypeCondition, Set possibleObjects, NormalizedDeferredExecution deferredExecution ) { for (Selection selection : selectionSet.getSelections()) { if (selection instanceof Field) { collectField(result, (Field) selection, possibleObjects, astTypeCondition, deferredExecution); } else if (selection instanceof InlineFragment) { collectInlineFragment(result, (InlineFragment) selection, possibleObjects, astTypeCondition); } else if (selection instanceof FragmentSpread) { collectFragmentSpread(result, (FragmentSpread) selection, possibleObjects); } } } private void collectFragmentSpread(List result, FragmentSpread fragmentSpread, Set possibleObjects ) { if (!conditionalNodes.shouldInclude(fragmentSpread, this.coercedVariableValues.toMap(), this.graphQLSchema, this.options.graphQLContext)) { return; } FragmentDefinition fragmentDefinition = assertNotNull(this.fragments.get(fragmentSpread.getName())); if (!conditionalNodes.shouldInclude(fragmentDefinition, this.coercedVariableValues.toMap(), this.graphQLSchema, this.options.graphQLContext)) { return; } GraphQLCompositeType newAstTypeCondition = (GraphQLCompositeType) assertNotNull(this.graphQLSchema.getType(fragmentDefinition.getTypeCondition().getName())); Set newPossibleObjects = narrowDownPossibleObjects(possibleObjects, newAstTypeCondition); NormalizedDeferredExecution newDeferredExecution = buildDeferredExecution( fragmentSpread.getDirectives(), newPossibleObjects); collectFromSelectionSet(fragmentDefinition.getSelectionSet(), result, newAstTypeCondition, newPossibleObjects, newDeferredExecution); } private void collectInlineFragment(List result, InlineFragment inlineFragment, Set possibleObjects, GraphQLCompositeType astTypeCondition ) { if (!conditionalNodes.shouldInclude(inlineFragment, this.coercedVariableValues.toMap(), this.graphQLSchema, this.options.graphQLContext)) { return; } Set newPossibleObjects = possibleObjects; GraphQLCompositeType newAstTypeCondition = astTypeCondition; if (inlineFragment.getTypeCondition() != null) { newAstTypeCondition = (GraphQLCompositeType) this.graphQLSchema.getType(inlineFragment.getTypeCondition().getName()); newPossibleObjects = narrowDownPossibleObjects(possibleObjects, newAstTypeCondition); } NormalizedDeferredExecution newDeferredExecution = buildDeferredExecution( inlineFragment.getDirectives(), newPossibleObjects ); collectFromSelectionSet(inlineFragment.getSelectionSet(), result, newAstTypeCondition, newPossibleObjects, newDeferredExecution); } private NormalizedDeferredExecution buildDeferredExecution( List directives, Set newPossibleObjects) { if (!options.deferSupport) { return null; } return IncrementalUtils.createDeferredExecution( this.coercedVariableValues.toMap(), directives, (label) -> new NormalizedDeferredExecution(label, newPossibleObjects) ); } private void collectField(List result, Field field, Set possibleObjectTypes, GraphQLCompositeType astTypeCondition, NormalizedDeferredExecution deferredExecution ) { if (!conditionalNodes.shouldInclude(field, this.coercedVariableValues.toMap(), this.graphQLSchema, this.options.graphQLContext)) { return; } // this means there is actually no possible type for this field, and we are done if (possibleObjectTypes.isEmpty()) { return; } result.add(new CollectedField(field, possibleObjectTypes, astTypeCondition, deferredExecution)); } private Set narrowDownPossibleObjects(Set currentOnes, GraphQLCompositeType typeCondition) { ImmutableSet resolvedTypeCondition = resolvePossibleObjects(typeCondition); if (currentOnes.isEmpty()) { return resolvedTypeCondition; } // Faster intersection, as either set often has a size of 1. return intersection(currentOnes, resolvedTypeCondition); } private ImmutableSet resolvePossibleObjects(List defs) { ImmutableSet.Builder builder = ImmutableSet.builder(); for (GraphQLFieldDefinition def : defs) { GraphQLUnmodifiedType outputType = unwrapAll(def.getType()); if (outputType instanceof GraphQLCompositeType) { builder.addAll(resolvePossibleObjects((GraphQLCompositeType) outputType)); } } return builder.build(); } private ImmutableSet resolvePossibleObjects(GraphQLCompositeType type) { if (type instanceof GraphQLObjectType) { return ImmutableSet.of((GraphQLObjectType) type); } else if (type instanceof GraphQLInterfaceType) { return ImmutableSet.copyOf(graphQLSchema.getImplementations((GraphQLInterfaceType) type)); } else if (type instanceof GraphQLUnionType) { List unionTypes = ((GraphQLUnionType) type).getTypes(); return ImmutableSet.copyOf(ImmutableKit.map(unionTypes, GraphQLObjectType.class::cast)); } else { return assertShouldNeverHappen(); } } private static class PossibleMerger { ExecutableNormalizedField parent; String resultKey; public PossibleMerger(ExecutableNormalizedField parent, String resultKey) { this.parent = parent; this.resultKey = resultKey; } } private static class CollectedField { Field field; Set objectTypes; GraphQLCompositeType astTypeCondition; NormalizedDeferredExecution deferredExecution; public CollectedField(Field field, Set objectTypes, GraphQLCompositeType astTypeCondition, NormalizedDeferredExecution deferredExecution) { this.field = field; this.objectTypes = objectTypes; this.astTypeCondition = astTypeCondition; this.deferredExecution = deferredExecution; } } public static class CollectNFResult { private final Collection children; private final ImmutableListMultimap normalizedFieldToAstFields; public CollectNFResult(Collection children, ImmutableListMultimap normalizedFieldToAstFields) { this.children = children; this.normalizedFieldToAstFields = normalizedFieldToAstFields; } } private static class FieldAndAstParent { final Field field; final GraphQLCompositeType astParentType; private FieldAndAstParent(Field field, GraphQLCompositeType astParentType) { this.field = field; this.astParentType = astParentType; } } private static class CollectedFieldGroup { Set objectTypes; Set fields; Set deferredExecutions; public CollectedFieldGroup(Set fields, Set objectTypes, Set deferredExecutions) { this.fields = fields; this.objectTypes = objectTypes; this.deferredExecutions = deferredExecutions; } } } }





© 2015 - 2024 Weber Informatics LLC | Privacy Policy