diff --git a/src/main/java/graphql/Assert.java b/src/main/java/graphql/Assert.java index 5fcbf9dbe9..c4ba88a911 100644 --- a/src/main/java/graphql/Assert.java +++ b/src/main/java/graphql/Assert.java @@ -85,6 +85,20 @@ public static void assertTrue(boolean condition) { throw new AssertException("condition expected to be true"); } + public static void assertTrue(boolean condition, String constantMsg) { + if (condition) { + return; + } + throwAssert(constantMsg); + } + + public static void assertTrue(boolean condition, String msgFmt, Object arg1) { + if (condition) { + return; + } + throwAssert(msgFmt, arg1); + } + public static void assertFalse(boolean condition, Supplier msg) { if (!condition) { return; @@ -117,4 +131,7 @@ public static String assertValidName(String name) { throw new AssertException(String.format(invalidNameErrorMessage, name)); } + private static T throwAssert(String format, Object... args) { + throw new AssertException(format(format, args)); + } } diff --git a/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java b/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java index 7b65c8e9ea..73f6064a43 100644 --- a/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java +++ b/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java @@ -3,8 +3,8 @@ import graphql.GraphQLContext; import graphql.Internal; import graphql.execution.CoercedVariables; -import graphql.execution.ConditionalNodes; import graphql.execution.ValuesResolver; +import graphql.execution.conditional.ConditionalNodes; import graphql.introspection.Introspection; import graphql.language.Argument; import graphql.language.Directive; @@ -68,7 +68,9 @@ public TraversalControl visitDirective(Directive node, TraverserContext co @Override public TraversalControl visitInlineFragment(InlineFragment inlineFragment, TraverserContext context) { - if (!conditionalNodes.shouldInclude(variables, inlineFragment.getDirectives())) { + QueryTraversalContext parentEnv = 
context.getVarFromParents(QueryTraversalContext.class); + GraphQLContext graphQLContext = parentEnv.getGraphQLContext(); + if (!conditionalNodes.shouldInclude(inlineFragment, variables, null, graphQLContext)) { return TraversalControl.ABORT; } @@ -82,7 +84,6 @@ public TraversalControl visitInlineFragment(InlineFragment inlineFragment, Trave preOrderCallback.visitInlineFragment(inlineFragmentEnvironment); // inline fragments are allowed not have type conditions, if so the parent type counts - QueryTraversalContext parentEnv = context.getVarFromParents(QueryTraversalContext.class); GraphQLCompositeType fragmentCondition; if (inlineFragment.getTypeCondition() != null) { @@ -92,17 +93,19 @@ public TraversalControl visitInlineFragment(InlineFragment inlineFragment, Trave fragmentCondition = parentEnv.getUnwrappedOutputType(); } // for unions we only have other fragments inside - context.setVar(QueryTraversalContext.class, new QueryTraversalContext(fragmentCondition, parentEnv.getEnvironment(), inlineFragment)); + context.setVar(QueryTraversalContext.class, new QueryTraversalContext(fragmentCondition, parentEnv.getEnvironment(), inlineFragment, graphQLContext)); return TraversalControl.CONTINUE; } @Override - public TraversalControl visitFragmentDefinition(FragmentDefinition node, TraverserContext context) { - if (!conditionalNodes.shouldInclude(variables, node.getDirectives())) { + public TraversalControl visitFragmentDefinition(FragmentDefinition fragmentDefinition, TraverserContext context) { + QueryTraversalContext parentEnv = context.getVarFromParents(QueryTraversalContext.class); + GraphQLContext graphQLContext = parentEnv.getGraphQLContext(); + if (!conditionalNodes.shouldInclude(fragmentDefinition, variables, null, graphQLContext)) { return TraversalControl.ABORT; } - QueryVisitorFragmentDefinitionEnvironment fragmentEnvironment = new QueryVisitorFragmentDefinitionEnvironmentImpl(node, context, schema); + QueryVisitorFragmentDefinitionEnvironment 
fragmentEnvironment = new QueryVisitorFragmentDefinitionEnvironmentImpl(fragmentDefinition, context, schema); if (context.getPhase() == LEAVE) { postOrderCallback.visitFragmentDefinition(fragmentEnvironment); @@ -110,20 +113,21 @@ public TraversalControl visitFragmentDefinition(FragmentDefinition node, Travers } preOrderCallback.visitFragmentDefinition(fragmentEnvironment); - QueryTraversalContext parentEnv = context.getVarFromParents(QueryTraversalContext.class); - GraphQLCompositeType typeCondition = (GraphQLCompositeType) schema.getType(node.getTypeCondition().getName()); - context.setVar(QueryTraversalContext.class, new QueryTraversalContext(typeCondition, parentEnv.getEnvironment(), node)); + GraphQLCompositeType typeCondition = (GraphQLCompositeType) schema.getType(fragmentDefinition.getTypeCondition().getName()); + context.setVar(QueryTraversalContext.class, new QueryTraversalContext(typeCondition, parentEnv.getEnvironment(), fragmentDefinition, graphQLContext)); return TraversalControl.CONTINUE; } @Override public TraversalControl visitFragmentSpread(FragmentSpread fragmentSpread, TraverserContext context) { - if (!conditionalNodes.shouldInclude(variables, fragmentSpread.getDirectives())) { + QueryTraversalContext parentEnv = context.getVarFromParents(QueryTraversalContext.class); + GraphQLContext graphQLContext = parentEnv.getGraphQLContext(); + if (!conditionalNodes.shouldInclude(fragmentSpread, variables, null, graphQLContext)) { return TraversalControl.ABORT; } FragmentDefinition fragmentDefinition = fragmentsByName.get(fragmentSpread.getName()); - if (!conditionalNodes.shouldInclude(variables, fragmentDefinition.getDirectives())) { + if (!conditionalNodes.shouldInclude(fragmentDefinition, variables, null, graphQLContext)) { return TraversalControl.ABORT; } @@ -135,19 +139,19 @@ public TraversalControl visitFragmentSpread(FragmentSpread fragmentSpread, Trave preOrderCallback.visitFragmentSpread(fragmentSpreadEnvironment); - QueryTraversalContext 
parentEnv = context.getVarFromParents(QueryTraversalContext.class); GraphQLCompositeType typeCondition = (GraphQLCompositeType) schema.getType(fragmentDefinition.getTypeCondition().getName()); assertNotNull(typeCondition, () -> format("Invalid type condition '%s' in fragment '%s'", fragmentDefinition.getTypeCondition().getName(), fragmentDefinition.getName())); - context.setVar(QueryTraversalContext.class, new QueryTraversalContext(typeCondition, parentEnv.getEnvironment(), fragmentDefinition)); + context.setVar(QueryTraversalContext.class, new QueryTraversalContext(typeCondition, parentEnv.getEnvironment(), fragmentDefinition, graphQLContext)); return TraversalControl.CONTINUE; } @Override public TraversalControl visitField(Field field, TraverserContext context) { QueryTraversalContext parentEnv = context.getVarFromParents(QueryTraversalContext.class); + GraphQLContext graphQLContext = parentEnv.getGraphQLContext(); GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDef(schema, (GraphQLCompositeType) unwrapAll(parentEnv.getOutputType()), field.getName()); boolean isTypeNameIntrospectionField = fieldDefinition == schema.getIntrospectionTypenameFieldDefinition(); @@ -174,7 +178,7 @@ public TraversalControl visitField(Field field, TraverserContext context) return TraversalControl.CONTINUE; } - if (!conditionalNodes.shouldInclude(variables, field.getDirectives())) { + if (!conditionalNodes.shouldInclude(field, variables, null, graphQLContext)) { return TraversalControl.ABORT; } @@ -182,8 +186,8 @@ public TraversalControl visitField(Field field, TraverserContext context) GraphQLUnmodifiedType unmodifiedType = unwrapAll(fieldDefinition.getType()); QueryTraversalContext fieldEnv = (unmodifiedType instanceof GraphQLCompositeType) - ? new QueryTraversalContext(fieldDefinition.getType(), environment, field) - : new QueryTraversalContext(null, environment, field);// Terminal (scalar) node, EMPTY FRAME + ? 
new QueryTraversalContext(fieldDefinition.getType(), environment, field, graphQLContext) + : new QueryTraversalContext(null, environment, field, graphQLContext);// Terminal (scalar) node, EMPTY FRAME context.setVar(QueryTraversalContext.class, fieldEnv); @@ -259,4 +263,4 @@ protected TraversalControl visitValue(Value value, TraverserContext con } return preOrderCallback.visitArgumentValue(environment); } -} +} \ No newline at end of file diff --git a/src/main/java/graphql/analysis/QueryTransformer.java b/src/main/java/graphql/analysis/QueryTransformer.java index 35c840bb04..b23dc54db3 100644 --- a/src/main/java/graphql/analysis/QueryTransformer.java +++ b/src/main/java/graphql/analysis/QueryTransformer.java @@ -1,5 +1,6 @@ package graphql.analysis; +import graphql.GraphQLContext; import graphql.PublicApi; import graphql.language.FragmentDefinition; import graphql.language.Node; @@ -67,7 +68,7 @@ public Node transform(QueryVisitor queryVisitor) { NodeVisitorWithTypeTracking nodeVisitor = new NodeVisitorWithTypeTracking(queryVisitor, noOp, variables, schema, fragmentsByName); Map, Object> rootVars = new LinkedHashMap<>(); - rootVars.put(QueryTraversalContext.class, new QueryTraversalContext(rootParentType, null, null)); + rootVars.put(QueryTraversalContext.class, new QueryTraversalContext(rootParentType, null, null, GraphQLContext.getDefault())); TraverserVisitor nodeTraverserVisitor = new TraverserVisitor() { @@ -163,4 +164,4 @@ public QueryTransformer build() { return new QueryTransformer(schema, root, rootParentType, fragmentsByName, variables); } } -} +} \ No newline at end of file diff --git a/src/main/java/graphql/analysis/QueryTraversalContext.java b/src/main/java/graphql/analysis/QueryTraversalContext.java index de591141cc..4261e1b811 100644 --- a/src/main/java/graphql/analysis/QueryTraversalContext.java +++ b/src/main/java/graphql/analysis/QueryTraversalContext.java @@ -1,5 +1,6 @@ package graphql.analysis; +import graphql.GraphQLContext; import 
graphql.Internal; import graphql.language.SelectionSetContainer; import graphql.schema.GraphQLCompositeType; @@ -16,14 +17,17 @@ class QueryTraversalContext { // never used for scalars/enums, always a possibly wrapped composite type private final GraphQLOutputType outputType; private final QueryVisitorFieldEnvironment environment; - private final SelectionSetContainer selectionSetContainer; + private final SelectionSetContainer selectionSetContainer; + private final GraphQLContext graphQLContext; QueryTraversalContext(GraphQLOutputType outputType, QueryVisitorFieldEnvironment environment, - SelectionSetContainer selectionSetContainer) { + SelectionSetContainer selectionSetContainer, + GraphQLContext graphQLContext) { this.outputType = outputType; this.environment = environment; this.selectionSetContainer = selectionSetContainer; + this.graphQLContext = graphQLContext; } public GraphQLOutputType getOutputType() { @@ -34,13 +38,15 @@ public GraphQLCompositeType getUnwrappedOutputType() { return (GraphQLCompositeType) GraphQLTypeUtil.unwrapAll(outputType); } - public QueryVisitorFieldEnvironment getEnvironment() { return environment; } - public SelectionSetContainer getSelectionSetContainer() { - + public SelectionSetContainer getSelectionSetContainer() { return selectionSetContainer; } -} + + public GraphQLContext getGraphQLContext() { + return graphQLContext; + } +} \ No newline at end of file diff --git a/src/main/java/graphql/analysis/QueryTraverser.java b/src/main/java/graphql/analysis/QueryTraverser.java index 14d873f599..59532843f2 100644 --- a/src/main/java/graphql/analysis/QueryTraverser.java +++ b/src/main/java/graphql/analysis/QueryTraverser.java @@ -177,7 +177,7 @@ private List childrenOf(Node node) { private Object visitImpl(QueryVisitor visitFieldCallback, Boolean preOrder) { Map, Object> rootVars = new LinkedHashMap<>(); - rootVars.put(QueryTraversalContext.class, new QueryTraversalContext(rootParentType, null, null)); + 
rootVars.put(QueryTraversalContext.class, new QueryTraversalContext(rootParentType, null, null, GraphQLContext.getDefault())); QueryVisitor preOrderCallback; QueryVisitor postOrderCallback; @@ -343,4 +343,4 @@ private void checkState() { } } -} +} \ No newline at end of file diff --git a/src/main/java/graphql/execution/ExecutionContext.java b/src/main/java/graphql/execution/ExecutionContext.java index 8c547e23c6..e3e90e6f87 100644 --- a/src/main/java/graphql/execution/ExecutionContext.java +++ b/src/main/java/graphql/execution/ExecutionContext.java @@ -81,7 +81,7 @@ public class ExecutionContext { this.errors.set(builder.errors); this.localContext = builder.localContext; this.executionInput = builder.executionInput; - queryTree = FpKit.interThreadMemoize(() -> ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, operationDefinition, fragmentsByName, coercedVariables)); + this.queryTree = FpKit.interThreadMemoize(() -> ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, operationDefinition, fragmentsByName, coercedVariables)); } diff --git a/src/main/java/graphql/execution/FieldCollector.java b/src/main/java/graphql/execution/FieldCollector.java index d32218bfc8..34480011c1 100644 --- a/src/main/java/graphql/execution/FieldCollector.java +++ b/src/main/java/graphql/execution/FieldCollector.java @@ -2,6 +2,7 @@ import graphql.Internal; +import graphql.execution.conditional.ConditionalNodes; import graphql.language.Field; import graphql.language.FragmentDefinition; import graphql.language.FragmentSpread; @@ -76,13 +77,19 @@ private void collectFragmentSpread(FieldCollectorParameters parameters, Set visitedFragments, Map fields, InlineFragment inlineFragment) { - if (!conditionalNodes.shouldInclude(parameters.getVariables(), inlineFragment.getDirectives()) || + if (!conditionalNodes.shouldInclude(inlineFragment, + parameters.getVariables(), + parameters.getGraphQLSchema(), + 
parameters.getGraphQLContext()) || !doesFragmentConditionMatch(parameters, inlineFragment)) { return; } @@ -100,7 +110,10 @@ private void collectInlineFragment(FieldCollectorParameters parameters, Set fields, Field field) { - if (!conditionalNodes.shouldInclude(parameters.getVariables(), field.getDirectives())) { + if (!conditionalNodes.shouldInclude(field, + parameters.getVariables(), + parameters.getGraphQLSchema(), + parameters.getGraphQLContext())) { return; } String name = field.getResultKey(); @@ -143,4 +156,4 @@ private boolean checkTypeCondition(FieldCollectorParameters parameters, GraphQLT } -} +} \ No newline at end of file diff --git a/src/main/java/graphql/execution/FieldCollectorParameters.java b/src/main/java/graphql/execution/FieldCollectorParameters.java index b1878ff2a7..30c8696e3a 100644 --- a/src/main/java/graphql/execution/FieldCollectorParameters.java +++ b/src/main/java/graphql/execution/FieldCollectorParameters.java @@ -1,6 +1,7 @@ package graphql.execution; import graphql.Assert; +import graphql.GraphQLContext; import graphql.Internal; import graphql.language.FragmentDefinition; import graphql.schema.GraphQLObjectType; @@ -17,6 +18,7 @@ public class FieldCollectorParameters { private final Map fragmentsByName; private final Map variables; private final GraphQLObjectType objectType; + private final GraphQLContext graphQLContext; public GraphQLSchema getGraphQLSchema() { return graphQLSchema; @@ -34,11 +36,16 @@ public GraphQLObjectType getObjectType() { return objectType; } - private FieldCollectorParameters(GraphQLSchema graphQLSchema, Map variables, Map fragmentsByName, GraphQLObjectType objectType) { - this.fragmentsByName = fragmentsByName; - this.graphQLSchema = graphQLSchema; - this.variables = variables; - this.objectType = objectType; + public GraphQLContext getGraphQLContext() { + return graphQLContext; + } + + private FieldCollectorParameters(Builder builder) { + this.fragmentsByName = builder.fragmentsByName; + this.graphQLSchema = 
builder.graphQLSchema; + this.variables = builder.variables; + this.objectType = builder.objectType; + this.graphQLContext = builder.graphQLContext; } public static Builder newParameters() { @@ -50,6 +57,7 @@ public static class Builder { private Map fragmentsByName; private Map variables; private GraphQLObjectType objectType; + private GraphQLContext graphQLContext = GraphQLContext.getDefault(); /** * @see FieldCollectorParameters#newParameters() @@ -68,6 +76,11 @@ public Builder objectType(GraphQLObjectType objectType) { return this; } + public Builder graphQLContext(GraphQLContext graphQLContext) { + this.graphQLContext = graphQLContext; + return this; + } + public Builder fragments(Map fragmentsByName) { this.fragmentsByName = fragmentsByName; return this; @@ -80,8 +93,8 @@ public Builder variables(Map variables) { public FieldCollectorParameters build() { Assert.assertNotNull(graphQLSchema, () -> "You must provide a schema"); - return new FieldCollectorParameters(graphQLSchema, variables, fragmentsByName, objectType); + return new FieldCollectorParameters(this); } } -} +} \ No newline at end of file diff --git a/src/main/java/graphql/execution/ConditionalNodes.java b/src/main/java/graphql/execution/conditional/ConditionalNodes.java similarity index 60% rename from src/main/java/graphql/execution/ConditionalNodes.java rename to src/main/java/graphql/execution/conditional/ConditionalNodes.java index a9e3ca733e..abee17d5ca 100644 --- a/src/main/java/graphql/execution/ConditionalNodes.java +++ b/src/main/java/graphql/execution/conditional/ConditionalNodes.java @@ -1,10 +1,14 @@ -package graphql.execution; +package graphql.execution.conditional; import graphql.Assert; import graphql.GraphQLContext; import graphql.Internal; +import graphql.execution.CoercedVariables; +import graphql.execution.ValuesResolver; import graphql.language.Directive; +import graphql.language.DirectivesContainer; import graphql.language.NodeUtil; +import graphql.schema.GraphQLSchema; import 
java.util.List; import java.util.Locale; @@ -16,7 +20,17 @@ @Internal public class ConditionalNodes { - public boolean shouldInclude(Map variables, List directives) { + + public boolean shouldInclude(DirectivesContainer element, + Map variables, + GraphQLSchema graphQLSchema, + GraphQLContext graphQLContext + ) { + return shouldInclude(variables, element.getDirectives()); + // this was backported; the additional code that allowed for custom shouldInclude logic was removed + } + + private boolean shouldInclude(Map variables, List directives) { // shortcut on no directives if (directives.isEmpty()) { return true; @@ -34,10 +48,10 @@ private boolean getDirectiveResult(Map variables, List argumentValues = ValuesResolver.getArgumentValues(SkipDirective.getArguments(), foundDirective.getArguments(), CoercedVariables.of(variables), GraphQLContext.getDefault(), Locale.getDefault()); Object flag = argumentValues.get("if"); - Assert.assertTrue(flag instanceof Boolean, () -> String.format("The '%s' directive MUST have a value for the 'if' argument", directiveName)); + Assert.assertTrue(flag instanceof Boolean, "The '%s' directive MUST have a value for the 'if' argument", directiveName); return (Boolean) flag; } return defaultValue; } -} +} \ No newline at end of file diff --git a/src/main/java/graphql/introspection/GoodFaithIntrospection.java b/src/main/java/graphql/introspection/GoodFaithIntrospection.java index 7c4a21226e..327d6d9c53 100644 --- a/src/main/java/graphql/introspection/GoodFaithIntrospection.java +++ b/src/main/java/graphql/introspection/GoodFaithIntrospection.java @@ -19,6 +19,8 @@ import java.util.Optional; import java.util.concurrent.atomic.AtomicBoolean; +import static graphql.normalized.ExecutableNormalizedOperationFactory.Options; +import static graphql.normalized.ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation; import static graphql.schema.FieldCoordinates.coordinates; /** @@ -45,6 +47,14 @@ public class GoodFaithIntrospection { 
public static final String GOOD_FAITH_INTROSPECTION_DISABLED = "GOOD_FAITH_INTROSPECTION_DISABLED"; private static final AtomicBoolean ENABLED_STATE = new AtomicBoolean(true); + /** + * This is the maximum number of executable fields that can be in a good faith introspection query + */ + public static final int GOOD_FAITH_MAX_FIELDS_COUNT = 500; + /** + * This is the maximum depth a good faith introspection query can be + */ + public static final int GOOD_FAITH_MAX_DEPTH_COUNT = 20; /** * @return true if good faith introspection is enabled @@ -77,7 +87,7 @@ public static boolean enabledJvmWide(boolean flag) { public static Optional checkIntrospection(ExecutionContext executionContext) { if (isIntrospectionEnabled(executionContext.getGraphQLContext())) { - ExecutableNormalizedOperation operation = executionContext.getNormalizedQueryTree().get(); + ExecutableNormalizedOperation operation = mkOperation(executionContext); ImmutableListMultimap coordinatesToENFs = operation.getCoordinatesToNormalizedFields(); for (Map.Entry entry : ALLOWED_FIELD_INSTANCES.entrySet()) { FieldCoordinates coordinates = entry.getKey(); @@ -92,6 +102,29 @@ public static Optional checkIntrospection(ExecutionContext exec return Optional.empty(); } + /** + * This makes an executable operation that is limited in size, which suits a good faith introspection query. This helps guard + * against malicious queries. 
+ * + * @param executionContext the execution context + * + * @return an executable operation + */ + private static ExecutableNormalizedOperation mkOperation(ExecutionContext executionContext) { + Options options = Options.defaultOptions() + .maxFieldsCount(GOOD_FAITH_MAX_FIELDS_COUNT) + .maxChildrenDepth(GOOD_FAITH_MAX_DEPTH_COUNT) + .locale(executionContext.getLocale()) + .graphQLContext(executionContext.getGraphQLContext()); + + return createExecutableNormalizedOperation(executionContext.getGraphQLSchema(), + executionContext.getOperationDefinition(), + executionContext.getFragmentsByName(), + executionContext.getCoercedVariables(), + options); + + } + private static boolean isIntrospectionEnabled(GraphQLContext graphQlContext) { if (!isEnabledJvmWide()) { return false; diff --git a/src/main/java/graphql/introspection/Introspection.java b/src/main/java/graphql/introspection/Introspection.java index 37bf624e7b..124121c758 100644 --- a/src/main/java/graphql/introspection/Introspection.java +++ b/src/main/java/graphql/introspection/Introspection.java @@ -115,20 +115,20 @@ public static boolean isEnabledJvmWide() { */ public static Optional isIntrospectionSensible(MergedSelectionSet mergedSelectionSet, ExecutionContext executionContext) { GraphQLContext graphQLContext = executionContext.getGraphQLContext(); - MergedField schemaField = mergedSelectionSet.getSubField(SchemaMetaFieldDef.getName()); - if (schemaField != null) { - if (!isIntrospectionEnabled(graphQLContext)) { - return mkDisabledError(schemaField); - } - } - MergedField typeField = mergedSelectionSet.getSubField(TypeMetaFieldDef.getName()); - if (typeField != null) { - if (!isIntrospectionEnabled(graphQLContext)) { - return mkDisabledError(typeField); + + boolean isIntrospection = false; + for (String key : mergedSelectionSet.getKeys()) { + String fieldName = mergedSelectionSet.getSubField(key).getName(); + if (fieldName.equals(SchemaMetaFieldDef.getName()) + || 
fieldName.equals(TypeMetaFieldDef.getName())) { + if (!isIntrospectionEnabled(graphQLContext)) { + return mkDisabledError(mergedSelectionSet.getSubField(key)); + } + isIntrospection = true; + break; } } - if (schemaField != null || typeField != null) - { + if (isIntrospection) { return GoodFaithIntrospection.checkIntrospection(executionContext); } return Optional.empty(); diff --git a/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java b/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java index 4958649841..39054a9bdc 100644 --- a/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java +++ b/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java @@ -2,9 +2,10 @@ import com.google.common.collect.ImmutableListMultimap; import graphql.Assert; -import graphql.Internal; +import graphql.PublicApi; import graphql.execution.MergedField; import graphql.execution.ResultPath; +import graphql.execution.directives.QueryDirectives; import graphql.language.Field; import graphql.language.OperationDefinition; import graphql.schema.FieldCoordinates; @@ -13,14 +14,25 @@ import java.util.List; import java.util.Map; -@Internal +/** + * A {@link ExecutableNormalizedOperation} represent how the text of a graphql operation (sometimes known colloquially as a query) + * will be executed at runtime according to the graphql specification. It handles complex mechanisms like merging + * duplicate fields into one and also detecting when the types of a given field may actually be for more than one possible object + * type. + *

+ * An operation consists of a list of {@link ExecutableNormalizedField}s in a parent child hierarchy + */ +@PublicApi public class ExecutableNormalizedOperation { private final OperationDefinition.Operation operation; private final String operationName; private final List topLevelFields; private final ImmutableListMultimap fieldToNormalizedField; private final Map normalizedFieldToMergedField; + private final Map normalizedFieldToQueryDirectives; private final ImmutableListMultimap coordinatesToNormalizedFields; + private final int operationFieldCount; + private final int operationDepth; public ExecutableNormalizedOperation( OperationDefinition.Operation operation, @@ -28,53 +40,131 @@ public ExecutableNormalizedOperation( List topLevelFields, ImmutableListMultimap fieldToNormalizedField, Map normalizedFieldToMergedField, - ImmutableListMultimap coordinatesToNormalizedFields - ) { + Map normalizedFieldToQueryDirectives, + ImmutableListMultimap coordinatesToNormalizedFields, + int operationFieldCount, + int operationDepth) { this.operation = operation; this.operationName = operationName; this.topLevelFields = topLevelFields; this.fieldToNormalizedField = fieldToNormalizedField; this.normalizedFieldToMergedField = normalizedFieldToMergedField; + this.normalizedFieldToQueryDirectives = normalizedFieldToQueryDirectives; this.coordinatesToNormalizedFields = coordinatesToNormalizedFields; + this.operationFieldCount = operationFieldCount; + this.operationDepth = operationDepth; } + /** + * @return operation AST being executed + */ public OperationDefinition.Operation getOperation() { return operation; } + /** + * @return the operation name, which can be null + */ public String getOperationName() { return operationName; } + /** + * @return This returns how many {@link ExecutableNormalizedField}s are in the operation. 
+ */ + public int getOperationFieldCount() { + return operationFieldCount; + } + + /** + * @return This returns the depth of the operation + */ + public int getOperationDepth() { + return operationDepth; + } + + /** + * This multimap shows how a given {@link ExecutableNormalizedField} maps to a one or more field coordinate in the schema + * + * @return a multimap of fields to schema field coordinates + */ public ImmutableListMultimap getCoordinatesToNormalizedFields() { return coordinatesToNormalizedFields; } + /** + * @return a list of the top level {@link ExecutableNormalizedField}s in this operation. + */ public List getTopLevelFields() { return topLevelFields; } /** - * This is a multimap: the size of it reflects the all the normalized fields + * This is a multimap and the size of it reflects all the normalized fields in the operation * - * @return an immutable list multi map of field to normalised field + * @return an immutable list multimap of {@link Field} to {@link ExecutableNormalizedField} */ public ImmutableListMultimap getFieldToNormalizedField() { return fieldToNormalizedField; } + /** + * Looks up one or more {@link ExecutableNormalizedField}s given a {@link Field} AST element in the operation + * + * @param field the field to look up + * + * @return zero, one or more possible {@link ExecutableNormalizedField}s that represent that field + */ public List getNormalizedFields(Field field) { return fieldToNormalizedField.get(field); } + /** + * @return a map of {@link ExecutableNormalizedField} to {@link MergedField}s + */ public Map getNormalizedFieldToMergedField() { return normalizedFieldToMergedField; } + /** + * Looks up the {@link MergedField} given a {@link ExecutableNormalizedField} + * + * @param executableNormalizedField the field to use the key + * + * @return a {@link MergedField} or null if its not present + */ public MergedField getMergedField(ExecutableNormalizedField executableNormalizedField) { return 
normalizedFieldToMergedField.get(executableNormalizedField); } + /** + * @return a map of {@link ExecutableNormalizedField} to its {@link QueryDirectives} + */ + public Map getNormalizedFieldToQueryDirectives() { + return normalizedFieldToQueryDirectives; + + } + + /** + * This looks up the {@link QueryDirectives} associated with the given {@link ExecutableNormalizedField} + * + * @param executableNormalizedField the executable normalised field in question + * + * @return the fields query directives or null + */ + public QueryDirectives getQueryDirectives(ExecutableNormalizedField executableNormalizedField) { + return normalizedFieldToQueryDirectives.get(executableNormalizedField); + } + + /** + * This will find a {@link ExecutableNormalizedField} given a merged field and a result path. If this does not find a field it will assert with an exception + * + * @param mergedField the merged field + * @param fieldsContainer the containing type of that field + * @param resultPath the result path in play + * + * @return the ExecutableNormalizedField + */ public ExecutableNormalizedField getNormalizedField(MergedField mergedField, GraphQLFieldsContainer fieldsContainer, ResultPath resultPath) { List executableNormalizedFields = fieldToNormalizedField.get(mergedField.getSingleField()); List keysOnlyPath = resultPath.getKeysOnly(); @@ -87,4 +177,4 @@ public ExecutableNormalizedField getNormalizedField(MergedField mergedField, Gra } return Assert.assertShouldNeverHappen("normalized field not found"); } -} +} \ No newline at end of file diff --git a/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java b/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java index 6093fb0967..51e4a4e2f0 100644 --- a/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java +++ b/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java @@ -5,13 +5,16 @@ import com.google.common.collect.ImmutableMap; import 
com.google.common.collect.ImmutableSet; import graphql.GraphQLContext; -import graphql.Internal; +import graphql.PublicApi; import graphql.collect.ImmutableKit; +import graphql.execution.AbortExecutionException; import graphql.execution.CoercedVariables; -import graphql.execution.ConditionalNodes; +import graphql.execution.conditional.ConditionalNodes; import graphql.execution.MergedField; import graphql.execution.RawVariables; import graphql.execution.ValuesResolver; +import graphql.execution.directives.QueryDirectives; +import graphql.execution.directives.QueryDirectivesImpl; import graphql.introspection.Introspection; import graphql.language.Document; import graphql.language.Field; @@ -27,10 +30,10 @@ import graphql.schema.GraphQLCompositeType; import graphql.schema.GraphQLFieldDefinition; import graphql.schema.GraphQLInterfaceType; +import graphql.schema.GraphQLNamedOutputType; import graphql.schema.GraphQLObjectType; import graphql.schema.GraphQLSchema; import graphql.schema.GraphQLType; -import graphql.schema.GraphQLTypeUtil; import graphql.schema.GraphQLUnionType; import graphql.schema.GraphQLUnmodifiedType; import graphql.schema.impl.SchemaUtil; @@ -47,7 +50,6 @@ import static graphql.Assert.assertNotNull; import static graphql.Assert.assertShouldNeverHappen; import static graphql.collect.ImmutableKit.map; -import static graphql.execution.MergedField.newMergedField; import static graphql.schema.GraphQLTypeUtil.unwrapAll; import static graphql.util.FpKit.filterSet; import static graphql.util.FpKit.groupingBy; @@ -55,444 +57,744 @@ import static java.util.Collections.singleton; import static java.util.Collections.singletonList; -@Internal +/** + * This factory can create a {@link ExecutableNormalizedOperation} which represents what would be executed + * during a given graphql operation. 
+ */ +@PublicApi public class ExecutableNormalizedOperationFactory { - private final ConditionalNodes conditionalNodes = new ConditionalNodes(); + public static class Options { + private final GraphQLContext graphQLContext; + private final Locale locale; + private final int maxChildrenDepth; + private final int maxFieldsCount; + + private Options(GraphQLContext graphQLContext, + Locale locale, + int maxChildrenDepth, + int maxFieldsCount) { + this.graphQLContext = graphQLContext; + this.locale = locale; + this.maxChildrenDepth = maxChildrenDepth; + this.maxFieldsCount = maxFieldsCount; + } - public static ExecutableNormalizedOperation createExecutableNormalizedOperation(GraphQLSchema graphQLSchema, - Document document, - String operationName, - CoercedVariables coercedVariableValues) { + public static Options defaultOptions() { + return new Options( + GraphQLContext.getDefault(), + Locale.getDefault(), + Integer.MAX_VALUE, + Integer.MAX_VALUE); + } + + /** + * Locale to use when parsing the query. + *

+ * e.g. can be passed to {@link graphql.schema.Coercing} for parsing. + * + * @param locale the locale to use + * @return new options object to use + */ + public Options locale(Locale locale) { + return new Options(this.graphQLContext, locale, this.maxChildrenDepth, this.maxFieldsCount); + } + + /** + * Context object to use when parsing the operation. + *

+ * + * @param graphQLContext the context to use + * @return new options object to use + */ + public Options graphQLContext(GraphQLContext graphQLContext) { + return new Options(graphQLContext, this.locale, this.maxChildrenDepth, this.maxFieldsCount); + } + + /** + * Controls the maximum depth of the operation. Can be used to prevent + * against malicious operations. + * + * @param maxChildrenDepth the max depth + * @return new options object to use + */ + public Options maxChildrenDepth(int maxChildrenDepth) { + return new Options(this.graphQLContext, this.locale, maxChildrenDepth, this.maxFieldsCount); + } + + /** + * Controls the maximum number of ENFs created. Can be used to prevent + * against malicious operations. + * + * @param maxFieldsCount the max number of ENFs created + * @return new options object to use + */ + public Options maxFieldsCount(int maxFieldsCount) { + return new Options(this.graphQLContext, this.locale, this.maxChildrenDepth, maxFieldsCount); + } + + /** + * @return context to use during operation parsing + * @see #graphQLContext(GraphQLContext) + */ + public GraphQLContext getGraphQLContext() { + return graphQLContext; + } + + /** + * @return locale to use during operation parsing + * @see #locale(Locale) + */ + public Locale getLocale() { + return locale; + } + + /** + * @return maximum children depth before aborting parsing + * @see #maxChildrenDepth(int) + */ + public int getMaxChildrenDepth() { + return maxChildrenDepth; + } + + public int getMaxFieldsCount() { + return maxFieldsCount; + } + + } + + private static final ConditionalNodes conditionalNodes = new ConditionalNodes(); + + private ExecutableNormalizedOperationFactory() { + + } + + /** + * This will create a runtime representation of the graphql operation that would be executed + * in a runtime sense. 
+ * + * @param graphQLSchema the schema to be used + * @param document the {@link Document} holding the operation text + * @param operationName the operation name to use + * @param coercedVariableValues the coerced variables to use + * @return a runtime representation of the graphql operation. + */ + public static ExecutableNormalizedOperation createExecutableNormalizedOperation( + GraphQLSchema graphQLSchema, + Document document, + String operationName, + CoercedVariables coercedVariableValues + ) { + return createExecutableNormalizedOperation( + graphQLSchema, + document, + operationName, + coercedVariableValues, + Options.defaultOptions()); + } + + /** + * This will create a runtime representation of the graphql operation that would be executed + * in a runtime sense. + * + * @param graphQLSchema the schema to be used + * @param document the {@link Document} holding the operation text + * @param operationName the operation name to use + * @param coercedVariableValues the coerced variables to use + * @param options the {@link Options} to use for parsing + * @return a runtime representation of the graphql operation. 
+ */ + public static ExecutableNormalizedOperation createExecutableNormalizedOperation( + GraphQLSchema graphQLSchema, + Document document, + String operationName, + CoercedVariables coercedVariableValues, + Options options + ) { NodeUtil.GetOperationResult getOperationResult = NodeUtil.getOperation(document, operationName); - return new ExecutableNormalizedOperationFactory().createNormalizedQueryImpl(graphQLSchema, getOperationResult.operationDefinition, getOperationResult.fragmentsByName, coercedVariableValues, null); + + return new ExecutableNormalizedOperationFactoryImpl( + graphQLSchema, + getOperationResult.operationDefinition, + getOperationResult.fragmentsByName, + coercedVariableValues, + null, + options + ).createNormalizedQueryImpl(); } + /** + * This will create a runtime representation of the graphql operation that would be executed + * in a runtime sense. + * + * @param graphQLSchema the schema to be used + * @param operationDefinition the operation to be executed + * @param fragments a set of fragments associated with the operation + * @param coercedVariableValues the coerced variables to use + * @return a runtime representation of the graphql operation. + */ public static ExecutableNormalizedOperation createExecutableNormalizedOperation(GraphQLSchema graphQLSchema, OperationDefinition operationDefinition, Map fragments, CoercedVariables coercedVariableValues) { - return new ExecutableNormalizedOperationFactory().createNormalizedQueryImpl(graphQLSchema, operationDefinition, fragments, coercedVariableValues, null); + return createExecutableNormalizedOperation(graphQLSchema, + operationDefinition, + fragments, + coercedVariableValues, + Options.defaultOptions()); + } + + /** + * This will create a runtime representation of the graphql operation that would be executed + * in a runtime sense. 
+ * + * @param graphQLSchema the schema to be used + * @param operationDefinition the operation to be executed + * @param fragments a set of fragments associated with the operation + * @param coercedVariableValues the coerced variables to use + * @return a runtime representation of the graphql operation. + */ + public static ExecutableNormalizedOperation createExecutableNormalizedOperation(GraphQLSchema graphQLSchema, + OperationDefinition operationDefinition, + Map fragments, + CoercedVariables coercedVariableValues, + Options options) { + return new ExecutableNormalizedOperationFactoryImpl( + graphQLSchema, + operationDefinition, + fragments, + coercedVariableValues, + null, + options + ).createNormalizedQueryImpl(); } + /** + * This will create a runtime representation of the graphql operation that would be executed + * in a runtime sense. + * + * @param graphQLSchema the schema to be used + * @param document the {@link Document} holding the operation text + * @param operationName the operation name to use + * @param rawVariables the raw variables to be coerced + * @return a runtime representation of the graphql operation. + */ public static ExecutableNormalizedOperation createExecutableNormalizedOperationWithRawVariables(GraphQLSchema graphQLSchema, Document document, String operationName, RawVariables rawVariables) { - return createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, operationName, rawVariables, GraphQLContext.getDefault(), Locale.getDefault()); + return createExecutableNormalizedOperationWithRawVariables(graphQLSchema, + document, + operationName, + rawVariables, + Options.defaultOptions()); } + + /** + * This will create a runtime representation of the graphql operation that would be executed + * in a runtime sense. 
+ * + * @param graphQLSchema the schema to be used + * @param document the {@link Document} holding the operation text + * @param operationName the operation name to use + * @param rawVariables the raw variables that have not yet been coerced + * @param locale the {@link Locale} to use during coercion + * @param graphQLContext the {@link GraphQLContext} to use during coercion + * @return a runtime representation of the graphql operation. + */ + public static ExecutableNormalizedOperation createExecutableNormalizedOperationWithRawVariables( + GraphQLSchema graphQLSchema, + Document document, + String operationName, + RawVariables rawVariables, + GraphQLContext graphQLContext, + Locale locale + ) { + return createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + operationName, + rawVariables, + Options.defaultOptions().graphQLContext(graphQLContext).locale(locale)); + } + + + /** + * This will create a runtime representation of the graphql operation that would be executed + * in a runtime sense. + * + * @param graphQLSchema the schema to be used + * @param document the {@link Document} holding the operation text + * @param operationName the operation name to use + * @param rawVariables the raw variables that have not yet been coerced + * @param options the {@link Options} to use for parsing + * @return a runtime representation of the graphql operation. 
+ */ public static ExecutableNormalizedOperation createExecutableNormalizedOperationWithRawVariables(GraphQLSchema graphQLSchema, Document document, String operationName, RawVariables rawVariables, - GraphQLContext graphQLContext, - Locale locale) { + Options options) { NodeUtil.GetOperationResult getOperationResult = NodeUtil.getOperation(document, operationName); - return new ExecutableNormalizedOperationFactory().createExecutableNormalizedOperationImplWithRawVariables(graphQLSchema, getOperationResult.operationDefinition, getOperationResult.fragmentsByName, rawVariables, graphQLContext, locale); - } - - private ExecutableNormalizedOperation createExecutableNormalizedOperationImplWithRawVariables(GraphQLSchema graphQLSchema, - OperationDefinition operationDefinition, - Map fragments, - RawVariables rawVariables, - GraphQLContext graphQLContext, - Locale locale) { + OperationDefinition operationDefinition = getOperationResult.operationDefinition; List variableDefinitions = operationDefinition.getVariableDefinitions(); - CoercedVariables coercedVariableValues = ValuesResolver.coerceVariableValues(graphQLSchema, variableDefinitions, rawVariables, graphQLContext, locale); - Map normalizedVariableValues = ValuesResolver.getNormalizedVariableValues(graphQLSchema, variableDefinitions, rawVariables, graphQLContext, locale); - return createNormalizedQueryImpl(graphQLSchema, operationDefinition, fragments, coercedVariableValues, normalizedVariableValues); + CoercedVariables coercedVariableValues = ValuesResolver.coerceVariableValues(graphQLSchema, + variableDefinitions, + rawVariables, + options.getGraphQLContext(), + options.getLocale()); + Map normalizedVariableValues = ValuesResolver.getNormalizedVariableValues(graphQLSchema, + variableDefinitions, + rawVariables, + options.getGraphQLContext(), + options.getLocale()); + + return new ExecutableNormalizedOperationFactoryImpl( + graphQLSchema, + operationDefinition, + getOperationResult.fragmentsByName, + 
coercedVariableValues, + normalizedVariableValues, + options + ).createNormalizedQueryImpl(); } - /** - * Creates a new Normalized query tree for the provided query - */ - private ExecutableNormalizedOperation createNormalizedQueryImpl(GraphQLSchema graphQLSchema, - OperationDefinition operationDefinition, - Map fragments, - CoercedVariables coercedVariableValues, - @Nullable Map normalizedVariableValues) { - FieldCollectorNormalizedQueryParams parameters = FieldCollectorNormalizedQueryParams - .newParameters() - .fragments(fragments) - .schema(graphQLSchema) - .coercedVariables(coercedVariableValues.toMap()) - .normalizedVariables(normalizedVariableValues) - .build(); - - GraphQLObjectType rootType = SchemaUtil.getOperationRootType(graphQLSchema, operationDefinition); - - CollectNFResult collectFromOperationResult = collectFromOperation(parameters, operationDefinition, rootType); - - ImmutableListMultimap.Builder fieldToNormalizedField = ImmutableListMultimap.builder(); - ImmutableMap.Builder normalizedFieldToMergedField = ImmutableMap.builder(); - ImmutableListMultimap.Builder coordinatesToNormalizedFields = ImmutableListMultimap.builder(); - - for (ExecutableNormalizedField topLevel : collectFromOperationResult.children) { - ImmutableList mergedField = collectFromOperationResult.normalizedFieldToAstFields.get(topLevel); - normalizedFieldToMergedField.put(topLevel, newMergedField(map(mergedField, fieldAndAstParent -> fieldAndAstParent.field)).build()); - updateFieldToNFMap(topLevel, mergedField, fieldToNormalizedField); - updateCoordinatedToNFMap(coordinatesToNormalizedFields, topLevel); - - buildFieldWithChildren(topLevel, - mergedField, - parameters, - fieldToNormalizedField, - normalizedFieldToMergedField, - coordinatesToNormalizedFields, - 1); - - } - for (FieldCollectorNormalizedQueryParams.PossibleMerger possibleMerger : parameters.possibleMergerList) { - List childrenWithSameResultKey = 
possibleMerger.parent.getChildrenWithSameResultKey(possibleMerger.resultKey); - ENFMerger.merge(possibleMerger.parent, childrenWithSameResultKey, graphQLSchema); - } - return new ExecutableNormalizedOperation( - operationDefinition.getOperation(), - operationDefinition.getName(), - new ArrayList<>(collectFromOperationResult.children), - fieldToNormalizedField.build(), - normalizedFieldToMergedField.build(), - coordinatesToNormalizedFields.build() - ); - } - - private void buildFieldWithChildren(ExecutableNormalizedField field, - ImmutableList mergedField, - FieldCollectorNormalizedQueryParams fieldCollectorNormalizedQueryParams, - ImmutableListMultimap.Builder fieldNormalizedField, - ImmutableMap.Builder normalizedFieldToMergedField, - ImmutableListMultimap.Builder coordinatesToNormalizedFields, - int curLevel) { - CollectNFResult nextLevel = collectFromMergedField(fieldCollectorNormalizedQueryParams, field, mergedField, curLevel + 1); - - for (ExecutableNormalizedField child : nextLevel.children) { - field.addChild(child); - ImmutableList mergedFieldForChild = nextLevel.normalizedFieldToAstFields.get(child); - normalizedFieldToMergedField.put(child, newMergedField(map(mergedFieldForChild, fieldAndAstParent -> fieldAndAstParent.field)).build()); - updateFieldToNFMap(child, mergedFieldForChild, fieldNormalizedField); - updateCoordinatedToNFMap(coordinatesToNormalizedFields, child); - - buildFieldWithChildren(child, - mergedFieldForChild, - fieldCollectorNormalizedQueryParams, - fieldNormalizedField, - normalizedFieldToMergedField, - coordinatesToNormalizedFields, - curLevel + 1); + private static class ExecutableNormalizedOperationFactoryImpl { + private final GraphQLSchema graphQLSchema; + private final OperationDefinition operationDefinition; + private final Map fragments; + private final CoercedVariables coercedVariableValues; + private final @Nullable Map normalizedVariableValues; + private final Options options; + + private final List possibleMergerList = new 
ArrayList<>(); + + private final ImmutableListMultimap.Builder fieldToNormalizedField = ImmutableListMultimap.builder(); + private final ImmutableMap.Builder normalizedFieldToMergedField = ImmutableMap.builder(); + private final ImmutableMap.Builder normalizedFieldToQueryDirectives = ImmutableMap.builder(); + private final ImmutableListMultimap.Builder coordinatesToNormalizedFields = ImmutableListMultimap.builder(); + private int fieldCount = 0; + private int maxDepthSeen = 0; + + private ExecutableNormalizedOperationFactoryImpl( + GraphQLSchema graphQLSchema, + OperationDefinition operationDefinition, + Map fragments, + CoercedVariables coercedVariableValues, + @Nullable Map normalizedVariableValues, + Options options + ) { + this.graphQLSchema = graphQLSchema; + this.operationDefinition = operationDefinition; + this.fragments = fragments; + this.coercedVariableValues = coercedVariableValues; + this.normalizedVariableValues = normalizedVariableValues; + this.options = options; } - } - private void updateFieldToNFMap(ExecutableNormalizedField executableNormalizedField, - ImmutableList mergedField, - ImmutableListMultimap.Builder fieldToNormalizedField) { - for (FieldAndAstParent astField : mergedField) { - fieldToNormalizedField.put(astField.field, executableNormalizedField); + /** + * Creates a new ExecutableNormalizedOperation for the provided query + */ + private ExecutableNormalizedOperation createNormalizedQueryImpl() { + GraphQLObjectType rootType = SchemaUtil.getOperationRootType(graphQLSchema, operationDefinition); + + CollectNFResult collectFromOperationResult = collectFromOperation(rootType); + + for (ExecutableNormalizedField topLevel : collectFromOperationResult.children) { + ImmutableList fieldAndAstParents = collectFromOperationResult.normalizedFieldToAstFields.get(topLevel); + MergedField mergedField = newMergedField(fieldAndAstParents); + + captureMergedField(topLevel, mergedField); + + updateFieldToNFMap(topLevel, fieldAndAstParents); + 
updateCoordinatedToNFMap(topLevel); + + int depthSeen = buildFieldWithChildren( + topLevel, + fieldAndAstParents, + 1); + maxDepthSeen = Math.max(maxDepthSeen, depthSeen); + } + // getPossibleMergerList + for (PossibleMerger possibleMerger : possibleMergerList) { + List childrenWithSameResultKey = possibleMerger.parent.getChildrenWithSameResultKey(possibleMerger.resultKey); + ENFMerger.merge(possibleMerger.parent, childrenWithSameResultKey, graphQLSchema); + } + return new ExecutableNormalizedOperation( + operationDefinition.getOperation(), + operationDefinition.getName(), + new ArrayList<>(collectFromOperationResult.children), + fieldToNormalizedField.build(), + normalizedFieldToMergedField.build(), + normalizedFieldToQueryDirectives.build(), + coordinatesToNormalizedFields.build(), + fieldCount, + maxDepthSeen + ); } - } - private void updateCoordinatedToNFMap(ImmutableListMultimap.Builder coordinatesToNormalizedFields, ExecutableNormalizedField topLevel) { - for (String objectType : topLevel.getObjectTypeNames()) { - FieldCoordinates coordinates = FieldCoordinates.coordinates(objectType, topLevel.getFieldName()); - coordinatesToNormalizedFields.put(coordinates, topLevel); + private void captureMergedField(ExecutableNormalizedField enf, MergedField mergedFld) { + // QueryDirectivesImpl is a lazy object and only computes itself when asked for + QueryDirectives queryDirectives = new QueryDirectivesImpl(mergedFld, graphQLSchema, coercedVariableValues.toMap(), options.getGraphQLContext(), options.getLocale()); + normalizedFieldToQueryDirectives.put(enf, queryDirectives); + normalizedFieldToMergedField.put(enf, mergedFld); } - } - private static class FieldAndAstParent { - final Field field; - final GraphQLCompositeType astParentType; + private int buildFieldWithChildren(ExecutableNormalizedField executableNormalizedField, + ImmutableList fieldAndAstParents, + int curLevel) { + checkMaxDepthExceeded(curLevel); - private FieldAndAstParent(Field field, 
GraphQLCompositeType astParentType) { - this.field = field; - this.astParentType = astParentType; - } - } + CollectNFResult nextLevel = collectFromMergedField(executableNormalizedField, fieldAndAstParents, curLevel + 1); + + int maxDepthSeen = curLevel; + for (ExecutableNormalizedField childENF : nextLevel.children) { + executableNormalizedField.addChild(childENF); + ImmutableList childFieldAndAstParents = nextLevel.normalizedFieldToAstFields.get(childENF); + + MergedField mergedField = newMergedField(childFieldAndAstParents); + captureMergedField(childENF, mergedField); + updateFieldToNFMap(childENF, childFieldAndAstParents); + updateCoordinatedToNFMap(childENF); - public static class CollectNFResult { - private final Collection children; - private final ImmutableListMultimap normalizedFieldToAstFields; + int depthSeen = buildFieldWithChildren(childENF, + childFieldAndAstParents, + curLevel + 1); + maxDepthSeen = Math.max(maxDepthSeen, depthSeen); - public CollectNFResult(Collection children, ImmutableListMultimap normalizedFieldToAstFields) { - this.children = children; - this.normalizedFieldToAstFields = normalizedFieldToAstFields; + checkMaxDepthExceeded(maxDepthSeen); + } + return maxDepthSeen; } - } + private void checkMaxDepthExceeded(int depthSeen) { + if (depthSeen > this.options.getMaxChildrenDepth()) { + throw new AbortExecutionException("Maximum query depth exceeded. 
" + depthSeen + " > " + this.options.getMaxChildrenDepth()); + } + } - public CollectNFResult collectFromMergedField(FieldCollectorNormalizedQueryParams parameters, - ExecutableNormalizedField executableNormalizedField, - ImmutableList mergedField, - int level) { - List fieldDefs = executableNormalizedField.getFieldDefinitions(parameters.getGraphQLSchema()); - Set possibleObjects = resolvePossibleObjects(fieldDefs, parameters.getGraphQLSchema()); - if (possibleObjects.isEmpty()) { - return new CollectNFResult(ImmutableKit.emptyList(), ImmutableListMultimap.of()); + private static MergedField newMergedField(ImmutableList fieldAndAstParents) { + return MergedField.newMergedField(map(fieldAndAstParents, fieldAndAstParent -> fieldAndAstParent.field)).build(); } - List collectedFields = new ArrayList<>(); - for (FieldAndAstParent fieldAndAstParent : mergedField) { - if (fieldAndAstParent.field.getSelectionSet() == null) { - continue; + private void updateFieldToNFMap(ExecutableNormalizedField executableNormalizedField, + ImmutableList mergedField) { + for (FieldAndAstParent astField : mergedField) { + fieldToNormalizedField.put(astField.field, executableNormalizedField); } - GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDef(parameters.getGraphQLSchema(), fieldAndAstParent.astParentType, fieldAndAstParent.field.getName()); - GraphQLUnmodifiedType astParentType = unwrapAll(fieldDefinition.getType()); - this.collectFromSelectionSet(parameters, - fieldAndAstParent.field.getSelectionSet(), - collectedFields, - (GraphQLCompositeType) astParentType, - possibleObjects - ); } - Map> fieldsByName = fieldsByResultKey(collectedFields); - ImmutableList.Builder resultNFs = ImmutableList.builder(); - ImmutableListMultimap.Builder normalizedFieldToAstFields = ImmutableListMultimap.builder(); - createNFs(resultNFs, parameters, fieldsByName, normalizedFieldToAstFields, level, executableNormalizedField); + private void updateCoordinatedToNFMap(ExecutableNormalizedField 
topLevel) { + for (String objectType : topLevel.getObjectTypeNames()) { + FieldCoordinates coordinates = FieldCoordinates.coordinates(objectType, topLevel.getFieldName()); + coordinatesToNormalizedFields.put(coordinates, topLevel); + } + } - return new CollectNFResult(resultNFs.build(), normalizedFieldToAstFields.build()); - } + public CollectNFResult collectFromMergedField(ExecutableNormalizedField executableNormalizedField, + ImmutableList mergedField, + int level) { + List fieldDefs = executableNormalizedField.getFieldDefinitions(graphQLSchema); + Set possibleObjects = resolvePossibleObjects(fieldDefs); + if (possibleObjects.isEmpty()) { + return new CollectNFResult(ImmutableKit.emptyList(), ImmutableListMultimap.of()); + } - private Map> fieldsByResultKey(List collectedFields) { - Map> fieldsByName = new LinkedHashMap<>(); - for (CollectedField collectedField : collectedFields) { - fieldsByName.computeIfAbsent(collectedField.field.getResultKey(), ignored -> new ArrayList<>()).add(collectedField); + List collectedFields = new ArrayList<>(); + for (FieldAndAstParent fieldAndAstParent : mergedField) { + if (fieldAndAstParent.field.getSelectionSet() == null) { + continue; + } + GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDef(graphQLSchema, fieldAndAstParent.astParentType, fieldAndAstParent.field.getName()); + GraphQLUnmodifiedType astParentType = unwrapAll(fieldDefinition.getType()); + this.collectFromSelectionSet(fieldAndAstParent.field.getSelectionSet(), + collectedFields, + (GraphQLCompositeType) astParentType, + possibleObjects + ); + } + Map> fieldsByName = fieldsByResultKey(collectedFields); + ImmutableList.Builder resultNFs = ImmutableList.builder(); + ImmutableListMultimap.Builder normalizedFieldToAstFields = ImmutableListMultimap.builder(); + + createNFs(resultNFs, fieldsByName, normalizedFieldToAstFields, level, executableNormalizedField); + + return new CollectNFResult(resultNFs.build(), normalizedFieldToAstFields.build()); } - return 
fieldsByName; - } - public CollectNFResult collectFromOperation(FieldCollectorNormalizedQueryParams parameters, - OperationDefinition operationDefinition, - GraphQLObjectType rootType) { + private Map> fieldsByResultKey(List collectedFields) { + Map> fieldsByName = new LinkedHashMap<>(); + for (CollectedField collectedField : collectedFields) { + fieldsByName.computeIfAbsent(collectedField.field.getResultKey(), ignored -> new ArrayList<>()).add(collectedField); + } + return fieldsByName; + } + public CollectNFResult collectFromOperation(GraphQLObjectType rootType) { - Set possibleObjects = ImmutableSet.of(rootType); - List collectedFields = new ArrayList<>(); - collectFromSelectionSet(parameters, operationDefinition.getSelectionSet(), collectedFields, rootType, possibleObjects); - // group by result key - Map> fieldsByName = fieldsByResultKey(collectedFields); - ImmutableList.Builder resultNFs = ImmutableList.builder(); - ImmutableListMultimap.Builder normalizedFieldToAstFields = ImmutableListMultimap.builder(); - createNFs(resultNFs, parameters, fieldsByName, normalizedFieldToAstFields, 1, null); + Set possibleObjects = ImmutableSet.of(rootType); + List collectedFields = new ArrayList<>(); + collectFromSelectionSet(operationDefinition.getSelectionSet(), collectedFields, rootType, possibleObjects); + // group by result key + Map> fieldsByName = fieldsByResultKey(collectedFields); + ImmutableList.Builder resultNFs = ImmutableList.builder(); + ImmutableListMultimap.Builder normalizedFieldToAstFields = ImmutableListMultimap.builder(); - return new CollectNFResult(resultNFs.build(), normalizedFieldToAstFields.build()); - } + createNFs(resultNFs, fieldsByName, normalizedFieldToAstFields, 1, null); - private void createNFs(ImmutableList.Builder nfListBuilder, - FieldCollectorNormalizedQueryParams parameters, - Map> fieldsByName, - ImmutableListMultimap.Builder normalizedFieldToAstFields, - int level, - ExecutableNormalizedField parent) { - for (String resultKey : 
fieldsByName.keySet()) { - List fieldsWithSameResultKey = fieldsByName.get(resultKey); - List commonParentsGroups = groupByCommonParents(fieldsWithSameResultKey); - for (CollectedFieldGroup fieldGroup : commonParentsGroups) { - ExecutableNormalizedField nf = createNF(parameters, fieldGroup, level, parent); - if (nf == null) { - continue; + return new CollectNFResult(resultNFs.build(), normalizedFieldToAstFields.build()); + } + + private void createNFs(ImmutableList.Builder nfListBuilder, + Map> fieldsByName, + ImmutableListMultimap.Builder normalizedFieldToAstFields, + int level, + ExecutableNormalizedField parent) { + for (String resultKey : fieldsByName.keySet()) { + List fieldsWithSameResultKey = fieldsByName.get(resultKey); + List commonParentsGroups = groupByCommonParents(fieldsWithSameResultKey); + for (CollectedFieldGroup fieldGroup : commonParentsGroups) { + ExecutableNormalizedField nf = createNF(fieldGroup, level, parent); + if (nf == null) { + continue; + } + for (CollectedField collectedField : fieldGroup.fields) { + normalizedFieldToAstFields.put(nf, new FieldAndAstParent(collectedField.field, collectedField.astTypeCondition)); + } + nfListBuilder.add(nf); } - for (CollectedField collectedField : fieldGroup.fields) { - normalizedFieldToAstFields.put(nf, new FieldAndAstParent(collectedField.field, collectedField.astTypeCondition)); + if (commonParentsGroups.size() > 1) { + possibleMergerList.add(new PossibleMerger(parent, resultKey)); } - nfListBuilder.add(nf); - } - if (commonParentsGroups.size() > 1) { - parameters.addPossibleMergers(parent, resultKey); } } - } - - private ExecutableNormalizedField createNF(FieldCollectorNormalizedQueryParams parameters, - CollectedFieldGroup collectedFieldGroup, - int level, - ExecutableNormalizedField parent) { - Field field; - Set objectTypes = collectedFieldGroup.objectTypes; - field = collectedFieldGroup.fields.iterator().next().field; - String fieldName = field.getName(); - GraphQLFieldDefinition fieldDefinition 
= Introspection.getFieldDef(parameters.getGraphQLSchema(), objectTypes.iterator().next(), fieldName); - - Map argumentValues = ValuesResolver.getArgumentValues(fieldDefinition.getArguments(), field.getArguments(), CoercedVariables.of(parameters.getCoercedVariableValues()), parameters.getGraphQLContext(), parameters.getLocale()); - Map normalizedArgumentValues = null; - if (parameters.getNormalizedVariableValues() != null) { - normalizedArgumentValues = ValuesResolver.getNormalizedArgumentValues(fieldDefinition.getArguments(), field.getArguments(), parameters.getNormalizedVariableValues()); - } - ImmutableList objectTypeNames = map(objectTypes, GraphQLObjectType::getName); - - return ExecutableNormalizedField.newNormalizedField() - .alias(field.getAlias()) - .resolvedArguments(argumentValues) - .normalizedArguments(normalizedArgumentValues) - .astArguments(field.getArguments()) - .objectTypeNames(objectTypeNames) - .fieldName(fieldName) - .level(level) - .parent(parent) - .build(); - } - private static class CollectedFieldGroup { - Set objectTypes; - Set fields; + private ExecutableNormalizedField createNF(CollectedFieldGroup collectedFieldGroup, + int level, + ExecutableNormalizedField parent) { - public CollectedFieldGroup(Set fields, Set objectTypes) { - this.fields = fields; - this.objectTypes = objectTypes; + this.fieldCount++; + if (this.fieldCount > this.options.getMaxFieldsCount()) { + throw new AbortExecutionException("Maximum field count exceeded. 
" + this.fieldCount + " > " + this.options.getMaxFieldsCount()); + } + Field field; + Set objectTypes = collectedFieldGroup.objectTypes; + field = collectedFieldGroup.fields.iterator().next().field; + String fieldName = field.getName(); + GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDef(graphQLSchema, objectTypes.iterator().next(), fieldName); + + Map argumentValues = ValuesResolver.getArgumentValues(fieldDefinition.getArguments(), field.getArguments(), CoercedVariables.of(this.coercedVariableValues.toMap()), this.options.graphQLContext, this.options.locale); + Map normalizedArgumentValues = null; + if (this.normalizedVariableValues != null) { + normalizedArgumentValues = ValuesResolver.getNormalizedArgumentValues(fieldDefinition.getArguments(), field.getArguments(), this.normalizedVariableValues); + } + ImmutableList objectTypeNames = map(objectTypes, GraphQLObjectType::getName); + return ExecutableNormalizedField.newNormalizedField() + .alias(field.getAlias()) + .resolvedArguments(argumentValues) + .normalizedArguments(normalizedArgumentValues) + .astArguments(field.getArguments()) + .objectTypeNames(objectTypeNames) + .fieldName(fieldName) + .level(level) + .parent(parent) + .build(); } - } - private List groupByCommonParents(Collection fields) { - ImmutableSet.Builder objectTypes = ImmutableSet.builder(); - for (CollectedField collectedField : fields) { - objectTypes.addAll(collectedField.objectTypes); + private List groupByCommonParents(Collection fields) { + return groupByCommonParentsNoDeferSupport(fields); } - Set allRelevantObjects = objectTypes.build(); - Map> groupByAstParent = groupingBy(fields, fieldAndType -> fieldAndType.astTypeCondition); - if (groupByAstParent.size() == 1) { - return singletonList(new CollectedFieldGroup(ImmutableSet.copyOf(fields), allRelevantObjects)); + + private List groupByCommonParentsNoDeferSupport(Collection fields) { + ImmutableSet.Builder objectTypes = ImmutableSet.builder(); + for (CollectedField 
collectedField : fields) { + objectTypes.addAll(collectedField.objectTypes); + } + Set allRelevantObjects = objectTypes.build(); + Map> groupByAstParent = groupingBy(fields, fieldAndType -> fieldAndType.astTypeCondition); + if (groupByAstParent.size() == 1) { + return singletonList(new CollectedFieldGroup(ImmutableSet.copyOf(fields), allRelevantObjects)); + } + ImmutableList.Builder result = ImmutableList.builder(); + for (GraphQLObjectType objectType : allRelevantObjects) { + Set relevantFields = filterSet(fields, field -> field.objectTypes.contains(objectType)); + result.add(new CollectedFieldGroup(relevantFields, singleton(objectType))); + } + return result.build(); } - ImmutableList.Builder result = ImmutableList.builder(); - for (GraphQLObjectType objectType : allRelevantObjects) { - Set relevantFields = filterSet(fields, field -> field.objectTypes.contains(objectType)); - result.add(new CollectedFieldGroup(relevantFields, singleton(objectType))); + + private void collectFromSelectionSet(SelectionSet selectionSet, + List result, + GraphQLCompositeType astTypeCondition, + Set possibleObjects + ) { + for (Selection selection : selectionSet.getSelections()) { + if (selection instanceof Field) { + collectField(result, (Field) selection, possibleObjects, astTypeCondition); + } else if (selection instanceof InlineFragment) { + collectInlineFragment(result, (InlineFragment) selection, possibleObjects, astTypeCondition); + } else if (selection instanceof FragmentSpread) { + collectFragmentSpread(result, (FragmentSpread) selection, possibleObjects); + } + } } - return result.build(); - } + private void collectFragmentSpread(List result, + FragmentSpread fragmentSpread, + Set possibleObjects + ) { + if (!conditionalNodes.shouldInclude(fragmentSpread, + this.coercedVariableValues.toMap(), + this.graphQLSchema, + this.options.graphQLContext)) { + return; + } + FragmentDefinition fragmentDefinition = assertNotNull(this.fragments.get(fragmentSpread.getName())); - private 
void collectFromSelectionSet(FieldCollectorNormalizedQueryParams parameters, - SelectionSet selectionSet, - List result, - GraphQLCompositeType astTypeCondition, - Set possibleObjects - ) { - for (Selection selection : selectionSet.getSelections()) { - if (selection instanceof Field) { - collectField(parameters, result, (Field) selection, possibleObjects, astTypeCondition); - } else if (selection instanceof InlineFragment) { - collectInlineFragment(parameters, result, (InlineFragment) selection, possibleObjects, astTypeCondition); - } else if (selection instanceof FragmentSpread) { - collectFragmentSpread(parameters, result, (FragmentSpread) selection, possibleObjects, astTypeCondition); + if (!conditionalNodes.shouldInclude(fragmentDefinition, + this.coercedVariableValues.toMap(), + this.graphQLSchema, + this.options.graphQLContext)) { + return; } + GraphQLCompositeType newAstTypeCondition = (GraphQLCompositeType) assertNotNull(this.graphQLSchema.getType(fragmentDefinition.getTypeCondition().getName())); + Set newPossibleObjects = narrowDownPossibleObjects(possibleObjects, newAstTypeCondition); + + collectFromSelectionSet(fragmentDefinition.getSelectionSet(), result, newAstTypeCondition, newPossibleObjects); } - } - private static class CollectedField { - Field field; - Set objectTypes; - GraphQLCompositeType astTypeCondition; + private void collectInlineFragment(List result, + InlineFragment inlineFragment, + Set possibleObjects, + GraphQLCompositeType astTypeCondition + ) { + if (!conditionalNodes.shouldInclude(inlineFragment, this.coercedVariableValues.toMap(), this.graphQLSchema, this.options.graphQLContext)) { + return; + } + Set newPossibleObjects = possibleObjects; + GraphQLCompositeType newAstTypeCondition = astTypeCondition; - public CollectedField(Field field, Set objectTypes, GraphQLCompositeType astTypeCondition) { - this.field = field; - this.objectTypes = objectTypes; - this.astTypeCondition = astTypeCondition; - } + if 
(inlineFragment.getTypeCondition() != null) { + newAstTypeCondition = (GraphQLCompositeType) this.graphQLSchema.getType(inlineFragment.getTypeCondition().getName()); + newPossibleObjects = narrowDownPossibleObjects(possibleObjects, newAstTypeCondition); - public boolean isAbstract() { - return GraphQLTypeUtil.isInterfaceOrUnion(astTypeCondition); - } + } - public boolean isConcrete() { - return GraphQLTypeUtil.isObjectType(astTypeCondition); + collectFromSelectionSet(inlineFragment.getSelectionSet(), result, newAstTypeCondition, newPossibleObjects); } - } - private void collectFragmentSpread(FieldCollectorNormalizedQueryParams parameters, - List result, - FragmentSpread fragmentSpread, - Set possibleObjects, - GraphQLCompositeType astTypeCondition - ) { - if (!conditionalNodes.shouldInclude(parameters.getCoercedVariableValues(), fragmentSpread.getDirectives())) { - return; + private void collectField(List result, + Field field, + Set possibleObjectTypes, + GraphQLCompositeType astTypeCondition + ) { + if (!conditionalNodes.shouldInclude(field, + this.coercedVariableValues.toMap(), + this.graphQLSchema, + this.options.graphQLContext)) { + return; + } + // this means there is actually no possible type for this field, and we are done + if (possibleObjectTypes.isEmpty()) { + return; + } + result.add(new CollectedField(field, possibleObjectTypes, astTypeCondition)); } - FragmentDefinition fragmentDefinition = assertNotNull(parameters.getFragmentsByName().get(fragmentSpread.getName())); - if (!conditionalNodes.shouldInclude(parameters.getCoercedVariableValues(), fragmentDefinition.getDirectives())) { - return; - } - GraphQLCompositeType newAstTypeCondition = (GraphQLCompositeType) assertNotNull(parameters.getGraphQLSchema().getType(fragmentDefinition.getTypeCondition().getName())); - Set newPossibleObjects = narrowDownPossibleObjects(possibleObjects, newAstTypeCondition, parameters.getGraphQLSchema()); - collectFromSelectionSet(parameters, 
fragmentDefinition.getSelectionSet(), result, newAstTypeCondition, newPossibleObjects); - } + private Set narrowDownPossibleObjects(Set currentOnes, + GraphQLCompositeType typeCondition) { + ImmutableSet resolvedTypeCondition = resolvePossibleObjects(typeCondition); + if (currentOnes.isEmpty()) { + return resolvedTypeCondition; + } - private void collectInlineFragment(FieldCollectorNormalizedQueryParams parameters, - List result, - InlineFragment inlineFragment, - Set possibleObjects, - GraphQLCompositeType astTypeCondition - ) { - if (!conditionalNodes.shouldInclude(parameters.getCoercedVariableValues(), inlineFragment.getDirectives())) { - return; + // Faster intersection, as either set often has a size of 1. + return intersection(currentOnes, resolvedTypeCondition); } - Set newPossibleObjects = possibleObjects; - GraphQLCompositeType newAstTypeCondition = astTypeCondition; - if (inlineFragment.getTypeCondition() != null) { - newAstTypeCondition = (GraphQLCompositeType) parameters.getGraphQLSchema().getType(inlineFragment.getTypeCondition().getName()); - newPossibleObjects = narrowDownPossibleObjects(possibleObjects, newAstTypeCondition, parameters.getGraphQLSchema()); + private ImmutableSet resolvePossibleObjects(List defs) { + ImmutableSet.Builder builder = ImmutableSet.builder(); + for (GraphQLFieldDefinition def : defs) { + GraphQLUnmodifiedType outputType = unwrapAll(def.getType()); + if (outputType instanceof GraphQLCompositeType) { + builder.addAll(resolvePossibleObjects((GraphQLCompositeType) outputType)); + } + } + + return builder.build(); } - collectFromSelectionSet(parameters, inlineFragment.getSelectionSet(), result, newAstTypeCondition, newPossibleObjects); - } - private void collectField(FieldCollectorNormalizedQueryParams parameters, - List result, - Field field, - Set possibleObjectTypes, - GraphQLCompositeType astTypeCondition - ) { - if (!conditionalNodes.shouldInclude(parameters.getCoercedVariableValues(), field.getDirectives())) { - return; + 
private ImmutableSet resolvePossibleObjects(GraphQLCompositeType type) { + if (type instanceof GraphQLObjectType) { + return ImmutableSet.of((GraphQLObjectType) type); + } else if (type instanceof GraphQLInterfaceType) { + return ImmutableSet.copyOf(graphQLSchema.getImplementations((GraphQLInterfaceType) type)); + } else if (type instanceof GraphQLUnionType) { + List unionTypes = ((GraphQLUnionType) type).getTypes(); + return ImmutableSet.copyOf(ImmutableKit.map(unionTypes, GraphQLObjectType.class::cast)); + } else { + return assertShouldNeverHappen(); + } } - // this means there is actually no possible type for this field and we are done - if (possibleObjectTypes.isEmpty()) { - return; + + private static class PossibleMerger { + ExecutableNormalizedField parent; + String resultKey; + + public PossibleMerger(ExecutableNormalizedField parent, String resultKey) { + this.parent = parent; + this.resultKey = resultKey; + } } - result.add(new CollectedField(field, possibleObjectTypes, astTypeCondition)); - } - private Set narrowDownPossibleObjects(Set currentOnes, - GraphQLCompositeType typeCondition, - GraphQLSchema graphQLSchema) { + private static class CollectedField { + Field field; + Set objectTypes; + GraphQLCompositeType astTypeCondition; - ImmutableSet resolvedTypeCondition = resolvePossibleObjects(typeCondition, graphQLSchema); - if (currentOnes.isEmpty()) { - return resolvedTypeCondition; + public CollectedField(Field field, Set objectTypes, GraphQLCompositeType astTypeCondition) { + this.field = field; + this.objectTypes = objectTypes; + this.astTypeCondition = astTypeCondition; + } } - // Faster intersection, as either set often has a size of 1. 
- return intersection(currentOnes, resolvedTypeCondition); - } + public static class CollectNFResult { + private final Collection children; + private final ImmutableListMultimap normalizedFieldToAstFields; - private ImmutableSet resolvePossibleObjects(List defs, GraphQLSchema graphQLSchema) { - ImmutableSet.Builder builder = ImmutableSet.builder(); + public CollectNFResult(Collection children, ImmutableListMultimap normalizedFieldToAstFields) { + this.children = children; + this.normalizedFieldToAstFields = normalizedFieldToAstFields; + } + } - for (GraphQLFieldDefinition def : defs) { - GraphQLUnmodifiedType outputType = unwrapAll(def.getType()); - if (outputType instanceof GraphQLCompositeType) { - builder.addAll(resolvePossibleObjects((GraphQLCompositeType) outputType, graphQLSchema)); + private static class FieldAndAstParent { + final Field field; + final GraphQLCompositeType astParentType; + + private FieldAndAstParent(Field field, GraphQLCompositeType astParentType) { + this.field = field; + this.astParentType = astParentType; } } - return builder.build(); - } + private static class CollectedFieldGroup { + Set objectTypes; + Set fields; - private ImmutableSet resolvePossibleObjects(GraphQLCompositeType type, GraphQLSchema graphQLSchema) { - if (type instanceof GraphQLObjectType) { - return ImmutableSet.of((GraphQLObjectType) type); - } else if (type instanceof GraphQLInterfaceType) { - return ImmutableSet.copyOf(graphQLSchema.getImplementations((GraphQLInterfaceType) type)); - } else if (type instanceof GraphQLUnionType) { - List types = ((GraphQLUnionType) type).getTypes(); - return ImmutableSet.copyOf(types); - } else { - return assertShouldNeverHappen(); + public CollectedFieldGroup(Set fields, Set objectTypes) { + this.fields = fields; + this.objectTypes = objectTypes; + } } } -} + +} \ No newline at end of file diff --git a/src/test/groovy/graphql/InterfacesImplementingInterfacesTest.groovy 
b/src/test/groovy/graphql/InterfacesImplementingInterfacesTest.groovy index b5813c4d0b..bb22d70461 100644 --- a/src/test/groovy/graphql/InterfacesImplementingInterfacesTest.groovy +++ b/src/test/groovy/graphql/InterfacesImplementingInterfacesTest.groovy @@ -893,8 +893,10 @@ class InterfacesImplementingInterfacesTest extends Specification { given: def graphQLSchema = createComplexSchema() + GraphQL graphQL = GraphQL.newGraphQL(graphQLSchema).build() + when: - def result = GraphQL.newGraphQL(graphQLSchema).build().execute(""" + String query = """ { nodeType: __type(name: "Node") { possibleTypes { @@ -902,7 +904,20 @@ class InterfacesImplementingInterfacesTest extends Specification { name } } - resourceType: __type(name: "Resource") { + } + """ + def result = graphQL.execute(query) + + then: + !result.errors + result.data == [ + nodeType: [possibleTypes: [[kind: 'OBJECT', name: 'File'], [kind: 'OBJECT', name: 'Image']]], + ] + + when: + query = """ + { + resourceType: __type(name: "Resource") { possibleTypes { kind name @@ -911,22 +926,35 @@ class InterfacesImplementingInterfacesTest extends Specification { kind name } - } - imageType: __type(name: "Image") { + } + } + """ + result = graphQL.execute(query) + + then: + !result.errors + result.data == [ + resourceType: [possibleTypes: [[kind: 'OBJECT', name: 'File'], [kind: 'OBJECT', name: 'Image']], interfaces: [[kind: 'INTERFACE', name: 'Node']]] + ] + + when: + + query = """ + { + imageType: __type(name: "Image") { interfaces { kind name } } - } - """) + } + """ + result = graphQL.execute(query) then: !result.errors result.data == [ - nodeType : [possibleTypes: [[kind: 'OBJECT', name: 'File'], [kind: 'OBJECT', name: 'Image']]], imageType : [interfaces: [[kind: 'INTERFACE', name: 'Resource'], [kind: 'INTERFACE', name: 'Node']]], - resourceType: [possibleTypes: [[kind: 'OBJECT', name: 'File'], [kind: 'OBJECT', name: 'Image']], interfaces: [[kind: 'INTERFACE', name: 'Node']]] ] } diff --git 
a/src/test/groovy/graphql/UnionTest.groovy b/src/test/groovy/graphql/UnionTest.groovy index 403f31d3d8..8edd7b2600 100644 --- a/src/test/groovy/graphql/UnionTest.groovy +++ b/src/test/groovy/graphql/UnionTest.groovy @@ -4,7 +4,7 @@ import spock.lang.Specification class UnionTest extends Specification { - def "can introspect on union and intersection types"() { + def "can introspect on union types"() { def query = """ { Named: __type(name: "Named") { @@ -15,15 +15,6 @@ class UnionTest extends Specification { possibleTypes { name } enumValues { name } inputFields { name } - } - Pet: __type(name: "Pet") { - kind - name - fields { name } - interfaces { name } - possibleTypes { name } - enumValues { name } - inputFields { name } } } """ @@ -42,8 +33,32 @@ class UnionTest extends Specification { ], enumValues : null, inputFields : null - ], - Pet : [ + ]] + when: + def executionResult = GraphQL.newGraphQL(GarfieldSchema.GarfieldSchema).build().execute(query) + + then: + executionResult.data == expectedResult + + + } + + def "can introspect on intersection types"() { + def query = """ + { + Pet: __type(name: "Pet") { + kind + name + fields { name } + interfaces { name } + possibleTypes { name } + enumValues { name } + inputFields { name } + } + } + """ + + def expectedResult = [Pet : [ kind : 'UNION', name : 'Pet', fields : null, diff --git a/src/test/groovy/graphql/execution/ConditionalNodesTest.groovy b/src/test/groovy/graphql/execution/ConditionalNodesTest.groovy index 7c76600727..b998ffd6f0 100644 --- a/src/test/groovy/graphql/execution/ConditionalNodesTest.groovy +++ b/src/test/groovy/graphql/execution/ConditionalNodesTest.groovy @@ -1,6 +1,6 @@ package graphql.execution - +import graphql.execution.conditional.ConditionalNodes import graphql.language.Argument import graphql.language.BooleanValue import graphql.language.Directive diff --git a/src/test/groovy/graphql/introspection/GoodFaithIntrospectionInstrumentationTest.groovy 
b/src/test/groovy/graphql/introspection/GoodFaithIntrospectionInstrumentationTest.groovy index f1ffc2c570..b77e1d76e8 100644 --- a/src/test/groovy/graphql/introspection/GoodFaithIntrospectionInstrumentationTest.groovy +++ b/src/test/groovy/graphql/introspection/GoodFaithIntrospectionInstrumentationTest.groovy @@ -3,6 +3,10 @@ package graphql.introspection import graphql.ExecutionInput import graphql.ExecutionResult import graphql.TestUtil +import graphql.execution.AbortExecutionException +import graphql.execution.CoercedVariables +import graphql.language.Document +import graphql.normalized.ExecutableNormalizedOperationFactory import spock.lang.Specification class GoodFaithIntrospectionInstrumentationTest extends Specification { @@ -12,10 +16,23 @@ class GoodFaithIntrospectionInstrumentationTest extends Specification { def setup() { GoodFaithIntrospection.enabledJvmWide(true) } + def cleanup() { GoodFaithIntrospection.enabledJvmWide(true) } + def "standard introspection query is inside limits just in general"() { + + when: + Document document = TestUtil.toDocument(IntrospectionQuery.INTROSPECTION_QUERY) + def eno = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphql.getGraphQLSchema(), document, + "IntrospectionQuery", CoercedVariables.emptyVariables()) + + then: + eno.getOperationFieldCount() < GoodFaithIntrospection.GOOD_FAITH_MAX_FIELDS_COUNT // currently 189 + eno.getOperationDepth() < GoodFaithIntrospection.GOOD_FAITH_MAX_DEPTH_COUNT // currently 13 + } + def "test asking for introspection in good faith"() { when: @@ -69,12 +86,25 @@ class GoodFaithIntrospectionInstrumentationTest extends Specification { alias1 : __type(name : "t1") { name } } """ | _ + // a case for __type with aliases + """ query badActor { + a1: __type(name : "t") { name } + a2 : __type(name : "t1") { name } + } + """ | _ // a case for schema repeated - dont ask twice """ query badActor { __schema { types { name} } alias1 : __schema { types { name} } } """ | _ + 
// a case for used aliases + """ query badActor { + a1: __schema { types { name} } + a2 : __schema { types { name} } + } + """ | _ + } def "mixed general queries and introspections will be stopped anyway"() { @@ -133,4 +163,70 @@ class GoodFaithIntrospectionInstrumentationTest extends Specification { !er.errors.isEmpty() er.errors[0] instanceof GoodFaithIntrospection.BadFaithIntrospectionError } + + def "can stop deep queries"() { + + when: + def query = createDeepQuery(depth) + def then = System.currentTimeMillis() + ExecutionResult er = graphql.execute(query) + def ms = System.currentTimeMillis()-then + + then: + !er.errors.isEmpty() + er.errors[0].class == targetError + er.data == null // it stopped hard - it did not continue to normal business + println "Took " + ms + "ms" + + where: + depth | targetError + 2 | GoodFaithIntrospection.BadFaithIntrospectionError.class + 10 | AbortExecutionException.class + 15 | AbortExecutionException.class + 20 | AbortExecutionException.class + 25 | AbortExecutionException.class + 50 | AbortExecutionException.class + 100 | AbortExecutionException.class + } + + String createDeepQuery(int depth = 25) { + def result = """ +query test { + __schema { + types { + ...F1 + } + } +} +""" + for (int i = 1; i < depth; i++) { + result += """ + fragment F$i on __Type { + fields { + type { + ...F${i + 1} + } + } + + ofType { + ...F${i + 1} + } +} + + +""" + } + result += """ + fragment F$depth on __Type { + fields { + type { +name + } + } +} + + +""" + return result + } } diff --git a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy index 9472680f47..0713fbf141 100644 --- a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy +++ b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy @@ -3,9 +3,12 @@ package graphql.normalized import graphql.ExecutionInput import 
graphql.GraphQL import graphql.TestUtil +import graphql.execution.AbortExecutionException import graphql.execution.CoercedVariables import graphql.execution.MergedField import graphql.execution.RawVariables +import graphql.execution.directives.QueryAppliedDirective +import graphql.introspection.IntrospectionQuery import graphql.language.Document import graphql.language.Field import graphql.language.FragmentDefinition @@ -18,6 +21,9 @@ import graphql.util.TraverserContext import graphql.util.TraverserVisitorStub import spock.lang.Specification +import java.util.stream.Collectors +import java.util.stream.IntStream + import static graphql.TestUtil.schema import static graphql.language.AstPrinter.printAst import static graphql.parser.Parser.parseValue @@ -109,8 +115,7 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -195,8 +200,7 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -275,8 +279,7 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = 
dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -326,8 +329,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTree(tree) expect: @@ -369,8 +372,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTree(tree) expect: @@ -419,8 +422,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTree(tree) expect: @@ -482,8 +485,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = 
dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -528,8 +531,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTree(tree) expect: @@ -572,8 +575,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTree(tree) expect: @@ -616,8 +619,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTree(tree) expect: @@ -648,8 +651,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = 
dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTree(tree) expect: @@ -699,8 +702,58 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) + + expect: + printedTree == ['-Query.pet: Pet', + '--[Bird, Cat, Dog].name: String' + ] + } + + def "query with fragment and type condition merged together 2"() { + def graphQLSchema = TestUtil.schema(""" + type Query { + pet : Pet + } + interface Pet { + name : String + } + + type Dog implements Pet { + name : String + } + + type Bird implements Pet { + name : String + } + + type Cat implements Pet { + name : String + } + """) + def query = """ + { + pet { + name + ... on Dog { + name + } + ... 
CatFrag + } + } + + fragment CatFrag on Cat { + name + } + """ + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -709,6 +762,7 @@ type Dog implements Animal{ ] } + def "query with interface in between"() { def graphQLSchema = schema(""" type Query { @@ -737,8 +791,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTree(tree) expect: @@ -781,8 +835,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - def dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -821,8 +875,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - def dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -869,8 +923,8 @@ type Dog implements Animal{ 
Document document = TestUtil.parseQuery(query) - def dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -896,6 +950,40 @@ type Dog implements Animal{ result } + List printTreeAndDirectives(ExecutableNormalizedOperation queryExecutionTree) { + def result = [] + Traverser traverser = Traverser.depthFirst({ it.getChildren() }) + traverser.traverse(queryExecutionTree.getTopLevelFields(), new TraverserVisitorStub() { + @Override + TraversalControl enter(TraverserContext context) { + ExecutableNormalizedField queryExecutionField = context.thisNode() + def queryDirectives = queryExecutionTree.getQueryDirectives(queryExecutionField) + + def fieldDetails = queryExecutionField.printDetails() + if (queryDirectives != null) { + def appliedDirectivesByName = queryDirectives.getImmediateAppliedDirectivesByName() + if (!appliedDirectivesByName.isEmpty()) { + fieldDetails += " " + printDirectives(appliedDirectivesByName) + } + } + result << fieldDetails + return TraversalControl.CONTINUE + } + + String printDirectives(Map> stringListMap) { + String s = stringListMap.collect { entry -> + entry.value.collect { + " @" + it.name + "(" + it.getArguments().collect { + it.name + " : " + '"' + it.value + '"' + }.join(",") + ")" + }.join(' ') + }.join(" ") + return s + } + }) + result + } + static List printTreeWithLevelInfo(ExecutableNormalizedOperation queryExecutionTree, GraphQLSchema schema) { def result = [] Traverser traverser = Traverser.depthFirst({ it.getChildren() }) @@ -938,8 +1026,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) def subFooField = (document.getDefinitions()[1] as 
FragmentDefinition).getSelectionSet().getSelections()[0] as Field - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def fieldToNormalizedField = tree.getFieldToNormalizedField() expect: @@ -981,8 +1069,8 @@ type Dog implements Animal{ def petsField = (document.getDefinitions()[0] as OperationDefinition).getSelectionSet().getSelections()[0] as Field def idField = petsField.getSelectionSet().getSelections()[0] as Field - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def fieldToNormalizedField = tree.getFieldToNormalizedField() @@ -1030,8 +1118,8 @@ type Dog implements Animal{ def schemaField = selections[2] as Field def typeField = selections[3] as Field - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def fieldToNormalizedField = tree.getFieldToNormalizedField() expect: @@ -1087,8 +1175,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, 
CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -1130,8 +1218,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTree(tree) expect: @@ -1158,8 +1246,8 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def normalizedFieldToMergedField = tree.getNormalizedFieldToMergedField() Traverser traverser = Traverser.depthFirst({ it.getChildren() }) List result = new ArrayList<>() @@ -1196,10 +1284,9 @@ type Dog implements Animal{ Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() when: - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def coordinatesToNormalizedFields = tree.coordinatesToNormalizedFields then: @@ -1297,8 +1384,8 @@ schema { Document document = TestUtil.parseQuery(mutation) - ExecutableNormalizedOperationFactory 
dependencyGraph = new ExecutableNormalizedOperationFactory() - def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) expect: @@ -1347,7 +1434,7 @@ schema { assertValidQuery(graphQLSchema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + def variables = [ var1: [bar: 123], var2: [foo: "foo", input2: [bar: 123]] @@ -1355,7 +1442,7 @@ schema { // the normalized arg value should be the same regardless of how the value was provided def expectedNormalizedArgValue = [foo: new NormalizedInputValue("String", parseValue('"foo"')), input2: new NormalizedInputValue("Input2", [bar: new NormalizedInputValue("Int", parseValue("123"))])] when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) def topLevelField = tree.getTopLevelFields().get(0) def secondField = topLevelField.getChildren().get(0) def arg1 = secondField.getNormalizedArgument("arg1") @@ -1394,9 +1481,9 @@ schema { assertValidQuery(graphQLSchema, query) def document = TestUtil.parseQuery(query) - def dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) then: def topLevelField = tree.getTopLevelFields().get(0) @@ -1429,13 +1516,13 @@ schema { 
assertValidQuery(graphQLSchema, query) def document = TestUtil.parseQuery(query) - def dependencyGraph = new ExecutableNormalizedOperationFactory() + def variables = [ varIds : null, otherVar: null, ] when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) then: def topLevelField = tree.getTopLevelFields().get(0) @@ -1485,9 +1572,9 @@ schema { ] assertValidQuery(graphQLSchema, query, variables) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) def topLevelField = tree.getTopLevelFields().get(0) def arg1 = topLevelField.getNormalizedArgument("arg1") def arg2 = topLevelField.getNormalizedArgument("arg2") @@ -1538,9 +1625,9 @@ schema { ] assertValidQuery(graphQLSchema, query, variables) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) def topLevelField = tree.getTopLevelFields().get(0) def arg1 = topLevelField.getNormalizedArgument("arg1") def arg2 = topLevelField.getNormalizedArgument("arg2") @@ -1593,9 +1680,9 @@ schema { ''' assertValidQuery(graphQLSchema, query) Document document = TestUtil.parseQuery(query) - 
ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) then: tree.normalizedFieldToMergedField.size() == 3 @@ -1651,17 +1738,19 @@ schema { ''' assertValidQuery(graphQLSchema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) - println String.join("\n", printTree(tree)) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) + def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) - /** - * This is a test for two fields with the same key (friend), - * but backed by two different fields (Cat.dogFriend,Dog.dogFriend) - * which end up being two different NormalizedField - */ then: + // the two friend fields are not in on ENF + printedTree == ['-Query.pets: Pet', + '--friend: Cat.catFriend: CatFriend', + '---CatFriend.catFriendName: String', + '--friend: Dog.dogFriend: DogFriend', + '---DogFriend.dogFriendName: String'] + tree.normalizedFieldToMergedField.size() == 5 tree.fieldToNormalizedField.size() == 7 } @@ -1697,18 +1786,23 @@ schema { ''' assertValidQuery(graphQLSchema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) + def tree = 
localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) then: + /** + * the two name fields are not merged, because they are backed by different fields with different arguments + * If the arguments are the same, it would be one ENF. + */ printedTree == ['-Query.pets: Pet', '--Cat.name: String', '--Dog.name: String' ] } + def "diverging fields with the same parent type on deeper level"() { given: def schema = schema(''' @@ -1768,9 +1862,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -1832,9 +1926,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -1889,9 +1983,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def 
tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -1964,9 +2058,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -2026,9 +2120,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -2068,9 +2162,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -2111,9 +2205,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new 
ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -2154,9 +2248,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -2229,9 +2323,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) then: @@ -2305,9 +2399,9 @@ schema { ''' assertValidQuery(schema, query) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables()) def printedTree = printTreeWithLevelInfo(tree, schema) 
then: @@ -2367,9 +2461,9 @@ schema { def variables = ["true": Boolean.TRUE, "false": Boolean.FALSE] assertValidQuery(graphQLSchema, query, variables) Document document = TestUtil.parseQuery(query) - ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: - def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) println String.join("\n", printTree(tree)) def printedTree = printTree(tree) @@ -2384,6 +2478,59 @@ schema { ] } + + def "query directives are captured is respected"() { + given: + String schema = """ + directive @fieldDirective(target : String!) on FIELD + directive @fieldXDirective(target : String!) on FIELD + + type Query { + pets: Pet + } + interface Pet { + name: String + } + type Cat implements Pet { + name: String + } + type Dog implements Pet { + name: String + } + """ + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = ''' + query q { + pets { + ... on Cat { + cName : name @fieldDirective(target : "Cat.name") + } + ... on Dog { + dName : name @fieldDirective(target : "Dog.name") @fieldXDirective(target : "Dog.name") + } + ... 
on Pet { + pName : name @fieldDirective(target : "Pet.name") + } + }} + ''' + + def variables = [:] + assertValidQuery(graphQLSchema, query, variables) + Document document = TestUtil.parseQuery(query) + + when: + def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) + def printedTree = printTreeAndDirectives(tree) + + then: + printedTree == ['Query.pets', + 'cName: Cat.name @fieldDirective(target : "Cat.name")', + 'dName: Dog.name @fieldDirective(target : "Dog.name") @fieldXDirective(target : "Dog.name")', + 'pName: [Cat, Dog].name @fieldDirective(target : "Pet.name")', + ] + } + def "missing argument"() { given: String schema = """ @@ -2406,4 +2553,589 @@ schema { printedTree == ['Query.hello'] tree.getTopLevelFields().get(0).getNormalizedArguments().isEmpty() } + + def "reused field via fragments"() { + String schema = """ + type Query { + pet: Pet + } + type Pet { + owner: Person + emergencyContact: Person + } + type Person { + name: String + } + """ + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = """ +{ pet { + owner { ...personName } + emergencyContact { ...personName } +}} +fragment personName on Person { + name +} + """ + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) + + expect: + printedTree == ['-Query.pet: Pet', + '--Pet.owner: Person', + '---Person.name: String', + '--Pet.emergencyContact: Person', + '---Person.name: String' + ] + + } + + + def "test interface fields with three different output types (covariance) on the implementations"() { + def graphQLSchema = schema(""" + interface Animal { + parent: Animal + name: String + } + type Cat implements Animal { + name: String + parent: Cat + } + type Dog implements Animal { + 
name: String + parent: Dog + isGoodBoy: Boolean + } + type Bird implements Animal { + name: String + parent: Bird + } + type Query { + animal: Animal + } + """) + + def query = """ + { + animal { + parent { + name + } + } + } + """ + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) + + expect: + printedTree == [ + "-Query.animal: Animal", + "--[Bird, Cat, Dog].parent: Bird, Cat, Dog", + "---[Bird, Cat, Dog].name: String", + ] + } + + def "covariants with union fields"() { + def graphQLSchema = schema(""" + type Query { + animal: Animal + } + interface Animal { + parent: DogOrCat + name: String + } + type Cat implements Animal { + name: String + parent: Cat + } + type Dog implements Animal { + name: String + parent: Dog + isGoodBoy: Boolean + } + union DogOrCat = Dog | Cat + """) + + def query = """ + { + animal { + parent { + __typename + } + } + } + """ + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + + def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) + + expect: + printedTree == [ + "-Query.animal: Animal", + "--[Cat, Dog].parent: Cat, Dog", + "---[Cat, Dog].__typename: String!", + ] + } + + def "query cannot exceed max depth"() { + String schema = """ + type Query { + animal: Animal + } + interface Animal { + name: String + friends: [Animal] + } + type Bird implements Animal { + name: String + friends: [Animal] + } + type Cat implements Animal { + name: String + friends: [Animal] + breed: String + } + type Dog implements Animal { + name: String + breed: String + friends: [Animal] + } + """ + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + // We 
generate two less fields than the given depth + // One is due to the top level field + // One is due to the leaf selection + def animalSubselection = IntStream.rangeClosed(1, queryDepth - 2) + .mapToObj { + "" + } + .reduce("CHILD") { acc, value -> + acc.replace("CHILD", "friends { CHILD }") + } + .replace("CHILD", "name") + + // Note: there is a total of 51 fields here + String query = """ + { + animal { + $animalSubselection + } + } + """ + + def limit = 50 + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + when: + Exception exception + try { + ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.emptyVariables(), + ExecutableNormalizedOperationFactory.Options.defaultOptions().maxChildrenDepth(limit)) + } catch (Exception e) { + exception = e + } + + then: + if (queryDepth > limit) { + assert exception != null + assert exception.message.contains("depth exceeded") + assert exception.message.contains("> 50") + } else { + assert exception == null + } + + where: + _ | queryDepth + _ | 49 + _ | 50 + _ | 51 + } + + def "big query is fine as long as depth is under limit"() { + String schema = """ + type Query { + animal: Animal + } + interface Animal { + name: String + friends: [Friend] + } + union Pet = Dog | Cat + type Friend { + name: String + isBirdOwner: Boolean + isCatOwner: Boolean + pets: [Pet] + } + type Bird implements Animal { + name: String + friends: [Friend] + } + type Cat implements Animal { + name: String + friends: [Friend] + breed: String + } + type Dog implements Animal { + name: String + breed: String + friends: [Friend] + } + """ + + def garbageFields = IntStream.range(0, 1000) + .mapToObj { + """test_$it: friends { name }""" + } + .collect(Collectors.joining("\n")) + + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = """ + { + animal { + name + otherName: name + ... 
on Animal { + name + } + ... on Cat { + name + friends { + ... on Friend { + isCatOwner + pets { + ... on Dog { + name + } + } + } + } + } + ... on Bird { + friends { + isBirdOwner + } + friends { + name + pets { + ... on Cat { + breed + } + } + } + } + ... on Dog { + name + } + $garbageFields + } + } + """ + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + when: + def result = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.emptyVariables(), + ExecutableNormalizedOperationFactory.Options.defaultOptions().maxChildrenDepth(5)) + + then: + noExceptionThrown() + } + + def "big query exceeding fields count"() { + String schema = """ + type Query { + animal: Animal + } + interface Animal { + name: String + friends: [Friend] + } + union Pet = Dog | Cat + type Friend { + name: String + isBirdOwner: Boolean + isCatOwner: Boolean + pets: [Pet] + } + type Bird implements Animal { + name: String + friends: [Friend] + } + type Cat implements Animal { + name: String + friends: [Friend] + breed: String + } + type Dog implements Animal { + name: String + breed: String + friends: [Friend] + } + """ + + def garbageFields = IntStream.range(0, 1000) + .mapToObj { + """test_$it: friends { name }""" + } + .collect(Collectors.joining("\n")) + + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = """ + { + animal { + name + otherName: name + ... on Animal { + name + } + ... on Cat { + name + friends { + ... on Friend { + isCatOwner + pets { + ... on Dog { + name + } + } + } + } + } + ... on Bird { + friends { + isBirdOwner + } + friends { + name + pets { + ... on Cat { + breed + } + } + } + } + ... 
on Dog { + name + } + $garbageFields + } + } + """ + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + when: + def result = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.emptyVariables(), + ExecutableNormalizedOperationFactory.Options.defaultOptions().maxFieldsCount(2013)) + + then: + def e = thrown(AbortExecutionException) + e.message == "Maximum field count exceeded. 2014 > 2013" + } + + def "small query exceeding fields count"() { + String schema = """ + type Query { + hello: String + } + """ + + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = """ {hello a1: hello}""" + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + when: + def result = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.emptyVariables(), + ExecutableNormalizedOperationFactory.Options.defaultOptions().maxFieldsCount(1)) + + then: + def e = thrown(AbortExecutionException) + e.message == "Maximum field count exceeded. 
2 > 1" + + + } + + def "query not exceeding fields count"() { + String schema = """ + type Query { + dogs: [Dog] + } + type Dog { + name: String + breed: String + } + """ + + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = """ {dogs{name breed }}""" + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + when: + def result = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.emptyVariables(), + ExecutableNormalizedOperationFactory.Options.defaultOptions().maxFieldsCount(3)) + + then: + notThrown(AbortExecutionException) + + + } + + def "query with meta fields exceeding fields count"() { + String schema = """ + type Query { + hello: String + } + """ + + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = IntrospectionQuery.INTROSPECTION_QUERY + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + when: + def result = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.emptyVariables(), + // This limit is set to 188 in version 21+ + // The new built-in directive @oneOf adds one node to introspection in version 21+ + ExecutableNormalizedOperationFactory.Options.defaultOptions().maxFieldsCount(187)) + println result.normalizedFieldToMergedField.size() + + then: + def e = thrown(AbortExecutionException) + // This line is different in version 21+, it is "Maximum field count exceeded. 189 > 188" + // The new built-in directive @oneOf adds one node to introspection in version 21+ + e.message == "Maximum field count exceeded. 
188 > 187" + } + + def "can capture depth and field count"() { + String schema = """ + type Query { + foo: Foo + } + + type Foo { + stop : String + bar : Bar + } + + type Bar { + stop : String + foo : Foo + } + """ + + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = "{ foo { bar { foo { bar { foo { stop bar { stop }}}}}}}" + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + when: + def result = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + null, + RawVariables.emptyVariables() + ) + + then: + result.getOperationDepth() == 7 + result.getOperationFieldCount() == 8 + } + + private static ExecutableNormalizedOperation localCreateExecutableNormalizedOperation( + GraphQLSchema graphQLSchema, + Document document, + String operationName, + CoercedVariables coercedVariableValues + ) { + + def options = ExecutableNormalizedOperationFactory.Options.defaultOptions() + + return ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, operationName, coercedVariableValues, options) + } + + private static ExecutableNormalizedOperation localCreateExecutableNormalizedOperationWithRawVariables( + GraphQLSchema graphQLSchema, + Document document, + String operationName, + RawVariables rawVariables + ) { + + def options = ExecutableNormalizedOperationFactory.Options.defaultOptions() + + return ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( + graphQLSchema, + document, + operationName, + rawVariables, + options + ) + } } + diff --git a/src/test/java/benchmark/BenchmarkUtils.java b/src/test/java/benchmark/BenchmarkUtils.java index fd7897e125..c94a6b6a53 100644 --- a/src/test/java/benchmark/BenchmarkUtils.java +++ b/src/test/java/benchmark/BenchmarkUtils.java @@ -1,11 +1,15 @@ package benchmark; -import com.google.common.io.Files; -import graphql.Assert; +import 
com.google.common.io.ByteStreams; -import java.io.File; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; import java.net.URL; import java.nio.charset.Charset; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; import java.util.concurrent.Callable; public class BenchmarkUtils { @@ -17,7 +21,12 @@ static String loadResource(String name) { if (resource == null) { throw new IllegalArgumentException("missing resource: " + name); } - return String.join("\n", Files.readLines(new File(resource.toURI()), Charset.defaultCharset())); + byte[] bytes; + try (InputStream inputStream = resource.openStream()) { + // In GraphQL Java version 21 and above, this Guava helper is replaced with Java 9's readAllBytes() + bytes = ByteStreams.toByteArray(inputStream); + } + return new String(bytes, Charset.defaultCharset()); }); } @@ -29,4 +38,50 @@ static T asRTE(Callable callable) { } } -} + public static void runInToolingForSomeTimeThenExit(Runnable setup, Runnable r, Runnable tearDown) { + int runForMillis = getRunForMillis(); + if (runForMillis <= 0) { + System.out.print("'runForMillis' environment var is not set - continuing \n"); + return; + } + System.out.printf("Running initial code in some tooling - runForMillis=%d \n", runForMillis); + System.out.print("Get your tooling in order and press enter..."); + readLine(); + System.out.print("Lets go...\n"); + setup.run(); + + DateTimeFormatter dtf = DateTimeFormatter.ofPattern("HH:mm:ss"); + long now, then = System.currentTimeMillis(); + do { + now = System.currentTimeMillis(); + long msLeft = runForMillis - (now - then); + System.out.printf("\t%s Running in loop... %s ms left\n", dtf.format(LocalDateTime.now()), msLeft); + r.run(); + now = System.currentTimeMillis(); + } while ((now - then) < runForMillis); + + tearDown.run(); + + System.out.printf("This ran for %d millis. 
Exiting...\n", System.currentTimeMillis() - then); + System.exit(0); + } + + private static void readLine() { + BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); + try { + br.readLine(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private static int getRunForMillis() { + String runFor = System.getenv("runForMillis"); + try { + return Integer.parseInt(runFor); + } catch (NumberFormatException e) { + return -1; + } + } + +} \ No newline at end of file diff --git a/src/test/java/benchmark/ENFBenchmarkDeepIntrospection.java b/src/test/java/benchmark/ENFBenchmarkDeepIntrospection.java new file mode 100644 index 0000000000..0ed09d4675 --- /dev/null +++ b/src/test/java/benchmark/ENFBenchmarkDeepIntrospection.java @@ -0,0 +1,122 @@ +package benchmark; + +import graphql.execution.CoercedVariables; +import graphql.language.Document; +import graphql.normalized.ExecutableNormalizedOperation; +import graphql.normalized.ExecutableNormalizedOperationFactory; +import graphql.parser.Parser; +import graphql.schema.GraphQLSchema; +import graphql.schema.idl.SchemaGenerator; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; + +import java.util.concurrent.TimeUnit; + +import static graphql.normalized.ExecutableNormalizedOperationFactory.*; + +@State(Scope.Benchmark) 
+@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3, time = 5) +@Fork(2) +public class ENFBenchmarkDeepIntrospection { + + @Param({"2", "10", "20"}) + int howDeep = 2; + + String query = ""; + + GraphQLSchema schema; + Document document; + + @Setup(Level.Trial) + public void setUp() { + String schemaString = BenchmarkUtils.loadResource("large-schema-2.graphqls"); + schema = SchemaGenerator.createdMockedSchema(schemaString); + + query = createDeepQuery(howDeep); + document = Parser.parse(query); + } + @Benchmark + @BenchmarkMode(Mode.AverageTime) + @OutputTimeUnit(TimeUnit.MILLISECONDS) + public ExecutableNormalizedOperation benchMarkAvgTime() { + ExecutableNormalizedOperationFactory.Options options = ExecutableNormalizedOperationFactory.Options.defaultOptions(); + ExecutableNormalizedOperation executableNormalizedOperation = createExecutableNormalizedOperation(schema, + document, + null, + CoercedVariables.emptyVariables(), + options); + return executableNormalizedOperation; + } + + public static void main(String[] args) throws RunnerException { + runAtStartup(); + + Options opt = new OptionsBuilder() + .include("benchmark.ENFBenchmarkDeepIntrospection") + .build(); + + new Runner(opt).run(); + } + + private static void runAtStartup() { + + ENFBenchmarkDeepIntrospection benchmarkIntrospection = new ENFBenchmarkDeepIntrospection(); + benchmarkIntrospection.howDeep = 2; + + BenchmarkUtils.runInToolingForSomeTimeThenExit( + benchmarkIntrospection::setUp, + () -> { while (true) { benchmarkIntrospection.benchMarkAvgTime(); }}, + () ->{} + ); + } + + + + private static String createDeepQuery(int depth) { + String result = "query test {\n" + + " __schema {\n" + + " types {\n" + + " ...F1\n" + + " }\n" + + " }\n" + + "}\n"; + + for (int i = 1; i < depth; i++) { + result += " fragment F" + i + " on __Type {\n" + + " fields {\n" + + " type {\n" + + " ...F" + (i + 1) +"\n" + + " }\n" + + " }\n" + + "\n" + + " ofType {\n" + + " ...F"+ (i + 1) + "\n" + + " }\n" + + 
" }\n"; + } + result += " fragment F" + depth + " on __Type {\n" + + " fields {\n" + + " type {\n" + + "name\n" + + " }\n" + + " }\n" + + "}\n"; + return result; + } + +}