diff --git a/.github/workflows/stale-pr-issue.yml b/.github/workflows/stale-pr-issue.yml
index f36da60e2a..540f31aa80 100644
--- a/.github/workflows/stale-pr-issue.yml
+++ b/.github/workflows/stale-pr-issue.yml
@@ -8,6 +8,7 @@ on:
- cron: '0 0 * * *'
permissions:
+ actions: write
issues: write
pull-requests: write
diff --git a/agent-test/build.gradle b/agent-test/build.gradle
index 2ce1e1892c..d447dc42fe 100644
--- a/agent-test/build.gradle
+++ b/agent-test/build.gradle
@@ -4,12 +4,12 @@ plugins {
dependencies {
implementation(rootProject)
- implementation("net.bytebuddy:byte-buddy-agent:1.14.15")
+ implementation("net.bytebuddy:byte-buddy-agent:1.14.18")
- testImplementation 'org.junit.jupiter:junit-jupiter:5.10.2'
+ testImplementation 'org.junit.jupiter:junit-jupiter:5.10.3'
testRuntimeOnly 'org.junit.platform:junit-platform-launcher'
- testImplementation("org.assertj:assertj-core:3.25.3")
+ testImplementation("org.assertj:assertj-core:3.26.3")
}
diff --git a/agent/build.gradle b/agent/build.gradle
index c81c0e4d7c..1dfeba3f27 100644
--- a/agent/build.gradle
+++ b/agent/build.gradle
@@ -6,7 +6,7 @@ plugins {
}
dependencies {
- implementation("net.bytebuddy:byte-buddy:1.14.15")
+ implementation("net.bytebuddy:byte-buddy:1.14.18")
// graphql-java itself
implementation(rootProject)
}
diff --git a/build.gradle b/build.gradle
index b3fdb18952..07a6456b03 100644
--- a/build.gradle
+++ b/build.gradle
@@ -107,16 +107,17 @@ dependencies {
implementation 'com.google.guava:guava:' + guavaVersion
testImplementation group: 'junit', name: 'junit', version: '4.13.2'
testImplementation 'org.spockframework:spock-core:2.0-groovy-3.0'
- testImplementation 'org.codehaus.groovy:groovy:3.0.21'
- testImplementation 'org.codehaus.groovy:groovy-json:3.0.21'
+ testImplementation 'org.codehaus.groovy:groovy:3.0.22'
+ testImplementation 'org.codehaus.groovy:groovy-json:3.0.22'
testImplementation 'com.google.code.gson:gson:2.11.0'
- testImplementation 'org.eclipse.jetty:jetty-server:11.0.21'
- testImplementation 'com.fasterxml.jackson.core:jackson-databind:2.17.1'
+ testImplementation 'org.eclipse.jetty:jetty-server:11.0.22'
+ testImplementation 'com.fasterxml.jackson.core:jackson-databind:2.17.2'
testImplementation 'org.awaitility:awaitility-groovy:4.2.0'
testImplementation 'com.github.javafaker:javafaker:1.0.2'
testImplementation 'org.reactivestreams:reactive-streams-tck:' + reactiveStreamsVersion
testImplementation "io.reactivex.rxjava2:rxjava:2.2.21"
+ testImplementation "io.projectreactor:reactor-core:3.6.8"
testImplementation 'org.testng:testng:7.10.2' // use for reactive streams test inheritance
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
index d64cd49177..2c3521197d 100644
Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index a80b22ce5c..09523c0e54 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.9-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
diff --git a/gradlew b/gradlew
index 1aa94a4269..f5feea6d6b 100755
--- a/gradlew
+++ b/gradlew
@@ -15,6 +15,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+# SPDX-License-Identifier: Apache-2.0
+#
##############################################################################
#
@@ -55,7 +57,7 @@
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
-# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
+# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
@@ -84,7 +86,8 @@ done
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
-APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit
+APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s
+' "$PWD" ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
diff --git a/gradlew.bat b/gradlew.bat
index 6689b85bee..9b42019c79 100644
--- a/gradlew.bat
+++ b/gradlew.bat
@@ -13,6 +13,8 @@
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
+@rem SPDX-License-Identifier: Apache-2.0
+@rem
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@@ -43,11 +45,11 @@ set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute
-echo.
-echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
+echo. 1>&2
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
+echo. 1>&2
+echo Please set the JAVA_HOME variable in your environment to match the 1>&2
+echo location of your Java installation. 1>&2
goto fail
@@ -57,11 +59,11 @@ set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
-echo.
-echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
+echo. 1>&2
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
+echo. 1>&2
+echo Please set the JAVA_HOME variable in your environment to match the 1>&2
+echo location of your Java installation. 1>&2
goto fail
diff --git a/src/main/java/graphql/Directives.java b/src/main/java/graphql/Directives.java
index 8e0c81661e..da5bfc80ba 100644
--- a/src/main/java/graphql/Directives.java
+++ b/src/main/java/graphql/Directives.java
@@ -31,16 +31,25 @@
@PublicApi
public class Directives {
- private static final String SPECIFIED_BY = "specifiedBy";
private static final String DEPRECATED = "deprecated";
+ private static final String INCLUDE = "include";
+ private static final String SKIP = "skip";
+ private static final String SPECIFIED_BY = "specifiedBy";
private static final String ONE_OF = "oneOf";
private static final String DEFER = "defer";
- public static final String NO_LONGER_SUPPORTED = "No longer supported";
public static final DirectiveDefinition DEPRECATED_DIRECTIVE_DEFINITION;
+ public static final DirectiveDefinition INCLUDE_DIRECTIVE_DEFINITION;
+ public static final DirectiveDefinition SKIP_DIRECTIVE_DEFINITION;
public static final DirectiveDefinition SPECIFIED_BY_DIRECTIVE_DEFINITION;
@ExperimentalApi
public static final DirectiveDefinition ONE_OF_DIRECTIVE_DEFINITION;
+ @ExperimentalApi
+ public static final DirectiveDefinition DEFER_DIRECTIVE_DEFINITION;
+
+ public static final String BOOLEAN = "Boolean";
+ public static final String STRING = "String";
+ public static final String NO_LONGER_SUPPORTED = "No longer supported";
static {
DEPRECATED_DIRECTIVE_DEFINITION = DirectiveDefinition.newDirectiveDefinition()
@@ -54,11 +63,39 @@ public class Directives {
newInputValueDefinition()
.name("reason")
.description(createDescription("The reason for the deprecation"))
- .type(newTypeName().name("String").build())
+ .type(newTypeName().name(STRING).build())
.defaultValue(StringValue.newStringValue().value(NO_LONGER_SUPPORTED).build())
.build())
.build();
+ INCLUDE_DIRECTIVE_DEFINITION = DirectiveDefinition.newDirectiveDefinition()
+ .name(INCLUDE)
+ .directiveLocation(newDirectiveLocation().name(FRAGMENT_SPREAD.name()).build())
+ .directiveLocation(newDirectiveLocation().name(INLINE_FRAGMENT.name()).build())
+ .directiveLocation(newDirectiveLocation().name(FIELD.name()).build())
+ .description(createDescription("Directs the executor to include this field or fragment only when the `if` argument is true"))
+ .inputValueDefinition(
+ newInputValueDefinition()
+ .name("if")
+ .description(createDescription("Included when true."))
+ .type(newNonNullType(newTypeName().name(BOOLEAN).build()).build())
+ .build())
+ .build();
+
+ SKIP_DIRECTIVE_DEFINITION = DirectiveDefinition.newDirectiveDefinition()
+ .name(SKIP)
+ .directiveLocation(newDirectiveLocation().name(FRAGMENT_SPREAD.name()).build())
+ .directiveLocation(newDirectiveLocation().name(INLINE_FRAGMENT.name()).build())
+ .directiveLocation(newDirectiveLocation().name(FIELD.name()).build())
+ .description(createDescription("Directs the executor to skip this field or fragment when the `if` argument is true."))
+ .inputValueDefinition(
+ newInputValueDefinition()
+ .name("if")
+ .description(createDescription("Skipped when true."))
+ .type(newNonNullType(newTypeName().name(BOOLEAN).build()).build())
+ .build())
+ .build();
+
SPECIFIED_BY_DIRECTIVE_DEFINITION = DirectiveDefinition.newDirectiveDefinition()
.name(SPECIFIED_BY)
.directiveLocation(newDirectiveLocation().name(SCALAR.name()).build())
@@ -67,7 +104,7 @@ public class Directives {
newInputValueDefinition()
.name("url")
.description(createDescription("The URL that specifies the behaviour of this scalar."))
- .type(newNonNullType(newTypeName().name("String").build()).build())
+ .type(newNonNullType(newTypeName().name(STRING).build()).build())
.build())
.build();
@@ -76,6 +113,26 @@ public class Directives {
.directiveLocation(newDirectiveLocation().name(INPUT_OBJECT.name()).build())
.description(createDescription("Indicates an Input Object is a OneOf Input Object."))
.build();
+
+ DEFER_DIRECTIVE_DEFINITION = DirectiveDefinition.newDirectiveDefinition()
+ .name(DEFER)
+ .directiveLocation(newDirectiveLocation().name(FRAGMENT_SPREAD.name()).build())
+ .directiveLocation(newDirectiveLocation().name(INLINE_FRAGMENT.name()).build())
+ .description(createDescription("This directive allows results to be deferred during execution"))
+ .inputValueDefinition(
+ newInputValueDefinition()
+ .name("if")
+ .description(createDescription("Deferred behaviour is controlled by this argument"))
+ .type(newNonNullType(newTypeName().name(BOOLEAN).build()).build())
+ .defaultValue(BooleanValue.newBooleanValue(true).build())
+ .build())
+ .inputValueDefinition(
+ newInputValueDefinition()
+ .name("label")
+ .description(createDescription("A unique label that represents the fragment being deferred"))
+ .type(newTypeName().name(STRING).build())
+ .build())
+ .build();
}
/**
@@ -104,33 +161,36 @@ public class Directives {
.type(GraphQLString)
.description("A unique label that represents the fragment being deferred")
)
+ .definition(DEFER_DIRECTIVE_DEFINITION)
.build();
public static final GraphQLDirective IncludeDirective = GraphQLDirective.newDirective()
- .name("include")
+ .name(INCLUDE)
.description("Directs the executor to include this field or fragment only when the `if` argument is true")
.argument(newArgument()
.name("if")
.type(nonNull(GraphQLBoolean))
.description("Included when true."))
.validLocations(FRAGMENT_SPREAD, INLINE_FRAGMENT, FIELD)
+ .definition(INCLUDE_DIRECTIVE_DEFINITION)
.build();
public static final GraphQLDirective SkipDirective = GraphQLDirective.newDirective()
- .name("skip")
+ .name(SKIP)
.description("Directs the executor to skip this field or fragment when the `if` argument is true.")
.argument(newArgument()
.name("if")
.type(nonNull(GraphQLBoolean))
.description("Skipped when true."))
.validLocations(FRAGMENT_SPREAD, INLINE_FRAGMENT, FIELD)
+ .definition(SKIP_DIRECTIVE_DEFINITION)
.build();
/**
* The "deprecated" directive is special and is always available in a graphql schema
*
- * See https://graphql.github.io/graphql-spec/June2018/#sec--deprecated
+ * See the GraphQL specification for @deprecated
*/
public static final GraphQLDirective DeprecatedDirective = GraphQLDirective.newDirective()
.name(DEPRECATED)
diff --git a/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java b/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java
index 2a2b237aeb..c2f0bebd3f 100644
--- a/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java
+++ b/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java
@@ -30,6 +30,7 @@
import graphql.util.TraversalControl;
import graphql.util.TraverserContext;
+import java.util.Collections;
import java.util.Locale;
import java.util.Map;
@@ -50,13 +51,20 @@ public class NodeVisitorWithTypeTracking extends NodeVisitorStub {
private final GraphQLSchema schema;
private final Map fragmentsByName;
private final ConditionalNodes conditionalNodes = new ConditionalNodes();
-
- public NodeVisitorWithTypeTracking(QueryVisitor preOrderCallback, QueryVisitor postOrderCallback, Map variables, GraphQLSchema schema, Map fragmentsByName) {
+ private final QueryTraversalOptions options;
+
+ public NodeVisitorWithTypeTracking(QueryVisitor preOrderCallback,
+ QueryVisitor postOrderCallback,
+ Map variables,
+ GraphQLSchema schema,
+ Map fragmentsByName,
+ QueryTraversalOptions options) {
this.preOrderCallback = preOrderCallback;
this.postOrderCallback = postOrderCallback;
this.variables = variables;
this.schema = schema;
this.fragmentsByName = fragmentsByName;
+ this.options = options;
}
@Override
@@ -156,12 +164,17 @@ public TraversalControl visitField(Field field, TraverserContext context)
boolean isTypeNameIntrospectionField = fieldDefinition == schema.getIntrospectionTypenameFieldDefinition();
GraphQLFieldsContainer fieldsContainer = !isTypeNameIntrospectionField ? (GraphQLFieldsContainer) unwrapAll(parentEnv.getOutputType()) : null;
GraphQLCodeRegistry codeRegistry = schema.getCodeRegistry();
- Map argumentValues = ValuesResolver.getArgumentValues(codeRegistry,
- fieldDefinition.getArguments(),
- field.getArguments(),
- CoercedVariables.of(variables),
- GraphQLContext.getDefault(),
- Locale.getDefault());
+ Map argumentValues;
+ if (options.isCoerceFieldArguments()) {
+ argumentValues = ValuesResolver.getArgumentValues(codeRegistry,
+ fieldDefinition.getArguments(),
+ field.getArguments(),
+ CoercedVariables.of(variables),
+ GraphQLContext.getDefault(),
+ Locale.getDefault());
+ } else {
+ argumentValues = Collections.emptyMap();
+ }
QueryVisitorFieldEnvironment environment = new QueryVisitorFieldEnvironmentImpl(isTypeNameIntrospectionField,
field,
fieldDefinition,
diff --git a/src/main/java/graphql/analysis/QueryTransformer.java b/src/main/java/graphql/analysis/QueryTransformer.java
index 9c45902dae..eed41818e9 100644
--- a/src/main/java/graphql/analysis/QueryTransformer.java
+++ b/src/main/java/graphql/analysis/QueryTransformer.java
@@ -36,20 +36,22 @@ public class QueryTransformer {
private final GraphQLSchema schema;
private final Map fragmentsByName;
private final Map variables;
-
private final GraphQLCompositeType rootParentType;
+ private final QueryTraversalOptions options;
private QueryTransformer(GraphQLSchema schema,
Node root,
GraphQLCompositeType rootParentType,
Map fragmentsByName,
- Map variables) {
+ Map variables,
+ QueryTraversalOptions options) {
this.schema = assertNotNull(schema, () -> "schema can't be null");
this.variables = assertNotNull(variables, () -> "variables can't be null");
this.root = assertNotNull(root, () -> "root can't be null");
this.rootParentType = assertNotNull(rootParentType);
this.fragmentsByName = assertNotNull(fragmentsByName, () -> "fragmentsByName can't be null");
+ this.options = assertNotNull(options, () -> "options can't be null");
}
/**
@@ -65,12 +67,17 @@ private QueryTransformer(GraphQLSchema schema,
*/
public Node transform(QueryVisitor queryVisitor) {
QueryVisitor noOp = new QueryVisitorStub();
- NodeVisitorWithTypeTracking nodeVisitor = new NodeVisitorWithTypeTracking(queryVisitor, noOp, variables, schema, fragmentsByName);
+ NodeVisitorWithTypeTracking nodeVisitor = new NodeVisitorWithTypeTracking(queryVisitor,
+ noOp,
+ variables,
+ schema,
+ fragmentsByName,
+ options);
Map, Object> rootVars = new LinkedHashMap<>();
rootVars.put(QueryTraversalContext.class, new QueryTraversalContext(rootParentType, null, null, GraphQLContext.getDefault()));
- TraverserVisitor nodeTraverserVisitor = new TraverserVisitor() {
+ TraverserVisitor nodeTraverserVisitor = new TraverserVisitor<>() {
@Override
public TraversalControl enter(TraverserContext context) {
@@ -98,6 +105,7 @@ public static class Builder {
private Node root;
private GraphQLCompositeType rootParentType;
private Map fragmentsByName;
+ private QueryTraversalOptions options = QueryTraversalOptions.defaultOptions();
/**
@@ -160,8 +168,25 @@ public Builder fragmentsByName(Map fragmentsByName)
return this;
}
+ /**
+ * Sets the options to use while traversing
+ *
+ * @param options the options to use
+ * @return this builder
+ */
+ public Builder options(QueryTraversalOptions options) {
+ this.options = assertNotNull(options, () -> "options can't be null");
+ return this;
+ }
+
public QueryTransformer build() {
- return new QueryTransformer(schema, root, rootParentType, fragmentsByName, variables);
+ return new QueryTransformer(
+ schema,
+ root,
+ rootParentType,
+ fragmentsByName,
+ variables,
+ options);
}
}
}
diff --git a/src/main/java/graphql/analysis/QueryTraversalOptions.java b/src/main/java/graphql/analysis/QueryTraversalOptions.java
new file mode 100644
index 0000000000..7ce73f05ce
--- /dev/null
+++ b/src/main/java/graphql/analysis/QueryTraversalOptions.java
@@ -0,0 +1,31 @@
+package graphql.analysis;
+
+import graphql.PublicApi;
+
+/**
+ * This options object controls how {@link QueryTraverser} works
+ */
+@PublicApi
+public class QueryTraversalOptions {
+
+ private final boolean coerceFieldArguments;
+
+ private QueryTraversalOptions(boolean coerceFieldArguments) {
+ this.coerceFieldArguments = coerceFieldArguments;
+ }
+
+ /**
+ * @return true if field arguments should be coerced. This is true by default.
+ */
+ public boolean isCoerceFieldArguments() {
+ return coerceFieldArguments;
+ }
+
+ public static QueryTraversalOptions defaultOptions() {
+ return new QueryTraversalOptions(true);
+ }
+
+ public QueryTraversalOptions coerceFieldArguments(boolean coerceFieldArguments) {
+ return new QueryTraversalOptions(coerceFieldArguments);
+ }
+}
diff --git a/src/main/java/graphql/analysis/QueryTraverser.java b/src/main/java/graphql/analysis/QueryTraverser.java
index 0ec067595b..2f543e5b43 100644
--- a/src/main/java/graphql/analysis/QueryTraverser.java
+++ b/src/main/java/graphql/analysis/QueryTraverser.java
@@ -49,23 +49,29 @@ public class QueryTraverser {
private CoercedVariables coercedVariables;
private final GraphQLCompositeType rootParentType;
+ private final QueryTraversalOptions options;
private QueryTraverser(GraphQLSchema schema,
Document document,
String operation,
- CoercedVariables coercedVariables) {
+ CoercedVariables coercedVariables,
+ QueryTraversalOptions options
+ ) {
this.schema = schema;
NodeUtil.GetOperationResult getOperationResult = NodeUtil.getOperation(document, operation);
this.fragmentsByName = getOperationResult.fragmentsByName;
this.roots = singletonList(getOperationResult.operationDefinition);
this.rootParentType = getRootTypeFromOperation(getOperationResult.operationDefinition);
this.coercedVariables = coercedVariables;
+ this.options = options;
}
private QueryTraverser(GraphQLSchema schema,
Document document,
String operation,
- RawVariables rawVariables) {
+ RawVariables rawVariables,
+ QueryTraversalOptions options
+ ) {
this.schema = schema;
NodeUtil.GetOperationResult getOperationResult = NodeUtil.getOperation(document, operation);
List variableDefinitions = getOperationResult.operationDefinition.getVariableDefinitions();
@@ -73,18 +79,22 @@ private QueryTraverser(GraphQLSchema schema,
this.roots = singletonList(getOperationResult.operationDefinition);
this.rootParentType = getRootTypeFromOperation(getOperationResult.operationDefinition);
this.coercedVariables = ValuesResolver.coerceVariableValues(schema, variableDefinitions, rawVariables, GraphQLContext.getDefault(), Locale.getDefault());
+ this.options = options;
}
private QueryTraverser(GraphQLSchema schema,
Node root,
GraphQLCompositeType rootParentType,
Map fragmentsByName,
- CoercedVariables coercedVariables) {
+ CoercedVariables coercedVariables,
+ QueryTraversalOptions options
+ ) {
this.schema = schema;
this.roots = Collections.singleton(root);
this.rootParentType = rootParentType;
this.fragmentsByName = fragmentsByName;
this.coercedVariables = coercedVariables;
+ this.options = options;
}
public Object visitDepthFirst(QueryVisitor queryVisitor) {
@@ -191,7 +201,12 @@ private Object visitImpl(QueryVisitor visitFieldCallback, Boolean preOrder) {
}
NodeTraverser nodeTraverser = new NodeTraverser(rootVars, this::childrenOf);
- NodeVisitorWithTypeTracking nodeVisitorWithTypeTracking = new NodeVisitorWithTypeTracking(preOrderCallback, postOrderCallback, coercedVariables.toMap(), schema, fragmentsByName);
+ NodeVisitorWithTypeTracking nodeVisitorWithTypeTracking = new NodeVisitorWithTypeTracking(preOrderCallback,
+ postOrderCallback,
+ coercedVariables.toMap(),
+ schema,
+ fragmentsByName,
+ options);
return nodeTraverser.depthFirst(nodeVisitorWithTypeTracking, roots);
}
@@ -210,6 +225,7 @@ public static class Builder {
private Node root;
private GraphQLCompositeType rootParentType;
private Map fragmentsByName;
+ private QueryTraversalOptions options = QueryTraversalOptions.defaultOptions();
/**
@@ -313,6 +329,17 @@ public Builder fragmentsByName(Map fragmentsByName)
return this;
}
+ /**
+ * Sets the options to use while traversing
+ *
+ * @param options the options to use
+ * @return this builder
+ */
+ public Builder options(QueryTraversalOptions options) {
+ this.options = assertNotNull(options, () -> "options can't be null");
+ return this;
+ }
+
/**
* @return a built {@link QueryTraverser} object
*/
@@ -320,17 +347,35 @@ public QueryTraverser build() {
checkState();
if (document != null) {
if (rawVariables != null) {
- return new QueryTraverser(schema, document, operation, rawVariables);
+ return new QueryTraverser(schema,
+ document,
+ operation,
+ rawVariables,
+ options);
}
- return new QueryTraverser(schema, document, operation, coercedVariables);
+ return new QueryTraverser(schema,
+ document,
+ operation,
+ coercedVariables,
+ options);
} else {
if (rawVariables != null) {
// When traversing with an arbitrary root, there is no variable definition context available
// Thus, the variables must have already been coerced
// Retaining this builder for backwards compatibility
- return new QueryTraverser(schema, root, rootParentType, fragmentsByName, CoercedVariables.of(rawVariables.toMap()));
+ return new QueryTraverser(schema,
+ root,
+ rootParentType,
+ fragmentsByName,
+ CoercedVariables.of(rawVariables.toMap()),
+ options);
}
- return new QueryTraverser(schema, root, rootParentType, fragmentsByName, coercedVariables);
+ return new QueryTraverser(schema,
+ root,
+ rootParentType,
+ fragmentsByName,
+ coercedVariables,
+ options);
}
}
diff --git a/src/main/java/graphql/execution/CoercedVariables.java b/src/main/java/graphql/execution/CoercedVariables.java
index a0d8c038dd..feeed4fb56 100644
--- a/src/main/java/graphql/execution/CoercedVariables.java
+++ b/src/main/java/graphql/execution/CoercedVariables.java
@@ -36,4 +36,9 @@ public static CoercedVariables emptyVariables() {
public static CoercedVariables of(Map coercedVariables) {
return new CoercedVariables(coercedVariables);
}
+
+ @Override
+ public String toString() {
+ return coercedVariables.toString();
+ }
}
diff --git a/src/main/java/graphql/execution/ExecutionContext.java b/src/main/java/graphql/execution/ExecutionContext.java
index e459cbe0ad..c122f9d2c3 100644
--- a/src/main/java/graphql/execution/ExecutionContext.java
+++ b/src/main/java/graphql/execution/ExecutionContext.java
@@ -86,6 +86,7 @@ public class ExecutionContext {
this.errors.set(builder.errors);
this.localContext = builder.localContext;
this.executionInput = builder.executionInput;
+ this.dataLoaderDispatcherStrategy = builder.dataLoaderDispatcherStrategy;
this.queryTree = FpKit.interThreadMemoize(() -> ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, operationDefinition, fragmentsByName, coercedVariables));
}
diff --git a/src/main/java/graphql/execution/ExecutionContextBuilder.java b/src/main/java/graphql/execution/ExecutionContextBuilder.java
index 7220e8dee7..b60c793b20 100644
--- a/src/main/java/graphql/execution/ExecutionContextBuilder.java
+++ b/src/main/java/graphql/execution/ExecutionContextBuilder.java
@@ -45,6 +45,7 @@ public class ExecutionContextBuilder {
ValueUnboxer valueUnboxer;
Object localContext;
ExecutionInput executionInput;
+ DataLoaderDispatchStrategy dataLoaderDispatcherStrategy = DataLoaderDispatchStrategy.NO_OP;
/**
* @return a new builder of {@link graphql.execution.ExecutionContext}s
@@ -90,6 +91,7 @@ public ExecutionContextBuilder() {
errors = ImmutableList.copyOf(other.getErrors());
valueUnboxer = other.getValueUnboxer();
executionInput = other.getExecutionInput();
+ dataLoaderDispatcherStrategy = other.getDataLoaderDispatcherStrategy();
}
public ExecutionContextBuilder instrumentation(Instrumentation instrumentation) {
@@ -203,6 +205,12 @@ public ExecutionContextBuilder executionInput(ExecutionInput executionInput) {
return this;
}
+ @Internal
+ public ExecutionContextBuilder dataLoaderDispatcherStrategy(DataLoaderDispatchStrategy dataLoaderDispatcherStrategy) {
+ this.dataLoaderDispatcherStrategy = dataLoaderDispatcherStrategy;
+ return this;
+ }
+
public ExecutionContextBuilder resetErrors() {
this.errors = emptyList();
return this;
diff --git a/src/main/java/graphql/execution/ExecutionStrategy.java b/src/main/java/graphql/execution/ExecutionStrategy.java
index de1ae0e178..36c2e78956 100644
--- a/src/main/java/graphql/execution/ExecutionStrategy.java
+++ b/src/main/java/graphql/execution/ExecutionStrategy.java
@@ -550,7 +550,9 @@ protected FetchedValue unboxPossibleDataFetcherResult(ExecutionContext execution
if (result instanceof DataFetcherResult) {
DataFetcherResult> dataFetcherResult = (DataFetcherResult>) result;
- executionContext.addErrors(dataFetcherResult.getErrors());
+
+ addErrorsToRightContext(dataFetcherResult.getErrors(), parameters, executionContext);
+
addExtensionsIfPresent(executionContext, dataFetcherResult);
Object localContext = dataFetcherResult.getLocalContext();
@@ -586,12 +588,6 @@ protected CompletableFuture handleFetchingException(
.exception(e)
.build();
- parameters.getDeferredCallContext().onFetchingException(
- parameters.getPath(),
- parameters.getField().getSingleField().getSourceLocation(),
- e
- );
-
try {
return asyncHandleException(dataFetcherExceptionHandler, handlerParameters);
} catch (Exception handlerException) {
@@ -714,9 +710,8 @@ protected FieldValueInfo completeValue(ExecutionContext executionContext, Execut
private void handleUnresolvedTypeProblem(ExecutionContext context, ExecutionStrategyParameters parameters, UnresolvedTypeException e) {
UnresolvedTypeError error = new UnresolvedTypeError(parameters.getPath(), parameters.getExecutionStepInfo(), e);
- context.addError(error);
- parameters.getDeferredCallContext().onError(error);
+ addErrorToRightContext(error, parameters, context);
}
/**
@@ -745,7 +740,7 @@ private FieldValueInfo getFieldValueInfoForNull(ExecutionStrategyParameters para
protected Object /* CompletableFuture | Object */
completeValueForNull(ExecutionStrategyParameters parameters) {
try {
- return parameters.getNonNullFieldValidator().validate(parameters.getPath(), null);
+ return parameters.getNonNullFieldValidator().validate(parameters, null);
} catch (Exception e) {
return Async.exceptionallyCompletedFuture(e);
}
@@ -764,7 +759,7 @@ private FieldValueInfo getFieldValueInfoForNull(ExecutionStrategyParameters para
protected FieldValueInfo completeValueForList(ExecutionContext executionContext, ExecutionStrategyParameters parameters, Object result) {
Iterable resultIterable = toIterable(executionContext, parameters, result);
try {
- resultIterable = parameters.getNonNullFieldValidator().validate(parameters.getPath(), resultIterable);
+ resultIterable = parameters.getNonNullFieldValidator().validate(parameters, resultIterable);
} catch (NonNullableFieldWasNullException e) {
return new FieldValueInfo(LIST, exceptionallyCompletedFuture(e));
}
@@ -891,7 +886,7 @@ protected void handleValueException(CompletableFuture overallResult, Thro
}
try {
- serialized = parameters.getNonNullFieldValidator().validate(parameters.getPath(), serialized);
+ serialized = parameters.getNonNullFieldValidator().validate(parameters, serialized);
} catch (NonNullableFieldWasNullException e) {
return exceptionallyCompletedFuture(e);
}
@@ -917,7 +912,7 @@ protected void handleValueException(CompletableFuture overallResult, Thro
serialized = handleCoercionProblem(executionContext, parameters, e);
}
try {
- serialized = parameters.getNonNullFieldValidator().validate(parameters.getPath(), serialized);
+ serialized = parameters.getNonNullFieldValidator().validate(parameters, serialized);
} catch (NonNullableFieldWasNullException e) {
return exceptionallyCompletedFuture(e);
}
@@ -971,9 +966,8 @@ protected void handleValueException(CompletableFuture overallResult, Thro
@SuppressWarnings("SameReturnValue")
private Object handleCoercionProblem(ExecutionContext context, ExecutionStrategyParameters parameters, CoercingSerializeException e) {
SerializationError error = new SerializationError(parameters.getPath(), e);
- context.addError(error);
- parameters.getDeferredCallContext().onError(error);
+ addErrorToRightContext(error, parameters, context);
return null;
}
@@ -997,9 +991,8 @@ protected Iterable toIterable(ExecutionContext context, ExecutionStrateg
private void handleTypeMismatchProblem(ExecutionContext context, ExecutionStrategyParameters parameters) {
TypeMismatchError error = new TypeMismatchError(parameters.getPath(), parameters.getExecutionStepInfo().getUnwrappedNonNullType());
- context.addError(error);
- parameters.getDeferredCallContext().onError(error);
+ addErrorToRightContext(error, parameters, context);
}
/**
@@ -1158,4 +1151,20 @@ private static Supplier> getArgumentV
return argumentValues;
}
+ // Errors that result from the execution of deferred fields are kept in the deferred context only.
+ private static void addErrorToRightContext(GraphQLError error, ExecutionStrategyParameters parameters, ExecutionContext executionContext) {
+ if (parameters.getDeferredCallContext() != null) {
+ parameters.getDeferredCallContext().addError(error);
+ } else {
+ executionContext.addError(error);
+ }
+ }
+
+ private static void addErrorsToRightContext(List errors, ExecutionStrategyParameters parameters, ExecutionContext executionContext) {
+ if (parameters.getDeferredCallContext() != null) {
+ parameters.getDeferredCallContext().addErrors(errors);
+ } else {
+ executionContext.addErrors(errors);
+ }
+ }
}
diff --git a/src/main/java/graphql/execution/ExecutionStrategyParameters.java b/src/main/java/graphql/execution/ExecutionStrategyParameters.java
index e1df7bb363..f74336bad1 100644
--- a/src/main/java/graphql/execution/ExecutionStrategyParameters.java
+++ b/src/main/java/graphql/execution/ExecutionStrategyParameters.java
@@ -2,6 +2,7 @@
import graphql.PublicApi;
import graphql.execution.incremental.DeferredCallContext;
+import org.jetbrains.annotations.Nullable;
import java.util.function.Consumer;
@@ -71,10 +72,40 @@ public ExecutionStrategyParameters getParent() {
return parent;
}
+ /**
+ * Returns the deferred call context if we're in the scope of a deferred call.
+ * A new DeferredCallContext is created for each @defer block, and is passed down to all fields within the deferred call.
+ *
+ *
+ * query {
+ * ... @defer {
+ * field1 { # new DeferredCallContext created here
+ * field1a # DeferredCallContext passed down to this field
+ * }
+ * }
+ *
+ * ... @defer {
+ * field2 # new DeferredCallContext created here
+ * }
+ * }
+ *
+ *
+ * @return the deferred call context or null if we're not in the scope of a deferred call
+ */
+ @Nullable
public DeferredCallContext getDeferredCallContext() {
return deferredCallContext;
}
+ /**
+ * Returns true if we're in the scope of a deferred call.
+ *
+ * @return true if we're in the scope of a deferred call
+ */
+ public boolean isInDeferredContext() {
+ return deferredCallContext != null;
+ }
+
/**
* This returns the current field in its query representations.
*
@@ -187,9 +218,6 @@ public Builder deferredCallContext(DeferredCallContext deferredCallContext) {
}
public ExecutionStrategyParameters build() {
- if (deferredCallContext == null) {
- deferredCallContext = new DeferredCallContext();
- }
return new ExecutionStrategyParameters(executionStepInfo, source, localContext, fields, nonNullableFieldValidator, path, currentField, parent, deferredCallContext);
}
}
diff --git a/src/main/java/graphql/execution/FetchedValue.java b/src/main/java/graphql/execution/FetchedValue.java
index 0a643f8b71..8ebac38ced 100644
--- a/src/main/java/graphql/execution/FetchedValue.java
+++ b/src/main/java/graphql/execution/FetchedValue.java
@@ -17,7 +17,7 @@ public class FetchedValue {
private final Object localContext;
private final ImmutableList errors;
- FetchedValue(Object fetchedValue, List errors, Object localContext) {
+ public FetchedValue(Object fetchedValue, List errors, Object localContext) {
this.fetchedValue = fetchedValue;
this.errors = ImmutableList.copyOf(errors);
this.localContext = localContext;
diff --git a/src/main/java/graphql/execution/MergedField.java b/src/main/java/graphql/execution/MergedField.java
index e66afb63f8..f7e6545052 100644
--- a/src/main/java/graphql/execution/MergedField.java
+++ b/src/main/java/graphql/execution/MergedField.java
@@ -6,8 +6,8 @@
import graphql.execution.incremental.DeferredExecution;
import graphql.language.Argument;
import graphql.language.Field;
+import org.jetbrains.annotations.Nullable;
-import javax.annotation.Nullable;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;
@@ -139,6 +139,16 @@ public List getDeferredExecutions() {
return deferredExecutions;
}
+ /**
+ * Returns true if this field is part of a deferred execution
+ *
+ * @return true if this field is part of a deferred execution
+ */
+ @ExperimentalApi
+ public boolean isDeferred() {
+ return !deferredExecutions.isEmpty();
+ }
+
public static Builder newMergedField() {
return new Builder();
}
diff --git a/src/main/java/graphql/execution/NonNullableFieldValidator.java b/src/main/java/graphql/execution/NonNullableFieldValidator.java
index af26b69008..3241fa247a 100644
--- a/src/main/java/graphql/execution/NonNullableFieldValidator.java
+++ b/src/main/java/graphql/execution/NonNullableFieldValidator.java
@@ -1,6 +1,7 @@
package graphql.execution;
+import graphql.GraphQLError;
import graphql.Internal;
/**
@@ -23,7 +24,7 @@ public NonNullableFieldValidator(ExecutionContext executionContext, ExecutionSte
/**
* Called to check that a value is non null if the type requires it to be non null
*
- * @param path the path to this place
+ * @param parameters the execution strategy parameters
* @param result the result to check
* @param the type of the result
*
@@ -31,7 +32,7 @@ public NonNullableFieldValidator(ExecutionContext executionContext, ExecutionSte
*
* @throws NonNullableFieldWasNullException if the value is null but the type requires it to be non null
*/
- public T validate(ResultPath path, T result) throws NonNullableFieldWasNullException {
+ public T validate(ExecutionStrategyParameters parameters, T result) throws NonNullableFieldWasNullException {
if (result == null) {
if (executionStepInfo.isNonNullType()) {
// see https://spec.graphql.org/October2021/#sec-Errors-and-Non-Nullability
@@ -46,8 +47,15 @@ public T validate(ResultPath path, T result) throws NonNullableFieldWasNullE
//
// We will do this until the spec makes this more explicit.
//
+ final ResultPath path = parameters.getPath();
+
NonNullableFieldWasNullException nonNullException = new NonNullableFieldWasNullException(executionStepInfo, path);
- executionContext.addError(new NonNullableFieldWasNullError(nonNullException), path);
+ final GraphQLError error = new NonNullableFieldWasNullError(nonNullException);
+ if (parameters.getDeferredCallContext() != null) {
+ parameters.getDeferredCallContext().addError(error);
+ } else {
+ executionContext.addError(error, path);
+ }
throw nonNullException;
}
}
diff --git a/src/main/java/graphql/execution/RawVariables.java b/src/main/java/graphql/execution/RawVariables.java
index f8442fc5b9..02e1acabe2 100644
--- a/src/main/java/graphql/execution/RawVariables.java
+++ b/src/main/java/graphql/execution/RawVariables.java
@@ -36,4 +36,9 @@ public static RawVariables emptyVariables() {
public static RawVariables of(Map rawVariables) {
return new RawVariables(rawVariables);
}
+
+ @Override
+ public String toString() {
+ return rawVariables.toString();
+ }
}
diff --git a/src/main/java/graphql/execution/SubscriptionExecutionStrategy.java b/src/main/java/graphql/execution/SubscriptionExecutionStrategy.java
index 7a7cd5c952..4cc43963b8 100644
--- a/src/main/java/graphql/execution/SubscriptionExecutionStrategy.java
+++ b/src/main/java/graphql/execution/SubscriptionExecutionStrategy.java
@@ -2,6 +2,7 @@
import graphql.ExecutionResult;
import graphql.ExecutionResultImpl;
+import graphql.GraphQLContext;
import graphql.PublicApi;
import graphql.execution.instrumentation.ExecutionStrategyInstrumentationContext;
import graphql.execution.instrumentation.Instrumentation;
@@ -37,6 +38,14 @@
@PublicApi
public class SubscriptionExecutionStrategy extends ExecutionStrategy {
+ /**
+ * If a boolean value is placed into the {@link GraphQLContext} with this key then the order
+ * of the subscription events can be controlled. By default, subscription events are published
+ * as the graphql subselection calls complete, and not in the order they originally arrived from the
+ * source publisher. But this can be changed to {@link Boolean#TRUE} to keep them in order.
+ */
+ public static final String KEEP_SUBSCRIPTION_EVENTS_ORDERED = "KEEP_SUBSCRIPTION_EVENTS_ORDERED";
+
public SubscriptionExecutionStrategy() {
super();
}
@@ -64,7 +73,8 @@ public CompletableFuture execute(ExecutionContext executionCont
return new ExecutionResultImpl(null, executionContext.getErrors());
}
Function> mapperFunction = eventPayload -> executeSubscriptionEvent(executionContext, parameters, eventPayload);
- SubscriptionPublisher mapSourceToResponse = new SubscriptionPublisher(publisher, mapperFunction);
+ boolean keepOrdered = keepOrdered(executionContext.getGraphQLContext());
+ SubscriptionPublisher mapSourceToResponse = new SubscriptionPublisher(publisher, mapperFunction, keepOrdered);
return new ExecutionResultImpl(mapSourceToResponse, executionContext.getErrors());
});
@@ -75,6 +85,10 @@ public CompletableFuture execute(ExecutionContext executionCont
return overallResult;
}
+ private boolean keepOrdered(GraphQLContext graphQLContext) {
+ return graphQLContext.getOrDefault(KEEP_SUBSCRIPTION_EVENTS_ORDERED, false);
+ }
+
/*
https://github.com/facebook/graphql/blob/master/spec/Section%206%20--%20Execution.md
@@ -99,7 +113,7 @@ private CompletableFuture> createSourceEventStream(ExecutionCo
if (publisher != null) {
assertTrue(publisher instanceof Publisher, () -> "Your data fetcher must return a Publisher of events when using graphql subscriptions");
}
- //noinspection unchecked
+ //noinspection unchecked,DataFlowIssue
return (Publisher) publisher;
});
}
diff --git a/src/main/java/graphql/execution/ValuesResolverConversion.java b/src/main/java/graphql/execution/ValuesResolverConversion.java
index eff4d1cef1..66b84bb198 100644
--- a/src/main/java/graphql/execution/ValuesResolverConversion.java
+++ b/src/main/java/graphql/execution/ValuesResolverConversion.java
@@ -374,6 +374,7 @@ static CoercedVariables externalValueToInternalValueForVariables(
coercedValues.put(variableName, null);
} else {
Object coercedValue = externalValueToInternalValueImpl(
+ variableName,
inputInterceptor,
fieldVisibility,
variableInputType,
@@ -398,11 +399,28 @@ static CoercedVariables externalValueToInternalValueForVariables(
return CoercedVariables.of(coercedValues);
}
+ static Object externalValueToInternalValueImpl(
+ InputInterceptor inputInterceptor,
+ GraphqlFieldVisibility fieldVisibility,
+ GraphQLInputType graphQLType,
+ Object originalValue,
+ GraphQLContext graphqlContext,
+ Locale locale
+ ) throws NonNullableValueCoercedAsNullException, CoercingParseValueException {
+ return externalValueToInternalValueImpl("externalValue",
+ inputInterceptor,
+ fieldVisibility,
+ graphQLType,
+ originalValue,
+ graphqlContext,
+ locale);
+ }
+
/**
* Performs validation too
*/
- @SuppressWarnings("unchecked")
static Object externalValueToInternalValueImpl(
+ String variableName,
InputInterceptor inputInterceptor,
GraphqlFieldVisibility fieldVisibility,
GraphQLInputType graphQLType,
@@ -412,6 +430,7 @@ static Object externalValueToInternalValueImpl(
) throws NonNullableValueCoercedAsNullException, CoercingParseValueException {
if (isNonNull(graphQLType)) {
Object returnValue = externalValueToInternalValueImpl(
+ variableName,
inputInterceptor,
fieldVisibility,
unwrapOneAs(graphQLType),
@@ -458,13 +477,18 @@ static Object externalValueToInternalValueImpl(
locale);
} else if (graphQLType instanceof GraphQLInputObjectType) {
if (value instanceof Map) {
- return externalValueToInternalValueForObject(
+ GraphQLInputObjectType inputObjectType = (GraphQLInputObjectType) graphQLType;
+ //noinspection unchecked
+ Map coercedMap = externalValueToInternalValueForObject(
inputInterceptor,
fieldVisibility,
- (GraphQLInputObjectType) graphQLType,
+ inputObjectType,
(Map) value,
graphqlContext,
locale);
+
+ ValuesResolverOneOfValidation.validateOneOfInputTypes(inputObjectType, coercedMap, null, variableName, locale);
+ return coercedMap;
} else {
throw CoercingParseValueException.newCoercingParseValueException()
.message("Expected type 'Map' but was '" + value.getClass().getSimpleName() +
@@ -479,7 +503,7 @@ static Object externalValueToInternalValueImpl(
/**
* performs validation
*/
- private static Object externalValueToInternalValueForObject(
+ private static Map externalValueToInternalValueForObject(
InputInterceptor inputInterceptor,
GraphqlFieldVisibility fieldVisibility,
GraphQLInputObjectType inputObjectType,
diff --git a/src/main/java/graphql/execution/incremental/DeferredCallContext.java b/src/main/java/graphql/execution/incremental/DeferredCallContext.java
index 15f428966f..d7d494aace 100644
--- a/src/main/java/graphql/execution/incremental/DeferredCallContext.java
+++ b/src/main/java/graphql/execution/incremental/DeferredCallContext.java
@@ -1,10 +1,7 @@
package graphql.execution.incremental;
-import graphql.ExceptionWhileDataFetching;
import graphql.GraphQLError;
import graphql.Internal;
-import graphql.execution.ResultPath;
-import graphql.language.SourceLocation;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
@@ -23,12 +20,11 @@ public class DeferredCallContext {
private final List errors = new CopyOnWriteArrayList<>();
- public void onFetchingException(ResultPath path, SourceLocation sourceLocation, Throwable throwable) {
- ExceptionWhileDataFetching error = new ExceptionWhileDataFetching(path, throwable, sourceLocation);
- onError(error);
+ public void addErrors(List errors) {
+ this.errors.addAll(errors);
}
- public void onError(GraphQLError graphqlError) {
+ public void addError(GraphQLError graphqlError) {
errors.add(graphqlError);
}
diff --git a/src/main/java/graphql/execution/incremental/DeferredExecution.java b/src/main/java/graphql/execution/incremental/DeferredExecution.java
index 3f14f5922e..6bc05973c2 100644
--- a/src/main/java/graphql/execution/incremental/DeferredExecution.java
+++ b/src/main/java/graphql/execution/incremental/DeferredExecution.java
@@ -2,8 +2,7 @@
import graphql.ExperimentalApi;
import graphql.normalized.incremental.NormalizedDeferredExecution;
-
-import javax.annotation.Nullable;
+import org.jetbrains.annotations.Nullable;
/**
* Represents details about the defer execution that can be associated with a {@link graphql.execution.MergedField}.
diff --git a/src/main/java/graphql/execution/incremental/IncrementalCallState.java b/src/main/java/graphql/execution/incremental/IncrementalCallState.java
index f96a706f36..2f5c9742be 100644
--- a/src/main/java/graphql/execution/incremental/IncrementalCallState.java
+++ b/src/main/java/graphql/execution/incremental/IncrementalCallState.java
@@ -4,6 +4,7 @@
import graphql.execution.reactive.SingleSubscriberPublisher;
import graphql.incremental.DelayedIncrementalPartialResult;
import graphql.incremental.IncrementalPayload;
+import graphql.util.InterThreadMemoizedSupplier;
import graphql.util.LockKit;
import org.reactivestreams.Publisher;
@@ -13,6 +14,7 @@
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Supplier;
import static graphql.incremental.DelayedIncrementalPartialResultImpl.newIncrementalExecutionResult;
@@ -24,7 +26,7 @@
public class IncrementalCallState {
private final AtomicBoolean incrementalCallsDetected = new AtomicBoolean(false);
private final Deque> incrementalCalls = new ConcurrentLinkedDeque<>();
- private final SingleSubscriberPublisher publisher = new SingleSubscriberPublisher<>();
+ private final Supplier> publisher = createPublisher();
private final AtomicInteger pendingCalls = new AtomicInteger();
private final LockKit.ReentrantLock publisherLock = new LockKit.ReentrantLock();
@@ -36,7 +38,7 @@ private void drainIncrementalCalls() {
incrementalCall.invoke()
.whenComplete((payload, exception) -> {
if (exception != null) {
- publisher.offerError(exception);
+ publisher.get().offerError(exception);
return;
}
@@ -53,13 +55,13 @@ private void drainIncrementalCalls() {
.hasNext(remainingCalls != 0)
.build();
- publisher.offer(executionResult);
+ publisher.get().offer(executionResult);
} finally {
publisherLock.unlock();
}
if (remainingCalls == 0) {
- publisher.noMoreData();
+ publisher.get().noMoreData();
} else {
// Nested calls were added, let's try to drain the queue again.
drainIncrementalCalls();
@@ -85,13 +87,20 @@ public boolean getIncrementalCallsDetected() {
return incrementalCallsDetected.get();
}
+ private Supplier> createPublisher() {
+ // this will be created once and once only - any extra calls to .get() will return the previously created
+ // singleton object
+ return new InterThreadMemoizedSupplier<>(() -> new SingleSubscriberPublisher<>(this::drainIncrementalCalls));
+ }
+
/**
- * When this is called the deferred execution will begin
+ * This method will return a {@link Publisher} of deferred results. No field processing will be done
+ * until a {@link org.reactivestreams.Subscriber} is attached to this publisher. Once a {@link org.reactivestreams.Subscriber}
+ * is attached the deferred field result processing will be started and published as a series of events.
*
* @return the publisher of deferred results
*/
public Publisher startDeferredCalls() {
- drainIncrementalCalls();
- return publisher;
+ return publisher.get();
}
}
diff --git a/src/main/java/graphql/execution/instrumentation/FieldFetchingInstrumentationContext.java b/src/main/java/graphql/execution/instrumentation/FieldFetchingInstrumentationContext.java
index d8e51269be..c64ea80ba5 100644
--- a/src/main/java/graphql/execution/instrumentation/FieldFetchingInstrumentationContext.java
+++ b/src/main/java/graphql/execution/instrumentation/FieldFetchingInstrumentationContext.java
@@ -2,12 +2,30 @@
import graphql.Internal;
import graphql.PublicSpi;
+import graphql.execution.instrumentation.parameters.InstrumentationFieldFetchParameters;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
+/**
+ * FieldFetchingInstrumentationContext is returned back from the {@link Instrumentation#beginFieldFetching(InstrumentationFieldFetchParameters, InstrumentationState)}
+ * method, and it's much like the normal {@link InstrumentationContext} type except it also
+ * gives the value that was returned by a field's {@link graphql.schema.DataFetcher}. This allows
+ * you to know if the field value is a completely materialised field or if it's a {@link java.util.concurrent.CompletableFuture}
+ * promise to a value.
+ */
@PublicSpi
public interface FieldFetchingInstrumentationContext extends InstrumentationContext {
+ /**
+ * This is called back with the value fetched for the field by its {@link graphql.schema.DataFetcher}.
+ * This can be a materialised java object or it may be a {@link java.util.concurrent.CompletableFuture}
+ * promise to some async value that has not yet completed.
+ *
+ * @param fetchedValue a value that a field's {@link graphql.schema.DataFetcher} returned
+ */
+ default void onFetchedValue(Object fetchedValue) {
+ }
+
@Internal
FieldFetchingInstrumentationContext NOOP = new FieldFetchingInstrumentationContext() {
@Override
@@ -17,18 +35,13 @@ public void onDispatched() {
@Override
public void onCompleted(Object result, Throwable t) {
}
-
- @Override
- public void onFetchedValue(Object fetchedValue) {
- }
};
/**
- * This creates a no-op {@link InstrumentationContext} if the one pass in is null
+ * This creates a no-op {@link InstrumentationContext} if the one passed in is null
*
* @param nullableContext a {@link InstrumentationContext} that can be null
- *
- * @return a non null {@link InstrumentationContext} that maybe a no-op
+ * @return a non-null {@link InstrumentationContext} that may be a no-op
*/
@NotNull
@Internal
@@ -51,18 +64,6 @@ public void onDispatched() {
public void onCompleted(Object result, Throwable t) {
context.onCompleted(result, t);
}
-
- @Override
- public void onFetchedValue(Object fetchedValue) {
- }
};
}
-
- /**
- * This is called back with value fetched for the field.
- *
- * @param fetchedValue a value that a field's {@link graphql.schema.DataFetcher} returned
- */
- default void onFetchedValue(Object fetchedValue) {
- }
}
diff --git a/src/main/java/graphql/execution/instrumentation/Instrumentation.java b/src/main/java/graphql/execution/instrumentation/Instrumentation.java
index 11819347ad..4f7767a767 100644
--- a/src/main/java/graphql/execution/instrumentation/Instrumentation.java
+++ b/src/main/java/graphql/execution/instrumentation/Instrumentation.java
@@ -209,6 +209,11 @@ default InstrumentationContext beginFieldFetch(InstrumentationFieldFetch
* This is called just before a field {@link DataFetcher} is invoked. The {@link FieldFetchingInstrumentationContext#onFetchedValue(Object)}
* callback will be invoked once a value is returned by a {@link DataFetcher} but perhaps before
* its value is completed if it's a {@link CompletableFuture} value.
+ *
+ * This method is the replacement method for the now deprecated {@link #beginFieldFetch(InstrumentationFieldFetchParameters, InstrumentationState)}
+ * method, and it should be implemented in new {@link Instrumentation} classes. This default version of this
+ * method calls back to the deprecated {@link #beginFieldFetch(InstrumentationFieldFetchParameters, InstrumentationState)} method
+ * so that older implementations continue to work.
*
* @param parameters the parameters to this step
* @param state the state created during the call to {@link #createStateAsync(InstrumentationCreateStateParameters)}
diff --git a/src/main/java/graphql/execution/reactive/CompletionStageMappingOrderedPublisher.java b/src/main/java/graphql/execution/reactive/CompletionStageMappingOrderedPublisher.java
new file mode 100644
index 0000000000..4f4e651ca3
--- /dev/null
+++ b/src/main/java/graphql/execution/reactive/CompletionStageMappingOrderedPublisher.java
@@ -0,0 +1,36 @@
+package graphql.execution.reactive;
+
+import graphql.Internal;
+import org.jetbrains.annotations.NotNull;
+import org.reactivestreams.Publisher;
+import org.reactivestreams.Subscriber;
+
+import java.util.concurrent.CompletionStage;
+import java.util.function.Function;
+
+/**
+ * A reactive Publisher that bridges over another Publisher of `D` and maps the results
+ * to type `U` via a CompletionStage, handling errors in that stage but keeps the results
+ * in order of downstream publishing. This means it must queue unfinished
+ * completion stages in memory in arrival order.
+ *
+ * @param the downstream type
+ * @param the upstream type to be mapped to
+ */
+@Internal
+public class CompletionStageMappingOrderedPublisher extends CompletionStageMappingPublisher {
+ /**
+ * You need the following :
+ *
+ * @param upstreamPublisher an upstream source of data
+ * @param mapper a mapper function that turns upstream data into a promise of mapped D downstream data
+ */
+ public CompletionStageMappingOrderedPublisher(Publisher upstreamPublisher, Function> mapper) {
+ super(upstreamPublisher, mapper);
+ }
+
+ @Override
+ protected @NotNull Subscriber super U> createSubscriber(Subscriber super D> downstreamSubscriber) {
+ return new CompletionStageOrderedSubscriber<>(mapper, downstreamSubscriber);
+ }
+}
diff --git a/src/main/java/graphql/execution/reactive/CompletionStageMappingPublisher.java b/src/main/java/graphql/execution/reactive/CompletionStageMappingPublisher.java
index 18bfabbc6c..3e913c62a2 100644
--- a/src/main/java/graphql/execution/reactive/CompletionStageMappingPublisher.java
+++ b/src/main/java/graphql/execution/reactive/CompletionStageMappingPublisher.java
@@ -1,31 +1,26 @@
package graphql.execution.reactive;
import graphql.Internal;
-import graphql.util.LockKit;
+import org.jetbrains.annotations.NotNull;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscriber;
-import org.reactivestreams.Subscription;
-import java.util.ArrayDeque;
-import java.util.Queue;
import java.util.concurrent.CompletionStage;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.function.BiConsumer;
import java.util.function.Function;
+import static graphql.Assert.assertNotNullWithNPE;
+
/**
* A reactive Publisher that bridges over another Publisher of `D` and maps the results
* to type `U` via a CompletionStage, handling errors in that stage
*
- * @param the down stream type
- * @param the up stream type to be mapped to
+ * @param the downstream type
+ * @param the upstream type to be mapped to
*/
-@SuppressWarnings("ReactiveStreamsPublisherImplementation")
@Internal
public class CompletionStageMappingPublisher implements Publisher {
- private final Publisher upstreamPublisher;
- private final Function> mapper;
+ protected final Publisher upstreamPublisher;
+ protected final Function> mapper;
/**
* You need the following :
@@ -40,9 +35,16 @@ public CompletionStageMappingPublisher(Publisher upstreamPublisher, Function<
@Override
public void subscribe(Subscriber super D> downstreamSubscriber) {
- upstreamPublisher.subscribe(new CompletionStageSubscriber(downstreamSubscriber));
+ assertNotNullWithNPE(downstreamSubscriber, () -> "Subscriber passed to subscribe must not be null");
+ upstreamPublisher.subscribe(createSubscriber(downstreamSubscriber));
+ }
+
+ @NotNull
+ protected Subscriber super U> createSubscriber(Subscriber super D> downstreamSubscriber) {
+ return new CompletionStageSubscriber<>(mapper, downstreamSubscriber);
}
+
/**
* Get instance of an upstreamPublisher
*
@@ -52,126 +54,4 @@ public Publisher getUpstreamPublisher() {
return upstreamPublisher;
}
- @SuppressWarnings("ReactiveStreamsSubscriberImplementation")
- @Internal
- public class CompletionStageSubscriber implements Subscriber {
- private final Subscriber super D> downstreamSubscriber;
- Subscription delegatingSubscription;
- final Queue> inFlightDataQ;
- final LockKit.ReentrantLock lock = new LockKit.ReentrantLock();
- final AtomicReference onCompleteOrErrorRun;
- final AtomicBoolean onCompleteOrErrorRunCalled;
-
- public CompletionStageSubscriber(Subscriber super D> downstreamSubscriber) {
- this.downstreamSubscriber = downstreamSubscriber;
- inFlightDataQ = new ArrayDeque<>();
- onCompleteOrErrorRun = new AtomicReference<>();
- onCompleteOrErrorRunCalled = new AtomicBoolean(false);
- }
-
-
- @Override
- public void onSubscribe(Subscription subscription) {
- delegatingSubscription = new DelegatingSubscription(subscription);
- downstreamSubscriber.onSubscribe(delegatingSubscription);
- }
-
- @Override
- public void onNext(U u) {
- // for safety - no more data after we have called done/error - we should not get this BUT belts and braces
- if (onCompleteOrErrorRunCalled.get()) {
- return;
- }
- try {
- CompletionStage completionStage = mapper.apply(u);
- offerToInFlightQ(completionStage);
- completionStage.whenComplete(whenNextFinished(completionStage));
- } catch (RuntimeException throwable) {
- handleThrowable(throwable);
- }
- }
-
- private BiConsumer whenNextFinished(CompletionStage completionStage) {
- return (d, throwable) -> {
- try {
- if (throwable != null) {
- handleThrowable(throwable);
- } else {
- downstreamSubscriber.onNext(d);
- }
- } finally {
- Runnable runOnCompleteOrErrorRun = onCompleteOrErrorRun.get();
- boolean empty = removeFromInFlightQAndCheckIfEmpty(completionStage);
- if (empty && runOnCompleteOrErrorRun != null) {
- onCompleteOrErrorRun.set(null);
- runOnCompleteOrErrorRun.run();
- }
- }
- };
- }
-
- private void handleThrowable(Throwable throwable) {
- downstreamSubscriber.onError(throwable);
- //
- // reactive semantics say that IF an exception happens on a publisher
- // then onError is called and no more messages flow. But since the exception happened
- // during the mapping, the upstream publisher does not no about this.
- // so we cancel to bring the semantics back together, that is as soon as an exception
- // has happened, no more messages flow
- //
- delegatingSubscription.cancel();
- }
-
- @Override
- public void onError(Throwable t) {
- onCompleteOrError(() -> {
- onCompleteOrErrorRunCalled.set(true);
- downstreamSubscriber.onError(t);
- });
- }
-
- @Override
- public void onComplete() {
- onCompleteOrError(() -> {
- onCompleteOrErrorRunCalled.set(true);
- downstreamSubscriber.onComplete();
- });
- }
-
- /**
- * Get instance of downstream subscriber
- *
- * @return {@link Subscriber}
- */
- public Subscriber super D> getDownstreamSubscriber() {
- return downstreamSubscriber;
- }
-
- private void onCompleteOrError(Runnable doneCodeToRun) {
- if (inFlightQIsEmpty()) {
- // run right now
- doneCodeToRun.run();
- } else {
- onCompleteOrErrorRun.set(doneCodeToRun);
- }
- }
-
- private void offerToInFlightQ(CompletionStage> completionStage) {
- lock.runLocked(() ->
- inFlightDataQ.offer(completionStage)
- );
- }
-
- private boolean removeFromInFlightQAndCheckIfEmpty(CompletionStage> completionStage) {
- // uncontested locks in java are cheap - we don't expect much contention here
- return lock.callLocked(() -> {
- inFlightDataQ.remove(completionStage);
- return inFlightDataQ.isEmpty();
- });
- }
-
- private boolean inFlightQIsEmpty() {
- return lock.callLocked(inFlightDataQ::isEmpty);
- }
- }
}
diff --git a/src/main/java/graphql/execution/reactive/CompletionStageOrderedSubscriber.java b/src/main/java/graphql/execution/reactive/CompletionStageOrderedSubscriber.java
new file mode 100644
index 0000000000..53a8dd4719
--- /dev/null
+++ b/src/main/java/graphql/execution/reactive/CompletionStageOrderedSubscriber.java
@@ -0,0 +1,84 @@
+package graphql.execution.reactive;
+
+import graphql.Internal;
+import org.reactivestreams.Subscriber;
+
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.CompletionException;
+import java.util.concurrent.CompletionStage;
+import java.util.function.Function;
+
+/**
+ * This subscriber can be used to map between a {@link org.reactivestreams.Publisher} of U
+ * elements and map them into {@link CompletionStage} of D promises, and it keeps them in the order
+ * the Publisher provided them.
+ *
+ * @param published upstream elements
+ * @param mapped downstream values
+ */
+@Internal
+public class CompletionStageOrderedSubscriber extends CompletionStageSubscriber implements Subscriber {
+
+ public CompletionStageOrderedSubscriber(Function> mapper, Subscriber super D> downstreamSubscriber) {
+ super(mapper, downstreamSubscriber);
+ }
+
+ @Override
+ protected void whenNextFinished(CompletionStage completionStage, D d, Throwable throwable) {
+ try {
+ if (throwable != null) {
+ handleThrowableDuringMapping(throwable);
+ } else {
+ emptyInFlightQueueIfWeCan();
+ }
+ } finally {
+ boolean empty = inFlightQIsEmpty();
+ finallyAfterEachPromiseFinishes(empty);
+ }
+ }
+
+ private void emptyInFlightQueueIfWeCan() {
+ // done inside a memory lock, so we can't offer new CFs to the queue
+ // until we have processed any completed ones from the start of
+ // the queue.
+ lock.runLocked(() -> {
+ //
+ // from the top of the in flight queue, take all the CFs that have
+ // completed... but stop if they are not done
+ while (!inFlightDataQ.isEmpty()) {
+ CompletionStage> cs = inFlightDataQ.peek();
+ if (cs != null) {
+ //
+ CompletableFuture> cf = cs.toCompletableFuture();
+ if (cf.isDone()) {
+ // take it off the queue
+ inFlightDataQ.poll();
+ D value;
+ try {
+ //noinspection unchecked
+ value = (D) cf.join();
+ } catch (RuntimeException rte) {
+ //
+ // if we get an exception while joining on a value, we
+ // send it into the exception handling and break out
+ handleThrowableDuringMapping(cfExceptionUnwrap(rte));
+ break;
+ }
+ downstreamSubscriber.onNext(value);
+ } else {
+ // if the CF is not done, then we have to stop processing
+ // to keep the results in order inside the inFlightQueue
+ break;
+ }
+ }
+ }
+ });
+ }
+
+ private Throwable cfExceptionUnwrap(Throwable throwable) {
+ if (throwable instanceof CompletionException && throwable.getCause() != null) {
+ return throwable.getCause();
+ }
+ return throwable;
+ }
+}
diff --git a/src/main/java/graphql/execution/reactive/CompletionStageSubscriber.java b/src/main/java/graphql/execution/reactive/CompletionStageSubscriber.java
new file mode 100644
index 0000000000..c9db2ae9e6
--- /dev/null
+++ b/src/main/java/graphql/execution/reactive/CompletionStageSubscriber.java
@@ -0,0 +1,201 @@
+package graphql.execution.reactive;
+
+import graphql.Internal;
+import graphql.util.LockKit;
+import org.jetbrains.annotations.NotNull;
+import org.reactivestreams.Subscriber;
+import org.reactivestreams.Subscription;
+
+import java.util.ArrayDeque;
+import java.util.Queue;
+import java.util.concurrent.CompletionStage;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.BiConsumer;
+import java.util.function.Function;
+
+/**
+ * This subscriber can be used to map between a {@link org.reactivestreams.Publisher} of U
+ * elements and map them into {@link CompletionStage} of D promises.
+ *
+ * @param <U> published upstream elements
+ * @param <D> mapped downstream values
+ */
+@Internal
+public class CompletionStageSubscriber<U, D> implements Subscriber<U> {
+ protected final Function<U, CompletionStage<D>> mapper;
+ protected final Subscriber<? super D> downstreamSubscriber;
+ protected Subscription delegatingSubscription;
+ protected final Queue<CompletionStage<?>> inFlightDataQ;
+ protected final LockKit.ReentrantLock lock = new LockKit.ReentrantLock();
+ protected final AtomicReference<Runnable> onCompleteRun;
+ protected final AtomicBoolean isTerminal;
+
+ public CompletionStageSubscriber(Function<U, CompletionStage<D>> mapper, Subscriber<? super D> downstreamSubscriber) {
+ this.mapper = mapper;
+ this.downstreamSubscriber = downstreamSubscriber;
+ inFlightDataQ = new ArrayDeque<>();
+ onCompleteRun = new AtomicReference<>();
+ isTerminal = new AtomicBoolean(false);
+ }
+
+ /**
+ * Get instance of downstream subscriber
+ *
+ * @return {@link Subscriber}
+ */
+ public Subscriber<? super D> getDownstreamSubscriber() {
+ return downstreamSubscriber;
+ }
+
+ @Override
+ public void onSubscribe(Subscription subscription) {
+ delegatingSubscription = new DelegatingSubscription(subscription);
+ downstreamSubscriber.onSubscribe(delegatingSubscription);
+ }
+
+ @Override
+ public void onNext(U u) {
+ // for safety - no more data after we have called done/error - we should not get this BUT belts and braces
+ if (isTerminal()) {
+ return;
+ }
+ try {
+ CompletionStage<D> completionStage = mapper.apply(u);
+ offerToInFlightQ(completionStage);
+ completionStage.whenComplete(whenComplete(completionStage));
+ } catch (RuntimeException throwable) {
+ handleThrowableDuringMapping(throwable);
+ }
+ }
+
+ @NotNull
+ private BiConsumer<D, Throwable> whenComplete(CompletionStage<D> completionStage) {
+ return (d, throwable) -> {
+ if (isTerminal()) {
+ return;
+ }
+ whenNextFinished(completionStage, d, throwable);
+ };
+ }
+
+ /**
+ * This is called as each mapped {@link CompletionStage} completes with
+ * a value or exception
+ *
+ * @param completionStage the completion stage that has completed
+ * @param d the value completed
+ * @param throwable or the throwable that happened during completion
+ */
+ protected void whenNextFinished(CompletionStage<D> completionStage, D d, Throwable throwable) {
+ try {
+ if (throwable != null) {
+ handleThrowableDuringMapping(throwable);
+ } else {
+ downstreamSubscriber.onNext(d);
+ }
+ } finally {
+ boolean empty = removeFromInFlightQAndCheckIfEmpty(completionStage);
+ finallyAfterEachPromiseFinishes(empty);
+ }
+ }
+
+ protected void finallyAfterEachPromiseFinishes(boolean isInFlightEmpty) {
+ //
+ // if the runOnCompleteOrErrorRun runnable is set, the upstream has
+ // called onComplete() already, but the CFs have not all completed
+ // yet, so we have to check whenever a CF completes
+ //
+ Runnable runOnCompleteOrErrorRun = onCompleteRun.get();
+ if (isInFlightEmpty && runOnCompleteOrErrorRun != null) {
+ onCompleteRun.set(null);
+ runOnCompleteOrErrorRun.run();
+ }
+ }
+
+ protected void handleThrowableDuringMapping(Throwable throwable) {
+ // only do this once
+ if (isTerminal.compareAndSet(false, true)) {
+ downstreamSubscriber.onError(throwable);
+ //
+ // Reactive semantics say that IF an exception happens on a publisher,
+ // then onError is called and no more messages flow. But since the exception happened
+ // during the mapping, the upstream publisher does not know about this.
+ // So we cancel to bring the semantics back together, that is as soon as an exception
+ // has happened, no more messages flow
+ //
+ delegatingSubscription.cancel();
+
+ cancelInFlightFutures();
+ }
+ }
+
+ @Override
+ public void onError(Throwable t) {
+ // we immediately terminate - we don't wait for any promises to complete
+ if (isTerminal.compareAndSet(false, true)) {
+ downstreamSubscriber.onError(t);
+ cancelInFlightFutures();
+ }
+ }
+
+ @Override
+ public void onComplete() {
+ onComplete(() -> {
+ if (isTerminal.compareAndSet(false, true)) {
+ downstreamSubscriber.onComplete();
+ }
+ });
+ }
+
+ private void onComplete(Runnable doneCodeToRun) {
+ if (inFlightQIsEmpty()) {
+ // run right now
+ doneCodeToRun.run();
+ } else {
+ onCompleteRun.set(doneCodeToRun);
+ }
+ }
+
+ protected void offerToInFlightQ(CompletionStage<?> completionStage) {
+ lock.runLocked(() ->
+ inFlightDataQ.offer(completionStage)
+ );
+ }
+
+ private boolean removeFromInFlightQAndCheckIfEmpty(CompletionStage<?> completionStage) {
+ // uncontested locks in java are cheap - we don't expect much contention here
+ return lock.callLocked(() -> {
+ inFlightDataQ.remove(completionStage);
+ return inFlightDataQ.isEmpty();
+ });
+ }
+
+ /**
+ * If the promise is backed by frameworks such as Reactor, then the cancel()
+ * can cause them to propagate the cancel back into the reactive chain
+ */
+ private void cancelInFlightFutures() {
+ lock.runLocked(() -> {
+ while (!inFlightDataQ.isEmpty()) {
+ CompletionStage<?> cs = inFlightDataQ.poll();
+ if (cs != null) {
+ cs.toCompletableFuture().cancel(false);
+ }
+ }
+ });
+ }
+
+ protected boolean inFlightQIsEmpty() {
+ return lock.callLocked(inFlightDataQ::isEmpty);
+ }
+
+ /**
+ * The two terminal states are onComplete or onError
+ *
+ * @return true if it's in a terminal state
+ */
+ protected boolean isTerminal() {
+ return isTerminal.get();
+ }
+}
diff --git a/src/main/java/graphql/execution/reactive/NonBlockingMutexExecutor.java b/src/main/java/graphql/execution/reactive/NonBlockingMutexExecutor.java
index 38c6eb1238..a9da42d410 100644
--- a/src/main/java/graphql/execution/reactive/NonBlockingMutexExecutor.java
+++ b/src/main/java/graphql/execution/reactive/NonBlockingMutexExecutor.java
@@ -76,9 +76,9 @@ private void runAll(RunNode next) {
if (last.compareAndSet(current, null)) {
return; // end-of-queue: we're done.
} else {
- //noinspection StatementWithEmptyBody
while ((next = current.get()) == null) {
- // Thread.onSpinWait(); in Java 9
+ // hint to the CPU we are actively waiting
+ Thread.onSpinWait();
}
}
}
diff --git a/src/main/java/graphql/execution/reactive/SubscriptionPublisher.java b/src/main/java/graphql/execution/reactive/SubscriptionPublisher.java
index ae4176bb2e..7b8b08d19f 100644
--- a/src/main/java/graphql/execution/reactive/SubscriptionPublisher.java
+++ b/src/main/java/graphql/execution/reactive/SubscriptionPublisher.java
@@ -31,10 +31,15 @@ public class SubscriptionPublisher implements Publisher {
*
* @param upstreamPublisher the original publisher of objects that then have a graphql selection set applied to them
* @param mapper a mapper that turns object into promises to execution results which are then published on this stream
+ * @param keepOrdered this indicates that the order of results should be kept in the same order as the source events arrive
*/
@Internal
- public SubscriptionPublisher(Publisher<Object> upstreamPublisher, Function<Object, CompletionStage<ExecutionResult>> mapper) {
- mappingPublisher = new CompletionStageMappingPublisher<>(upstreamPublisher, mapper);
+ public SubscriptionPublisher(Publisher<Object> upstreamPublisher, Function<Object, CompletionStage<ExecutionResult>> mapper, boolean keepOrdered) {
+ if (keepOrdered) {
+ mappingPublisher = new CompletionStageMappingOrderedPublisher<>(upstreamPublisher, mapper);
+ } else {
+ mappingPublisher = new CompletionStageMappingPublisher<>(upstreamPublisher, mapper);
+ }
}
/**
diff --git a/src/main/java/graphql/incremental/DeferPayload.java b/src/main/java/graphql/incremental/DeferPayload.java
index 8d6047de3a..fcfb06c0c4 100644
--- a/src/main/java/graphql/incremental/DeferPayload.java
+++ b/src/main/java/graphql/incremental/DeferPayload.java
@@ -3,8 +3,8 @@
import graphql.ExecutionResult;
import graphql.ExperimentalApi;
import graphql.GraphQLError;
+import org.jetbrains.annotations.Nullable;
-import javax.annotation.Nullable;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
diff --git a/src/main/java/graphql/incremental/DelayedIncrementalPartialResult.java b/src/main/java/graphql/incremental/DelayedIncrementalPartialResult.java
index 706944e528..e1edaafe03 100644
--- a/src/main/java/graphql/incremental/DelayedIncrementalPartialResult.java
+++ b/src/main/java/graphql/incremental/DelayedIncrementalPartialResult.java
@@ -1,8 +1,8 @@
package graphql.incremental;
import graphql.ExperimentalApi;
+import org.jetbrains.annotations.Nullable;
-import javax.annotation.Nullable;
import java.util.List;
import java.util.Map;
diff --git a/src/main/java/graphql/incremental/IncrementalExecutionResult.java b/src/main/java/graphql/incremental/IncrementalExecutionResult.java
index b3dc1b929e..a0bc40c32f 100644
--- a/src/main/java/graphql/incremental/IncrementalExecutionResult.java
+++ b/src/main/java/graphql/incremental/IncrementalExecutionResult.java
@@ -2,9 +2,9 @@
import graphql.ExecutionResult;
import graphql.ExperimentalApi;
+import org.jetbrains.annotations.Nullable;
import org.reactivestreams.Publisher;
-import javax.annotation.Nullable;
import java.util.List;
/**
@@ -86,6 +86,7 @@
public interface IncrementalExecutionResult extends ExecutionResult {
/**
* Indicates whether there are pending incremental data.
+ *
* @return "true" if there are incremental data, "false" otherwise.
*/
boolean hasNext();
@@ -103,7 +104,11 @@ public interface IncrementalExecutionResult extends ExecutionResult {
List getIncremental();
/**
- * This {@link Publisher} will asynchronously emit events containing defer and/or stream payloads.
+ * This method will return a {@link Publisher} of deferred results. No field processing will be done
+ * until a {@link org.reactivestreams.Subscriber} is attached to this publisher.
+ *
+ * Once a {@link org.reactivestreams.Subscriber} is attached the deferred field result processing will be
+ * started and published as a series of events.
*
* @return a {@link Publisher} that clients can subscribe to receive incremental payloads.
*/
diff --git a/src/main/java/graphql/incremental/IncrementalExecutionResultImpl.java b/src/main/java/graphql/incremental/IncrementalExecutionResultImpl.java
index 765453260d..77b05c6fe5 100644
--- a/src/main/java/graphql/incremental/IncrementalExecutionResultImpl.java
+++ b/src/main/java/graphql/incremental/IncrementalExecutionResultImpl.java
@@ -3,9 +3,9 @@
import graphql.ExecutionResult;
import graphql.ExecutionResultImpl;
import graphql.ExperimentalApi;
+import org.jetbrains.annotations.Nullable;
import org.reactivestreams.Publisher;
-import javax.annotation.Nullable;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
@@ -91,6 +91,8 @@ public Builder incrementalItemPublisher(Publisher getExtensions() {
return this.extensions;
}
- protected Map<String, Object> toSpecification() {
+ public Map<String, Object> toSpecification() {
Map<String, Object> result = new LinkedHashMap<>();
result.put("path", path);
@@ -122,25 +121,25 @@ public T errors(List errors) {
return (T) this;
}
- public Builder addErrors(List<GraphQLError> errors) {
+ public T addErrors(List<GraphQLError> errors) {
this.errors.addAll(errors);
- return this;
+ return (T) this;
}
- public Builder addError(GraphQLError error) {
+ public T addError(GraphQLError error) {
this.errors.add(error);
- return this;
+ return (T) this;
}
- public Builder extensions(Map<String, Object> extensions) {
+ public T extensions(Map<String, Object> extensions) {
this.extensions = extensions;
- return this;
+ return (T) this;
}
- public Builder addExtension(String key, Object value) {
+ public T addExtension(String key, Object value) {
this.extensions = (this.extensions == null ? new LinkedHashMap<>() : this.extensions);
this.extensions.put(key, value);
- return this;
+ return (T) this;
}
}
}
diff --git a/src/main/java/graphql/incremental/StreamPayload.java b/src/main/java/graphql/incremental/StreamPayload.java
index 88e1e7b543..299f5ac468 100644
--- a/src/main/java/graphql/incremental/StreamPayload.java
+++ b/src/main/java/graphql/incremental/StreamPayload.java
@@ -2,8 +2,8 @@
import graphql.ExperimentalApi;
import graphql.GraphQLError;
+import org.jetbrains.annotations.Nullable;
-import javax.annotation.Nullable;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
diff --git a/src/main/java/graphql/language/AstPrinter.java b/src/main/java/graphql/language/AstPrinter.java
index b48fa2075d..3315c1302c 100644
--- a/src/main/java/graphql/language/AstPrinter.java
+++ b/src/main/java/graphql/language/AstPrinter.java
@@ -619,7 +619,7 @@ String wrap(String start, String maybeString, String end) {
private String block(List nodes) {
if (isEmpty(nodes)) {
- return "{}";
+ return "";
}
if (compactMode) {
String joinedNodes = joinTight(nodes, " ", "", "");
diff --git a/src/main/java/graphql/language/OperationDefinition.java b/src/main/java/graphql/language/OperationDefinition.java
index 1a7197c88f..b60aac3a11 100644
--- a/src/main/java/graphql/language/OperationDefinition.java
+++ b/src/main/java/graphql/language/OperationDefinition.java
@@ -21,7 +21,7 @@
import static graphql.language.NodeChildrenContainer.newNodeChildrenContainer;
@PublicApi
-public class OperationDefinition extends AbstractNode<OperationDefinition> implements Definition<OperationDefinition>, SelectionSetContainer<OperationDefinition>, DirectivesContainer<OperationDefinition> {
+public class OperationDefinition extends AbstractNode<OperationDefinition> implements Definition<OperationDefinition>, SelectionSetContainer<OperationDefinition>, DirectivesContainer<OperationDefinition>, NamedNode<OperationDefinition> {
public enum Operation {
QUERY, MUTATION, SUBSCRIPTION
diff --git a/src/main/java/graphql/normalized/incremental/NormalizedDeferredExecution.java b/src/main/java/graphql/normalized/incremental/NormalizedDeferredExecution.java
index a3f789e063..2f13c4c88e 100644
--- a/src/main/java/graphql/normalized/incremental/NormalizedDeferredExecution.java
+++ b/src/main/java/graphql/normalized/incremental/NormalizedDeferredExecution.java
@@ -2,8 +2,8 @@
import graphql.ExperimentalApi;
import graphql.schema.GraphQLObjectType;
+import org.jetbrains.annotations.Nullable;
-import javax.annotation.Nullable;
import java.util.Set;
/**
diff --git a/src/main/java/graphql/parser/ExtendedBailStrategy.java b/src/main/java/graphql/parser/ExtendedBailStrategy.java
index a0861ed0c1..8a83904402 100644
--- a/src/main/java/graphql/parser/ExtendedBailStrategy.java
+++ b/src/main/java/graphql/parser/ExtendedBailStrategy.java
@@ -42,6 +42,10 @@ public Token recoverInline(Parser recognizer) throws RecognitionException {
InvalidSyntaxException mkMoreTokensException(Token token) {
SourceLocation sourceLocation = AntlrHelper.createSourceLocation(multiSourceReader, token);
+ if (environment.getParserOptions().isRedactTokenParserErrorMessages()) {
+ return new MoreTokensSyntaxException(environment.getI18N(), sourceLocation);
+ }
+
String sourcePreview = AntlrHelper.createPreview(multiSourceReader, token.getLine());
return new MoreTokensSyntaxException(environment.getI18N(), sourceLocation,
token.getText(), sourcePreview);
@@ -66,7 +70,7 @@ private InvalidSyntaxException mkException(Parser recognizer, RecognitionExcepti
String msgKey;
List args;
SourceLocation location = sourceLocation == null ? SourceLocation.EMPTY : sourceLocation;
- if (offendingToken == null) {
+ if (offendingToken == null || environment.getParserOptions().isRedactTokenParserErrorMessages()) {
msgKey = "InvalidSyntaxBail.noToken";
args = ImmutableList.of(location.getLine(), location.getColumn());
} else {
diff --git a/src/main/java/graphql/parser/MultiSourceReader.java b/src/main/java/graphql/parser/MultiSourceReader.java
index acbcef40e0..fad54b2fa0 100644
--- a/src/main/java/graphql/parser/MultiSourceReader.java
+++ b/src/main/java/graphql/parser/MultiSourceReader.java
@@ -23,6 +23,10 @@
@PublicApi
public class MultiSourceReader extends Reader {
+ // In Java version 16+, LineNumberReader.read considers end-of-stream to be a line terminator
+ // and will increment the line number, whereas in previous versions it doesn't.
+ private static final boolean LINE_NUMBER_READER_EOS_IS_TERMINATOR;
+
private final List sourceParts;
private final StringBuilder data = new StringBuilder();
private int currentIndex = 0;
@@ -30,6 +34,21 @@ public class MultiSourceReader extends Reader {
private final boolean trackData;
private final LockKit.ReentrantLock readerLock = new LockKit.ReentrantLock();
+ static {
+ LINE_NUMBER_READER_EOS_IS_TERMINATOR = lineNumberReaderEOSIsTerminator();
+ }
+
+ private static boolean lineNumberReaderEOSIsTerminator() {
+ LineNumberReader reader = new LineNumberReader(new StringReader("a"));
+ try {
+ reader.read();
+ reader.read();
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ return reader.getLineNumber() > 0;
+ }
+
private MultiSourceReader(Builder builder) {
this.sourceParts = builder.sourceParts;
@@ -46,10 +65,16 @@ public int read(char[] cbuf, int off, int len) throws IOException {
}
SourcePart sourcePart = sourceParts.get(currentIndex);
int read = sourcePart.lineReader.read(cbuf, off, len);
- overallLineNumber = calcLineNumber();
if (read == -1) {
currentIndex++;
- } else {
+ sourcePart.reachedEndOfStream = true;
+ } else if (read > 0) {
+ sourcePart.lastRead = cbuf[off + read - 1];
+ }
+ // note: calcLineNumber() must be called after updating sourcePart.reachedEndOfStream
+ // and sourcePart.lastRead
+ overallLineNumber = calcLineNumber();
+ if (read != -1) {
trackData(cbuf, off, read);
return read;
}
@@ -68,7 +93,7 @@ private void trackData(char[] cbuf, int off, int len) {
private int calcLineNumber() {
int linenumber = 0;
for (SourcePart sourcePart : sourceParts) {
- linenumber += sourcePart.lineReader.getLineNumber();
+ linenumber += sourcePart.getLineNumber();
}
return linenumber;
}
@@ -125,7 +150,7 @@ public SourceAndLine getSourceAndLineFromOverallLine(int overallLineNumber) {
sourceAndLine.sourceName = sourcePart.sourceName;
if (sourcePart == currentPart) {
// we cant go any further
- int partLineNumber = currentPart.lineReader.getLineNumber();
+ int partLineNumber = currentPart.getLineNumber();
previousPage = page;
page += partLineNumber;
if (page > overallLineNumber) {
@@ -136,7 +161,7 @@ public SourceAndLine getSourceAndLineFromOverallLine(int overallLineNumber) {
return sourceAndLine;
} else {
previousPage = page;
- int partLineNumber = sourcePart.lineReader.getLineNumber();
+ int partLineNumber = sourcePart.getLineNumber();
page += partLineNumber;
if (page > overallLineNumber) {
sourceAndLine.line = overallLineNumber - previousPage;
@@ -157,9 +182,9 @@ public int getLineNumber() {
return 0;
}
if (currentIndex >= sourceParts.size()) {
- return sourceParts.get(sourceParts.size() - 1).lineReader.getLineNumber();
+ return sourceParts.get(sourceParts.size() - 1).getLineNumber();
}
- return sourceParts.get(currentIndex).lineReader.getLineNumber();
+ return sourceParts.get(currentIndex).getLineNumber();
});
}
@@ -220,6 +245,24 @@ private static class SourcePart {
String sourceName;
LineNumberReader lineReader;
boolean closed;
+ char lastRead;
+ boolean reachedEndOfStream = false;
+
+ /**
+ * This handles the discrepancy between LineNumberReader.getLineNumber() for Java versions
+ * 16+ vs below. Use this instead of lineReader.getLineNumber() directly.
+ * @return The current line number. EOS is not considered a line terminator.
+ */
+ int getLineNumber() {
+ int lineNumber = lineReader.getLineNumber();
+ if (reachedEndOfStream
+ && LINE_NUMBER_READER_EOS_IS_TERMINATOR
+ && lastRead != '\r'
+ && lastRead != '\n') {
+ return Math.max(lineNumber - 1, 0);
+ }
+ return lineNumber;
+ }
}
diff --git a/src/main/java/graphql/parser/Parser.java b/src/main/java/graphql/parser/Parser.java
index 15a0f5f641..433442b7fb 100644
--- a/src/main/java/graphql/parser/Parser.java
+++ b/src/main/java/graphql/parser/Parser.java
@@ -301,7 +301,7 @@ public void syntaxError(Recognizer, ?> recognizer, Object offendingSymbol, int
String preview = AntlrHelper.createPreview(multiSourceReader, line);
String msgKey;
List args;
- if (antlerMsg == null) {
+ if (antlerMsg == null || environment.getParserOptions().isRedactTokenParserErrorMessages()) {
msgKey = "InvalidSyntax.noMessage";
args = ImmutableList.of(sourceLocation.getLine(), sourceLocation.getColumn());
} else {
diff --git a/src/main/java/graphql/parser/ParserOptions.java b/src/main/java/graphql/parser/ParserOptions.java
index 965e71876d..c3366d6bb6 100644
--- a/src/main/java/graphql/parser/ParserOptions.java
+++ b/src/main/java/graphql/parser/ParserOptions.java
@@ -62,6 +62,7 @@ public class ParserOptions {
.maxTokens(MAX_QUERY_TOKENS) // to prevent a billion laughs style attacks, we set a default for graphql-java
.maxWhitespaceTokens(MAX_WHITESPACE_TOKENS)
.maxRuleDepth(MAX_RULE_DEPTH)
+ .redactTokenParserErrorMessages(false)
.build();
private static ParserOptions defaultJvmOperationParserOptions = newParserOptions()
@@ -73,6 +74,7 @@ public class ParserOptions {
.maxTokens(MAX_QUERY_TOKENS) // to prevent a billion laughs style attacks, we set a default for graphql-java
.maxWhitespaceTokens(MAX_WHITESPACE_TOKENS)
.maxRuleDepth(MAX_RULE_DEPTH)
+ .redactTokenParserErrorMessages(false)
.build();
private static ParserOptions defaultJvmSdlParserOptions = newParserOptions()
@@ -84,6 +86,7 @@ public class ParserOptions {
.maxTokens(Integer.MAX_VALUE) // we are less worried about a billion laughs with SDL parsing since the call path is not facing attackers
.maxWhitespaceTokens(Integer.MAX_VALUE)
.maxRuleDepth(Integer.MAX_VALUE)
+ .redactTokenParserErrorMessages(false)
.build();
/**
@@ -189,6 +192,7 @@ public static void setDefaultSdlParserOptions(ParserOptions options) {
private final int maxTokens;
private final int maxWhitespaceTokens;
private final int maxRuleDepth;
+ private final boolean redactTokenParserErrorMessages;
private final ParsingListener parsingListener;
private ParserOptions(Builder builder) {
@@ -200,6 +204,7 @@ private ParserOptions(Builder builder) {
this.maxTokens = builder.maxTokens;
this.maxWhitespaceTokens = builder.maxWhitespaceTokens;
this.maxRuleDepth = builder.maxRuleDepth;
+ this.redactTokenParserErrorMessages = builder.redactTokenParserErrorMessages;
this.parsingListener = builder.parsingListener;
}
@@ -294,6 +299,14 @@ public int getMaxRuleDepth() {
return maxRuleDepth;
}
+ /**
+ * Option to redact offending tokens in parser error messages.
+ * By default, the parser will include the offending token in the error message, if possible.
+ */
+ public boolean isRedactTokenParserErrorMessages() {
+ return redactTokenParserErrorMessages;
+ }
+
public ParsingListener getParsingListener() {
return parsingListener;
}
@@ -319,6 +332,7 @@ public static class Builder {
private int maxTokens = MAX_QUERY_TOKENS;
private int maxWhitespaceTokens = MAX_WHITESPACE_TOKENS;
private int maxRuleDepth = MAX_RULE_DEPTH;
+ private boolean redactTokenParserErrorMessages = false;
Builder() {
}
@@ -331,6 +345,7 @@ public static class Builder {
this.maxTokens = parserOptions.maxTokens;
this.maxWhitespaceTokens = parserOptions.maxWhitespaceTokens;
this.maxRuleDepth = parserOptions.maxRuleDepth;
+ this.redactTokenParserErrorMessages = parserOptions.redactTokenParserErrorMessages;
this.parsingListener = parserOptions.parsingListener;
}
@@ -374,6 +389,11 @@ public Builder maxRuleDepth(int maxRuleDepth) {
return this;
}
+ public Builder redactTokenParserErrorMessages(boolean redactTokenParserErrorMessages) {
+ this.redactTokenParserErrorMessages = redactTokenParserErrorMessages;
+ return this;
+ }
+
public Builder parsingListener(ParsingListener parsingListener) {
this.parsingListener = assertNotNull(parsingListener);
return this;
diff --git a/src/main/java/graphql/parser/exceptions/MoreTokensSyntaxException.java b/src/main/java/graphql/parser/exceptions/MoreTokensSyntaxException.java
index 6f73b38e4c..a1378b2c85 100644
--- a/src/main/java/graphql/parser/exceptions/MoreTokensSyntaxException.java
+++ b/src/main/java/graphql/parser/exceptions/MoreTokensSyntaxException.java
@@ -14,4 +14,11 @@ public MoreTokensSyntaxException(@NotNull I18n i18N, @NotNull SourceLocation sou
super(i18N.msg("InvalidSyntaxMoreTokens.full", offendingToken, sourceLocation.getLine(), sourceLocation.getColumn()),
sourceLocation, offendingToken, sourcePreview, null);
}
+
+ @Internal
+ public MoreTokensSyntaxException(@NotNull I18n i18N, @NotNull SourceLocation sourceLocation) {
+ super(i18N.msg("InvalidSyntaxMoreTokens.noMessage", sourceLocation.getLine(), sourceLocation.getColumn()),
+ sourceLocation, null, null, null);
+ }
+
}
diff --git a/src/main/java/graphql/schema/diffing/DiffImpl.java b/src/main/java/graphql/schema/diffing/DiffImpl.java
index efff71cdd0..f24eb934a6 100644
--- a/src/main/java/graphql/schema/diffing/DiffImpl.java
+++ b/src/main/java/graphql/schema/diffing/DiffImpl.java
@@ -226,6 +226,10 @@ private void addChildToQueue(int fixedEditorialCost,
Mapping newMapping = parentPartialMapping.extendMapping(v_i, availableTargetVertices.get(assignments[0]));
+ if (costMatrixSum >= Integer.MAX_VALUE && optimalEdit.mapping == null) {
+ throw new RuntimeException("bug: could not find any allowed mapping");
+ }
+
if (lowerBoundForPartialMapping >= optimalEdit.ged) {
return;
}
@@ -440,41 +444,9 @@ private double calcLowerBoundMappingCost(Vertex v,
boolean equalNodes = v.getType().equals(u.getType()) && v.getProperties().equals(u.getProperties());
- Collection adjacentEdgesV = completeSourceGraph.getAdjacentEdgesNonCopy(v);
- Multiset multisetLabelsV = HashMultiset.create();
-
- for (Edge edge : adjacentEdgesV) {
- // test if this is an inner edge (meaning it not part of the subgraph induced by the partial mapping)
- // we know that v is not part of the mapped vertices, therefore we only need to test the "to" vertex
- if (!partialMapping.containsSource(edge.getTo())) {
- multisetLabelsV.add(edge.getLabel());
- }
- }
-
- Collection adjacentEdgesU = completeTargetGraph.getAdjacentEdgesNonCopy(u);
- Multiset multisetLabelsU = HashMultiset.create();
- for (Edge edge : adjacentEdgesU) {
- // test if this is an inner edge (meaning it not part of the subgraph induced by the partial mapping)
- // we know that u is not part of the mapped vertices, therefore we only need to test the "to" vertex
- if (!partialMapping.containsTarget(edge.getTo())) {
- multisetLabelsU.add(edge.getLabel());
- }
- }
-
- int anchoredVerticesCost = calcAnchoredVerticesCost(v, u, partialMapping);
-
- Multiset intersection = Multisets.intersection(multisetLabelsV, multisetLabelsU);
- int multiSetEditDistance = Math.max(multisetLabelsV.size(), multisetLabelsU.size()) - intersection.size();
-
- double result = (equalNodes ? 0 : 1) + multiSetEditDistance + anchoredVerticesCost;
- return result;
- }
-
-
- private int calcAnchoredVerticesCost(Vertex v,
- Vertex u,
- Mapping partialMapping) {
int anchoredVerticesCost = 0;
+ Multiset<String> multisetInnerEdgeLabelsV = HashMultiset.create();
+ Multiset<String> multisetInnerEdgeLabelsU = HashMultiset.create();
Collection<Edge> adjacentEdgesV = completeSourceGraph.getAdjacentEdgesNonCopy(v);
Collection<Edge> adjacentEdgesU = completeTargetGraph.getAdjacentEdgesNonCopy(u);
@@ -485,55 +457,60 @@ private int calcAnchoredVerticesCost(Vertex v,
Set matchedTargetEdges = new LinkedHashSet<>();
Set matchedTargetEdgesInverse = new LinkedHashSet<>();
- outer:
for (Edge edgeV : adjacentEdgesV) {
- // we are only interested in edges from anchored vertices
- if (!partialMapping.containsSource(edgeV.getTo())) {
+
+ Vertex targetTo = partialMapping.getTarget(edgeV.getTo());
+ if (targetTo == null) {
+ // meaning it is an inner edge(not part of the subgraph induced by the partial mapping)
+ multisetInnerEdgeLabelsV.add(edgeV.getLabel());
continue;
}
- for (Edge edgeU : adjacentEdgesU) {
- // looking for an adjacent edge from u matching it
- if (partialMapping.getTarget(edgeV.getTo()) == edgeU.getTo()) {
- matchedTargetEdges.add(edgeU);
- // found two adjacent edges, comparing the labels
- if (!Objects.equals(edgeV.getLabel(), edgeU.getLabel())) {
- anchoredVerticesCost++;
- }
- continue outer;
+ /* question is if the edge from v is mapped onto an edge from u
+ (also edge are not mapped directly, but the vertices are)
+ and if the adjacent edge is mapped onto an adjacent edge,
+ we need to check the labels of the edges
+ */
+ Edge matchedTargetEdge = completeTargetGraph.getEdge(u, targetTo);
+ if (matchedTargetEdge != null) {
+ matchedTargetEdges.add(matchedTargetEdge);
+ if (!Objects.equals(edgeV.getLabel(), matchedTargetEdge.getLabel())) {
+ anchoredVerticesCost++;
}
+ } else {
+// // no matching adjacent edge from u found means there is no
+// // edge from edgeV.getTo() to mapped(edgeV.getTo())
+// // and we need to increase the costs
+ anchoredVerticesCost++;
}
- // no matching adjacent edge from u found means there is no
- // edge from edgeV.getTo() to mapped(edgeV.getTo())
- // and we need to increase the costs
- anchoredVerticesCost++;
}
- outer:
for (Edge edgeV : adjacentEdgesInverseV) {
+
+ Vertex targetFrom = partialMapping.getTarget(edgeV.getFrom());
// we are only interested in edges from anchored vertices
- if (!partialMapping.containsSource(edgeV.getFrom())) {
+ if (targetFrom == null) {
continue;
}
- for (Edge edgeU : adjacentEdgesInverseU) {
- if (partialMapping.getTarget(edgeV.getFrom()) == edgeU.getFrom()) {
- matchedTargetEdgesInverse.add(edgeU);
- if (!Objects.equals(edgeV.getLabel(), edgeU.getLabel())) {
- anchoredVerticesCost++;
- }
- continue outer;
+ Edge matachedTargetEdge = completeTargetGraph.getEdge(targetFrom, u);
+ if (matachedTargetEdge != null) {
+ matchedTargetEdgesInverse.add(matachedTargetEdge);
+ if (!Objects.equals(edgeV.getLabel(), matachedTargetEdge.getLabel())) {
+ anchoredVerticesCost++;
}
+ } else {
+ anchoredVerticesCost++;
}
- anchoredVerticesCost++;
-
}
- /**
- * what is missing now is all edges from u (and inverse), which have not been matched.
- */
for (Edge edgeU : adjacentEdgesU) {
- // we are only interested in edges from anchored vertices
- if (!partialMapping.containsTarget(edgeU.getTo()) || matchedTargetEdges.contains(edgeU)) {
+ // test if this is an inner edge (meaning it not part of the subgraph induced by the partial mapping)
+ // we know that u is not part of the mapped vertices, therefore we only need to test the "to" vertex
+ if (!partialMapping.containsTarget(edgeU.getTo())) {
+ multisetInnerEdgeLabelsU.add(edgeU.getLabel());
+ continue;
+ }
+ if (matchedTargetEdges.contains(edgeU)) {
continue;
}
anchoredVerticesCost++;
@@ -547,7 +524,12 @@ private int calcAnchoredVerticesCost(Vertex v,
anchoredVerticesCost++;
}
- return anchoredVerticesCost;
+
+ Multiset<String> intersectionInnerEdgeLabels = Multisets.intersection(multisetInnerEdgeLabelsV, multisetInnerEdgeLabelsU);
+ int multiSetEditDistanceForInnerEdges = Math.max(multisetInnerEdgeLabelsV.size(), multisetInnerEdgeLabelsU.size()) - intersectionInnerEdgeLabels.size();
+
+ int result = (equalNodes ? 0 : 1) + multiSetEditDistanceForInnerEdges + anchoredVerticesCost;
+ return result;
}
@@ -574,5 +556,4 @@ private double calcLowerBoundMappingCostForIsolated(Vertex vertex,
return 1 + adjacentEdges.size() + anchoredInverseEdges;
}
-
}
diff --git a/src/main/java/graphql/schema/diffing/SchemaDiffing.java b/src/main/java/graphql/schema/diffing/SchemaDiffing.java
index de047bf1fd..b12f8aec5a 100644
--- a/src/main/java/graphql/schema/diffing/SchemaDiffing.java
+++ b/src/main/java/graphql/schema/diffing/SchemaDiffing.java
@@ -96,15 +96,17 @@ private DiffImpl.OptimalEdit diffImpl(SchemaGraph sourceGraph, SchemaGraph targe
Multimaps.invertFrom(possibleMappings.possibleMappings, invertedPossibleOnes);
possibleMappings.possibleMappings = invertedPossibleOnes;
+ sortVertices(nonMappedTarget, targetGraph, possibleMappings);
+
List sourceVertices = new ArrayList<>();
sourceVertices.addAll(possibleMappings.fixedOneToOneSources);
sourceVertices.addAll(nonMappedSource);
+
List targetVertices = new ArrayList<>();
targetVertices.addAll(possibleMappings.fixedOneToOneTargets);
targetVertices.addAll(nonMappedTarget);
- sortVertices(nonMappedTarget, targetGraph, possibleMappings);
DiffImpl diffImpl = new DiffImpl(possibleMappingsCalculator, targetGraph, sourceGraph, possibleMappings, runningCheck);
DiffImpl.OptimalEdit optimalEdit = diffImpl.diffImpl(startMappingInverted, targetVertices, sourceVertices, algoIterationCount);
diff --git a/src/main/java/graphql/schema/idl/ArgValueOfAllowedTypeChecker.java b/src/main/java/graphql/schema/idl/ArgValueOfAllowedTypeChecker.java
index 9dcd16b768..e00a40812f 100644
--- a/src/main/java/graphql/schema/idl/ArgValueOfAllowedTypeChecker.java
+++ b/src/main/java/graphql/schema/idl/ArgValueOfAllowedTypeChecker.java
@@ -31,6 +31,7 @@
import graphql.schema.GraphQLScalarType;
import graphql.schema.idl.errors.DirectiveIllegalArgumentTypeError;
+import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@@ -41,7 +42,6 @@
import static graphql.collect.ImmutableKit.emptyList;
import static graphql.schema.idl.errors.DirectiveIllegalArgumentTypeError.DUPLICATED_KEYS_MESSAGE;
import static graphql.schema.idl.errors.DirectiveIllegalArgumentTypeError.EXPECTED_ENUM_MESSAGE;
-import static graphql.schema.idl.errors.DirectiveIllegalArgumentTypeError.EXPECTED_LIST_MESSAGE;
import static graphql.schema.idl.errors.DirectiveIllegalArgumentTypeError.EXPECTED_NON_NULL_MESSAGE;
import static graphql.schema.idl.errors.DirectiveIllegalArgumentTypeError.EXPECTED_OBJECT_MESSAGE;
import static graphql.schema.idl.errors.DirectiveIllegalArgumentTypeError.MISSING_REQUIRED_FIELD_MESSAGE;
@@ -259,25 +259,24 @@ private void checkArgValueMatchesAllowedNonNullType(List errors, V
}
private void checkArgValueMatchesAllowedListType(List errors, Value> instanceValue, ListType allowedArgType) {
- if (instanceValue instanceof NullValue) {
- return;
+ // From the spec, on input coercion:
+ // If the value passed as an input to a list type is not a list and not the null value,
+ // then the result of input coercion is a list of size one where the single item value
+ // is the result of input coercion for the list’s item type on the provided value
+ // (note this may apply recursively for nested lists).
+
+ Value> coercedInstanceValue = instanceValue;
+ if (!(instanceValue instanceof ArrayValue) && !(instanceValue instanceof NullValue)) {
+ coercedInstanceValue = new ArrayValue(Collections.singletonList(instanceValue));
}
- Type> unwrappedAllowedType = allowedArgType.getType();
- if (!(instanceValue instanceof ArrayValue)) {
- checkArgValueMatchesAllowedType(errors, instanceValue, unwrappedAllowedType);
+ if (coercedInstanceValue instanceof NullValue) {
return;
}
- ArrayValue arrayValue = ((ArrayValue) instanceValue);
- boolean isUnwrappedList = unwrappedAllowedType instanceof ListType;
-
- // validate each instance value in the list, all instances must match for the list to match
+ Type> unwrappedAllowedType = allowedArgType.getType();
+ ArrayValue arrayValue = ((ArrayValue) coercedInstanceValue);
arrayValue.getValues().forEach(value -> {
- // restrictive check for sub-arrays
- if (isUnwrappedList && !(value instanceof ArrayValue)) {
- addValidationError(errors, EXPECTED_LIST_MESSAGE, value.getClass().getSimpleName());
- }
checkArgValueMatchesAllowedType(errors, value, unwrappedAllowedType);
});
}
diff --git a/src/main/java/graphql/schema/idl/errors/DirectiveIllegalArgumentTypeError.java b/src/main/java/graphql/schema/idl/errors/DirectiveIllegalArgumentTypeError.java
index 3097df4ccb..70ef1930e2 100644
--- a/src/main/java/graphql/schema/idl/errors/DirectiveIllegalArgumentTypeError.java
+++ b/src/main/java/graphql/schema/idl/errors/DirectiveIllegalArgumentTypeError.java
@@ -16,7 +16,6 @@ public class DirectiveIllegalArgumentTypeError extends BaseError {
public static final String NOT_A_VALID_SCALAR_LITERAL_MESSAGE = "Argument value is not a valid value of scalar '%s'.";
public static final String MISSING_REQUIRED_FIELD_MESSAGE = "Missing required field '%s'.";
public static final String EXPECTED_NON_NULL_MESSAGE = "Argument value is 'null', expected a non-null value.";
- public static final String EXPECTED_LIST_MESSAGE = "Argument value is '%s', expected a list value.";
public static final String EXPECTED_OBJECT_MESSAGE = "Argument value is of type '%s', expected an Object value.";
public DirectiveIllegalArgumentTypeError(Node element, String elementName, String directiveName, String argumentName, String detailedMessaged) {
diff --git a/src/main/java/graphql/validation/ArgumentValidationUtil.java b/src/main/java/graphql/validation/ArgumentValidationUtil.java
index 18261d4b0d..a378f40b63 100644
--- a/src/main/java/graphql/validation/ArgumentValidationUtil.java
+++ b/src/main/java/graphql/validation/ArgumentValidationUtil.java
@@ -91,6 +91,12 @@ protected void handleFieldNotValidError(Value> value, GraphQLType type, int in
argumentNames.add(0, String.format("[%s]", index));
}
+ @Override
+ protected void handleExtraOneOfFieldsError(GraphQLInputObjectType type, Value> value) {
+ errMsgKey = "ArgumentValidationUtil.extraOneOfFieldsError";
+ arguments.add(type.getName());
+ }
+
public I18nMsg getMsgAndArgs() {
StringBuilder argument = new StringBuilder(argumentName);
for (String name : argumentNames) {
diff --git a/src/main/java/graphql/validation/ValidationUtil.java b/src/main/java/graphql/validation/ValidationUtil.java
index 413b5887fc..1bff274d92 100644
--- a/src/main/java/graphql/validation/ValidationUtil.java
+++ b/src/main/java/graphql/validation/ValidationUtil.java
@@ -3,6 +3,7 @@
import com.google.common.collect.ImmutableSet;
import graphql.Assert;
+import graphql.Directives;
import graphql.GraphQLContext;
import graphql.GraphQLError;
import graphql.Internal;
@@ -77,6 +78,9 @@ protected void handleFieldNotValidError(ObjectField objectField, GraphQLInputObj
protected void handleFieldNotValidError(Value> value, GraphQLType type, int index) {
}
+ protected void handleExtraOneOfFieldsError(GraphQLInputObjectType type, Value> value) {
+ }
+
public boolean isValidLiteralValue(Value> value, GraphQLType type, GraphQLSchema schema, GraphQLContext graphQLContext, Locale locale) {
if (value == null || value instanceof NullValue) {
boolean valid = !(isNonNull(type));
@@ -95,18 +99,18 @@ public boolean isValidLiteralValue(Value> value, GraphQLType type, GraphQLSche
if (type instanceof GraphQLScalarType) {
Optional invalid = parseLiteral(value, ((GraphQLScalarType) type).getCoercing(), graphQLContext, locale);
invalid.ifPresent(graphQLError -> handleScalarError(value, (GraphQLScalarType) type, graphQLError));
- return !invalid.isPresent();
+ return invalid.isEmpty();
}
if (type instanceof GraphQLEnumType) {
Optional invalid = parseLiteralEnum(value, (GraphQLEnumType) type, graphQLContext, locale);
invalid.ifPresent(graphQLError -> handleEnumError(value, (GraphQLEnumType) type, graphQLError));
- return !invalid.isPresent();
+ return invalid.isEmpty();
}
if (isList(type)) {
return isValidLiteralValue(value, (GraphQLList) type, schema, graphQLContext, locale);
}
- return type instanceof GraphQLInputObjectType && isValidLiteralValue(value, (GraphQLInputObjectType) type, schema, graphQLContext, locale);
+ return type instanceof GraphQLInputObjectType && isValidLiteralValueForInputObjectType(value, (GraphQLInputObjectType) type, schema, graphQLContext, locale);
}
@@ -128,7 +132,7 @@ private Optional parseLiteral(Value> value, Coercing, ?> coerc
}
}
- boolean isValidLiteralValue(Value> value, GraphQLInputObjectType type, GraphQLSchema schema, GraphQLContext graphQLContext, Locale locale) {
+ boolean isValidLiteralValueForInputObjectType(Value> value, GraphQLInputObjectType type, GraphQLSchema schema, GraphQLContext graphQLContext, Locale locale) {
if (!(value instanceof ObjectValue)) {
handleNotObjectError(value, type);
return false;
@@ -156,9 +160,16 @@ boolean isValidLiteralValue(Value> value, GraphQLInputObjectType type, GraphQL
}
}
+ if (type.hasAppliedDirective(Directives.OneOfDirective.getName())) {
+ if (objectFieldMap.keySet().size() != 1) {
+ handleExtraOneOfFieldsError(type, value);
+ return false;
+ }
+ }
return true;
}
+
private Set getMissingFields(GraphQLInputObjectType type, Map objectFieldMap, GraphqlFieldVisibility fieldVisibility) {
return fieldVisibility.getFieldDefinitions(type).stream()
.filter(field -> isNonNull(field.getType()))
diff --git a/src/main/resources/i18n/Parsing.properties b/src/main/resources/i18n/Parsing.properties
index 91a7f276f4..474fba1545 100644
--- a/src/main/resources/i18n/Parsing.properties
+++ b/src/main/resources/i18n/Parsing.properties
@@ -16,6 +16,7 @@ InvalidSyntax.full=Invalid syntax with ANTLR error ''{0}'' at line {1} column {2
InvalidSyntaxBail.noToken=Invalid syntax at line {0} column {1}
InvalidSyntaxBail.full=Invalid syntax with offending token ''{0}'' at line {1} column {2}
#
+InvalidSyntaxMoreTokens.noMessage=Invalid syntax encountered. There are extra tokens in the text that have not been consumed. Offending token at line {0} column {1}
InvalidSyntaxMoreTokens.full=Invalid syntax encountered. There are extra tokens in the text that have not been consumed. Offending token ''{0}'' at line {1} column {2}
#
ParseCancelled.full=More than {0} ''{1}'' tokens have been presented. To prevent Denial Of Service attacks, parsing has been cancelled.
diff --git a/src/main/resources/i18n/Parsing_de.properties b/src/main/resources/i18n/Parsing_de.properties
index 2d3c43f777..55127ff689 100644
--- a/src/main/resources/i18n/Parsing_de.properties
+++ b/src/main/resources/i18n/Parsing_de.properties
@@ -16,6 +16,7 @@ InvalidSyntax.full=Ungültige Syntax, ANTLR-Fehler ''{0}'' in Zeile {1} Spalte {
InvalidSyntaxBail.noToken=Ungültige Syntax in Zeile {0} Spalte {1}
InvalidSyntaxBail.full=Ungültige Syntax wegen des ungültigen Tokens ''{0}'' in Zeile {1} Spalte {2}
#
+InvalidSyntaxMoreTokens.noMessage=Es wurde eine ungültige Syntax festgestellt. Es gibt zusätzliche Token im Text, die nicht konsumiert wurden. Ungültiges Token in Zeile {0} Spalte {1}
InvalidSyntaxMoreTokens.full=Es wurde eine ungültige Syntax festgestellt. Es gibt zusätzliche Token im Text, die nicht konsumiert wurden. Ungültiges Token ''{0}'' in Zeile {1} Spalte {2}
#
ParseCancelled.full=Es wurden mehr als {0} ''{1}'' Token präsentiert. Um Denial-of-Service-Angriffe zu verhindern, wurde das Parsing abgebrochen.
diff --git a/src/main/resources/i18n/Parsing_nl.properties b/src/main/resources/i18n/Parsing_nl.properties
index dfa099a91a..cfd8457825 100644
--- a/src/main/resources/i18n/Parsing_nl.properties
+++ b/src/main/resources/i18n/Parsing_nl.properties
@@ -15,6 +15,7 @@ InvalidSyntax.full=Ongeldige syntaxis, ANTLR foutmelding ''{0}'' op lijn {1} kol
InvalidSyntaxBail.noToken=Ongeldige syntaxis op lijn {0} kolom {1}
InvalidSyntaxBail.full=Ongeldige syntaxis wegens ongeldige token ''{0}'' op lijn {1} kolom {2}
#
+InvalidSyntaxMoreTokens.noMessage=Ongeldige syntaxis tegengekomen. Er zijn tokens in de tekst die niet zijn verwerkt. Ongeldige token op lijn {0} kolom {1}
InvalidSyntaxMoreTokens.full=Ongeldige syntaxis tegengekomen. Er zijn tokens in de tekst die niet zijn verwerkt. Ongeldige token ''{0}'' op lijn {1} kolom {2}
#
ParseCancelled.full=Meer dan {0} ''{1}'' tokens zijn gepresenteerd. Om een DDoS-aanval te voorkomen is het parsen gestopt.
diff --git a/src/main/resources/i18n/Validation.properties b/src/main/resources/i18n/Validation.properties
index c12920da07..4f5c42ab25 100644
--- a/src/main/resources/i18n/Validation.properties
+++ b/src/main/resources/i18n/Validation.properties
@@ -106,4 +106,6 @@ ArgumentValidationUtil.handleNotObjectError=Validation error ({0}) : argument ''
ArgumentValidationUtil.handleMissingFieldsError=Validation error ({0}) : argument ''{1}'' with value ''{2}'' is missing required fields ''{3}''
# suppress inspection "UnusedProperty"
ArgumentValidationUtil.handleExtraFieldError=Validation error ({0}) : argument ''{1}'' with value ''{2}'' contains a field not in ''{3}'': ''{4}''
-
+# suppress inspection "UnusedProperty"
+# suppress inspection "UnusedMessageFormatParameter"
+ArgumentValidationUtil.extraOneOfFieldsError=Validation error ({0}) : Exactly one key must be specified for OneOf type ''{3}''.
\ No newline at end of file
diff --git a/src/test/groovy/graphql/TestUtil.groovy b/src/test/groovy/graphql/TestUtil.groovy
index 490e7cee93..dea2c2c7ce 100644
--- a/src/test/groovy/graphql/TestUtil.groovy
+++ b/src/test/groovy/graphql/TestUtil.groovy
@@ -316,4 +316,11 @@ class TestUtil {
return JsonOutput.prettyPrint(JsonOutput.toJson(obj))
}
+
+ static Random rn = new Random()
+
+ static int rand(int min, int max) {
+ return rn.nextInt(max - min + 1) + min
+ }
+
}
diff --git a/src/test/groovy/graphql/analysis/QueryTransformerTest.groovy b/src/test/groovy/graphql/analysis/QueryTransformerTest.groovy
index 9425a32162..2ce312c7ce 100644
--- a/src/test/groovy/graphql/analysis/QueryTransformerTest.groovy
+++ b/src/test/groovy/graphql/analysis/QueryTransformerTest.groovy
@@ -1,6 +1,7 @@
package graphql.analysis
import graphql.TestUtil
+import graphql.execution.CoercedVariables
import graphql.language.Document
import graphql.language.Field
import graphql.language.NodeUtil
@@ -448,4 +449,60 @@ class QueryTransformerTest extends Specification {
printAstCompact(newNode) == "{__typename ...on A{aX}...on B{b}}"
}
+
+ def "can coerce field arguments or not"() {
+ def sdl = """
+ input Test{ x: String!}
+ type Query{ testInput(input: Test!): String}
+ type Mutation{ testInput(input: Test!): String}
+ """
+
+ def schema = TestUtil.schema(sdl)
+
+ def query = createQuery('''
+ mutation a($test: Test!) {
+ testInput(input: $test)
+ }''')
+
+
+ def fieldArgMap = [:]
+ def queryVisitorStub = new QueryVisitorStub() {
+ @Override
+ void visitField(QueryVisitorFieldEnvironment queryVisitorFieldEnvironment) {
+ super.visitField(queryVisitorFieldEnvironment)
+ fieldArgMap = queryVisitorFieldEnvironment.getArguments()
+ }
+ }
+
+ when:
+ QueryTraverser.newQueryTraverser()
+ .schema(schema)
+ .document(query)
+ .coercedVariables(CoercedVariables.of([test: [x: "X"]]))
+ .build()
+ .visitPreOrder(queryVisitorStub)
+
+ then:
+
+ fieldArgMap == [input: [x:"X"]]
+
+ when:
+ fieldArgMap = null
+
+
+ def options = QueryTraversalOptions.defaultOptions()
+ .coerceFieldArguments(false)
+ QueryTraverser.newQueryTraverser()
+ .schema(schema)
+ .document(query)
+ .coercedVariables(CoercedVariables.of([test: [x: "X"]]))
+ .options(options)
+ .build()
+ .visitPreOrder(queryVisitorStub)
+
+
+ then:
+ fieldArgMap == [:] // empty map
+ }
+
}
diff --git a/src/test/groovy/graphql/analysis/QueryTraversalOptionsTest.groovy b/src/test/groovy/graphql/analysis/QueryTraversalOptionsTest.groovy
new file mode 100644
index 0000000000..379c58e820
--- /dev/null
+++ b/src/test/groovy/graphql/analysis/QueryTraversalOptionsTest.groovy
@@ -0,0 +1,20 @@
+package graphql.analysis
+
+import spock.lang.Specification
+
+class QueryTraversalOptionsTest extends Specification {
+
+ def "defaulting works as expected"() {
+ when:
+ def options = QueryTraversalOptions.defaultOptions()
+
+ then:
+ options.isCoerceFieldArguments()
+
+ when:
+ options = QueryTraversalOptions.defaultOptions().coerceFieldArguments(false)
+
+ then:
+ !options.isCoerceFieldArguments()
+ }
+}
diff --git a/src/test/groovy/graphql/analysis/QueryTraverserTest.groovy b/src/test/groovy/graphql/analysis/QueryTraverserTest.groovy
index 03a40919ca..dbe0b33384 100644
--- a/src/test/groovy/graphql/analysis/QueryTraverserTest.groovy
+++ b/src/test/groovy/graphql/analysis/QueryTraverserTest.groovy
@@ -1907,4 +1907,59 @@ class QueryTraverserTest extends Specification {
then: "it should not be visited"
0 * visitor.visitField(_)
}
+
+ def "can coerce field arguments or not"() {
+ def sdl = """
+ input Test{ x: String!}
+ type Query{ testInput(input: Test!): String}
+ type Mutation{ testInput(input: Test!): String}
+ """
+
+ def schema = TestUtil.schema(sdl)
+
+ def query = createQuery('''
+ mutation a($test: Test!) {
+ testInput(input: $test)
+ }''')
+
+
+ def fieldArgMap = [:]
+ def queryVisitorStub = new QueryVisitorStub() {
+ @Override
+ void visitField(QueryVisitorFieldEnvironment queryVisitorFieldEnvironment) {
+ super.visitField(queryVisitorFieldEnvironment)
+ fieldArgMap = queryVisitorFieldEnvironment.getArguments()
+ }
+ }
+
+ when:
+ QueryTraverser.newQueryTraverser()
+ .schema(schema)
+ .document(query)
+ .coercedVariables(CoercedVariables.of([test: [x: "X"]]))
+ .build()
+ .visitPreOrder(queryVisitorStub)
+
+ then:
+
+ fieldArgMap == [input: [x:"X"]]
+
+ when:
+ fieldArgMap = null
+
+
+ def options = QueryTraversalOptions.defaultOptions()
+ .coerceFieldArguments(false)
+ QueryTraverser.newQueryTraverser()
+ .schema(schema)
+ .document(query)
+ .coercedVariables(CoercedVariables.of([test: [x: "X"]]))
+ .options(options)
+ .build()
+ .visitPreOrder(queryVisitorStub)
+
+
+ then:
+ fieldArgMap == [:] // empty map
+ }
}
diff --git a/src/test/groovy/graphql/execution/ExecutionContextBuilderTest.groovy b/src/test/groovy/graphql/execution/ExecutionContextBuilderTest.groovy
index 210c442989..6fdbeef3f4 100644
--- a/src/test/groovy/graphql/execution/ExecutionContextBuilderTest.groovy
+++ b/src/test/groovy/graphql/execution/ExecutionContextBuilderTest.groovy
@@ -25,24 +25,26 @@ class ExecutionContextBuilderTest extends Specification {
def operation = document.definitions[0] as OperationDefinition
def fragment = document.definitions[1] as FragmentDefinition
def dataLoaderRegistry = new DataLoaderRegistry()
+ def mockDataLoaderDispatcherStrategy = Mock(DataLoaderDispatchStrategy)
def "builds the correct ExecutionContext"() {
when:
def executionContext = new ExecutionContextBuilder()
- .instrumentation(instrumentation)
- .queryStrategy(queryStrategy)
- .mutationStrategy(mutationStrategy)
- .subscriptionStrategy(subscriptionStrategy)
- .graphQLSchema(schema)
- .executionId(executionId)
- .context(context) // Retain deprecated builder for test coverage
- .graphQLContext(graphQLContext)
- .root(root)
- .operationDefinition(operation)
- .fragmentsByName([MyFragment: fragment])
- .variables([var: 'value']) // Retain deprecated builder for test coverage
- .dataLoaderRegistry(dataLoaderRegistry)
- .build()
+ .instrumentation(instrumentation)
+ .queryStrategy(queryStrategy)
+ .mutationStrategy(mutationStrategy)
+ .subscriptionStrategy(subscriptionStrategy)
+ .graphQLSchema(schema)
+ .executionId(executionId)
+ .context(context) // Retain deprecated builder for test coverage
+ .graphQLContext(graphQLContext)
+ .root(root)
+ .operationDefinition(operation)
+ .fragmentsByName([MyFragment: fragment])
+ .variables([var: 'value']) // Retain deprecated builder for test coverage
+ .dataLoaderRegistry(dataLoaderRegistry)
+ .dataLoaderDispatcherStrategy(mockDataLoaderDispatcherStrategy)
+ .build()
then:
executionContext.executionId == executionId
@@ -58,6 +60,7 @@ class ExecutionContextBuilderTest extends Specification {
executionContext.getFragmentsByName() == [MyFragment: fragment]
executionContext.operationDefinition == operation
executionContext.dataLoaderRegistry == dataLoaderRegistry
+ executionContext.dataLoaderDispatcherStrategy == mockDataLoaderDispatcherStrategy
}
def "builds the correct ExecutionContext with coerced variables"() {
@@ -66,19 +69,19 @@ class ExecutionContextBuilderTest extends Specification {
when:
def executionContext = new ExecutionContextBuilder()
- .instrumentation(instrumentation)
- .queryStrategy(queryStrategy)
- .mutationStrategy(mutationStrategy)
- .subscriptionStrategy(subscriptionStrategy)
- .graphQLSchema(schema)
- .executionId(executionId)
- .graphQLContext(graphQLContext)
- .root(root)
- .operationDefinition(operation)
- .fragmentsByName([MyFragment: fragment])
- .coercedVariables(coercedVariables)
- .dataLoaderRegistry(dataLoaderRegistry)
- .build()
+ .instrumentation(instrumentation)
+ .queryStrategy(queryStrategy)
+ .mutationStrategy(mutationStrategy)
+ .subscriptionStrategy(subscriptionStrategy)
+ .graphQLSchema(schema)
+ .executionId(executionId)
+ .graphQLContext(graphQLContext)
+ .root(root)
+ .operationDefinition(operation)
+ .fragmentsByName([MyFragment: fragment])
+ .coercedVariables(coercedVariables)
+ .dataLoaderRegistry(dataLoaderRegistry)
+ .build()
then:
executionContext.executionId == executionId
@@ -205,4 +208,32 @@ class ExecutionContextBuilderTest extends Specification {
executionContext.operationDefinition == operation
executionContext.dataLoaderRegistry == dataLoaderRegistry
}
+
+ def "transform copies dispatcher"() {
+ given:
+ def oldCoercedVariables = CoercedVariables.emptyVariables()
+ def executionContextOld = new ExecutionContextBuilder()
+ .instrumentation(instrumentation)
+ .queryStrategy(queryStrategy)
+ .mutationStrategy(mutationStrategy)
+ .subscriptionStrategy(subscriptionStrategy)
+ .graphQLSchema(schema)
+ .executionId(executionId)
+ .graphQLContext(graphQLContext)
+ .root(root)
+ .operationDefinition(operation)
+ .coercedVariables(oldCoercedVariables)
+ .fragmentsByName([MyFragment: fragment])
+ .dataLoaderRegistry(dataLoaderRegistry)
+ .dataLoaderDispatcherStrategy(DataLoaderDispatchStrategy.NO_OP)
+ .build()
+
+ when:
+ def executionContext = executionContextOld
+ .transform(builder -> builder
+ .dataLoaderDispatcherStrategy(mockDataLoaderDispatcherStrategy))
+
+ then:
+ executionContext.getDataLoaderDispatcherStrategy() == mockDataLoaderDispatcherStrategy
+ }
}
diff --git a/src/test/groovy/graphql/execution/NonNullableFieldValidatorTest.groovy b/src/test/groovy/graphql/execution/NonNullableFieldValidatorTest.groovy
index 0a39bddd36..b288faff07 100644
--- a/src/test/groovy/graphql/execution/NonNullableFieldValidatorTest.groovy
+++ b/src/test/groovy/graphql/execution/NonNullableFieldValidatorTest.groovy
@@ -9,13 +9,17 @@ class NonNullableFieldValidatorTest extends Specification {
ExecutionContext context = Mock(ExecutionContext)
+ def parameters = Mock(ExecutionStrategyParameters) {
+ getPath() >> ResultPath.rootPath()
+ }
+
def "non nullable field throws exception"() {
ExecutionStepInfo typeInfo = ExecutionStepInfo.newExecutionStepInfo().type(nonNull(GraphQLString)).build()
NonNullableFieldValidator validator = new NonNullableFieldValidator(context, typeInfo)
when:
- validator.validate(ResultPath.rootPath(), null)
+ validator.validate(parameters, null)
then:
thrown(NonNullableFieldWasNullException)
@@ -28,7 +32,7 @@ class NonNullableFieldValidatorTest extends Specification {
NonNullableFieldValidator validator = new NonNullableFieldValidator(context, typeInfo)
when:
- def result = validator.validate(ResultPath.rootPath(), null)
+ def result = validator.validate(parameters, null)
then:
result == null
diff --git a/src/test/groovy/graphql/execution/SubscriptionExecutionStrategyTest.groovy b/src/test/groovy/graphql/execution/SubscriptionExecutionStrategyTest.groovy
index c6e0f5a52b..3d7bb28086 100644
--- a/src/test/groovy/graphql/execution/SubscriptionExecutionStrategyTest.groovy
+++ b/src/test/groovy/graphql/execution/SubscriptionExecutionStrategyTest.groovy
@@ -628,4 +628,97 @@ class SubscriptionExecutionStrategyTest extends Specification {
instrumentResultCalls.size() == 11 // one for the initial execution and then one for each stream event
}
+ def "emits results in the order they complete"() {
+ List promises = []
+ Publisher publisher = new RxJavaMessagePublisher(10)
+
+ DataFetcher newMessageDF = { env -> return publisher }
+ DataFetcher senderDF = dfThatDoesNotComplete("sender", promises)
+ DataFetcher textDF = PropertyDataFetcher.fetching("text")
+
+ GraphQL graphQL = buildSubscriptionQL(newMessageDF, senderDF, textDF)
+
+ def executionInput = ExecutionInput.newExecutionInput().query("""
+ subscription NewMessages {
+ newMessage(roomId: 123) {
+ sender
+ text
+ }
+ }
+ """).build()
+
+ def executionResult = graphQL.execute(executionInput)
+
+ when:
+ Publisher msgStream = executionResult.getData()
+ def capturingSubscriber = new CapturingSubscriber(100)
+ msgStream.subscribe(capturingSubscriber)
+
+ // make them all complete but in reverse order
+ promises.reverse().forEach { it.run() }
+
+ then:
+ Awaitility.await().untilTrue(capturingSubscriber.isDone())
+
+ // in order they completed - which was reversed
+ def messages = capturingSubscriber.events
+ messages.size() == 10
+ for (int i = 0, j = messages.size() - 1; i < messages.size(); i++, j--) {
+ def message = messages[i].data
+ assert message == ["newMessage": [sender: "sender" + j, text: "text" + j]]
+ }
+ }
+
+ def "emits results in the order they were emitted by source"() {
+ List promises = []
+ Publisher publisher = new RxJavaMessagePublisher(10)
+
+ DataFetcher newMessageDF = { env -> return publisher }
+ DataFetcher senderDF = dfThatDoesNotComplete("sender", promises)
+ DataFetcher textDF = PropertyDataFetcher.fetching("text")
+
+ GraphQL graphQL = buildSubscriptionQL(newMessageDF, senderDF, textDF)
+
+ def executionInput = ExecutionInput.newExecutionInput().query("""
+ subscription NewMessages {
+ newMessage(roomId: 123) {
+ sender
+ text
+ }
+ }
+ """).graphQLContext([(SubscriptionExecutionStrategy.KEEP_SUBSCRIPTION_EVENTS_ORDERED): true]).build()
+
+ def executionResult = graphQL.execute(executionInput)
+
+ when:
+ Publisher msgStream = executionResult.getData()
+ def capturingSubscriber = new CapturingSubscriber(100)
+ msgStream.subscribe(capturingSubscriber)
+
+ // make them all complete but in reverse order
+ promises.reverse().forEach { it.run() }
+
+ then:
+ Awaitility.await().untilTrue(capturingSubscriber.isDone())
+
+ // in order they were emitted originally - they have been buffered
+ def messages = capturingSubscriber.events
+ messages.size() == 10
+ for (int i = 0; i < messages.size(); i++) {
+ def message = messages[i].data
+ assert message == ["newMessage": [sender: "sender" + i, text: "text" + i]]
+ }
+ }
+
+ private static DataFetcher> dfThatDoesNotComplete(String propertyName, List promises) {
+ { env ->
+ def df = PropertyDataFetcher.fetching(propertyName)
+ def value = df.get(env)
+
+ def cf = new CompletableFuture()
+ promises.add({ cf.complete(value) })
+ return cf
+ }
+ }
+
}
diff --git a/src/test/groovy/graphql/execution/ValuesResolverTest.groovy b/src/test/groovy/graphql/execution/ValuesResolverTest.groovy
index 1a7aa22b3e..97af997047 100644
--- a/src/test/groovy/graphql/execution/ValuesResolverTest.groovy
+++ b/src/test/groovy/graphql/execution/ValuesResolverTest.groovy
@@ -23,9 +23,11 @@ import graphql.language.Value
import graphql.language.VariableDefinition
import graphql.language.VariableReference
import graphql.schema.CoercingParseValueException
+import graphql.schema.DataFetcher
import spock.lang.Specification
import spock.lang.Unroll
+import static graphql.ExecutionInput.newExecutionInput
import static graphql.Scalars.GraphQLBoolean
import static graphql.Scalars.GraphQLFloat
import static graphql.Scalars.GraphQLInt
@@ -88,6 +90,72 @@ class ValuesResolverTest extends Specification {
[name: 'x'] || [name: 'x']
}
+ def "getVariableValues: @oneOf map object as variable input"() {
+ given:
+ def aField = newInputObjectField()
+ .name("a")
+ .type(GraphQLString)
+ def bField = newInputObjectField()
+ .name("b")
+ .type(GraphQLString)
+ def inputType = newInputObject()
+ .name("Person")
+ .withAppliedDirective(Directives.OneOfDirective.toAppliedDirective())
+ .field(aField)
+ .field(bField)
+ .build()
+ def schema = TestUtil.schemaWithInputType(inputType)
+ VariableDefinition variableDefinition = new VariableDefinition("variable", new TypeName("Person"))
+
+ when:
+ def resolvedValues = ValuesResolver.coerceVariableValues(schema, [variableDefinition], RawVariables.of([variable: [a: 'x']]), graphQLContext, locale)
+ then:
+ resolvedValues.get('variable') == [a: 'x']
+
+ when:
+ resolvedValues = ValuesResolver.coerceVariableValues(schema, [variableDefinition], RawVariables.of([variable: [b: 'y']]), graphQLContext, locale)
+ then:
+ resolvedValues.get('variable') == [b: 'y']
+
+ when:
+ ValuesResolver.coerceVariableValues(schema, [variableDefinition], RawVariables.of([variable: [a: 'x', b: 'y']]), graphQLContext, locale)
+ then:
+ thrown(OneOfTooManyKeysException.class)
+ }
+
+ def "can validate inner input oneOf fields"() {
+ //
+ // a test from https://github.com/graphql-java/graphql-java/issues/3572
+ //
+ def sdl = '''
+ input OneOf @oneOf { a: Int, b: Int }
+ type Outer { inner(oneof: OneOf!): Boolean }
+ type Query { outer: Outer }
+ '''
+
+ DataFetcher outer = { env -> return null }
+ def graphQL = TestUtil.graphQL(sdl, [Query: [outer: outer]]).build()
+
+ def query = '''
+ query ($oneof: OneOf!) {
+ outer {
+ # these variables are never accessed by a data fetcher because
+ # Query.outer always returns null
+ inner(oneof: $oneof)
+ }
+ }
+ '''
+
+ when:
+ def er = graphQL.execute(
+ newExecutionInput(query).variables([oneof: [a: 2, b: 1]])
+ )
+
+ then:
+ er.errors.size() == 1
+ er.errors[0].message == "Exactly one key must be specified for OneOf type 'OneOf'."
+ }
+
class Person {
def name = ""
@@ -460,59 +528,59 @@ class ValuesResolverTest extends Specification {
e.message == "Exactly one key must be specified for OneOf type 'OneOfInputObject'."
where:
- testCase | inputValue | variables
+ testCase | inputValue | variables
'{oneOfField: {a: "abc", b: 123} } {}' | buildObjectLiteral([
oneOfField: [
a: StringValue.of("abc"),
b: IntValue.of(123)
]
- ]) | CoercedVariables.emptyVariables()
+ ]) | CoercedVariables.emptyVariables()
'{oneOfField: {a: null, b: 123 }} {}' | buildObjectLiteral([
oneOfField: [
a: NullValue.of(),
b: IntValue.of(123)
]
- ]) | CoercedVariables.emptyVariables()
+ ]) | CoercedVariables.emptyVariables()
'{oneOfField: {a: $var, b: 123 }} { var: null }' | buildObjectLiteral([
oneOfField: [
a: VariableReference.of("var"),
b: IntValue.of(123)
]
- ]) | CoercedVariables.of(["var": null])
+ ]) | CoercedVariables.of(["var": null])
'{oneOfField: {a: $var, b: 123 }} {}' | buildObjectLiteral([
oneOfField: [
a: VariableReference.of("var"),
b: IntValue.of(123)
]
- ]) | CoercedVariables.emptyVariables()
+ ]) | CoercedVariables.emptyVariables()
'{oneOfField: {a : "abc", b : null}} {}' | buildObjectLiteral([
oneOfField: [
a: StringValue.of("abc"),
b: NullValue.of()
]
- ]) | CoercedVariables.emptyVariables()
+ ]) | CoercedVariables.emptyVariables()
'{oneOfField: {a : null, b : null}} {}' | buildObjectLiteral([
oneOfField: [
a: NullValue.of(),
b: NullValue.of()
]
- ]) | CoercedVariables.emptyVariables()
+ ]) | CoercedVariables.emptyVariables()
'{oneOfField: {a : $a, b : $b}} {a : "abc"}' | buildObjectLiteral([
oneOfField: [
a: VariableReference.of("a"),
b: VariableReference.of("v")
]
- ]) | CoercedVariables.of(["a": "abc"])
+ ]) | CoercedVariables.of(["a": "abc"])
'$var {var : {oneOfField: { a : "abc", b : 123}}}' | VariableReference.of("var")
- | CoercedVariables.of(["var": ["oneOfField": ["a": "abc", "b": 123]]])
+ | CoercedVariables.of(["var": ["oneOfField": ["a": "abc", "b": 123]]])
- '$var {var : {oneOfField: {} }}' | VariableReference.of("var")
- | CoercedVariables.of(["var": ["oneOfField": [:]]])
+ '$var {var : {oneOfField: {} }}' | VariableReference.of("var")
+ | CoercedVariables.of(["var": ["oneOfField": [:]]])
}
@@ -600,7 +668,7 @@ class ValuesResolverTest extends Specification {
a: VariableReference.of("var")
]) | CoercedVariables.of(["var": null])
- '`{ a: $var }` { }' | buildObjectLiteral([
+ '`{ a: $var }` { }' | buildObjectLiteral([
a: VariableReference.of("var")
]) | CoercedVariables.emptyVariables()
}
@@ -631,38 +699,38 @@ class ValuesResolverTest extends Specification {
where:
- testCase | inputArray | variables
+ testCase | inputArray | variables
'[{ a: "abc", b: 123 }]'
- | ArrayValue.newArrayValue()
- .value(buildObjectLiteral([
- a: StringValue.of("abc"),
- b: IntValue.of(123)
- ])).build()
- | CoercedVariables.emptyVariables()
+ | ArrayValue.newArrayValue()
+ .value(buildObjectLiteral([
+ a: StringValue.of("abc"),
+ b: IntValue.of(123)
+ ])).build()
+ | CoercedVariables.emptyVariables()
'[{ a: "abc" }, { a: "xyz", b: 789 }]'
- | ArrayValue.newArrayValue()
- .values([
- buildObjectLiteral([
+ | ArrayValue.newArrayValue()
+ .values([
+ buildObjectLiteral([
a: StringValue.of("abc")
- ]),
- buildObjectLiteral([
+ ]),
+ buildObjectLiteral([
a: StringValue.of("xyz"),
b: IntValue.of(789)
- ]),
- ]).build()
- | CoercedVariables.emptyVariables()
+ ]),
+ ]).build()
+ | CoercedVariables.emptyVariables()
'[{ a: "abc" }, $var ] [{ a: "abc" }, { a: "xyz", b: 789 }]'
- | ArrayValue.newArrayValue()
- .values([
- buildObjectLiteral([
+ | ArrayValue.newArrayValue()
+ .values([
+ buildObjectLiteral([
a: StringValue.of("abc")
- ]),
- VariableReference.of("var")
- ]).build()
- | CoercedVariables.of("var": [a: "xyz", b: 789])
+ ]),
+ VariableReference.of("var")
+ ]).build()
+ | CoercedVariables.of("var": [a: "xyz", b: 789])
}
@@ -692,31 +760,31 @@ class ValuesResolverTest extends Specification {
where:
- testCase | inputArray | variables
+ testCase | inputArray | variables
'[{ a: "abc" }, { a: null }]'
- | ArrayValue.newArrayValue()
- .values([
- buildObjectLiteral([
+ | ArrayValue.newArrayValue()
+ .values([
+ buildObjectLiteral([
a: StringValue.of("abc")
- ]),
- buildObjectLiteral([
+ ]),
+ buildObjectLiteral([
a: NullValue.of()
- ]),
- ]).build()
- | CoercedVariables.emptyVariables()
+ ]),
+ ]).build()
+ | CoercedVariables.emptyVariables()
'[{ a: "abc" }, { a: $var }] [{ a: "abc" }, { a: null }]'
- | ArrayValue.newArrayValue()
- .values([
- buildObjectLiteral([
+ | ArrayValue.newArrayValue()
+ .values([
+ buildObjectLiteral([
a: StringValue.of("abc")
- ]),
- buildObjectLiteral([
+ ]),
+ buildObjectLiteral([
a: VariableReference.of("var")
- ]),
- ]).build()
- | CoercedVariables.of("var": null)
+ ]),
+ ]).build()
+ | CoercedVariables.of("var": null)
}
@@ -746,38 +814,38 @@ class ValuesResolverTest extends Specification {
where:
- testCase | inputArray | variables
+ testCase | inputArray | variables
'[{ a: "abc", b: 123 }]'
- | ArrayValue.newArrayValue()
- .value(buildObjectLiteral([
- a: StringValue.of("abc"),
- b: IntValue.of(123)
- ])).build()
- | CoercedVariables.emptyVariables()
+ | ArrayValue.newArrayValue()
+ .value(buildObjectLiteral([
+ a: StringValue.of("abc"),
+ b: IntValue.of(123)
+ ])).build()
+ | CoercedVariables.emptyVariables()
'[{ a: "abc" }, { a: "xyz", b: 789 }]'
- | ArrayValue.newArrayValue()
- .values([
- buildObjectLiteral([
+ | ArrayValue.newArrayValue()
+ .values([
+ buildObjectLiteral([
a: StringValue.of("abc")
- ]),
- buildObjectLiteral([
+ ]),
+ buildObjectLiteral([
a: StringValue.of("xyz"),
b: IntValue.of(789)
- ]),
- ]).build()
- | CoercedVariables.emptyVariables()
+ ]),
+ ]).build()
+ | CoercedVariables.emptyVariables()
'[{ a: "abc" }, $var ] [{ a: "abc" }, { a: "xyz", b: 789 }]'
- | ArrayValue.newArrayValue()
- .values([
- buildObjectLiteral([
+ | ArrayValue.newArrayValue()
+ .values([
+ buildObjectLiteral([
a: StringValue.of("abc")
- ]),
- VariableReference.of("var")
- ]).build()
- | CoercedVariables.of("var": [a: "xyz", b: 789])
+ ]),
+ VariableReference.of("var")
+ ]).build()
+ | CoercedVariables.of("var": [a: "xyz", b: 789])
}
@@ -807,38 +875,38 @@ class ValuesResolverTest extends Specification {
where:
- testCase | inputArray | variables
+ testCase | inputArray | variables
'[{ a: "abc", b: 123 }]'
- | ArrayValue.newArrayValue()
- .value(buildObjectLiteral([
- a: StringValue.of("abc"),
- b: IntValue.of(123)
- ])).build()
- | CoercedVariables.emptyVariables()
+ | ArrayValue.newArrayValue()
+ .value(buildObjectLiteral([
+ a: StringValue.of("abc"),
+ b: IntValue.of(123)
+ ])).build()
+ | CoercedVariables.emptyVariables()
'[{ a: "abc" }, { a: "xyz", b: 789 }]'
- | ArrayValue.newArrayValue()
- .values([
- buildObjectLiteral([
+ | ArrayValue.newArrayValue()
+ .values([
+ buildObjectLiteral([
a: StringValue.of("abc")
- ]),
- buildObjectLiteral([
+ ]),
+ buildObjectLiteral([
a: StringValue.of("xyz"),
b: IntValue.of(789)
- ]),
- ]).build()
- | CoercedVariables.emptyVariables()
+ ]),
+ ]).build()
+ | CoercedVariables.emptyVariables()
'[{ a: "abc" }, $var ] [{ a: "abc" }, { a: "xyz", b: 789 }]'
- | ArrayValue.newArrayValue()
- .values([
- buildObjectLiteral([
+ | ArrayValue.newArrayValue()
+ .values([
+ buildObjectLiteral([
a: StringValue.of("abc")
- ]),
- VariableReference.of("var")
- ]).build()
- | CoercedVariables.of("var": [a: "xyz", b: 789])
+ ]),
+ VariableReference.of("var")
+ ]).build()
+ | CoercedVariables.of("var": [a: "xyz", b: 789])
}
@@ -915,26 +983,26 @@ class ValuesResolverTest extends Specification {
where:
- testCase | inputArray | variables | expectedValues
+ testCase | inputArray | variables | expectedValues
'[{ a: "abc"}]'
- | ArrayValue.newArrayValue()
- .value(buildObjectLiteral([
- a: StringValue.of("abc"),
- ])).build()
- | CoercedVariables.emptyVariables()
- | [arg: [[a: "abc"]]]
+ | ArrayValue.newArrayValue()
+ .value(buildObjectLiteral([
+ a: StringValue.of("abc"),
+ ])).build()
+ | CoercedVariables.emptyVariables()
+ | [arg: [[a: "abc"]]]
'[{ a: "abc" }, $var ] [{ a: "abc" }, { b: 789 }]'
- | ArrayValue.newArrayValue()
- .values([
- buildObjectLiteral([
+ | ArrayValue.newArrayValue()
+ .values([
+ buildObjectLiteral([
a: StringValue.of("abc")
- ]),
- VariableReference.of("var")
- ]).build()
- | CoercedVariables.of("var": [b: 789])
- | [arg: [[a: "abc"], [b: 789]]]
+ ]),
+ VariableReference.of("var")
+ ]).build()
+ | CoercedVariables.of("var": [b: 789])
+ | [arg: [[a: "abc"], [b: 789]]]
}
@@ -1223,7 +1291,7 @@ class ValuesResolverTest extends Specification {
}
'''
- def executionInput = ExecutionInput.newExecutionInput()
+ def executionInput = newExecutionInput()
.query(mutation)
.variables([input: [name: 'Name', position: 'UNKNOWN_POSITION']])
.build()
@@ -1261,7 +1329,7 @@ class ValuesResolverTest extends Specification {
}
'''
- def executionInput = ExecutionInput.newExecutionInput()
+ def executionInput = newExecutionInput()
.query(mutation)
.variables([input: [name: 'Name', hilarious: 'sometimes']])
.build()
@@ -1299,7 +1367,7 @@ class ValuesResolverTest extends Specification {
}
'''
- def executionInput = ExecutionInput.newExecutionInput()
+ def executionInput = newExecutionInput()
.query(mutation)
.variables([input: [name: 'Name', laughsPerMinute: 'none']])
.build()
diff --git a/src/test/groovy/graphql/execution/incremental/DeferExecutionSupportIntegrationTest.groovy b/src/test/groovy/graphql/execution/incremental/DeferExecutionSupportIntegrationTest.groovy
index 11e6e2d1f8..3f149a8ad5 100644
--- a/src/test/groovy/graphql/execution/incremental/DeferExecutionSupportIntegrationTest.groovy
+++ b/src/test/groovy/graphql/execution/incremental/DeferExecutionSupportIntegrationTest.groovy
@@ -6,7 +6,9 @@ import graphql.ExecutionInput
import graphql.ExecutionResult
import graphql.ExperimentalApi
import graphql.GraphQL
+import graphql.GraphqlErrorBuilder
import graphql.TestUtil
+import graphql.execution.DataFetcherResult
import graphql.execution.pubsub.CapturingSubscriber
import graphql.incremental.DelayedIncrementalPartialResult
import graphql.incremental.IncrementalExecutionResult
@@ -55,6 +57,7 @@ class DeferExecutionSupportIntegrationTest extends Specification {
comments: [Comment]
resolvesToNull: String
dataFetcherError: String
+ dataAndError: String
coercionError: Int
typeMismatchError: [String]
nonNullableError: String!
@@ -128,6 +131,22 @@ class DeferExecutionSupportIntegrationTest extends Specification {
}
}
+ private static DataFetcher resolveWithDataAndError(Object data) {
+ return new DataFetcher() {
+ @Override
+ Object get(DataFetchingEnvironment environment) throws Exception {
+ return DataFetcherResult.newResult()
+ .data(data)
+ .error(
+ GraphqlErrorBuilder.newError()
+ .message("Bang!")
+ .build()
+ )
+ .build()
+ }
+ }
+ }
+
void setup() {
def runtimeWiring = RuntimeWiring.newRuntimeWiring()
.type(newTypeWiring("Query")
@@ -148,6 +167,7 @@ class DeferExecutionSupportIntegrationTest extends Specification {
.type(newTypeWiring("Post").dataFetcher("wordCount", resolve(45999, 10, true)))
.type(newTypeWiring("Post").dataFetcher("latestComment", resolve([title: "Comment title"], 10)))
.type(newTypeWiring("Post").dataFetcher("dataFetcherError", resolveWithException()))
+ .type(newTypeWiring("Post").dataFetcher("dataAndError", resolveWithDataAndError("data")))
.type(newTypeWiring("Post").dataFetcher("coercionError", resolve("Not a number", 10)))
.type(newTypeWiring("Post").dataFetcher("typeMismatchError", resolve([a: "A Map instead of a List"], 10)))
.type(newTypeWiring("Post").dataFetcher("nonNullableError", resolve(null)))
@@ -1158,6 +1178,104 @@ class DeferExecutionSupportIntegrationTest extends Specification {
]
}
+ def "can handle data fetcher that returns both data and error on nested field"() {
+ def query = '''
+ query {
+ hello
+ ... @defer {
+ post {
+ dataAndError
+ }
+ }
+ }
+ '''
+
+ when:
+ def initialResult = executeQuery(query)
+
+ then:
+ initialResult.toSpecification() == [
+ data : [hello: "world"],
+ hasNext: true
+ ]
+
+ when:
+ def incrementalResults = getIncrementalResults(initialResult)
+
+ then:
+ incrementalResults == [
+ [
+ hasNext : false,
+ incremental: [
+ [
+ path : [],
+ data : [post: [dataAndError: "data"]],
+ errors: [[
+ message : "Bang!",
+ locations : [],
+ extensions: [classification: "DataFetchingException"]
+ ]],
+ ],
+ ]
+ ],
+ ]
+ }
+
+ def "can handle data fetcher that returns both data and error"() {
+ def query = '''
+ query {
+ post {
+ id
+ ... @defer {
+ dataAndError
+ }
+ ... @defer {
+ text
+ }
+ }
+ }
+ '''
+
+ when:
+ def initialResult = executeQuery(query)
+
+ then:
+ initialResult.toSpecification() == [
+ data : [post: [id: "1001"]],
+ hasNext: true
+ ]
+
+ when:
+ def incrementalResults = getIncrementalResults(initialResult)
+
+ then:
+ incrementalResults == [
+ [
+ hasNext : true,
+ incremental: [
+ [
+ path : ["post"],
+ data : [dataAndError: "data"],
+ errors: [[
+ message : "Bang!",
+ locations : [],
+ extensions: [classification: "DataFetchingException"]
+ ]],
+ ],
+ ]
+ ],
+ [
+ hasNext : false,
+ incremental: [
+ [
+ path: ["post"],
+ data: [text: "The full text"],
+ ]
+ ]
+ ]
+ ]
+ }
+
def "can handle UnresolvedTypeException"() {
def query = """
query {
@@ -1512,7 +1630,9 @@ class DeferExecutionSupportIntegrationTest extends Specification {
deferredResultStream.subscribe(subscriber)
Awaitility.await().untilTrue(subscriber.isDone())
-
+ if (subscriber.throwable != null) {
+ throw new RuntimeException(subscriber.throwable)
+ }
return subscriber.getEvents()
.collect { it.toSpecification() }
}
diff --git a/src/test/groovy/graphql/execution/incremental/IncrementalCallStateDeferTest.groovy b/src/test/groovy/graphql/execution/incremental/IncrementalCallStateDeferTest.groovy
index 8634458122..94740a9e63 100644
--- a/src/test/groovy/graphql/execution/incremental/IncrementalCallStateDeferTest.groovy
+++ b/src/test/groovy/graphql/execution/incremental/IncrementalCallStateDeferTest.groovy
@@ -3,11 +3,17 @@ package graphql.execution.incremental
import graphql.ExecutionResultImpl
import graphql.execution.ResultPath
+import graphql.execution.pubsub.CapturingSubscriber
import graphql.incremental.DelayedIncrementalPartialResult
import org.awaitility.Awaitility
+import org.jetbrains.annotations.NotNull
+import org.reactivestreams.Publisher
import spock.lang.Specification
+import java.util.concurrent.Callable
import java.util.concurrent.CompletableFuture
+import java.util.concurrent.Executors
+import java.util.concurrent.ThreadFactory
import java.util.function.Supplier
class IncrementalCallStateDeferTest extends Specification {
@@ -57,7 +63,7 @@ class IncrementalCallStateDeferTest extends Specification {
incrementalCallState.enqueue(offThread("C", 10, "/field/path"))
when:
- def subscriber = new graphql.execution.pubsub.CapturingSubscriber() {
+ def subscriber = new CapturingSubscriber() {
@Override
void onComplete() {
assert false, "This should not be called!"
@@ -83,7 +89,7 @@ class IncrementalCallStateDeferTest extends Specification {
incrementalCallState.enqueue(offThread("C", 10, "/field/path")) // <-- will finish first
when:
- def subscriber = new graphql.execution.pubsub.CapturingSubscriber() {
+ def subscriber = new CapturingSubscriber() {
@Override
void onNext(DelayedIncrementalPartialResult executionResult) {
this.getEvents().add(executionResult)
@@ -112,8 +118,8 @@ class IncrementalCallStateDeferTest extends Specification {
incrementalCallState.enqueue(offThread("C", 10, "/field/path")) // <-- will finish first
when:
- def subscriber1 = new graphql.execution.pubsub.CapturingSubscriber()
- def subscriber2 = new graphql.execution.pubsub.CapturingSubscriber()
+ def subscriber1 = new CapturingSubscriber()
+ def subscriber2 = new CapturingSubscriber()
incrementalCallState.startDeferredCalls().subscribe(subscriber1)
incrementalCallState.startDeferredCalls().subscribe(subscriber2)
@@ -195,12 +201,89 @@ class IncrementalCallStateDeferTest extends Specification {
results.any { it.incremental[0].data["c"] == "C" }
}
+ def "nothing happens until the publisher is subscribed to"() {
+
+ def startingValue = "*"
+ given:
+ def incrementalCallState = new IncrementalCallState()
+ incrementalCallState.enqueue(offThread({ -> startingValue + "A" }, 100, "/field/path")) // <-- will finish last
+ incrementalCallState.enqueue(offThread({ -> startingValue + "B" }, 50, "/field/path")) // <-- will finish second
+ incrementalCallState.enqueue(offThread({ -> startingValue + "C" }, 10, "/field/path")) // <-- will finish first
+
+ when:
+
+        // get the publisher but no work has been done here
+ def publisher = incrementalCallState.startDeferredCalls()
+ // we are changing a side effect after the publisher is created
+ startingValue = "_"
+
+        // subscription will cause the queue publisher to start draining the queue
+ List results = subscribeAndWaitCalls(publisher)
+
+ then:
+ assertResultsSizeAndHasNextRule(3, results)
+ results[0].incremental[0].data["_c"] == "_C"
+ results[1].incremental[0].data["_b"] == "_B"
+ results[2].incremental[0].data["_a"] == "_A"
+ }
+
+ def "can swap threads on subscribe"() {
+
+ given:
+ def incrementalCallState = new IncrementalCallState()
+ incrementalCallState.enqueue(offThread({ -> "A" }, 100, "/field/path")) // <-- will finish last
+ incrementalCallState.enqueue(offThread({ -> "B" }, 50, "/field/path")) // <-- will finish second
+ incrementalCallState.enqueue(offThread({ -> "C" }, 10, "/field/path")) // <-- will finish first
+
+ when:
+
+        // get the publisher but no work has been done here
+ def publisher = incrementalCallState.startDeferredCalls()
+
+ def threadFactory = new ThreadFactory() {
+ @Override
+ Thread newThread(@NotNull Runnable r) {
+ return new Thread(r, "SubscriberThread")
+ }
+ }
+ def executor = Executors.newSingleThreadExecutor(threadFactory)
+
+ def subscribeThreadName = ""
+ Callable> callable = new Callable() {
+ @Override
+ Object call() throws Exception {
+ subscribeThreadName = Thread.currentThread().getName()
+ def listOfResults = subscribeAndWaitCalls(publisher)
+ return listOfResults
+ }
+ }
+ def future = executor.submit(callable)
+
+ Awaitility.await().until { future.isDone() }
+
+ then:
+ def results = future.get()
+
+ // we subscribed on our other thread
+ subscribeThreadName == "SubscriberThread"
+
+ assertResultsSizeAndHasNextRule(3, results)
+ results[0].incremental[0].data["c"] == "C"
+ results[1].incremental[0].data["b"] == "B"
+ results[2].incremental[0].data["a"] == "A"
+ }
+
private static DeferredFragmentCall offThread(String data, int sleepTime, String path) {
+ offThread(() -> data, sleepTime, path)
+ }
+
+ private static DeferredFragmentCall offThread(Supplier dataSupplier, int sleepTime, String path) {
def callSupplier = new Supplier>() {
@Override
CompletableFuture get() {
return CompletableFuture.supplyAsync({
Thread.sleep(sleepTime)
+ String data = dataSupplier.get()
if (data == "Bang") {
throw new RuntimeException(data)
}
@@ -239,11 +322,17 @@ class IncrementalCallStateDeferTest extends Specification {
}
private static List startAndWaitCalls(IncrementalCallState incrementalCallState) {
- def subscriber = new graphql.execution.pubsub.CapturingSubscriber()
-
- incrementalCallState.startDeferredCalls().subscribe(subscriber)
+ def publisher = incrementalCallState.startDeferredCalls()
+ return subscribeAndWaitCalls(publisher)
+ }
+ private static List subscribeAndWaitCalls(Publisher publisher) {
+ def subscriber = new CapturingSubscriber()
+ publisher.subscribe(subscriber)
Awaitility.await().untilTrue(subscriber.isDone())
+ if (subscriber.throwable != null) {
+ throw new RuntimeException(subscriber.throwable)
+ }
return subscriber.getEvents()
}
}
diff --git a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherTest.groovy b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherTest.groovy
index 7eaa9cec10..839621f727 100644
--- a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherTest.groovy
+++ b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherTest.groovy
@@ -1,6 +1,7 @@
package graphql.execution.instrumentation.dataloader
import graphql.ExecutionInput
+import graphql.ExecutionResult
import graphql.GraphQL
import graphql.TestUtil
import graphql.execution.AsyncSerialExecutionStrategy
@@ -8,11 +9,16 @@ import graphql.execution.instrumentation.ChainedInstrumentation
import graphql.execution.instrumentation.InstrumentationState
import graphql.execution.instrumentation.SimplePerformantInstrumentation
import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters
+import graphql.execution.pubsub.CapturingSubscriber
import graphql.schema.DataFetcher
+import graphql.schema.DataFetchingEnvironment
+import org.awaitility.Awaitility
import org.dataloader.BatchLoader
import org.dataloader.DataLoaderFactory
import org.dataloader.DataLoaderRegistry
import org.jetbrains.annotations.NotNull
+import org.reactivestreams.Publisher
+import reactor.core.publisher.Mono
import spock.lang.Specification
import spock.lang.Unroll
@@ -275,4 +281,81 @@ class DataLoaderDispatcherTest extends Specification {
er.errors.isEmpty()
er.data == support.buildResponse(depth)
}
+
+ def "issue 3662 - dataloader dispatching can work with subscriptions"() {
+
+ def sdl = '''
+ type Query {
+ field : String
+ }
+
+ type Subscription {
+ onSub : OnSub
+ }
+
+ type OnSub {
+ x : String
+ y : String
+ }
+ '''
+
+ // the dispatching is ALWAYS so not really batching but it completes
+ BatchLoader batchLoader = { keys ->
+ CompletableFuture.supplyAsync {
+ Thread.sleep(50) // some delay
+ keys
+ }
+ }
+
+ DataFetcher dlDF = { DataFetchingEnvironment env ->
+ def dataLoader = env.getDataLoaderRegistry().getDataLoader("dl")
+ return dataLoader.load("working as expected")
+ }
+ DataFetcher dlSub = { DataFetchingEnvironment env ->
+ return Mono.just([x: "X", y: "Y"])
+ }
+ def runtimeWiring = newRuntimeWiring()
+ .type(newTypeWiring("OnSub")
+ .dataFetcher("x", dlDF)
+ .dataFetcher("y", dlDF)
+ .build()
+ )
+ .type(newTypeWiring("Subscription")
+ .dataFetcher("onSub", dlSub)
+ .build()
+ )
+ .build()
+
+ def graphql = TestUtil.graphQL(sdl, runtimeWiring).build()
+
+ DataLoaderRegistry dataLoaderRegistry = new DataLoaderRegistry()
+ dataLoaderRegistry.register("dl", DataLoaderFactory.newDataLoader(batchLoader))
+
+ when:
+ def query = """
+ subscription s {
+ onSub {
+ x, y
+ }
+ }
+ """
+ def executionInput = newExecutionInput()
+ .dataLoaderRegistry(dataLoaderRegistry)
+ .query(query)
+ .build()
+ def er = graphql.execute(executionInput)
+
+ then:
+ er.errors.isEmpty()
+ def subscriber = new CapturingSubscriber()
+ Publisher pub = er.data
+ pub.subscribe(subscriber)
+
+ Awaitility.await().untilTrue(subscriber.isDone())
+
+ subscriber.getEvents().size() == 1
+
+ def msgER = subscriber.getEvents()[0] as ExecutionResult
+ msgER.data == [onSub: [x: "working as expected", y: "working as expected"]]
+ }
}
diff --git a/src/test/groovy/graphql/execution/pubsub/CapturingSubscriber.java b/src/test/groovy/graphql/execution/pubsub/CapturingSubscriber.java
index f736807941..790bac22be 100644
--- a/src/test/groovy/graphql/execution/pubsub/CapturingSubscriber.java
+++ b/src/test/groovy/graphql/execution/pubsub/CapturingSubscriber.java
@@ -3,9 +3,11 @@
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
+import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
/**
* A subscriber that captures each object for testing
@@ -13,37 +15,53 @@
public class CapturingSubscriber implements Subscriber {
private final List events = new ArrayList<>();
private final AtomicBoolean done = new AtomicBoolean();
+ private final AtomicLong creationTime = new AtomicLong(System.nanoTime());
+ private final int requestN;
private Subscription subscription;
private Throwable throwable;
+ public CapturingSubscriber() {
+ this(1);
+ }
+
+ public CapturingSubscriber(int requestN) {
+ this.requestN = requestN;
+ }
@Override
public void onSubscribe(Subscription subscription) {
- System.out.println("onSubscribe called at " + System.nanoTime());
+ System.out.println("onSubscribe called at " + delta());
this.subscription = subscription;
- subscription.request(1);
+ subscription.request(requestN);
}
@Override
public void onNext(T t) {
- System.out.println("onNext called at " + System.nanoTime());
- events.add(t);
- subscription.request(1);
+ System.out.println("\tonNext " + t + " called at " + delta());
+ synchronized (this) {
+ events.add(t);
+ subscription.request(requestN);
+ }
}
@Override
public void onError(Throwable t) {
- System.out.println("onError called at " + System.nanoTime());
+ System.out.println("onError called at " + delta());
this.throwable = t;
done.set(true);
}
@Override
public void onComplete() {
- System.out.println("onComplete called at " + System.nanoTime());
+ System.out.println("onComplete called at " + delta());
done.set(true);
}
+ private String delta() {
+ Duration nanos = Duration.ofNanos(System.nanoTime() - creationTime.get());
+ return "+" + nanos.toMillis() + "ms";
+ }
+
public List getEvents() {
return events;
}
@@ -56,6 +74,13 @@ public AtomicBoolean isDone() {
return done;
}
+ public boolean isCompleted() {
+ return done.get() && throwable == null;
+ }
+ public boolean isCompletedExceptionally() {
+ return done.get() && throwable != null;
+ }
+
public Subscription getSubscription() {
return subscription;
}
diff --git a/src/test/groovy/graphql/execution/pubsub/CapturingSubscription.java b/src/test/groovy/graphql/execution/pubsub/CapturingSubscription.java
new file mode 100644
index 0000000000..551a34a119
--- /dev/null
+++ b/src/test/groovy/graphql/execution/pubsub/CapturingSubscription.java
@@ -0,0 +1,26 @@
+package graphql.execution.pubsub;
+
+import org.reactivestreams.Subscription;
+
+public class CapturingSubscription implements Subscription {
+ private long count = 0;
+ private boolean cancelled = false;
+
+ public long getCount() {
+ return count;
+ }
+
+ public boolean isCancelled() {
+ return cancelled;
+ }
+
+ @Override
+ public void request(long l) {
+ count += l;
+ }
+
+ @Override
+ public void cancel() {
+ cancelled = true;
+ }
+}
diff --git a/src/test/groovy/graphql/execution/reactive/CompletionStageMappingPublisherTest.groovy b/src/test/groovy/graphql/execution/reactive/CompletionStageMappingPublisherTest.groovy
index c2117f9a8d..bc069bcf1e 100644
--- a/src/test/groovy/graphql/execution/reactive/CompletionStageMappingPublisherTest.groovy
+++ b/src/test/groovy/graphql/execution/reactive/CompletionStageMappingPublisherTest.groovy
@@ -10,9 +10,13 @@ import java.util.concurrent.CompletableFuture
import java.util.concurrent.CompletionStage
import java.util.function.Function
+/**
+ * This contains tests for both CompletionStageMappingPublisher and CompletionStageMappingOrderedPublisher because
+ * they have so much common code
+ */
class CompletionStageMappingPublisherTest extends Specification {
- def "basic mapping"() {
+ def "basic mapping of #why"() {
when:
Publisher rxIntegers = Flowable.range(0, 10)
@@ -23,7 +27,7 @@ class CompletionStageMappingPublisherTest extends Specification {
return CompletableFuture.completedFuture(String.valueOf(integer))
}
}
- Publisher rxStrings = new CompletionStageMappingPublisher(rxIntegers, mapper)
+ Publisher rxStrings = creator.call(rxIntegers,mapper)
def capturingSubscriber = new CapturingSubscriber<>()
rxStrings.subscribe(capturingSubscriber)
@@ -31,11 +35,15 @@ class CompletionStageMappingPublisherTest extends Specification {
then:
capturingSubscriber.events.size() == 10
- capturingSubscriber.events[0] instanceof String
- capturingSubscriber.events[0] == "0"
+ capturingSubscriber.events == ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9",]
+
+ where:
+ why | creator
+ "CompletionStageMappingPublisher" | { Publisher p, Function> m -> new CompletionStageMappingPublisher(p,m) }
+ "CompletionStageMappingOrderedPublisher" | { Publisher p, Function> m -> new CompletionStageMappingOrderedPublisher(p,m) }
}
- def "multiple subscribers get there messages"() {
+ def "multiple subscribers get there messages for #why"() {
when:
Publisher rxIntegers = Flowable.range(0, 10)
@@ -46,7 +54,7 @@ class CompletionStageMappingPublisherTest extends Specification {
return CompletableFuture.completedFuture(String.valueOf(integer))
}
}
- Publisher rxStrings = new CompletionStageMappingPublisher(rxIntegers, mapper)
+ Publisher rxStrings = creator.call(rxIntegers,mapper)
def capturingSubscriber1 = new CapturingSubscriber<>()
def capturingSubscriber2 = new CapturingSubscriber<>()
@@ -56,10 +64,20 @@ class CompletionStageMappingPublisherTest extends Specification {
then:
capturingSubscriber1.events.size() == 10
+ // order is kept
+ capturingSubscriber1.events == ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9",]
+
capturingSubscriber2.events.size() == 10
+ // order is kept
+ capturingSubscriber2.events == ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9",]
+
+ where:
+ why | creator
+ "CompletionStageMappingPublisher" | { Publisher p, Function> m -> new CompletionStageMappingPublisher(p,m) }
+ "CompletionStageMappingOrderedPublisher" | { Publisher p, Function> m -> new CompletionStageMappingOrderedPublisher(p,m) }
}
- def "error handling"() {
+ def "error handling for #why"() {
when:
Publisher rxIntegers = Flowable.range(0, 10)
@@ -76,7 +94,7 @@ class CompletionStageMappingPublisherTest extends Specification {
}
}
}
- Publisher rxStrings = new CompletionStageMappingPublisher(rxIntegers, mapper)
+ Publisher rxStrings = creator.call(rxIntegers,mapper)
def capturingSubscriber = new CapturingSubscriber<>()
rxStrings.subscribe(capturingSubscriber)
@@ -87,11 +105,16 @@ class CompletionStageMappingPublisherTest extends Specification {
//
// got this far and cancelled
capturingSubscriber.events.size() == 5
+ capturingSubscriber.events == ["0", "1", "2", "3", "4",]
+ where:
+ why | creator
+ "CompletionStageMappingPublisher" | { Publisher p, Function> m -> new CompletionStageMappingPublisher(p,m) }
+ "CompletionStageMappingOrderedPublisher" | { Publisher p, Function> m -> new CompletionStageMappingOrderedPublisher(p,m) }
}
- def "mapper exception causes onError"() {
+ def "mapper exception causes onError for #why"() {
when:
Publisher rxIntegers = Flowable.range(0, 10)
@@ -106,7 +129,7 @@ class CompletionStageMappingPublisherTest extends Specification {
}
}
}
- Publisher rxStrings = new CompletionStageMappingPublisher(rxIntegers, mapper)
+ Publisher rxStrings = creator.call(rxIntegers, mapper)
def capturingSubscriber = new CapturingSubscriber<>()
rxStrings.subscribe(capturingSubscriber)
@@ -117,6 +140,12 @@ class CompletionStageMappingPublisherTest extends Specification {
//
// got this far and cancelled
capturingSubscriber.events.size() == 5
+ capturingSubscriber.events == ["0", "1", "2", "3", "4",]
+
+ where:
+ why | creator
+ "CompletionStageMappingPublisher" | { Publisher p, Function> m -> new CompletionStageMappingPublisher(p,m) }
+ "CompletionStageMappingOrderedPublisher" | { Publisher p, Function> m -> new CompletionStageMappingOrderedPublisher(p,m) }
}
@@ -126,8 +155,8 @@ class CompletionStageMappingPublisherTest extends Specification {
when:
Publisher rxIntegers = Flowable.range(0, 10)
- Function> mapper = mapperThatDelaysFor(100)
- Publisher rxStrings = new CompletionStageMappingPublisher(rxIntegers, mapper)
+ Function> mapper = mapperThatDelaysFor(10)
+ Publisher rxStrings = creator.call(rxIntegers,mapper)
def capturingSubscriber = new CapturingSubscriber<>()
rxStrings.subscribe(capturingSubscriber)
@@ -137,8 +166,12 @@ class CompletionStageMappingPublisherTest extends Specification {
Awaitility.await().untilTrue(capturingSubscriber.isDone())
capturingSubscriber.events.size() == 10
- capturingSubscriber.events[0] instanceof String
- capturingSubscriber.events[0] == "0"
+ capturingSubscriber.events == ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9",]
+
+ where:
+ why | creator
+ "CompletionStageMappingPublisher" | { Publisher p, Function> m -> new CompletionStageMappingPublisher(p,m) }
+ "CompletionStageMappingOrderedPublisher" | { Publisher p, Function> m -> new CompletionStageMappingOrderedPublisher(p,m) }
}
Function> mapperThatDelaysFor(int delay) {
diff --git a/src/test/groovy/graphql/execution/reactive/CompletionStageOrderedSubscriberTest.groovy b/src/test/groovy/graphql/execution/reactive/CompletionStageOrderedSubscriberTest.groovy
new file mode 100644
index 0000000000..3438a87d06
--- /dev/null
+++ b/src/test/groovy/graphql/execution/reactive/CompletionStageOrderedSubscriberTest.groovy
@@ -0,0 +1,22 @@
+package graphql.execution.reactive
+
+import graphql.execution.pubsub.CapturingSubscriber
+import graphql.execution.pubsub.CapturingSubscription
+import org.reactivestreams.Subscriber
+
+import java.util.concurrent.CompletableFuture
+import java.util.concurrent.CompletionStage
+import java.util.function.Function
+
+class CompletionStageOrderedSubscriberTest extends CompletionStageSubscriberTest {
+
+ @Override
+ protected Subscriber createSubscriber(Function> mapper, CapturingSubscriber capturingSubscriber) {
+ return new CompletionStageOrderedSubscriber(mapper, capturingSubscriber)
+ }
+
+ @Override
+ protected ArrayList expectedOrderingOfAsyncCompletion() {
+ return ["0", "1", "2", "3"]
+ }
+}
diff --git a/src/test/groovy/graphql/execution/reactive/CompletionStageSubscriberTest.groovy b/src/test/groovy/graphql/execution/reactive/CompletionStageSubscriberTest.groovy
new file mode 100644
index 0000000000..45d3d87830
--- /dev/null
+++ b/src/test/groovy/graphql/execution/reactive/CompletionStageSubscriberTest.groovy
@@ -0,0 +1,226 @@
+package graphql.execution.reactive
+
+import graphql.execution.pubsub.CapturingSubscriber
+import graphql.execution.pubsub.CapturingSubscription
+import org.reactivestreams.Subscriber
+import spock.lang.Specification
+
+import java.util.concurrent.CompletableFuture
+import java.util.concurrent.CompletionStage
+import java.util.function.Function
+
+/**
+ * This can be used to test the CompletionStageSubscriber and CompletionStageOrderedSubscriber
+ * since they share so much common code. There are protected helpers to create the subscribers
+ * and set expectations
+ */
+class CompletionStageSubscriberTest extends Specification {
+
+    protected Subscriber createSubscriber(Function<Integer, CompletionStage<String>> mapper, CapturingSubscriber capturingSubscriber) {
+ def completionStageSubscriber = new CompletionStageSubscriber(mapper, capturingSubscriber)
+ completionStageSubscriber
+ }
+
+ protected ArrayList expectedOrderingOfAsyncCompletion() {
+ ["3", "2", "1", "0"]
+ }
+
+ def "basic test of mapping"() {
+ def capturingSubscriber = new CapturingSubscriber<>()
+ def subscription = new CapturingSubscription()
+        def mapper = new Function<Integer, CompletionStage<String>>() {
+            @Override
+            CompletionStage<String> apply(Integer integer) {
+                return CompletableFuture.completedFuture(String.valueOf(integer))
+            }
+        }
+ Subscriber completionStageSubscriber = createSubscriber(mapper, capturingSubscriber)
+
+ when:
+ completionStageSubscriber.onSubscribe(subscription)
+ completionStageSubscriber.onNext(0)
+
+ then:
+ !subscription.isCancelled()
+ !capturingSubscriber.isCompleted()
+ capturingSubscriber.events == ["0"]
+
+ when:
+ completionStageSubscriber.onNext(1)
+
+ then:
+ !subscription.isCancelled()
+ !capturingSubscriber.isCompleted()
+ capturingSubscriber.events == ["0", "1"]
+
+ when:
+ completionStageSubscriber.onComplete()
+
+ then:
+ !subscription.isCancelled()
+ capturingSubscriber.isCompleted()
+ capturingSubscriber.events == ["0", "1"]
+ }
+
+ def "can hold CFs that have not completed and does not emit them even when onComplete is called"() {
+ def capturingSubscriber = new CapturingSubscriber<>()
+ def subscription = new CapturingSubscription()
+ List promises = []
+        Function<Integer, CompletionStage<String>> mapper = mapperThatDoesNotComplete(promises)
+ Subscriber