diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000000..10ef831183
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,10 @@
+version: 2
+updates:
+  - package-ecosystem: "gradle"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
diff --git a/.github/workflows/invoke_test_runner.yml b/.github/workflows/invoke_test_runner.yml
index 0ebead00d6..d14afc17e7 100644
--- a/.github/workflows/invoke_test_runner.yml
+++ b/.github/workflows/invoke_test_runner.yml
@@ -50,7 +50,7 @@ jobs:
- id: 'auth'
name: 'Authenticate to Google Cloud'
- uses: google-github-actions/auth@v0.4.0
+ uses: google-github-actions/auth@v1.0.0
with:
credentials_json: ${{ secrets.GOOGLE_APPLICATION_CREDENTIALS }}
diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 099f1c0fa4..c35ea0eee0 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -13,7 +13,7 @@ jobs:
MAVEN_CENTRAL_PGP_KEY: ${{ secrets.MAVEN_CENTRAL_PGP_KEY }}
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v3
- uses: gradle/wrapper-validation-action@v1
- name: Set up JDK 1.8
uses: actions/setup-java@v1
diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml
index a900264635..39e84200df 100644
--- a/.github/workflows/pull_request.yml
+++ b/.github/workflows/pull_request.yml
@@ -14,7 +14,7 @@ jobs:
buildAndTest:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v3
- uses: gradle/wrapper-validation-action@v1
- name: Set up JDK 1.8
uses: actions/setup-java@v1
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index b61d755e40..0a855a32e3 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -17,7 +17,7 @@ jobs:
RELEASE_VERSION: ${{ github.event.inputs.version }}
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v3
- uses: gradle/wrapper-validation-action@v1
- name: Set up JDK 1.8
uses: actions/setup-java@v1
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index cd9d3382b0..5b3d4cc7f8 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -44,7 +44,7 @@ incident.
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 1.3.0, available at
-[http://contributor-covenant.org/version/1/3/0/][version]
+[https://contributor-covenant.org/version/1/3/0/][version]
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/3/0/
+[homepage]: https://contributor-covenant.org
+[version]: https://contributor-covenant.org/version/1/3/0/
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 41f2c19b01..fb2fb04c52 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -9,7 +9,7 @@ The overall goal of graphql-java is to have a correct implementation of the [Gra
In order to achieve that we have a strong focus on maintainability and high test coverage:
-- We expect new or modified unit test for every change (written in [Spock](http://spockframework.org/)).
+- We expect new or modified unit test for every change (written in [Spock](https://spockframework.org/)).
- Your code should be formatted with our IntelliJ [graphql-java-code-style](graphql-java-code-style.xml).
diff --git a/README.md b/README.md
index f1b8e82cb4..1d68fdf6bb 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@ Discuss and ask questions in our Discussions: https://github.com/graphql-java/gr
This is a [GraphQL](https://github.com/graphql/graphql-spec) Java implementation.
[](https://github.com/graphql-java/graphql-java/actions/workflows/master.yml)
-[](https://maven-badges.herokuapp.com/maven-central/com.graphql-java/graphql-java/)
+[](https://maven-badges.herokuapp.com/maven-central/com.graphql-java/graphql-java/)
[](https://maven-badges.herokuapp.com/maven-central/com.graphql-java/graphql-java/)
[](https://github.com/graphql-java/graphql-java/blob/master/LICENSE.md)
diff --git a/README.zh_cn.md b/README.zh_cn.md
index 28190754e6..8c519b188b 100644
--- a/README.zh_cn.md
+++ b/README.zh_cn.md
@@ -5,7 +5,7 @@
该组件是 [GraphQL 规范](https://github.com/graphql/graphql-spec) 的 Java 实现。
[](https://github.com/graphql-java/graphql-java/actions/workflows/master.yml)
-[](https://maven-badges.herokuapp.com/maven-central/com.graphql-java/graphql-java/)
+[](https://maven-badges.herokuapp.com/maven-central/com.graphql-java/graphql-java/)
[](https://maven-badges.herokuapp.com/maven-central/com.graphql-java/graphql-java/)
[](https://github.com/graphql-java/graphql-java/blob/master/LICENSE.md)
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000000..9657523631
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,17 @@
+# Security Policy
+
+## Supported Versions
+
+We support the latest release with security updates.
+
+We retain the discretion to backport security updates; this is decided on a case-by-case basis.
+
+| Version | Supported |
+| ------- | ------------------ |
+| v20.x | :white_check_mark: |
+
+## Reporting a Vulnerability
+
+:rotating_light: To report a vulnerability, **DO NOT open a pull request or issue or GitHub discussion. DO NOT post publicly.**
+
+Instead, **report the vulnerability privately** via the Security tab on [graphql-java GitHub repository](https://github.com/graphql-java/graphql-java). See instructions at https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability
diff --git a/build.gradle b/build.gradle
index 609d59347a..e961cb5fe5 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,5 +1,6 @@
import java.text.SimpleDateFormat
+
plugins {
id 'java'
id 'java-library'
@@ -13,6 +14,12 @@ plugins {
id "me.champeau.jmh" version "0.6.6"
}
+java {
+ toolchain {
+ languageVersion = JavaLanguageVersion.of(8)
+ }
+}
+
def getDevelopmentVersion() {
def gitCheckOutput = new StringBuilder()
def gitCheckError = new StringBuilder()
@@ -39,15 +46,6 @@ def getDevelopmentVersion() {
version
}
-if (JavaVersion.current() != JavaVersion.VERSION_1_8) {
- def msg = String.format("This build must be run with java 1.8 - you are running %s - gradle finds the JDK via JAVA_HOME=%s",
- JavaVersion.current(), System.getenv("JAVA_HOME"))
- throw new GradleException(msg)
-}
-
-
-sourceCompatibility = 1.8
-targetCompatibility = 1.8
def reactiveStreamsVersion = '1.0.3'
def slf4jVersion = '1.7.35'
def releaseVersion = System.env.RELEASE_VERSION
@@ -85,7 +83,7 @@ jar {
}
dependencies {
- compileOnly 'org.jetbrains:annotations:23.0.0'
+ compileOnly 'org.jetbrains:annotations:24.0.1'
implementation 'org.antlr:antlr4-runtime:' + antlrVersion
implementation 'org.slf4j:slf4j-api:' + slf4jVersion
api 'com.graphql-java:java-dataloader:3.2.0'
@@ -94,9 +92,7 @@ dependencies {
implementation 'com.google.guava:guava:31.0.1-jre'
testImplementation group: 'junit', name: 'junit', version: '4.13.2'
testImplementation 'org.spockframework:spock-core:2.0-groovy-3.0'
- testImplementation 'org.codehaus.groovy:groovy:3.0.9'
- testImplementation 'cglib:cglib-nodep:3.3.0'
- testImplementation 'org.objenesis:objenesis:3.2'
+ testImplementation 'org.codehaus.groovy:groovy:3.0.16'
testImplementation 'com.google.code.gson:gson:2.8.9'
testImplementation 'org.eclipse.jetty:jetty-server:9.4.26.v20200117'
testImplementation 'com.fasterxml.jackson.core:jackson-databind:2.13.1'
@@ -150,30 +146,39 @@ shadowJar {
// -removeheaders: Private-Package Removes the MANIFEST.MF header Private-Package, which contains all the internal packages and
// also the repackaged packages like guava, which would be wrong after repackaging.
// Import-Package: Changes the imported packages header, to exclude guava and dependencies from the import list (! excludes packages)
- // Guava was repackaged and included inside the jar, so we need remove it.
+ // Guava was repackaged and included inside the jar, so we need to remove it.
+ // ANTLR was shaded, so we need to remove it.
+ // sun.misc is a JRE internal-only class that is not directly used by graphql-java. It was causing problems in libraries using graphql-java.
// The last ,* copies all the existing imports from the other dependencies, which is required.
bnd('''
-exportcontents: graphql.*
-removeheaders: Private-Package
-Import-Package: !com.google.*,!org.checkerframework.*,!javax.annotation.*,!graphql.com.google.*,!org.antlr.*,!graphql.org.antlr.*,*
+Import-Package: !com.google.*,!org.checkerframework.*,!javax.annotation.*,!graphql.com.google.*,!org.antlr.*,!graphql.org.antlr.*,!sun.misc.*,*
''')
}
-task removeNotNeededGuava(type: Zip) {
+task extractWithoutGuava(type: Copy) {
from({ zipTree({ "build/libs/graphql-java-${project.version}.jar" }) }) {
exclude('/com/**')
}
+ into layout.buildDirectory.dir("extract")
+}
+
+task buildNewJar(type: Jar) {
+ from layout.buildDirectory.dir("extract")
archiveFileName = "graphql-java-tmp.jar"
destinationDirectory = file("${project.buildDir}/libs")
+ manifest {
+ from file("build/extract/META-INF/MANIFEST.MF")
+ }
doLast {
delete("build/libs/graphql-java-${project.version}.jar")
file("build/libs/graphql-java-tmp.jar").renameTo(file("build/libs/graphql-java-${project.version}.jar"))
}
}
-
-shadowJar.finalizedBy removeNotNeededGuava
+shadowJar.finalizedBy extractWithoutGuava, buildNewJar
task testng(type: Test) {
diff --git a/coding-guidelines.md b/coding-guidelines.md
index fb55a0bab4..cecd8b2c42 100644
--- a/coding-guidelines.md
+++ b/coding-guidelines.md
@@ -26,7 +26,7 @@ We have a mix of Optional and allowing null values because GraphQL Java was orig
We are aiming to not use Optional moving forward in order to be consistent overall.
### Unit testing and dependencies
-All tests are written in [Spock](http://spockframework.org).
+All tests are written in [Spock](https://spockframework.org).
All new code has to have unit tests.
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 2e6e5897b5..070cb702f0 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
diff --git a/src/main/antlr/GraphqlSDL.g4 b/src/main/antlr/GraphqlSDL.g4
index 89666105f7..9c4ef2a4f1 100644
--- a/src/main/antlr/GraphqlSDL.g4
+++ b/src/main/antlr/GraphqlSDL.g4
@@ -16,7 +16,7 @@ schemaDefinition : description? SCHEMA directives? '{' operationTypeDefinition+
schemaExtension :
EXTEND SCHEMA directives? '{' operationTypeDefinition+ '}' |
- EXTEND SCHEMA directives+
+ EXTEND SCHEMA directives
;
operationTypeDefinition : description? operationType ':' typeName;
diff --git a/src/main/java/graphql/Scalars.java b/src/main/java/graphql/Scalars.java
index 4a72d248ce..25f9dc640c 100644
--- a/src/main/java/graphql/Scalars.java
+++ b/src/main/java/graphql/Scalars.java
@@ -13,7 +13,8 @@
* by the graphql specification (Int, Float, String, Boolean and ID) while others are offer because they are common on
* Java platforms.
*
- * For more info see http://graphql.org/learn/schema/#scalar-types and more specifically https://spec.graphql.org/October2021/#sec-Scalars
+ * For more info see https://graphql.org/learn/schema/#scalar-types and
+ * more specifically https://spec.graphql.org/draft/#sec-Scalars
*/
@PublicApi
public class Scalars {
diff --git a/src/main/java/graphql/collect/ImmutableKit.java b/src/main/java/graphql/collect/ImmutableKit.java
index af74316b69..6fc66280c1 100644
--- a/src/main/java/graphql/collect/ImmutableKit.java
+++ b/src/main/java/graphql/collect/ImmutableKit.java
@@ -13,6 +13,7 @@
import static graphql.Assert.assertNotNull;
@Internal
+@SuppressWarnings({"UnstableApiUsage"})
public final class ImmutableKit {
public static <T> ImmutableList<T> emptyList() {
@@ -32,7 +33,6 @@ public static <K, V> ImmutableMap<K, V> addToMap(Map<K, V> existing, K newKey, V
}
public static <T> ImmutableList<T> concatLists(List<T> l1, List<T> l2) {
- //noinspection UnstableApiUsage
 return ImmutableList.<T>builderWithExpectedSize(l1.size() + l2.size()).addAll(l1).addAll(l2).build();
}
@@ -50,8 +50,7 @@ public static <T> ImmutableList<T> concatLists(List<T> l1, List<T> l2) {
public static <T, R> ImmutableList<R> map(Collection<? extends T> collection, Function<? super T, ? extends R> mapper) {
assertNotNull(collection);
assertNotNull(mapper);
- @SuppressWarnings({"RedundantTypeArguments", "UnstableApiUsage"})
- ImmutableList.Builder<R> builder = ImmutableList.<R>builderWithExpectedSize(collection.size());
+ ImmutableList.Builder<R> builder = ImmutableList.builderWithExpectedSize(collection.size());
for (T t : collection) {
R r = mapper.apply(t);
builder.add(r);
@@ -60,21 +59,22 @@ public static <T, R> ImmutableList<R> map(Collection<? extends T> collection, Fu
}
/**
- * This will map an iterable of items but drop any that are null from the mapped list
- *
+ * This will map a collection of items but drop any that are null from the input.
* This is more efficient than `c.stream().map().collect()` because it does not create the intermediate objects needed
* for the flexible style. Benchmarking has shown this to outperform `stream()`.
*
- * @param iterable the iterable to map
+ * @param collection the collection to map
* @param mapper the mapper function
* @param <T> for two
* @param <R> for result
*
* @return a map immutable list of results
*/
- public static <T, R> ImmutableList<R> mapAndDropNulls(Iterable<? extends T> iterable, Function<? super T, ? extends R> mapper) {
- ImmutableList.Builder<R> builder = ImmutableList.builder();
- for (T t : iterable) {
+ public static <T, R> ImmutableList<R> mapAndDropNulls(Collection<? extends T> collection, Function<? super T, ? extends R> mapper) {
+ assertNotNull(collection);
+ assertNotNull(mapper);
+ ImmutableList.Builder<R> builder = ImmutableList.builderWithExpectedSize(collection.size());
+ for (T t : collection) {
R r = mapper.apply(t);
if (r != null) {
builder.add(r);
@@ -83,7 +83,6 @@ public static <T, R> ImmutableList<R> mapAndDropNulls(Iterable<? extends T> iter
return builder.build();
}
-
/**
* This constructs a new Immutable list from an existing collection and adds a new element to it.
*
@@ -99,7 +98,6 @@ public static <T> ImmutableList<T> addToList(Collection<? extends T> existing, T
assertNotNull(existing);
assertNotNull(newValue);
int expectedSize = existing.size() + 1 + extraValues.length;
- @SuppressWarnings("UnstableApiUsage")
ImmutableList.Builder<T> newList = ImmutableList.builderWithExpectedSize(expectedSize);
newList.addAll(existing);
newList.add(newValue);
@@ -124,7 +122,6 @@ public static <T> ImmutableSet<T> addToSet(Collection<? extends T> existing, T n
assertNotNull(existing);
assertNotNull(newValue);
int expectedSize = existing.size() + 1 + extraValues.length;
- @SuppressWarnings("UnstableApiUsage")
ImmutableSet.Builder<T> newSet = ImmutableSet.builderWithExpectedSize(expectedSize);
newSet.addAll(existing);
newSet.add(newValue);
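For illustration, the reworked `mapAndDropNulls` now takes a `Collection`, pre-sizes its builder, and drops entries whose mapper result is null. A minimal sketch of the behaviour (note the class is marked `@Internal`; the values here are made up):

```java
import com.google.common.collect.ImmutableList;
import graphql.collect.ImmutableKit;

import static java.util.Arrays.asList;

class ImmutableKitSketch {
    public static void main(String[] args) {
        // blank strings map to null and are dropped from the mapped result
        ImmutableList<Integer> lengths = ImmutableKit.mapAndDropNulls(
                asList("graphql", "", "java"),
                s -> s.isEmpty() ? null : s.length());
        System.out.println(lengths); // [7, 4]
    }
}
```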
diff --git a/src/main/java/graphql/execution/Execution.java b/src/main/java/graphql/execution/Execution.java
index e09f13ada7..401258dedb 100644
--- a/src/main/java/graphql/execution/Execution.java
+++ b/src/main/java/graphql/execution/Execution.java
@@ -4,6 +4,7 @@
import graphql.ExecutionInput;
import graphql.ExecutionResult;
import graphql.ExecutionResultImpl;
+import graphql.GraphQLContext;
import graphql.GraphQLError;
import graphql.Internal;
import graphql.execution.instrumentation.Instrumentation;
@@ -11,6 +12,7 @@
import graphql.execution.instrumentation.InstrumentationState;
import graphql.execution.instrumentation.parameters.InstrumentationExecuteOperationParameters;
import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters;
+import graphql.extensions.ExtensionsBuilder;
import graphql.language.Document;
import graphql.language.FragmentDefinition;
import graphql.language.NodeUtil;
@@ -42,7 +44,7 @@ public class Execution {
private final ExecutionStrategy mutationStrategy;
private final ExecutionStrategy subscriptionStrategy;
private final Instrumentation instrumentation;
- private ValueUnboxer valueUnboxer;
+ private final ValueUnboxer valueUnboxer;
public Execution(ExecutionStrategy queryStrategy, ExecutionStrategy mutationStrategy, ExecutionStrategy subscriptionStrategy, Instrumentation instrumentation, ValueUnboxer valueUnboxer) {
this.queryStrategy = queryStrategy != null ? queryStrategy : new AsyncExecutionStrategy();
@@ -105,6 +107,9 @@ public CompletableFuture<ExecutionResult> execute(Document document, GraphQLSche
private CompletableFuture<ExecutionResult> executeOperation(ExecutionContext executionContext, Object root, OperationDefinition operationDefinition) {
+ GraphQLContext graphQLContext = executionContext.getGraphQLContext();
+ addExtensionsBuilderNotPresent(graphQLContext);
+
InstrumentationExecuteOperationParameters instrumentationParams = new InstrumentationExecuteOperationParameters(executionContext);
InstrumentationContext<ExecutionResult> executeOperationCtx = nonNullCtx(instrumentation.beginExecuteOperation(instrumentationParams, executionContext.getInstrumentationState()));
@@ -168,8 +173,30 @@ private CompletableFuture<ExecutionResult> executeOperation(ExecutionContext exe
// note this happens NOW - not when the result completes
executeOperationCtx.onDispatched(result);
- result = result.whenComplete(executeOperationCtx::onCompleted);
+ // fill out extensions if we have them
+ result = result.thenApply(er -> mergeExtensionsBuilderIfPresent(er, graphQLContext));
+ result = result.whenComplete(executeOperationCtx::onCompleted);
return result;
}
+
+ private void addExtensionsBuilderNotPresent(GraphQLContext graphQLContext) {
+ Object builder = graphQLContext.get(ExtensionsBuilder.class);
+ if (builder == null) {
+ graphQLContext.put(ExtensionsBuilder.class, ExtensionsBuilder.newExtensionsBuilder());
+ }
+ }
+
+ private ExecutionResult mergeExtensionsBuilderIfPresent(ExecutionResult executionResult, GraphQLContext graphQLContext) {
+ Object builder = graphQLContext.get(ExtensionsBuilder.class);
+ if (builder instanceof ExtensionsBuilder) {
+ ExtensionsBuilder extensionsBuilder = (ExtensionsBuilder) builder;
+ Map<Object, Object> currentExtensions = executionResult.getExtensions();
+ if (currentExtensions != null) {
+ extensionsBuilder.addValues(currentExtensions);
+ }
+ executionResult = extensionsBuilder.setExtensions(executionResult);
+ }
+ return executionResult;
+ }
}
diff --git a/src/main/java/graphql/execution/ExecutionStrategy.java b/src/main/java/graphql/execution/ExecutionStrategy.java
index 33d45f5af7..9e38a58372 100644
--- a/src/main/java/graphql/execution/ExecutionStrategy.java
+++ b/src/main/java/graphql/execution/ExecutionStrategy.java
@@ -451,8 +451,7 @@ protected FieldValueInfo completeValue(ExecutionContext executionContext, Execut
CompletableFuture<ExecutionResult> fieldValue;
if (result == null) {
- fieldValue = completeValueForNull(executionContext, parameters);
- return FieldValueInfo.newFieldValueInfo(NULL).fieldValue(fieldValue).build();
+ return getFieldValueInfoForNull(executionContext, parameters);
} else if (isList(fieldType)) {
return completeValueForList(executionContext, parameters, result);
} else if (isScalar(fieldType)) {
@@ -473,10 +472,8 @@ protected FieldValueInfo completeValue(ExecutionContext executionContext, Execut
} catch (UnresolvedTypeException ex) {
// consider the result to be null and add the error on the context
handleUnresolvedTypeProblem(executionContext, parameters, ex);
- // and validate the field is nullable, if non-nullable throw exception
- parameters.getNonNullFieldValidator().validate(parameters.getPath(), null);
- // complete the field as null
- fieldValue = completedFuture(new ExecutionResultImpl(null, executionContext.getErrors()));
+ // complete field as null, validating it is nullable
+ return getFieldValueInfoForNull(executionContext, parameters);
}
return FieldValueInfo.newFieldValueInfo(OBJECT).fieldValue(fieldValue).build();
}
@@ -488,6 +485,21 @@ private void handleUnresolvedTypeProblem(ExecutionContext context, ExecutionStra
}
+ /**
+ * Called to complete a null value.
+ *
+ * @param executionContext contains the top level execution parameters
+ * @param parameters contains the parameters holding the fields to be executed and source object
+ *
+ * @return a {@link FieldValueInfo}
+ *
+ * @throws NonNullableFieldWasNullException if a non null field resolves to a null value
+ */
+ private FieldValueInfo getFieldValueInfoForNull(ExecutionContext executionContext, ExecutionStrategyParameters parameters) {
+ CompletableFuture<ExecutionResult> fieldValue = completeValueForNull(executionContext, parameters);
+ return FieldValueInfo.newFieldValueInfo(NULL).fieldValue(fieldValue).build();
+ }
+
protected CompletableFuture<ExecutionResult> completeValueForNull(ExecutionContext executionContext, ExecutionStrategyParameters parameters) {
return Async.tryCatch(() -> {
Object nullValue = parameters.getNonNullFieldValidator().validate(parameters.getPath(), null);
diff --git a/src/main/java/graphql/execution/SubscriptionExecutionStrategy.java b/src/main/java/graphql/execution/SubscriptionExecutionStrategy.java
index abd71569e2..be816e7add 100644
--- a/src/main/java/graphql/execution/SubscriptionExecutionStrategy.java
+++ b/src/main/java/graphql/execution/SubscriptionExecutionStrategy.java
@@ -30,8 +30,9 @@
* Afterwards each object delivered on that stream will be mapped via running the original selection set over that object and hence producing an ExecutionResult
* just like a normal graphql query.
*
- * See https://github.com/facebook/graphql/blob/master/spec/Section%206%20--%20Execution.md
- * See http://www.reactive-streams.org/
+ * See https://spec.graphql.org/draft/#sec-Subscription
+ *
+ * See https://www.reactive-streams.org/
*/
@PublicApi
public class SubscriptionExecutionStrategy extends ExecutionStrategy {
diff --git a/src/main/java/graphql/execution/directives/QueryAppliedDirective.java b/src/main/java/graphql/execution/directives/QueryAppliedDirective.java
index 638a890048..d2e0e66891 100644
--- a/src/main/java/graphql/execution/directives/QueryAppliedDirective.java
+++ b/src/main/java/graphql/execution/directives/QueryAppliedDirective.java
@@ -30,7 +30,7 @@
* classes have been introduced to better model when a directive is applied to a query element,
* as opposed to its schema definition itself.
*
- * See http://graphql.org/learn/queries/#directives for more details on the concept.
+ * See https://graphql.org/learn/queries/#directives for more details on the concept.
*/
@PublicApi
public class QueryAppliedDirective {
diff --git a/src/main/java/graphql/execution/instrumentation/ExecutionStrategyInstrumentationContext.java b/src/main/java/graphql/execution/instrumentation/ExecutionStrategyInstrumentationContext.java
index 44a75ffc0b..04fbceab81 100644
--- a/src/main/java/graphql/execution/instrumentation/ExecutionStrategyInstrumentationContext.java
+++ b/src/main/java/graphql/execution/instrumentation/ExecutionStrategyInstrumentationContext.java
@@ -4,8 +4,8 @@
import graphql.Internal;
import graphql.PublicSpi;
import graphql.execution.FieldValueInfo;
+import org.jetbrains.annotations.NotNull;
-import javax.annotation.Nonnull;
import java.util.List;
import java.util.concurrent.CompletableFuture;
@@ -27,7 +27,7 @@ default void onFieldValuesException() {
*
* @return a non null {@link InstrumentationContext} that maybe a no-op
*/
- @Nonnull
+ @NotNull
@Internal
static ExecutionStrategyInstrumentationContext nonNullCtx(ExecutionStrategyInstrumentationContext nullableContext) {
return nullableContext == null ? NOOP : nullableContext;
diff --git a/src/main/java/graphql/execution/instrumentation/SimpleInstrumentation.java b/src/main/java/graphql/execution/instrumentation/SimpleInstrumentation.java
index a8b27a8718..f35278c551 100644
--- a/src/main/java/graphql/execution/instrumentation/SimpleInstrumentation.java
+++ b/src/main/java/graphql/execution/instrumentation/SimpleInstrumentation.java
@@ -5,7 +5,7 @@
/**
* An implementation of {@link graphql.execution.instrumentation.Instrumentation} that does nothing. It can be used
- * as a base for derived classes where you only implement the methods you want to. With all the methods in {@link Instrumentation}
+ * as a base for derived classes where you only implement the methods you want to. With all the methods in {@link Instrumentation}
* now defaulted (post Java 6) this class is really not needed anymore but has been retained for backwards compatibility
* reasons.
*
diff --git a/src/main/java/graphql/execution/instrumentation/SimpleInstrumentationContext.java b/src/main/java/graphql/execution/instrumentation/SimpleInstrumentationContext.java
index b696cc225e..2621314a56 100644
--- a/src/main/java/graphql/execution/instrumentation/SimpleInstrumentationContext.java
+++ b/src/main/java/graphql/execution/instrumentation/SimpleInstrumentationContext.java
@@ -1,8 +1,8 @@
package graphql.execution.instrumentation;
import graphql.PublicApi;
+import org.jetbrains.annotations.NotNull;
-import javax.annotation.Nonnull;
import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
@@ -43,7 +43,7 @@ public static <T> InstrumentationContext<T> noOp() {
*
* @return a non null {@link InstrumentationContext} that maybe a no-op
*/
- @Nonnull
+ @NotNull
public static <T> InstrumentationContext<T> nonNullCtx(InstrumentationContext<T> nullableContext) {
return nullableContext == null ? noOp() : nullableContext;
}
diff --git a/src/main/java/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentation.java b/src/main/java/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentation.java
index fd328c3ff5..87c137e303 100644
--- a/src/main/java/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentation.java
+++ b/src/main/java/graphql/execution/instrumentation/dataloader/DataLoaderDispatcherInstrumentation.java
@@ -129,16 +129,7 @@ private boolean isDataLoaderCompatibleExecution(ExecutionContext executionContex
// if there are no data loaders, there is nothing to do
//
if (state.hasNoDataLoaders()) {
- return new ExecutionStrategyInstrumentationContext() {
- @Override
- public void onDispatched(CompletableFuture<ExecutionResult> result) {
- }
-
- @Override
- public void onCompleted(ExecutionResult result, Throwable t) {
- }
- };
-
+ return ExecutionStrategyInstrumentationContext.NOOP;
}
return state.getApproach().beginExecutionStrategy(parameters, state.getState());
}
diff --git a/src/main/java/graphql/execution/instrumentation/fieldvalidation/FieldValidationInstrumentation.java b/src/main/java/graphql/execution/instrumentation/fieldvalidation/FieldValidationInstrumentation.java
index 7dc5d6ba72..d1f30fbfbd 100644
--- a/src/main/java/graphql/execution/instrumentation/fieldvalidation/FieldValidationInstrumentation.java
+++ b/src/main/java/graphql/execution/instrumentation/fieldvalidation/FieldValidationInstrumentation.java
@@ -43,6 +43,6 @@ public FieldValidationInstrumentation(FieldValidation fieldValidation) {
if (errors != null && !errors.isEmpty()) {
throw new AbortExecutionException(errors);
}
- return super.beginExecuteOperation(parameters);
+ return super.beginExecuteOperation(parameters, state);
}
}
diff --git a/src/main/java/graphql/extensions/DefaultExtensionsMerger.java b/src/main/java/graphql/extensions/DefaultExtensionsMerger.java
new file mode 100644
index 0000000000..a74a182c81
--- /dev/null
+++ b/src/main/java/graphql/extensions/DefaultExtensionsMerger.java
@@ -0,0 +1,74 @@
+package graphql.extensions;
+
+import com.google.common.collect.Sets;
+import graphql.Internal;
+import org.jetbrains.annotations.NotNull;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+
+@Internal
+public class DefaultExtensionsMerger implements ExtensionsMerger {
+ @Override
+ @NotNull
+ public Map<Object, Object> merge(@NotNull Map<Object, Object> leftMap, @NotNull Map<Object, Object> rightMap) {
+ if (leftMap.isEmpty()) {
+ return mapCast(rightMap);
+ }
+ if (rightMap.isEmpty()) {
+ return mapCast(leftMap);
+ }
+ Map<Object, Object> targetMap = new LinkedHashMap<>();
+ Set<Object> leftKeys = leftMap.keySet();
+ for (Object key : leftKeys) {
+ Object leftVal = leftMap.get(key);
+ if (rightMap.containsKey(key)) {
+ Object rightVal = rightMap.get(key);
+ targetMap.put(key, mergeObjects(leftVal, rightVal));
+ } else {
+ targetMap.put(key, leftVal);
+ }
+ }
+ Sets.SetView<Object> rightOnlyKeys = Sets.difference(rightMap.keySet(), leftKeys);
+ for (Object key : rightOnlyKeys) {
+ Object rightVal = rightMap.get(key);
+ targetMap.put(key, rightVal);
+ }
+ return targetMap;
+ }
+
+ private Object mergeObjects(Object leftVal, Object rightVal) {
+ if (leftVal instanceof Map && rightVal instanceof Map) {
+ return merge(mapCast(leftVal), mapCast(rightVal));
+ } else if (leftVal instanceof Collection && rightVal instanceof Collection) {
+ // we append - no equality or merging here
+ return appendLists(leftVal, rightVal);
+ } else {
+ // we have some primitive - so prefer the right since it was encountered last
+ // and last write wins here
+ return rightVal;
+ }
+ }
+
+ @NotNull
+ private List<Object> appendLists(Object leftVal, Object rightVal) {
+ List<Object> target = new ArrayList<>(listCast(leftVal));
+ target.addAll(listCast(rightVal));
+ return target;
+ }
+
+ private Map<Object, Object> mapCast(Object map) {
+ //noinspection unchecked
+ return (Map<Object, Object>) map;
+ }
+
+ private Collection<Object> listCast(Object collection) {
+ //noinspection unchecked
+ return (Collection<Object>) collection;
+ }
+}
diff --git a/src/main/java/graphql/extensions/ExtensionsBuilder.java b/src/main/java/graphql/extensions/ExtensionsBuilder.java
new file mode 100644
index 0000000000..6c3a982e0c
--- /dev/null
+++ b/src/main/java/graphql/extensions/ExtensionsBuilder.java
@@ -0,0 +1,125 @@
+package graphql.extensions;
+
+import com.google.common.collect.ImmutableMap;
+import graphql.ExecutionResult;
+import graphql.PublicApi;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CopyOnWriteArrayList;
+
+import static graphql.Assert.assertNotNull;
+
+/**
+ * This class can be used to help build the graphql `extensions` map. A series of changes to the extensions can
+ * be added and these will be merged together via a {@link ExtensionsMerger} implementation and that resultant
+ * map can be used as the `extensions`
+ *
+ * The engine will place a {@link ExtensionsBuilder} into the {@link graphql.GraphQLContext} (if one is not manually placed there)
+ * and hence {@link graphql.schema.DataFetcher}s can use it to build up extensions progressively.
+ *
+ * At the end of the execution, the {@link ExtensionsBuilder} will be used to build a graphql `extensions` map that
+ * is placed in the {@link ExecutionResult}
+ */
+@PublicApi
+public class ExtensionsBuilder {
+
+ // thread safe since there can be many changes say in DFs across threads
+ private final List<Map<Object, Object>> changes = new CopyOnWriteArrayList<>();
+ private final ExtensionsMerger extensionsMerger;
+
+
+ private ExtensionsBuilder(ExtensionsMerger extensionsMerger) {
+ this.extensionsMerger = extensionsMerger;
+ }
+
+ /**
+ * @return a new ExtensionsBuilder using a default merger
+ */
+ public static ExtensionsBuilder newExtensionsBuilder() {
+ return new ExtensionsBuilder(ExtensionsMerger.DEFAULT);
+ }
+
+ /**
+ * This creates a new ExtensionsBuilder with the provided {@link ExtensionsMerger}
+ *
+ * @param extensionsMerger the merging code to use
+ *
+ * @return a new ExtensionsBuilder using the provided merger
+ */
+ public static ExtensionsBuilder newExtensionsBuilder(ExtensionsMerger extensionsMerger) {
+ return new ExtensionsBuilder(extensionsMerger);
+ }
+
+
+ /**
+ * Adds new values into the extension builder
+ *
+ * @param newValues the new values to add
+ *
+ * @return this builder for fluent style reasons
+ */
+ public ExtensionsBuilder addValues(@NotNull Map<Object, Object> newValues) {
+ assertNotNull(newValues);
+ changes.add(newValues);
+ return this;
+ }
+
+ /**
+ * Adds a single new value into the extension builder
+ *
+ * @param key the key in the extensions
+ * @param value the value in the extensions
+ *
+ * @return this builder for fluent style reasons
+ */
+ public ExtensionsBuilder addValue(@NotNull Object key, @Nullable Object value) {
+ assertNotNull(key);
+ return addValues(Collections.singletonMap(key, value));
+ }
+
+ /**
+ * This builds an extensions map from this builder, merging together the values provided
+ *
+ * @return a new extensions map
+ */
+ public Map<Object, Object> buildExtensions() {
+ if (changes.isEmpty()) {
+ return ImmutableMap.of();
+ }
+ Map<Object, Object> firstChange = changes.get(0);
+ if (changes.size() == 1) {
+ return firstChange;
+ }
+ Map<Object, Object> outMap = new LinkedHashMap<>(firstChange);
+ for (int i = 1; i < changes.size(); i++) {
+ Map<Object, Object> newMap = extensionsMerger.merge(outMap, changes.get(i));
+ assertNotNull(newMap, () -> "You MUST provide a non null Map from ExtensionsMerger.merge()");
+ outMap = newMap;
+ }
+ return outMap;
+ }
+
+ /**
+ * This sets new extensions into the provided {@link ExecutionResult}, overwriting any previous values
+ *
+ * @param executionResult the result to set these extensions into
+ *
+ * @return a new ExecutionResult with the extensions values in this builder
+ */
+ public ExecutionResult setExtensions(ExecutionResult executionResult) {
+ assertNotNull(executionResult);
+ Map<Object, Object> currentExtensions = executionResult.getExtensions();
+ Map<Object, Object> builderExtensions = buildExtensions();
+ // if there was no extensions map before, and we are not adding anything new
+ // then leave it null
+ if (currentExtensions == null && builderExtensions.isEmpty()) {
+ return executionResult;
+ }
+ return executionResult.transform(builder -> builder.extensions(builderExtensions));
+ }
+}
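To illustrate the usage described in the javadoc above, a data fetcher can pull the `ExtensionsBuilder` out of the `GraphQLContext` and contribute values that end up in the final `extensions` map. A minimal sketch (the class and the "fetchedField" key are hypothetical, not part of this change):

```java
import graphql.GraphQLContext;
import graphql.extensions.ExtensionsBuilder;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;

class HelloDataFetcher implements DataFetcher<String> {
    @Override
    public String get(DataFetchingEnvironment env) {
        GraphQLContext context = env.getGraphQlContext();
        ExtensionsBuilder extensions = context.get(ExtensionsBuilder.class);
        if (extensions != null) {
            // merged into ExecutionResult#getExtensions() when the operation completes
            extensions.addValue("fetchedField", env.getField().getName());
        }
        return "hello";
    }
}
```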
diff --git a/src/main/java/graphql/extensions/ExtensionsMerger.java b/src/main/java/graphql/extensions/ExtensionsMerger.java
new file mode 100644
index 0000000000..7ad57461c0
--- /dev/null
+++ b/src/main/java/graphql/extensions/ExtensionsMerger.java
@@ -0,0 +1,45 @@
+package graphql.extensions;
+
+import graphql.PublicSpi;
+import org.jetbrains.annotations.NotNull;
+
+import java.util.Map;
+
+/**
+ * This interface is a callback asking code to merge two maps with an eye to creating
+ * the graphql `extensions` value.
+ *
+ * How best to merge two maps is hard to know up front. Should it be a shallow clone or a deep one,
+ * should keys be replaced or not and should lists of value be combined? The {@link ExtensionsMerger} is the
+ * interface asked to do this.
+ *
+ * This interface will be called repeatedly for each change that has been added to the {@link ExtensionsBuilder} and it is expected to merge the two maps as it sees fit
+ */
+@PublicSpi
+public interface ExtensionsMerger {
+
+ /**
+ * A default implementation will do the following
+ *
+ * It will deep merge the maps
+ * It will concatenate lists when they occur under the same key
+ * It will add any keys from the right hand side map that are not present in the left
+ * If a key is in both the left and right side, it will prefer the right hand side
+ * It will try to maintain key order if the maps are ordered
+ *
+ */
+ ExtensionsMerger DEFAULT = new DefaultExtensionsMerger();
+
+ /**
+ * Called to merge the map on the left with the map on the right according to whatever code strategy some-one might envisage
+ *
+ * The map on the left is guaranteed to have been encountered before the map on the right
+ *
+ * @param leftMap the map on the left
+ * @param rightMap the map on the right
+ *
+ * @return a non null merged map
+ */
+ @NotNull
+ Map<Object, Object> merge(@NotNull Map<Object, Object> leftMap, @NotNull Map<Object, Object> rightMap);
+}
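The default merge behaviour documented above can be sketched as follows (values are made up): lists under the same key are concatenated, plain values prefer the right hand side, and keys only present on one side are carried over.

```java
import graphql.extensions.ExtensionsMerger;

import java.util.LinkedHashMap;
import java.util.Map;

import static java.util.Arrays.asList;

class MergerSketch {
    public static void main(String[] args) {
        Map<Object, Object> left = new LinkedHashMap<>();
        left.put("traceIds", asList("a"));
        left.put("source", "left");

        Map<Object, Object> right = new LinkedHashMap<>();
        right.put("traceIds", asList("b"));
        right.put("source", "right");

        Map<Object, Object> merged = ExtensionsMerger.DEFAULT.merge(left, right);
        System.out.println(merged); // {traceIds=[a, b], source=right}
    }
}
```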
diff --git a/src/main/java/graphql/language/SDLExtensionDefinition.java b/src/main/java/graphql/language/SDLExtensionDefinition.java
index d3cffb66ae..2b71cd46ab 100644
--- a/src/main/java/graphql/language/SDLExtensionDefinition.java
+++ b/src/main/java/graphql/language/SDLExtensionDefinition.java
@@ -4,7 +4,7 @@
import graphql.PublicApi;
/**
- * An marker interface for Schema Definition Language (SDL) extension definitions.
+ * A marker interface for Schema Definition Language (SDL) extension definitions.
*/
@PublicApi
public interface SDLExtensionDefinition {
diff --git a/src/main/java/graphql/parser/GraphqlAntlrToLanguage.java b/src/main/java/graphql/parser/GraphqlAntlrToLanguage.java
index eb5070bc66..440c4c5ea0 100644
--- a/src/main/java/graphql/parser/GraphqlAntlrToLanguage.java
+++ b/src/main/java/graphql/parser/GraphqlAntlrToLanguage.java
@@ -1,6 +1,5 @@
package graphql.parser;
-
import com.google.common.collect.ImmutableList;
import graphql.Assert;
import graphql.Internal;
@@ -68,8 +67,8 @@
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.tree.TerminalNode;
+import org.jetbrains.annotations.Nullable;
-import javax.annotation.Nullable;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
@@ -407,10 +406,10 @@ private SDLDefinition creationSchemaExtension(GraphqlParser.SchemaExtensionConte
addCommonData(def, ctx);
List<Directive> directives = new ArrayList<>();
- List<GraphqlParser.DirectivesContext> directivesCtx = ctx.directives();
- for (GraphqlParser.DirectivesContext directiveCtx : directivesCtx) {
- directives.addAll(createDirectives(directiveCtx));
- }
+
+ GraphqlParser.DirectivesContext directivesCtx = ctx.directives();
+ directives.addAll(createDirectives(directivesCtx));
+
def.directives(directives);
List<OperationTypeDefinition> operationTypeDefs = map(ctx.operationTypeDefinition(), this::createOperationTypeDefinition);
diff --git a/src/main/java/graphql/parser/Parser.java b/src/main/java/graphql/parser/Parser.java
index c1aac322f3..ce476c89bb 100644
--- a/src/main/java/graphql/parser/Parser.java
+++ b/src/main/java/graphql/parser/Parser.java
@@ -13,6 +13,7 @@
import graphql.parser.antlr.GraphqlLexer;
import graphql.parser.antlr.GraphqlParser;
import graphql.parser.exceptions.ParseCancelledException;
+import graphql.parser.exceptions.ParseCancelledTooDeepException;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CodePointCharStream;
@@ -254,13 +255,19 @@ private Type<?> parseTypeImpl(String input) throws InvalidSyntaxException {
}
private Node<?> parseImpl(ParserEnvironment environment, BiFunction nodeFunction) throws InvalidSyntaxException {
+ // default in the parser options if they are not set
+ ParserOptions parserOptions = environment.getParserOptions();
+ parserOptions = Optional.ofNullable(parserOptions).orElse(ParserOptions.getDefaultParserOptions());
+
MultiSourceReader multiSourceReader;
Reader reader = environment.getDocument();
if (reader instanceof MultiSourceReader) {
multiSourceReader = (MultiSourceReader) reader;
} else {
multiSourceReader = MultiSourceReader.newMultiSourceReader()
- .reader(reader, null).build();
+ .reader(reader, null)
+ .trackData(parserOptions.isReaderTrackData())
+ .build();
}
CodePointCharStream charStream;
try {
@@ -290,14 +297,15 @@ public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int
}
});
- // default in the parser options if they are not set
- ParserOptions parserOptions = environment.getParserOptions();
- parserOptions = Optional.ofNullable(parserOptions).orElse(ParserOptions.getDefaultParserOptions());
-
// this lexer wrapper allows us to stop lexing when too many tokens are in place. This prevents DOS attacks.
int maxTokens = parserOptions.getMaxTokens();
int maxWhitespaceTokens = parserOptions.getMaxWhitespaceTokens();
- BiConsumer<Integer, Token> onTooManyTokens = (maxTokenCount, token) -> throwCancelParseIfTooManyTokens(environment, token, maxTokenCount, multiSourceReader);
+ BiConsumer<Integer, Token> onTooManyTokens = (maxTokenCount, token) -> throwIfTokenProblems(
+ environment,
+ token,
+ maxTokenCount,
+ multiSourceReader,
+ ParseCancelledException.class);
SafeTokenSource safeTokenSource = new SafeTokenSource(lexer, maxTokens, maxWhitespaceTokens, onTooManyTokens);
CommonTokenStream tokens = new CommonTokenStream(safeTokenSource);
@@ -343,9 +351,31 @@ private void setupParserListener(ParserEnvironment environment, MultiSourceReade
ParserOptions parserOptions = toLanguage.getParserOptions();
ParsingListener parsingListener = parserOptions.getParsingListener();
int maxTokens = parserOptions.getMaxTokens();
+ int maxRuleDepth = parserOptions.getMaxRuleDepth();
// prevent a billion laugh attacks by restricting how many tokens we allow
ParseTreeListener listener = new GraphqlBaseListener() {
int count = 0;
+ int depth = 0;
+
+
+ @Override
+ public void enterEveryRule(ParserRuleContext ctx) {
+ depth++;
+ if (depth > maxRuleDepth) {
+ throwIfTokenProblems(
+ environment,
+ ctx.getStart(),
+ maxRuleDepth,
+ multiSourceReader,
+ ParseCancelledTooDeepException.class
+ );
+ }
+ }
+
+ @Override
+ public void exitEveryRule(ParserRuleContext ctx) {
+ depth--;
+ }
@Override
public void visitTerminal(TerminalNode node) {
@@ -370,14 +400,20 @@ public int getCharPositionInLine() {
count++;
if (count > maxTokens) {
- throwCancelParseIfTooManyTokens(environment, token, maxTokens, multiSourceReader);
+ throwIfTokenProblems(
+ environment,
+ token,
+ maxTokens,
+ multiSourceReader,
+ ParseCancelledException.class
+ );
}
}
};
parser.addParseListener(listener);
}
- private void throwCancelParseIfTooManyTokens(ParserEnvironment environment, Token token, int maxTokens, MultiSourceReader multiSourceReader) throws ParseCancelledException {
+ private void throwIfTokenProblems(ParserEnvironment environment, Token token, int maxLimit, MultiSourceReader multiSourceReader, Class<? extends InvalidSyntaxException> targetException) throws ParseCancelledException {
String tokenType = "grammar";
SourceLocation sourceLocation = null;
String offendingToken = null;
@@ -388,7 +424,10 @@ private void throwCancelParseIfTooManyTokens(ParserEnvironment environment, Toke
offendingToken = token.getText();
sourceLocation = AntlrHelper.createSourceLocation(multiSourceReader, token.getLine(), token.getCharPositionInLine());
}
- throw new ParseCancelledException(environment.getI18N(), sourceLocation, offendingToken, maxTokens, tokenType);
+ if (targetException.equals(ParseCancelledTooDeepException.class)) {
+ throw new ParseCancelledTooDeepException(environment.getI18N(), sourceLocation, offendingToken, maxLimit, tokenType);
+ }
+ throw new ParseCancelledException(environment.getI18N(), sourceLocation, offendingToken, maxLimit, tokenType);
}
/**
diff --git a/src/main/java/graphql/parser/ParserOptions.java b/src/main/java/graphql/parser/ParserOptions.java
index 6fe708323a..6256166075 100644
--- a/src/main/java/graphql/parser/ParserOptions.java
+++ b/src/main/java/graphql/parser/ParserOptions.java
@@ -15,9 +15,9 @@ public class ParserOptions {
/**
* A graphql hacking vector is to send nonsensical queries that burn lots of parsing CPU time and burn
* memory representing a document that won't ever execute. To prevent this for most users, graphql-java
- * set this value to 15000. ANTLR parsing time is linear to the number of tokens presented. The more you
+ * sets this value to 15000. ANTLR parsing time is linear to the number of tokens presented. The more you
* allow the longer it takes.
- *
+ *
* If you want to allow more, then {@link #setDefaultParserOptions(ParserOptions)} allows you to change this
* JVM wide.
*/
@@ -26,34 +26,50 @@ public class ParserOptions {
* Another graphql hacking vector is to send large amounts of whitespace in operations that burn lots of parsing CPU time and burn
* memory representing a document. Whitespace token processing in ANTLR is 2 orders of magnitude faster than grammar token processing
* however it still takes some time to happen.
- *
+ *
* If you want to allow more, then {@link #setDefaultParserOptions(ParserOptions)} allows you to change this
* JVM wide.
*/
public static final int MAX_WHITESPACE_TOKENS = 200_000;
+ /**
+ * A graphql hacking vector is to send nonsensical queries that have lots of grammar rule depth to them which
+ * can cause stack overflow exceptions during the query parsing. To prevent this for most users, graphql-java
+ * sets this value to 500 grammar rules deep.
+ *
+ * If you want to allow more, then {@link #setDefaultParserOptions(ParserOptions)} allows you to change this
+ * JVM wide.
+ */
+ public static final int MAX_RULE_DEPTH = 500;
+
private static ParserOptions defaultJvmParserOptions = newParserOptions()
.captureIgnoredChars(false)
.captureSourceLocation(true)
.captureLineComments(true)
+ .readerTrackData(true)
.maxTokens(MAX_QUERY_TOKENS) // to prevent a billion laughs style attacks, we set a default for graphql-java
.maxWhitespaceTokens(MAX_WHITESPACE_TOKENS)
+ .maxRuleDepth(MAX_RULE_DEPTH)
.build();
private static ParserOptions defaultJvmOperationParserOptions = newParserOptions()
.captureIgnoredChars(false)
.captureSourceLocation(true)
.captureLineComments(false) // #comments are not useful in query parsing
+ .readerTrackData(true)
.maxTokens(MAX_QUERY_TOKENS) // to prevent a billion laughs style attacks, we set a default for graphql-java
.maxWhitespaceTokens(MAX_WHITESPACE_TOKENS)
+ .maxRuleDepth(MAX_RULE_DEPTH)
.build();
private static ParserOptions defaultJvmSdlParserOptions = newParserOptions()
.captureIgnoredChars(false)
.captureSourceLocation(true)
.captureLineComments(true) // #comments are useful in SDL parsing
+ .readerTrackData(true)
.maxTokens(Integer.MAX_VALUE) // we are less worried about a billion laughs with SDL parsing since the call path is not facing attackers
.maxWhitespaceTokens(Integer.MAX_VALUE)
+ .maxRuleDepth(Integer.MAX_VALUE)
.build();
/**
@@ -154,16 +170,20 @@ public static void setDefaultSdlParserOptions(ParserOptions options) {
private final boolean captureIgnoredChars;
private final boolean captureSourceLocation;
private final boolean captureLineComments;
+ private final boolean readerTrackData;
private final int maxTokens;
private final int maxWhitespaceTokens;
+ private final int maxRuleDepth;
private final ParsingListener parsingListener;
private ParserOptions(Builder builder) {
this.captureIgnoredChars = builder.captureIgnoredChars;
this.captureSourceLocation = builder.captureSourceLocation;
this.captureLineComments = builder.captureLineComments;
+ this.readerTrackData = builder.readerTrackData;
this.maxTokens = builder.maxTokens;
this.maxWhitespaceTokens = builder.maxWhitespaceTokens;
+ this.maxRuleDepth = builder.maxRuleDepth;
this.parsingListener = builder.parsingListener;
}
@@ -204,6 +224,15 @@ public boolean isCaptureLineComments() {
return captureLineComments;
}
+ /**
+ * Controls whether the underlying {@link MultiSourceReader} should track previously read data or not.
+ *
+ * @return true if {@link MultiSourceReader} should track data in memory.
+ */
+ public boolean isReaderTrackData() {
+ return readerTrackData;
+ }
+
/**
* A graphql hacking vector is to send nonsensical queries that burn lots of parsing CPU time and burns
* memory representing a document that won't ever execute. To prevent this you can set a maximum number of parse
@@ -226,6 +255,17 @@ public int getMaxWhitespaceTokens() {
return maxWhitespaceTokens;
}
+ /**
+ * A graphql hacking vector is to send nonsensical queries that have lots of rule depth to them which
+ * can cause stack overflow exceptions during the query parsing. To prevent this you can set a value
+ * that is the maximum depth allowed before an exception is thrown and the parsing is stopped.
+ *
+ * @return the maximum rule depth the parser will accept, after which an exception will be thrown.
+ */
+ public int getMaxRuleDepth() {
+ return maxRuleDepth;
+ }
+
public ParsingListener getParsingListener() {
return parsingListener;
}
@@ -245,9 +285,11 @@ public static class Builder {
private boolean captureIgnoredChars = false;
private boolean captureSourceLocation = true;
private boolean captureLineComments = true;
- private int maxTokens = MAX_QUERY_TOKENS;
+ private boolean readerTrackData = true;
private ParsingListener parsingListener = ParsingListener.NOOP;
+ private int maxTokens = MAX_QUERY_TOKENS;
private int maxWhitespaceTokens = MAX_WHITESPACE_TOKENS;
+ private int maxRuleDepth = MAX_RULE_DEPTH;
Builder() {
}
@@ -258,6 +300,7 @@ public static class Builder {
this.captureLineComments = parserOptions.captureLineComments;
this.maxTokens = parserOptions.maxTokens;
this.maxWhitespaceTokens = parserOptions.maxWhitespaceTokens;
+ this.maxRuleDepth = parserOptions.maxRuleDepth;
this.parsingListener = parserOptions.parsingListener;
}
@@ -276,6 +319,11 @@ public Builder captureLineComments(boolean captureLineComments) {
return this;
}
+ public Builder readerTrackData(boolean readerTrackData) {
+ this.readerTrackData = readerTrackData;
+ return this;
+ }
+
public Builder maxTokens(int maxTokens) {
this.maxTokens = maxTokens;
return this;
@@ -286,6 +334,11 @@ public Builder maxWhitespaceTokens(int maxWhitespaceTokens) {
return this;
}
+ public Builder maxRuleDepth(int maxRuleDepth) {
+ this.maxRuleDepth = maxRuleDepth;
+ return this;
+ }
+
public Builder parsingListener(ParsingListener parsingListener) {
this.parsingListener = assertNotNull(parsingListener);
return this;
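Both kinds of limits (token counts and the new rule depth) can be raised JVM wide through `setDefaultParserOptions`, as the javadoc above notes. A sketch, using arbitrary example values:

```java
import graphql.parser.ParserOptions;

class ParserLimitsSketch {
    public static void main(String[] args) {
        ParserOptions relaxed = ParserOptions.newParserOptions()
                .maxTokens(30_000)
                .maxWhitespaceTokens(400_000)
                .maxRuleDepth(1_000)
                .build();
        // applies to subsequent query parsing across the JVM
        ParserOptions.setDefaultParserOptions(relaxed);
    }
}
```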
diff --git a/src/main/java/graphql/parser/exceptions/ParseCancelledTooDeepException.java b/src/main/java/graphql/parser/exceptions/ParseCancelledTooDeepException.java
new file mode 100644
index 0000000000..c5f9499ec4
--- /dev/null
+++ b/src/main/java/graphql/parser/exceptions/ParseCancelledTooDeepException.java
@@ -0,0 +1,18 @@
+package graphql.parser.exceptions;
+
+import graphql.Internal;
+import graphql.i18n.I18n;
+import graphql.language.SourceLocation;
+import graphql.parser.InvalidSyntaxException;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+@Internal
+public class ParseCancelledTooDeepException extends InvalidSyntaxException {
+
+ @Internal
+ public ParseCancelledTooDeepException(@NotNull I18n i18N, @Nullable SourceLocation sourceLocation, @Nullable String offendingToken, int maxTokens, @NotNull String tokenType) {
+ super(i18N.msg("ParseCancelled.tooDeep", maxTokens, tokenType),
+ sourceLocation, offendingToken, null, null);
+ }
+}
diff --git a/src/main/java/graphql/schema/Coercing.java b/src/main/java/graphql/schema/Coercing.java
index 491bd16b0e..0b4d127b80 100644
--- a/src/main/java/graphql/schema/Coercing.java
+++ b/src/main/java/graphql/schema/Coercing.java
@@ -38,6 +38,8 @@
public interface Coercing<I, O> {
/**
+ * This is deprecated and you should implement {@link #serialize(Object, GraphQLContext, Locale)} instead
+ *
* Called to convert a Java object result of a DataFetcher to a valid runtime value for the scalar type.
*
* Note : Throw {@link graphql.schema.CoercingSerializeException} if there is fundamental
@@ -54,7 +56,9 @@ public interface Coercing<I, O> {
*/
@Deprecated
@DeprecatedAt("2022-08-22")
- @Nullable O serialize(@NotNull Object dataFetcherResult) throws CoercingSerializeException;
+ default @Nullable O serialize(@NotNull Object dataFetcherResult) throws CoercingSerializeException {
+ throw new UnsupportedOperationException("The non deprecated version of serialize has not been implemented by this scalar : " + this.getClass());
+ }
/**
* Called to convert a Java object result of a DataFetcher to a valid runtime value for the scalar type.
@@ -80,11 +84,13 @@ public interface Coercing {
}
/**
+ * This is deprecated and you should implement {@link #parseValue(Object, GraphQLContext, Locale)} instead
+ *
* Called to resolve an input from a query variable into a Java object acceptable for the scalar type.
*
* Note : You should not allow {@link java.lang.RuntimeException}s to come out of your parseValue method, but rather
* catch them and fire them as {@link graphql.schema.CoercingParseValueException} instead as per the method contract.
- *
+ *
* Note : if input is explicit/raw value null, input coercion will return null before this method is called
*
* @param input is never null
@@ -95,7 +101,9 @@ public interface Coercing<I, O> {
*/
@Deprecated
@DeprecatedAt("2022-08-22")
- @Nullable I parseValue(@NotNull Object input) throws CoercingParseValueException;
+ default @Nullable I parseValue(@NotNull Object input) throws CoercingParseValueException {
+ throw new UnsupportedOperationException("The non deprecated version of parseValue has not been implemented by this scalar : " + this.getClass());
+ }
/**
* Called to resolve an input from a query variable into a Java object acceptable for the scalar type.
@@ -113,7 +121,8 @@ public interface Coercing<I, O> {
*
* @throws graphql.schema.CoercingParseValueException if value input can't be parsed
*/
- @Nullable default I parseValue(@NotNull Object input, @NotNull GraphQLContext graphQLContext, @NotNull Locale locale) throws CoercingParseValueException {
+ @Nullable
+ default I parseValue(@NotNull Object input, @NotNull GraphQLContext graphQLContext, @NotNull Locale locale) throws CoercingParseValueException {
assertNotNull(input);
assertNotNull(graphQLContext);
assertNotNull(locale);
@@ -121,12 +130,14 @@ public interface Coercing<I, O> {
}
/**
+ * This is deprecated and you should implement {@link #parseLiteral(Value, CoercedVariables, GraphQLContext, Locale)} instead
+ *
* Called during query validation to convert a query input AST node into a Java object acceptable for the scalar type. The input
* object will be an instance of {@link graphql.language.Value}.
*
* Note : You should not allow {@link java.lang.RuntimeException}s to come out of your parseLiteral method, but rather
* catch them and fire them as {@link graphql.schema.CoercingParseLiteralException} instead as per the method contract.
- *
+ *
* Note : if input is literal {@link graphql.language.NullValue}, input coercion will return null before this method is called
*
* @param input is never null
@@ -137,9 +148,13 @@ public interface Coercing<I, O> {
*/
@Deprecated
@DeprecatedAt("2022-08-22")
- @Nullable I parseLiteral(@NotNull Object input) throws CoercingParseLiteralException;
+ default @Nullable I parseLiteral(@NotNull Object input) throws CoercingParseLiteralException {
+ throw new UnsupportedOperationException("The non deprecated version of parseLiteral has not been implemented by this scalar : " + this.getClass());
+ }
/**
+ * This is deprecated and you should implement {@link #parseLiteral(Value, CoercedVariables, GraphQLContext, Locale)} instead
+ *
* Called during query execution to convert a query input AST node into a Java object acceptable for the scalar type. The input
* object will be an instance of {@link graphql.language.Value}.
*
@@ -197,6 +212,8 @@ public interface Coercing<I, O> {
/**
+ * This is deprecated and you should implement {@link #valueToLiteral(Object, GraphQLContext, Locale)} instead
+ *
* Converts an external input value to a literal (Ast Value).
*
* IMPORTANT: the argument is validated before by calling {@link #parseValue(Object)}.
@@ -208,7 +225,7 @@ public interface Coercing<I, O> {
@Deprecated
@DeprecatedAt("2022-08-22")
default @NotNull Value valueToLiteral(@NotNull Object input) {
- throw new UnsupportedOperationException("This is not implemented by this Scalar " + this.getClass());
+ throw new UnsupportedOperationException("The non deprecated version of valueToLiteral has not been implemented by this scalar : " + this.getClass());
}
/**
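With the deprecated single-argument methods now defaulted, a scalar coercing only needs to implement the newer context-aware overloads referenced above. A minimal sketch (the class is hypothetical and not part of this change):

```java
import graphql.GraphQLContext;
import graphql.execution.CoercedVariables;
import graphql.language.StringValue;
import graphql.language.Value;
import graphql.schema.Coercing;
import graphql.schema.CoercingParseLiteralException;
import graphql.schema.CoercingParseValueException;
import graphql.schema.CoercingSerializeException;

import java.util.Locale;

class UpperCaseCoercing implements Coercing<String, String> {
    @Override
    public String serialize(Object dataFetcherResult, GraphQLContext graphQLContext, Locale locale) throws CoercingSerializeException {
        return String.valueOf(dataFetcherResult).toUpperCase(locale);
    }

    @Override
    public String parseValue(Object input, GraphQLContext graphQLContext, Locale locale) throws CoercingParseValueException {
        return String.valueOf(input).toUpperCase(locale);
    }

    @Override
    public String parseLiteral(Value<?> input, CoercedVariables variables, GraphQLContext graphQLContext, Locale locale) throws CoercingParseLiteralException {
        if (input instanceof StringValue) {
            return ((StringValue) input).getValue().toUpperCase(locale);
        }
        throw new CoercingParseLiteralException("Expected a StringValue");
    }
}
```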
diff --git a/src/main/java/graphql/schema/GraphQLAppliedDirective.java b/src/main/java/graphql/schema/GraphQLAppliedDirective.java
index a956af8e3e..ba73f99701 100644
--- a/src/main/java/graphql/schema/GraphQLAppliedDirective.java
+++ b/src/main/java/graphql/schema/GraphQLAppliedDirective.java
@@ -28,7 +28,7 @@
* classes have been introduced to better model when a directive is applied to a schema element,
* as opposed to its schema definition itself.
*
- * See http://graphql.org/learn/queries/#directives for more details on the concept.
+ * See https://graphql.org/learn/queries/#directives for more details on the concept.
*/
@PublicApi
public class GraphQLAppliedDirective implements GraphQLNamedSchemaElement {
diff --git a/src/main/java/graphql/schema/GraphQLArgument.java b/src/main/java/graphql/schema/GraphQLArgument.java
index eb4f156ed8..4ba3f123ea 100644
--- a/src/main/java/graphql/schema/GraphQLArgument.java
+++ b/src/main/java/graphql/schema/GraphQLArgument.java
@@ -23,15 +23,15 @@
import static graphql.execution.ValuesResolver.getInputValueImpl;
/**
- * This defines an argument that can be supplied to a graphql field (via {@link graphql.schema.GraphQLFieldDefinition}.
+ * This defines an argument that can be supplied to a graphql field (via {@link GraphQLFieldDefinition}.
*
* Fields can be thought of as "functions" that take arguments and return a value.
*
- * See http://graphql.org/learn/queries/#arguments for more details on the concept.
+ * See https://graphql.org/learn/queries/#arguments for more details on the concept.
*
- * {@link graphql.schema.GraphQLArgument} is used in two contexts, one context is graphql queries where it represents the arguments that can be
+ * {@link GraphQLArgument} is used in two contexts, one context is graphql queries where it represents the arguments that can be
* set on a field and the other is in Schema Definition Language (SDL) where it can be used to represent the argument value instances
- * that have been supplied on a {@link graphql.schema.GraphQLDirective}.
+ * that have been supplied on a {@link GraphQLDirective}.
*
* The difference is the 'value' and 'defaultValue' properties. In a query argument, the 'value' is never in the GraphQLArgument
* object but rather in the AST direct or in the query variables map and the 'defaultValue' represents a value to use if both of these are
diff --git a/src/main/java/graphql/schema/GraphQLDirective.java b/src/main/java/graphql/schema/GraphQLDirective.java
index 5cc120e1b1..1dbc41042c 100644
--- a/src/main/java/graphql/schema/GraphQLDirective.java
+++ b/src/main/java/graphql/schema/GraphQLDirective.java
@@ -23,7 +23,7 @@
/**
* A directive can be used to modify the behavior of a graphql field or type.
*
- * See http://graphql.org/learn/queries/#directives for more details on the concept.
+ * See https://graphql.org/learn/queries/#directives for more details on the concept.
*
* A directive has a definition, that is what arguments it takes, and it can also be applied
* to other schema elements. Originally graphql-java re-used the {@link GraphQLDirective} and {@link GraphQLArgument}
diff --git a/src/main/java/graphql/schema/GraphQLEnumType.java b/src/main/java/graphql/schema/GraphQLEnumType.java
index 27354e1e74..00ced1b451 100644
--- a/src/main/java/graphql/schema/GraphQLEnumType.java
+++ b/src/main/java/graphql/schema/GraphQLEnumType.java
@@ -1,6 +1,5 @@
package graphql.schema;
-
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import graphql.DirectivesUtil;
@@ -14,8 +13,8 @@
import graphql.util.FpKit;
import graphql.util.TraversalControl;
import graphql.util.TraverserContext;
+import org.jetbrains.annotations.NotNull;
-import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
@@ -38,7 +37,7 @@
* This allows you to validate that any arguments of this type are one of the allowed values
* and communicate through the type system that a field will always be one of a finite set of values.
*
- * See http://graphql.org/learn/schema/#enumeration-types for more details
+ * See https://graphql.org/learn/schema/#enumeration-types for more details
*/
@PublicApi
public class GraphQLEnumType implements GraphQLNamedInputType, GraphQLNamedOutputType, GraphQLUnmodifiedType, GraphQLNullableType, GraphQLDirectiveContainer {
@@ -149,7 +148,7 @@ private ImmutableMap buildMap(List assertShouldNeverHappen("Duplicated definition for field '%s' in type '%s'", fld1.getName(), this.name)));
}
- private Object getValueByName(@Nonnull Object value, GraphQLContext graphQLContext, Locale locale) {
+ private Object getValueByName(@NotNull Object value, GraphQLContext graphQLContext, Locale locale) {
GraphQLEnumValueDefinition enumValueDefinition = valueDefinitionMap.get(value.toString());
if (enumValueDefinition != null) {
return enumValueDefinition.getValue();
diff --git a/src/main/java/graphql/schema/GraphQLEnumValueDefinition.java b/src/main/java/graphql/schema/GraphQLEnumValueDefinition.java
index e2caca9e11..4d34417773 100644
--- a/src/main/java/graphql/schema/GraphQLEnumValueDefinition.java
+++ b/src/main/java/graphql/schema/GraphQLEnumValueDefinition.java
@@ -19,7 +19,7 @@
/**
* A graphql enumeration type has a limited set of values and this defines one of those unique values
*
- * See http://graphql.org/learn/schema/#enumeration-types for more details
+ * See https://graphql.org/learn/schema/#enumeration-types for more details
*
* @see graphql.schema.GraphQLEnumType
*/
diff --git a/src/main/java/graphql/schema/GraphQLFieldDefinition.java b/src/main/java/graphql/schema/GraphQLFieldDefinition.java
index 865309e37f..79f824997d 100644
--- a/src/main/java/graphql/schema/GraphQLFieldDefinition.java
+++ b/src/main/java/graphql/schema/GraphQLFieldDefinition.java
@@ -24,13 +24,13 @@
/**
* Fields are the ways you get data values in graphql and a field definition represents a field, its type, the arguments it takes
- * and the {@link graphql.schema.DataFetcher} used to get data values for that field.
+ * and the {@link DataFetcher} used to get data values for that field.
*
* Fields can be thought of as functions in graphql, they have a name, take defined arguments and return a value.
*
* Fields can also be deprecated, which indicates to consumers that a field won't be supported in the future.
*
- * See http://graphql.org/learn/queries/#fields for more details on the concept.
+ * See https://graphql.org/learn/queries/#fields for more details on the concept.
*/
@PublicApi
public class GraphQLFieldDefinition implements GraphQLNamedSchemaElement, GraphQLDirectiveContainer {
diff --git a/src/main/java/graphql/schema/GraphQLInputObjectField.java b/src/main/java/graphql/schema/GraphQLInputObjectField.java
index 0e88a907c1..fda458defc 100644
--- a/src/main/java/graphql/schema/GraphQLInputObjectField.java
+++ b/src/main/java/graphql/schema/GraphQLInputObjectField.java
@@ -22,12 +22,12 @@
import static graphql.execution.ValuesResolver.getInputValueImpl;
/**
- * Input objects defined via {@link graphql.schema.GraphQLInputObjectType} contains these input fields.
+ * Input objects defined via {@link GraphQLInputObjectType} contain these input fields.
*
- * There are similar to {@link graphql.schema.GraphQLFieldDefinition} however they can ONLY be used on input objects, that
+ * These are similar to {@link GraphQLFieldDefinition}, however they can ONLY be used on input objects, that
* is to describe values that are fed into a graphql mutation.
*
- * See http://graphql.org/learn/schema/#input-types for more details on the concept.
+ * See https://graphql.org/learn/schema/#input-types for more details on the concept.
*/
@PublicApi
public class GraphQLInputObjectField implements GraphQLNamedSchemaElement, GraphQLInputValueDefinition {
diff --git a/src/main/java/graphql/schema/GraphQLInputObjectType.java b/src/main/java/graphql/schema/GraphQLInputObjectType.java
index 3bdae47028..ae22565a74 100644
--- a/src/main/java/graphql/schema/GraphQLInputObjectType.java
+++ b/src/main/java/graphql/schema/GraphQLInputObjectType.java
@@ -28,7 +28,7 @@
* graphql clearly delineates between the types of objects that represent the output of a query and input objects that
* can be fed into a graphql mutation. You can define objects as input to graphql via this class
*
- * See http://graphql.org/learn/schema/#input-types for more details on the concept
+ * See https://graphql.org/learn/schema/#input-types for more details on the concept
*/
@PublicApi
public class GraphQLInputObjectType implements GraphQLNamedInputType, GraphQLUnmodifiedType, GraphQLNullableType, GraphQLInputFieldsContainer, GraphQLDirectiveContainer {
diff --git a/src/main/java/graphql/schema/GraphQLInterfaceType.java b/src/main/java/graphql/schema/GraphQLInterfaceType.java
index 4f1cf1e51b..814609a7c5 100644
--- a/src/main/java/graphql/schema/GraphQLInterfaceType.java
+++ b/src/main/java/graphql/schema/GraphQLInterfaceType.java
@@ -32,10 +32,10 @@
* In graphql, an interface is an abstract type that defines the set of fields that a type must include to
* implement that interface.
*
- * At runtime a {@link graphql.schema.TypeResolver} is used to take an interface object value and decide what {@link graphql.schema.GraphQLObjectType}
+ * At runtime a {@link TypeResolver} is used to take an interface object value and decide what {@link GraphQLObjectType}
* represents this interface type.
*
- * See http://graphql.org/learn/schema/#interfaces for more details on the concept.
+ * See https://graphql.org/learn/schema/#interfaces for more details on the concept.
*/
@PublicApi
public class GraphQLInterfaceType implements GraphQLNamedType, GraphQLCompositeType, GraphQLUnmodifiedType, GraphQLNullableType, GraphQLDirectiveContainer, GraphQLImplementingType {
diff --git a/src/main/java/graphql/schema/GraphQLList.java b/src/main/java/graphql/schema/GraphQLList.java
index 8ec4008f97..1ac94f5ffe 100644
--- a/src/main/java/graphql/schema/GraphQLList.java
+++ b/src/main/java/graphql/schema/GraphQLList.java
@@ -13,8 +13,8 @@
/**
* A modified type that indicates there is a list of the underlying wrapped type, eg a list of strings or a list of booleans.
- *
- * See http://graphql.org/learn/schema/#lists-and-non-null for more details on the concept
+ *
+ * See https://graphql.org/learn/schema/#lists-and-non-null for more details on the concept
*/
@PublicApi
public class GraphQLList implements GraphQLType, GraphQLInputType, GraphQLOutputType, GraphQLModifiedType, GraphQLNullableType {
diff --git a/src/main/java/graphql/schema/GraphQLNonNull.java b/src/main/java/graphql/schema/GraphQLNonNull.java
index ebbfa77bac..6de1ba61d3 100644
--- a/src/main/java/graphql/schema/GraphQLNonNull.java
+++ b/src/main/java/graphql/schema/GraphQLNonNull.java
@@ -15,7 +15,7 @@
/**
* A modified type that indicates that the underlying wrapped type will not be null.
*
- * See http://graphql.org/learn/schema/#lists-and-non-null for more details on the concept
+ * See https://graphql.org/learn/schema/#lists-and-non-null for more details on the concept
*/
@PublicApi
public class GraphQLNonNull implements GraphQLType, GraphQLInputType, GraphQLOutputType, GraphQLModifiedType {
diff --git a/src/main/java/graphql/schema/GraphQLObjectType.java b/src/main/java/graphql/schema/GraphQLObjectType.java
index daf4f4ce58..732c0ff753 100644
--- a/src/main/java/graphql/schema/GraphQLObjectType.java
+++ b/src/main/java/graphql/schema/GraphQLObjectType.java
@@ -33,9 +33,9 @@
* by the graphql system.
*
* Those fields can themselves be object types and so on until you reach the leaf nodes of the type tree represented
- * by {@link graphql.schema.GraphQLScalarType}s.
+ * by {@link GraphQLScalarType}s.
*
- * See http://graphql.org/learn/schema/#object-types-and-fields for more details on the concept.
+ * See https://graphql.org/learn/schema/#object-types-and-fields for more details on the concept.
*/
@PublicApi
public class GraphQLObjectType implements GraphQLNamedOutputType, GraphQLCompositeType, GraphQLUnmodifiedType, GraphQLNullableType, GraphQLDirectiveContainer, GraphQLImplementingType {
diff --git a/src/main/java/graphql/schema/GraphQLSchema.java b/src/main/java/graphql/schema/GraphQLSchema.java
index adbf61aaf1..9b0a1b561c 100644
--- a/src/main/java/graphql/schema/GraphQLSchema.java
+++ b/src/main/java/graphql/schema/GraphQLSchema.java
@@ -45,7 +45,7 @@
* The schema represents the combined type system of the graphql engine. This is how the engine knows
* what graphql queries represent what data.
*
- * See http://graphql.org/learn/schema/#type-language for more details
+ * See https://graphql.org/learn/schema/#type-language for more details
*/
@PublicApi
public class GraphQLSchema {
diff --git a/src/main/java/graphql/schema/GraphQLUnionType.java b/src/main/java/graphql/schema/GraphQLUnionType.java
index f2813f9012..452fcd2da9 100644
--- a/src/main/java/graphql/schema/GraphQLUnionType.java
+++ b/src/main/java/graphql/schema/GraphQLUnionType.java
@@ -28,12 +28,12 @@
/**
* A union type is a polymorphic type that dynamically represents one of many concrete object types.
*
- * At runtime a {@link graphql.schema.TypeResolver} is used to take an union object value and decide what {@link graphql.schema.GraphQLObjectType}
+ * At runtime a {@link TypeResolver} is used to take a union object value and decide what {@link GraphQLObjectType}
* represents this union of types.
*
* Note that members of a union type need to be concrete object types; you can't create a union type out of interfaces or other unions.
*
- * See http://graphql.org/learn/schema/#union-types for more details on the concept.
+ * See https://graphql.org/learn/schema/#union-types for more details on the concept.
*/
@PublicApi
public class GraphQLUnionType implements GraphQLNamedOutputType, GraphQLCompositeType, GraphQLUnmodifiedType, GraphQLNullableType, GraphQLDirectiveContainer {
diff --git a/src/main/java/graphql/schema/PropertyFetchingImpl.java b/src/main/java/graphql/schema/PropertyFetchingImpl.java
index 75a795a0ee..5a7a92989a 100644
--- a/src/main/java/graphql/schema/PropertyFetchingImpl.java
+++ b/src/main/java/graphql/schema/PropertyFetchingImpl.java
@@ -3,6 +3,8 @@
import graphql.GraphQLException;
import graphql.Internal;
import graphql.schema.fetching.LambdaFetchingSupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
@@ -30,6 +32,7 @@
*/
@Internal
public class PropertyFetchingImpl {
+ private static final Logger log = LoggerFactory.getLogger(PropertyFetchingImpl.class);
private final AtomicBoolean USE_SET_ACCESSIBLE = new AtomicBoolean(true);
private final AtomicBoolean USE_LAMBDA_FACTORY = new AtomicBoolean(true);
@@ -108,10 +111,21 @@ public Object getPropertyValue(String propertyName, Object object, GraphQLType g
Optional<Function<Object, Object>> getterOpt = lambdaGetter(propertyName, object);
if (getterOpt.isPresent()) {
- Function<Object, Object> getter = getterOpt.get();
- cachedFunction = new CachedLambdaFunction(getter);
- LAMBDA_CACHE.putIfAbsent(cacheKey, cachedFunction);
- return getter.apply(object);
+ try {
+ Function<Object, Object> getter = getterOpt.get();
+ Object value = getter.apply(object);
+ cachedFunction = new CachedLambdaFunction(getter);
+ LAMBDA_CACHE.putIfAbsent(cacheKey, cachedFunction);
+ return value;
+ } catch (LinkageError | ClassCastException ignored) {
+ //
+ // if we get a linkage error then it may be that class loader challenges
+ // are preventing the Meta Lambda from working. So let's continue with
+ // old skool reflection and if it's all broken there then it will eventually
+ // end up negatively cached
+ log.debug("Unable to invoke fast Meta Lambda for `{}` - Falling back to reflection", object.getClass().getName(), ignored);
+
+ }
}
//
@@ -241,7 +255,7 @@ private Method findPubliclyAccessibleMethod(CacheKey cacheKey, Class> rootClas
and fetch them - e.g. `object.propertyName()`
*/
private Method findRecordMethod(CacheKey cacheKey, Class<?> rootClass, String methodName) throws NoSuchMethodException {
- return findPubliclyAccessibleMethod(cacheKey,rootClass,methodName,false);
+ return findPubliclyAccessibleMethod(cacheKey, rootClass, methodName, false);
}
private Method findViaSetAccessible(CacheKey cacheKey, Class<?> aClass, String methodName, boolean dfeInUse) throws NoSuchMethodException {
@@ -347,6 +361,7 @@ public void clearReflectionCache() {
public boolean setUseSetAccessible(boolean flag) {
return USE_SET_ACCESSIBLE.getAndSet(flag);
}
+
public boolean setUseLambdaFactory(boolean flag) {
return USE_LAMBDA_FACTORY.getAndSet(flag);
}
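The try/catch added above is the classic fast-path-with-fallback pattern: use the metafactory lambda when it links, otherwise fall back to reflection. A standalone sketch of that pattern for readers unfamiliar with it (illustrative only, not the library's internal code):

    import java.lang.reflect.Method;
    import java.util.function.Function;

    final class FastPathWithFallback {
        // Try the pre-built lambda getter first; if it cannot be linked against the
        // target class (e.g. a class loader mismatch), fall back to reflective
        // invocation instead of failing the property fetch outright.
        static Object fetch(Function<Object, Object> lambdaGetter, Method reflectiveGetter, Object target) throws Exception {
            try {
                return lambdaGetter.apply(target);
            } catch (LinkageError | ClassCastException fastPathFailed) {
                return reflectiveGetter.invoke(target);
            }
        }
    }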
diff --git a/src/main/java/graphql/schema/diffing/DiffImpl.java b/src/main/java/graphql/schema/diffing/DiffImpl.java
index ee8c847f0d..0ce80046a1 100644
--- a/src/main/java/graphql/schema/diffing/DiffImpl.java
+++ b/src/main/java/graphql/schema/diffing/DiffImpl.java
@@ -22,10 +22,11 @@
@Internal
public class DiffImpl {
- private static MappingEntry LAST_ELEMENT = new MappingEntry();
- private SchemaGraph completeSourceGraph;
- private SchemaGraph completeTargetGraph;
- private FillupIsolatedVertices.IsolatedVertices isolatedVertices;
+ private static final MappingEntry LAST_ELEMENT = new MappingEntry();
+ private final SchemaGraph completeSourceGraph;
+ private final SchemaGraph completeTargetGraph;
+ private final FillupIsolatedVertices.IsolatedVertices isolatedVertices;
+ private final SchemaDiffingRunningCheck runningCheck;
private static class MappingEntry {
public boolean siblingsFinished;
@@ -67,14 +68,14 @@ public OptimalEdit(List mappings, List> listOfEditO
}
}
- public DiffImpl(SchemaGraph completeSourceGraph, SchemaGraph completeTargetGraph, FillupIsolatedVertices.IsolatedVertices isolatedVertices) {
+ public DiffImpl(SchemaGraph completeSourceGraph, SchemaGraph completeTargetGraph, FillupIsolatedVertices.IsolatedVertices isolatedVertices, SchemaDiffingRunningCheck runningCheck) {
this.completeSourceGraph = completeSourceGraph;
this.completeTargetGraph = completeTargetGraph;
this.isolatedVertices = isolatedVertices;
+ this.runningCheck = runningCheck;
}
OptimalEdit diffImpl(Mapping startMapping, List<Vertex> relevantSourceList, List<Vertex> relevantTargetList) throws Exception {
-
int graphSize = relevantSourceList.size();
ArrayList initialEditOperations = new ArrayList<>();
@@ -122,7 +123,10 @@ OptimalEdit diffImpl(Mapping startMapping, List relevantSourceList, List
relevantTargetList
);
}
+
+ runningCheck.check();
}
+
return optimalEdit;
}
diff --git a/src/main/java/graphql/schema/diffing/FillupIsolatedVertices.java b/src/main/java/graphql/schema/diffing/FillupIsolatedVertices.java
index 0f27e76ce7..a648a1161f 100644
--- a/src/main/java/graphql/schema/diffing/FillupIsolatedVertices.java
+++ b/src/main/java/graphql/schema/diffing/FillupIsolatedVertices.java
@@ -41,14 +41,15 @@
@Internal
public class FillupIsolatedVertices {
+ private final SchemaDiffingRunningCheck runningCheck;
- SchemaGraph sourceGraph;
- SchemaGraph targetGraph;
- IsolatedVertices isolatedVertices;
+ private final SchemaGraph sourceGraph;
+ private final SchemaGraph targetGraph;
+ private final IsolatedVertices isolatedVertices;
- private BiMap toRemove = HashBiMap.create();
+ private final BiMap toRemove = HashBiMap.create();
- static Map> typeContexts = new LinkedHashMap<>();
+ final static Map> typeContexts = new LinkedHashMap<>();
static {
typeContexts.put(SCHEMA, schemaContext());
@@ -710,7 +711,8 @@ public boolean filter(Vertex argument, SchemaGraph schemaGraph) {
}
- public FillupIsolatedVertices(SchemaGraph sourceGraph, SchemaGraph targetGraph) {
+ public FillupIsolatedVertices(SchemaGraph sourceGraph, SchemaGraph targetGraph, SchemaDiffingRunningCheck runningCheck) {
+ this.runningCheck = runningCheck;
this.sourceGraph = sourceGraph;
this.targetGraph = targetGraph;
this.isolatedVertices = new IsolatedVertices();
@@ -830,6 +832,7 @@ private void calcPossibleMappingImpl(
Set<Vertex> usedSourceVertices,
Set<Vertex> usedTargetVertices,
String typeNameForDebug) {
+ runningCheck.check();
VertexContextSegment finalCurrentContext = contexts.get(contextIx);
Map> sourceGroups = FpKit.filterAndGroupingBy(currentSourceVertices,
@@ -922,5 +925,7 @@ private void calcPossibleMappingImpl(
isolatedVertices.putPossibleMappings(possibleSourceVertices, possibleTargetVertices);
}
-
+ public IsolatedVertices getIsolatedVertices() {
+ return isolatedVertices;
+ }
}
diff --git a/src/main/java/graphql/schema/diffing/SchemaDiffing.java b/src/main/java/graphql/schema/diffing/SchemaDiffing.java
index 763ed0b1b3..7b1db49647 100644
--- a/src/main/java/graphql/schema/diffing/SchemaDiffing.java
+++ b/src/main/java/graphql/schema/diffing/SchemaDiffing.java
@@ -15,12 +15,19 @@
@Internal
public class SchemaDiffing {
-
-
+ private final SchemaDiffingRunningCheck runningCheck = new SchemaDiffingRunningCheck();
SchemaGraph sourceGraph;
SchemaGraph targetGraph;
+ /**
+ * Tries to stop the algorithm execution as soon as possible by throwing a
+ * {@link SchemaDiffingCancelledException}.
+ */
+ public void stop() {
+ runningCheck.stop();
+ }
+
public List<EditOperation> diffGraphQLSchema(GraphQLSchema graphQLSchema1, GraphQLSchema graphQLSchema2) throws Exception {
sourceGraph = new SchemaGraphFactory("source-").createGraph(graphQLSchema1);
targetGraph = new SchemaGraphFactory("target-").createGraph(graphQLSchema2);
@@ -32,7 +39,7 @@ public EditOperationAnalysisResult diffAndAnalyze(GraphQLSchema graphQLSchema1,
targetGraph = new SchemaGraphFactory("target-").createGraph(graphQLSchema2);
DiffImpl.OptimalEdit optimalEdit = diffImpl(sourceGraph, targetGraph);
EditOperationAnalyzer editOperationAnalyzer = new EditOperationAnalyzer(graphQLSchema1, graphQLSchema1, sourceGraph, targetGraph);
- return editOperationAnalyzer.analyzeEdits(optimalEdit.listOfEditOperations.get(0),optimalEdit.mappings.get(0));
+ return editOperationAnalyzer.analyzeEdits(optimalEdit.listOfEditOperations.get(0), optimalEdit.mappings.get(0));
}
public DiffImpl.OptimalEdit diffGraphQLSchemaAllEdits(GraphQLSchema graphQLSchema1, GraphQLSchema graphQLSchema2) throws Exception {
@@ -45,9 +52,9 @@ public DiffImpl.OptimalEdit diffGraphQLSchemaAllEdits(GraphQLSchema graphQLSchem
private DiffImpl.OptimalEdit diffImpl(SchemaGraph sourceGraph, SchemaGraph targetGraph) throws Exception {
int sizeDiff = targetGraph.size() - sourceGraph.size();
System.out.println("graph diff: " + sizeDiff);
- FillupIsolatedVertices fillupIsolatedVertices = new FillupIsolatedVertices(sourceGraph, targetGraph);
+ FillupIsolatedVertices fillupIsolatedVertices = new FillupIsolatedVertices(sourceGraph, targetGraph, runningCheck);
fillupIsolatedVertices.ensureGraphAreSameSize();
- FillupIsolatedVertices.IsolatedVertices isolatedVertices = fillupIsolatedVertices.isolatedVertices;
+ FillupIsolatedVertices.IsolatedVertices isolatedVertices = fillupIsolatedVertices.getIsolatedVertices();
assertTrue(sourceGraph.size() == targetGraph.size());
// if (sizeDiff != 0) {
@@ -60,7 +67,8 @@ private DiffImpl.OptimalEdit diffImpl(SchemaGraph sourceGraph, SchemaGraph targe
editorialCostForMapping(fixedMappings, sourceGraph, targetGraph, result);
return new DiffImpl.OptimalEdit(singletonList(fixedMappings), singletonList(result), result.size());
}
- DiffImpl diffImpl = new DiffImpl(sourceGraph, targetGraph, isolatedVertices);
+
+ DiffImpl diffImpl = new DiffImpl(sourceGraph, targetGraph, isolatedVertices, runningCheck);
List<Vertex> nonMappedSource = new ArrayList<>(sourceGraph.getVertices());
nonMappedSource.removeAll(fixedMappings.getSources());
// for(Vertex vertex: nonMappedSource) {
@@ -74,6 +82,7 @@ private DiffImpl.OptimalEdit diffImpl(SchemaGraph sourceGraph, SchemaGraph targe
List<Vertex> nonMappedTarget = new ArrayList<>(targetGraph.getVertices());
nonMappedTarget.removeAll(fixedMappings.getTargets());
+ runningCheck.check();
sortListBasedOnPossibleMapping(nonMappedSource, isolatedVertices);
// the non mapped vertices go to the end
@@ -141,6 +150,4 @@ private List calcEdgeOperations(Mapping mapping) {
}
return result;
}
-
-
}
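A hedged usage sketch of the new cancellation hook: run the potentially expensive diff on a worker thread and call stop() from the controlling thread once a deadline passes. The class and method names come from this diff, while the timeout harness around them is invented for illustration:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;
    import graphql.schema.GraphQLSchema;
    import graphql.schema.diffing.SchemaDiffing;
    import graphql.schema.diffing.SchemaDiffingCancelledException;

    class BoundedSchemaDiff {
        // Give the diffing algorithm a time budget; past that, ask it to abort.
        static void diffWithBudget(GraphQLSchema oldSchema, GraphQLSchema newSchema) throws Exception {
            SchemaDiffing diffing = new SchemaDiffing();
            ExecutorService executor = Executors.newSingleThreadExecutor();
            Future<?> work = executor.submit(() -> {
                try {
                    diffing.diffGraphQLSchema(oldSchema, newSchema);
                } catch (SchemaDiffingCancelledException cancelled) {
                    // expected when stop() is called before the diff finishes
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            });
            try {
                work.get(30, TimeUnit.SECONDS);
            } catch (TimeoutException tooSlow) {
                diffing.stop(); // the running diff aborts at its next runningCheck.check()
            } finally {
                executor.shutdown();
            }
        }
    }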
diff --git a/src/main/java/graphql/schema/diffing/SchemaDiffingCancelledException.java b/src/main/java/graphql/schema/diffing/SchemaDiffingCancelledException.java
new file mode 100644
index 0000000000..1288644191
--- /dev/null
+++ b/src/main/java/graphql/schema/diffing/SchemaDiffingCancelledException.java
@@ -0,0 +1,10 @@
+package graphql.schema.diffing;
+
+import graphql.Internal;
+
+@Internal
+public class SchemaDiffingCancelledException extends RuntimeException {
+ SchemaDiffingCancelledException(boolean byInterrupt) {
+ super("Schema diffing job was cancelled by " + (byInterrupt ? "thread interrupt" : "stop call"));
+ }
+}
diff --git a/src/main/java/graphql/schema/diffing/SchemaDiffingRunningCheck.java b/src/main/java/graphql/schema/diffing/SchemaDiffingRunningCheck.java
new file mode 100644
index 0000000000..8b0182eb38
--- /dev/null
+++ b/src/main/java/graphql/schema/diffing/SchemaDiffingRunningCheck.java
@@ -0,0 +1,17 @@
+package graphql.schema.diffing;
+
+import java.util.concurrent.atomic.AtomicBoolean;
+
+class SchemaDiffingRunningCheck {
+ private final AtomicBoolean wasStopped = new AtomicBoolean(false);
+
+ void check() {
+ if (wasStopped.get()) {
+ throw new SchemaDiffingCancelledException(false);
+ }
+ }
+
+ void stop() {
+ wasStopped.set(true);
+ }
+}
diff --git a/src/main/java/graphql/schema/diffing/ana/EditOperationAnalyzer.java b/src/main/java/graphql/schema/diffing/ana/EditOperationAnalyzer.java
index 9fcc3ce154..888974dc83 100644
--- a/src/main/java/graphql/schema/diffing/ana/EditOperationAnalyzer.java
+++ b/src/main/java/graphql/schema/diffing/ana/EditOperationAnalyzer.java
@@ -8,17 +8,17 @@
import graphql.schema.diffing.Mapping;
import graphql.schema.diffing.SchemaGraph;
import graphql.schema.diffing.Vertex;
-import graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveArgumentRename;
-import graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveObjectLocation;
import graphql.schema.idl.ScalarInfo;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
+import java.util.function.Predicate;
import static graphql.Assert.assertTrue;
import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveAddition;
import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveArgumentDeletion;
+import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveArgumentRename;
import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveArgumentValueModification;
import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveDeletion;
import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveDirectiveArgumentLocation;
@@ -31,6 +31,7 @@
import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveInterfaceLocation;
import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveObjectFieldArgumentLocation;
import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveObjectFieldLocation;
+import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveObjectLocation;
import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveScalarLocation;
import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveUnionLocation;
import static graphql.schema.diffing.ana.SchemaDifference.DirectiveAddition;
@@ -48,6 +49,7 @@
import static graphql.schema.diffing.ana.SchemaDifference.EnumModification;
import static graphql.schema.diffing.ana.SchemaDifference.EnumValueAddition;
import static graphql.schema.diffing.ana.SchemaDifference.EnumValueDeletion;
+import static graphql.schema.diffing.ana.SchemaDifference.EnumValueRenamed;
import static graphql.schema.diffing.ana.SchemaDifference.InputObjectAddition;
import static graphql.schema.diffing.ana.SchemaDifference.InputObjectDeletion;
import static graphql.schema.diffing.ana.SchemaDifference.InputObjectDifference;
@@ -336,11 +338,9 @@ private void appliedDirectiveArgumentChanged(EditOperation editOperation) {
if (nameChanged) {
AppliedDirectiveArgumentRename argumentRename = new AppliedDirectiveArgumentRename(location, oldArgumentName, newArgumentName);
getObjectModification(object.getName()).getDetails().add(argumentRename);
-
}
}
}
-
}
private void appliedDirectiveAdded(EditOperation editOperation) {
@@ -593,7 +593,7 @@ private void handleUnionMemberChanges(List editOperations, Mappin
break;
case DELETE_EDGE:
Edge oldEdge = editOperation.getSourceEdge();
- if (oldEdge.getFrom().isOfType(SchemaGraph.UNION)) {
+ if (oldEdge.getFrom().isOfType(SchemaGraph.UNION) && !oldEdge.getTo().isOfType(SchemaGraph.APPLIED_DIRECTIVE)) {
handleUnionMemberDeleted(editOperation);
}
break;
@@ -606,16 +606,21 @@ private void handleEnumValuesChanges(List editOperations, Mapping
switch (editOperation.getOperation()) {
case INSERT_EDGE:
Edge newEdge = editOperation.getTargetEdge();
- if (newEdge.getFrom().isOfType(SchemaGraph.ENUM)) {
+ if (newEdge.getFrom().isOfType(SchemaGraph.ENUM) && newEdge.getTo().isOfType(SchemaGraph.ENUM_VALUE)) {
handleEnumValueAdded(editOperation);
}
break;
case DELETE_EDGE:
Edge oldEdge = editOperation.getSourceEdge();
- if (oldEdge.getFrom().isOfType(SchemaGraph.ENUM)) {
+ if (oldEdge.getFrom().isOfType(SchemaGraph.ENUM) && oldEdge.getTo().isOfType(SchemaGraph.ENUM_VALUE)) {
handleEnumValueDeleted(editOperation);
}
break;
+ case CHANGE_VERTEX:
+ if (editOperation.getSourceVertex().isOfType(SchemaGraph.ENUM_VALUE) && editOperation.getTargetVertex().isOfType(SchemaGraph.ENUM_VALUE)) {
+ handleEnumValueChanged(editOperation);
+ }
+ break;
}
}
}
@@ -641,20 +646,21 @@ private void handleArgumentChange(EditOperation editOperation) {
} else {
assertTrue(fieldOrDirective.isOfType(SchemaGraph.FIELD));
Vertex field = fieldOrDirective;
+ String fieldName = field.getName();
Vertex fieldsContainerForField = newSchemaGraph.getFieldsContainerForField(field);
if (fieldsContainerForField.isOfType(SchemaGraph.OBJECT)) {
Vertex object = fieldsContainerForField;
ObjectModification objectModification = getObjectModification(object.getName());
String oldName = editOperation.getSourceVertex().getName();
String newName = argument.getName();
- objectModification.getDetails().add(new ObjectFieldArgumentRename(oldName, newName));
+ objectModification.getDetails().add(new ObjectFieldArgumentRename(fieldName, oldName, newName));
} else {
assertTrue(fieldsContainerForField.isOfType(SchemaGraph.INTERFACE));
Vertex interfaze = fieldsContainerForField;
InterfaceModification interfaceModification = getInterfaceModification(interfaze.getName());
String oldName = editOperation.getSourceVertex().getName();
String newName = argument.getName();
- interfaceModification.getDetails().add(new InterfaceFieldArgumentRename(oldName, newName));
+ interfaceModification.getDetails().add(new InterfaceFieldArgumentRename(fieldName, oldName, newName));
}
}
@@ -723,6 +729,13 @@ private void handleEnumValueDeleted(EditOperation editOperation) {
enumModification.getDetails().add(new EnumValueDeletion(value.getName()));
}
+ private void handleEnumValueChanged(EditOperation editOperation) {
+ Vertex enumVertex = newSchemaGraph.getEnumForEnumValue(editOperation.getTargetVertex());
+ EnumModification enumModification = getEnumModification(enumVertex.getName());
+ String oldName = editOperation.getSourceVertex().getName();
+ String newName = editOperation.getTargetVertex().getName();
+ enumModification.getDetails().add(new EnumValueRenamed(oldName, newName));
+ }
private void fieldChanged(EditOperation editOperation) {
Vertex field = editOperation.getTargetVertex();
@@ -931,7 +944,7 @@ private void typeEdgeInsertedForInputField(EditOperation
return;
}
String newType = getTypeFromEdgeLabel(editOperation.getTargetEdge());
- EditOperation deletedTypeEdgeOperation = findDeletedEdge(inputField, editOperations, mapping);
+ EditOperation deletedTypeEdgeOperation = findDeletedEdge(inputField, editOperations, mapping, this::isTypeEdge);
String oldType = getTypeFromEdgeLabel(deletedTypeEdgeOperation.getSourceEdge());
InputObjectFieldTypeModification inputObjectFieldTypeModification = new InputObjectFieldTypeModification(inputField.getName(), oldType, newType);
getInputObjectModification(inputObject.getName()).getDetails().add(inputObjectFieldTypeModification);
@@ -962,7 +975,7 @@ private void typeEdgeInsertedForArgument(EditOperation
String newType = getTypeFromEdgeLabel(editOperation.getTargetEdge());
// this means we have an existing object changed its type
// and there must be a deleted edge with the old type information
- EditOperation deletedTypeEdgeOperation = findDeletedEdge(argument, editOperations, mapping);
+ EditOperation deletedTypeEdgeOperation = findDeletedEdge(argument, editOperations, mapping, this::isTypeEdge);
String oldType = getTypeFromEdgeLabel(deletedTypeEdgeOperation.getSourceEdge());
ObjectFieldArgumentTypeModification objectFieldArgumentTypeModification = new ObjectFieldArgumentTypeModification(field.getName(), argument.getName(), oldType, newType);
getObjectModification(object.getName()).getDetails().add(objectFieldArgumentTypeModification);
@@ -984,7 +997,7 @@ private void typeEdgeInsertedForArgument(EditOperation
String newType = getTypeFromEdgeLabel(editOperation.getTargetEdge());
// this means we have an existing object changed its type
// and there must be a deleted edge with the old type information
- EditOperation deletedTypeEdgeOperation = findDeletedEdge(argument, editOperations, mapping);
+ EditOperation deletedTypeEdgeOperation = findDeletedEdge(argument, editOperations, mapping, this::isTypeEdge);
String oldType = getTypeFromEdgeLabel(deletedTypeEdgeOperation.getSourceEdge());
InterfaceFieldArgumentTypeModification interfaceFieldArgumentTypeModification = new InterfaceFieldArgumentTypeModification(field.getName(), argument.getName(), oldType, newType);
getInterfaceModification(interfaze.getName()).getDetails().add(interfaceFieldArgumentTypeModification);
@@ -999,7 +1012,7 @@ private void typeEdgeInsertedForArgument(EditOperation
return;
}
String newType = getTypeFromEdgeLabel(editOperation.getTargetEdge());
- EditOperation deletedTypeEdgeOperation = findDeletedEdge(argument, editOperations, mapping);
+ EditOperation deletedTypeEdgeOperation = findDeletedEdge(argument, editOperations, mapping, this::isTypeEdge);
String oldType = getTypeFromEdgeLabel(deletedTypeEdgeOperation.getSourceEdge());
DirectiveArgumentTypeModification directiveArgumentTypeModification = new DirectiveArgumentTypeModification(argument.getName(), oldType, newType);
getDirectiveModification(directive.getName()).getDetails().add(directiveArgumentTypeModification);
@@ -1024,11 +1037,10 @@ private void typeEdgeInsertedForField(EditOperation
String newType = getTypeFromEdgeLabel(editOperation.getTargetEdge());
// this means we have an existing object changed its type
// and there must be a deleted edge with the old type information
- EditOperation deletedTypeEdgeOperation = findDeletedEdge(field, editOperations, mapping);
+ EditOperation deletedTypeEdgeOperation = findDeletedEdge(field, editOperations, mapping, this::isTypeEdge);
String oldType = getTypeFromEdgeLabel(deletedTypeEdgeOperation.getSourceEdge());
ObjectFieldTypeModification objectFieldTypeModification = new ObjectFieldTypeModification(field.getName(), oldType, newType);
getObjectModification(object.getName()).getDetails().add(objectFieldTypeModification);
-
} else {
assertTrue(objectOrInterface.isOfType(SchemaGraph.INTERFACE));
Vertex interfaze = objectOrInterface;
@@ -1041,22 +1053,23 @@ private void typeEdgeInsertedForField(EditOperation
String newType = getTypeFromEdgeLabel(editOperation.getTargetEdge());
// this means we have an existing object changed its type
// and there must be a deleted edge with the old type information
- EditOperation deletedTypeEdgeOperation = findDeletedEdge(field, editOperations, mapping);
+ EditOperation deletedTypeEdgeOperation = findDeletedEdge(field, editOperations, mapping, this::isTypeEdge);
String oldType = getTypeFromEdgeLabel(deletedTypeEdgeOperation.getSourceEdge());
InterfaceFieldTypeModification interfaceFieldTypeModification = new InterfaceFieldTypeModification(field.getName(), oldType, newType);
getInterfaceModification(interfaze.getName()).getDetails().add(interfaceFieldTypeModification);
-
}
}
- private EditOperation findDeletedEdge(Vertex targetVertexFrom, List<EditOperation> editOperations, Mapping
- mapping) {
+ private EditOperation findDeletedEdge(Vertex targetVertexFrom,
+ List<EditOperation> editOperations,
+ Mapping mapping,
+ Predicate<Edge> edgePredicate) {
Vertex sourceVertexFrom = mapping.getSource(targetVertexFrom);
for (EditOperation editOperation : editOperations) {
if (editOperation.getOperation() == EditOperation.Operation.DELETE_EDGE) {
Edge deletedEdge = editOperation.getSourceEdge();
- if (deletedEdge.getFrom() == sourceVertexFrom) {
+ if (deletedEdge.getFrom() == sourceVertexFrom && edgePredicate.test(deletedEdge)) {
return editOperation;
}
}
@@ -1198,6 +1211,10 @@ private String getDefaultValueFromEdgeLabel(Edge edge) {
return defaultValue;
}
+ private boolean isTypeEdge(Edge edge) {
+ String label = edge.getLabel();
+ return label.startsWith("type=");
+ }
private void interfaceImplementationDeleted(Edge deletedEdge) {
Vertex from = deletedEdge.getFrom();
diff --git a/src/main/java/graphql/schema/diffing/ana/SchemaDifference.java b/src/main/java/graphql/schema/diffing/ana/SchemaDifference.java
index 6c4b1bcbf9..8d809269cd 100644
--- a/src/main/java/graphql/schema/diffing/ana/SchemaDifference.java
+++ b/src/main/java/graphql/schema/diffing/ana/SchemaDifference.java
@@ -8,7 +8,7 @@
/**
* Any kind of difference between two schemas is a SchemaDifference.
- *
+ *
* Below that we have three different possible kind of differences:
* - Addition
* - Deletion
@@ -66,19 +66,19 @@ public String getName() {
class ObjectModification implements SchemaModification, ObjectDifference {
private final String oldName;
private final String newName;
- private final boolean renamed;
+ private final boolean isNameChanged;
private final List<ObjectModificationDetail> details = new ArrayList<>();
public ObjectModification(String oldName, String newName) {
this.oldName = oldName;
this.newName = newName;
- this.renamed = oldName.equals(newName);
+ this.isNameChanged = !oldName.equals(newName);
}
public ObjectModification(String newName) {
this.oldName = newName;
this.newName = newName;
- this.renamed = false;
+ this.isNameChanged = false;
}
public List<ObjectModificationDetail> getDetails() {
@@ -97,8 +97,8 @@ public String getNewName() {
return newName;
}
- public boolean isRenamed() {
- return renamed;
+ public boolean isNameChanged() {
+ return isNameChanged;
}
}
@@ -173,14 +173,20 @@ public String getOldName() {
}
class ObjectFieldArgumentRename implements ObjectModificationDetail {
+ private final String fieldName;
private final String oldName;
private final String newName;
- public ObjectFieldArgumentRename(String oldName, String newName) {
+ public ObjectFieldArgumentRename(String fieldName, String oldName, String newName) {
+ this.fieldName = fieldName;
this.oldName = oldName;
this.newName = newName;
}
+ public String getFieldName() {
+ return fieldName;
+ }
+
public String getNewName() {
return newName;
}
@@ -334,24 +340,28 @@ class InterfaceDeletion implements SchemaDeletion, InterfaceDifference {
public InterfaceDeletion(String name) {
this.name = name;
}
+
+ public String getName() {
+ return name;
+ }
}
class InterfaceModification implements SchemaModification, InterfaceDifference {
private final String oldName;
private final String newName;
- private final boolean renamed;
+ private final boolean isNameChanged;
private final List<InterfaceModificationDetail> details = new ArrayList<>();
public InterfaceModification(String oldName, String newName) {
this.oldName = oldName;
this.newName = newName;
- this.renamed = oldName.equals(newName);
+ this.isNameChanged = !oldName.equals(newName);
}
public InterfaceModification(String newName) {
this.oldName = newName;
this.newName = newName;
- this.renamed = false;
+ this.isNameChanged = false;
}
public List<InterfaceModificationDetail> getDetails() {
@@ -366,8 +376,8 @@ public String getOldName() {
return oldName;
}
- public boolean isRenamed() {
- return renamed;
+ public boolean isNameChanged() {
+ return isNameChanged;
}
public <T extends InterfaceModificationDetail> List<T> getDetails(Class<? extends T> clazz) {
@@ -509,7 +519,7 @@ public String getName() {
class InterfaceFieldArgumentTypeModification implements InterfaceModificationDetail {
- private String fieldName;
+ private final String fieldName;
private final String argumentName;
private final String oldType;
private final String newType;
@@ -571,14 +581,20 @@ public String getArgumentName() {
}
class InterfaceFieldArgumentRename implements InterfaceModificationDetail {
+ private final String fieldName;
private final String oldName;
private final String newName;
- public InterfaceFieldArgumentRename(String oldName, String newName) {
+ public InterfaceFieldArgumentRename(String fieldName, String oldName, String newName) {
+ this.fieldName = fieldName;
this.oldName = oldName;
this.newName = newName;
}
+ public String getFieldName() {
+ return fieldName;
+ }
+
public String getNewName() {
return newName;
}
@@ -621,20 +637,20 @@ public String getName() {
class UnionModification implements SchemaModification, UnionDifference {
private final String oldName;
private final String newName;
- private final boolean nameChanged;
+ private final boolean isNameChanged;
private final List<UnionModificationDetail> details = new ArrayList<>();
public UnionModification(String oldName, String newName) {
this.oldName = oldName;
this.newName = newName;
- this.nameChanged = oldName.equals(newName);
+ this.isNameChanged = !oldName.equals(newName);
}
public UnionModification(String newName) {
this.oldName = newName;
this.newName = newName;
- this.nameChanged = false;
+ this.isNameChanged = false;
}
public String getNewName() {
@@ -654,7 +670,7 @@ public List getDetails(Class extends T>
}
public boolean isNameChanged() {
- return nameChanged;
+ return isNameChanged;
}
}
@@ -813,7 +829,7 @@ public String getName() {
class InputObjectModification implements SchemaModification, InputObjectDifference {
private final String oldName;
private final String newName;
- private final boolean nameChanged;
+ private final boolean isNameChanged;
private final List<InputObjectModificationDetail> details = new ArrayList<>();
@@ -821,17 +837,17 @@ class InputObjectModification implements SchemaModification, InputObjectDifferen
public InputObjectModification(String oldName, String newName) {
this.oldName = oldName;
this.newName = newName;
- this.nameChanged = oldName.equals(newName);
+ this.isNameChanged = !oldName.equals(newName);
}
public InputObjectModification(String newName) {
this.oldName = newName;
this.newName = newName;
- this.nameChanged = false;
+ this.isNameChanged = false;
}
public boolean isNameChanged() {
- return nameChanged;
+ return isNameChanged;
}
public String getNewName() {
@@ -885,23 +901,23 @@ class EnumModification implements SchemaModification, EnumDifference {
private final String oldName;
private final String newName;
- private final boolean nameChanged;
+ private final boolean isNameChanged;
private final List<EnumModificationDetail> details = new ArrayList<>();
public EnumModification(String oldName, String newName) {
this.oldName = oldName;
this.newName = newName;
- this.nameChanged = oldName.equals(newName);
+ this.isNameChanged = !oldName.equals(newName);
}
public EnumModification(String newName) {
this.oldName = newName;
this.newName = newName;
- this.nameChanged = false;
+ this.isNameChanged = false;
}
public boolean isNameChanged() {
- return nameChanged;
+ return isNameChanged;
}
public String getNewName() {
@@ -938,6 +954,24 @@ public String getName() {
}
}
+ class EnumValueRenamed implements EnumModificationDetail {
+ private final String oldName;
+ private final String newName;
+
+ public EnumValueRenamed(String oldName, String newName) {
+ this.oldName = oldName;
+ this.newName = newName;
+ }
+
+ public String getOldName() {
+ return oldName;
+ }
+
+ public String getNewName() {
+ return newName;
+ }
+ }
+
class EnumValueAddition implements EnumModificationDetail {
private final String name;
@@ -986,25 +1020,25 @@ interface ScalarModificationDetail {
class ScalarModification implements SchemaModification, ScalarDifference {
private final String oldName;
private final String newName;
- private final boolean nameChanged;
+ private final boolean isNameChanged;
private List<ScalarModificationDetail> details = new ArrayList<>();
public ScalarModification(String oldName, String newName) {
this.oldName = oldName;
this.newName = newName;
- this.nameChanged = oldName.equals(newName);
+ this.isNameChanged = !oldName.equals(newName);
}
public ScalarModification(String newName) {
this.oldName = newName;
this.newName = newName;
- this.nameChanged = false;
+ this.isNameChanged = false;
}
public boolean isNameChanged() {
- return nameChanged;
+ return isNameChanged;
}
public String getNewName() {
@@ -1057,24 +1091,24 @@ public String getName() {
class DirectiveModification implements SchemaModification, DirectiveDifference {
private final String oldName;
private final String newName;
- private final boolean nameChanged;
+ private final boolean isNameChanged;
private final List<DirectiveModificationDetail> details = new ArrayList<>();
public DirectiveModification(String oldName, String newName) {
this.oldName = oldName;
this.newName = newName;
- this.nameChanged = oldName.equals(newName);
+ this.isNameChanged = !oldName.equals(newName);
}
public DirectiveModification(String newName) {
this.oldName = newName;
this.newName = newName;
- this.nameChanged = false;
+ this.isNameChanged = false;
}
public boolean isNameChanged() {
- return nameChanged;
+ return isNameChanged;
}
public String getNewName() {
diff --git a/src/main/java/graphql/schema/fetching/LambdaFetchingSupport.java b/src/main/java/graphql/schema/fetching/LambdaFetchingSupport.java
index 0056819b36..c3f4fd486f 100644
--- a/src/main/java/graphql/schema/fetching/LambdaFetchingSupport.java
+++ b/src/main/java/graphql/schema/fetching/LambdaFetchingSupport.java
@@ -191,7 +191,7 @@ private static String decapitalize(String name) {
@VisibleForTesting
static Function<Object, Object> mkCallFunction(Class<?> targetClass, String targetMethod, Class<?> targetMethodReturnType) throws Throwable {
- MethodHandles.Lookup lookup = MethodHandles.lookup();
+ MethodHandles.Lookup lookup = getLookup(targetClass);
MethodHandle virtualMethodHandle = lookup.findVirtual(targetClass, targetMethod, MethodType.methodType(targetMethodReturnType));
CallSite site = LambdaMetafactory.metafactory(lookup,
"apply",
@@ -204,4 +204,14 @@ static Function mkCallFunction(Class> targetClass, String targ
return getterFunction;
}
+ private static MethodHandles.Lookup getLookup(Class<?> targetClass) throws IllegalAccessException {
+ MethodHandles.Lookup lookupMe = MethodHandles.lookup();
+ //
+ // This is a Java 9 approach to method lookup allowing private access
+ // which we don't want to use until we get to Java 11
+ //
+ //lookupMe = MethodHandles.privateLookupIn(targetClass, lookupMe);
+ return lookupMe;
+ }
+
}
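For reference, the Java 9+ alternative mentioned in the comment above would look roughly like the sketch below. This is not what the library does today, and whether it may access non-public getters still depends on module and accessibility rules:

    import java.lang.invoke.MethodHandles;

    final class PrivateLookupSketch {
        // Sketch of the deferred Java 9+ approach: derive a lookup with private access
        // to the target class instead of using the caller's plain lookup.
        static MethodHandles.Lookup getLookup(Class<?> targetClass) throws IllegalAccessException {
            MethodHandles.Lookup caller = MethodHandles.lookup();
            return MethodHandles.privateLookupIn(targetClass, caller);
        }
    }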
diff --git a/src/main/java/graphql/schema/idl/SchemaGeneratorHelper.java b/src/main/java/graphql/schema/idl/SchemaGeneratorHelper.java
index 79fdb5493a..097d320ea9 100644
--- a/src/main/java/graphql/schema/idl/SchemaGeneratorHelper.java
+++ b/src/main/java/graphql/schema/idl/SchemaGeneratorHelper.java
@@ -909,10 +909,13 @@ void buildOperations(BuildContext buildCtx, GraphQLSchema.Builder schemaBuilder)
Optional<OperationTypeDefinition> mutationOperation = getOperationNamed("mutation", operationTypeDefs);
if (!mutationOperation.isPresent()) {
- Optional<TypeDefinition> mutationTypeDef = typeRegistry.getType("Mutation");
- if (mutationTypeDef.isPresent()) {
- mutation = buildOutputType(buildCtx, TypeName.newTypeName().name(mutationTypeDef.get().getName()).build());
- schemaBuilder.mutation(mutation);
+ if (!typeRegistry.schemaDefinition().isPresent()) {
+ // If no schema definition, then there is no schema keyword. Default to using the type called Mutation
+ Optional<TypeDefinition> mutationTypeDef = typeRegistry.getType("Mutation");
+ if (mutationTypeDef.isPresent()) {
+ mutation = buildOutputType(buildCtx, TypeName.newTypeName().name(mutationTypeDef.get().getName()).build());
+ schemaBuilder.mutation(mutation);
+ }
}
} else {
mutation = buildOperation(buildCtx, mutationOperation.get());
@@ -921,10 +924,13 @@ void buildOperations(BuildContext buildCtx, GraphQLSchema.Builder schemaBuilder)
Optional<OperationTypeDefinition> subscriptionOperation = getOperationNamed("subscription", operationTypeDefs);
if (!subscriptionOperation.isPresent()) {
- Optional<TypeDefinition> subscriptionTypeDef = typeRegistry.getType("Subscription");
- if (subscriptionTypeDef.isPresent()) {
- subscription = buildOutputType(buildCtx, TypeName.newTypeName().name(subscriptionTypeDef.get().getName()).build());
- schemaBuilder.subscription(subscription);
+ if (!typeRegistry.schemaDefinition().isPresent()) {
+ // If no schema definition, then there is no schema keyword. Default to using the type called Subscription
+ Optional<TypeDefinition> subscriptionTypeDef = typeRegistry.getType("Subscription");
+ if (subscriptionTypeDef.isPresent()) {
+ subscription = buildOutputType(buildCtx, TypeName.newTypeName().name(subscriptionTypeDef.get().getName()).build());
+ schemaBuilder.subscription(subscription);
+ }
}
} else {
subscription = buildOperation(buildCtx, subscriptionOperation.get());
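To make the behavioural change concrete, a hedged sketch: when the SDL contains an explicit schema definition that only wires a query root, a type that merely happens to be named Mutation should no longer be picked up as the mutation root. The SDL is invented for illustration; the parser and generator calls are the usual graphql-java entry points:

    import graphql.schema.GraphQLSchema;
    import graphql.schema.idl.RuntimeWiring;
    import graphql.schema.idl.SchemaGenerator;
    import graphql.schema.idl.SchemaParser;
    import graphql.schema.idl.TypeDefinitionRegistry;

    class ExplicitSchemaDefinitionExample {
        static GraphQLSchema build() {
            String sdl = ""
                    + "schema { query: Query }\n"                 // explicit schema definition, no mutation entry
                    + "type Query { hello: String }\n"
                    + "type Mutation { notWiredHere: String }\n"; // just a type name, not an operation root
            TypeDefinitionRegistry registry = new SchemaParser().parse(sdl);
            GraphQLSchema schema = new SchemaGenerator()
                    .makeExecutableSchema(registry, RuntimeWiring.newRuntimeWiring().build());
            // with the change above, schema.getMutationType() is expected to be null
            return schema;
        }
    }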
diff --git a/src/main/java/graphql/schema/idl/SchemaPrinter.java b/src/main/java/graphql/schema/idl/SchemaPrinter.java
index e4ae7a7c63..6f3cbc64d9 100644
--- a/src/main/java/graphql/schema/idl/SchemaPrinter.java
+++ b/src/main/java/graphql/schema/idl/SchemaPrinter.java
@@ -315,7 +315,7 @@ public Options includeSchemaElement(Predicate includeSchem
this.includeDirectiveDefinitions,
this.useAstDefinitions,
this.descriptionsAsHashComments,
- includeDirective,
+ this.includeDirective,
includeSchemaElement,
this.comparatorRegistry);
}
diff --git a/src/main/java/graphql/schema/impl/SchemaUtil.java b/src/main/java/graphql/schema/impl/SchemaUtil.java
index a1f382d84d..66b06f2b83 100644
--- a/src/main/java/graphql/schema/impl/SchemaUtil.java
+++ b/src/main/java/graphql/schema/impl/SchemaUtil.java
@@ -100,6 +100,7 @@ public static void replaceTypeReferences(GraphQLSchema schema) {
final Map<String, GraphQLNamedType> typeMap = schema.getTypeMap();
List<GraphQLSchemaElement> roots = new ArrayList<>(typeMap.values());
roots.addAll(schema.getDirectives());
+ roots.addAll(schema.getSchemaAppliedDirectives());
SchemaTraverser schemaTraverser = new SchemaTraverser(schemaElement -> schemaElement.getChildrenWithTypeReferences().getChildrenAsList());
schemaTraverser.depthFirst(new GraphQLTypeResolvingVisitor(typeMap), roots);
}
diff --git a/src/main/java/graphql/util/EscapeUtil.java b/src/main/java/graphql/util/EscapeUtil.java
index 9f1e35c9d7..d7a6cb5eeb 100644
--- a/src/main/java/graphql/util/EscapeUtil.java
+++ b/src/main/java/graphql/util/EscapeUtil.java
@@ -9,7 +9,7 @@ private EscapeUtil() {
}
/**
- * Encodes the value as a JSON string according to http://json.org/ rules
+ * Encodes the value as a JSON string according to https://json.org/ rules
*
* @param stringValue the value to encode as a JSON string
*
diff --git a/src/main/java/graphql/validation/AbstractRule.java b/src/main/java/graphql/validation/AbstractRule.java
index eca44a20e9..c5c2f5a56a 100644
--- a/src/main/java/graphql/validation/AbstractRule.java
+++ b/src/main/java/graphql/validation/AbstractRule.java
@@ -11,6 +11,7 @@
import graphql.language.FragmentSpread;
import graphql.language.InlineFragment;
import graphql.language.Node;
+import graphql.language.ObjectValue;
import graphql.language.OperationDefinition;
import graphql.language.SelectionSet;
import graphql.language.SourceLocation;
@@ -188,6 +189,10 @@ public void documentFinished(Document document) {
}
+ public void checkObjectValue(ObjectValue objectValue) {
+
+ }
+
@Override
public String toString() {
return "Rule{" + validationContext + "}";
diff --git a/src/main/java/graphql/validation/RulesVisitor.java b/src/main/java/graphql/validation/RulesVisitor.java
index 1b4bf421ca..83864aaee7 100644
--- a/src/main/java/graphql/validation/RulesVisitor.java
+++ b/src/main/java/graphql/validation/RulesVisitor.java
@@ -16,6 +16,7 @@
import graphql.language.FragmentSpread;
import graphql.language.InlineFragment;
import graphql.language.Node;
+import graphql.language.ObjectValue;
import graphql.language.OperationDefinition;
import graphql.language.SelectionSet;
import graphql.language.TypeName;
@@ -78,6 +79,8 @@ public void enter(Node node, List ancestors) {
checkVariable((VariableReference) node);
} else if (node instanceof SelectionSet) {
checkSelectionSet((SelectionSet) node);
+ } else if (node instanceof ObjectValue) {
+ checkObjectValue((ObjectValue) node);
}
}
@@ -151,6 +154,10 @@ private void checkVariable(VariableReference node) {
currentRules.forEach(r -> r.checkVariable(node));
}
+ private void checkObjectValue(ObjectValue node) {
+ currentRules.forEach(r -> r.checkObjectValue(node));
+ }
+
@Override
public void leave(Node node, List<Node> ancestors) {
validationContext.getTraversalContext().leave(node, ancestors);
diff --git a/src/main/java/graphql/validation/ValidationErrorType.java b/src/main/java/graphql/validation/ValidationErrorType.java
index 5646aac2f0..5ae5be0aaf 100644
--- a/src/main/java/graphql/validation/ValidationErrorType.java
+++ b/src/main/java/graphql/validation/ValidationErrorType.java
@@ -40,5 +40,6 @@ public enum ValidationErrorType implements ValidationErrorClassification {
DuplicateVariableName,
NullValueForNonNullArgument,
SubscriptionMultipleRootFields,
- SubscriptionIntrospectionRootField
+ SubscriptionIntrospectionRootField,
+ UniqueObjectFieldName
}
diff --git a/src/main/java/graphql/validation/Validator.java b/src/main/java/graphql/validation/Validator.java
index 51dfd40cbb..54558c617a 100644
--- a/src/main/java/graphql/validation/Validator.java
+++ b/src/main/java/graphql/validation/Validator.java
@@ -6,6 +6,7 @@
import graphql.language.Document;
import graphql.schema.GraphQLSchema;
import graphql.validation.rules.ArgumentsOfCorrectType;
+import graphql.validation.rules.UniqueObjectFieldName;
import graphql.validation.rules.ExecutableDefinitions;
import graphql.validation.rules.FieldsOnCorrectType;
import graphql.validation.rules.FragmentsOnCompositeType;
@@ -153,6 +154,9 @@ public List createRules(ValidationContext validationContext, Valid
SubscriptionUniqueRootField uniqueSubscriptionRootField = new SubscriptionUniqueRootField(validationContext, validationErrorCollector);
rules.add(uniqueSubscriptionRootField);
+ UniqueObjectFieldName uniqueObjectFieldName = new UniqueObjectFieldName(validationContext, validationErrorCollector);
+ rules.add(uniqueObjectFieldName);
+
return rules;
}
}
diff --git a/src/main/java/graphql/validation/rules/UniqueObjectFieldName.java b/src/main/java/graphql/validation/rules/UniqueObjectFieldName.java
new file mode 100644
index 0000000000..25c7c2410d
--- /dev/null
+++ b/src/main/java/graphql/validation/rules/UniqueObjectFieldName.java
@@ -0,0 +1,34 @@
+package graphql.validation.rules;
+
+import static graphql.validation.ValidationErrorType.UniqueObjectFieldName;
+
+import com.google.common.collect.Sets;
+import graphql.language.ObjectField;
+import graphql.language.ObjectValue;
+import graphql.validation.AbstractRule;
+import graphql.validation.ValidationContext;
+import graphql.validation.ValidationErrorCollector;
+
+import java.util.Set;
+
+public class UniqueObjectFieldName extends AbstractRule {
+ public UniqueObjectFieldName(ValidationContext validationContext, ValidationErrorCollector validationErrorCollector) {
+ super(validationContext, validationErrorCollector);
+ }
+
+ @Override
+ public void checkObjectValue(ObjectValue objectValue) {
+ Set<String> fieldNames = Sets.newHashSetWithExpectedSize(objectValue.getObjectFields().size());
+
+ for (ObjectField field : objectValue.getObjectFields()) {
+ String fieldName = field.getName();
+
+ if (fieldNames.contains(fieldName)) {
+ String message = i18n(UniqueObjectFieldName, "UniqueObjectFieldName.duplicateFieldName", fieldName);
+ addError(UniqueObjectFieldName, objectValue.getSourceLocation(), message);
+ } else {
+ fieldNames.add(fieldName);
+ }
+ }
+ }
+}
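As a usage illustration, the new rule targets queries whose input object literals repeat a field name. The schema and field names below are invented, but a document shaped like this is expected to fail validation with the UniqueObjectFieldName error wired up in Validator and Validation.properties:

    class DuplicateObjectFieldNameExample {
        // The `name` key appears twice in the same input object literal, so the
        // UniqueObjectFieldName rule should report a validation error for it.
        static final String INVALID_QUERY =
                "query { search(filter: { name: \"a\", name: \"b\" }) { id } }";
    }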
diff --git a/src/main/resources/i18n/Parsing.properties b/src/main/resources/i18n/Parsing.properties
index 1152e601e5..a45ae1114b 100644
--- a/src/main/resources/i18n/Parsing.properties
+++ b/src/main/resources/i18n/Parsing.properties
@@ -19,6 +19,7 @@ InvalidSyntaxBail.full=Invalid syntax with offending token ''{0}'' at line {1} c
InvalidSyntaxMoreTokens.full=Invalid syntax encountered. There are extra tokens in the text that have not been consumed. Offending token ''{0}'' at line {1} column {2}
#
ParseCancelled.full=More than {0} ''{1}'' tokens have been presented. To prevent Denial Of Service attacks, parsing has been cancelled.
+ParseCancelled.tooDeep=More than {0} deep ''{1}'' rules have been entered. To prevent Denial Of Service attacks, parsing has been cancelled.
#
InvalidUnicode.trailingLeadingSurrogate=Invalid unicode encountered. Trailing surrogate must be preceded with a leading surrogate. Offending token ''{0}'' at line {1} column {2}
InvalidUnicode.leadingTrailingSurrogate=Invalid unicode encountered. Leading surrogate must be followed by a trailing surrogate. Offending token ''{0}'' at line {1} column {2}
diff --git a/src/main/resources/i18n/Scalars_de.properties b/src/main/resources/i18n/Scalars_de.properties
index 02b1b27a75..8c86f64261 100644
--- a/src/main/resources/i18n/Scalars_de.properties
+++ b/src/main/resources/i18n/Scalars_de.properties
@@ -33,4 +33,4 @@ Boolean.notBoolean=Erwartet wurde ein Wert, der in den Typ ''Boolean'' konvertie
Boolean.unexpectedAstType=Erwartet wurde ein AST type ''BooleanValue'', aber es war ein ''{0}''
Boolean.unexpectedRawValueType=Erwartet wurde eine Boolean-Eingabe, aber es war ein ''{0}''
#
-String.unexpectedRawValueType=Erwartet wurde eine String-Eingabe, aber es war ein ''{0}''
+String.unexpectedRawValueType=Erwartet wurde eine String-Eingabe, aber es war ein ''{0}''
\ No newline at end of file
diff --git a/src/main/resources/i18n/Validation.properties b/src/main/resources/i18n/Validation.properties
index 3b2dfb731f..9733f79738 100644
--- a/src/main/resources/i18n/Validation.properties
+++ b/src/main/resources/i18n/Validation.properties
@@ -78,6 +78,8 @@ VariablesAreInputTypes.wrongType=Validation error ({0}) : Input variable ''{1}''
#
VariableTypesMatchRule.unexpectedType=Validation error ({0}) : Variable type ''{1}'' does not match expected type ''{2}''
#
+UniqueObjectFieldName.duplicateFieldName=Validation error ({0}) : There can be only one field named ''{1}''
+#
# These are used but IDEA cant find them easily as being called
#
# suppress inspection "UnusedProperty"
diff --git a/src/main/resources/i18n/Validation_de.properties b/src/main/resources/i18n/Validation_de.properties
index 22059223e4..603e931d07 100644
--- a/src/main/resources/i18n/Validation_de.properties
+++ b/src/main/resources/i18n/Validation_de.properties
@@ -99,4 +99,4 @@ ArgumentValidationUtil.handleNotObjectError=Validierungsfehler ({0}) : Argument
ArgumentValidationUtil.handleMissingFieldsError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' fehlen Pflichtfelder ''{3}''
# suppress inspection "UnusedProperty"
ArgumentValidationUtil.handleExtraFieldError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' enth\u00e4lt ein Feld nicht in ''{3}'': ''{4}''
-#
+#
\ No newline at end of file
diff --git a/src/test/groovy/example/http/HttpMain.java b/src/test/groovy/example/http/HttpMain.java
index e705df61d6..e1c001ce87 100644
--- a/src/test/groovy/example/http/HttpMain.java
+++ b/src/test/groovy/example/http/HttpMain.java
@@ -49,7 +49,7 @@
/**
* A very simple example of serving a graphql schema over http.
*
- * More info can be found here : http://graphql.org/learn/serving-over-http/
+ * More info can be found here : https://graphql.org/learn/serving-over-http/
*/
@SuppressWarnings("unchecked")
public class HttpMain extends AbstractHandler {
diff --git a/src/test/groovy/example/http/QueryParameters.java b/src/test/groovy/example/http/QueryParameters.java
index 1c9c309a70..ca22ac6b81 100644
--- a/src/test/groovy/example/http/QueryParameters.java
+++ b/src/test/groovy/example/http/QueryParameters.java
@@ -10,15 +10,15 @@
/**
* Graphql clients can send GET or POST HTTP requests. The spec does not make an explicit
* distinction. So you may need to handle both. The following was tested using
- * a graphiql client tool found here : https://github.com/skevy/graphiql-app
- *
+ * a graphiql client tool such as graphiql-app
+ *
* You should consider bundling graphiql in your application
- *
- * https://github.com/graphql/graphiql
- *
+ *
+ * https://github.com/graphql/graphiql
+ *
* This outlines more information on how to handle parameters over http
- *
- * http://graphql.org/learn/serving-over-http/
+ *
+ * https://graphql.org/learn/serving-over-http/
*/
class QueryParameters {
diff --git a/src/test/groovy/example/http/package-info.java b/src/test/groovy/example/http/package-info.java
index 88ad0004db..bea88ed753 100644
--- a/src/test/groovy/example/http/package-info.java
+++ b/src/test/groovy/example/http/package-info.java
@@ -1,11 +1,11 @@
/**
* The purpose of this code is to show an example of serving a graphql query over HTTP
- *
- * More info can be found here : http://graphql.org/learn/serving-over-http/
- *
+ *
+ * More info can be found here : https://graphql.org/learn/serving-over-http/
+ *
* There are more concerns in a fully fledged application such as your approach to permissions
* and authentication and so on that are not shown here.
- *
+ *
* The backing data is the "star wars" example schema. And fairly complex example query is as follows :
*
*
diff --git a/src/test/groovy/graphql/TypeReferenceSchema.java b/src/test/groovy/graphql/TypeReferenceSchema.java
index 37c48a2841..1989a29d1d 100644
--- a/src/test/groovy/graphql/TypeReferenceSchema.java
+++ b/src/test/groovy/graphql/TypeReferenceSchema.java
@@ -1,6 +1,8 @@
package graphql;
import graphql.schema.Coercing;
+import graphql.schema.GraphQLAppliedDirective;
+import graphql.schema.GraphQLAppliedDirectiveArgument;
import graphql.schema.GraphQLArgument;
import graphql.schema.GraphQLCodeRegistry;
import graphql.schema.GraphQLDirective;
@@ -315,6 +317,14 @@ public Boolean parseLiteral(Object input) {
.type(QueryDirectiveInput))
.build();
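+ // an applied directive whose argument type is a GraphQLTypeReference, used to check that
+ // type references inside applied directives are replaced (see SchemaUtilTest)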
+ public static GraphQLAppliedDirective cacheApplied = GraphQLAppliedDirective.newDirective()
+ .name("cache")
+ .argument(GraphQLAppliedDirectiveArgument.newArgument()
+ .name("enabled")
+ .type(GraphQLTypeReference.typeRef(OnOff.getName()))
+ .valueProgrammatic("On"))
+ .build();
+
public static GraphQLCodeRegistry codeRegistry = GraphQLCodeRegistry.newCodeRegistry()
.typeResolver("Pet", new TypeResolverProxy())
.typeResolver("Addressable", new TypeResolverProxy())
@@ -336,5 +346,6 @@ public Boolean parseLiteral(Object input) {
.additionalDirective(enumValueDirective)
.additionalDirective(interfaceDirective)
.codeRegistry(codeRegistry)
+ .withSchemaAppliedDirectives(cacheApplied)
.build();
}
diff --git a/src/test/groovy/graphql/execution/instrumentation/fieldvalidation/FieldValidationTest.groovy b/src/test/groovy/graphql/execution/instrumentation/fieldvalidation/FieldValidationTest.groovy
index 9d473da226..06034eb2c3 100644
--- a/src/test/groovy/graphql/execution/instrumentation/fieldvalidation/FieldValidationTest.groovy
+++ b/src/test/groovy/graphql/execution/instrumentation/fieldvalidation/FieldValidationTest.groovy
@@ -11,6 +11,8 @@ import graphql.execution.Execution
import graphql.execution.ExecutionId
import graphql.execution.ResultPath
import graphql.execution.ValueUnboxer
+import graphql.execution.instrumentation.ChainedInstrumentation
+import graphql.execution.instrumentation.Instrumentation
import graphql.execution.instrumentation.SimplePerformantInstrumentation
import graphql.execution.instrumentation.parameters.InstrumentationCreateStateParameters
import spock.lang.Specification
@@ -312,4 +314,26 @@ class FieldValidationTest extends Specification {
execution.execute(document, schema, ExecutionId.generate(), executionInput, SimplePerformantInstrumentation.INSTANCE.createState(new InstrumentationCreateStateParameters(schema, executionInput)))
}
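+ // checks the field validation instrumentation still works when wrapped in a ChainedInstrumentation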
+ def "test graphql from end to end with chained instrumentation"() {
+ def fieldValidation = new FieldValidation() {
+ @Override
+ List validateFields(final FieldValidationEnvironment validationEnvironment) {
+ return new ArrayList();
+ }
+ }
+ def instrumentations = [new FieldValidationInstrumentation(fieldValidation)]
+ def chainedInstrumentation = new ChainedInstrumentation(instrumentations);
+ def graphql = GraphQL
+ .newGraphQL(schema)
+ .instrumentation(chainedInstrumentation)
+ .build();
+
+ when:
+ def result = graphql.execute("{ field2 }")
+
+ then:
+ result.getErrors().size() == 0
+ }
+
}
diff --git a/src/test/groovy/graphql/extensions/DefaultExtensionsMergerTest.groovy b/src/test/groovy/graphql/extensions/DefaultExtensionsMergerTest.groovy
new file mode 100644
index 0000000000..4714e4636a
--- /dev/null
+++ b/src/test/groovy/graphql/extensions/DefaultExtensionsMergerTest.groovy
@@ -0,0 +1,79 @@
+package graphql.extensions
+
+import com.google.common.collect.ImmutableMap
+import spock.lang.Specification
+
+class DefaultExtensionsMergerTest extends Specification {
+
+ def merger = new DefaultExtensionsMerger()
+
+ def "can merge maps"() {
+
+ when:
+ def actual = merger.merge(leftMap, rightMap)
+ then:
+ actual == expected
+ where:
+ leftMap | rightMap | expected
+ [:] | [:] | ImmutableMap.of()
+ ImmutableMap.of() | ImmutableMap.of() | ImmutableMap.of()
+ // additive
+ [x: [firstName: "Brad"]] | [y: [lastName: "Baker"]] | [x: [firstName: "Brad"], y: [lastName: "Baker"]]
+ [x: "24", y: "25", z: "26"] | [a: "1", b: "2", c: "3"] | [x: "24", y: "25", z: "26", a: "1", b: "2", c: "3"]
+ // merge
+ [key1: [firstName: "Brad"]] | [key1: [lastName: "Baker"]] | [key1: [firstName: "Brad", lastName: "Baker"]]
+
+ // merge with right extra key
+ [key1: [firstName: "Brad", middleName: "Leon"]] | [key1: [lastName: "Baker"], key2: [hobby: "graphql-java"]] | [key1: [firstName: "Brad", middleName: "Leon", lastName: "Baker"], key2: [hobby: "graphql-java"]]
+
+ }
+
+ def "can handle null entries"() {
+
+ when:
+ def actual = merger.merge(leftMap, rightMap)
+ then:
+ actual == expected
+ where:
+ leftMap | rightMap | expected
+ // nulls
+ [x: [firstName: "Brad"]] | [y: [lastName: null]] | [x: [firstName: "Brad"], y: [lastName: null]]
+ }
+
+ def "prefers the right on conflict"() {
+
+ when:
+ def actual = merger.merge(leftMap, rightMap)
+ then:
+ actual == expected
+ where:
+ leftMap | rightMap | expected
+ [x: [firstName: "Brad"]] | [x: [firstName: "Donna"]] | [x: [firstName: "Donna"]]
+ [x: [firstName: "Brad"]] | [x: [firstName: "Donna", seenStarWars: true]] | [x: [firstName: "Donna", seenStarWars: true]]
+ [x: [firstName: "Brad", hates: "Python"]] | [x: [firstName: "Donna", seenStarWars: true]] | [x: [firstName: "Donna", hates: "Python", seenStarWars: true]]
+
+
+ // disparate types don't matter - it prefers the right
+ [x: [firstName: "Brad"]] | [x: [firstName: [salutation: "Queen", name: "Donna"]]] | [x: [firstName: [salutation: "Queen", name: "Donna"]]]
+
+ }
+
+ def "it appends to lists"() {
+
+ when:
+ def actual = merger.merge(leftMap, rightMap)
+ then:
+ actual == expected
+ where:
+ leftMap | rightMap | expected
+ [x: [1, 2, 3, 4]] | [x: [5, 6, 7, 8]] | [x: [1, 2, 3, 4, 5, 6, 7, 8]]
+ //
+ // truly additive - no object equality
+ [x: [1, 2, 3]] | [x: [1, 2, 3]] | [x: [1, 2, 3, 1, 2, 3]]
+ [x: []] | [x: [1, 2, 3]] | [x: [1, 2, 3]]
+ [x: [null]] | [x: [1, 2, 3]] | [x: [null, 1, 2, 3]]
+ //
+ // prefers right if they are not both lists
+ [x: null] | [x: [1, 2, 3]] | [x: [1, 2, 3]]
+ }
+}
diff --git a/src/test/groovy/graphql/extensions/ExtensionsBuilderTest.groovy b/src/test/groovy/graphql/extensions/ExtensionsBuilderTest.groovy
new file mode 100644
index 0000000000..326248cf76
--- /dev/null
+++ b/src/test/groovy/graphql/extensions/ExtensionsBuilderTest.groovy
@@ -0,0 +1,219 @@
+package graphql.extensions
+
+import graphql.ExecutionInput
+import graphql.ExecutionResult
+import graphql.TestUtil
+import graphql.schema.DataFetcher
+import graphql.schema.DataFetchingEnvironment
+import graphql.schema.GraphQLTypeUtil
+import org.jetbrains.annotations.NotNull
+import spock.lang.Specification
+
+import static graphql.extensions.ExtensionsBuilder.newExtensionsBuilder
+import static graphql.schema.idl.RuntimeWiring.newRuntimeWiring
+import static graphql.schema.idl.TypeRuntimeWiring.newTypeWiring
+
+class ExtensionsBuilderTest extends Specification {
+
+
+ def "can merge changes with default behavior"() {
+ when:
+ def extensions = newExtensionsBuilder().addValue("x", "24")
+ .addValues([y: "25", z: "26"])
+ .addValues([x: "overwrite"])
+ .buildExtensions()
+ then:
+ extensions == [x: "overwrite", y: "25", z: "26"]
+
+ when:
+ extensions = newExtensionsBuilder().addValue("x", "24")
+ .addValues([y: "25", z: "26"])
+ .addValues([x: "overwrite"])
+ .addValues([x: "overwrite2"])
+ .addValues([x: "overwrite2"])
+ .addValue("x", "overwrite3")
+ .addValue("z", "overwriteZ")
+ .addValues([a: "1"])
+ .buildExtensions()
+ then:
+ extensions == [x: "overwrite3", y: "25", z: "overwriteZ", a: "1"]
+ }
+
+ def "can handle no changes"() {
+ when:
+ def extensions = newExtensionsBuilder()
+ .buildExtensions()
+ then:
+ extensions == [:]
+ }
+
+ def "can handle one changes"() {
+ when:
+ def extensions = newExtensionsBuilder()
+ .addValues([x: "24", y: "25"])
+ .buildExtensions()
+ then:
+ extensions == [x: "24", y: "25"]
+ }
+
+ def "can set extensions into an ER"() {
+
+
+ when:
+ def er = ExecutionResult.newExecutionResult().data(["x": "data"]).build()
+ def newER = newExtensionsBuilder().addValue("x", "24")
+ .addValues([y: "25", z: "26"])
+ .addValues([x: "overwrite"])
+ .setExtensions(er)
+ then:
+ newER.data == ["x": "data"]
+ newER.extensions == [x: "overwrite", y: "25", z: "26"]
+
+ when:
+ er = ExecutionResult.newExecutionResult().data(["x": "data"]).extensions([a: "1"]).build()
+ newER = newExtensionsBuilder().addValue("x", "24")
+ .addValues([y: "25", z: "26"])
+ .addValues([x: "overwrite"])
+ .setExtensions(er)
+ then:
+ newER.data == ["x": "data"]
+ newER.extensions == [x: "overwrite", y: "25", z: "26"] // it overwrites - its a set!
+
+ }
+
+ def "can use a custom merger"() {
+ ExtensionsMerger merger = new ExtensionsMerger() {
+ @Override
+ @NotNull
+ Map merge(@NotNull Map leftMap, @NotNull Map rightMap) {
+ return rightMap
+ }
+ }
+ when:
+ def extensions = newExtensionsBuilder(merger)
+ .addValue("x", "24")
+ .addValues([y: "25", z: "26"])
+ .addValues([x: "overwrite"])
+ .addValues([the: "end"]).buildExtensions()
+ then:
+ extensions == [the: "end"]
+ }
+
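+ // data fetcher used by the integration tests below; it adds each field's name and printed type
+ // to the request's ExtensionsBuilder, both under "common" and as a top-level entry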
+ DataFetcher extensionDF = new DataFetcher() {
+ @Override
+ Object get(DataFetchingEnvironment env) throws Exception {
+ ExtensionsBuilder extensionsBuilder = env.getGraphQlContext().get(ExtensionsBuilder.class)
+ def fieldMap = [:]
+ fieldMap.put(env.getFieldDefinition().name, GraphQLTypeUtil.simplePrint(env.getFieldDefinition().type))
+ extensionsBuilder.addValues([common: fieldMap])
+ extensionsBuilder.addValues(fieldMap)
+ return "ignored"
+ }
+ }
+
+
+ def "integration test that shows it working when they put in a builder"() {
+ def sdl = """
+ type Query {
+ name : String!
+ street : String
+ id : ID!
+ }
+ """
+
+ def extensionsBuilder = newExtensionsBuilder()
+ extensionsBuilder.addValue("added","explicitly")
+
+ def ei = ExecutionInput.newExecutionInput("query q { name street id }")
+ .graphQLContext({ ctx ->
+ ctx.put(ExtensionsBuilder.class, extensionsBuilder) })
+ .build()
+
+
+ def graphQL = TestUtil.graphQL(sdl, newRuntimeWiring()
+ .type(newTypeWiring("Query").dataFetchers([
+ name : extensionDF,
+ street: extensionDF,
+ id : extensionDF,
+ ])))
+ .build()
+
+ when:
+ def er = graphQL.execute(ei)
+ then:
+ er.errors.isEmpty()
+ er.extensions == [
+ "added": "explicitly",
+ common: [
+ name : "String!",
+ street: "String",
+ id : "ID!",
+ ],
+ // the fields appear both under "common" and as top-level entries
+ name : "String!",
+ street: "String",
+ id : "ID!",
+ ]
+ }
+
+ def "integration test that shows it working when they DONT put in a builder"() {
+ def sdl = """
+ type Query {
+ name : String!
+ street : String
+ id : ID!
+ }
+ """
+
+ def ei = ExecutionInput.newExecutionInput("query q { name street id }")
+ .build()
+
+
+ def graphQL = TestUtil.graphQL(sdl, newRuntimeWiring()
+ .type(newTypeWiring("Query").dataFetchers([
+ name : extensionDF,
+ street: extensionDF,
+ id : extensionDF,
+ ])))
+ .build()
+
+ when:
+ def er = graphQL.execute(ei)
+ then:
+ er.errors.isEmpty()
+ er.extensions == [
+ common: [
+ name : "String!",
+ street: "String",
+ id : "ID!",
+ ],
+ // the fields appear both under "common" and as top-level entries
+ name : "String!",
+ street: "String",
+ id : "ID!",
+ ]
+ }
+
+ def "integration test showing it leaves extensions null if they are empty"() {
+ def sdl = """
+ type Query {
+ name : String!
+ street : String
+ id : ID!
+ }
+ """
+
+ def ei = ExecutionInput.newExecutionInput("query q { name street id }")
+ .root(["name" : "Brad", "id" :1234])
+ .build()
+
+
+ def graphQL = TestUtil.graphQL(sdl, newRuntimeWiring().build()).build()
+
+ when:
+ def er = graphQL.execute(ei)
+ then:
+ er.errors.isEmpty()
+ er.extensions == null
+ }
+}
diff --git a/src/test/groovy/graphql/parser/ParserOptionsTest.groovy b/src/test/groovy/graphql/parser/ParserOptionsTest.groovy
index 5867b181fc..422a0898f7 100644
--- a/src/test/groovy/graphql/parser/ParserOptionsTest.groovy
+++ b/src/test/groovy/graphql/parser/ParserOptionsTest.groovy
@@ -26,22 +26,25 @@ class ParserOptionsTest extends Specification {
defaultOptions.isCaptureSourceLocation()
defaultOptions.isCaptureLineComments()
!defaultOptions.isCaptureIgnoredChars()
+ defaultOptions.isReaderTrackData()
defaultOperationOptions.getMaxTokens() == 15_000
defaultOperationOptions.getMaxWhitespaceTokens() == 200_000
defaultOperationOptions.isCaptureSourceLocation()
!defaultOperationOptions.isCaptureLineComments()
!defaultOperationOptions.isCaptureIgnoredChars()
+ defaultOperationOptions.isReaderTrackData()
defaultSdlOptions.getMaxTokens() == Integer.MAX_VALUE
defaultSdlOptions.getMaxWhitespaceTokens() == Integer.MAX_VALUE
defaultSdlOptions.isCaptureSourceLocation()
defaultSdlOptions.isCaptureLineComments()
!defaultSdlOptions.isCaptureIgnoredChars()
+ defaultSdlOptions.isReaderTrackData()
}
def "can set in new option JVM wide"() {
- def newDefaultOptions = defaultOptions.transform({ it.captureIgnoredChars(true) })
+ def newDefaultOptions = defaultOptions.transform({ it.captureIgnoredChars(true).readerTrackData(false) })
def newDefaultOperationOptions = defaultOperationOptions.transform(
{ it.captureIgnoredChars(true).captureLineComments(true).maxWhitespaceTokens(300_000) })
def newDefaultSDlOptions = defaultSdlOptions.transform(
@@ -63,17 +66,20 @@ class ParserOptionsTest extends Specification {
currentDefaultOptions.isCaptureSourceLocation()
currentDefaultOptions.isCaptureLineComments()
currentDefaultOptions.isCaptureIgnoredChars()
+ !currentDefaultOptions.isReaderTrackData()
currentDefaultOperationOptions.getMaxTokens() == 15_000
currentDefaultOperationOptions.getMaxWhitespaceTokens() == 300_000
currentDefaultOperationOptions.isCaptureSourceLocation()
currentDefaultOperationOptions.isCaptureLineComments()
currentDefaultOperationOptions.isCaptureIgnoredChars()
+ currentDefaultOperationOptions.isReaderTrackData()
currentDefaultSdlOptions.getMaxTokens() == Integer.MAX_VALUE
currentDefaultSdlOptions.getMaxWhitespaceTokens() == 300_000
currentDefaultSdlOptions.isCaptureSourceLocation()
currentDefaultSdlOptions.isCaptureLineComments()
currentDefaultSdlOptions.isCaptureIgnoredChars()
+ currentDefaultSdlOptions.isReaderTrackData()
}
}
diff --git a/src/test/groovy/graphql/parser/ParserStressTest.groovy b/src/test/groovy/graphql/parser/ParserStressTest.groovy
new file mode 100644
index 0000000000..68a3991e49
--- /dev/null
+++ b/src/test/groovy/graphql/parser/ParserStressTest.groovy
@@ -0,0 +1,185 @@
+package graphql.parser
+
+import graphql.ExecutionInput
+import graphql.TestUtil
+import graphql.parser.exceptions.ParseCancelledException
+import graphql.parser.exceptions.ParseCancelledTooDeepException
+import spock.lang.Specification
+
+import static graphql.parser.ParserEnvironment.newParserEnvironment
+
+/**
+ * Tests related to how the Parser can be stress tested
+ */
+class ParserStressTest extends Specification {
+ static defaultOptions = ParserOptions.getDefaultParserOptions()
+ static defaultOperationOptions = ParserOptions.getDefaultOperationParserOptions()
+ static defaultSdlOptions = ParserOptions.getDefaultSdlParserOptions()
+
+ void setup() {
+ ParserOptions.setDefaultParserOptions(defaultOptions)
+ ParserOptions.setDefaultOperationParserOptions(defaultOperationOptions)
+ ParserOptions.setDefaultSdlParserOptions(defaultSdlOptions)
+ }
+
+ void cleanup() {
+ ParserOptions.setDefaultParserOptions(defaultOptions)
+ ParserOptions.setDefaultOperationParserOptions(defaultOperationOptions)
+ ParserOptions.setDefaultSdlParserOptions(defaultSdlOptions)
+ }
+
+
+ def "a billion laughs attack will be prevented by default"() {
+ def lol = "@lol" * 10000 // two tokens = 20000+ tokens
+ def text = "query { f $lol }"
+ when:
+ Parser.parse(text)
+
+ then:
+ def e = thrown(ParseCancelledException)
+ e.getMessage().contains("parsing has been cancelled")
+
+ when: "integration test to prove it cancels by default"
+
+ def sdl = """type Query { f : ID} """
+ def graphQL = TestUtil.graphQL(sdl).build()
+ def er = graphQL.execute(text)
+ then:
+ er.errors.size() == 1
+ er.errors[0].message.contains("parsing has been cancelled")
+ }
+
+ def "a large whitespace laughs attack will be prevented by default"() {
+ def spaces = " " * 300_000
+ def text = "query { f $spaces }"
+ when:
+ Parser.parse(text)
+
+ then:
+ def e = thrown(ParseCancelledException)
+ e.getMessage().contains("parsing has been cancelled")
+
+ when: "integration test to prove it cancels by default"
+
+ def sdl = """type Query { f : ID} """
+ def graphQL = TestUtil.graphQL(sdl).build()
+ def er = graphQL.execute(text)
+ then:
+ er.errors.size() == 1
+ er.errors[0].message.contains("parsing has been cancelled")
+ }
+
+ def "they can shoot themselves if they want to with large documents"() {
+ def lol = "@lol" * 10000 // two tokens = 20000+ tokens
+ def text = "query { f $lol }"
+
+ def options = ParserOptions.newParserOptions().maxTokens(30000).build()
+ def parserEnvironment = newParserEnvironment().document(text).parserOptions(options).build()
+
+ when:
+ def doc = new Parser().parseDocument(parserEnvironment)
+
+ then:
+ doc != null
+ }
+
+ def "they can shoot themselves if they want to with large documents with lots of whitespace"() {
+ def spaces = " " * 300_000
+ def text = "query { f $spaces }"
+
+ def options = ParserOptions.newParserOptions().maxWhitespaceTokens(Integer.MAX_VALUE).build()
+ def parserEnvironment = newParserEnvironment().document(text).parserOptions(options).build()
+ when:
+ def doc = new Parser().parseDocument(parserEnvironment)
+
+ then:
+ doc != null
+ }
+
+ def "they can set their own listener into action"() {
+ def queryText = "query { f(arg : 1) }"
+
+ def count = 0
+ def tokens = []
+ ParsingListener listener = { count++; tokens.add(it.getText()) }
+ def parserOptions = ParserOptions.newParserOptions().parsingListener(listener).build()
+ def parserEnvironment = newParserEnvironment().document(queryText).parserOptions(parserOptions).build()
+
+ when:
+ def doc = new Parser().parseDocument(parserEnvironment)
+
+ then:
+ doc != null
+ count == 9
+ tokens == ["query", "{", "f", "(", "arg", ":", "1", ")", "}"]
+
+ when: "integration test to prove it be supplied via EI"
+
+ def sdl = """type Query { f(arg : Int) : ID} """
+ def graphQL = TestUtil.graphQL(sdl).build()
+
+
+ def context = [:]
+ context.put(ParserOptions.class, parserOptions)
+ def executionInput = ExecutionInput.newExecutionInput()
+ .query(queryText)
+ .graphQLContext(context).build()
+
+ count = 0
+ tokens = []
+ def er = graphQL.execute(executionInput)
+ then:
+ er.errors.size() == 0
+ count == 9
+ tokens == ["query", "{", "f", "(", "arg", ":", "1", ")", "}"]
+
+ }
+
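+ // mkDeepQuery (below) produces nesting deep enough to overflow the parser stack without the rule depth limit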
+ def "deep query stack overflows are prevented by limiting the depth of rules"() {
+ String text = mkDeepQuery(10000)
+
+ when:
+ def parserEnvironment = newParserEnvironment().document(text).parserOptions(defaultOperationOptions).build()
+ Parser.parse(parserEnvironment)
+
+ then:
+ thrown(ParseCancelledTooDeepException)
+ }
+
+ def "wide queries are prevented by max token counts"() {
+ String text = mkWideQuery(10000)
+
+ when:
+
+ def parserEnvironment = newParserEnvironment().document(text).parserOptions(defaultOperationOptions).build()
+ Parser.parse(parserEnvironment)
+
+ then:
+ thrown(ParseCancelledException) // the max token limit catches wide queries like this one
+ }
+
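+ // builds a deeply nested query of the form: query q{f(a:""){f(a:""){ ... }}} with howMany levels of nesting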
+ String mkDeepQuery(int howMany) {
+ def field = 'f(a:"")'
+ StringBuilder sb = new StringBuilder("query q{")
+ for (int i = 0; i < howMany; i++) {
+ sb.append(field)
+ if (i < howMany - 1) {
+ sb.append("{")
+ }
+ }
+ for (int i = 0; i < howMany - 1; i++) {
+ sb.append("}")
+ }
+ sb.append("}")
+ return sb.toString()
+ }
+
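+ // builds a wide single-field query of the form: query q{f(a:1,a:1,...)} with howMany repeated arguments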
+ String mkWideQuery(int howMany) {
+ StringBuilder sb = new StringBuilder("query q{f(")
+ for (int i = 0; i < howMany; i++) {
+ sb.append('a:1,')
+ }
+ sb.append(")}")
+ return sb.toString()
+ }
+}
diff --git a/src/test/groovy/graphql/parser/ParserTest.groovy b/src/test/groovy/graphql/parser/ParserTest.groovy
index 6dcf336742..4484eb9e58 100644
--- a/src/test/groovy/graphql/parser/ParserTest.groovy
+++ b/src/test/groovy/graphql/parser/ParserTest.groovy
@@ -1,7 +1,6 @@
package graphql.parser
-import graphql.ExecutionInput
-import graphql.TestUtil
+
import graphql.language.Argument
import graphql.language.ArrayValue
import graphql.language.AstComparator
@@ -40,31 +39,15 @@ import graphql.language.TypeName
import graphql.language.UnionTypeDefinition
import graphql.language.VariableDefinition
import graphql.language.VariableReference
-import graphql.parser.exceptions.ParseCancelledException
import org.antlr.v4.runtime.CommonTokenStream
import org.antlr.v4.runtime.ParserRuleContext
import spock.lang.Issue
import spock.lang.Specification
import spock.lang.Unroll
-class ParserTest extends Specification {
-
- static defaultOptions = ParserOptions.getDefaultParserOptions()
- static defaultOperationOptions = ParserOptions.getDefaultOperationParserOptions()
- static defaultSdlOptions = ParserOptions.getDefaultSdlParserOptions()
-
- void setup() {
- ParserOptions.setDefaultParserOptions(defaultOptions)
- ParserOptions.setDefaultOperationParserOptions(defaultOperationOptions)
- ParserOptions.setDefaultSdlParserOptions(defaultSdlOptions)
- }
-
- void cleanup() {
- ParserOptions.setDefaultParserOptions(defaultOptions)
- ParserOptions.setDefaultOperationParserOptions(defaultOperationOptions)
- ParserOptions.setDefaultSdlParserOptions(defaultSdlOptions)
- }
+import static graphql.parser.ParserEnvironment.*
+class ParserTest extends Specification {
def "parse anonymous simple query"() {
given:
@@ -93,10 +76,6 @@ class ParserTest extends Specification {
return AstComparator.isEqual(node1, node2)
}
- boolean isEqual(List node1, List node2) {
- return AstComparator.isEqual(node1, node2)
- }
-
def "parse selectionSet for field"() {
given:
def input = "{ me { name } }"
@@ -404,7 +383,8 @@ class ParserTest extends Specification {
.build()
when:
- def document = new Parser().parseDocument(input, parserOptionsWithoutCaptureLineComments)
+ def parserEnvironment = newParserEnvironment().document(input).parserOptions(parserOptionsWithoutCaptureLineComments).build()
+ def document = new Parser().parseDocument(parserEnvironment)
Field helloField = (document.definitions[0] as OperationDefinition).selectionSet.selections[0] as Field
then:
@@ -772,7 +752,9 @@ triple3 : """edge cases \\""" "" " \\"" \\" edge cases"""
when:
def captureIgnoredCharsTRUE = ParserOptions.newParserOptions().captureIgnoredChars(true).build()
- Document document = new Parser().parseDocument(input, captureIgnoredCharsTRUE)
+ def parserEnvironment = newParserEnvironment().document(input).parserOptions(captureIgnoredCharsTRUE).build()
+
+ Document document = new Parser().parseDocument(parserEnvironment)
def field = (document.definitions[0] as OperationDefinition).selectionSet.selections[0]
then:
field.getIgnoredChars().getLeft().size() == 3
@@ -868,7 +850,7 @@ triple3 : """edge cases \\""" "" " \\"" \\" edge cases"""
}
'''
when:
- def parserEnvironment = ParserEnvironment.newParserEnvironment().document(input).build()
+ def parserEnvironment = newParserEnvironment().document(input).build()
Document document = Parser.parse(parserEnvironment)
OperationDefinition operationDefinition = (document.definitions[0] as OperationDefinition)
@@ -1043,7 +1025,8 @@ triple3 : """edge cases \\""" "" " \\"" \\" edge cases"""
def captureIgnoredCharsTRUE = ParserOptions.newParserOptions().captureIgnoredChars(true).build()
when: "explicitly off"
- def doc = new Parser().parseDocument(s, captureIgnoredCharsFALSE)
+ def parserEnvironment = newParserEnvironment().document(s).parserOptions(captureIgnoredCharsFALSE).build()
+ def doc = new Parser().parseDocument(parserEnvironment)
def type = doc.getDefinitionsOfType(ObjectTypeDefinition)[0]
then:
type.getIgnoredChars() == IgnoredChars.EMPTY
@@ -1058,7 +1041,8 @@ triple3 : """edge cases \\""" "" " \\"" \\" edge cases"""
when: "explicitly on"
- doc = new Parser().parseDocument(s, captureIgnoredCharsTRUE)
+ parserEnvironment = newParserEnvironment().document(s).parserOptions(captureIgnoredCharsTRUE).build()
+ doc = new Parser().parseDocument(parserEnvironment)
type = doc.getDefinitionsOfType(ObjectTypeDefinition)[0]
then:
@@ -1158,7 +1142,8 @@ triple3 : """edge cases \\""" "" " \\"" \\" edge cases"""
when:
options = ParserOptions.newParserOptions().captureSourceLocation(false).build()
- document = new Parser().parseDocument("{ f }", options)
+ def parserEnvironment = newParserEnvironment().document("{ f }").parserOptions(options).build()
+ document = new Parser().parseDocument(parserEnvironment)
then:
!options.isCaptureSourceLocation()
@@ -1166,104 +1151,5 @@ triple3 : """edge cases \\""" "" " \\"" \\" edge cases"""
document.getDefinitions()[0].getSourceLocation() == SourceLocation.EMPTY
}
- def "a billion laughs attack will be prevented by default"() {
- def lol = "@lol" * 10000 // two tokens = 20000+ tokens
- def text = "query { f $lol }"
- when:
- Parser.parse(text)
-
- then:
- def e = thrown(ParseCancelledException)
- e.getMessage().contains("parsing has been cancelled")
-
- when: "integration test to prove it cancels by default"
-
- def sdl = """type Query { f : ID} """
- def graphQL = TestUtil.graphQL(sdl).build()
- def er = graphQL.execute(text)
- then:
- er.errors.size() == 1
- er.errors[0].message.contains("parsing has been cancelled")
- }
-
- def "a large whitespace laughs attack will be prevented by default"() {
- def spaces = " " * 300_000
- def text = "query { f $spaces }"
- when:
- Parser.parse(text)
-
- then:
- def e = thrown(ParseCancelledException)
- e.getMessage().contains("parsing has been cancelled")
- when: "integration test to prove it cancels by default"
-
- def sdl = """type Query { f : ID} """
- def graphQL = TestUtil.graphQL(sdl).build()
- def er = graphQL.execute(text)
- then:
- er.errors.size() == 1
- er.errors[0].message.contains("parsing has been cancelled")
- }
-
- def "they can shoot themselves if they want to with large documents"() {
- def lol = "@lol" * 10000 // two tokens = 20000+ tokens
- def text = "query { f $lol }"
-
- def options = ParserOptions.newParserOptions().maxTokens(30000).build()
- when:
- def doc = new Parser().parseDocument(text, options)
-
- then:
- doc != null
- }
-
- def "they can shoot themselves if they want to with large documents with lots of whitespace"() {
- def spaces = " " * 300_000
- def text = "query { f $spaces }"
-
- def options = ParserOptions.newParserOptions().maxWhitespaceTokens(Integer.MAX_VALUE).build()
- when:
- def doc = new Parser().parseDocument(text, options)
-
- then:
- doc != null
- }
-
- def "they can set their own listener into action"() {
- def queryText = "query { f(arg : 1) }"
-
- def count = 0
- def tokens = []
- ParsingListener listener = { count++; tokens.add(it.getText()) }
- def parserOptions = ParserOptions.newParserOptions().parsingListener(listener).build()
- when:
- def doc = new Parser().parseDocument(queryText, parserOptions)
-
- then:
- doc != null
- count == 9
- tokens == ["query", "{", "f", "(", "arg", ":", "1", ")", "}"]
-
- when: "integration test to prove it be supplied via EI"
-
- def sdl = """type Query { f(arg : Int) : ID} """
- def graphQL = TestUtil.graphQL(sdl).build()
-
-
- def context = [:]
- context.put(ParserOptions.class, parserOptions)
- def executionInput = ExecutionInput.newExecutionInput()
- .query(queryText)
- .graphQLContext(context).build()
-
- count = 0
- tokens = []
- def er = graphQL.execute(executionInput)
- then:
- er.errors.size() == 0
- count == 9
- tokens == ["query", "{", "f", "(", "arg", ":", "1", ")", "}"]
-
- }
}
diff --git a/src/test/groovy/graphql/schema/PropertyDataFetcherClassLoadingTest.groovy b/src/test/groovy/graphql/schema/PropertyDataFetcherClassLoadingTest.groovy
new file mode 100644
index 0000000000..6ee7623dd3
--- /dev/null
+++ b/src/test/groovy/graphql/schema/PropertyDataFetcherClassLoadingTest.groovy
@@ -0,0 +1,62 @@
+package graphql.schema
+
+import graphql.GraphQLException
+import graphql.Scalars
+import spock.lang.Specification
+
+class PropertyDataFetcherClassLoadingTest extends Specification {
+
+ GraphQLFieldDefinition fld(String fldName) {
+ return GraphQLFieldDefinition.newFieldDefinition().name(fldName).type(Scalars.GraphQLString).build()
+ }
+
+ static class BrokenClass {
+ static {
+ // this should prevent it from existing
+ throw new RuntimeException("No soup for you!")
+ }
+ }
+
+
+ static class TargetClass {
+
+ String getOkThings() {
+ return "ok"
+ }
+
+ BrokenClass getBrokenThings() {
+ return BrokenClass.cast(null)
+ }
+ }
+
+ def "can survive linkage errors during access to broken classes in Lambda support"() {
+ def okDF = PropertyDataFetcher.fetching("okThings")
+ def brokenDF = PropertyDataFetcher.fetching("brokenThings")
+
+ def target = new TargetClass()
+
+ when:
+ def value = okDF.get(fld("okThings"), target, { -> null })
+ then:
+ value == "ok"
+
+ when:
+ brokenDF.get(fld("brokenThings"), target, { -> null })
+ then:
+ // This is because the reflection method finder can't get to it
+ // but it has made it past the Meta Lambda support
+ thrown(GraphQLException)
+
+ // multiple times - same result
+ when:
+ value = okDF.get(fld("okThings"), target, { -> null })
+ then:
+ value == "ok"
+
+ when:
+ brokenDF.get(fld("brokenThings"), target, { -> null })
+ then:
+ thrown(GraphQLException)
+
+ }
+}
diff --git a/src/test/groovy/graphql/schema/diffing/SchemaDiffingTest.groovy b/src/test/groovy/graphql/schema/diffing/SchemaDiffingTest.groovy
index 8f55d7bc12..e9d5bf5202 100644
--- a/src/test/groovy/graphql/schema/diffing/SchemaDiffingTest.groovy
+++ b/src/test/groovy/graphql/schema/diffing/SchemaDiffingTest.groovy
@@ -1083,6 +1083,34 @@ class SchemaDiffingTest extends Specification {
operations.size() == 2
}
+ def "rename enum value"() {
+ given:
+ def schema1 = schema("""
+ type Query {
+ foo: Foo
+ }
+ enum Foo {
+ V1
+ V2
+ }
+ """)
+ def schema2 = schema("""
+ type Query {
+ foo: Foo
+ }
+ enum Foo {
+ V1
+ V3
+ }
+ """)
+
+ when:
+ def operations = new SchemaDiffing().diffGraphQLSchema(schema1, schema2)
+
+ then:
+ operations.size() == 1
+ }
+
def "arguments in directives changed"() {
given:
def schema1 = schema('''
diff --git a/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerAppliedDirectivesTest.groovy b/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerAppliedDirectivesTest.groovy
index 4e1c30a21d..623c2dfc04 100644
--- a/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerAppliedDirectivesTest.groovy
+++ b/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerAppliedDirectivesTest.groovy
@@ -547,7 +547,11 @@ class EditOperationAnalyzerAppliedDirectivesTest extends Specification {
def changes = calcDiff(oldSdl, newSdl)
then:
changes.enumDifferences["E"] instanceof EnumModification
- def appliedDirective = (changes.enumDifferences["E"] as EnumModification).getDetails(AppliedDirectiveDeletion)
+ def diff = changes.enumDifferences["E"] as EnumModification
+
+ diff.getDetails().size() == 1
+
+ def appliedDirective = diff.getDetails(AppliedDirectiveDeletion)
(appliedDirective[0].locationDetail as AppliedDirectiveEnumLocation).name == "E"
appliedDirective[0].name == "d"
}
@@ -778,7 +782,6 @@ class EditOperationAnalyzerAppliedDirectivesTest extends Specification {
appliedDirective[0].name == "d"
}
-
def "applied directive deleted union"() {
given:
def oldSdl = '''
@@ -802,8 +805,12 @@ class EditOperationAnalyzerAppliedDirectivesTest extends Specification {
when:
def changes = calcDiff(oldSdl, newSdl)
then:
+ changes.unionDifferences.keySet() == ["U"] as Set
changes.unionDifferences["U"] instanceof UnionModification
- def appliedDirective = (changes.unionDifferences["U"] as UnionModification).getDetails(AppliedDirectiveDeletion)
+ def diff = changes.unionDifferences["U"] as UnionModification
+ diff.details.size() == 1
+
+ def appliedDirective = diff.getDetails(AppliedDirectiveDeletion)
(appliedDirective[0].locationDetail as AppliedDirectiveUnionLocation).name == "U"
appliedDirective[0].name == "d"
}
diff --git a/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerTest.groovy b/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerTest.groovy
index a60a747a3d..99c7071f1c 100644
--- a/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerTest.groovy
+++ b/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerTest.groovy
@@ -1,14 +1,14 @@
package graphql.schema.diffing.ana
-import com.sun.xml.internal.bind.v2.schemagen.xmlschema.Union
import graphql.TestUtil
import graphql.schema.diffing.SchemaDiffing
import spock.lang.Specification
-import static graphql.schema.diffing.ana.SchemaDifference.*
-import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveAddition
-import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveArgumentValueModification
+import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveDeletion
+import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveObjectFieldArgumentLocation
+import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveDirectiveArgumentLocation
import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveObjectFieldLocation
+import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveInterfaceFieldArgumentLocation
import static graphql.schema.diffing.ana.SchemaDifference.DirectiveAddition
import static graphql.schema.diffing.ana.SchemaDifference.DirectiveArgumentAddition
import static graphql.schema.diffing.ana.SchemaDifference.DirectiveArgumentDefaultValueModification
@@ -22,8 +22,14 @@ import static graphql.schema.diffing.ana.SchemaDifference.EnumDeletion
import static graphql.schema.diffing.ana.SchemaDifference.EnumModification
import static graphql.schema.diffing.ana.SchemaDifference.EnumValueAddition
import static graphql.schema.diffing.ana.SchemaDifference.EnumValueDeletion
+import static graphql.schema.diffing.ana.SchemaDifference.EnumValueRenamed
import static graphql.schema.diffing.ana.SchemaDifference.InputObjectAddition
import static graphql.schema.diffing.ana.SchemaDifference.InputObjectDeletion
+import static graphql.schema.diffing.ana.SchemaDifference.InputObjectFieldAddition
+import static graphql.schema.diffing.ana.SchemaDifference.InputObjectFieldDefaultValueModification
+import static graphql.schema.diffing.ana.SchemaDifference.InputObjectFieldDeletion
+import static graphql.schema.diffing.ana.SchemaDifference.InputObjectFieldRename
+import static graphql.schema.diffing.ana.SchemaDifference.InputObjectFieldTypeModification
import static graphql.schema.diffing.ana.SchemaDifference.InputObjectModification
import static graphql.schema.diffing.ana.SchemaDifference.InterfaceAddition
import static graphql.schema.diffing.ana.SchemaDifference.InterfaceFieldAddition
@@ -85,6 +91,7 @@ class EditOperationAnalyzerTest extends Specification {
changes.objectDifferences["Query"] instanceof ObjectModification
(changes.objectDifferences["Query"] as ObjectModification).oldName == "Query"
(changes.objectDifferences["Query"] as ObjectModification).newName == "MyQuery"
+ (changes.objectDifferences["Query"] as ObjectModification).isNameChanged()
}
def "interface renamed"() {
@@ -113,6 +120,7 @@ class EditOperationAnalyzerTest extends Specification {
changes.interfaceDifferences["I"] instanceof InterfaceModification
(changes.interfaceDifferences["I"] as InterfaceModification).oldName == "I"
(changes.interfaceDifferences["I"] as InterfaceModification).newName == "IRenamed"
+ (changes.interfaceDifferences["I"] as InterfaceModification).isNameChanged()
}
def "interface removed from object"() {
@@ -205,7 +213,6 @@ class EditOperationAnalyzerTest extends Specification {
def iFieldRenames = interfaceModification.getDetails(InterfaceFieldRename.class)
iFieldRenames[0].oldName == "hello"
iFieldRenames[0].newName == "hello2"
-
}
def "object and interface field deleted"() {
@@ -331,6 +338,7 @@ class EditOperationAnalyzerTest extends Specification {
changes.unionDifferences["U"] instanceof UnionModification
(changes.unionDifferences["U"] as UnionModification).oldName == "U"
(changes.unionDifferences["U"] as UnionModification).newName == "X"
+ (changes.unionDifferences["U"] as UnionModification).isNameChanged()
}
def "union renamed and member removed"() {
@@ -364,6 +372,7 @@ class EditOperationAnalyzerTest extends Specification {
unionDiff.oldName == "U"
unionDiff.newName == "X"
unionDiff.getDetails(UnionMemberDeletion)[0].name == "B"
+ unionDiff.isNameChanged()
}
def "union renamed and member added"() {
@@ -397,6 +406,7 @@ class EditOperationAnalyzerTest extends Specification {
def unionDiff = changes.unionDifferences["U"] as UnionModification
unionDiff.oldName == "U"
unionDiff.newName == "X"
+ unionDiff.isNameChanged()
unionDiff.getDetails(UnionMemberAddition)[0].name == "B"
}
@@ -552,15 +562,16 @@ class EditOperationAnalyzerTest extends Specification {
changes.objectDifferences["Query"] instanceof ObjectModification
def objectModification = changes.objectDifferences["Query"] as ObjectModification
def objectArgumentRenamed = objectModification.getDetails(ObjectFieldArgumentRename.class);
+ objectArgumentRenamed[0].fieldName == "hello"
objectArgumentRenamed[0].oldName == "arg"
objectArgumentRenamed[0].newName == "argRename"
and:
changes.interfaceDifferences["I"] instanceof InterfaceModification
def interfaceModification = changes.interfaceDifferences["I"] as InterfaceModification
def interfaceArgumentRenamed = interfaceModification.getDetails(InterfaceFieldArgumentRename.class);
+ interfaceArgumentRenamed[0].fieldName == "hello"
interfaceArgumentRenamed[0].oldName == "arg"
interfaceArgumentRenamed[0].newName == "argRename"
-
}
@@ -972,6 +983,7 @@ class EditOperationAnalyzerTest extends Specification {
changes.interfaceDifferences.size() == 2
changes.interfaceDifferences["Node"] === changes.interfaceDifferences["Node2"]
changes.interfaceDifferences["Node2"] instanceof InterfaceModification
+ (changes.interfaceDifferences["Node2"] as InterfaceModification).isNameChanged()
}
def "interfaced renamed and another interface added to it"() {
@@ -1008,6 +1020,7 @@ class EditOperationAnalyzerTest extends Specification {
changes.interfaceDifferences.size() == 3
changes.interfaceDifferences["Node"] == changes.interfaceDifferences["Node2"]
changes.interfaceDifferences["Node2"] instanceof InterfaceModification
+ (changes.interfaceDifferences["Node2"] as InterfaceModification).isNameChanged()
changes.interfaceDifferences["NewI"] instanceof InterfaceAddition
changes.objectDifferences.size() == 1
changes.objectDifferences["Foo"] instanceof ObjectModification
@@ -1015,7 +1028,6 @@ class EditOperationAnalyzerTest extends Specification {
def addedInterfaceDetails = objectModification.getDetails(ObjectInterfaceImplementationAddition)
addedInterfaceDetails.size() == 1
addedInterfaceDetails[0].name == "NewI"
-
}
def "enum renamed"() {
@@ -1043,7 +1055,7 @@ class EditOperationAnalyzerTest extends Specification {
def modification = changes.enumDifferences["E"] as EnumModification
modification.oldName == "E"
modification.newName == "ERenamed"
-
+ modification.isNameChanged()
}
def "enum added"() {
@@ -1143,6 +1155,39 @@ class EditOperationAnalyzerTest extends Specification {
enumModification.getDetails(EnumValueDeletion)[0].name == "B"
}
+ def "enum value added and removed"() {
+ given:
+ def oldSdl = '''
+ type Query {
+ e: MyEnum
+ }
+ enum MyEnum {
+ A
+ B
+ }
+ '''
+ def newSdl = '''
+ type Query {
+ e: MyEnum
+ }
+ enum MyEnum {
+ A
+ C
+ }
+ '''
+ when:
+ def changes = calcDiff(oldSdl, newSdl)
+ then:
+ changes.enumDifferences["MyEnum"] instanceof EnumModification
+
+ def enumModification = changes.enumDifferences["MyEnum"] as EnumModification
+ enumModification.getDetails().size() == 1
+
+ def rename = enumModification.getDetails(EnumValueRenamed)[0]
+ rename.oldName == "B"
+ rename.newName == "C"
+ }
+
def "scalar added"() {
given:
def oldSdl = '''
@@ -1204,6 +1249,7 @@ class EditOperationAnalyzerTest extends Specification {
def modification = changes.scalarDifferences["Foo"] as ScalarModification
modification.oldName == "Foo"
modification.newName == "Bar"
+ modification.isNameChanged()
}
def "input object added"() {
@@ -1443,6 +1489,7 @@ class EditOperationAnalyzerTest extends Specification {
def modification = changes.inputObjectDifferences["I"] as InputObjectModification
modification.oldName == "I"
modification.newName == "IRenamed"
+ modification.isNameChanged()
}
@@ -1507,6 +1554,7 @@ class EditOperationAnalyzerTest extends Specification {
def modification = changes.directiveDifferences["d"] as DirectiveModification
modification.oldName == "d"
modification.newName == "dRenamed"
+ modification.isNameChanged()
}
def "directive argument renamed"() {
@@ -1530,7 +1578,6 @@ class EditOperationAnalyzerTest extends Specification {
def renames = (changes.directiveDifferences["d"] as DirectiveModification).getDetails(DirectiveArgumentRename)
renames[0].oldName == "foo"
renames[0].newName == "bar"
-
}
def "directive argument added"() {
@@ -1655,9 +1702,240 @@ class EditOperationAnalyzerTest extends Specification {
argTypeModification[0].newType == '[String]!'
}
+ def "field renamed and output type changed and argument deleted"() {
+ given:
+ def oldSdl = '''
+ type Query {
+ ping(pong: String): ID
+ }
+ '''
+ def newSdl = '''
+ type Query {
+ echo: String
+ }
+ '''
+ when:
+ def changes = calcDiff(oldSdl, newSdl)
+ then:
+ true
+ changes.objectDifferences["Query"] instanceof ObjectModification
+ def objectDiff = changes.objectDifferences["Query"] as ObjectModification
+
+ def rename = objectDiff.getDetails(ObjectFieldRename)
+ rename.size() == 1
+ rename[0].oldName == "ping"
+ rename[0].newName == "echo"
+
+ def argumentDeletion = objectDiff.getDetails(ObjectFieldArgumentDeletion)
+ argumentDeletion.size() == 1
+ argumentDeletion[0].fieldName == "ping"
+ argumentDeletion[0].name == "pong"
+
+ def typeModification = objectDiff.getDetails(ObjectFieldTypeModification)
+ typeModification.size() == 1
+ typeModification[0].fieldName == "echo"
+ typeModification[0].oldType == "ID"
+ typeModification[0].newType == "String"
+ }
+ def "object field argument changed and applied directive deleted"() {
+ given:
+ def oldSdl = '''
+ type Query {
+ ping(pong: String @d): ID
+ }
+ directive @d on ARGUMENT_DEFINITION
+ '''
+ def newSdl = '''
+ type Query {
+ ping(pong: Int): ID
+ }
+ '''
+ when:
+ def changes = calcDiff(oldSdl, newSdl)
+ then:
+ true
+ changes.objectDifferences["Query"] instanceof ObjectModification
+ def objectDiff = changes.objectDifferences["Query"] as ObjectModification
+ def typeModification = objectDiff.getDetails(ObjectFieldArgumentTypeModification)
+ typeModification.size() == 1
+ typeModification[0].oldType == "String"
+ typeModification[0].newType == "Int"
+ typeModification[0].fieldName == "ping"
+ typeModification[0].argumentName == "pong"
+
+ def directiveDeletion = objectDiff.getDetails(AppliedDirectiveDeletion)
+ directiveDeletion.size() == 1
+ directiveDeletion[0].name == "d"
+ directiveDeletion[0].locationDetail instanceof AppliedDirectiveObjectFieldArgumentLocation
+
+ def location = directiveDeletion[0].locationDetail as AppliedDirectiveObjectFieldArgumentLocation
+ location.objectName == "Query"
+ location.fieldName == "ping"
+ location.argumentName == "pong"
+ }
+ def "interface field argument changed and applied directive deleted"() {
+ given:
+ def oldSdl = '''
+ type Query {
+ echo: String
+ }
+ interface TableTennis {
+ ping(pong: String @d): ID
+ }
+ directive @d on ARGUMENT_DEFINITION
+ '''
+ def newSdl = '''
+ type Query {
+ echo: String
+ }
+ interface TableTennis {
+ ping(pong: Int): ID
+ }
+ '''
+ when:
+ def changes = calcDiff(oldSdl, newSdl)
+ then:
+ true
+ changes.interfaceDifferences["TableTennis"] instanceof InterfaceModification
+ def diff = changes.interfaceDifferences["TableTennis"] as InterfaceModification
+
+ def typeModification = diff.getDetails(InterfaceFieldArgumentTypeModification)
+ typeModification.size() == 1
+ typeModification[0].oldType == "String"
+ typeModification[0].newType == "Int"
+ typeModification[0].fieldName == "ping"
+ typeModification[0].argumentName == "pong"
+
+ def directiveDeletion = diff.getDetails(AppliedDirectiveDeletion)
+ directiveDeletion.size() == 1
+ directiveDeletion[0].name == "d"
+ directiveDeletion[0].locationDetail instanceof AppliedDirectiveInterfaceFieldArgumentLocation
+
+ def location = directiveDeletion[0].locationDetail as AppliedDirectiveInterfaceFieldArgumentLocation
+ location.interfaceName == "TableTennis"
+ location.fieldName == "ping"
+ location.argumentName == "pong"
+ }
+
+ def "directive argument changed and applied directive deleted"() {
+ given:
+ def oldSdl = '''
+ type Query {
+ ping(pong: String): ID @d
+ }
+ directive @a on ARGUMENT_DEFINITION
+ directive @d(message: ID @a) on FIELD_DEFINITION
+ '''
+ def newSdl = '''
+ type Query {
+ ping(pong: String): ID @d
+ }
+ directive @a on ARGUMENT_DEFINITION
+ directive @d(message: String) on FIELD_DEFINITION
+ '''
+ when:
+ def changes = calcDiff(oldSdl, newSdl)
+ then:
+ true
+ changes.directiveDifferences["d"] instanceof DirectiveModification
+ def diff = changes.directiveDifferences["d"] as DirectiveModification
+
+ def typeModification = diff.getDetails(DirectiveArgumentTypeModification)
+ typeModification.size() == 1
+ typeModification[0].oldType == "ID"
+ typeModification[0].newType == "String"
+ typeModification[0].argumentName == "message"
+
+ def directiveDeletion = diff.getDetails(AppliedDirectiveDeletion)
+ directiveDeletion.size() == 1
+ directiveDeletion[0].name == "a"
+ directiveDeletion[0].locationDetail instanceof AppliedDirectiveDirectiveArgumentLocation
+
+ def location = directiveDeletion[0].locationDetail as AppliedDirectiveDirectiveArgumentLocation
+ location.directiveName == "d"
+ location.argumentName == "message"
+ }
+
+ def "field output type changed and applied directive removed"() {
+ given:
+ def oldSdl = '''
+ type Query {
+ echo: ID @d
+ }
+ directive @d on FIELD_DEFINITION
+ '''
+ def newSdl = '''
+ type Query {
+ echo: String
+ }
+ '''
+ when:
+ def changes = calcDiff(oldSdl, newSdl)
+ then:
+ true
+ changes.objectDifferences["Query"] instanceof ObjectModification
+ def objectDiff = changes.objectDifferences["Query"] as ObjectModification
+
+ def typeModification = objectDiff.getDetails(ObjectFieldTypeModification)
+ typeModification.size() == 1
+ typeModification[0].fieldName == "echo"
+ typeModification[0].oldType == "ID"
+ typeModification[0].newType == "String"
+
+ def directiveDeletion = objectDiff.getDetails(AppliedDirectiveDeletion)
+ directiveDeletion.size() == 1
+ directiveDeletion[0].name == "d"
+ directiveDeletion[0].locationDetail instanceof AppliedDirectiveObjectFieldLocation
+
+ def location = directiveDeletion[0].locationDetail as AppliedDirectiveObjectFieldLocation
+ location.objectName == "Query"
+ location.fieldName == "echo"
+ }
+
+ def "input field renamed and type changed and applied directive removed"() {
+ given:
+ def oldSdl = '''
+ type Query {
+ echo(input: Echo): String
+ }
+ input Echo {
+ message: String @d
+ }
+ directive @d on INPUT_FIELD_DEFINITION
+ '''
+ def newSdl = '''
+ type Query {
+ echo(input: Echo): String
+ }
+ input Echo {
+ age: Int
+ }
+ '''
+ when:
+ def changes = calcDiff(oldSdl, newSdl)
+ then:
+ true
+ changes.inputObjectDifferences["Echo"] instanceof InputObjectModification
+ def diff = changes.inputObjectDifferences["Echo"] as InputObjectModification
+
+ def rename = diff.getDetails(InputObjectFieldRename)
+ rename.size() == 1
+ rename[0].oldName == "message"
+ rename[0].newName == "age"
+
+ def typeModification = diff.getDetails(InputObjectFieldTypeModification)
+ typeModification.size() == 1
+ typeModification[0].fieldName == "age"
+ typeModification[0].oldType == "String"
+ typeModification[0].newType == "Int"
+
+ def directiveDeletion = diff.getDetails(AppliedDirectiveDeletion)
+ directiveDeletion.size() == 1
+ directiveDeletion[0].name == "d"
+ }
EditOperationAnalysisResult calcDiff(
String oldSdl,
diff --git a/src/test/groovy/graphql/schema/fetching/LambdaFetchingSupportTest.groovy b/src/test/groovy/graphql/schema/fetching/LambdaFetchingSupportTest.groovy
index d3a68f992d..9343af806f 100644
--- a/src/test/groovy/graphql/schema/fetching/LambdaFetchingSupportTest.groovy
+++ b/src/test/groovy/graphql/schema/fetching/LambdaFetchingSupportTest.groovy
@@ -1,5 +1,9 @@
package graphql.schema.fetching
+import graphql.Scalars
+import graphql.schema.GraphQLFieldDefinition
+import graphql.schema.PropertyDataFetcher
+import graphql.util.javac.DynamicJavacSupport
import spock.lang.Specification
class LambdaFetchingSupportTest extends Specification {
@@ -141,4 +145,56 @@ class LambdaFetchingSupportTest extends Specification {
then:
!getter.isPresent()
}
+
+ GraphQLFieldDefinition fld(String fldName) {
+ return GraphQLFieldDefinition.newFieldDefinition().name(fldName).type(Scalars.GraphQLString).build()
+ }
+
+ def "different class loaders induce certain behaviours"() {
+ String sourceCode = '''
+ package com.dynamic;
+ public class TestClass {
+ public String hello() {
+ return "world";
+ }
+ }
+ '''
+
+ def customClass = new DynamicJavacSupport(null).compile("com.dynamic.TestClass", sourceCode)
+ def targetObject = customClass.getDeclaredConstructor().newInstance()
+
+ // show that the graphql-java classes can't access this custom-loaded class
+ when:
+ LambdaFetchingSupport.class.getClassLoader().loadClass("com.dynamic.TestClass")
+ then:
+ thrown(ClassNotFoundException)
+
+ // show that reflection works
+ when:
+ def helloMethod = targetObject.getClass().getMethod("hello")
+ def reflectedValue = helloMethod.invoke(targetObject)
+ then:
+ reflectedValue == "world"
+
+ // without MethodHandles.privateLookupIn this will fail crossing class loaders in Java 8
+ // if we change to privateLookupIn - then this will start working and this test will need to be changed
+ when:
+ def getter = LambdaFetchingSupport.createGetter(customClass, "hello")
+ then:
+
+ getter.isPresent()
+ try {
+ getter.get().apply(targetObject)
+ assert false, "We expect this to fail on Java 8 without access to MethodHandles.privateLookupIn"
+ } catch (LinkageError | ClassCastException ignored) {
+ }
+
+ // show that a DF can still be used access this because of the reflection fallback
+ // in the future it will work via MethodHandles.privateLookupIn
+ when:
+ def ageDF = PropertyDataFetcher.fetching("hello")
+ def value = ageDF.get(fld("hello"), targetObject, { -> null })
+ then:
+ value == "world"
+ }
}
diff --git a/src/test/groovy/graphql/schema/idl/SchemaParserTest.groovy b/src/test/groovy/graphql/schema/idl/SchemaParserTest.groovy
index 8fcbcd44e1..24d9af718a 100644
--- a/src/test/groovy/graphql/schema/idl/SchemaParserTest.groovy
+++ b/src/test/groovy/graphql/schema/idl/SchemaParserTest.groovy
@@ -1,5 +1,6 @@
package graphql.schema.idl
+import graphql.TestUtil
import graphql.language.EnumTypeDefinition
import graphql.language.InterfaceTypeDefinition
import graphql.language.ObjectTypeDefinition
@@ -9,6 +10,8 @@ import graphql.schema.idl.errors.SchemaProblem
import spock.lang.Specification
import spock.lang.Unroll
+import static graphql.schema.idl.SchemaPrinter.Options.defaultOptions
+
/**
* We don't want to retest the base GraphQL parser since it has its own testing
* but we do want to test our aspects of it
@@ -361,4 +364,120 @@ class SchemaParserTest extends Specification {
e.errors[0].message.contains("parsing has been cancelled")
}
+
+ def "correctly parses schema keyword block, include Query, does not include Mutation type"() {
+ // From RFC to clarify spec https://github.com/graphql/graphql-spec/pull/987
+ when:
+ def schema = """schema {
+ query: Query
+}
+
+type Mutation {
+ geneSequence: String!
+ name: String!
+}
+
+type Query {
+ viruses: [Virus!]
+}
+
+type Virus {
+ knownMutations: [Mutation!]!
+ name: String!
+}
+"""
+ def graphQL = TestUtil.graphQL(schema).build()
+
+ then:
+ graphQL.graphQLSchema.definition.operationTypeDefinitions.size() == 1
+ graphQL.graphQLSchema.definition.operationTypeDefinitions.first().name == "query"
+ graphQL.graphQLSchema.queryType != null
+ graphQL.graphQLSchema.mutationType == null
+
+ when:
+ // Verify that the printed schema is the same as the original
+ def options = defaultOptions()
+ .includeIntrospectionTypes(false)
+ .includeScalarTypes(false)
+ .includeDirectiveDefinitions(false)
+ .includeSchemaDefinition(true)
+
+ then:
+ def printedSchema = new SchemaPrinter(options).print(graphQL.graphQLSchema)
+ printedSchema == schema
+ }
+
+ def "correctly parses schema keyword block, include Query, does not include Subscription type"() {
+ // From RFC to clarify spec https://github.com/graphql/graphql-spec/pull/987
+ when:
+ def schema = """schema {
+ query: Query
+}
+
+type Query {
+ viruses: [Virus!]
+}
+
+type Subscription {
+ newspaper: String!
+}
+
+type Virus {
+ name: String!
+}
+"""
+ def graphQL = TestUtil.graphQL(schema).build()
+
+ then:
+ graphQL.graphQLSchema.definition.operationTypeDefinitions.size() == 1
+ graphQL.graphQLSchema.definition.operationTypeDefinitions.first().name == "query"
+ graphQL.graphQLSchema.queryType != null
+ graphQL.graphQLSchema.subscriptionType == null
+
+ when:
+ // Verify that the printed schema is the same as the original
+ def options = defaultOptions()
+ .includeIntrospectionTypes(false)
+ .includeScalarTypes(false)
+ .includeDirectiveDefinitions(false)
+ .includeSchemaDefinition(true)
+
+ then:
+ def printedSchema = new SchemaPrinter(options).print(graphQL.graphQLSchema)
+ printedSchema == schema
+ }
+
+ def "correctly parses schema that does not contain a schema definition block, includes Query and Mutation types"() {
+ when:
+ def schema = """type Mutation {
+ geneSequence: String!
+ name: String!
+}
+
+type Query {
+ viruses: [Virus!]
+}
+
+type Virus {
+ name: String!
+}
+"""
+ def graphQL = TestUtil.graphQL(schema).build()
+
+ then:
+ graphQL.graphQLSchema.definition == null // No SchemaDefinition
+ graphQL.graphQLSchema.queryType != null
+ graphQL.graphQLSchema.mutationType != null
+
+ when:
+ // Verify that the printed schema is the same as the original
+ def options = defaultOptions()
+ .includeIntrospectionTypes(false)
+ .includeScalarTypes(false)
+ .includeDirectiveDefinitions(false)
+
+ then:
+ def printedSchema = new SchemaPrinter(options).print(graphQL.graphQLSchema)
+ printedSchema == schema
+ }
}
diff --git a/src/test/groovy/graphql/schema/impl/SchemaUtilTest.groovy b/src/test/groovy/graphql/schema/impl/SchemaUtilTest.groovy
index 19dc803bfa..a36dfaf924 100644
--- a/src/test/groovy/graphql/schema/impl/SchemaUtilTest.groovy
+++ b/src/test/groovy/graphql/schema/impl/SchemaUtilTest.groovy
@@ -4,6 +4,7 @@ import graphql.AssertException
import graphql.DirectivesUtil
import graphql.NestedInputSchema
import graphql.introspection.Introspection
+import graphql.schema.GraphQLAppliedDirectiveArgument
import graphql.schema.GraphQLArgument
import graphql.schema.GraphQLFieldDefinition
import graphql.schema.GraphQLInputObjectType
@@ -164,12 +165,15 @@ class SchemaUtilTest extends Specification {
GraphQLObjectType person = ((GraphQLObjectType) SchemaWithReferences.getType("Person"))
GraphQLArgument cacheEnabled = SchemaWithReferences.getDirectivesByName()
.get(Cache.getName()).getArgument("enabled")
+ GraphQLAppliedDirectiveArgument appliedCacheEnabled = SchemaWithReferences.getSchemaAppliedDirective(Cache.getName())
+ .getArgument("enabled")
then:
SchemaWithReferences.allTypesAsList.findIndexOf { it instanceof GraphQLTypeReference } == -1
pet.types.findIndexOf { it instanceof GraphQLTypeReference } == -1
person.interfaces.findIndexOf { it instanceof GraphQLTypeReference } == -1
!(cacheEnabled.getType() instanceof GraphQLTypeReference)
+ !(appliedCacheEnabled.getType() instanceof GraphQLTypeReference)
}
def "all references are replaced with deprecated directiveWithArg"() {
diff --git a/src/test/groovy/graphql/util/javac/DynamicJavacSupport.java b/src/test/groovy/graphql/util/javac/DynamicJavacSupport.java
new file mode 100644
index 0000000000..6655bcd599
--- /dev/null
+++ b/src/test/groovy/graphql/util/javac/DynamicJavacSupport.java
@@ -0,0 +1,157 @@
+package graphql.util.javac;
+
+import javax.tools.DiagnosticCollector;
+import javax.tools.FileObject;
+import javax.tools.ForwardingJavaFileManager;
+import javax.tools.JavaCompiler;
+import javax.tools.JavaFileManager;
+import javax.tools.JavaFileObject;
+import javax.tools.SimpleJavaFileObject;
+import javax.tools.StandardJavaFileManager;
+import javax.tools.ToolProvider;
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStream;
+import java.net.URI;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import static java.util.Objects.requireNonNull;
+
+
+/**
+ * This utility allows us to dynamically create Java classes and place them into
+ * floating class loaders. This will allow us to test class loader challenges.
+ *
+ * Props to https://www.baeldung.com/java-string-compile-execute-code where
+ * most of this code came from.
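+ *
+ * A minimal usage sketch (mirroring the accompanying DynamicJavacSupportTest; sourceCode is a String of Java source):
+ * <pre>{@code
+ *   DynamicJavacSupport javac = new DynamicJavacSupport(null);
+ *   Class<?> clazz = javac.compile("com.dynamic.TestClass", sourceCode);
+ *   Object instance = clazz.getDeclaredConstructor().newInstance();
+ * }</pre>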
+ */
+public class DynamicJavacSupport {
+
+ private final JavaCompiler compiler;
+ private final InMemoryFileManager manager;
+
+ public DynamicJavacSupport(ClassLoader parentClassLoader) {
+ compiler = ToolProvider.getSystemJavaCompiler();
+ StandardJavaFileManager standardFileManager = compiler.getStandardFileManager(null, null, null);
+ manager = new InMemoryFileManager(parentClassLoader, standardFileManager);
+ }
+
+
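+ /**
+ * Compiles the given source in memory and loads the resulting class via the in-memory
+ * class loader; compiler diagnostics are printed if compilation fails.
+ */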
+ public Class<?> compile(String qualifiedClassName, String sourceCode) throws ClassNotFoundException {
+
+ List<JavaFileObject> sourceFiles = Collections.singletonList(new JavaSourceFromString(qualifiedClassName, sourceCode));
+
+ DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<>();
+ JavaCompiler.CompilationTask task = compiler.getTask(null, manager, diagnostics, null, null, sourceFiles);
+
+ boolean result = task.call();
+
+ if (!result) {
+ diagnostics.getDiagnostics()
+ .forEach(d -> System.out.printf("dyna-javac : %s\n", d));
+ throw new IllegalStateException("Could not compile " + qualifiedClassName + " as a class");
+ } else {
+ ClassLoader classLoader = manager.getClassLoader(null);
+ Class<?> clazz = classLoader.loadClass(qualifiedClassName);
+ return clazz;
+ }
+ }
+
+ static class InMemoryFileManager extends ForwardingJavaFileManager<JavaFileManager> {
+ private final InMemoryClassLoader loader;
+ private final Map<String, JavaClassAsBytes> compiledClasses;
+
+ InMemoryFileManager(ClassLoader parentClassLoader, StandardJavaFileManager standardManager) {
+ super(standardManager);
+ this.compiledClasses = new ConcurrentHashMap<>();
+ this.loader = new InMemoryClassLoader(parentClassLoader, this);
+ }
+
+ @Override
+ public JavaFileObject getJavaFileForOutput(Location location,
+ String className, JavaFileObject.Kind kind, FileObject sibling) {
+
+ JavaClassAsBytes classAsBytes = new JavaClassAsBytes(className, kind);
+ compiledClasses.put(className, classAsBytes);
+
+ return classAsBytes;
+ }
+
+ public Map<String, JavaClassAsBytes> getBytesMap() {
+ return compiledClasses;
+ }
+
+ @Override
+ public ClassLoader getClassLoader(Location location) {
+ return loader;
+ }
+ }
+
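+ // Standard parent-first delegation applies; classes compiled in memory are resolved
+ // from the byte code map in findClass()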
+ static class InMemoryClassLoader extends ClassLoader {
+
+ private InMemoryFileManager manager;
+
+ InMemoryClassLoader(ClassLoader parentClassLoader, InMemoryFileManager manager) {
+ super(parentClassLoader);
+ this.manager = requireNonNull(manager, "manager must not be null");
+ }
+
+ @Override
+ protected Class<?> findClass(String name) throws ClassNotFoundException {
+
+ Map<String, JavaClassAsBytes> compiledClasses = manager.getBytesMap();
+
+ if (compiledClasses.containsKey(name)) {
+ byte[] bytes = compiledClasses.get(name).getBytes();
+ return defineClass(name, bytes, 0, bytes.length);
+ } else {
+ throw new ClassNotFoundException();
+ }
+ }
+
+ }
+
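+ // Presents a String of Java source as a JavaFileObject the compiler can read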
+ static class JavaSourceFromString extends SimpleJavaFileObject {
+
+
+ private String sourceCode;
+
+ JavaSourceFromString(String name, String sourceCode) {
+ super(URI.create("string:///" + name.replace('.', '/') + Kind.SOURCE.extension),
+ Kind.SOURCE);
+ this.sourceCode = requireNonNull(sourceCode, "sourceCode must not be null");
+ }
+
+ @Override
+ public CharSequence getCharContent(boolean ignoreEncodingErrors) {
+ return sourceCode;
+ }
+
+ }
+
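+ // Captures the compiled byte code in memory rather than writing a .class file to disk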
+ static class JavaClassAsBytes extends SimpleJavaFileObject {
+
+
+ protected ByteArrayOutputStream bos =
+ new ByteArrayOutputStream();
+
+ JavaClassAsBytes(String name, Kind kind) {
+ super(URI.create("string:///" + name.replace('.', '/')
+ + kind.extension), kind);
+ }
+
+ public byte[] getBytes() {
+ return bos.toByteArray();
+ }
+
+ @Override
+ public OutputStream openOutputStream() {
+ return bos;
+ }
+
+ }
+
+
+}
diff --git a/src/test/groovy/graphql/util/javac/DynamicJavacSupportTest.groovy b/src/test/groovy/graphql/util/javac/DynamicJavacSupportTest.groovy
new file mode 100644
index 0000000000..a07c4cbbd2
--- /dev/null
+++ b/src/test/groovy/graphql/util/javac/DynamicJavacSupportTest.groovy
@@ -0,0 +1,57 @@
+package graphql.util.javac
+
+import spock.lang.Specification
+
+class DynamicJavacSupportTest extends Specification {
+
+ String sourceCode = '''
+ package com.dynamic;
+ public class TestClass {
+ public String hello() {
+ return "world";
+ }
+ }
+ '''
+
+ def "can compile things without a parent class loader"() {
+
+ def javacSupport = new DynamicJavacSupport(null)
+
+ when:
+ def compiledClass = javacSupport.compile("com.dynamic.TestClass", sourceCode)
+ def instance = compiledClass.getDeclaredConstructor().newInstance()
+ def runCodeMethod = compiledClass.getMethod("hello")
+ def value = runCodeMethod.invoke(instance)
+
+ then:
+ value == "world"
+
+ // with a null parent class loader, this class loader should not be able to see this code
+ when:
+ compiledClass.getClassLoader().loadClass(this.getClass().getCanonicalName())
+ then:
+ thrown(ClassNotFoundException)
+ }
+
+
+ def "can compile things with a parent class loader"() {
+
+ def javacSupport = new DynamicJavacSupport(this.getClass().getClassLoader())
+
+ when:
+ def compiledClass = javacSupport.compile("com.dynamic.TestClass", sourceCode)
+ def instance = compiledClass.getDeclaredConstructor().newInstance()
+ def runCodeMethod = compiledClass.getMethod("hello")
+ def value = runCodeMethod.invoke(instance)
+
+ then:
+ noExceptionThrown()
+ value == "world"
+
+ // with a parent class loader, this class loader should be able to see this code
+ when:
+ def backToUs = compiledClass.getClassLoader().loadClass(this.getClass().getCanonicalName())
+ then:
+ backToUs === this.getClass()
+ }
+}
diff --git a/src/test/groovy/graphql/validation/rules/UniqueObjectFieldNameTest.groovy b/src/test/groovy/graphql/validation/rules/UniqueObjectFieldNameTest.groovy
new file mode 100644
index 0000000000..f59a4e7c07
--- /dev/null
+++ b/src/test/groovy/graphql/validation/rules/UniqueObjectFieldNameTest.groovy
@@ -0,0 +1,30 @@
+package graphql.validation.rules
+
+import graphql.parser.Parser
+import graphql.validation.Validator
+import spock.lang.Specification
+
+class UniqueObjectFieldNameTest extends Specification {
+
+ def 'Object Field Name Uniqueness Not Valid'() {
+ def query = """
+ query {
+ dogWithInput(leash: {
+ id: "foo"
+ id: "bar"
+ }) {
+ name
+ }
+ }
+ """
+ when:
+ def document = Parser.parse(query)
+ def validationErrors = new Validator().validateDocument(Harness.Schema, document, Locale.ENGLISH)
+
+ then:
+ !validationErrors.empty
+ validationErrors.size() == 1
+ validationErrors[0].message == "Validation Error (UniqueObjectFieldName@[dogWithInput]) : There can be only one field named 'id'"
+ }
+
+}
diff --git a/src/test/groovy/readme/InstrumentationExamples.java b/src/test/groovy/readme/InstrumentationExamples.java
index 3fa00e641d..fe27b6e161 100644
--- a/src/test/groovy/readme/InstrumentationExamples.java
+++ b/src/test/groovy/readme/InstrumentationExamples.java
@@ -78,14 +78,13 @@ public InstrumentationState createState() {
return new SimpleInstrumentationContext() {
@Override
public void onCompleted(ExecutionResult result, Throwable t) {
- CustomInstrumentationState state = parameters.getInstrumentationState();
- state.recordTiming(parameters.getQuery(), System.nanoTime() - startNanos);
+ ((CustomInstrumentationState) state).recordTiming(parameters.getQuery(), System.nanoTime() - startNanos);
}
};
}
@Override
- public @NotNull DataFetcher<?> instrumentDataFetcher(DataFetcher<?> dataFetcher, InstrumentationFieldFetchParameters parameters) {
+ public @NotNull DataFetcher<?> instrumentDataFetcher(DataFetcher<?> dataFetcher, InstrumentationFieldFetchParameters parameters, InstrumentationState state) {
//
// this allows you to intercept the data fetcher used to fetch a field and provide another one, perhaps
// that enforces certain behaviours or has certain side effects on the data
@@ -94,9 +93,9 @@ public void onCompleted(ExecutionResult result, Throwable t) {
}
@Override
- public @NotNull CompletableFuture<ExecutionResult> instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters) {
+ public @NotNull CompletableFuture<ExecutionResult> instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters, InstrumentationState state) {
//
- // this allows you to instrument the execution result some how. For example the Tracing support uses this to put
+ // this allows you to instrument the execution result somehow. For example the Tracing support uses this to put
// the `extensions` map of data in place
//
return CompletableFuture.completedFuture(executionResult);