diff --git a/.github/workflows/invoke_test_runner.yml b/.github/workflows/invoke_test_runner.yml index d14afc17e7..d2d495fd42 100644 --- a/.github/workflows/invoke_test_runner.yml +++ b/.github/workflows/invoke_test_runner.yml @@ -50,7 +50,7 @@ jobs: - id: 'auth' name: 'Authenticate to Google Cloud' - uses: google-github-actions/auth@v1.0.0 + uses: google-github-actions/auth@v1.1.1 with: credentials_json: ${{ secrets.GOOGLE_APPLICATION_CREDENTIALS }} diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index c35ea0eee0..deb7678d26 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -15,9 +15,10 @@ jobs: steps: - uses: actions/checkout@v3 - uses: gradle/wrapper-validation-action@v1 - - name: Set up JDK 1.8 - uses: actions/setup-java@v1 + - name: Set up JDK 11 + uses: actions/setup-java@v3 with: - java-version: '8.0.282' + java-version: '11' + distribution: 'corretto' - name: build test and publish run: ./gradlew assemble && ./gradlew check --info && ./gradlew publishToSonatype closeAndReleaseSonatypeStagingRepository -x check --info --stacktrace diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 39e84200df..006a2c915d 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -7,6 +7,7 @@ on: pull_request: branches: - master + - 20.x - 19.x - 18.x - 17.x @@ -16,9 +17,10 @@ jobs: steps: - uses: actions/checkout@v3 - uses: gradle/wrapper-validation-action@v1 - - name: Set up JDK 1.8 - uses: actions/setup-java@v1 + - name: Set up JDK 11 + uses: actions/setup-java@v3 with: - java-version: '8.0.282' + java-version: '11' + distribution: 'corretto' - name: build and test run: ./gradlew assemble && ./gradlew check --info --stacktrace diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0a855a32e3..6de60e8c86 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -19,9 +19,10 @@ jobs: steps: - uses: 
actions/checkout@v3 - uses: gradle/wrapper-validation-action@v1 - - name: Set up JDK 1.8 - uses: actions/setup-java@v1 + - name: Set up JDK 11 + uses: actions/setup-java@v3 with: - java-version: '8.0.282' + java-version: '11' + distribution: 'corretto' - name: build test and publish run: ./gradlew assemble && ./gradlew check --info && ./gradlew publishToSonatype closeAndReleaseSonatypeStagingRepository -x check --info --stacktrace diff --git a/build.gradle b/build.gradle index 1a9de91edd..d545b92572 100644 --- a/build.gradle +++ b/build.gradle @@ -7,16 +7,16 @@ plugins { id 'maven-publish' id 'antlr' id 'signing' - id "com.github.johnrengelman.shadow" version "7.1.2" - id "biz.aQute.bnd.builder" version "6.3.1" - id "io.github.gradle-nexus.publish-plugin" version "1.1.0" + id "com.github.johnrengelman.shadow" version "8.1.1" + id "biz.aQute.bnd.builder" version "6.4.0" + id "io.github.gradle-nexus.publish-plugin" version "1.3.0" id "groovy" - id "me.champeau.jmh" version "0.6.6" + id "me.champeau.jmh" version "0.7.1" } java { toolchain { - languageVersion = JavaLanguageVersion.of(8) + languageVersion = JavaLanguageVersion.of(11) } } @@ -47,9 +47,9 @@ def getDevelopmentVersion() { } def reactiveStreamsVersion = '1.0.3' -def slf4jVersion = '1.7.35' +def slf4jVersion = '2.0.7' def releaseVersion = System.env.RELEASE_VERSION -def antlrVersion = '4.9.3' // https://mvnrepository.com/artifact/org.antlr/antlr4-runtime +def antlrVersion = '4.11.1' // https://mvnrepository.com/artifact/org.antlr/antlr4-runtime version = releaseVersion ? 
releaseVersion : getDevelopmentVersion() group = 'com.graphql-java' @@ -89,26 +89,27 @@ dependencies { api 'com.graphql-java:java-dataloader:3.2.0' api 'org.reactivestreams:reactive-streams:' + reactiveStreamsVersion antlr 'org.antlr:antlr4:' + antlrVersion - implementation 'com.google.guava:guava:31.0.1-jre' + implementation 'com.google.guava:guava:32.1.1-jre' testImplementation group: 'junit', name: 'junit', version: '4.13.2' testImplementation 'org.spockframework:spock-core:2.0-groovy-3.0' - testImplementation 'org.codehaus.groovy:groovy:3.0.16' - testImplementation 'com.google.code.gson:gson:2.8.9' - testImplementation 'org.eclipse.jetty:jetty-server:9.4.26.v20200117' - testImplementation 'com.fasterxml.jackson.core:jackson-databind:2.13.1' + testImplementation 'org.codehaus.groovy:groovy:3.0.18' + testImplementation 'org.codehaus.groovy:groovy-json:3.0.18' + testImplementation 'com.google.code.gson:gson:2.10.1' + testImplementation 'org.eclipse.jetty:jetty-server:11.0.15' + testImplementation 'com.fasterxml.jackson.core:jackson-databind:2.15.2' testImplementation 'org.slf4j:slf4j-simple:' + slf4jVersion - testImplementation 'org.awaitility:awaitility-groovy:3.1.6' - testImplementation 'com.github.javafaker:javafaker:0.13' + testImplementation 'org.awaitility:awaitility-groovy:4.2.0' + testImplementation 'com.github.javafaker:javafaker:1.0.2' testImplementation 'org.reactivestreams:reactive-streams-tck:' + reactiveStreamsVersion testImplementation "io.reactivex.rxjava2:rxjava:2.2.21" - testImplementation 'org.testng:testng:6.1.1' // use for reactive streams test inheritance + testImplementation 'org.testng:testng:7.8.0' // use for reactive streams test inheritance - testImplementation 'org.openjdk.jmh:jmh-core:1.35' - testAnnotationProcessor 'org.openjdk.jmh:jmh-generator-annprocess:1.35' - jmh 'org.openjdk.jmh:jmh-core:1.35' - jmh 'org.openjdk.jmh:jmh-generator-annprocess:1.35' + testImplementation 'org.openjdk.jmh:jmh-core:1.36' + testAnnotationProcessor 
'org.openjdk.jmh:jmh-generator-annprocess:1.36' + jmh 'org.openjdk.jmh:jmh-core:1.36' + jmh 'org.openjdk.jmh:jmh-generator-annprocess:1.36' } shadowJar { @@ -123,7 +124,7 @@ shadowJar { } relocate('org.antlr.v4.runtime', 'graphql.org.antlr.v4.runtime') dependencies { - include(dependency('com.google.guava:guava:31.0.1-jre')) + include(dependency('com.google.guava:guava:32.1.1-jre')) include(dependency('org.antlr:antlr4-runtime:' + antlrVersion)) } from "LICENSE.md" @@ -165,6 +166,8 @@ task extractWithoutGuava(type: Copy) { into layout.buildDirectory.dir("extract") } +extractWithoutGuava.dependsOn jar + task buildNewJar(type: Jar) { from layout.buildDirectory.dir("extract") archiveFileName = "graphql-java-tmp.jar" @@ -178,6 +181,8 @@ task buildNewJar(type: Jar) { } } +buildNewJar.dependsOn extractWithoutGuava + shadowJar.finalizedBy extractWithoutGuava, buildNewJar @@ -202,12 +207,12 @@ generateGrammarSource.inputs.dir('src/main/antlr') task sourcesJar(type: Jar) { dependsOn classes - classifier 'sources' + archiveClassifier = 'sources' from sourceSets.main.allSource } task javadocJar(type: Jar, dependsOn: javadoc) { - classifier = 'javadoc' + archiveClassifier = 'javadoc' from javadoc.destinationDir } @@ -241,7 +246,7 @@ test { * * See https://github.com/gradle/gradle/issues/20151 */ -gradle. 
buildFinished { +gradle.buildFinished { if (!failedTests.isEmpty()) { println "\n\n" println "============================" @@ -270,10 +275,10 @@ publishing { from components.java artifact sourcesJar { - classifier "sources" + archiveClassifier = "sources" } artifact javadocJar { - classifier "javadoc" + archiveClassifier = "javadoc" } pom.withXml { // Removing antlr4 below (introduced in `1ac98bf`) addresses an issue with diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 7454180f2a..ccebba7710 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 070cb702f0..bdc9a83b1e 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.0.2-bin.zip +networkTimeout=10000 zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew index 1b6c787337..79a61d421c 100755 --- a/gradlew +++ b/gradlew @@ -55,7 +55,7 @@ # Darwin, MinGW, and NonStop. # # (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt # within the Gradle project. # # You can find Gradle at https://github.com/gradle/gradle/. 
@@ -80,10 +80,10 @@ do esac done -APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit - -APP_NAME="Gradle" +# This is normally unused +# shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' @@ -143,12 +143,16 @@ fi if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then case $MAX_FD in #( max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 MAX_FD=$( ulimit -H -n ) || warn "Could not query maximum file descriptor limit" esac case $MAX_FD in #( '' | soft) :;; #( *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 ulimit -n "$MAX_FD" || warn "Could not set maximum file descriptor limit to $MAX_FD" esac @@ -205,6 +209,12 @@ set -- \ org.gradle.wrapper.GradleWrapperMain \ "$@" +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + # Use "xargs" to parse quoted args. # # With -n1 it outputs one arg per line, with the quotes and backslashes removed. diff --git a/gradlew.bat b/gradlew.bat index ac1b06f938..6689b85bee 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -14,7 +14,7 @@ @rem limitations under the License. @rem -@if "%DEBUG%" == "" @echo off +@if "%DEBUG%"=="" @echo off @rem ########################################################################## @rem @rem Gradle startup script for Windows @@ -25,7 +25,8 @@ if "%OS%"=="Windows_NT" setlocal set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. +if "%DIRNAME%"=="" set DIRNAME=. 
+@rem This is normally unused set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @@ -40,7 +41,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto execute +if %ERRORLEVEL% equ 0 goto execute echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. @@ -75,13 +76,15 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar :end @rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd +if %ERRORLEVEL% equ 0 goto mainEnd :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem the _cmd.exe /c_ return code! -if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% :mainEnd if "%OS%"=="Windows_NT" endlocal diff --git a/src/main/java/graphql/ExecutionInput.java b/src/main/java/graphql/ExecutionInput.java index 4cace2b7b5..f924aab7e4 100644 --- a/src/main/java/graphql/ExecutionInput.java +++ b/src/main/java/graphql/ExecutionInput.java @@ -1,6 +1,5 @@ package graphql; -import graphql.cachecontrol.CacheControl; import graphql.collect.ImmutableKit; import graphql.execution.ExecutionId; import graphql.execution.RawVariables; @@ -28,7 +27,6 @@ public class ExecutionInput { private final RawVariables rawVariables; private final Map extensions; private final DataLoaderRegistry dataLoaderRegistry; - private final CacheControl cacheControl; private final ExecutionId executionId; private final Locale locale; @@ -42,7 +40,6 @@ private ExecutionInput(Builder builder) { this.root = builder.root; this.rawVariables = builder.rawVariables; this.dataLoaderRegistry = builder.dataLoaderRegistry; - this.cacheControl = builder.cacheControl; this.executionId = builder.executionId; this.locale = builder.locale != null ? 
builder.locale : Locale.getDefault(); // always have a locale in place this.localContext = builder.localContext; @@ -119,16 +116,6 @@ public DataLoaderRegistry getDataLoaderRegistry() { return dataLoaderRegistry; } - /** - * @return the cache control helper associated with this execution - * - * @deprecated - Apollo has deprecated the Cache Control specification - */ - @Deprecated - @DeprecatedAt("2022-07-26") - public CacheControl getCacheControl() { - return cacheControl; - } /** * @return Id that will be/was used to execute this operation. @@ -170,7 +157,6 @@ public ExecutionInput transform(Consumer builderConsumer) { .localContext(this.localContext) .root(this.root) .dataLoaderRegistry(this.dataLoaderRegistry) - .cacheControl(this.cacheControl) .variables(this.rawVariables.toMap()) .extensions(this.extensions) .executionId(this.executionId) @@ -229,8 +215,6 @@ public static class Builder { // dataloader field tracking away. // private DataLoaderRegistry dataLoaderRegistry = DataLoaderDispatcherInstrumentationState.EMPTY_DATALOADER_REGISTRY; - @DeprecatedAt("2022-07-26") - private CacheControl cacheControl = CacheControl.newCacheControl(); private Locale locale = Locale.getDefault(); private ExecutionId executionId; @@ -399,13 +383,6 @@ public Builder dataLoaderRegistry(DataLoaderRegistry dataLoaderRegistry) { return this; } - @Deprecated - @DeprecatedAt("2022-07-26") - public Builder cacheControl(CacheControl cacheControl) { - this.cacheControl = assertNotNull(cacheControl); - return this; - } - public ExecutionInput build() { return new ExecutionInput(this); } diff --git a/src/main/java/graphql/GraphQL.java b/src/main/java/graphql/GraphQL.java index 435ed82935..5b49617255 100644 --- a/src/main/java/graphql/GraphQL.java +++ b/src/main/java/graphql/GraphQL.java @@ -418,14 +418,13 @@ public CompletableFuture executeAsync(UnaryOperator executeAsync(ExecutionInput executionInput) { - try { - if (logNotSafe.isDebugEnabled()) { - logNotSafe.debug("Executing request. 
operation name: '{}'. query: '{}'. variables '{}'", executionInput.getOperationName(), executionInput.getQuery(), executionInput.getVariables()); - } - executionInput = ensureInputHasId(executionInput); - - InstrumentationState instrumentationState = instrumentation.createState(new InstrumentationCreateStateParameters(this.graphQLSchema, executionInput)); + if (logNotSafe.isDebugEnabled()) { + logNotSafe.debug("Executing request. operation name: '{}'. query: '{}'. variables '{}'", executionInput.getOperationName(), executionInput.getQuery(), executionInput.getVariables()); + } + executionInput = ensureInputHasId(executionInput); + InstrumentationState instrumentationState = instrumentation.createState(new InstrumentationCreateStateParameters(this.graphQLSchema, executionInput)); + try { InstrumentationExecutionParameters inputInstrumentationParameters = new InstrumentationExecutionParameters(executionInput, this.graphQLSchema, instrumentationState); executionInput = instrumentation.instrumentExecutionInput(executionInput, inputInstrumentationParameters, instrumentationState); @@ -445,10 +444,19 @@ public CompletableFuture executeAsync(ExecutionInput executionI executionResult = executionResult.thenCompose(result -> instrumentation.instrumentExecutionResult(result, instrumentationParameters, instrumentationState)); return executionResult; } catch (AbortExecutionException abortException) { - return CompletableFuture.completedFuture(abortException.toExecutionResult()); + return handleAbortException(executionInput, instrumentationState, abortException); } } + private CompletableFuture handleAbortException(ExecutionInput executionInput, InstrumentationState instrumentationState, AbortExecutionException abortException) { + CompletableFuture executionResult = CompletableFuture.completedFuture(abortException.toExecutionResult()); + InstrumentationExecutionParameters instrumentationParameters = new InstrumentationExecutionParameters(executionInput, this.graphQLSchema, 
instrumentationState); + // + // allow instrumentation to tweak the result + executionResult = executionResult.thenCompose(result -> instrumentation.instrumentExecutionResult(result, instrumentationParameters, instrumentationState)); + return executionResult; + } + private ExecutionInput ensureInputHasId(ExecutionInput executionInput) { if (executionInput.getExecutionId() != null) { return executionInput; diff --git a/src/main/java/graphql/analysis/MaxQueryComplexityInstrumentation.java b/src/main/java/graphql/analysis/MaxQueryComplexityInstrumentation.java index 4b055d4fd4..87a00e976a 100644 --- a/src/main/java/graphql/analysis/MaxQueryComplexityInstrumentation.java +++ b/src/main/java/graphql/analysis/MaxQueryComplexityInstrumentation.java @@ -15,16 +15,13 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.LinkedHashMap; import java.util.List; -import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import static graphql.Assert.assertNotNull; import static graphql.execution.instrumentation.InstrumentationState.ofState; import static graphql.execution.instrumentation.SimpleInstrumentationContext.noOp; -import static java.util.Optional.ofNullable; /** * Prevents execution if the query complexity is greater than the specified maxComplexity. 
@@ -101,21 +98,8 @@ public InstrumentationState createState(InstrumentationCreateStateParameters par @Override public @Nullable InstrumentationContext beginExecuteOperation(InstrumentationExecuteOperationParameters instrumentationExecuteOperationParameters, InstrumentationState rawState) { State state = ofState(rawState); - QueryTraverser queryTraverser = newQueryTraverser(instrumentationExecuteOperationParameters.getExecutionContext()); - - Map valuesByParent = new LinkedHashMap<>(); - queryTraverser.visitPostOrder(new QueryVisitorStub() { - @Override - public void visitField(QueryVisitorFieldEnvironment env) { - int childComplexity = valuesByParent.getOrDefault(env, 0); - int value = calculateComplexity(env, childComplexity); - - valuesByParent.compute(env.getParentEnvironment(), (key, oldValue) -> - ofNullable(oldValue).orElse(0) + value - ); - } - }); - int totalComplexity = valuesByParent.getOrDefault(null, 0); + QueryComplexityCalculator queryComplexityCalculator = newQueryComplexityCalculator(instrumentationExecuteOperationParameters.getExecutionContext()); + int totalComplexity = queryComplexityCalculator.calculate(); if (log.isDebugEnabled()) { log.debug("Query complexity: {}", totalComplexity); } @@ -133,6 +117,16 @@ public void visitField(QueryVisitorFieldEnvironment env) { return noOp(); } + private QueryComplexityCalculator newQueryComplexityCalculator(ExecutionContext executionContext) { + return QueryComplexityCalculator.newCalculator() + .fieldComplexityCalculator(fieldComplexityCalculator) + .schema(executionContext.getGraphQLSchema()) + .document(executionContext.getDocument()) + .operationName(executionContext.getExecutionInput().getOperationName()) + .variables(executionContext.getCoercedVariables()) + .build(); + } + /** * Called to generate your own error message or custom exception class * @@ -145,37 +139,6 @@ protected AbortExecutionException mkAbortException(int totalComplexity, int maxC return new AbortExecutionException("maximum query 
complexity exceeded " + totalComplexity + " > " + maxComplexity); } - QueryTraverser newQueryTraverser(ExecutionContext executionContext) { - return QueryTraverser.newQueryTraverser() - .schema(executionContext.getGraphQLSchema()) - .document(executionContext.getDocument()) - .operationName(executionContext.getExecutionInput().getOperationName()) - .coercedVariables(executionContext.getCoercedVariables()) - .build(); - } - - private int calculateComplexity(QueryVisitorFieldEnvironment queryVisitorFieldEnvironment, int childComplexity) { - if (queryVisitorFieldEnvironment.isTypeNameIntrospectionField()) { - return 0; - } - FieldComplexityEnvironment fieldComplexityEnvironment = convertEnv(queryVisitorFieldEnvironment); - return fieldComplexityCalculator.calculate(fieldComplexityEnvironment, childComplexity); - } - - private FieldComplexityEnvironment convertEnv(QueryVisitorFieldEnvironment queryVisitorFieldEnvironment) { - FieldComplexityEnvironment parentEnv = null; - if (queryVisitorFieldEnvironment.getParentEnvironment() != null) { - parentEnv = convertEnv(queryVisitorFieldEnvironment.getParentEnvironment()); - } - return new FieldComplexityEnvironment( - queryVisitorFieldEnvironment.getField(), - queryVisitorFieldEnvironment.getFieldDefinition(), - queryVisitorFieldEnvironment.getFieldsContainer(), - queryVisitorFieldEnvironment.getArguments(), - parentEnv - ); - } - private static class State implements InstrumentationState { AtomicReference instrumentationValidationParameters = new AtomicReference<>(); } diff --git a/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java b/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java index 7b65c8e9ea..972a6f8e9c 100644 --- a/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java +++ b/src/main/java/graphql/analysis/NodeVisitorWithTypeTracking.java @@ -3,8 +3,8 @@ import graphql.GraphQLContext; import graphql.Internal; import graphql.execution.CoercedVariables; -import 
graphql.execution.ConditionalNodes; import graphql.execution.ValuesResolver; +import graphql.execution.conditional.ConditionalNodes; import graphql.introspection.Introspection; import graphql.language.Argument; import graphql.language.Directive; @@ -68,7 +68,9 @@ public TraversalControl visitDirective(Directive node, TraverserContext co @Override public TraversalControl visitInlineFragment(InlineFragment inlineFragment, TraverserContext context) { - if (!conditionalNodes.shouldInclude(variables, inlineFragment.getDirectives())) { + QueryTraversalContext parentEnv = context.getVarFromParents(QueryTraversalContext.class); + GraphQLContext graphQLContext = parentEnv.getGraphQLContext(); + if (!conditionalNodes.shouldInclude(inlineFragment, variables, null, graphQLContext)) { return TraversalControl.ABORT; } @@ -82,7 +84,6 @@ public TraversalControl visitInlineFragment(InlineFragment inlineFragment, Trave preOrderCallback.visitInlineFragment(inlineFragmentEnvironment); // inline fragments are allowed not have type conditions, if so the parent type counts - QueryTraversalContext parentEnv = context.getVarFromParents(QueryTraversalContext.class); GraphQLCompositeType fragmentCondition; if (inlineFragment.getTypeCondition() != null) { @@ -92,17 +93,19 @@ public TraversalControl visitInlineFragment(InlineFragment inlineFragment, Trave fragmentCondition = parentEnv.getUnwrappedOutputType(); } // for unions we only have other fragments inside - context.setVar(QueryTraversalContext.class, new QueryTraversalContext(fragmentCondition, parentEnv.getEnvironment(), inlineFragment)); + context.setVar(QueryTraversalContext.class, new QueryTraversalContext(fragmentCondition, parentEnv.getEnvironment(), inlineFragment, graphQLContext)); return TraversalControl.CONTINUE; } @Override - public TraversalControl visitFragmentDefinition(FragmentDefinition node, TraverserContext context) { - if (!conditionalNodes.shouldInclude(variables, node.getDirectives())) { + public TraversalControl 
visitFragmentDefinition(FragmentDefinition fragmentDefinition, TraverserContext context) { + QueryTraversalContext parentEnv = context.getVarFromParents(QueryTraversalContext.class); + GraphQLContext graphQLContext = parentEnv.getGraphQLContext(); + if (!conditionalNodes.shouldInclude(fragmentDefinition, variables, null, graphQLContext)) { return TraversalControl.ABORT; } - QueryVisitorFragmentDefinitionEnvironment fragmentEnvironment = new QueryVisitorFragmentDefinitionEnvironmentImpl(node, context, schema); + QueryVisitorFragmentDefinitionEnvironment fragmentEnvironment = new QueryVisitorFragmentDefinitionEnvironmentImpl(fragmentDefinition, context, schema); if (context.getPhase() == LEAVE) { postOrderCallback.visitFragmentDefinition(fragmentEnvironment); @@ -110,20 +113,21 @@ public TraversalControl visitFragmentDefinition(FragmentDefinition node, Travers } preOrderCallback.visitFragmentDefinition(fragmentEnvironment); - QueryTraversalContext parentEnv = context.getVarFromParents(QueryTraversalContext.class); - GraphQLCompositeType typeCondition = (GraphQLCompositeType) schema.getType(node.getTypeCondition().getName()); - context.setVar(QueryTraversalContext.class, new QueryTraversalContext(typeCondition, parentEnv.getEnvironment(), node)); + GraphQLCompositeType typeCondition = (GraphQLCompositeType) schema.getType(fragmentDefinition.getTypeCondition().getName()); + context.setVar(QueryTraversalContext.class, new QueryTraversalContext(typeCondition, parentEnv.getEnvironment(), fragmentDefinition, graphQLContext)); return TraversalControl.CONTINUE; } @Override public TraversalControl visitFragmentSpread(FragmentSpread fragmentSpread, TraverserContext context) { - if (!conditionalNodes.shouldInclude(variables, fragmentSpread.getDirectives())) { + QueryTraversalContext parentEnv = context.getVarFromParents(QueryTraversalContext.class); + GraphQLContext graphQLContext = parentEnv.getGraphQLContext(); + if (!conditionalNodes.shouldInclude(fragmentSpread, variables, 
null, graphQLContext)) { return TraversalControl.ABORT; } FragmentDefinition fragmentDefinition = fragmentsByName.get(fragmentSpread.getName()); - if (!conditionalNodes.shouldInclude(variables, fragmentDefinition.getDirectives())) { + if (!conditionalNodes.shouldInclude(fragmentDefinition, variables, null, graphQLContext)) { return TraversalControl.ABORT; } @@ -135,19 +139,19 @@ public TraversalControl visitFragmentSpread(FragmentSpread fragmentSpread, Trave preOrderCallback.visitFragmentSpread(fragmentSpreadEnvironment); - QueryTraversalContext parentEnv = context.getVarFromParents(QueryTraversalContext.class); GraphQLCompositeType typeCondition = (GraphQLCompositeType) schema.getType(fragmentDefinition.getTypeCondition().getName()); assertNotNull(typeCondition, () -> format("Invalid type condition '%s' in fragment '%s'", fragmentDefinition.getTypeCondition().getName(), fragmentDefinition.getName())); - context.setVar(QueryTraversalContext.class, new QueryTraversalContext(typeCondition, parentEnv.getEnvironment(), fragmentDefinition)); + context.setVar(QueryTraversalContext.class, new QueryTraversalContext(typeCondition, parentEnv.getEnvironment(), fragmentDefinition, graphQLContext)); return TraversalControl.CONTINUE; } @Override public TraversalControl visitField(Field field, TraverserContext context) { QueryTraversalContext parentEnv = context.getVarFromParents(QueryTraversalContext.class); + GraphQLContext graphQLContext = parentEnv.getGraphQLContext(); GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDef(schema, (GraphQLCompositeType) unwrapAll(parentEnv.getOutputType()), field.getName()); boolean isTypeNameIntrospectionField = fieldDefinition == schema.getIntrospectionTypenameFieldDefinition(); @@ -174,7 +178,7 @@ public TraversalControl visitField(Field field, TraverserContext context) return TraversalControl.CONTINUE; } - if (!conditionalNodes.shouldInclude(variables, field.getDirectives())) { + if (!conditionalNodes.shouldInclude(field, 
variables, null, graphQLContext)) { return TraversalControl.ABORT; } @@ -182,8 +186,8 @@ public TraversalControl visitField(Field field, TraverserContext context) GraphQLUnmodifiedType unmodifiedType = unwrapAll(fieldDefinition.getType()); QueryTraversalContext fieldEnv = (unmodifiedType instanceof GraphQLCompositeType) - ? new QueryTraversalContext(fieldDefinition.getType(), environment, field) - : new QueryTraversalContext(null, environment, field);// Terminal (scalar) node, EMPTY FRAME + ? new QueryTraversalContext(fieldDefinition.getType(), environment, field, graphQLContext) + : new QueryTraversalContext(null, environment, field, graphQLContext);// Terminal (scalar) node, EMPTY FRAME context.setVar(QueryTraversalContext.class, fieldEnv); diff --git a/src/main/java/graphql/analysis/QueryComplexityCalculator.java b/src/main/java/graphql/analysis/QueryComplexityCalculator.java new file mode 100644 index 0000000000..99efe1fcfa --- /dev/null +++ b/src/main/java/graphql/analysis/QueryComplexityCalculator.java @@ -0,0 +1,134 @@ +package graphql.analysis; + +import graphql.PublicApi; +import graphql.execution.CoercedVariables; +import graphql.language.Document; +import graphql.schema.GraphQLSchema; + +import java.util.LinkedHashMap; +import java.util.Map; + +import static graphql.Assert.assertNotNull; +import static java.util.Optional.ofNullable; + +/** + * This can calculate the complexity of an operation using the specified {@link FieldComplexityCalculator} you pass + * into it. 
+ */ +@PublicApi +public class QueryComplexityCalculator { + + private final FieldComplexityCalculator fieldComplexityCalculator; + private final GraphQLSchema schema; + private final Document document; + private final String operationName; + private final CoercedVariables variables; + + public QueryComplexityCalculator(Builder builder) { + this.fieldComplexityCalculator = assertNotNull(builder.fieldComplexityCalculator, () -> "fieldComplexityCalculator can't be null"); + this.schema = assertNotNull(builder.schema, () -> "schema can't be null"); + this.document = assertNotNull(builder.document, () -> "document can't be null"); + this.variables = assertNotNull(builder.variables, () -> "variables can't be null"); + this.operationName = builder.operationName; + } + + + public int calculate() { + Map valuesByParent = calculateByParents(); + return valuesByParent.getOrDefault(null, 0); + } + + /** + * @return a map that shows the field complexity for each field level in the operation + */ + public Map calculateByParents() { + QueryTraverser queryTraverser = QueryTraverser.newQueryTraverser() + .schema(this.schema) + .document(this.document) + .operationName(this.operationName) + .coercedVariables(this.variables) + .build(); + + + Map valuesByParent = new LinkedHashMap<>(); + queryTraverser.visitPostOrder(new QueryVisitorStub() { + @Override + public void visitField(QueryVisitorFieldEnvironment env) { + int childComplexity = valuesByParent.getOrDefault(env, 0); + int value = calculateComplexity(env, childComplexity); + + QueryVisitorFieldEnvironment parentEnvironment = env.getParentEnvironment(); + valuesByParent.compute(parentEnvironment, (key, oldValue) -> { + Integer currentValue = ofNullable(oldValue).orElse(0); + return currentValue + value; + } + ); + } + }); + + return valuesByParent; + } + + private int calculateComplexity(QueryVisitorFieldEnvironment queryVisitorFieldEnvironment, int childComplexity) { + if 
(queryVisitorFieldEnvironment.isTypeNameIntrospectionField()) { + return 0; + } + FieldComplexityEnvironment fieldComplexityEnvironment = convertEnv(queryVisitorFieldEnvironment); + return fieldComplexityCalculator.calculate(fieldComplexityEnvironment, childComplexity); + } + + private FieldComplexityEnvironment convertEnv(QueryVisitorFieldEnvironment queryVisitorFieldEnvironment) { + FieldComplexityEnvironment parentEnv = null; + if (queryVisitorFieldEnvironment.getParentEnvironment() != null) { + parentEnv = convertEnv(queryVisitorFieldEnvironment.getParentEnvironment()); + } + return new FieldComplexityEnvironment( + queryVisitorFieldEnvironment.getField(), + queryVisitorFieldEnvironment.getFieldDefinition(), + queryVisitorFieldEnvironment.getFieldsContainer(), + queryVisitorFieldEnvironment.getArguments(), + parentEnv + ); + } + + public static Builder newCalculator() { + return new Builder(); + } + + public static class Builder { + private FieldComplexityCalculator fieldComplexityCalculator; + private GraphQLSchema schema; + private Document document; + private String operationName; + private CoercedVariables variables = CoercedVariables.emptyVariables(); + + public Builder schema(GraphQLSchema graphQLSchema) { + this.schema = graphQLSchema; + return this; + } + + public Builder fieldComplexityCalculator(FieldComplexityCalculator complexityCalculator) { + this.fieldComplexityCalculator = complexityCalculator; + return this; + } + + public Builder document(Document document) { + this.document = document; + return this; + } + + public Builder operationName(String operationName) { + this.operationName = operationName; + return this; + } + + public Builder variables(CoercedVariables variables) { + this.variables = variables; + return this; + } + + public QueryComplexityCalculator build() { + return new QueryComplexityCalculator(this); + } + } +} \ No newline at end of file diff --git a/src/main/java/graphql/analysis/QueryTransformer.java 
b/src/main/java/graphql/analysis/QueryTransformer.java index 35c840bb04..9c45902dae 100644 --- a/src/main/java/graphql/analysis/QueryTransformer.java +++ b/src/main/java/graphql/analysis/QueryTransformer.java @@ -1,5 +1,6 @@ package graphql.analysis; +import graphql.GraphQLContext; import graphql.PublicApi; import graphql.language.FragmentDefinition; import graphql.language.Node; @@ -67,7 +68,7 @@ public Node transform(QueryVisitor queryVisitor) { NodeVisitorWithTypeTracking nodeVisitor = new NodeVisitorWithTypeTracking(queryVisitor, noOp, variables, schema, fragmentsByName); Map, Object> rootVars = new LinkedHashMap<>(); - rootVars.put(QueryTraversalContext.class, new QueryTraversalContext(rootParentType, null, null)); + rootVars.put(QueryTraversalContext.class, new QueryTraversalContext(rootParentType, null, null, GraphQLContext.getDefault())); TraverserVisitor nodeTraverserVisitor = new TraverserVisitor() { diff --git a/src/main/java/graphql/analysis/QueryTraversalContext.java b/src/main/java/graphql/analysis/QueryTraversalContext.java index de591141cc..8fc02fd582 100644 --- a/src/main/java/graphql/analysis/QueryTraversalContext.java +++ b/src/main/java/graphql/analysis/QueryTraversalContext.java @@ -1,5 +1,6 @@ package graphql.analysis; +import graphql.GraphQLContext; import graphql.Internal; import graphql.language.SelectionSetContainer; import graphql.schema.GraphQLCompositeType; @@ -16,14 +17,17 @@ class QueryTraversalContext { // never used for scalars/enums, always a possibly wrapped composite type private final GraphQLOutputType outputType; private final QueryVisitorFieldEnvironment environment; - private final SelectionSetContainer selectionSetContainer; + private final SelectionSetContainer selectionSetContainer; + private final GraphQLContext graphQLContext; QueryTraversalContext(GraphQLOutputType outputType, QueryVisitorFieldEnvironment environment, - SelectionSetContainer selectionSetContainer) { + SelectionSetContainer selectionSetContainer, + 
GraphQLContext graphQLContext) { this.outputType = outputType; this.environment = environment; this.selectionSetContainer = selectionSetContainer; + this.graphQLContext = graphQLContext; } public GraphQLOutputType getOutputType() { @@ -34,13 +38,15 @@ public GraphQLCompositeType getUnwrappedOutputType() { return (GraphQLCompositeType) GraphQLTypeUtil.unwrapAll(outputType); } - public QueryVisitorFieldEnvironment getEnvironment() { return environment; } - public SelectionSetContainer getSelectionSetContainer() { - + public SelectionSetContainer getSelectionSetContainer() { return selectionSetContainer; } + + public GraphQLContext getGraphQLContext() { + return graphQLContext; + } } diff --git a/src/main/java/graphql/analysis/QueryTraverser.java b/src/main/java/graphql/analysis/QueryTraverser.java index 14d873f599..0ec067595b 100644 --- a/src/main/java/graphql/analysis/QueryTraverser.java +++ b/src/main/java/graphql/analysis/QueryTraverser.java @@ -177,7 +177,7 @@ private List childrenOf(Node node) { private Object visitImpl(QueryVisitor visitFieldCallback, Boolean preOrder) { Map, Object> rootVars = new LinkedHashMap<>(); - rootVars.put(QueryTraversalContext.class, new QueryTraversalContext(rootParentType, null, null)); + rootVars.put(QueryTraversalContext.class, new QueryTraversalContext(rootParentType, null, null, GraphQLContext.getDefault())); QueryVisitor preOrderCallback; QueryVisitor postOrderCallback; diff --git a/src/main/java/graphql/cachecontrol/CacheControl.java b/src/main/java/graphql/cachecontrol/CacheControl.java deleted file mode 100644 index 7b12b2fb0d..0000000000 --- a/src/main/java/graphql/cachecontrol/CacheControl.java +++ /dev/null @@ -1,222 +0,0 @@ -package graphql.cachecontrol; - -import graphql.DeprecatedAt; -import graphql.ExecutionInput; -import graphql.ExecutionResult; -import graphql.PublicApi; -import graphql.execution.ResultPath; -import graphql.schema.DataFetchingEnvironment; - -import java.util.LinkedHashMap; -import java.util.List; 
-import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; - -import static graphql.Assert.assertNotEmpty; -import static graphql.Assert.assertNotNull; -import static graphql.collect.ImmutableKit.map; - -/** - * Apollo has deprecated their Cache Control specification https://github.com/apollographql/apollo-cache-control - * This has been deprecated/removed from Apollo some time. - * Apollo now provides an alternative approach via the @cacheControl directive https://www.apollographql.com/docs/apollo-server/performance/caching/ - * We are deprecating CacheControl inside graphql-java and this will be deleted in a future release. - * - * This class implements the graphql Cache Control specification as outlined in https://github.com/apollographql/apollo-cache-control - *

- * To best use this class you need to pass a CacheControl object to each {@link graphql.schema.DataFetcher} and have them decide on - * the caching hint values. - *

- * The easiest way to do this is create a CacheControl object at query start and pass it in as a "context" object via {@link ExecutionInput#getGraphQLContext()} and then have - * each {@link graphql.schema.DataFetcher} that wants to make cache control hints use that. - *

- * Then at the end of the query you would call {@link #addTo(graphql.ExecutionResult)} to record the cache control hints into the {@link graphql.ExecutionResult} - * extensions map as per the specification. - */ -@Deprecated -@DeprecatedAt("2022-07-26") -@PublicApi -public class CacheControl { - - public static final String CACHE_CONTROL_EXTENSION_KEY = "cacheControl"; - - /** - * If the scope is set to PRIVATE, this indicates anything under this path should only be cached per-user, - * unless the value is overridden on a sub path. PUBLIC is the default and means anything under this path - * can be stored in a shared cache. - */ - public enum Scope { - PUBLIC, PRIVATE - } - - private static final class Hint { - private final List path; - private final Integer maxAge; - private final Scope scope; - - private Hint(List path, Integer maxAge, Scope scope) { - assertNotEmpty(path); - this.path = path; - this.maxAge = maxAge; - this.scope = scope; - } - - Map toMap() { - Map map = new LinkedHashMap<>(); - map.put("path", path); - if (maxAge != null) { - map.put("maxAge", maxAge); - } - if (scope != null) { - map.put("scope", scope.name()); - } - return map; - } - } - - private final List hints; - - private CacheControl() { - hints = new CopyOnWriteArrayList<>(); - } - - - /** - * This creates a cache control hint for the specified path - * - * @param path the path to the field that has the cache control hint - * @param maxAge the caching time in seconds - * @param scope the scope of the cache control hint - * @return this object builder style - * - * @deprecated - Apollo has deprecated the Cache Control specification - */ - @Deprecated - @DeprecatedAt("2022-07-26") - public CacheControl hint(ResultPath path, Integer maxAge, Scope scope) { - assertNotNull(path); - assertNotNull(scope); - hints.add(new Hint(path.toList(), maxAge, scope)); - return this; - } - - /** - * This creates a cache control hint for the specified path - * - * @param path the path to the field that 
has the cache control hint - * @param scope the scope of the cache control hint - * @return this object builder style - * - * @deprecated - Apollo has deprecated the Cache Control specification - */ - @Deprecated - @DeprecatedAt("2022-07-26") - public CacheControl hint(ResultPath path, Scope scope) { - return hint(path, null, scope); - } - - /** - * This creates a cache control hint for the specified path - * - * @param path the path to the field that has the cache control hint - * @param maxAge the caching time in seconds - * @return this object builder style - * - * @deprecated - Apollo has deprecated the Cache Control specification - */ - @Deprecated - @DeprecatedAt("2022-07-26") - public CacheControl hint(ResultPath path, Integer maxAge) { - return hint(path, maxAge, Scope.PUBLIC); - } - - /** - * This creates a cache control hint for the specified field being fetched - * - * @param dataFetchingEnvironment the path to the field that has the cache control hint - * @param maxAge the caching time in seconds - * @param scope the scope of the cache control hint - * @return this object builder style - * - * @deprecated - Apollo has deprecated the Cache Control specification - */ - @Deprecated - @DeprecatedAt("2022-07-26") - public CacheControl hint(DataFetchingEnvironment dataFetchingEnvironment, Integer maxAge, Scope scope) { - assertNotNull(dataFetchingEnvironment); - assertNotNull(scope); - hint(dataFetchingEnvironment.getExecutionStepInfo().getPath(), maxAge, scope); - return this; - } - - /** - * This creates a cache control hint for the specified field being fetched with a PUBLIC scope - * - * @param dataFetchingEnvironment the path to the field that has the cache control hint - * @param maxAge the caching time in seconds - * @return this object builder style - * - * @deprecated - Apollo has deprecated the Cache Control specification - */ - @Deprecated - @DeprecatedAt("2022-07-26") - public CacheControl hint(DataFetchingEnvironment dataFetchingEnvironment, 
Integer maxAge) { - hint(dataFetchingEnvironment, maxAge, Scope.PUBLIC); - return this; - } - - /** - * This creates a cache control hint for the specified field being fetched with a specified scope - * - * @param dataFetchingEnvironment the path to the field that has the cache control hint - * @param scope the scope of the cache control hint - * @return this object builder style - * - * @deprecated - Apollo has deprecated the Cache Control specification - */ - @Deprecated - @DeprecatedAt("2022-07-26") - public CacheControl hint(DataFetchingEnvironment dataFetchingEnvironment, Scope scope) { - return hint(dataFetchingEnvironment, null, scope); - } - - /** - * Creates a new CacheControl object that can be used to trick caching hints - * - * @return the new object - * - * @deprecated - Apollo has deprecated the Cache Control specification - */ - @Deprecated - @DeprecatedAt("2022-07-26") - public static CacheControl newCacheControl() { - return new CacheControl(); - } - - /** - * This will record the values in the cache control object into the provided execution result object which creates a new {@link graphql.ExecutionResult} - * object back out - * - * @param executionResult the starting execution result object - * @return a new execution result with the hints in the extensions map. 
- * - * @deprecated - Apollo has deprecated the Cache Control specification - */ - @Deprecated - @DeprecatedAt("2022-07-26") - public ExecutionResult addTo(ExecutionResult executionResult) { - return ExecutionResult.newExecutionResult() - .from(executionResult) - .addExtension(CACHE_CONTROL_EXTENSION_KEY, hintsToCacheControlProperties()) - .build(); - } - - private Map hintsToCacheControlProperties() { - List> recordedHints = map(hints, Hint::toMap); - - Map cacheControl = new LinkedHashMap<>(); - cacheControl.put("version", 1); - cacheControl.put("hints", recordedHints); - return cacheControl; - } -} diff --git a/src/main/java/graphql/collect/ImmutableMapWithNullValues.java b/src/main/java/graphql/collect/ImmutableMapWithNullValues.java index 207ad9b5d3..8eab40c56d 100644 --- a/src/main/java/graphql/collect/ImmutableMapWithNullValues.java +++ b/src/main/java/graphql/collect/ImmutableMapWithNullValues.java @@ -14,9 +14,9 @@ import java.util.function.Function; /** - * The standard ImmutableMap does not allow null values. The implementation does. - * We have cases in graphql, around arguments where a mep entry can be explicitly set to null - * and we want immutable smart maps for these case. + * The standard ImmutableMap does not allow null values. The implementation does. + * We have cases in graphql, around arguments where a map entry can be explicitly set to null + * and we want immutable smart maps for these cases. 
* * @param for key * @param for victory diff --git a/src/main/java/graphql/execution/AbstractAsyncExecutionStrategy.java b/src/main/java/graphql/execution/AbstractAsyncExecutionStrategy.java index 9e482865fd..577bb00f96 100644 --- a/src/main/java/graphql/execution/AbstractAsyncExecutionStrategy.java +++ b/src/main/java/graphql/execution/AbstractAsyncExecutionStrategy.java @@ -1,5 +1,6 @@ package graphql.execution; +import com.google.common.collect.Maps; import graphql.ExecutionResult; import graphql.ExecutionResultImpl; import graphql.PublicSpi; @@ -28,10 +29,9 @@ protected BiConsumer, Throwable> handleResults(ExecutionCo handleNonNullException(executionContext, overallResult, exception); return; } - Map resolvedValuesByField = new LinkedHashMap<>(fieldNames.size()); + Map resolvedValuesByField = Maps.newLinkedHashMapWithExpectedSize(fieldNames.size()); int ix = 0; for (ExecutionResult executionResult : results) { - String fieldName = fieldNames.get(ix++); resolvedValuesByField.put(fieldName, executionResult.getData()); } diff --git a/src/main/java/graphql/execution/Async.java b/src/main/java/graphql/execution/Async.java index cc2f631401..ec71e2bdc9 100644 --- a/src/main/java/graphql/execution/Async.java +++ b/src/main/java/graphql/execution/Async.java @@ -12,6 +12,7 @@ import java.util.concurrent.CompletionException; import java.util.concurrent.CompletionStage; import java.util.function.BiFunction; +import java.util.function.Function; import java.util.function.Supplier; @Internal @@ -56,7 +57,16 @@ public void add(CompletableFuture completableFuture) { @Override public CompletableFuture> await() { Assert.assertTrue(ix == 0, () -> "expected size was " + 0 + " got " + ix); - return CompletableFuture.completedFuture(Collections.emptyList()); + return typedEmpty(); + } + + + // implementation details: infer the type of Completable> from a singleton empty + private static final CompletableFuture> EMPTY = CompletableFuture.completedFuture(Collections.emptyList()); + + 
@SuppressWarnings("unchecked") + private static CompletableFuture typedEmpty() { + return (CompletableFuture) EMPTY; } } @@ -75,18 +85,7 @@ public void add(CompletableFuture completableFuture) { @Override public CompletableFuture> await() { Assert.assertTrue(ix == 1, () -> "expected size was " + 1 + " got " + ix); - - CompletableFuture> overallResult = new CompletableFuture<>(); - completableFuture - .whenComplete((ignored, exception) -> { - if (exception != null) { - overallResult.completeExceptionally(exception); - return; - } - List results = Collections.singletonList(completableFuture.join()); - overallResult.complete(results); - }); - return overallResult; + return completableFuture.thenApply(Collections::singletonList); } } @@ -128,18 +127,12 @@ public CompletableFuture> await() { } - @FunctionalInterface - public interface CFFactory { - CompletableFuture apply(T input, int index, List previousResults); - } - - public static CompletableFuture> each(Collection list, BiFunction> cfFactory) { + public static CompletableFuture> each(Collection list, Function> cfFactory) { CombinedBuilder futures = ofExpectedSize(list.size()); - int index = 0; for (T t : list) { CompletableFuture cf; try { - cf = cfFactory.apply(t, index++); + cf = cfFactory.apply(t); Assert.assertNotNull(cf, () -> "cfFactory must return a non null value"); } catch (Exception e) { cf = new CompletableFuture<>(); @@ -151,20 +144,20 @@ public static CompletableFuture> each(Collection list, BiFunct return futures.await(); } - public static CompletableFuture> eachSequentially(Iterable list, CFFactory cfFactory) { + public static CompletableFuture> eachSequentially(Iterable list, BiFunction, CompletableFuture> cfFactory) { CompletableFuture> result = new CompletableFuture<>(); - eachSequentiallyImpl(list.iterator(), cfFactory, 0, new ArrayList<>(), result); + eachSequentiallyImpl(list.iterator(), cfFactory, new ArrayList<>(), result); return result; } - private static void eachSequentiallyImpl(Iterator 
iterator, CFFactory cfFactory, int index, List tmpResult, CompletableFuture> overallResult) { + private static void eachSequentiallyImpl(Iterator iterator, BiFunction, CompletableFuture> cfFactory, List tmpResult, CompletableFuture> overallResult) { if (!iterator.hasNext()) { overallResult.complete(tmpResult); return; } CompletableFuture cf; try { - cf = cfFactory.apply(iterator.next(), index, tmpResult); + cf = cfFactory.apply(iterator.next(), tmpResult); Assert.assertNotNull(cf, () -> "cfFactory must return a non null value"); } catch (Exception e) { cf = new CompletableFuture<>(); @@ -176,7 +169,7 @@ private static void eachSequentiallyImpl(Iterator iterator, CFFactory< return; } tmpResult.add(cfResult); - eachSequentiallyImpl(iterator, cfFactory, index + 1, tmpResult, overallResult); + eachSequentiallyImpl(iterator, cfFactory, tmpResult, overallResult); }); } diff --git a/src/main/java/graphql/execution/AsyncExecutionStrategy.java b/src/main/java/graphql/execution/AsyncExecutionStrategy.java index fdebcb6cca..6608fba0f3 100644 --- a/src/main/java/graphql/execution/AsyncExecutionStrategy.java +++ b/src/main/java/graphql/execution/AsyncExecutionStrategy.java @@ -64,6 +64,7 @@ public CompletableFuture execute(ExecutionContext executionCont handleResultsConsumer.accept(null, throwable.getCause()); return; } + Async.CombinedBuilder executionResultFutures = Async.ofExpectedSize(completeValueInfos.size()); for (FieldValueInfo completeValueInfo : completeValueInfos) { executionResultFutures.add(completeValueInfo.getFieldValue()); diff --git a/src/main/java/graphql/execution/AsyncSerialExecutionStrategy.java b/src/main/java/graphql/execution/AsyncSerialExecutionStrategy.java index bf094ac41b..fc2dde0980 100644 --- a/src/main/java/graphql/execution/AsyncSerialExecutionStrategy.java +++ b/src/main/java/graphql/execution/AsyncSerialExecutionStrategy.java @@ -39,7 +39,7 @@ public CompletableFuture execute(ExecutionContext executionCont MergedSelectionSet fields = 
parameters.getFields(); ImmutableList fieldNames = ImmutableList.copyOf(fields.keySet()); - CompletableFuture> resultsFuture = Async.eachSequentially(fieldNames, (fieldName, index, prevResults) -> { + CompletableFuture> resultsFuture = Async.eachSequentially(fieldNames, (fieldName, prevResults) -> { MergedField currentField = fields.getSubField(fieldName); ResultPath fieldPath = parameters.getPath().segment(mkNameForPath(currentField)); ExecutionStrategyParameters newParameters = parameters diff --git a/src/main/java/graphql/execution/ConditionalNodes.java b/src/main/java/graphql/execution/ConditionalNodes.java deleted file mode 100644 index a9e3ca733e..0000000000 --- a/src/main/java/graphql/execution/ConditionalNodes.java +++ /dev/null @@ -1,43 +0,0 @@ -package graphql.execution; - -import graphql.Assert; -import graphql.GraphQLContext; -import graphql.Internal; -import graphql.language.Directive; -import graphql.language.NodeUtil; - -import java.util.List; -import java.util.Locale; -import java.util.Map; - -import static graphql.Directives.IncludeDirective; -import static graphql.Directives.SkipDirective; - -@Internal -public class ConditionalNodes { - - public boolean shouldInclude(Map variables, List directives) { - // shortcut on no directives - if (directives.isEmpty()) { - return true; - } - boolean skip = getDirectiveResult(variables, directives, SkipDirective.getName(), false); - if (skip) { - return false; - } - - return getDirectiveResult(variables, directives, IncludeDirective.getName(), true); - } - - private boolean getDirectiveResult(Map variables, List directives, String directiveName, boolean defaultValue) { - Directive foundDirective = NodeUtil.findNodeByName(directives, directiveName); - if (foundDirective != null) { - Map argumentValues = ValuesResolver.getArgumentValues(SkipDirective.getArguments(), foundDirective.getArguments(), CoercedVariables.of(variables), GraphQLContext.getDefault(), Locale.getDefault()); - Object flag = 
argumentValues.get("if"); - Assert.assertTrue(flag instanceof Boolean, () -> String.format("The '%s' directive MUST have a value for the 'if' argument", directiveName)); - return (Boolean) flag; - } - return defaultValue; - } - -} diff --git a/src/main/java/graphql/execution/DataFetcherExceptionHandler.java b/src/main/java/graphql/execution/DataFetcherExceptionHandler.java index a318ebf8df..7b7d294a0b 100644 --- a/src/main/java/graphql/execution/DataFetcherExceptionHandler.java +++ b/src/main/java/graphql/execution/DataFetcherExceptionHandler.java @@ -14,24 +14,6 @@ @PublicSpi public interface DataFetcherExceptionHandler { - /** - * When an exception occurs during a call to a {@link DataFetcher} then this handler - * is called to shape the errors that should be placed in the {@link ExecutionResult#getErrors()} - * list of errors. - * - * @param handlerParameters the parameters to this callback - * - * @return a result that can contain custom formatted {@link graphql.GraphQLError}s - * - * @deprecated use {@link #handleException(DataFetcherExceptionHandlerParameters)} instead which as an asynchronous - * version - */ - @Deprecated - @DeprecatedAt("2021-06-23") - default DataFetcherExceptionHandlerResult onException(DataFetcherExceptionHandlerParameters handlerParameters) { - return SimpleDataFetcherExceptionHandler.defaultImpl.onException(handlerParameters); - } - /** * When an exception occurs during a call to a {@link DataFetcher} then this handler * is called to shape the errors that should be placed in the {@link ExecutionResult#getErrors()} @@ -41,8 +23,5 @@ default DataFetcherExceptionHandlerResult onException(DataFetcherExceptionHandle * * @return a result that can contain custom formatted {@link graphql.GraphQLError}s */ - default CompletableFuture handleException(DataFetcherExceptionHandlerParameters handlerParameters) { - DataFetcherExceptionHandlerResult result = onException(handlerParameters); - return CompletableFuture.completedFuture(result); - } + 
CompletableFuture handleException(DataFetcherExceptionHandlerParameters handlerParameters); } diff --git a/src/main/java/graphql/execution/DataFetcherResult.java b/src/main/java/graphql/execution/DataFetcherResult.java index 9b78497ed9..460b07daf9 100644 --- a/src/main/java/graphql/execution/DataFetcherResult.java +++ b/src/main/java/graphql/execution/DataFetcherResult.java @@ -2,6 +2,7 @@ import com.google.common.collect.ImmutableList; import graphql.DeprecatedAt; +import graphql.ExecutionResult; import graphql.GraphQLError; import graphql.Internal; import graphql.PublicApi; @@ -9,20 +10,26 @@ import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.function.Consumer; +import java.util.function.Function; import static graphql.Assert.assertNotNull; /** - * An object that can be returned from a {@link DataFetcher} that contains both data, local context and errors to be relativized and - * added to the final result. This is a useful when your ``DataFetcher`` retrieves data from multiple sources - * or from another GraphQL resource or you want to pass extra context to lower levels. - * + * An object that can be returned from a {@link DataFetcher} that contains both data, local context and errors to be added to the final result. + * This is a useful when your ``DataFetcher`` retrieves data from multiple sources + * or from another GraphQL resource, or you want to pass extra context to lower levels. + *

* This also allows you to pass down new local context objects between parent and child fields. If you return a * {@link #getLocalContext()} value then it will be passed down into any child fields via * {@link graphql.schema.DataFetchingEnvironment#getLocalContext()} * + * You can also have {@link DataFetcher}s contribute to the {@link ExecutionResult#getExtensions()} by returning + * extensions maps that will be merged together via the {@link graphql.extensions.ExtensionsBuilder} and its {@link graphql.extensions.ExtensionsMerger} + * in place. + * * @param The type of the data fetched */ @PublicApi @@ -31,6 +38,7 @@ public class DataFetcherResult { private final T data; private final List errors; private final Object localContext; + private final Map extensions; /** * Creates a data fetcher result @@ -44,13 +52,14 @@ public class DataFetcherResult { @Deprecated @DeprecatedAt("2019-01-11") public DataFetcherResult(T data, List errors) { - this(data, errors, null); + this(data, errors, null, null); } - private DataFetcherResult(T data, List errors, Object localContext) { + private DataFetcherResult(T data, List errors, Object localContext, Map extensions) { this.data = data; this.errors = ImmutableList.copyOf(assertNotNull(errors)); this.localContext = localContext; + this.extensions = extensions; } /** @@ -83,6 +92,22 @@ public Object getLocalContext() { return localContext; } + /** + * A data fetcher result can supply extension values that will be merged into the result + * via the {@link graphql.extensions.ExtensionsBuilder} at the end of the operation. + *

+ * The {@link graphql.extensions.ExtensionsMerger} in place inside the {@link graphql.extensions.ExtensionsBuilder} + * will control how these extension values get merged. + * + * @return a map of extension values to be merged + * + * @see graphql.extensions.ExtensionsBuilder + * @see graphql.extensions.ExtensionsMerger + */ + public Map getExtensions() { + return extensions; + } + /** * This helps you transform the current DataFetcherResult into another one by starting a builder with all * the current values and allows you to transform it how you want. @@ -97,6 +122,23 @@ public DataFetcherResult transform(Consumer> builderConsumer) { return builder.build(); } + /** + * Transforms the data of the current DataFetcherResult using the provided function. + * All other values are left unmodified. + * + * @param transformation the transformation that should be applied to the data + * @param the result type + * + * @return a new instance with where the data value has been transformed + */ + public DataFetcherResult map(Function transformation) { + return new Builder<>(transformation.apply(this.data)) + .errors(this.errors) + .extensions(this.extensions) + .localContext(this.localContext) + .build(); + } + /** * Creates a new data fetcher result builder * @@ -112,11 +154,13 @@ public static class Builder { private T data; private Object localContext; private final List errors = new ArrayList<>(); + private Map extensions; public Builder(DataFetcherResult existing) { data = existing.getData(); localContext = existing.getLocalContext(); errors.addAll(existing.getErrors()); + extensions = existing.extensions; } public Builder(T data) { @@ -158,8 +202,13 @@ public Builder localContext(Object localContext) { return this; } + public Builder extensions(Map extensions) { + this.extensions = extensions; + return this; + } + public DataFetcherResult build() { - return new DataFetcherResult<>(data, errors, localContext); + return new DataFetcherResult<>(data, errors, localContext, 
extensions); } } } diff --git a/src/main/java/graphql/execution/Execution.java b/src/main/java/graphql/execution/Execution.java index 401258dedb..916bc64659 100644 --- a/src/main/java/graphql/execution/Execution.java +++ b/src/main/java/graphql/execution/Execution.java @@ -90,7 +90,6 @@ public CompletableFuture execute(Document document, GraphQLSche .document(document) .operationDefinition(operationDefinition) .dataLoaderRegistry(executionInput.getDataLoaderRegistry()) - .cacheControl(executionInput.getCacheControl()) .locale(executionInput.getLocale()) .valueUnboxer(valueUnboxer) .executionInput(executionInput) @@ -134,7 +133,8 @@ private CompletableFuture executeOperation(ExecutionContext exe .schema(executionContext.getGraphQLSchema()) .objectType(operationRootType) .fragments(executionContext.getFragmentsByName()) - .variables(executionContext.getVariables()) + .variables(executionContext.getCoercedVariables().toMap()) + .graphQLContext(graphQLContext) .build(); MergedSelectionSet fields = fieldCollector.collectFields(collectorParameters, operationDefinition.getSelectionSet()); diff --git a/src/main/java/graphql/execution/ExecutionContext.java b/src/main/java/graphql/execution/ExecutionContext.java index 9c1aa2032b..a517f5eea9 100644 --- a/src/main/java/graphql/execution/ExecutionContext.java +++ b/src/main/java/graphql/execution/ExecutionContext.java @@ -8,7 +8,6 @@ import graphql.GraphQLContext; import graphql.GraphQLError; import graphql.PublicApi; -import graphql.cachecontrol.CacheControl; import graphql.collect.ImmutableKit; import graphql.execution.instrumentation.Instrumentation; import graphql.execution.instrumentation.InstrumentationState; @@ -52,7 +51,6 @@ public class ExecutionContext { private final AtomicReference> errors = new AtomicReference<>(ImmutableKit.emptyList()); private final Set errorPaths = new HashSet<>(); private final DataLoaderRegistry dataLoaderRegistry; - private final CacheControl cacheControl; private final Locale locale; 
private final ValueUnboxer valueUnboxer; private final ExecutionInput executionInput; @@ -74,7 +72,6 @@ public class ExecutionContext { this.root = builder.root; this.instrumentation = builder.instrumentation; this.dataLoaderRegistry = builder.dataLoaderRegistry; - this.cacheControl = builder.cacheControl; this.locale = builder.locale; this.valueUnboxer = builder.valueUnboxer; this.errors.set(builder.errors); @@ -166,12 +163,6 @@ public DataLoaderRegistry getDataLoaderRegistry() { return dataLoaderRegistry; } - @Deprecated - @DeprecatedAt("2022-07-26") - public CacheControl getCacheControl() { - return cacheControl; - } - public Locale getLocale() { return locale; } diff --git a/src/main/java/graphql/execution/ExecutionContextBuilder.java b/src/main/java/graphql/execution/ExecutionContextBuilder.java index 12198cf879..f941be07b7 100644 --- a/src/main/java/graphql/execution/ExecutionContextBuilder.java +++ b/src/main/java/graphql/execution/ExecutionContextBuilder.java @@ -8,7 +8,6 @@ import graphql.GraphQLError; import graphql.Internal; import graphql.PublicApi; -import graphql.cachecontrol.CacheControl; import graphql.collect.ImmutableKit; import graphql.execution.instrumentation.Instrumentation; import graphql.execution.instrumentation.InstrumentationState; @@ -42,7 +41,6 @@ public class ExecutionContextBuilder { CoercedVariables coercedVariables = CoercedVariables.emptyVariables(); ImmutableMap fragmentsByName = ImmutableKit.emptyMap(); DataLoaderRegistry dataLoaderRegistry; - CacheControl cacheControl; Locale locale; ImmutableList errors = emptyList(); ValueUnboxer valueUnboxer; @@ -89,7 +87,6 @@ public ExecutionContextBuilder() { coercedVariables = other.getCoercedVariables(); fragmentsByName = ImmutableMap.copyOf(other.getFragmentsByName()); dataLoaderRegistry = other.getDataLoaderRegistry(); - cacheControl = other.getCacheControl(); locale = other.getLocale(); errors = ImmutableList.copyOf(other.getErrors()); valueUnboxer = other.getValueUnboxer(); @@ -194,13 
+191,6 @@ public ExecutionContextBuilder dataLoaderRegistry(DataLoaderRegistry dataLoaderR return this; } - @Deprecated - @DeprecatedAt("2022-07-26") - public ExecutionContextBuilder cacheControl(CacheControl cacheControl) { - this.cacheControl = cacheControl; - return this; - } - public ExecutionContextBuilder locale(Locale locale) { this.locale = locale; return this; diff --git a/src/main/java/graphql/execution/ExecutionStepInfoFactory.java b/src/main/java/graphql/execution/ExecutionStepInfoFactory.java index 3338961388..1a9f91aa46 100644 --- a/src/main/java/graphql/execution/ExecutionStepInfoFactory.java +++ b/src/main/java/graphql/execution/ExecutionStepInfoFactory.java @@ -1,52 +1,15 @@ package graphql.execution; import graphql.Internal; -import graphql.introspection.Introspection; -import graphql.language.Argument; -import graphql.schema.GraphQLCodeRegistry; -import graphql.schema.GraphQLFieldDefinition; import graphql.schema.GraphQLList; -import graphql.schema.GraphQLObjectType; import graphql.schema.GraphQLOutputType; -import graphql.util.FpKit; - -import java.util.List; -import java.util.Map; -import java.util.function.Supplier; @Internal public class ExecutionStepInfoFactory { - public ExecutionStepInfo newExecutionStepInfoForSubField(ExecutionContext executionContext, MergedField mergedField, ExecutionStepInfo parentInfo) { - GraphQLObjectType parentType = (GraphQLObjectType) parentInfo.getUnwrappedNonNullType(); - GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDef(executionContext.getGraphQLSchema(), parentType, mergedField.getName()); - GraphQLOutputType fieldType = fieldDefinition.getType(); - List fieldArgs = mergedField.getArguments(); - GraphQLCodeRegistry codeRegistry = executionContext.getGraphQLSchema().getCodeRegistry(); - Supplier> argumentValuesSupplier = () -> ValuesResolver.getArgumentValues(codeRegistry, - fieldDefinition.getArguments(), - fieldArgs, - executionContext.getCoercedVariables(), - 
executionContext.getGraphQLContext(), - executionContext.getLocale()); - Supplier> argumentValues = FpKit.intraThreadMemoize(argumentValuesSupplier); - - ResultPath newPath = parentInfo.getPath().segment(mergedField.getResultKey()); - - return parentInfo.transform(builder -> builder - .parentInfo(parentInfo) - .type(fieldType) - .fieldDefinition(fieldDefinition) - .fieldContainer(parentType) - .field(mergedField) - .path(newPath) - .arguments(argumentValues)); - } - - public ExecutionStepInfo newExecutionStepInfoForListElement(ExecutionStepInfo executionInfo, int index) { + public ExecutionStepInfo newExecutionStepInfoForListElement(ExecutionStepInfo executionInfo, ResultPath indexedPath) { GraphQLList fieldType = (GraphQLList) executionInfo.getUnwrappedNonNullType(); GraphQLOutputType typeInList = (GraphQLOutputType) fieldType.getWrappedType(); - ResultPath indexedPath = executionInfo.getPath().segment(index); return executionInfo.transform(builder -> builder .parentInfo(executionInfo) .type(typeInList) diff --git a/src/main/java/graphql/execution/ExecutionStrategy.java b/src/main/java/graphql/execution/ExecutionStrategy.java index 9e38a58372..4ed0f1e644 100644 --- a/src/main/java/graphql/execution/ExecutionStrategy.java +++ b/src/main/java/graphql/execution/ExecutionStrategy.java @@ -18,6 +18,7 @@ import graphql.execution.instrumentation.parameters.InstrumentationFieldCompleteParameters; import graphql.execution.instrumentation.parameters.InstrumentationFieldFetchParameters; import graphql.execution.instrumentation.parameters.InstrumentationFieldParameters; +import graphql.extensions.ExtensionsBuilder; import graphql.introspection.Introspection; import graphql.language.Argument; import graphql.language.Field; @@ -290,9 +291,10 @@ protected CompletableFuture fetchField(ExecutionContext executionC .handle((result, exception) -> { fetchCtx.onCompleted(result, exception); if (exception != null) { - return handleFetchingException(executionContext, 
dataFetchingEnvironment.get(), exception); + return handleFetchingException(dataFetchingEnvironment.get(), exception); } else { - return CompletableFuture.completedFuture(result); + // we can simply return the fetched value CF and avoid a allocation + return fetchedValue; } }) .thenCompose(Function.identity()) @@ -330,6 +332,7 @@ protected FetchedValue unboxPossibleDataFetcherResult(ExecutionContext execution if (result instanceof DataFetcherResult) { DataFetcherResult dataFetcherResult = (DataFetcherResult) result; executionContext.addErrors(dataFetcherResult.getErrors()); + addExtensionsIfPresent(executionContext, dataFetcherResult); Object localContext = dataFetcherResult.getLocalContext(); if (localContext == null) { @@ -351,9 +354,19 @@ protected FetchedValue unboxPossibleDataFetcherResult(ExecutionContext execution } } - protected CompletableFuture handleFetchingException(ExecutionContext executionContext, - DataFetchingEnvironment environment, - Throwable e) { + private void addExtensionsIfPresent(ExecutionContext executionContext, DataFetcherResult dataFetcherResult) { + Map extensions = dataFetcherResult.getExtensions(); + if (extensions != null) { + ExtensionsBuilder extensionsBuilder = executionContext.getGraphQLContext().get(ExtensionsBuilder.class); + if (extensionsBuilder != null) { + extensionsBuilder.addValues(extensions); + } + } + } + + protected CompletableFuture handleFetchingException( + DataFetchingEnvironment environment, + Throwable e) { DataFetcherExceptionHandlerParameters handlerParameters = DataFetcherExceptionHandlerParameters.newExceptionParameters() .dataFetchingEnvironment(environment) .exception(e) @@ -557,19 +570,16 @@ protected FieldValueInfo completeValueForList(ExecutionContext executionContext, for (Object item : iterableValues) { ResultPath indexedPath = parameters.getPath().segment(index); - ExecutionStepInfo stepInfoForListElement = executionStepInfoFactory.newExecutionStepInfoForListElement(executionStepInfo, index); + 
ExecutionStepInfo stepInfoForListElement = executionStepInfoFactory.newExecutionStepInfoForListElement(executionStepInfo, indexedPath); NonNullableFieldValidator nonNullableFieldValidator = new NonNullableFieldValidator(executionContext, stepInfoForListElement); - int finalIndex = index; FetchedValue value = unboxPossibleDataFetcherResult(executionContext, parameters, item); ExecutionStrategyParameters newParameters = parameters.transform(builder -> builder.executionStepInfo(stepInfoForListElement) .nonNullFieldValidator(nonNullableFieldValidator) - .listSize(size.orElse(-1)) // -1 signals that we don't know the size .localContext(value.getLocalContext()) - .currentListIndex(finalIndex) .path(indexedPath) .source(value.getFetchedValue()) ); @@ -577,7 +587,7 @@ protected FieldValueInfo completeValueForList(ExecutionContext executionContext, index++; } - CompletableFuture> resultsFuture = Async.each(fieldValueInfos, (item, i) -> item.getFieldValue()); + CompletableFuture> resultsFuture = Async.each(fieldValueInfos, FieldValueInfo::getFieldValue); CompletableFuture overallResult = new CompletableFuture<>(); completeListCtx.onDispatched(overallResult); @@ -702,19 +712,6 @@ private Object handleCoercionProblem(ExecutionContext context, ExecutionStrategy return null; } - /** - * Converts an object that is known to should be an Iterable into one - * - * @param result the result object - * - * @return an Iterable from that object - * - * @throws java.lang.ClassCastException if it's not an Iterable - */ - protected Iterable toIterable(Object result) { - return FpKit.toIterable(result); - } - protected GraphQLObjectType resolveType(ExecutionContext executionContext, ExecutionStrategyParameters parameters, GraphQLType fieldType) { // we can avoid a method call and type resolver environment allocation if we know it's an object type if (fieldType instanceof GraphQLObjectType) { diff --git a/src/main/java/graphql/execution/ExecutionStrategyParameters.java 
b/src/main/java/graphql/execution/ExecutionStrategyParameters.java index 2a83dbd58d..b413e4321a 100644 --- a/src/main/java/graphql/execution/ExecutionStrategyParameters.java +++ b/src/main/java/graphql/execution/ExecutionStrategyParameters.java @@ -19,8 +19,6 @@ public class ExecutionStrategyParameters { private final NonNullableFieldValidator nonNullableFieldValidator; private final ResultPath path; private final MergedField currentField; - private final int listSize; - private final int currentListIndex; private final ExecutionStrategyParameters parent; private ExecutionStrategyParameters(ExecutionStepInfo executionStepInfo, @@ -30,8 +28,6 @@ private ExecutionStrategyParameters(ExecutionStepInfo executionStepInfo, NonNullableFieldValidator nonNullableFieldValidator, ResultPath path, MergedField currentField, - int listSize, - int currentListIndex, ExecutionStrategyParameters parent) { this.executionStepInfo = assertNotNull(executionStepInfo, () -> "executionStepInfo is null"); @@ -41,8 +37,6 @@ private ExecutionStrategyParameters(ExecutionStepInfo executionStepInfo, this.nonNullableFieldValidator = nonNullableFieldValidator; this.path = path; this.currentField = currentField; - this.listSize = listSize; - this.currentListIndex = currentListIndex; this.parent = parent; } @@ -70,14 +64,6 @@ public Object getLocalContext() { return localContext; } - public int getListSize() { - return listSize; - } - - public int getCurrentListIndex() { - return currentListIndex; - } - public ExecutionStrategyParameters getParent() { return parent; } @@ -119,8 +105,6 @@ public static class Builder { NonNullableFieldValidator nonNullableFieldValidator; ResultPath path = ResultPath.rootPath(); MergedField currentField; - int listSize; - int currentListIndex; ExecutionStrategyParameters parent; /** @@ -141,8 +125,6 @@ private Builder(ExecutionStrategyParameters oldParameters) { this.currentField = oldParameters.currentField; this.path = oldParameters.path; this.parent = 
oldParameters.parent; - this.listSize = oldParameters.listSize; - this.currentListIndex = oldParameters.currentListIndex; } public Builder executionStepInfo(ExecutionStepInfo executionStepInfo) { @@ -185,16 +167,6 @@ public Builder path(ResultPath path) { return this; } - public Builder listSize(int listSize) { - this.listSize = listSize; - return this; - } - - public Builder currentListIndex(int currentListIndex) { - this.currentListIndex = currentListIndex; - return this; - } - public Builder parent(ExecutionStrategyParameters parent) { this.parent = parent; return this; @@ -202,7 +174,7 @@ public Builder parent(ExecutionStrategyParameters parent) { public ExecutionStrategyParameters build() { - return new ExecutionStrategyParameters(executionStepInfo, source, localContext, fields, nonNullableFieldValidator, path, currentField, listSize, currentListIndex, parent); + return new ExecutionStrategyParameters(executionStepInfo, source, localContext, fields, nonNullableFieldValidator, path, currentField, parent); } } } diff --git a/src/main/java/graphql/execution/FieldCollector.java b/src/main/java/graphql/execution/FieldCollector.java index d32218bfc8..a6f1310a8c 100644 --- a/src/main/java/graphql/execution/FieldCollector.java +++ b/src/main/java/graphql/execution/FieldCollector.java @@ -2,6 +2,7 @@ import graphql.Internal; +import graphql.execution.conditional.ConditionalNodes; import graphql.language.Field; import graphql.language.FragmentDefinition; import graphql.language.FragmentSpread; @@ -76,13 +77,19 @@ private void collectFragmentSpread(FieldCollectorParameters parameters, Set visitedFragments, Map fields, InlineFragment inlineFragment) { - if (!conditionalNodes.shouldInclude(parameters.getVariables(), inlineFragment.getDirectives()) || + if (!conditionalNodes.shouldInclude(inlineFragment, + parameters.getVariables(), + parameters.getGraphQLSchema(), + parameters.getGraphQLContext()) || !doesFragmentConditionMatch(parameters, inlineFragment)) { return; } @@ 
-100,7 +110,10 @@ private void collectInlineFragment(FieldCollectorParameters parameters, Set fields, Field field) { - if (!conditionalNodes.shouldInclude(parameters.getVariables(), field.getDirectives())) { + if (!conditionalNodes.shouldInclude(field, + parameters.getVariables(), + parameters.getGraphQLSchema(), + parameters.getGraphQLContext())) { return; } String name = field.getResultKey(); diff --git a/src/main/java/graphql/execution/FieldCollectorParameters.java b/src/main/java/graphql/execution/FieldCollectorParameters.java index b1878ff2a7..c0a23404a7 100644 --- a/src/main/java/graphql/execution/FieldCollectorParameters.java +++ b/src/main/java/graphql/execution/FieldCollectorParameters.java @@ -1,6 +1,7 @@ package graphql.execution; import graphql.Assert; +import graphql.GraphQLContext; import graphql.Internal; import graphql.language.FragmentDefinition; import graphql.schema.GraphQLObjectType; @@ -17,6 +18,7 @@ public class FieldCollectorParameters { private final Map fragmentsByName; private final Map variables; private final GraphQLObjectType objectType; + private final GraphQLContext graphQLContext; public GraphQLSchema getGraphQLSchema() { return graphQLSchema; @@ -34,11 +36,16 @@ public GraphQLObjectType getObjectType() { return objectType; } - private FieldCollectorParameters(GraphQLSchema graphQLSchema, Map variables, Map fragmentsByName, GraphQLObjectType objectType) { - this.fragmentsByName = fragmentsByName; - this.graphQLSchema = graphQLSchema; - this.variables = variables; - this.objectType = objectType; + public GraphQLContext getGraphQLContext() { + return graphQLContext; + } + + private FieldCollectorParameters(Builder builder) { + this.fragmentsByName = builder.fragmentsByName; + this.graphQLSchema = builder.graphQLSchema; + this.variables = builder.variables; + this.objectType = builder.objectType; + this.graphQLContext = builder.graphQLContext; } public static Builder newParameters() { @@ -50,6 +57,7 @@ public static class Builder { 
private Map fragmentsByName; private Map variables; private GraphQLObjectType objectType; + private GraphQLContext graphQLContext = GraphQLContext.getDefault(); /** * @see FieldCollectorParameters#newParameters() @@ -68,6 +76,11 @@ public Builder objectType(GraphQLObjectType objectType) { return this; } + public Builder graphQLContext(GraphQLContext graphQLContext) { + this.graphQLContext = graphQLContext; + return this; + } + public Builder fragments(Map fragmentsByName) { this.fragmentsByName = fragmentsByName; return this; @@ -80,7 +93,7 @@ public Builder variables(Map variables) { public FieldCollectorParameters build() { Assert.assertNotNull(graphQLSchema, () -> "You must provide a schema"); - return new FieldCollectorParameters(graphQLSchema, variables, fragmentsByName, objectType); + return new FieldCollectorParameters(this); } } diff --git a/src/main/java/graphql/execution/SimpleDataFetcherExceptionHandler.java b/src/main/java/graphql/execution/SimpleDataFetcherExceptionHandler.java index 12d46972a4..606de0f8a9 100644 --- a/src/main/java/graphql/execution/SimpleDataFetcherExceptionHandler.java +++ b/src/main/java/graphql/execution/SimpleDataFetcherExceptionHandler.java @@ -20,8 +20,7 @@ public class SimpleDataFetcherExceptionHandler implements DataFetcherExceptionHa static final SimpleDataFetcherExceptionHandler defaultImpl = new SimpleDataFetcherExceptionHandler(); - @Override - public DataFetcherExceptionHandlerResult onException(DataFetcherExceptionHandlerParameters handlerParameters) { + private DataFetcherExceptionHandlerResult handleExceptionImpl(DataFetcherExceptionHandlerParameters handlerParameters) { Throwable exception = unwrap(handlerParameters.getException()); SourceLocation sourceLocation = handlerParameters.getSourceLocation(); ResultPath path = handlerParameters.getPath(); @@ -34,7 +33,7 @@ public DataFetcherExceptionHandlerResult onException(DataFetcherExceptionHandler @Override public CompletableFuture 
handleException(DataFetcherExceptionHandlerParameters handlerParameters) { - return CompletableFuture.completedFuture(onException(handlerParameters)); + return CompletableFuture.completedFuture(handleExceptionImpl(handlerParameters)); } /** diff --git a/src/main/java/graphql/execution/ValuesResolver.java b/src/main/java/graphql/execution/ValuesResolver.java index 78b2e79d83..b7ba457910 100644 --- a/src/main/java/graphql/execution/ValuesResolver.java +++ b/src/main/java/graphql/execution/ValuesResolver.java @@ -4,6 +4,7 @@ import graphql.GraphQLContext; import graphql.Internal; import graphql.collect.ImmutableKit; +import graphql.execution.values.InputInterceptor; import graphql.language.Argument; import graphql.language.ArrayValue; import graphql.language.NullValue; @@ -78,7 +79,14 @@ public static CoercedVariables coerceVariableValues(GraphQLSchema schema, GraphQLContext graphqlContext, Locale locale) throws CoercingParseValueException, NonNullableValueCoercedAsNullException { - return ValuesResolverConversion.externalValueToInternalValueForVariables(schema, variableDefinitions, rawVariables, graphqlContext, locale); + InputInterceptor inputInterceptor = graphqlContext.get(InputInterceptor.class); + return ValuesResolverConversion.externalValueToInternalValueForVariables( + inputInterceptor, + schema, + variableDefinitions, + rawVariables, + graphqlContext, + locale); } @@ -93,11 +101,13 @@ public static CoercedVariables coerceVariableValues(GraphQLSchema schema, * * @return a map of the normalised values */ - public static Map getNormalizedVariableValues(GraphQLSchema schema, - List variableDefinitions, - RawVariables rawVariables, - GraphQLContext graphqlContext, - Locale locale) { + public static Map getNormalizedVariableValues( + GraphQLSchema schema, + List variableDefinitions, + RawVariables rawVariables, + GraphQLContext graphqlContext, + Locale locale + ) { GraphqlFieldVisibility fieldVisibility = schema.getCodeRegistry().getFieldVisibility(); Map result = 
new LinkedHashMap<>(); for (VariableDefinition variableDefinition : variableDefinitions) { @@ -138,12 +148,15 @@ public static Map getNormalizedVariableValues(Grap * * @return a map of named argument values */ - public static Map getArgumentValues(List argumentTypes, - List arguments, - CoercedVariables coercedVariables, - GraphQLContext graphqlContext, - Locale locale) { - return getArgumentValuesImpl(DEFAULT_FIELD_VISIBILITY, argumentTypes, arguments, coercedVariables, graphqlContext, locale); + public static Map getArgumentValues( + List argumentTypes, + List arguments, + CoercedVariables coercedVariables, + GraphQLContext graphqlContext, + Locale locale + ) { + InputInterceptor inputInterceptor = graphqlContext.get(InputInterceptor.class); + return getArgumentValuesImpl(inputInterceptor, DEFAULT_FIELD_VISIBILITY, argumentTypes, arguments, coercedVariables, graphqlContext, locale); } /** @@ -155,9 +168,11 @@ public static Map getArgumentValues(List argume * * @return a map of named normalised values */ - public static Map getNormalizedArgumentValues(List argumentTypes, - List arguments, - Map normalizedVariables) { + public static Map getNormalizedArgumentValues( + List argumentTypes, + List arguments, + Map normalizedVariables + ) { if (argumentTypes.isEmpty()) { return ImmutableKit.emptyMap(); } @@ -183,13 +198,16 @@ public static Map getNormalizedArgumentValues(List return result; } - public static Map getArgumentValues(GraphQLCodeRegistry codeRegistry, - List argumentTypes, - List arguments, - CoercedVariables coercedVariables, - GraphQLContext graphqlContext, - Locale locale) { - return getArgumentValuesImpl(codeRegistry.getFieldVisibility(), argumentTypes, arguments, coercedVariables, graphqlContext, locale); + public static Map getArgumentValues( + GraphQLCodeRegistry codeRegistry, + List argumentTypes, + List arguments, + CoercedVariables coercedVariables, + GraphQLContext graphqlContext, + Locale locale + ) { + InputInterceptor inputInterceptor = 
graphqlContext.get(InputInterceptor.class); + return getArgumentValuesImpl(inputInterceptor, codeRegistry.getFieldVisibility(), argumentTypes, arguments, coercedVariables, graphqlContext, locale); } /** @@ -205,26 +223,50 @@ public static Map getArgumentValues(GraphQLCodeRegistry codeRegi * * @return a value converted to a literal */ - public static Value valueToLiteral(@NotNull GraphqlFieldVisibility fieldVisibility, - @NotNull InputValueWithState inputValueWithState, - @NotNull GraphQLType type, - GraphQLContext graphqlContext, - Locale locale) { - return (Value) ValuesResolverConversion.valueToLiteralImpl(fieldVisibility, inputValueWithState, type, ValueMode.LITERAL, graphqlContext, locale); + public static Value valueToLiteral( + @NotNull GraphqlFieldVisibility fieldVisibility, + @NotNull InputValueWithState inputValueWithState, + @NotNull GraphQLType type, + GraphQLContext graphqlContext, + Locale locale + ) { + return (Value) ValuesResolverConversion.valueToLiteralImpl( + fieldVisibility, + inputValueWithState, + type, + ValueMode.LITERAL, + graphqlContext, + locale); } - public static Value valueToLiteral(@NotNull InputValueWithState inputValueWithState, - @NotNull GraphQLType type, - GraphQLContext graphqlContext, - Locale locale) { - return (Value) ValuesResolverConversion.valueToLiteralImpl(DEFAULT_FIELD_VISIBILITY, inputValueWithState, type, ValueMode.LITERAL, graphqlContext, locale); + public static Value valueToLiteral( + @NotNull InputValueWithState inputValueWithState, + @NotNull GraphQLType type, + GraphQLContext graphqlContext, + Locale locale + ) { + return (Value) ValuesResolverConversion.valueToLiteralImpl( + DEFAULT_FIELD_VISIBILITY, + inputValueWithState, + type, + ValueMode.LITERAL, + graphqlContext, + locale); } - public static Object valueToInternalValue(InputValueWithState inputValueWithState, - GraphQLType type, - GraphQLContext graphqlContext, - Locale locale) throws CoercingParseValueException, CoercingParseLiteralException { - return 
ValuesResolverConversion.valueToInternalValueImpl(inputValueWithState, type, graphqlContext, locale); + public static Object valueToInternalValue( + InputValueWithState inputValueWithState, + GraphQLInputType inputType, + GraphQLContext graphqlContext, + Locale locale + ) throws CoercingParseValueException, CoercingParseLiteralException { + InputInterceptor inputInterceptor = graphqlContext.get(InputInterceptor.class); + return ValuesResolverConversion.valueToInternalValueImpl( + inputInterceptor, + inputValueWithState, + inputType, + graphqlContext, + locale); } /** @@ -238,34 +280,52 @@ public static Object valueToInternalValue(InputValueWithState inputValueWithStat * * @return a value converted to an internal value */ - public static Object externalValueToInternalValue(GraphqlFieldVisibility fieldVisibility, - Object externalValue, - GraphQLInputType type, - GraphQLContext graphqlContext, - Locale locale) { - return externalValueToInternalValueImpl(fieldVisibility, type, externalValue, graphqlContext, locale); + public static Object externalValueToInternalValue( + GraphqlFieldVisibility fieldVisibility, + Object externalValue, + GraphQLInputType type, + GraphQLContext graphqlContext, + Locale locale + ) { + InputInterceptor inputInterceptor = graphqlContext.get(InputInterceptor.class); + return externalValueToInternalValueImpl( + inputInterceptor, + fieldVisibility, + type, + externalValue, + graphqlContext, + locale); } @Nullable @SuppressWarnings("unchecked") - public static T getInputValueImpl(GraphQLInputType inputType, - InputValueWithState inputValue, - GraphQLContext graphqlContext, - Locale locale) { + public static T getInputValueImpl( + GraphQLInputType inputType, + InputValueWithState inputValue, + GraphQLContext graphqlContext, + Locale locale + ) { if (inputValue.isNotSet()) { return null; } - return (T) valueToInternalValue(inputValue, inputType, graphqlContext, locale); + return (T) valueToInternalValue( + inputValue, + inputType, + 
graphqlContext, + locale); } - private static Map getArgumentValuesImpl(GraphqlFieldVisibility fieldVisibility, - List argumentTypes, - List arguments, - CoercedVariables coercedVariables, - GraphQLContext graphqlContext, - Locale locale) { + private static Map getArgumentValuesImpl( + InputInterceptor inputInterceptor, + GraphqlFieldVisibility fieldVisibility, + List argumentTypes, + List arguments, + CoercedVariables coercedVariables, + GraphQLContext graphqlContext, + Locale locale + ) { if (argumentTypes.isEmpty()) { return ImmutableKit.emptyMap(); } @@ -289,10 +349,12 @@ private static Map getArgumentValuesImpl(GraphqlFieldVisibility } if (!hasValue && argumentDefinition.hasSetDefaultValue()) { Object coercedDefaultValue = ValuesResolverConversion.defaultValueToInternalValue( + inputInterceptor, fieldVisibility, defaultValue, argumentType, - graphqlContext, locale); + graphqlContext, + locale); coercedValues.put(argumentName, coercedDefaultValue); } else if (isNonNull(argumentType) && (!hasValue || ValuesResolverConversion.isNullValue(value))) { throw new NonNullableValueCoercedAsNullException(argumentDefinition); @@ -302,7 +364,13 @@ private static Map getArgumentValuesImpl(GraphqlFieldVisibility } else if (argumentValue instanceof VariableReference) { coercedValues.put(argumentName, value); } else { - value = ValuesResolverConversion.literalToInternalValue(fieldVisibility, argumentType, argument.getValue(), coercedVariables, graphqlContext, locale); + value = ValuesResolverConversion.literalToInternalValue(inputInterceptor, + fieldVisibility, + argumentType, + argument.getValue(), + coercedVariables, + graphqlContext, + locale); coercedValues.put(argumentName, value); } } @@ -337,16 +405,28 @@ public static Object literalToNormalizedValue(GraphqlFieldVisibility fieldVisibi return inputValue; } if (isNonNull(type)) { - return literalToNormalizedValue(fieldVisibility, unwrapOne(type), inputValue, normalizedVariables); + return literalToNormalizedValue( + 
fieldVisibility, + unwrapOne(type), + inputValue, + normalizedVariables); } if (type instanceof GraphQLInputObjectType) { - return literalToNormalizedValueForInputObject(fieldVisibility, (GraphQLInputObjectType) type, (ObjectValue) inputValue, normalizedVariables); + return literalToNormalizedValueForInputObject( + fieldVisibility, + (GraphQLInputObjectType) type, + (ObjectValue) inputValue, + normalizedVariables); } if (type instanceof GraphQLEnumType) { return inputValue; } if (isList(type)) { - return literalToNormalizedValueForList(fieldVisibility, (GraphQLList) type, inputValue, normalizedVariables); + return literalToNormalizedValueForList( + fieldVisibility, + (GraphQLList) type, + inputValue, + normalizedVariables); } return null; } @@ -364,7 +444,11 @@ private static Object literalToNormalizedValueForInputObject(GraphqlFieldVisibil } GraphQLInputType fieldType = type.getField(field.getName()).getType(); - Object fieldValue = literalToNormalizedValue(fieldVisibility, fieldType, field.getValue(), normalizedVariables); + Object fieldValue = literalToNormalizedValue( + fieldVisibility, + fieldType, + field.getValue(), + normalizedVariables); result.put(field.getName(), new NormalizedInputValue(simplePrint(fieldType), fieldValue)); } return result; @@ -377,11 +461,20 @@ private static List literalToNormalizedValueForList(GraphqlFieldVisibili if (value instanceof ArrayValue) { List result = new ArrayList<>(); for (Value valueInArray : ((ArrayValue) value).getValues()) { - result.add(literalToNormalizedValue(fieldVisibility, type.getWrappedType(), valueInArray, normalizedVariables)); + Object normalisedValue = literalToNormalizedValue( + fieldVisibility, + type.getWrappedType(), + valueInArray, + normalizedVariables); + result.add(normalisedValue); } return result; } else { - return Collections.singletonList(literalToNormalizedValue(fieldVisibility, type.getWrappedType(), value, normalizedVariables)); + return Collections.singletonList(literalToNormalizedValue( + 
fieldVisibility, + type.getWrappedType(), + value, + normalizedVariables)); } } diff --git a/src/main/java/graphql/execution/ValuesResolverConversion.java b/src/main/java/graphql/execution/ValuesResolverConversion.java index 9c32048dc9..83f5285b0d 100644 --- a/src/main/java/graphql/execution/ValuesResolverConversion.java +++ b/src/main/java/graphql/execution/ValuesResolverConversion.java @@ -3,6 +3,7 @@ import com.google.common.collect.ImmutableList; import graphql.GraphQLContext; import graphql.Internal; +import graphql.execution.values.InputInterceptor; import graphql.language.ArrayValue; import graphql.language.NullValue; import graphql.language.ObjectField; @@ -46,7 +47,7 @@ import static graphql.schema.GraphQLTypeUtil.isNonNull; import static graphql.schema.GraphQLTypeUtil.simplePrint; import static graphql.schema.GraphQLTypeUtil.unwrapNonNull; -import static graphql.schema.GraphQLTypeUtil.unwrapOne; +import static graphql.schema.GraphQLTypeUtil.unwrapOneAs; import static graphql.schema.visibility.DefaultGraphqlFieldVisibility.DEFAULT_FIELD_VISIBILITY; import static java.util.stream.Collectors.toList; @@ -68,13 +69,23 @@ static Object valueToLiteralImpl(GraphqlFieldVisibility fieldVisibility, if (valueMode == NORMALIZED) { return assertShouldNeverHappen("can't infer normalized structure"); } - return ValuesResolverLegacy.valueToLiteralLegacy(inputValueWithState.getValue(), type, graphqlContext, locale); + return ValuesResolverLegacy.valueToLiteralLegacy( + inputValueWithState.getValue(), + type, + graphqlContext, + locale); } if (inputValueWithState.isLiteral()) { return inputValueWithState.getValue(); } if (inputValueWithState.isExternal()) { - return externalValueToLiteral(fieldVisibility, inputValueWithState.getValue(), (GraphQLInputType) type, valueMode, graphqlContext, locale); + return externalValueToLiteral( + fieldVisibility, + inputValueWithState.getValue(), + (GraphQLInputType) type, + valueMode, + graphqlContext, + locale); } return 
assertShouldNeverHappen("unexpected value state " + inputValueWithState); } @@ -95,20 +106,47 @@ static Object externalValueToInternalValue(GraphqlFieldVisibility fieldVisibilit GraphQLInputType type, GraphQLContext graphqlContext, Locale locale) { - return externalValueToInternalValueImpl(fieldVisibility, type, externalValue, graphqlContext, locale); + InputInterceptor inputInterceptor = graphqlContext.get(InputInterceptor.class); + return externalValueToInternalValueImpl( + inputInterceptor, + fieldVisibility, + type, + externalValue, + graphqlContext, + locale); } @Nullable - static Object valueToInternalValueImpl(InputValueWithState inputValueWithState, GraphQLType type, GraphQLContext graphqlContext, Locale locale) { + static Object valueToInternalValueImpl( + InputInterceptor inputInterceptor, + InputValueWithState inputValueWithState, + GraphQLInputType inputType, + GraphQLContext graphqlContext, + Locale locale + ) { DefaultGraphqlFieldVisibility fieldVisibility = DEFAULT_FIELD_VISIBILITY; + if (inputValueWithState.isInternal()) { return inputValueWithState.getValue(); } if (inputValueWithState.isLiteral()) { - return literalToInternalValue(fieldVisibility, type, (Value) inputValueWithState.getValue(), CoercedVariables.emptyVariables(), graphqlContext, locale); + return literalToInternalValue( + inputInterceptor, + fieldVisibility, + inputType, + (Value) inputValueWithState.getValue(), + CoercedVariables.emptyVariables(), + graphqlContext, + locale); } if (inputValueWithState.isExternal()) { - return externalValueToInternalValueImpl(fieldVisibility, type, inputValueWithState.getValue(), graphqlContext, locale); + return externalValueToInternalValueImpl( + inputInterceptor, + fieldVisibility, + inputType, + inputValueWithState.getValue(), + graphqlContext, + locale); } return assertShouldNeverHappen("unexpected value state " + inputValueWithState); } @@ -116,26 +154,54 @@ static Object valueToInternalValueImpl(InputValueWithState inputValueWithState, /** * 
No validation: the external value is assumed to be valid. */ - static Object externalValueToLiteral(GraphqlFieldVisibility fieldVisibility, - @Nullable Object value, - GraphQLInputType type, - ValuesResolver.ValueMode valueMode, - GraphQLContext graphqlContext, - Locale locale) { + static Object externalValueToLiteral( + GraphqlFieldVisibility fieldVisibility, + @Nullable Object value, + GraphQLInputType type, + ValuesResolver.ValueMode valueMode, + GraphQLContext graphqlContext, + Locale locale + ) { if (value == null) { return newNullValue().build(); } if (GraphQLTypeUtil.isNonNull(type)) { - return externalValueToLiteral(fieldVisibility, value, (GraphQLInputType) unwrapNonNull(type), valueMode, graphqlContext, locale); + return externalValueToLiteral( + fieldVisibility, + value, + (GraphQLInputType) unwrapNonNull(type), + valueMode, + graphqlContext, + locale); } if (type instanceof GraphQLScalarType) { - return externalValueToLiteralForScalar((GraphQLScalarType) type, value, graphqlContext, locale); + return externalValueToLiteralForScalar( + (GraphQLScalarType) type, + value, + graphqlContext, + locale); } else if (type instanceof GraphQLEnumType) { - return externalValueToLiteralForEnum((GraphQLEnumType) type, value, graphqlContext, locale); + return externalValueToLiteralForEnum( + (GraphQLEnumType) type, + value, + graphqlContext, + locale); } else if (type instanceof GraphQLList) { - return externalValueToLiteralForList(fieldVisibility, (GraphQLList) type, value, valueMode, graphqlContext, locale); + return externalValueToLiteralForList( + fieldVisibility, + (GraphQLList) type, + value, + valueMode, + graphqlContext, + locale); } else if (type instanceof GraphQLInputObjectType) { - return externalValueToLiteralForObject(fieldVisibility, (GraphQLInputObjectType) type, value, valueMode, graphqlContext, locale); + return externalValueToLiteralForObject( + fieldVisibility, + (GraphQLInputObjectType) type, + value, + valueMode, + graphqlContext, + locale); } 
else { return assertShouldNeverHappen("unexpected type %s", type); } @@ -144,7 +210,12 @@ static Object externalValueToLiteral(GraphqlFieldVisibility fieldVisibility, /** * No validation */ - private static Value externalValueToLiteralForScalar(GraphQLScalarType scalarType, Object value, GraphQLContext graphqlContext, @NotNull Locale locale) { + private static Value externalValueToLiteralForScalar( + GraphQLScalarType scalarType, + Object value, + GraphQLContext graphqlContext, + @NotNull Locale locale + ) { return scalarType.getCoercing().valueToLiteral(value, graphqlContext, locale); } @@ -152,24 +223,39 @@ private static Value externalValueToLiteralForScalar(GraphQLScalarType scalar /** * No validation */ - private static Value externalValueToLiteralForEnum(GraphQLEnumType enumType, Object value, GraphQLContext graphqlContext, Locale locale) { - return enumType.valueToLiteral(value, graphqlContext, locale); + private static Value externalValueToLiteralForEnum( + GraphQLEnumType enumType, + Object value, + GraphQLContext graphqlContext, + Locale locale) { + return enumType.valueToLiteral( + value, + graphqlContext, + locale); } /** * No validation */ @SuppressWarnings("unchecked") - private static Object externalValueToLiteralForList(GraphqlFieldVisibility fieldVisibility, - GraphQLList listType, - Object value, - ValuesResolver.ValueMode valueMode, - GraphQLContext graphqlContext, - Locale locale) { + private static Object externalValueToLiteralForList( + GraphqlFieldVisibility fieldVisibility, + GraphQLList listType, + Object value, + ValuesResolver.ValueMode valueMode, + GraphQLContext graphqlContext, + Locale locale + ) { GraphQLInputType wrappedType = (GraphQLInputType) listType.getWrappedType(); List result = FpKit.toListOrSingletonList(value) .stream() - .map(val -> externalValueToLiteral(fieldVisibility, val, wrappedType, valueMode, graphqlContext, locale)) + .map(val -> externalValueToLiteral( + fieldVisibility, + val, + wrappedType, + valueMode, + 
graphqlContext, + locale)) .collect(toList()); if (valueMode == NORMALIZED) { return result; @@ -182,11 +268,14 @@ private static Object externalValueToLiteralForList(GraphqlFieldVisibility field * No validation */ @SuppressWarnings("unchecked") - private static Object externalValueToLiteralForObject(GraphqlFieldVisibility fieldVisibility, - GraphQLInputObjectType inputObjectType, - Object inputValue, - ValuesResolver.ValueMode valueMode, - GraphQLContext graphqlContext, Locale locale) { + private static Object externalValueToLiteralForObject( + GraphqlFieldVisibility fieldVisibility, + GraphQLInputObjectType inputObjectType, + Object inputValue, + ValuesResolver.ValueMode valueMode, + GraphQLContext graphqlContext, + Locale locale + ) { assertTrue(inputValue instanceof Map, () -> "Expect Map as input"); Map inputMap = (Map) inputValue; List fieldDefinitions = fieldVisibility.getFieldDefinitions(inputObjectType); @@ -200,7 +289,13 @@ private static Object externalValueToLiteralForObject(GraphqlFieldVisibility fie Object fieldValue = inputMap.getOrDefault(fieldName, null); if (!hasValue && inputFieldDefinition.hasSetDefaultValue()) { //TODO: consider valueMode - Object defaultValueLiteral = valueToLiteralImpl(fieldVisibility, inputFieldDefinition.getInputFieldDefaultValue(), fieldType, ValuesResolver.ValueMode.LITERAL, graphqlContext, locale); + Object defaultValueLiteral = valueToLiteralImpl( + fieldVisibility, + inputFieldDefinition.getInputFieldDefaultValue(), + fieldType, + ValuesResolver.ValueMode.LITERAL, + graphqlContext, + locale); if (valueMode == ValuesResolver.ValueMode.LITERAL) { normalizedResult.put(fieldName, new NormalizedInputValue(simplePrint(fieldType), defaultValueLiteral)); } else { @@ -214,7 +309,8 @@ private static Object externalValueToLiteralForObject(GraphqlFieldVisibility fie objectFields.add(newObjectField().name(fieldName).value(newNullValue().build()).build()); } } else { - Object literal = externalValueToLiteral(fieldVisibility, + 
Object literal = externalValueToLiteral( + fieldVisibility, fieldValue, fieldType, valueMode, @@ -236,10 +332,13 @@ private static Object externalValueToLiteralForObject(GraphqlFieldVisibility fie /** * performs validation too */ - static CoercedVariables externalValueToInternalValueForVariables(GraphQLSchema schema, - List variableDefinitions, - RawVariables rawVariables, - GraphQLContext graphqlContext, Locale locale) { + static CoercedVariables externalValueToInternalValueForVariables( + InputInterceptor inputInterceptor, + GraphQLSchema schema, + List variableDefinitions, + RawVariables rawVariables, + GraphQLContext graphqlContext, Locale locale + ) { GraphqlFieldVisibility fieldVisibility = schema.getCodeRegistry().getFieldVisibility(); Map coercedValues = new LinkedHashMap<>(); for (VariableDefinition variableDefinition : variableDefinitions) { @@ -247,12 +346,20 @@ static CoercedVariables externalValueToInternalValueForVariables(GraphQLSchema s String variableName = variableDefinition.getName(); GraphQLType variableType = TypeFromAST.getTypeFromAST(schema, variableDefinition.getType()); assertTrue(variableType instanceof GraphQLInputType); + GraphQLInputType variableInputType = (GraphQLInputType) variableType; // can be NullValue Value defaultValue = variableDefinition.getDefaultValue(); boolean hasValue = rawVariables.containsKey(variableName); Object value = rawVariables.get(variableName); if (!hasValue && defaultValue != null) { - Object coercedDefaultValue = literalToInternalValue(fieldVisibility, variableType, defaultValue, CoercedVariables.emptyVariables(), graphqlContext, locale); + Object coercedDefaultValue = literalToInternalValue( + inputInterceptor, + fieldVisibility, + variableInputType, + defaultValue, + CoercedVariables.emptyVariables(), + graphqlContext, + locale); coercedValues.put(variableName, coercedDefaultValue); } else if (isNonNull(variableType) && (!hasValue || value == null)) { throw new 
NonNullableValueCoercedAsNullException(variableDefinition, variableType); @@ -260,7 +367,13 @@ static CoercedVariables externalValueToInternalValueForVariables(GraphQLSchema s if (value == null) { coercedValues.put(variableName, null); } else { - Object coercedValue = externalValueToInternalValueImpl(fieldVisibility, variableType, value, graphqlContext, locale); + Object coercedValue = externalValueToInternalValueImpl( + inputInterceptor, + fieldVisibility, + variableInputType, + value, + graphqlContext, + locale); coercedValues.put(variableName, coercedValue); } } @@ -283,33 +396,69 @@ static CoercedVariables externalValueToInternalValueForVariables(GraphQLSchema s * Performs validation too */ @SuppressWarnings("unchecked") - static Object externalValueToInternalValueImpl(GraphqlFieldVisibility fieldVisibility, - GraphQLType graphQLType, - Object value, - GraphQLContext graphqlContext, - Locale locale) throws NonNullableValueCoercedAsNullException, CoercingParseValueException { + static Object externalValueToInternalValueImpl( + InputInterceptor inputInterceptor, + GraphqlFieldVisibility fieldVisibility, + GraphQLInputType graphQLType, + Object originalValue, + GraphQLContext graphqlContext, + Locale locale + ) throws NonNullableValueCoercedAsNullException, CoercingParseValueException { if (isNonNull(graphQLType)) { - Object returnValue = - externalValueToInternalValueImpl(fieldVisibility, unwrapOne(graphQLType), value, graphqlContext, locale); + Object returnValue = externalValueToInternalValueImpl( + inputInterceptor, + fieldVisibility, + unwrapOneAs(graphQLType), + originalValue, + graphqlContext, + locale); if (returnValue == null) { throw new NonNullableValueCoercedAsNullException(graphQLType); } return returnValue; } - + // + // we have a @Internal hook that allows input values to be changed before they are + // presented to scalars and enums - if it's not present then the cost is an extra `if` + // statement. 
We expect this to be NOT present most of the time + // + Object value = originalValue; + if (inputInterceptor != null) { + value = inputInterceptor.intercept(originalValue, graphQLType, graphqlContext, locale); + } if (value == null) { return null; } if (graphQLType instanceof GraphQLScalarType) { - return externalValueToInternalValueForScalar((GraphQLScalarType) graphQLType, value, graphqlContext, locale); + return externalValueToInternalValueForScalar( + (GraphQLScalarType) graphQLType, + value, + graphqlContext, + locale); } else if (graphQLType instanceof GraphQLEnumType) { - return externalValueToInternalValueForEnum((GraphQLEnumType) graphQLType, value, graphqlContext, locale); + return externalValueToInternalValueForEnum( + (GraphQLEnumType) graphQLType, + value, + graphqlContext, + locale); } else if (graphQLType instanceof GraphQLList) { - return externalValueToInternalValueForList(fieldVisibility, (GraphQLList) graphQLType, value, graphqlContext, locale); + return externalValueToInternalValueForList( + inputInterceptor, + fieldVisibility, + (GraphQLList) graphQLType, + value, + graphqlContext, + locale); } else if (graphQLType instanceof GraphQLInputObjectType) { if (value instanceof Map) { - return externalValueToInternalValueForObject(fieldVisibility, (GraphQLInputObjectType) graphQLType, (Map) value, graphqlContext, locale); + return externalValueToInternalValueForObject( + inputInterceptor, + fieldVisibility, + (GraphQLInputObjectType) graphQLType, + (Map) value, + graphqlContext, + locale); } else { throw CoercingParseValueException.newCoercingParseValueException() .message("Expected type 'Map' but was '" + value.getClass().getSimpleName() + @@ -324,11 +473,14 @@ static Object externalValueToInternalValueImpl(GraphqlFieldVisibility fieldVisib /** * performs validation */ - private static Object externalValueToInternalValueForObject(GraphqlFieldVisibility fieldVisibility, - GraphQLInputObjectType inputObjectType, - Map inputMap, - GraphQLContext 
graphqlContext, - Locale locale) throws NonNullableValueCoercedAsNullException, CoercingParseValueException { + private static Object externalValueToInternalValueForObject( + InputInterceptor inputInterceptor, + GraphqlFieldVisibility fieldVisibility, + GraphQLInputObjectType inputObjectType, + Map inputMap, + GraphQLContext graphqlContext, + Locale locale + ) throws NonNullableValueCoercedAsNullException, CoercingParseValueException { List fieldDefinitions = fieldVisibility.getFieldDefinitions(inputObjectType); List fieldNames = map(fieldDefinitions, GraphQLInputObjectField::getName); for (String providedFieldName : inputMap.keySet()) { @@ -346,9 +498,13 @@ private static Object externalValueToInternalValueForObject(GraphqlFieldVisibili boolean hasValue = inputMap.containsKey(fieldName); Object value = inputMap.getOrDefault(fieldName, null); if (!hasValue && inputFieldDefinition.hasSetDefaultValue()) { - Object coercedDefaultValue = defaultValueToInternalValue(fieldVisibility, + Object coercedDefaultValue = defaultValueToInternalValue( + inputInterceptor, + fieldVisibility, defaultValue, - fieldType, graphqlContext, locale); + fieldType, + graphqlContext, + locale); coercedValues.put(fieldName, coercedDefaultValue); } else if (isNonNull(fieldType) && (!hasValue || value == null)) { throw new NonNullableValueCoercedAsNullException(fieldName, emptyList(), fieldType); @@ -356,8 +512,13 @@ private static Object externalValueToInternalValueForObject(GraphqlFieldVisibili if (value == null) { coercedValues.put(fieldName, null); } else { - value = externalValueToInternalValueImpl(fieldVisibility, - fieldType, value, graphqlContext, locale); + value = externalValueToInternalValueImpl( + inputInterceptor, + fieldVisibility, + fieldType, + value, + graphqlContext, + locale); coercedValues.put(fieldName, value); } } @@ -368,30 +529,55 @@ private static Object externalValueToInternalValueForObject(GraphqlFieldVisibili /** * including validation */ - private static Object 
externalValueToInternalValueForScalar(GraphQLScalarType graphQLScalarType, Object value, GraphQLContext graphqlContext, Locale locale) throws CoercingParseValueException { - return graphQLScalarType.getCoercing().parseValue(value, graphqlContext, locale); + private static Object externalValueToInternalValueForScalar( + GraphQLScalarType graphQLScalarType, + Object value, + GraphQLContext graphqlContext, + Locale locale + ) throws CoercingParseValueException { + return graphQLScalarType.getCoercing().parseValue( + value, + graphqlContext, + locale); } /** * including validation */ - private static Object externalValueToInternalValueForEnum(GraphQLEnumType graphQLEnumType, Object value, GraphQLContext graphqlContext, Locale locale) throws CoercingParseValueException { - return graphQLEnumType.parseValue(value, graphqlContext, locale); + private static Object externalValueToInternalValueForEnum( + GraphQLEnumType graphQLEnumType, + Object value, + GraphQLContext graphqlContext, + Locale locale + ) throws CoercingParseValueException { + return graphQLEnumType.parseValue( + value, + graphqlContext, + locale); } /** * including validation */ - private static List externalValueToInternalValueForList(GraphqlFieldVisibility fieldVisibility, - GraphQLList graphQLList, - Object value, - GraphQLContext graphqlContext, - Locale locale) throws CoercingParseValueException, NonNullableValueCoercedAsNullException { - - GraphQLType wrappedType = graphQLList.getWrappedType(); + private static List externalValueToInternalValueForList( + InputInterceptor inputInterceptor, + GraphqlFieldVisibility fieldVisibility, + GraphQLList graphQLList, + Object value, + GraphQLContext graphqlContext, + Locale locale + ) throws CoercingParseValueException, NonNullableValueCoercedAsNullException { + + GraphQLInputType wrappedType = (GraphQLInputType) graphQLList.getWrappedType(); return FpKit.toListOrSingletonList(value) .stream() - .map(val -> externalValueToInternalValueImpl(fieldVisibility, 
wrappedType, val, graphqlContext, locale)) + .map(val -> externalValueToInternalValueImpl( + inputInterceptor, + fieldVisibility, + wrappedType, + val, + graphqlContext, + locale)) .collect(toList()); } @@ -407,38 +593,82 @@ private static List externalValueToInternalValueForList(GraphqlFieldVisibility f * * @return literal converted to an internal value */ - static Object literalToInternalValue(GraphqlFieldVisibility fieldVisibility, - GraphQLType type, - Value inputValue, - CoercedVariables coercedVariables, - GraphQLContext graphqlContext, - Locale locale) { - - return literalToInternalValueImpl(fieldVisibility, type, inputValue, coercedVariables, graphqlContext, locale); + static Object literalToInternalValue( + InputInterceptor inputInterceptor, + GraphqlFieldVisibility fieldVisibility, + GraphQLInputType type, + Value inputValue, + CoercedVariables coercedVariables, + GraphQLContext graphqlContext, + Locale locale + ) { + return literalToInternalValueImpl( + inputInterceptor, + fieldVisibility, + type, + inputValue, + coercedVariables, + graphqlContext, + locale); } @Nullable - private static Object literalToInternalValueImpl(GraphqlFieldVisibility fieldVisibility, GraphQLType type, Value inputValue, CoercedVariables coercedVariables, GraphQLContext graphqlContext, Locale locale) { + private static Object literalToInternalValueImpl( + InputInterceptor inputInterceptor, + GraphqlFieldVisibility fieldVisibility, + GraphQLType type, + Value inputValue, + CoercedVariables coercedVariables, + GraphQLContext graphqlContext, + Locale locale + ) { if (inputValue instanceof VariableReference) { - return coercedVariables.get(((VariableReference) inputValue).getName()); + String variableName = ((VariableReference) inputValue).getName(); + return coercedVariables.get(variableName); } if (inputValue instanceof NullValue) { return null; } if (type instanceof GraphQLScalarType) { - return literalToInternalValueForScalar(inputValue, (GraphQLScalarType) type, 
coercedVariables, graphqlContext, locale); + return literalToInternalValueForScalar( + inputValue, + (GraphQLScalarType) type, + coercedVariables, + graphqlContext, + locale); } if (isNonNull(type)) { - return literalToInternalValue(fieldVisibility, unwrapOne(type), inputValue, coercedVariables, graphqlContext, locale); + return literalToInternalValue( + inputInterceptor, + fieldVisibility, + unwrapOneAs(type), + inputValue, + coercedVariables, + graphqlContext, + locale); } if (type instanceof GraphQLInputObjectType) { - return literalToInternalValueForInputObject(fieldVisibility, (GraphQLInputObjectType) type, (ObjectValue) inputValue, coercedVariables, graphqlContext, locale); + return literalToInternalValueForInputObject( + inputInterceptor, + fieldVisibility, + (GraphQLInputObjectType) type, + (ObjectValue) inputValue, + coercedVariables, + graphqlContext, + locale); } if (type instanceof GraphQLEnumType) { return ((GraphQLEnumType) type).parseLiteral(inputValue, graphqlContext, locale); } if (isList(type)) { - return literalToInternalValueForList(fieldVisibility, (GraphQLList) type, inputValue, coercedVariables, graphqlContext, locale); + return literalToInternalValueForList( + inputInterceptor, + fieldVisibility, + (GraphQLList) type, + inputValue, + coercedVariables, + graphqlContext, + locale); } return null; } @@ -446,47 +676,74 @@ private static Object literalToInternalValueImpl(GraphqlFieldVisibility fieldVis /** * no validation */ - private static Object literalToInternalValueForScalar(Value inputValue, GraphQLScalarType scalarType, CoercedVariables coercedVariables, GraphQLContext graphqlContext, @NotNull Locale locale) { + private static Object literalToInternalValueForScalar( + Value inputValue, + GraphQLScalarType scalarType, + CoercedVariables coercedVariables, + GraphQLContext graphqlContext, + @NotNull Locale locale + ) { // the CoercingParseLiteralException exception that could happen here has been validated earlier via ValidationUtil - return 
scalarType.getCoercing().parseLiteral(inputValue, coercedVariables, graphqlContext, locale); + return scalarType.getCoercing().parseLiteral( + inputValue, + coercedVariables, + graphqlContext, + locale); } /** * no validation */ - private static Object literalToInternalValueForList(GraphqlFieldVisibility fieldVisibility, - GraphQLList graphQLList, - Value value, - CoercedVariables coercedVariables, - GraphQLContext graphqlContext, - Locale locale) { - + private static Object literalToInternalValueForList( + InputInterceptor inputInterceptor, + GraphqlFieldVisibility fieldVisibility, + GraphQLList graphQLList, + Value value, + CoercedVariables coercedVariables, + GraphQLContext graphqlContext, + Locale locale + ) { + + GraphQLInputType inputType = (GraphQLInputType) graphQLList.getWrappedType(); if (value instanceof ArrayValue) { ArrayValue arrayValue = (ArrayValue) value; List result = new ArrayList<>(); for (Value singleValue : arrayValue.getValues()) { - result.add(literalToInternalValue(fieldVisibility, graphQLList.getWrappedType(), singleValue, coercedVariables, graphqlContext, locale)); + result.add(literalToInternalValue( + inputInterceptor, + fieldVisibility, + inputType, + singleValue, + coercedVariables, + graphqlContext, + locale)); } return result; } else { return Collections.singletonList( - literalToInternalValue(fieldVisibility, - graphQLList.getWrappedType(), + literalToInternalValue( + inputInterceptor, + fieldVisibility, + inputType, value, coercedVariables, - graphqlContext, locale)); + graphqlContext, + locale)); } } /** * no validation */ - private static Object literalToInternalValueForInputObject(GraphqlFieldVisibility fieldVisibility, - GraphQLInputObjectType type, - ObjectValue inputValue, - CoercedVariables coercedVariables, - GraphQLContext graphqlContext, - Locale locale) { + private static Object literalToInternalValueForInputObject( + InputInterceptor inputInterceptor, + GraphqlFieldVisibility fieldVisibility, + GraphQLInputObjectType 
type, + ObjectValue inputValue, + CoercedVariables coercedVariables, + GraphQLContext graphqlContext, + Locale locale + ) { Map coercedValues = new LinkedHashMap<>(); Map inputFieldsByName = mapObjectValueFieldsByName(inputValue); @@ -508,10 +765,13 @@ private static Object literalToInternalValueForInputObject(GraphqlFieldVisibilit value = fieldValue; } if (!hasValue && inputFieldDefinition.hasSetDefaultValue()) { - Object coercedDefaultValue = defaultValueToInternalValue(fieldVisibility, + Object coercedDefaultValue = defaultValueToInternalValue( + inputInterceptor, + fieldVisibility, inputFieldDefinition.getInputFieldDefaultValue(), fieldType, - graphqlContext, locale); + graphqlContext, + locale); coercedValues.put(fieldName, coercedDefaultValue); } else if (isNonNull(fieldType) && (!hasValue || isNullValue(value))) { return assertShouldNeverHappen("Should have been validated before"); @@ -521,7 +781,14 @@ private static Object literalToInternalValueForInputObject(GraphqlFieldVisibilit } else if (fieldValue instanceof VariableReference) { coercedValues.put(fieldName, value); } else { - value = literalToInternalValue(fieldVisibility, fieldType, fieldValue, coercedVariables, graphqlContext, locale); + value = literalToInternalValue( + inputInterceptor, + fieldVisibility, + fieldType, + fieldValue, + coercedVariables, + graphqlContext, + locale); coercedValues.put(fieldName, value); } } @@ -547,21 +814,37 @@ private static Map mapObjectValueFieldsByName(ObjectValue i return inputValueFieldsByName; } - static Object defaultValueToInternalValue(GraphqlFieldVisibility fieldVisibility, - InputValueWithState defaultValue, - GraphQLInputType type, - GraphQLContext graphqlContext, - Locale locale) { + static Object defaultValueToInternalValue( + InputInterceptor inputInterceptor, + GraphqlFieldVisibility fieldVisibility, + InputValueWithState defaultValue, + GraphQLInputType type, + GraphQLContext graphqlContext, + Locale locale + ) { if (defaultValue.isInternal()) { 
return defaultValue.getValue(); } if (defaultValue.isLiteral()) { // default value literals can't reference variables, this is why the variables are empty - return literalToInternalValue(fieldVisibility, type, (Value) defaultValue.getValue(), CoercedVariables.emptyVariables(), graphqlContext, locale); + return literalToInternalValue( + inputInterceptor, + fieldVisibility, + type, + (Value) defaultValue.getValue(), + CoercedVariables.emptyVariables(), + graphqlContext, + locale); } if (defaultValue.isExternal()) { // performs validation too - return externalValueToInternalValueImpl(fieldVisibility, type, defaultValue.getValue(), graphqlContext, locale); + return externalValueToInternalValueImpl( + inputInterceptor, + fieldVisibility, + type, + defaultValue.getValue(), + graphqlContext, + locale); } return assertShouldNeverHappen(); } diff --git a/src/main/java/graphql/execution/conditional/ConditionalNodeDecision.java b/src/main/java/graphql/execution/conditional/ConditionalNodeDecision.java new file mode 100644 index 0000000000..69afc6bbc2 --- /dev/null +++ b/src/main/java/graphql/execution/conditional/ConditionalNodeDecision.java @@ -0,0 +1,23 @@ +package graphql.execution.conditional; + +import graphql.ExperimentalApi; + +/** + * This callback interface allows custom implementations to decide if a field is included in a query or not. + *

+ * The default `@skip / @include` is built in, but you can create your own implementations to allow you to make + * decisions on whether fields are considered part of a query. + */ +@ExperimentalApi +public interface ConditionalNodeDecision { + + /** + * This is called to decide if a {@link graphql.language.Node} should be included or not + * + * @param decisionEnv ghe environment you can use to make the decision + * + * @return true if the node should be included or false if it should be excluded + */ + boolean shouldInclude(ConditionalNodeDecisionEnvironment decisionEnv); +} + diff --git a/src/main/java/graphql/execution/conditional/ConditionalNodeDecisionEnvironment.java b/src/main/java/graphql/execution/conditional/ConditionalNodeDecisionEnvironment.java new file mode 100644 index 0000000000..e0e116a1b4 --- /dev/null +++ b/src/main/java/graphql/execution/conditional/ConditionalNodeDecisionEnvironment.java @@ -0,0 +1,48 @@ +package graphql.execution.conditional; + +import graphql.GraphQLContext; +import graphql.execution.CoercedVariables; +import graphql.language.Directive; +import graphql.language.DirectivesContainer; +import graphql.schema.GraphQLSchema; +import org.jetbrains.annotations.Nullable; + +import java.util.List; + +/** + * The parameters given to a {@link ConditionalNodeDecision} + */ +public interface ConditionalNodeDecisionEnvironment { + + /** + * This is an AST {@link graphql.language.Node} that has directives on it. + * {@link graphql.language.Field}, @{@link graphql.language.FragmentSpread} and + * {@link graphql.language.InlineFragment} are examples of nodes + * that can be conditionally included. 
+ * + * @return the AST element in question + */ + DirectivesContainer getDirectivesContainer(); + + /** + * @return the list of directives associated with the {@link #getDirectivesContainer()} + */ + default List getDirectives() { + return getDirectivesContainer().getDirectives(); + } + + /** + * @return a map of the current variables + */ + CoercedVariables getVariables(); + + /** + * @return the {@link GraphQLSchema} in question - this can be null for certain call paths + */ + @Nullable GraphQLSchema getGraphQlSchema(); + + /** + * @return a graphql context + */ + GraphQLContext getGraphQLContext(); +} diff --git a/src/main/java/graphql/execution/conditional/ConditionalNodes.java b/src/main/java/graphql/execution/conditional/ConditionalNodes.java new file mode 100644 index 0000000000..9c90deead0 --- /dev/null +++ b/src/main/java/graphql/execution/conditional/ConditionalNodes.java @@ -0,0 +1,102 @@ +package graphql.execution.conditional; + +import graphql.Assert; +import graphql.GraphQLContext; +import graphql.Internal; +import graphql.execution.CoercedVariables; +import graphql.execution.ValuesResolver; +import graphql.language.Directive; +import graphql.language.DirectivesContainer; +import graphql.language.NodeUtil; +import graphql.schema.GraphQLSchema; + +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static graphql.Directives.IncludeDirective; +import static graphql.Directives.SkipDirective; + +@Internal +public class ConditionalNodes { + + + public boolean shouldInclude(DirectivesContainer element, + Map variables, + GraphQLSchema graphQLSchema, + GraphQLContext graphQLContext + ) { + // + // call the base @include / @skip first + if (!shouldInclude(variables, element.getDirectives())) { + return false; + } + // + // if they have declared a decision callback, then we will use it but we expect this to be mostly + // empty and hence the cost is a map lookup. 
+ if (graphQLContext != null) { + ConditionalNodeDecision conditionalDecision = graphQLContext.get(ConditionalNodeDecision.class); + if (conditionalDecision != null) { + return customShouldInclude(variables, element, graphQLSchema, graphQLContext, conditionalDecision); + } + } + // if no one says otherwise, the node is considered included + return true; + } + + private boolean customShouldInclude(Map variables, + DirectivesContainer element, + GraphQLSchema graphQLSchema, + GraphQLContext graphQLContext, + ConditionalNodeDecision conditionalDecision + ) { + CoercedVariables coercedVariables = CoercedVariables.of(variables); + return conditionalDecision.shouldInclude(new ConditionalNodeDecisionEnvironment() { + @Override + public DirectivesContainer getDirectivesContainer() { + return element; + } + + @Override + public CoercedVariables getVariables() { + return coercedVariables; + } + + @Override + public GraphQLSchema getGraphQlSchema() { + return graphQLSchema; + } + + @Override + public GraphQLContext getGraphQLContext() { + return graphQLContext; + } + }); + } + + + private boolean shouldInclude(Map variables, List directives) { + // shortcut on no directives + if (directives.isEmpty()) { + return true; + } + boolean skip = getDirectiveResult(variables, directives, SkipDirective.getName(), false); + if (skip) { + return false; + } + + return getDirectiveResult(variables, directives, IncludeDirective.getName(), true); + } + + private boolean getDirectiveResult(Map variables, List directives, String directiveName, boolean defaultValue) { + Directive foundDirective = NodeUtil.findNodeByName(directives, directiveName); + if (foundDirective != null) { + Map argumentValues = ValuesResolver.getArgumentValues(SkipDirective.getArguments(), foundDirective.getArguments(), CoercedVariables.of(variables), GraphQLContext.getDefault(), Locale.getDefault()); + Object flag = argumentValues.get("if"); + Assert.assertTrue(flag instanceof Boolean, () -> String.format("The '%s' 
directive MUST have a value for the 'if' argument", directiveName)); + return (Boolean) flag; + } + return defaultValue; + } + +} diff --git a/src/main/java/graphql/execution/directives/QueryDirectives.java b/src/main/java/graphql/execution/directives/QueryDirectives.java index 97142f2069..a7fafdad9b 100644 --- a/src/main/java/graphql/execution/directives/QueryDirectives.java +++ b/src/main/java/graphql/execution/directives/QueryDirectives.java @@ -1,11 +1,16 @@ package graphql.execution.directives; import graphql.DeprecatedAt; +import graphql.GraphQLContext; import graphql.PublicApi; +import graphql.execution.CoercedVariables; +import graphql.execution.MergedField; import graphql.language.Field; import graphql.schema.GraphQLDirective; +import graphql.schema.GraphQLSchema; import java.util.List; +import java.util.Locale; import java.util.Map; /** @@ -89,4 +94,28 @@ public interface QueryDirectives { @Deprecated @DeprecatedAt("2022-02-24") Map> getImmediateDirectivesByField(); + + /** + * @return a builder of {@link QueryDirectives} + */ + static Builder newQueryDirectives() { + return new QueryDirectivesBuilder(); + } + + interface Builder { + + Builder schema(GraphQLSchema schema); + + Builder mergedField(MergedField mergedField); + + Builder field(Field field); + + Builder coercedVariables(CoercedVariables coercedVariables); + + Builder graphQLContext(GraphQLContext graphQLContext); + + Builder locale(Locale locale); + + QueryDirectives build(); + } } diff --git a/src/main/java/graphql/execution/directives/QueryDirectivesBuilder.java b/src/main/java/graphql/execution/directives/QueryDirectivesBuilder.java new file mode 100644 index 0000000000..80f80d1a06 --- /dev/null +++ b/src/main/java/graphql/execution/directives/QueryDirectivesBuilder.java @@ -0,0 +1,62 @@ +package graphql.execution.directives; + +import graphql.GraphQLContext; +import graphql.Internal; +import graphql.execution.CoercedVariables; +import graphql.execution.MergedField; +import 
graphql.language.Field; +import graphql.schema.GraphQLSchema; + +import java.util.Locale; + +@Internal +public class QueryDirectivesBuilder implements QueryDirectives.Builder { + + private MergedField mergedField; + private GraphQLSchema schema; + private CoercedVariables coercedVariables = CoercedVariables.emptyVariables(); + private GraphQLContext graphQLContext = GraphQLContext.getDefault(); + private Locale locale = Locale.getDefault(); + + @Override + public QueryDirectives.Builder schema(GraphQLSchema schema) { + this.schema = schema; + return this; + } + + @Override + public QueryDirectives.Builder mergedField(MergedField mergedField) { + this.mergedField = mergedField; + return this; + } + + @Override + public QueryDirectives.Builder field(Field field) { + this.mergedField = MergedField.newMergedField(field).build(); + return this; + } + + @Override + public QueryDirectives.Builder coercedVariables(CoercedVariables coercedVariables) { + this.coercedVariables = coercedVariables; + return this; + } + + @Override + public QueryDirectives.Builder graphQLContext(GraphQLContext graphQLContext) { + this.graphQLContext = graphQLContext; + return this; + } + + @Override + public QueryDirectives.Builder locale(Locale locale) { + this.locale = locale; + return this; + } + + + @Override + public QueryDirectives build() { + return new QueryDirectivesImpl(mergedField, schema, coercedVariables.toMap(), graphQLContext, locale); + } +} diff --git a/src/main/java/graphql/execution/instrumentation/ChainedInstrumentation.java b/src/main/java/graphql/execution/instrumentation/ChainedInstrumentation.java index f8fdef568d..bfafc49ed7 100644 --- a/src/main/java/graphql/execution/instrumentation/ChainedInstrumentation.java +++ b/src/main/java/graphql/execution/instrumentation/ChainedInstrumentation.java @@ -340,7 +340,7 @@ public CompletableFuture instrumentExecutionResult(ExecutionRes @NotNull @Override public CompletableFuture instrumentExecutionResult(ExecutionResult 
executionResult, InstrumentationExecutionParameters parameters, InstrumentationState state) { - CompletableFuture> resultsFuture = Async.eachSequentially(instrumentations, (instrumentation, index, prevResults) -> { + CompletableFuture> resultsFuture = Async.eachSequentially(instrumentations, (instrumentation, prevResults) -> { InstrumentationState specificState = getSpecificState(instrumentation, state); ExecutionResult lastResult = prevResults.size() > 0 ? prevResults.get(prevResults.size() - 1) : executionResult; return instrumentation.instrumentExecutionResult(lastResult, parameters, specificState); diff --git a/src/main/java/graphql/execution/values/InputInterceptor.java b/src/main/java/graphql/execution/values/InputInterceptor.java new file mode 100644 index 0000000000..b5420e5035 --- /dev/null +++ b/src/main/java/graphql/execution/values/InputInterceptor.java @@ -0,0 +1,42 @@ +package graphql.execution.values; + +import graphql.GraphQLContext; +import graphql.Internal; +import graphql.schema.GraphQLInputType; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import java.util.Locale; + +/** + * This INTERNAL class can be used to intercept input values before they are coerced into runtime values + * by the {@link graphql.execution.ValuesResolver} code. + *

+ * You could use it to observe input values and optionally change them. Perhaps some sort of migration of data + * needs to happen, and you need to know what data you are getting in type terms. This would help you do that. + *

+ * If this is present in a {@link GraphQLContext} it will be called. By default, it is not present + * so no calls to it will be made. + *

+ * There is a performance aspect to using this code. If you take too long to return values then you + * are going to slow down your system depending on how big your input objects are. + */ +@Internal +public interface InputInterceptor { + + /** + * This is called with a value that is to be presented to the {@link graphql.execution.ValuesResolver} code. The values + * may be scalars, enums and complex input types. + * + * @param value the input value that can be null + * @param graphQLType the input type + * @param graphqlContext the graphql context in play + * @param locale the locale in play + * + * @return a value that may differ from the original value + */ + Object intercept(@Nullable Object value, + @NotNull GraphQLInputType graphQLType, + @NotNull GraphQLContext graphqlContext, + @NotNull Locale locale); +} diff --git a/src/main/java/graphql/execution/values/legacycoercing/LegacyCoercingInputInterceptor.java b/src/main/java/graphql/execution/values/legacycoercing/LegacyCoercingInputInterceptor.java new file mode 100644 index 0000000000..e6f9f5d363 --- /dev/null +++ b/src/main/java/graphql/execution/values/legacycoercing/LegacyCoercingInputInterceptor.java @@ -0,0 +1,179 @@ +package graphql.execution.values.legacycoercing; + +import graphql.GraphQLContext; +import graphql.Scalars; +import graphql.execution.values.InputInterceptor; +import graphql.scalar.CoercingUtil; +import graphql.schema.GraphQLInputType; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import java.math.BigDecimal; +import java.util.Locale; +import java.util.function.BiConsumer; +import java.util.function.BiFunction; + +import static graphql.Assert.assertNotNull; +import static graphql.scalar.CoercingUtil.isNumberIsh; + +public class LegacyCoercingInputInterceptor implements InputInterceptor { + + /** + * This will ONLY observe legacy values and invoke the callback when it gets one. 
you can use this to enumerate how many + * legacy values are hitting you graphql implementation + * + * @param observerCallback a callback allowing you to observe a legacy scalar value + * + * @return an InputInterceptor that only observes values + */ + public static LegacyCoercingInputInterceptor observesValues(BiConsumer observerCallback) { + return new LegacyCoercingInputInterceptor(((input, graphQLInputType) -> { + observerCallback.accept(input, graphQLInputType); + return input; + })); + } + + /** + * This will change legacy values as it encounters them to something acceptable to the more strict coercion rules. + * + * @return an InputInterceptor that migrates values to a more strict value + */ + public static LegacyCoercingInputInterceptor migratesValues() { + return migratesValues((input, type) -> { + }); + } + + /** + * This will change legacy values as it encounters them to something acceptable to the more strict coercion rules. + * The observer callback will be invoked if it detects a legacy value that it will change. 
+ * + * @param observerCallback a callback allowing you to observe a legacy scalar value before it is migrated + * + * @return an InputInterceptor that both observes values and migrates them to a more strict value + */ + public static LegacyCoercingInputInterceptor migratesValues(BiConsumer observerCallback) { + return new LegacyCoercingInputInterceptor(((input, graphQLInputType) -> { + observerCallback.accept(input, graphQLInputType); + if (Scalars.GraphQLBoolean.equals(graphQLInputType)) { + return coerceLegacyBooleanValue(input); + } + if (Scalars.GraphQLFloat.equals(graphQLInputType)) { + return coerceLegacyFloatValue(input); + } + if (Scalars.GraphQLInt.equals(graphQLInputType)) { + return coerceLegacyIntValue(input); + } + if (Scalars.GraphQLString.equals(graphQLInputType)) { + return coerceLegacyStringValue(input); + } + return input; + })); + } + + private final BiFunction behavior; + + private LegacyCoercingInputInterceptor(BiFunction behavior) { + this.behavior = assertNotNull(behavior); + } + + @Override + public Object intercept(@Nullable Object input, @NotNull GraphQLInputType graphQLType, @NotNull GraphQLContext graphqlContext, @NotNull Locale locale) { + if (isLegacyValue(input, graphQLType)) { + // we ONLY apply the new behavior IF it's an old acceptable legacy value. 
+ // so for compliant values - we change nothing and invoke no behaviour + // and for values that would not coerce anyway, we also invoke no behavior + return behavior.apply(input, graphQLType); + } + return input; + } + + @SuppressWarnings("RedundantIfStatement") + static boolean isLegacyValue(Object input, GraphQLInputType graphQLType) { + if (Scalars.GraphQLBoolean.equals(graphQLType)) { + return isLegacyBooleanValue(input); + } else if (Scalars.GraphQLFloat.equals(graphQLType)) { + return isLegacyFloatValue(input); + } else if (Scalars.GraphQLInt.equals(graphQLType)) { + return isLegacyIntValue(input); + } else if (Scalars.GraphQLString.equals(graphQLType)) { + return isLegacyStringValue(input); + } else { + return false; + } + } + + static boolean isLegacyBooleanValue(Object input) { + return input instanceof String || CoercingUtil.isNumberIsh(input); + } + + static boolean isLegacyFloatValue(Object input) { + return input instanceof String; + } + + static boolean isLegacyIntValue(Object input) { + return input instanceof String; + } + + static boolean isLegacyStringValue(Object input) { + return !(input instanceof String); + } + + static Object coerceLegacyBooleanValue(Object input) { + if (input instanceof String) { + String lStr = ((String) input).toLowerCase(); + if (lStr.equals("true")) { + return true; + } + if (lStr.equals("false")) { + return false; + } + return input; + } else if (isNumberIsh(input)) { + BigDecimal value; + try { + value = new BigDecimal(input.toString()); + } catch (NumberFormatException e) { + // this should never happen because String is handled above + return input; + } + return value.compareTo(BigDecimal.ZERO) != 0; + } + // unchanged + return input; + } + + static Object coerceLegacyFloatValue(Object input) { + if (isNumberIsh(input)) { + BigDecimal value; + try { + value = new BigDecimal(input.toString()); + } catch (NumberFormatException e) { + return input; + } + return value.doubleValue(); + } + return input; + } + + static 
Object coerceLegacyIntValue(Object input) { + if (isNumberIsh(input)) { + BigDecimal value; + try { + value = new BigDecimal(input.toString()); + } catch (NumberFormatException e) { + return input; + } + try { + return value.intValueExact(); + } catch (ArithmeticException e) { + return input; + } + } + return input; + } + + + static Object coerceLegacyStringValue(Object input) { + return String.valueOf(input); + } +} diff --git a/src/main/java/graphql/extensions/ExtensionsBuilder.java b/src/main/java/graphql/extensions/ExtensionsBuilder.java index 6c3a982e0c..fe37c64743 100644 --- a/src/main/java/graphql/extensions/ExtensionsBuilder.java +++ b/src/main/java/graphql/extensions/ExtensionsBuilder.java @@ -55,6 +55,12 @@ public static ExtensionsBuilder newExtensionsBuilder(ExtensionsMerger extensions return new ExtensionsBuilder(extensionsMerger); } + /** + * @return how many extension changes have been made so far + */ + public int getChangeCount() { + return changes.size(); + } /** * Adds new values into the extension builder @@ -65,7 +71,9 @@ public static ExtensionsBuilder newExtensionsBuilder(ExtensionsMerger extensions */ public ExtensionsBuilder addValues(@NotNull Map newValues) { assertNotNull(newValues); - changes.add(newValues); + if (!newValues.isEmpty()) { + changes.add(newValues); + } return this; } diff --git a/src/main/java/graphql/normalized/ExecutableNormalizedField.java b/src/main/java/graphql/normalized/ExecutableNormalizedField.java index 98e527811f..41ddd594b3 100644 --- a/src/main/java/graphql/normalized/ExecutableNormalizedField.java +++ b/src/main/java/graphql/normalized/ExecutableNormalizedField.java @@ -5,22 +5,25 @@ import graphql.Assert; import graphql.Internal; import graphql.Mutable; +import graphql.PublicApi; import graphql.collect.ImmutableKit; import graphql.introspection.Introspection; import graphql.language.Argument; import graphql.schema.GraphQLFieldDefinition; import graphql.schema.GraphQLInterfaceType; +import 
graphql.schema.GraphQLNamedOutputType; import graphql.schema.GraphQLObjectType; import graphql.schema.GraphQLOutputType; import graphql.schema.GraphQLSchema; -import graphql.schema.GraphQLType; import graphql.schema.GraphQLUnionType; import graphql.util.FpKit; +import graphql.util.MutableRef; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.LinkedList; @@ -38,9 +41,13 @@ import static java.util.stream.Collectors.toSet; /** - * Intentionally Mutable + * An {@link ExecutableNormalizedField} represents a field in an executable graphql operation. Its models what + * could be executed during a given operation. + *

+ * This class is intentionally mutable for performance reasons since building immutable parent child + * objects is too expensive. */ -@Internal +@PublicApi @Mutable public class ExecutableNormalizedField { private final String alias; @@ -70,7 +77,7 @@ private ExecutableNormalizedField(Builder builder) { } /** - * Determines whether this NF needs a fragment to select the field. However, it considers the parent + * Determines whether this {@link ExecutableNormalizedField} needs a fragment to select the field. However, it considers the parent * output type when determining whether it needs a fragment. *

* Consider the following schema @@ -106,7 +113,7 @@ private ExecutableNormalizedField(Builder builder) { * } * *

- * Then we would get the following normalized operation tree + * Then we would get the following {@link ExecutableNormalizedOperation} * *

      * -Query.animal: Animal
@@ -119,10 +126,9 @@ private ExecutableNormalizedField(Builder builder) {
      * our question whether this is conditional?
      * 

* We MUST consider that the output type of the {@code parent} field is {@code Animal} and - * NOT {@code Cat} or {@code Dog} as their respective impls would say. + * NOT {@code Cat} or {@code Dog} as their respective implementations would say. * * @param schema - the graphql schema in play - * * @return true if the field is conditional */ public boolean isConditional(@NotNull GraphQLSchema schema) { @@ -130,33 +136,13 @@ public boolean isConditional(@NotNull GraphQLSchema schema) { return false; } - /** - * checking if we have an interface which can be used as an unconditional parent type - */ - ImmutableList parentTypes = ImmutableKit.map(parent.getFieldDefinitions(schema), fd -> unwrapAll(fd.getType())); - - Set interfacesImplementedByAllParents = null; - for (GraphQLType parentType : parentTypes) { - List toAdd = new ArrayList<>(); - if (parentType instanceof GraphQLObjectType) { - toAdd.addAll((List) ((GraphQLObjectType) parentType).getInterfaces()); - } else if (parentType instanceof GraphQLInterfaceType) { - toAdd.add((GraphQLInterfaceType) parentType); - toAdd.addAll((List) ((GraphQLInterfaceType) parentType).getInterfaces()); - } - if (interfacesImplementedByAllParents == null) { - interfacesImplementedByAllParents = new LinkedHashSet<>(toAdd); - } else { - interfacesImplementedByAllParents.retainAll(toAdd); - } - } - for (GraphQLInterfaceType parentInterfaceType : interfacesImplementedByAllParents) { - List implementations = schema.getImplementations(parentInterfaceType); + for (GraphQLInterfaceType commonParentOutputInterface : parent.getInterfacesCommonToAllOutputTypes(schema)) { + List implementations = schema.getImplementations(commonParentOutputInterface); // __typename - if (this.fieldName.equals(Introspection.TypeNameMetaFieldDef.getName()) && implementations.size() == objectTypeNames.size()) { + if (fieldName.equals(Introspection.TypeNameMetaFieldDef.getName()) && implementations.size() == objectTypeNames.size()) { return false; } - if 
(parentInterfaceType.getField(fieldName) == null) { + if (commonParentOutputInterface.getField(fieldName) == null) { continue; } if (implementations.size() == objectTypeNames.size()) { @@ -164,20 +150,16 @@ public boolean isConditional(@NotNull GraphQLSchema schema) { } } - /** - *__typename is the only field in a union type that CAN be NOT conditional - */ - List fieldDefinitions = parent.getFieldDefinitions(schema); - if (unwrapAll(fieldDefinitions.get(0).getType()) instanceof GraphQLUnionType) { - GraphQLUnionType parentOutputTypeAsUnion = (GraphQLUnionType) unwrapAll(fieldDefinitions.get(0).getType()); - if (this.fieldName.equals(Introspection.TypeNameMetaFieldDef.getName()) && objectTypeNames.size() == parentOutputTypeAsUnion.getTypes().size()) { + // __typename is the only field in a union type that CAN be NOT conditional + GraphQLFieldDefinition parentFieldDef = parent.getOneFieldDefinition(schema); + if (unwrapAll(parentFieldDef.getType()) instanceof GraphQLUnionType) { + GraphQLUnionType parentOutputTypeAsUnion = (GraphQLUnionType) unwrapAll(parentFieldDef.getType()); + if (fieldName.equals(Introspection.TypeNameMetaFieldDef.getName()) && objectTypeNames.size() == parentOutputTypeAsUnion.getTypes().size()) { return false; // Not conditional } } - /** - * This means there is no Union or Interface which could serve as unconditional parent - */ + // This means there is no Union or Interface which could serve as unconditional parent if (objectTypeNames.size() > 1) { return true; // Conditional } @@ -186,7 +168,7 @@ public boolean isConditional(@NotNull GraphQLSchema schema) { } GraphQLObjectType oneObjectType = (GraphQLObjectType) schema.getType(objectTypeNames.iterator().next()); - return unwrapAll(parent.getFieldDefinitions(schema).get(0).getType()) != oneObjectType; + return unwrapAll(parentFieldDef.getType()) != oneObjectType; } public boolean hasChildren() { @@ -201,24 +183,44 @@ public GraphQLOutputType getType(GraphQLSchema schema) { } public List 
getTypes(GraphQLSchema schema) { - List fieldTypes = ImmutableKit.map(getFieldDefinitions(schema), fd -> fd.getType()); - return fieldTypes; + return ImmutableKit.map(getFieldDefinitions(schema), fd -> fd.getType()); } - - public List getFieldDefinitions(GraphQLSchema schema) { - GraphQLFieldDefinition fieldDefinition = resolveIntrospectionField(schema, objectTypeNames, fieldName); + public void forEachFieldDefinition(GraphQLSchema schema, Consumer consumer) { + var fieldDefinition = resolveIntrospectionField(schema, objectTypeNames, fieldName); if (fieldDefinition != null) { - return ImmutableList.of(fieldDefinition); + consumer.accept(fieldDefinition); + return; } - ImmutableList.Builder builder = ImmutableList.builder(); + for (String objectTypeName : objectTypeNames) { GraphQLObjectType type = (GraphQLObjectType) assertNotNull(schema.getType(objectTypeName)); - builder.add(assertNotNull(type.getField(fieldName), () -> String.format("no field %s found for type %s", fieldName, objectTypeNames.iterator().next()))); + consumer.accept(assertNotNull(type.getField(fieldName), () -> String.format("No field %s found for type %s", fieldName, objectTypeName))); } + } + + public List getFieldDefinitions(GraphQLSchema schema) { + ImmutableList.Builder builder = ImmutableList.builder(); + forEachFieldDefinition(schema, builder::add); return builder.build(); } + /** + * This is NOT public as it is not recommended usage. + *

+ * Internally there are cases where we know it is safe to use this, so this exists. + */ + private GraphQLFieldDefinition getOneFieldDefinition(GraphQLSchema schema) { + var fieldDefinition = resolveIntrospectionField(schema, objectTypeNames, fieldName); + if (fieldDefinition != null) { + return fieldDefinition; + } + + String objectTypeName = objectTypeNames.iterator().next(); + GraphQLObjectType type = (GraphQLObjectType) assertNotNull(schema.getType(objectTypeName)); + return assertNotNull(type.getField(fieldName), () -> String.format("No field %s found for type %s", fieldName, objectTypeName)); + } + private static GraphQLFieldDefinition resolveIntrospectionField(GraphQLSchema schema, Set objectTypeNames, String fieldName) { if (fieldName.equals(schema.getIntrospectionTypenameFieldDefinition().getName())) { return schema.getIntrospectionTypenameFieldDefinition(); @@ -232,39 +234,58 @@ private static GraphQLFieldDefinition resolveIntrospectionField(GraphQLSchema sc return null; } + @Internal public void addObjectTypeNames(Collection objectTypeNames) { this.objectTypeNames.addAll(objectTypeNames); } + @Internal public void setObjectTypeNames(Collection objectTypeNames) { this.objectTypeNames.clear(); this.objectTypeNames.addAll(objectTypeNames); } + @Internal public void addChild(ExecutableNormalizedField executableNormalizedField) { this.children.add(executableNormalizedField); } + @Internal public void clearChildren() { this.children.clear(); } /** - * All merged fields have the same name. + * All merged fields have the same name so this is the name of the {@link ExecutableNormalizedField}. *

- * WARNING: This is not always the key in the execution result, because of possible aliases. See {@link #getResultKey()} + * WARNING: This is not always the key in the execution result, because of possible field aliases. * - * @return the name of of the merged fields. + * @return the name of this {@link ExecutableNormalizedField} + * + * @see #getResultKey() + * @see #getAlias() */ public String getName() { return getFieldName(); } /** - * Returns the key of this MergedFieldWithType for the overall result. - * This is either an alias or the FieldWTC name. + * @return the same value as {@link #getName()} + * + * @see #getResultKey() + * @see #getAlias() + */ + public String getFieldName() { + return fieldName; + } + + /** + * Returns the result key of this {@link ExecutableNormalizedField} within the overall result. + * This is either a field alias or the value of {@link #getName()} * - * @return the key for this MergedFieldWithType. + * @return the result key for this {@link ExecutableNormalizedField}. 
+ * + * @see #getName() */ public String getResultKey() { if (alias != null) { @@ -273,56 +294,80 @@ public String getResultKey() { return getName(); } + /** + * @return the field alias used or null if there is none + * + * @see #getResultKey() + * @see #getName() + */ public String getAlias() { return alias; } + /** + * @return a list of the {@link Argument}s on the field + */ public ImmutableList getAstArguments() { return astArguments; } + /** + * Returns an argument value as a {@link NormalizedInputValue} which contains its type name and its current value + * + * @param name the name of the argument + * + * @return an argument value + */ public NormalizedInputValue getNormalizedArgument(String name) { return normalizedArguments.get(name); } + /** + * @return a map of all the arguments in {@link NormalizedInputValue} form + */ public ImmutableMap getNormalizedArguments() { return normalizedArguments; } + /** + * @return a map of the resolved argument values + */ public LinkedHashMap getResolvedArguments() { return resolvedArguments; } - public static Builder newNormalizedField() { - return new Builder(); - } - - - public String getFieldName() { - return fieldName; - } - - - public ExecutableNormalizedField transform(Consumer builderConsumer) { - Builder builder = new Builder(this); - builderConsumer.accept(builder); - return builder.build(); - } - - /** - * @return Warning: returns a Mutable Set. No defensive copy is made for performance reasons. + * A {@link ExecutableNormalizedField} can sometimes (for non-concrete types like interfaces and unions) + * have more than one object type it could be when executed. There is no way to know what it will be until + * the field is executed over data and the type is resolved via a {@link graphql.schema.TypeResolver}. + *

+ * This method returns all the possible types a field can be which is one or more {@link GraphQLObjectType} + * names. + *

+ * Warning: This returns a Mutable Set. No defensive copy is made for performance reasons. + * + * @return a set of the possible type names this field could be. */ public Set getObjectTypeNames() { return objectTypeNames; } + + /** + * This returns the first entry in {@link #getObjectTypeNames()}. Sometimes you know a field cant be more than one + * type and this method is a shortcut one to help you. + * + * @return the first entry from + */ public String getSingleObjectTypeName() { return objectTypeNames.iterator().next(); } + /** + * @return a helper method show field details + */ public String printDetails() { StringBuilder result = new StringBuilder(); if (getAlias() != null) { @@ -331,6 +376,9 @@ public String printDetails() { return result + objectTypeNamesToString() + "." + fieldName; } + /** + * @return a helper method to show the object types names as a string + */ public String objectTypeNamesToString() { if (objectTypeNames.size() == 1) { return objectTypeNames.iterator().next(); @@ -339,6 +387,12 @@ public String objectTypeNamesToString() { } } + /** + * This returns the list of the result keys (see {@link #getResultKey()} that lead from this field upwards to + * its parent field + * + * @return a list of the result keys from this {@link ExecutableNormalizedField} to the top of the operation via parent fields + */ public List getListOfResultKeys() { LinkedList list = new LinkedList<>(); ExecutableNormalizedField current = this; @@ -349,10 +403,20 @@ public List getListOfResultKeys() { return list; } + /** + * @return the children of the {@link ExecutableNormalizedField} + */ public List getChildren() { return children; } + /** + * Returns the list of child fields that would have the same result key + * + * @param resultKey the result key to check + * + * @return a list of all direct {@link ExecutableNormalizedField} children with the specified result key + */ public List getChildrenWithSameResultKey(String resultKey) { return FpKit.filterList(children, 
child -> child.getResultKey().equals(resultKey)); } @@ -380,14 +444,24 @@ public List getChildren(String objectTypeName) { .collect(toList()); } + /** + * the level of the {@link ExecutableNormalizedField} in the operation hierarchy with top level fields + * starting at 1 + * + * @return the level of the {@link ExecutableNormalizedField} in the operation hierarchy + */ public int getLevel() { return level; } + /** + * @return the parent of this {@link ExecutableNormalizedField} or null if it's a top level field + */ public ExecutableNormalizedField getParent() { return parent; } + @Internal public void replaceParent(ExecutableNormalizedField newParent) { this.parent = newParent; } @@ -404,6 +478,11 @@ public String toString() { } + /** + * Traverse from this {@link ExecutableNormalizedField} down into itself and all of its children + * + * @param consumer the callback for each {@link ExecutableNormalizedField} in the hierarchy. + */ public void traverseSubTree(Consumer consumer) { this.getChildren().forEach(child -> { traverseImpl(child, consumer, 1, Integer.MAX_VALUE); @@ -423,6 +502,81 @@ private void traverseImpl(ExecutableNormalizedField root, }); } + /** + * This tries to find interfaces common to all the field output types. + *

+ * i.e. goes through {@link #getFieldDefinitions(GraphQLSchema)} and finds interfaces that + * all the field's unwrapped output types are assignable to. + */ + @SuppressWarnings({"unchecked", "rawtypes"}) + private Set getInterfacesCommonToAllOutputTypes(GraphQLSchema schema) { + // Shortcut for performance + if (objectTypeNames.size() == 1) { + var fieldDef = getOneFieldDefinition(schema); + var outputType = unwrapAll(fieldDef.getType()); + + if (outputType instanceof GraphQLObjectType) { + return new LinkedHashSet<>((List) ((GraphQLObjectType) outputType).getInterfaces()); + } else if (outputType instanceof GraphQLInterfaceType) { + var result = new LinkedHashSet<>((List) ((GraphQLInterfaceType) outputType).getInterfaces()); + result.add(outputType); + return result; + } else { + return Collections.emptySet(); + } + } + + MutableRef> commonInterfaces = new MutableRef<>(); + forEachFieldDefinition(schema, (fieldDef) -> { + var outputType = unwrapAll(fieldDef.getType()); + + List outputTypeInterfaces; + if (outputType instanceof GraphQLObjectType) { + outputTypeInterfaces = (List) ((GraphQLObjectType) outputType).getInterfaces(); + } else if (outputType instanceof GraphQLInterfaceType) { + // This interface and superinterfaces + List superInterfaces = ((GraphQLInterfaceType) outputType).getInterfaces(); + + outputTypeInterfaces = new ArrayList<>(superInterfaces.size() + 1); + outputTypeInterfaces.add((GraphQLInterfaceType) outputType); + + if (!superInterfaces.isEmpty()) { + outputTypeInterfaces.addAll((List) superInterfaces); + } + } else { + outputTypeInterfaces = Collections.emptyList(); + } + + if (commonInterfaces.value == null) { + commonInterfaces.value = new LinkedHashSet<>(outputTypeInterfaces); + } else { + commonInterfaces.value.retainAll(outputTypeInterfaces); + } + }); + + return commonInterfaces.value; + } + + /** + * @return a {@link Builder} of {@link ExecutableNormalizedField}s + */ + public static Builder newNormalizedField() { + return new 
Builder(); + } + + /** + * Allows this {@link ExecutableNormalizedField} to be transformed via a {@link Builder} consumer callback + * + * @param builderConsumer the consumer given a builder + * + * @return a new transformed {@link ExecutableNormalizedField} + */ + public ExecutableNormalizedField transform(Consumer builderConsumer) { + Builder builder = new Builder(this); + builderConsumer.accept(builder); + return builder.build(); + } + public static class Builder { private LinkedHashSet objectTypeNames = new LinkedHashSet<>(); private String fieldName; diff --git a/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java b/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java index 4958649841..ce50c9931b 100644 --- a/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java +++ b/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java @@ -2,9 +2,10 @@ import com.google.common.collect.ImmutableListMultimap; import graphql.Assert; -import graphql.Internal; +import graphql.PublicApi; import graphql.execution.MergedField; import graphql.execution.ResultPath; +import graphql.execution.directives.QueryDirectives; import graphql.language.Field; import graphql.language.OperationDefinition; import graphql.schema.FieldCoordinates; @@ -13,13 +14,22 @@ import java.util.List; import java.util.Map; -@Internal +/** + * A {@link ExecutableNormalizedOperation} represent how the text of a graphql operation (sometimes known colloquially as a query) + * will be executed at runtime according to the graphql specification. It handles complex mechanisms like merging + * duplicate fields into one and also detecting when the types of a given field may actually be for more than one possible object + * type. + *

+ * An operation consists of a list of {@link ExecutableNormalizedField}s in a parent child hierarchy + */ +@PublicApi public class ExecutableNormalizedOperation { private final OperationDefinition.Operation operation; private final String operationName; private final List topLevelFields; private final ImmutableListMultimap fieldToNormalizedField; private final Map normalizedFieldToMergedField; + private final Map normalizedFieldToQueryDirectives; private final ImmutableListMultimap coordinatesToNormalizedFields; public ExecutableNormalizedOperation( @@ -28,6 +38,7 @@ public ExecutableNormalizedOperation( List topLevelFields, ImmutableListMultimap fieldToNormalizedField, Map normalizedFieldToMergedField, + Map normalizedFieldToQueryDirectives, ImmutableListMultimap coordinatesToNormalizedFields ) { this.operation = operation; @@ -35,46 +46,106 @@ public ExecutableNormalizedOperation( this.topLevelFields = topLevelFields; this.fieldToNormalizedField = fieldToNormalizedField; this.normalizedFieldToMergedField = normalizedFieldToMergedField; + this.normalizedFieldToQueryDirectives = normalizedFieldToQueryDirectives; this.coordinatesToNormalizedFields = coordinatesToNormalizedFields; } + /** + * @return operation AST being executed + */ public OperationDefinition.Operation getOperation() { return operation; } + /** + * @return the operation name, which can be null + */ public String getOperationName() { return operationName; } + /** + * This multimap shows how a given {@link ExecutableNormalizedField} maps to a one or more field coordinate in the schema + * + * @return a multimap of fields to schema field coordinates + */ public ImmutableListMultimap getCoordinatesToNormalizedFields() { return coordinatesToNormalizedFields; } + /** + * @return a list of the top level {@link ExecutableNormalizedField}s in this operation. 
+ */ public List getTopLevelFields() { return topLevelFields; } /** - * This is a multimap: the size of it reflects the all the normalized fields + * This is a multimap and the size of it reflects all the normalized fields in the operation * - * @return an immutable list multi map of field to normalised field + * @return an immutable list multimap of {@link Field} to {@link ExecutableNormalizedField} */ public ImmutableListMultimap getFieldToNormalizedField() { return fieldToNormalizedField; } + /** + * Looks up one or more {@link ExecutableNormalizedField}s given a {@link Field} AST element in the operation + * + * @param field the field to look up + * + * @return zero, one or more possible {@link ExecutableNormalizedField}s that represent that field + */ public List getNormalizedFields(Field field) { return fieldToNormalizedField.get(field); } + /** + * @return a map of {@link ExecutableNormalizedField} to {@link MergedField}s + */ public Map getNormalizedFieldToMergedField() { return normalizedFieldToMergedField; } + /** + * Looks up the {@link MergedField} given a {@link ExecutableNormalizedField} + * + * @param executableNormalizedField the field to use the key + * + * @return a {@link MergedField} or null if its not present + */ public MergedField getMergedField(ExecutableNormalizedField executableNormalizedField) { return normalizedFieldToMergedField.get(executableNormalizedField); } + /** + * @return a map of {@link ExecutableNormalizedField} to its {@link QueryDirectives} + */ + public Map getNormalizedFieldToQueryDirectives() { + return normalizedFieldToQueryDirectives; + + } + + /** + * This looks up the {@link QueryDirectives} associated with the given {@link ExecutableNormalizedField} + * + * @param executableNormalizedField the executable normalised field in question + * + * @return the fields query directives or null + */ + public QueryDirectives getQueryDirectives(ExecutableNormalizedField executableNormalizedField) { + return 
normalizedFieldToQueryDirectives.get(executableNormalizedField); + } + + /** + * This will find a {@link ExecutableNormalizedField} given a merged field and a result path. If this does not find a field it will assert with an exception + * + * @param mergedField the merged field + * @param fieldsContainer the containing type of that field + * @param resultPath the result path in play + * + * @return the ExecutableNormalizedField + */ public ExecutableNormalizedField getNormalizedField(MergedField mergedField, GraphQLFieldsContainer fieldsContainer, ResultPath resultPath) { List executableNormalizedFields = fieldToNormalizedField.get(mergedField.getSingleField()); List keysOnlyPath = resultPath.getKeysOnly(); diff --git a/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java b/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java index 6093fb0967..d5e0d4db80 100644 --- a/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java +++ b/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java @@ -5,13 +5,15 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import graphql.GraphQLContext; -import graphql.Internal; +import graphql.PublicApi; import graphql.collect.ImmutableKit; import graphql.execution.CoercedVariables; -import graphql.execution.ConditionalNodes; import graphql.execution.MergedField; import graphql.execution.RawVariables; import graphql.execution.ValuesResolver; +import graphql.execution.conditional.ConditionalNodes; +import graphql.execution.directives.QueryDirectives; +import graphql.execution.directives.QueryDirectivesImpl; import graphql.introspection.Introspection; import graphql.language.Document; import graphql.language.Field; @@ -27,6 +29,7 @@ import graphql.schema.GraphQLCompositeType; import graphql.schema.GraphQLFieldDefinition; import graphql.schema.GraphQLInterfaceType; +import graphql.schema.GraphQLNamedOutputType; 
import graphql.schema.GraphQLObjectType; import graphql.schema.GraphQLSchema; import graphql.schema.GraphQLType; @@ -43,11 +46,11 @@ import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.function.BiConsumer; import static graphql.Assert.assertNotNull; import static graphql.Assert.assertShouldNeverHappen; import static graphql.collect.ImmutableKit.map; -import static graphql.execution.MergedField.newMergedField; import static graphql.schema.GraphQLTypeUtil.unwrapAll; import static graphql.util.FpKit.filterSet; import static graphql.util.FpKit.groupingBy; @@ -55,41 +58,118 @@ import static java.util.Collections.singleton; import static java.util.Collections.singletonList; -@Internal +/** + * This factory can create a {@link ExecutableNormalizedOperation} which represents what would be executed + * during a given graphql operation. + */ +@PublicApi public class ExecutableNormalizedOperationFactory { private final ConditionalNodes conditionalNodes = new ConditionalNodes(); - public static ExecutableNormalizedOperation createExecutableNormalizedOperation(GraphQLSchema graphQLSchema, - Document document, - String operationName, - CoercedVariables coercedVariableValues) { + /** + * This will create a runtime representation of the graphql operation that would be executed + * in a runtime sense. + * + * @param graphQLSchema the schema to be used + * @param document the {@link Document} holding the operation text + * @param operationName the operation name to use + * @param coercedVariableValues the coerced variables to use + * + * @return a runtime representation of the graphql operation. 
+ */ + public static ExecutableNormalizedOperation createExecutableNormalizedOperation( + GraphQLSchema graphQLSchema, + Document document, + String operationName, + CoercedVariables coercedVariableValues + ) { NodeUtil.GetOperationResult getOperationResult = NodeUtil.getOperation(document, operationName); - return new ExecutableNormalizedOperationFactory().createNormalizedQueryImpl(graphQLSchema, getOperationResult.operationDefinition, getOperationResult.fragmentsByName, coercedVariableValues, null); + return new ExecutableNormalizedOperationFactory().createNormalizedQueryImpl(graphQLSchema, + getOperationResult.operationDefinition, + getOperationResult.fragmentsByName, + coercedVariableValues, + null, + GraphQLContext.getDefault(), + Locale.getDefault()); } + /** + * This will create a runtime representation of the graphql operation that would be executed + * in a runtime sense. + * + * @param graphQLSchema the schema to be used + * @param operationDefinition the operation to be executed + * @param fragments a set of fragments associated with the operation + * @param coercedVariableValues the coerced variables to use + * + * @return a runtime representation of the graphql operation. + */ public static ExecutableNormalizedOperation createExecutableNormalizedOperation(GraphQLSchema graphQLSchema, OperationDefinition operationDefinition, Map fragments, CoercedVariables coercedVariableValues) { - return new ExecutableNormalizedOperationFactory().createNormalizedQueryImpl(graphQLSchema, operationDefinition, fragments, coercedVariableValues, null); + return new ExecutableNormalizedOperationFactory().createNormalizedQueryImpl(graphQLSchema, + operationDefinition, + fragments, + coercedVariableValues, + null, + GraphQLContext.getDefault(), + Locale.getDefault()); } + /** + * This will create a runtime representation of the graphql operation that would be executed + * in a runtime sense. 
+ * + * @param graphQLSchema the schema to be used + * @param document the {@link Document} holding the operation text + * @param operationName the operation name to use + * @param rawVariables the raw variables to be coerced + * + * @return a runtime representation of the graphql operation. + */ public static ExecutableNormalizedOperation createExecutableNormalizedOperationWithRawVariables(GraphQLSchema graphQLSchema, Document document, String operationName, RawVariables rawVariables) { - return createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, operationName, rawVariables, GraphQLContext.getDefault(), Locale.getDefault()); + return createExecutableNormalizedOperationWithRawVariables(graphQLSchema, + document, + operationName, + rawVariables, + GraphQLContext.getDefault(), + Locale.getDefault()); } - public static ExecutableNormalizedOperation createExecutableNormalizedOperationWithRawVariables(GraphQLSchema graphQLSchema, - Document document, - String operationName, - RawVariables rawVariables, - GraphQLContext graphQLContext, - Locale locale) { + + /** + * This will create a runtime representation of the graphql operation that would be executed + * in a runtime sense. + * + * @param graphQLSchema the schema to be used + * @param document the {@link Document} holding the operation text + * @param operationName the operation name to use + * @param rawVariables the raw variables that have not yet been coerced + * @param locale the {@link Locale} to use during coercion + * @param graphQLContext the {@link GraphQLContext} to use during coercion + * + * @return a runtime representation of the graphql operation. 
+ */ + public static ExecutableNormalizedOperation createExecutableNormalizedOperationWithRawVariables( + GraphQLSchema graphQLSchema, + Document document, + String operationName, + RawVariables rawVariables, + GraphQLContext graphQLContext, + Locale locale + ) { NodeUtil.GetOperationResult getOperationResult = NodeUtil.getOperation(document, operationName); - return new ExecutableNormalizedOperationFactory().createExecutableNormalizedOperationImplWithRawVariables(graphQLSchema, getOperationResult.operationDefinition, getOperationResult.fragmentsByName, rawVariables, graphQLContext, locale); + return new ExecutableNormalizedOperationFactory().createExecutableNormalizedOperationImplWithRawVariables(graphQLSchema, + getOperationResult.operationDefinition, + getOperationResult.fragmentsByName, + rawVariables, + graphQLContext, + locale); } private ExecutableNormalizedOperation createExecutableNormalizedOperationImplWithRawVariables(GraphQLSchema graphQLSchema, @@ -100,19 +180,34 @@ private ExecutableNormalizedOperation createExecutableNormalizedOperationImplWit Locale locale) { List variableDefinitions = operationDefinition.getVariableDefinitions(); - CoercedVariables coercedVariableValues = ValuesResolver.coerceVariableValues(graphQLSchema, variableDefinitions, rawVariables, graphQLContext, locale); - Map normalizedVariableValues = ValuesResolver.getNormalizedVariableValues(graphQLSchema, variableDefinitions, rawVariables, graphQLContext, locale); - return createNormalizedQueryImpl(graphQLSchema, operationDefinition, fragments, coercedVariableValues, normalizedVariableValues); + CoercedVariables coercedVariableValues = ValuesResolver.coerceVariableValues(graphQLSchema, + variableDefinitions, + rawVariables, + graphQLContext, + locale); + Map normalizedVariableValues = ValuesResolver.getNormalizedVariableValues(graphQLSchema, + variableDefinitions, + rawVariables, + graphQLContext, + locale); + return createNormalizedQueryImpl(graphQLSchema, + operationDefinition, + 
fragments, + coercedVariableValues, + normalizedVariableValues, + graphQLContext, + locale); } /** - * Creates a new Normalized query tree for the provided query + * Creates a new ExecutableNormalizedOperation for the provided query */ private ExecutableNormalizedOperation createNormalizedQueryImpl(GraphQLSchema graphQLSchema, OperationDefinition operationDefinition, Map fragments, CoercedVariables coercedVariableValues, - @Nullable Map normalizedVariableValues) { + @Nullable Map normalizedVariableValues, + GraphQLContext graphQLContext, Locale locale) { FieldCollectorNormalizedQueryParams parameters = FieldCollectorNormalizedQueryParams .newParameters() .fragments(fragments) @@ -127,19 +222,30 @@ private ExecutableNormalizedOperation createNormalizedQueryImpl(GraphQLSchema gr ImmutableListMultimap.Builder fieldToNormalizedField = ImmutableListMultimap.builder(); ImmutableMap.Builder normalizedFieldToMergedField = ImmutableMap.builder(); + ImmutableMap.Builder normalizedFieldToQueryDirectives = ImmutableMap.builder(); ImmutableListMultimap.Builder coordinatesToNormalizedFields = ImmutableListMultimap.builder(); + BiConsumer captureMergedField = (enf, mergedFld) -> { + //QueryDirectivesImpl is a lazy object and only computes itself when asked for + QueryDirectives queryDirectives = new QueryDirectivesImpl(mergedFld, graphQLSchema, coercedVariableValues.toMap(), graphQLContext, locale); + normalizedFieldToQueryDirectives.put(enf, queryDirectives); + normalizedFieldToMergedField.put(enf, mergedFld); + }; + for (ExecutableNormalizedField topLevel : collectFromOperationResult.children) { - ImmutableList mergedField = collectFromOperationResult.normalizedFieldToAstFields.get(topLevel); - normalizedFieldToMergedField.put(topLevel, newMergedField(map(mergedField, fieldAndAstParent -> fieldAndAstParent.field)).build()); - updateFieldToNFMap(topLevel, mergedField, fieldToNormalizedField); + ImmutableList fieldAndAstParents = 
collectFromOperationResult.normalizedFieldToAstFields.get(topLevel); + MergedField mergedField = newMergedField(fieldAndAstParents); + + captureMergedField.accept(topLevel, mergedField); + + updateFieldToNFMap(topLevel, fieldAndAstParents, fieldToNormalizedField); updateCoordinatedToNFMap(coordinatesToNormalizedFields, topLevel); buildFieldWithChildren(topLevel, - mergedField, + fieldAndAstParents, parameters, fieldToNormalizedField, - normalizedFieldToMergedField, + captureMergedField, coordinatesToNormalizedFields, 1); @@ -154,37 +260,45 @@ private ExecutableNormalizedOperation createNormalizedQueryImpl(GraphQLSchema gr new ArrayList<>(collectFromOperationResult.children), fieldToNormalizedField.build(), normalizedFieldToMergedField.build(), + normalizedFieldToQueryDirectives.build(), coordinatesToNormalizedFields.build() ); } - private void buildFieldWithChildren(ExecutableNormalizedField field, - ImmutableList mergedField, + private void buildFieldWithChildren(ExecutableNormalizedField executableNormalizedField, + ImmutableList fieldAndAstParents, FieldCollectorNormalizedQueryParams fieldCollectorNormalizedQueryParams, ImmutableListMultimap.Builder fieldNormalizedField, - ImmutableMap.Builder normalizedFieldToMergedField, + BiConsumer captureMergedField, ImmutableListMultimap.Builder coordinatesToNormalizedFields, int curLevel) { - CollectNFResult nextLevel = collectFromMergedField(fieldCollectorNormalizedQueryParams, field, mergedField, curLevel + 1); + CollectNFResult nextLevel = collectFromMergedField(fieldCollectorNormalizedQueryParams, executableNormalizedField, fieldAndAstParents, curLevel + 1); - for (ExecutableNormalizedField child : nextLevel.children) { - field.addChild(child); - ImmutableList mergedFieldForChild = nextLevel.normalizedFieldToAstFields.get(child); - normalizedFieldToMergedField.put(child, newMergedField(map(mergedFieldForChild, fieldAndAstParent -> fieldAndAstParent.field)).build()); - updateFieldToNFMap(child, mergedFieldForChild, 
fieldNormalizedField); - updateCoordinatedToNFMap(coordinatesToNormalizedFields, child); + for (ExecutableNormalizedField childENF : nextLevel.children) { + executableNormalizedField.addChild(childENF); + ImmutableList childFieldAndAstParents = nextLevel.normalizedFieldToAstFields.get(childENF); - buildFieldWithChildren(child, - mergedFieldForChild, + MergedField mergedField = newMergedField(childFieldAndAstParents); + captureMergedField.accept(childENF, mergedField); + + updateFieldToNFMap(childENF, childFieldAndAstParents, fieldNormalizedField); + updateCoordinatedToNFMap(coordinatesToNormalizedFields, childENF); + + buildFieldWithChildren(childENF, + childFieldAndAstParents, fieldCollectorNormalizedQueryParams, fieldNormalizedField, - normalizedFieldToMergedField, + captureMergedField, coordinatesToNormalizedFields, curLevel + 1); } } + private static MergedField newMergedField(ImmutableList fieldAndAstParents) { + return MergedField.newMergedField(map(fieldAndAstParents, fieldAndAstParent -> fieldAndAstParent.field)).build(); + } + private void updateFieldToNFMap(ExecutableNormalizedField executableNormalizedField, ImmutableList mergedField, ImmutableListMultimap.Builder fieldToNormalizedField) { @@ -376,7 +490,7 @@ private void collectFromSelectionSet(FieldCollectorNormalizedQueryParams paramet } else if (selection instanceof InlineFragment) { collectInlineFragment(parameters, result, (InlineFragment) selection, possibleObjects, astTypeCondition); } else if (selection instanceof FragmentSpread) { - collectFragmentSpread(parameters, result, (FragmentSpread) selection, possibleObjects, astTypeCondition); + collectFragmentSpread(parameters, result, (FragmentSpread) selection, possibleObjects); } } } @@ -404,15 +518,20 @@ public boolean isConcrete() { private void collectFragmentSpread(FieldCollectorNormalizedQueryParams parameters, List result, FragmentSpread fragmentSpread, - Set possibleObjects, - GraphQLCompositeType astTypeCondition + Set possibleObjects ) { 
- if (!conditionalNodes.shouldInclude(parameters.getCoercedVariableValues(), fragmentSpread.getDirectives())) { + if (!conditionalNodes.shouldInclude(fragmentSpread, + parameters.getCoercedVariableValues(), + parameters.getGraphQLSchema(), + parameters.getGraphQLContext())) { return; } FragmentDefinition fragmentDefinition = assertNotNull(parameters.getFragmentsByName().get(fragmentSpread.getName())); - if (!conditionalNodes.shouldInclude(parameters.getCoercedVariableValues(), fragmentDefinition.getDirectives())) { + if (!conditionalNodes.shouldInclude(fragmentDefinition, + parameters.getCoercedVariableValues(), + parameters.getGraphQLSchema(), + parameters.getGraphQLContext())) { return; } GraphQLCompositeType newAstTypeCondition = (GraphQLCompositeType) assertNotNull(parameters.getGraphQLSchema().getType(fragmentDefinition.getTypeCondition().getName())); @@ -427,7 +546,7 @@ private void collectInlineFragment(FieldCollectorNormalizedQueryParams parameter Set possibleObjects, GraphQLCompositeType astTypeCondition ) { - if (!conditionalNodes.shouldInclude(parameters.getCoercedVariableValues(), inlineFragment.getDirectives())) { + if (!conditionalNodes.shouldInclude(inlineFragment, parameters.getCoercedVariableValues(), parameters.getGraphQLSchema(), parameters.getGraphQLContext())) { return; } Set newPossibleObjects = possibleObjects; @@ -447,10 +566,13 @@ private void collectField(FieldCollectorNormalizedQueryParams parameters, Set possibleObjectTypes, GraphQLCompositeType astTypeCondition ) { - if (!conditionalNodes.shouldInclude(parameters.getCoercedVariableValues(), field.getDirectives())) { + if (!conditionalNodes.shouldInclude(field, + parameters.getCoercedVariableValues(), + parameters.getGraphQLSchema(), + parameters.getGraphQLContext())) { return; } - // this means there is actually no possible type for this field and we are done + // this means there is actually no possible type for this field, and we are done if (possibleObjectTypes.isEmpty()) { return; } 
@@ -489,8 +611,8 @@ private ImmutableSet resolvePossibleObjects(GraphQLCompositeT } else if (type instanceof GraphQLInterfaceType) { return ImmutableSet.copyOf(graphQLSchema.getImplementations((GraphQLInterfaceType) type)); } else if (type instanceof GraphQLUnionType) { - List types = ((GraphQLUnionType) type).getTypes(); - return ImmutableSet.copyOf(types); + List unionTypes = ((GraphQLUnionType) type).getTypes(); + return ImmutableSet.copyOf(ImmutableKit.map(unionTypes, GraphQLObjectType.class::cast)); } else { return assertShouldNeverHappen(); } diff --git a/src/main/java/graphql/normalized/ExecutableNormalizedOperationToAstCompiler.java b/src/main/java/graphql/normalized/ExecutableNormalizedOperationToAstCompiler.java index d4250b8e40..2d68b2f821 100644 --- a/src/main/java/graphql/normalized/ExecutableNormalizedOperationToAstCompiler.java +++ b/src/main/java/graphql/normalized/ExecutableNormalizedOperationToAstCompiler.java @@ -3,7 +3,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import graphql.Assert; -import graphql.Internal; +import graphql.PublicApi; import graphql.introspection.Introspection; import graphql.language.Argument; import graphql.language.ArrayValue; @@ -40,9 +40,25 @@ import static graphql.language.TypeName.newTypeName; import static graphql.schema.GraphQLTypeUtil.unwrapAll; -@Internal +/** + * This class can take a list of {@link ExecutableNormalizedField}s and compiling out a + * normalised operation {@link Document} that would represent how those fields + * maybe executed. + *

+ * This is essentially the reverse of {@link ExecutableNormalizedOperationFactory} which takes + * operation text and makes {@link ExecutableNormalizedField}s from it, this takes {@link ExecutableNormalizedField}s + * and makes operation text from it. + *

+ * You could for example send that operation text onto to some other graphql server if it + * has the same schema as the one provided. + */ +@PublicApi public class ExecutableNormalizedOperationToAstCompiler { + /** + * The result is a {@link Document} and a map of variables + * that would go with that document. + */ public static class CompilerResult { private final Document document; private final Map variables; @@ -61,6 +77,20 @@ public Map getVariables() { } } + /** + * This will compile a operation text {@link Document} with possibly variables from the given {@link ExecutableNormalizedField}s + * + * The {@link VariablePredicate} is used called to decide if the given argument values should be made into a variable + * OR inlined into the operation text as a graphql literal. + * + * @param schema the graphql schema to use + * @param operationKind the kind of operation + * @param operationName the name of the operation to use + * @param topLevelFields the top level {@link ExecutableNormalizedField}s to start from + * @param variablePredicate the variable predicate that decides if arguments turn into variables or not during compilation + * + * @return a {@link CompilerResult} object + */ public static CompilerResult compileToDocument(@NotNull GraphQLSchema schema, @NotNull OperationDefinition.Operation operationKind, @Nullable String operationName, diff --git a/src/main/java/graphql/normalized/NormalizedInputValue.java b/src/main/java/graphql/normalized/NormalizedInputValue.java index 912ab11614..390bac032a 100644 --- a/src/main/java/graphql/normalized/NormalizedInputValue.java +++ b/src/main/java/graphql/normalized/NormalizedInputValue.java @@ -1,5 +1,6 @@ package graphql.normalized; +import graphql.PublicApi; import graphql.language.Value; import java.util.Objects; @@ -10,8 +11,9 @@ import static graphql.language.AstPrinter.printAst; /** - * A value with type information. + * An argument value with type information. 
*/ +@PublicApi public class NormalizedInputValue { private final String typeName; private final Object value; diff --git a/src/main/java/graphql/normalized/VariablePredicate.java b/src/main/java/graphql/normalized/VariablePredicate.java index 74d85fb256..e4a0347050 100644 --- a/src/main/java/graphql/normalized/VariablePredicate.java +++ b/src/main/java/graphql/normalized/VariablePredicate.java @@ -1,11 +1,12 @@ package graphql.normalized; -import graphql.Internal; +import graphql.PublicSpi; /** - * This predicate indicates whether a variable should be made for this field argument + * This predicate indicates whether a variable should be made for this field argument OR whether it will be compiled + * into a graphql AST literal. */ -@Internal +@PublicSpi public interface VariablePredicate { /** * Return true if a variable should be made for this field argument @@ -13,6 +14,7 @@ public interface VariablePredicate { * @param executableNormalizedField the field in question * @param argName the argument on the field * @param normalizedInputValue the input value for that argument + * * @return true if a variable should be made */ boolean shouldMakeVariable(ExecutableNormalizedField executableNormalizedField, String argName, NormalizedInputValue normalizedInputValue); diff --git a/src/main/java/graphql/scalar/GraphqlBooleanCoercing.java b/src/main/java/graphql/scalar/GraphqlBooleanCoercing.java index 6ef64c5976..266e64c4a5 100644 --- a/src/main/java/graphql/scalar/GraphqlBooleanCoercing.java +++ b/src/main/java/graphql/scalar/GraphqlBooleanCoercing.java @@ -68,12 +68,13 @@ private Boolean serializeImpl(@NotNull Object input, @NotNull Locale locale) { @NotNull private Boolean parseValueImpl(@NotNull Object input, @NotNull Locale locale) { - if (!(input instanceof Boolean)) { + Boolean result = convertImpl(input); + if (result == null) { throw new CoercingParseValueException( - i18nMsg(locale, "Boolean.unexpectedRawValueType", typeName(input)) + i18nMsg(locale, 
"Boolean.notBoolean", typeName(input)) ); } - return (Boolean) input; + return result; } private static boolean parseLiteralImpl(@NotNull Object input, @NotNull Locale locale) { diff --git a/src/main/java/graphql/scalar/GraphqlFloatCoercing.java b/src/main/java/graphql/scalar/GraphqlFloatCoercing.java index 683f915634..58329e56f3 100644 --- a/src/main/java/graphql/scalar/GraphqlFloatCoercing.java +++ b/src/main/java/graphql/scalar/GraphqlFloatCoercing.java @@ -65,12 +65,6 @@ private Double serialiseImpl(Object input, @NotNull Locale locale) { @NotNull private Double parseValueImpl(@NotNull Object input, @NotNull Locale locale) { - if (!(input instanceof Number)) { - throw new CoercingParseValueException( - i18nMsg(locale, "Float.unexpectedRawValueType", typeName(input)) - ); - } - Double result = convertImpl(input); if (result == null) { throw new CoercingParseValueException( diff --git a/src/main/java/graphql/scalar/GraphqlIntCoercing.java b/src/main/java/graphql/scalar/GraphqlIntCoercing.java index 350c4f4814..99e9eeb84b 100644 --- a/src/main/java/graphql/scalar/GraphqlIntCoercing.java +++ b/src/main/java/graphql/scalar/GraphqlIntCoercing.java @@ -64,45 +64,15 @@ private Integer serialiseImpl(Object input, @NotNull Locale locale) { @NotNull private Integer parseValueImpl(@NotNull Object input, @NotNull Locale locale) { - if (!(input instanceof Number)) { - throw new CoercingParseValueException( - i18nMsg(locale, "Int.notInt", typeName(input)) - ); - } - - if (input instanceof Integer) { - return (Integer) input; - } + Integer result = convertImpl(input); - BigInteger result = convertParseValueImpl(input); if (result == null) { throw new CoercingParseValueException( i18nMsg(locale, "Int.notInt", typeName(input)) ); } - if (result.compareTo(INT_MIN) < 0 || result.compareTo(INT_MAX) > 0) { - throw new CoercingParseValueException( - i18nMsg(locale, "Int.outsideRange", result.toString()) - ); - } - return result.intValueExact(); - } - - private BigInteger 
convertParseValueImpl(Object input) { - BigDecimal value; - try { - value = new BigDecimal(input.toString()); - } catch (NumberFormatException e) { - return null; - } - - try { - return value.toBigIntegerExact(); - } catch (ArithmeticException e) { - // Exception if number has non-zero fractional part - return null; - } + return result; } private static int parseLiteralImpl(Object input, @NotNull Locale locale) { diff --git a/src/main/java/graphql/scalar/GraphqlStringCoercing.java b/src/main/java/graphql/scalar/GraphqlStringCoercing.java index b330f254ae..9b0d6b84ae 100644 --- a/src/main/java/graphql/scalar/GraphqlStringCoercing.java +++ b/src/main/java/graphql/scalar/GraphqlStringCoercing.java @@ -28,15 +28,6 @@ private String toStringImpl(Object input) { return String.valueOf(input); } - private String parseValueImpl(@NotNull Object input, @NotNull Locale locale) { - if (!(input instanceof String)) { - throw new CoercingParseValueException( - i18nMsg(locale, "String.unexpectedRawValueType", typeName(input)) - ); - } - return (String) input; - } - private String parseLiteralImpl(@NotNull Object input, Locale locale) { if (!(input instanceof StringValue)) { throw new CoercingParseLiteralException( @@ -64,12 +55,12 @@ public String serialize(@NotNull Object dataFetcherResult) { @Override @Deprecated public String parseValue(@NotNull Object input) { - return parseValueImpl(input, Locale.getDefault()); + return toStringImpl(input); } @Override public String parseValue(@NotNull Object input, @NotNull GraphQLContext graphQLContext, @NotNull Locale locale) throws CoercingParseValueException { - return parseValueImpl(input, locale); + return toStringImpl(input); } @Override diff --git a/src/main/java/graphql/schema/DataFetchingEnvironment.java b/src/main/java/graphql/schema/DataFetchingEnvironment.java index 578a234c36..041d6a9ca2 100644 --- a/src/main/java/graphql/schema/DataFetchingEnvironment.java +++ b/src/main/java/graphql/schema/DataFetchingEnvironment.java @@ -3,7 
+3,6 @@ import graphql.DeprecatedAt; import graphql.GraphQLContext; import graphql.PublicApi; -import graphql.cachecontrol.CacheControl; import graphql.execution.ExecutionId; import graphql.execution.ExecutionStepInfo; import graphql.execution.MergedField; @@ -77,10 +76,10 @@ public interface DataFetchingEnvironment extends IntrospectionDataFetchingEnviro T getArgumentOrDefault(String name, T defaultValue); /** - * Returns a legacy context argument that is set up when the {@link graphql.GraphQL#execute(graphql.ExecutionInput)} )} method + * Returns a legacy context argument that is set up when the {@link graphql.GraphQL#execute(graphql.ExecutionInput)} method * is invoked. *

- * This is a info object which is provided to all DataFetchers, but never used by graphql-java itself. + * This is an info object which is provided to all DataFetchers, but never used by graphql-java itself. * * @param you decide what type it is * @@ -96,19 +95,19 @@ public interface DataFetchingEnvironment extends IntrospectionDataFetchingEnviro * Returns a shared context argument that is set up when the {@link graphql.GraphQL#execute(graphql.ExecutionInput)} )} method * is invoked. *

- * This is a info object which is provided to all DataFetchers. + * This is an info object which is provided to all DataFetchers. * * @return can NOT be null */ GraphQLContext getGraphQlContext(); /** - * This returns a context object that parent fields may have returned returned + * This returns a context object that parent fields may have returned * via {@link graphql.execution.DataFetcherResult#getLocalContext()} which can be used to pass down extra information to * fields beyond the normal {@link #getSource()} *

- * This differs from {@link #getContext()} in that it's field specific and passed from parent field to child field, - * whilst {@link #getContext()} is global for the whole query. + * This differs from {@link #getGraphQlContext()} in that it's field specific and passed from parent field to child field, + * whilst {@link #getGraphQlContext()} is global for the whole query. *

* If the field is a top level field then 'localContext' equals null since it's never be set until those * fields execute. @@ -144,7 +143,7 @@ public interface DataFetchingEnvironment extends IntrospectionDataFetchingEnviro List getFields(); /** - * It can happen that a query has overlapping fields which are + * It can happen that a query has overlapping fields which * are querying the same data. If this is the case they get merged * together and fetched only once, but this method returns all of the Fields * from the query. @@ -239,15 +238,6 @@ public interface DataFetchingEnvironment extends IntrospectionDataFetchingEnviro */ DataLoaderRegistry getDataLoaderRegistry(); - /** - * @return the current {@link CacheControl} instance used to add cache hints to the response - * - * @deprecated - Apollo has deprecated the Cache Control specification - */ - @Deprecated - @DeprecatedAt("2022-07-26") - CacheControl getCacheControl(); - /** * @return the current {@link java.util.Locale} instance used for this request */ diff --git a/src/main/java/graphql/schema/DataFetchingEnvironmentImpl.java b/src/main/java/graphql/schema/DataFetchingEnvironmentImpl.java index b6fc50e92f..dbf9618a43 100644 --- a/src/main/java/graphql/schema/DataFetchingEnvironmentImpl.java +++ b/src/main/java/graphql/schema/DataFetchingEnvironmentImpl.java @@ -5,7 +5,6 @@ import graphql.DeprecatedAt; import graphql.GraphQLContext; import graphql.Internal; -import graphql.cachecontrol.CacheControl; import graphql.collect.ImmutableKit; import graphql.collect.ImmutableMapWithNullValues; import graphql.execution.ExecutionContext; @@ -44,7 +43,6 @@ public class DataFetchingEnvironmentImpl implements DataFetchingEnvironment { private final DataFetchingFieldSelectionSet selectionSet; private final Supplier executionStepInfo; private final DataLoaderRegistry dataLoaderRegistry; - private final CacheControl cacheControl; private final Locale locale; private final OperationDefinition operationDefinition; private final 
Document document; @@ -68,7 +66,6 @@ private DataFetchingEnvironmentImpl(Builder builder) { this.selectionSet = builder.selectionSet; this.executionStepInfo = builder.executionStepInfo; this.dataLoaderRegistry = builder.dataLoaderRegistry; - this.cacheControl = builder.cacheControl; this.locale = builder.locale; this.operationDefinition = builder.operationDefinition; this.document = builder.document; @@ -95,7 +92,6 @@ public static Builder newDataFetchingEnvironment(ExecutionContext executionConte .graphQLSchema(executionContext.getGraphQLSchema()) .fragmentsByName(executionContext.getFragmentsByName()) .dataLoaderRegistry(executionContext.getDataLoaderRegistry()) - .cacheControl(executionContext.getCacheControl()) .locale(executionContext.getLocale()) .document(executionContext.getDocument()) .operationDefinition(executionContext.getOperationDefinition()) @@ -218,13 +214,6 @@ public DataLoaderRegistry getDataLoaderRegistry() { return dataLoaderRegistry; } - @Override - @Deprecated - @DeprecatedAt("2022-07-26") - public CacheControl getCacheControl() { - return cacheControl; - } - @Override public Locale getLocale() { return locale; @@ -268,7 +257,6 @@ public static class Builder { private DataFetchingFieldSelectionSet selectionSet; private Supplier executionStepInfo; private DataLoaderRegistry dataLoaderRegistry; - private CacheControl cacheControl; private Locale locale; private OperationDefinition operationDefinition; private Document document; @@ -294,7 +282,6 @@ public Builder(DataFetchingEnvironmentImpl env) { this.selectionSet = env.selectionSet; this.executionStepInfo = env.executionStepInfo; this.dataLoaderRegistry = env.dataLoaderRegistry; - this.cacheControl = env.cacheControl; this.locale = env.locale; this.operationDefinition = env.operationDefinition; this.document = env.document; @@ -395,13 +382,6 @@ public Builder dataLoaderRegistry(DataLoaderRegistry dataLoaderRegistry) { return this; } - @Deprecated - @DeprecatedAt("2022-07-26") - public Builder 
cacheControl(CacheControl cacheControl) { - this.cacheControl = cacheControl; - return this; - } - public Builder locale(Locale locale) { this.locale = locale; return this; diff --git a/src/main/java/graphql/schema/DelegatingDataFetchingEnvironment.java b/src/main/java/graphql/schema/DelegatingDataFetchingEnvironment.java index c2da68c397..ff10e1cdd7 100644 --- a/src/main/java/graphql/schema/DelegatingDataFetchingEnvironment.java +++ b/src/main/java/graphql/schema/DelegatingDataFetchingEnvironment.java @@ -3,7 +3,6 @@ import graphql.DeprecatedAt; import graphql.GraphQLContext; import graphql.PublicApi; -import graphql.cachecontrol.CacheControl; import graphql.execution.ExecutionId; import graphql.execution.ExecutionStepInfo; import graphql.execution.MergedField; @@ -164,13 +163,6 @@ public Locale getLocale() { return delegateEnvironment.getLocale(); } - @Override - @Deprecated - @DeprecatedAt("2022-07-26") - public CacheControl getCacheControl() { - return delegateEnvironment.getCacheControl(); - } - @Override public OperationDefinition getOperationDefinition() { return delegateEnvironment.getOperationDefinition(); diff --git a/src/main/java/graphql/schema/GraphQLCodeRegistry.java b/src/main/java/graphql/schema/GraphQLCodeRegistry.java index ff2c72c357..9620ca19a7 100644 --- a/src/main/java/graphql/schema/GraphQLCodeRegistry.java +++ b/src/main/java/graphql/schema/GraphQLCodeRegistry.java @@ -1,6 +1,7 @@ package graphql.schema; import graphql.Assert; +import graphql.DeprecatedAt; import graphql.Internal; import graphql.PublicApi; import graphql.schema.visibility.GraphqlFieldVisibility; @@ -56,11 +57,29 @@ public GraphqlFieldVisibility getFieldVisibility() { * @param fieldDefinition the field definition * * @return the DataFetcher associated with this field. All fields have data fetchers + * + * @see #getDataFetcher(GraphQLObjectType, GraphQLFieldDefinition) + * @deprecated This is confusing because {@link GraphQLInterfaceType}s cant have data fetchers. 
At runtime only a {@link GraphQLObjectType} + * can be used to fetch a field. This method allows the mapping to be made, but it is never useful if an interface is passed in. */ + @Deprecated + @DeprecatedAt("2023-05-13") public DataFetcher getDataFetcher(GraphQLFieldsContainer parentType, GraphQLFieldDefinition fieldDefinition) { return getDataFetcherImpl(FieldCoordinates.coordinates(parentType, fieldDefinition), fieldDefinition, dataFetcherMap, systemDataFetcherMap, defaultDataFetcherFactory); } + /** + * Returns a data fetcher associated with a field within an object type + * + * @param parentType the container type + * @param fieldDefinition the field definition + * + * @return the DataFetcher associated with this field. All fields have data fetchers + */ + public DataFetcher getDataFetcher(GraphQLObjectType parentType, GraphQLFieldDefinition fieldDefinition) { + return getDataFetcherImpl(FieldCoordinates.coordinates(parentType, fieldDefinition), fieldDefinition, dataFetcherMap, systemDataFetcherMap, defaultDataFetcherFactory); + } + /** * Returns a data fetcher associated with a field located at specified coordinates. * @@ -242,11 +261,29 @@ private Builder markChanged(boolean condition) { * @param fieldDefinition the field definition * * @return the DataFetcher associated with this field. All fields have data fetchers + * + * @see #getDataFetcher(GraphQLObjectType, GraphQLFieldDefinition) + * @deprecated This is confusing because {@link GraphQLInterfaceType}s cant have data fetchers. At runtime only a {@link GraphQLObjectType} + * can be used to fetch a field. This method allows the mapping to be made, but it is never useful if an interface is passed in. 
*/ + @Deprecated + @DeprecatedAt("2023-05-13") public DataFetcher getDataFetcher(GraphQLFieldsContainer parentType, GraphQLFieldDefinition fieldDefinition) { return getDataFetcherImpl(FieldCoordinates.coordinates(parentType, fieldDefinition), fieldDefinition, dataFetcherMap, systemDataFetcherMap, defaultDataFetcherFactory); } + /** + * Returns a data fetcher associated with a field within an object type + * + * @param parentType the container type + * @param fieldDefinition the field definition + * + * @return the DataFetcher associated with this field. All fields have data fetchers + */ + public DataFetcher getDataFetcher(GraphQLObjectType parentType, GraphQLFieldDefinition fieldDefinition) { + return getDataFetcherImpl(FieldCoordinates.coordinates(parentType, fieldDefinition), fieldDefinition, dataFetcherMap, systemDataFetcherMap, defaultDataFetcherFactory); + } + /** * Returns a data fetcher associated with a field located at specified coordinates. * @@ -331,11 +368,31 @@ public Builder dataFetcher(FieldCoordinates coordinates, DataFetcher dataFetc * @param dataFetcher the data fetcher code for that field * * @return this builder + * + * @see #dataFetcher(GraphQLObjectType, GraphQLFieldDefinition, DataFetcher) + * @deprecated This is confusing because {@link GraphQLInterfaceType}s cant have data fetchers. At runtime only a {@link GraphQLObjectType} + * can be used to fetch a field. This method allows the mapping to be made, but it is never useful if an interface is passed in. 
*/ + @Deprecated + @DeprecatedAt("2023-05-13") public Builder dataFetcher(GraphQLFieldsContainer parentType, GraphQLFieldDefinition fieldDefinition, DataFetcher dataFetcher) { return dataFetcher(FieldCoordinates.coordinates(parentType.getName(), fieldDefinition.getName()), dataFetcher); } + + /** + * Sets the data fetcher for a specific field inside an object type + * + * @param parentType the object type + * @param fieldDefinition the field definition + * @param dataFetcher the data fetcher code for that field + * + * @return this builder + */ + public Builder dataFetcher(GraphQLObjectType parentType, GraphQLFieldDefinition fieldDefinition, DataFetcher dataFetcher) { + return dataFetcher(FieldCoordinates.coordinates(parentType.getName(), fieldDefinition.getName()), dataFetcher); + } + /** * Called to place system data fetchers (eg Introspection fields) into the mix * diff --git a/src/main/java/graphql/schema/GraphQLTypeVisitor.java b/src/main/java/graphql/schema/GraphQLTypeVisitor.java index 3fbfcd2a99..7853fa8dac 100644 --- a/src/main/java/graphql/schema/GraphQLTypeVisitor.java +++ b/src/main/java/graphql/schema/GraphQLTypeVisitor.java @@ -13,54 +13,56 @@ */ @PublicApi public interface GraphQLTypeVisitor { - TraversalControl visitGraphQLArgument(GraphQLArgument node, TraverserContext context); - - TraversalControl visitGraphQLInterfaceType(GraphQLInterfaceType node, TraverserContext context); - - TraversalControl visitGraphQLEnumType(GraphQLEnumType node, TraverserContext context); - - TraversalControl visitGraphQLEnumValueDefinition(GraphQLEnumValueDefinition node, TraverserContext context); - - TraversalControl visitGraphQLFieldDefinition(GraphQLFieldDefinition node, TraverserContext context); - /** - * This method will be called twice. Once for a directive definition in a schema and then do each time a directive is applied to a schema element + * This method will be called when a directive is applied to a schema element. 
* - * When it's applied to a schema element then {@link TraverserContext#getParentNode()} will be the schema element that this is applied to. + * The {@link TraverserContext#getParentNode()} will be the schema element that this is applied to. * * The graphql-java code base is trying to slowly move away from using {@link GraphQLDirective}s when they really should be {@link GraphQLAppliedDirective}s - * and this is another place that has been left in. In the future this behavior will change and this will only visit directive definitions of a schema, not where - * they are applied. * - * @param node the directive + * @param node the applied directive * @param context the traversal context + * * @return how to control the visitation processing */ - TraversalControl visitGraphQLDirective(GraphQLDirective node, TraverserContext context); + default TraversalControl visitGraphQLAppliedDirective(GraphQLAppliedDirective node, TraverserContext context) { + return TraversalControl.CONTINUE; + } + + default TraversalControl visitGraphQLAppliedDirectiveArgument(GraphQLAppliedDirectiveArgument node, TraverserContext context) { + return TraversalControl.CONTINUE; + } + + TraversalControl visitGraphQLArgument(GraphQLArgument node, TraverserContext context); /** - * This method will be called when a directive is applied to a schema element. + * This method will be called twice. Once for a directive definition in a schema and then do each time a directive is applied to a schema element * - * The {@link TraverserContext#getParentNode()} will be the schema element that this is applied to. + * When it's applied to a schema element then {@link TraverserContext#getParentNode()} will be the schema element that this is applied to. * * The graphql-java code base is trying to slowly move away from using {@link GraphQLDirective}s when they really should be {@link GraphQLAppliedDirective}s + * and this is another place that has been left in. 
In the future this behavior will change and this will only visit directive definitions of a schema, not where + * they are applied. * - * @param node the applied directive + * @param node the directive * @param context the traversal context + * * @return how to control the visitation processing */ - default TraversalControl visitGraphQLAppliedDirective(GraphQLAppliedDirective node, TraverserContext context) { - return TraversalControl.CONTINUE; - } + TraversalControl visitGraphQLDirective(GraphQLDirective node, TraverserContext context); - default TraversalControl visitGraphQLAppliedDirectiveArgument(GraphQLAppliedDirectiveArgument node, TraverserContext context) { - return TraversalControl.CONTINUE; - } + TraversalControl visitGraphQLEnumType(GraphQLEnumType node, TraverserContext context); + + TraversalControl visitGraphQLEnumValueDefinition(GraphQLEnumValueDefinition node, TraverserContext context); + + TraversalControl visitGraphQLFieldDefinition(GraphQLFieldDefinition node, TraverserContext context); TraversalControl visitGraphQLInputObjectField(GraphQLInputObjectField node, TraverserContext context); TraversalControl visitGraphQLInputObjectType(GraphQLInputObjectType node, TraverserContext context); + TraversalControl visitGraphQLInterfaceType(GraphQLInterfaceType node, TraverserContext context); + TraversalControl visitGraphQLList(GraphQLList node, TraverserContext context); TraversalControl visitGraphQLNonNull(GraphQLNonNull node, TraverserContext context); @@ -86,10 +88,6 @@ default TraversalControl visitBackRef(TraverserContext con } // Marker interfaces - default TraversalControl visitGraphQLModifiedType(GraphQLModifiedType node, TraverserContext context) { - throw new UnsupportedOperationException(); - } - default TraversalControl visitGraphQLCompositeType(GraphQLCompositeType node, TraverserContext context) { throw new UnsupportedOperationException(); } @@ -110,6 +108,10 @@ default TraversalControl visitGraphQLInputType(GraphQLInputType node, 
TraverserC throw new UnsupportedOperationException(); } + default TraversalControl visitGraphQLModifiedType(GraphQLModifiedType node, TraverserContext context) { + throw new UnsupportedOperationException(); + } + default TraversalControl visitGraphQLNullableType(GraphQLNullableType node, TraverserContext context) { throw new UnsupportedOperationException(); } diff --git a/src/main/java/graphql/schema/PropertyFetchingImpl.java b/src/main/java/graphql/schema/PropertyFetchingImpl.java index 5a7a92989a..da8b153e3d 100644 --- a/src/main/java/graphql/schema/PropertyFetchingImpl.java +++ b/src/main/java/graphql/schema/PropertyFetchingImpl.java @@ -138,7 +138,18 @@ public Object getPropertyValue(String propertyName, Object object, GraphQLType g // // try by public getters name - object.getPropertyName() try { - MethodFinder methodFinder = (rootClass, methodName) -> findPubliclyAccessibleMethod(cacheKey, rootClass, methodName, dfeInUse); + MethodFinder methodFinder = (rootClass, methodName) -> findPubliclyAccessibleMethod(cacheKey, rootClass, methodName, dfeInUse, false); + return getPropertyViaGetterMethod(object, propertyName, graphQLType, methodFinder, singleArgumentValue); + } catch (NoSuchMethodException ignored) { + } + // + // try by public getters name - object.getPropertyName() where its static + try { + // we allow static getXXX() methods because we always have. It's strange in retrospect but + // in order to not break things we allow statics to be used. In theory this double code check is not needed + // because you CANT have a `static getFoo()` and a `getFoo()` in the same class hierarchy but to make the code read clearer + // I have repeated the lookup. 
Since we cache methods, this happens only once and does not slow us down + MethodFinder methodFinder = (rootClass, methodName) -> findPubliclyAccessibleMethod(cacheKey, rootClass, methodName, dfeInUse, true); return getPropertyViaGetterMethod(object, propertyName, graphQLType, methodFinder, singleArgumentValue); } catch (NoSuchMethodException ignored) { } @@ -215,7 +226,7 @@ private Object getPropertyViaGetterUsingPrefix(Object object, String propertyNam * which have abstract public interfaces implemented by package-protected * (generated) subclasses. */ - private Method findPubliclyAccessibleMethod(CacheKey cacheKey, Class rootClass, String methodName, boolean dfeInUse) throws NoSuchMethodException { + private Method findPubliclyAccessibleMethod(CacheKey cacheKey, Class rootClass, String methodName, boolean dfeInUse, boolean allowStaticMethods) throws NoSuchMethodException { Class currentClass = rootClass; while (currentClass != null) { if (Modifier.isPublic(currentClass.getModifiers())) { @@ -224,7 +235,7 @@ private Method findPubliclyAccessibleMethod(CacheKey cacheKey, Class rootClas // try a getter that takes singleArgumentType first (if we have one) try { Method method = currentClass.getMethod(methodName, singleArgumentType); - if (Modifier.isPublic(method.getModifiers())) { + if (isSuitablePublicMethod(method, allowStaticMethods)) { METHOD_CACHE.putIfAbsent(cacheKey, new CachedMethod(method)); return method; } @@ -233,7 +244,7 @@ private Method findPubliclyAccessibleMethod(CacheKey cacheKey, Class rootClas } } Method method = currentClass.getMethod(methodName); - if (Modifier.isPublic(method.getModifiers())) { + if (isSuitablePublicMethod(method, allowStaticMethods)) { METHOD_CACHE.putIfAbsent(cacheKey, new CachedMethod(method)); return method; } @@ -244,6 +255,18 @@ private Method findPubliclyAccessibleMethod(CacheKey cacheKey, Class rootClas return rootClass.getMethod(methodName); } + private boolean isSuitablePublicMethod(Method method, boolean 
allowStaticMethods) { + int methodModifiers = method.getModifiers(); + if (Modifier.isPublic(methodModifiers)) { + //noinspection RedundantIfStatement + if (Modifier.isStatic(methodModifiers) && !allowStaticMethods) { + return false; + } + return true; + } + return false; + } + /* https://docs.oracle.com/en/java/javase/15/language/records.html @@ -253,9 +276,11 @@ private Method findPubliclyAccessibleMethod(CacheKey cacheKey, Class rootClas However, we won't just restrict ourselves strictly to true records. We will find methods that are record like and fetch them - e.g. `object.propertyName()` + + We won't allow static methods for record like methods however */ private Method findRecordMethod(CacheKey cacheKey, Class rootClass, String methodName) throws NoSuchMethodException { - return findPubliclyAccessibleMethod(cacheKey, rootClass, methodName, false); + return findPubliclyAccessibleMethod(cacheKey, rootClass, methodName, false, false); } private Method findViaSetAccessible(CacheKey cacheKey, Class aClass, String methodName, boolean dfeInUse) throws NoSuchMethodException { diff --git a/src/main/java/graphql/schema/SchemaTransformer.java b/src/main/java/graphql/schema/SchemaTransformer.java index 70ad75cc38..06b6cfe08b 100644 --- a/src/main/java/graphql/schema/SchemaTransformer.java +++ b/src/main/java/graphql/schema/SchemaTransformer.java @@ -144,14 +144,14 @@ private Object transformImpl(final GraphQLSchema schema, GraphQLSchemaElement sc final Map typeReferences = new LinkedHashMap<>(); // first pass - general transformation - boolean schemaChanged = traverseAndTransform(dummyRoot, changedTypes, typeReferences, visitor, codeRegistry); + boolean schemaChanged = traverseAndTransform(dummyRoot, changedTypes, typeReferences, visitor, codeRegistry, schema); // if we have changed any named elements AND we have type references referring to them then // we need to make a second pass to replace these type references to the new names if (!changedTypes.isEmpty()) { 
boolean hasTypeRefsForChangedTypes = changedTypes.keySet().stream().anyMatch(typeReferences::containsKey); if (hasTypeRefsForChangedTypes) { - replaceTypeReferences(dummyRoot, codeRegistry, changedTypes); + replaceTypeReferences(dummyRoot, schema, codeRegistry, changedTypes); } } @@ -170,7 +170,7 @@ private Object transformImpl(final GraphQLSchema schema, GraphQLSchemaElement sc } } - private void replaceTypeReferences(DummyRoot dummyRoot, GraphQLCodeRegistry.Builder codeRegistry, Map changedTypes) { + private void replaceTypeReferences(DummyRoot dummyRoot, GraphQLSchema schema, GraphQLCodeRegistry.Builder codeRegistry, Map changedTypes) { GraphQLTypeVisitor typeRefVisitor = new GraphQLTypeVisitorStub() { @Override public TraversalControl visitGraphQLTypeReference(GraphQLTypeReference typeRef, TraverserContext context) { @@ -182,10 +182,10 @@ public TraversalControl visitGraphQLTypeReference(GraphQLTypeReference typeRef, return CONTINUE; } }; - traverseAndTransform(dummyRoot, new HashMap<>(), new HashMap<>(), typeRefVisitor, codeRegistry); + traverseAndTransform(dummyRoot, new HashMap<>(), new HashMap<>(), typeRefVisitor, codeRegistry, schema); } - private boolean traverseAndTransform(DummyRoot dummyRoot, Map changedTypes, Map typeReferences, GraphQLTypeVisitor visitor, GraphQLCodeRegistry.Builder codeRegistry) { + private boolean traverseAndTransform(DummyRoot dummyRoot, Map changedTypes, Map typeReferences, GraphQLTypeVisitor visitor, GraphQLCodeRegistry.Builder codeRegistry, GraphQLSchema schema) { List> zippers = new LinkedList<>(); Map> zipperByNodeAfterTraversing = new LinkedHashMap<>(); Map> zipperByOriginalNode = new LinkedHashMap<>(); @@ -195,7 +195,7 @@ private boolean traverseAndTransform(DummyRoot dummyRoot, Map> reverseDependencies = new LinkedHashMap<>(); Map> typeRefReverseDependencies = new LinkedHashMap<>(); - TraverserVisitor nodeTraverserVisitor = new TraverserVisitor() { + TraverserVisitor nodeTraverserVisitor = new TraverserVisitor<>() { 
@Override public TraversalControl enter(TraverserContext context) { GraphQLSchemaElement currentSchemaElement = context.thisNode(); @@ -271,6 +271,9 @@ public TraversalControl backRef(TraverserContext context) if (codeRegistry != null) { traverser.rootVar(GraphQLCodeRegistry.Builder.class, codeRegistry); } + if (schema != null) { + traverser.rootVar(GraphQLSchema.class, schema); + } traverser.traverse(dummyRoot, nodeTraverserVisitor); diff --git a/src/main/java/graphql/schema/SchemaTraverser.java b/src/main/java/graphql/schema/SchemaTraverser.java index 3be095aeaa..809904e30c 100644 --- a/src/main/java/graphql/schema/SchemaTraverser.java +++ b/src/main/java/graphql/schema/SchemaTraverser.java @@ -66,9 +66,11 @@ public TraverserResult depthFirstFullSchema(List typeVisitor roots.addAll(schema.getAdditionalTypes()); roots.addAll(schema.getDirectives()); roots.addAll(schema.getSchemaDirectives()); + roots.addAll(schema.getSchemaAppliedDirectives()); roots.add(schema.getIntrospectionSchemaType()); TraverserDelegateListVisitor traverserDelegateListVisitor = new TraverserDelegateListVisitor(typeVisitors); - return initTraverser().rootVars(rootVars).traverse(roots, traverserDelegateListVisitor); + Traverser traverser = initTraverser().rootVars(rootVars).rootVar(GraphQLSchema.class, schema); + return traverser.traverse(roots, traverserDelegateListVisitor); } public TraverserResult depthFirst(GraphQLTypeVisitor graphQLTypeVisitor, GraphQLSchemaElement root) { @@ -131,7 +133,10 @@ private static class TraverserDelegateListVisitor implements TraverserVisitor context) { for (GraphQLTypeVisitor graphQLTypeVisitor : typeVisitors) { - context.thisNode().accept(context, graphQLTypeVisitor); + TraversalControl control = context.thisNode().accept(context, graphQLTypeVisitor); + if (control != CONTINUE) { + return control; + } } return CONTINUE; } diff --git a/src/main/java/graphql/schema/diff/DiffEvent.java b/src/main/java/graphql/schema/diff/DiffEvent.java index 
11d07a60be..1b58988eae 100644 --- a/src/main/java/graphql/schema/diff/DiffEvent.java +++ b/src/main/java/graphql/schema/diff/DiffEvent.java @@ -1,6 +1,5 @@ package graphql.schema.diff; -import graphql.DeprecatedAt; import graphql.PublicApi; import graphql.language.TypeKind; @@ -74,16 +73,6 @@ public String toString() { '}'; } - /** - * @return a Builder of Info level diff events - * @deprecated use {@link DiffEvent#apiInfo()} instead - */ - @Deprecated - @DeprecatedAt("2017-12-27") - public static Builder newInfo() { - return new Builder().level(DiffLevel.INFO); - } - public static Builder apiInfo() { return new Builder().level(DiffLevel.INFO); } diff --git a/src/main/java/graphql/schema/diffing/DiffImpl.java b/src/main/java/graphql/schema/diffing/DiffImpl.java index b91bdc48dd..9cfd39b9cf 100644 --- a/src/main/java/graphql/schema/diffing/DiffImpl.java +++ b/src/main/java/graphql/schema/diffing/DiffImpl.java @@ -3,11 +3,11 @@ import com.google.common.collect.HashMultiset; import com.google.common.collect.Multiset; import com.google.common.collect.Multisets; -import com.google.common.util.concurrent.AtomicDoubleArray; import graphql.Internal; import java.util.ArrayList; -import java.util.Arrays; +import java.util.Collection; +import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; @@ -15,78 +15,119 @@ import java.util.PriorityQueue; import java.util.Set; import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.atomic.AtomicInteger; +import static graphql.Assert.assertFalse; import static graphql.Assert.assertTrue; +import static graphql.schema.diffing.EditorialCostForMapping.baseEditorialCostForMapping; import static graphql.schema.diffing.EditorialCostForMapping.editorialCostForMapping; +/** + * This is an algorithm calculating the optimal edit to change the source graph into the target graph. + *

+ * It is based on the following two papers (both papers are from the same authors. The first one is newer, but the older one is more detailed in some aspects) + *

+ * Accelerating Graph Similarity Search via Efficient GED Computation (https://lijunchang.github.io/pdf/2022-ged-tkde.pdf) + *

+ * Efficient Graph Edit Distance Computation and Verification via Anchor-aware Lower Bound Estimation (https://arxiv.org/abs/1709.06810) + *

+ * The algorithm is a modified version of "AStar-BMao". + * It is adapted to directed graphs as a GraphQL schema is most naturally represented as directed graph (vs the undirected graphs used in the papers). + */ @Internal public class DiffImpl { - private static final MappingEntry LAST_ELEMENT = new MappingEntry(); + private final PossibleMappingsCalculator possibleMappingsCalculator; private final SchemaGraph completeSourceGraph; private final SchemaGraph completeTargetGraph; - private final FillupIsolatedVertices.IsolatedVertices isolatedVertices; + private final PossibleMappingsCalculator.PossibleMappings possibleMappings; private final SchemaDiffingRunningCheck runningCheck; private static class MappingEntry { - public boolean siblingsFinished; - public LinkedBlockingQueue mappingEntriesSiblings; + public LinkedBlockingQueue mappingEntriesSiblings = new LinkedBlockingQueue<>(); public int[] assignments; + + /** + * These are the available vertices, relative to the parent mapping. + * Meaning the last mapped element is NOT contained in it. + */ public List availableTargetVertices; - Mapping partialMapping = new Mapping(); + Mapping partialMapping; int level; // = partialMapping.size double lowerBoundCost; + public MappingEntry(Mapping partialMapping, int level, double lowerBoundCost) { this.partialMapping = partialMapping; this.level = level; this.lowerBoundCost = lowerBoundCost; } - public MappingEntry() { - - } } + /** + * An optimal edit from one graph to another. + * The mapping maps all vertices from source to target, but + * not all mappings represent an actual change. This is why there is a separate list + * of the actual changes. 
+ */ public static class OptimalEdit { - public List mappings = new ArrayList<>(); - public List> listOfEditOperations = new ArrayList<>(); - - public List> listOfSets = new ArrayList<>(); + private final SchemaGraph completeSourceGraph; + private final SchemaGraph completeTargetGraph; + public Mapping mapping; public int ged = Integer.MAX_VALUE; - public OptimalEdit() { - + public OptimalEdit( + SchemaGraph completeSourceGraph, + SchemaGraph completeTargetGraph) { + this.completeSourceGraph = completeSourceGraph; + this.completeTargetGraph = completeTargetGraph; } - public OptimalEdit(List mappings, List> listOfEditOperations, int ged) { - this.mappings = mappings; - this.listOfEditOperations = listOfEditOperations; + public OptimalEdit( + SchemaGraph completeSourceGraph, + SchemaGraph completeTargetGraph, + Mapping mapping, + int ged) { + this.completeSourceGraph = completeSourceGraph; + this.completeTargetGraph = completeTargetGraph; + this.mapping = mapping; this.ged = ged; } + + public List getListOfEditOperations() { + ArrayList listOfEditOperations = new ArrayList<>(); + assertTrue(baseEditorialCostForMapping(mapping, completeSourceGraph, completeTargetGraph, listOfEditOperations) == ged); + return listOfEditOperations; + } } - public DiffImpl(SchemaGraph completeSourceGraph, SchemaGraph completeTargetGraph, FillupIsolatedVertices.IsolatedVertices isolatedVertices, SchemaDiffingRunningCheck runningCheck) { + public DiffImpl(PossibleMappingsCalculator possibleMappingsCalculator, SchemaGraph completeSourceGraph, SchemaGraph completeTargetGraph, PossibleMappingsCalculator.PossibleMappings possibleMappings, SchemaDiffingRunningCheck runningCheck) { + this.possibleMappingsCalculator = possibleMappingsCalculator; this.completeSourceGraph = completeSourceGraph; this.completeTargetGraph = completeTargetGraph; - this.isolatedVertices = isolatedVertices; + this.possibleMappings = possibleMappings; this.runningCheck = runningCheck; } - OptimalEdit diffImpl(Mapping 
startMapping, List relevantSourceList, List relevantTargetList) throws Exception { - int graphSize = relevantSourceList.size(); + OptimalEdit diffImpl(Mapping startMapping, List allSources, List allTargets, AtomicInteger algoIterationCount) throws Exception { + int graphSize = allSources.size(); - ArrayList initialEditOperations = new ArrayList<>(); - int mappingCost = editorialCostForMapping(startMapping, completeSourceGraph, completeTargetGraph, initialEditOperations); + int fixedEditorialCost = baseEditorialCostForMapping(startMapping, completeSourceGraph, completeTargetGraph); int level = startMapping.size(); - MappingEntry firstMappingEntry = new MappingEntry(startMapping, level, mappingCost); - System.out.println("first entry: lower bound: " + mappingCost + " at level " + level); - OptimalEdit optimalEdit = new OptimalEdit(); + List allNonFixedTargets = new ArrayList<>(allTargets); + startMapping.forEachTarget(allNonFixedTargets::remove); + + MappingEntry firstMappingEntry = new MappingEntry(startMapping, level, fixedEditorialCost); + firstMappingEntry.availableTargetVertices = allNonFixedTargets; + + OptimalEdit optimalEdit = new OptimalEdit(completeSourceGraph, completeTargetGraph); PriorityQueue queue = new PriorityQueue<>((mappingEntry1, mappingEntry2) -> { int compareResult = Double.compare(mappingEntry1.lowerBoundCost, mappingEntry2.lowerBoundCost); + // we prefer higher levels for equal lower bound costs if (compareResult == 0) { return Integer.compare(mappingEntry2.level, mappingEntry1.level); } else { @@ -94,33 +135,35 @@ OptimalEdit diffImpl(Mapping startMapping, List relevantSourceList, List } }); queue.add(firstMappingEntry); - firstMappingEntry.siblingsFinished = true; -// queue.add(new MappingEntry()); -// int counter = 0; + + while (!queue.isEmpty()) { MappingEntry mappingEntry = queue.poll(); -// System.out.println((++counter) + " check entry at level " + mappingEntry.level + " queue size: " + queue.size() + " lower bound " + 
mappingEntry.lowerBoundCost + " map " + getDebugMap(mappingEntry.partialMapping)); -// if ((++counter) % 100 == 0) { -// System.out.println((counter) + " entry at level"); -// } + algoIterationCount.incrementAndGet(); + if (mappingEntry.lowerBoundCost >= optimalEdit.ged) { - continue; + // once the lowest lowerBoundCost is not lower than the optimal edit, we are done + break; } - if (mappingEntry.level > 0 && !mappingEntry.siblingsFinished) { + + if (mappingEntry.level > 0 && !mappingEntry.mappingEntriesSiblings.isEmpty()) { addSiblingToQueue( + fixedEditorialCost, mappingEntry.level, queue, optimalEdit, - relevantSourceList, - relevantTargetList, + allSources, + allTargets, mappingEntry); } if (mappingEntry.level < graphSize) { - addChildToQueue(mappingEntry, + addChildToQueue( + fixedEditorialCost, + mappingEntry, queue, optimalEdit, - relevantSourceList, - relevantTargetList + allSources, + allTargets ); } @@ -132,124 +175,100 @@ OptimalEdit diffImpl(Mapping startMapping, List relevantSourceList, List // this calculates all children for the provided parentEntry, but only the first is directly added to the queue - private void addChildToQueue(MappingEntry parentEntry, + private void addChildToQueue(int fixedEditorialCost, + MappingEntry parentEntry, PriorityQueue queue, OptimalEdit optimalEdit, - List sourceList, - List targetList - + List allSources, + List allTargets ) { - Mapping partialMapping = parentEntry.partialMapping; - int level = parentEntry.level; + Mapping parentPartialMapping = parentEntry.partialMapping; + int parentLevel = parentEntry.level; + int level = parentLevel + 1; - assertTrue(level == partialMapping.size()); + assertTrue(parentLevel == parentPartialMapping.size()); + + // the available target vertices are the parent queue entry ones plus + // minus the additional mapped element in parentPartialMapping + ArrayList availableTargetVertices = new ArrayList<>(parentEntry.availableTargetVertices); + 
availableTargetVertices.remove(parentPartialMapping.getTarget(parentLevel - 1)); + assertTrue(availableTargetVertices.size() + parentPartialMapping.size() == allTargets.size()); + Vertex v_i = allSources.get(parentLevel); - ArrayList availableTargetVertices = new ArrayList<>(targetList); - availableTargetVertices.removeAll(partialMapping.getTargets()); - assertTrue(availableTargetVertices.size() + partialMapping.size() == targetList.size()); - Vertex v_i = sourceList.get(level); // the cost matrix is for the non mapped vertices - int costMatrixSize = sourceList.size() - level; + int costMatrixSize = allSources.size() - parentLevel; // costMatrix gets modified by the hungarian algorithm ... therefore we create two of them - AtomicDoubleArray[] costMatrixForHungarianAlgo = new AtomicDoubleArray[costMatrixSize]; - Arrays.setAll(costMatrixForHungarianAlgo, (index) -> new AtomicDoubleArray(costMatrixSize)); - AtomicDoubleArray[] costMatrix = new AtomicDoubleArray[costMatrixSize]; - Arrays.setAll(costMatrix, (index) -> new AtomicDoubleArray(costMatrixSize)); - - // we are skipping the first level -i indices - Set partialMappingSourceSet = new LinkedHashSet<>(partialMapping.getSources()); - Set partialMappingTargetSet = new LinkedHashSet<>(partialMapping.getTargets()); + double[][] costMatrixForHungarianAlgo = new double[costMatrixSize][costMatrixSize]; + double[][] costMatrix = new double[costMatrixSize][costMatrixSize]; + Map isolatedVerticesCache = new LinkedHashMap<>(); + Map nonFixedParentRestrictions = possibleMappingsCalculator.getNonFixedParentRestrictions(completeSourceGraph, completeTargetGraph, parentPartialMapping); - for (int i = level; i < sourceList.size(); i++) { - Vertex v = sourceList.get(i); + for (int i = parentLevel; i < allSources.size(); i++) { + Vertex v = allSources.get(i); int j = 0; for (Vertex u : availableTargetVertices) { - double cost = calcLowerBoundMappingCost(v, u, partialMapping.getSources(), partialMappingSourceSet, 
partialMapping.getTargets(), partialMappingTargetSet); - costMatrixForHungarianAlgo[i - level].set(j, cost); - costMatrix[i - level].set(j, cost); + double cost = calcLowerBoundMappingCost(v, u, parentPartialMapping, isolatedVerticesCache, nonFixedParentRestrictions); + costMatrixForHungarianAlgo[i - parentLevel][j] = cost; + costMatrix[i - parentLevel][j] = cost; j++; } - runningCheck.check(); } - HungarianAlgorithm hungarianAlgorithm = new HungarianAlgorithm(costMatrixForHungarianAlgo); + HungarianAlgorithm hungarianAlgorithm = new HungarianAlgorithm(costMatrixForHungarianAlgo); int[] assignments = hungarianAlgorithm.execute(); - int editorialCostForMapping = editorialCostForMapping(partialMapping, completeSourceGraph, completeTargetGraph, new ArrayList<>()); + int editorialCostForMapping = editorialCostForMapping(fixedEditorialCost, parentPartialMapping, completeSourceGraph, completeTargetGraph); double costMatrixSum = getCostMatrixSum(costMatrix, assignments); - - double lowerBoundForPartialMapping = editorialCostForMapping + costMatrixSum; - int v_i_target_IndexSibling = assignments[0]; - Vertex bestExtensionTargetVertexSibling = availableTargetVertices.get(v_i_target_IndexSibling); - Mapping newMappingSibling = partialMapping.extendMapping(v_i, bestExtensionTargetVertexSibling); + Mapping newMapping = parentPartialMapping.extendMapping(v_i, availableTargetVertices.get(assignments[0])); if (lowerBoundForPartialMapping >= optimalEdit.ged) { return; } - MappingEntry newMappingEntry = new MappingEntry(newMappingSibling, level + 1, lowerBoundForPartialMapping); + MappingEntry newMappingEntry = new MappingEntry(newMapping, level, lowerBoundForPartialMapping); LinkedBlockingQueue siblings = new LinkedBlockingQueue<>(); newMappingEntry.mappingEntriesSiblings = siblings; newMappingEntry.assignments = assignments; newMappingEntry.availableTargetVertices = availableTargetVertices; queue.add(newMappingEntry); - Mapping fullMapping = partialMapping.copy(); - for (int i = 
0; i < assignments.length; i++) { - fullMapping.add(sourceList.get(level + i), availableTargetVertices.get(assignments[i])); - } - List editOperations = new ArrayList<>(); - int costForFullMapping = editorialCostForMapping(fullMapping, completeSourceGraph, completeTargetGraph, editOperations); - updateOptimalEdit(optimalEdit, costForFullMapping, fullMapping, editOperations); + expandMappingAndUpdateOptimalMapping(fixedEditorialCost, + level, + optimalEdit, + allSources, + parentPartialMapping.copy(), + assignments, + availableTargetVertices, + lowerBoundForPartialMapping); calculateRestOfChildren( availableTargetVertices, hungarianAlgorithm, costMatrix, editorialCostForMapping, - partialMapping, + parentPartialMapping, v_i, optimalEdit.ged, - level + 1, + level, siblings ); } - private void updateOptimalEdit(OptimalEdit optimalEdit, int newGed, Mapping mapping, List editOperations) { - if (newGed < optimalEdit.ged) { - optimalEdit.ged = newGed; - - optimalEdit.listOfEditOperations.clear(); - optimalEdit.listOfEditOperations.add(editOperations); - - optimalEdit.listOfSets.clear(); - optimalEdit.listOfSets.add(new LinkedHashSet<>(editOperations)); - - optimalEdit.mappings.clear(); - optimalEdit.mappings.add(mapping); - System.out.println("setting new best edit at level " + (mapping.size()) + " with size " + editOperations.size()); - } else if (newGed == optimalEdit.ged) { - Set newSet = new LinkedHashSet<>(editOperations); - for (Set set : optimalEdit.listOfSets) { - if (set.equals(newSet)) { - return; - } - } - optimalEdit.listOfSets.add(newSet); - optimalEdit.listOfEditOperations.add(editOperations); - optimalEdit.mappings.add(mapping); - } + private void updateOptimalEdit(OptimalEdit optimalEdit, int newGed, Mapping mapping) { + assertTrue(newGed < optimalEdit.ged); + optimalEdit.ged = newGed; + optimalEdit.mapping = mapping; } // generate all children mappings and save in MappingEntry.sibling private void calculateRestOfChildren(List availableTargetVertices, 
HungarianAlgorithm hungarianAlgorithm, - AtomicDoubleArray[] costMatrixCopy, + double[][] costMatrixCopy, double editorialCostForMapping, Mapping partialMapping, Vertex v_i, @@ -260,15 +279,13 @@ private void calculateRestOfChildren(List availableTargetVertices, // starting from 1 as we already generated the first one for (int child = 1; child < availableTargetVertices.size(); child++) { int[] assignments = hungarianAlgorithm.nextChild(); - if (hungarianAlgorithm.costMatrix[0].get(assignments[0]) == Integer.MAX_VALUE) { + if (hungarianAlgorithm.costMatrix[0][assignments[0]] == Integer.MAX_VALUE) { break; } double costMatrixSumSibling = getCostMatrixSum(costMatrixCopy, assignments); double lowerBoundForPartialMappingSibling = editorialCostForMapping + costMatrixSumSibling; - int v_i_target_IndexSibling = assignments[0]; - Vertex bestExtensionTargetVertexSibling = availableTargetVertices.get(v_i_target_IndexSibling); - Mapping newMappingSibling = partialMapping.extendMapping(v_i, bestExtensionTargetVertexSibling); + Mapping newMappingSibling = partialMapping.extendMapping(v_i, availableTargetVertices.get(assignments[0])); if (lowerBoundForPartialMappingSibling >= upperBound) { @@ -283,48 +300,67 @@ private void calculateRestOfChildren(List availableTargetVertices, runningCheck.check(); } - siblings.add(LAST_ELEMENT); - } // this retrieves the next sibling from MappingEntry.sibling and adds it to the queue if the lowerBound is less than the current upperBound private void addSiblingToQueue( + int fixedEditorialCost, int level, PriorityQueue queue, OptimalEdit optimalEdit, - List sourceList, - List targetGraph, + List allSources, + List allTargets, MappingEntry mappingEntry) throws InterruptedException { + assertFalse(mappingEntry.mappingEntriesSiblings.isEmpty()); + MappingEntry sibling = mappingEntry.mappingEntriesSiblings.take(); - if (sibling == LAST_ELEMENT) { - mappingEntry.siblingsFinished = true; - return; - } if (sibling.lowerBoundCost < optimalEdit.ged) { -// 
System.out.println("adding new sibling entry " + getDebugMap(sibling.partialMapping) + " at level " + level + " with candidates left: " + sibling.availableTargetVertices.size() + " at lower bound: " + sibling.lowerBoundCost); - queue.add(sibling); // we need to start here from the parent mapping, this is why we remove the last element - Mapping fullMapping = sibling.partialMapping.removeLastElement(); - for (int i = 0; i < sibling.assignments.length; i++) { - fullMapping.add(sourceList.get(level - 1 + i), sibling.availableTargetVertices.get(sibling.assignments[i])); - } -// assertTrue(fullMapping.size() == this.sourceGraph.size()); - List editOperations = new ArrayList<>(); - int costForFullMapping = editorialCostForMapping(fullMapping, completeSourceGraph, completeTargetGraph, editOperations); - updateOptimalEdit(optimalEdit, costForFullMapping, fullMapping, editOperations); - } else { -// System.out.println("sibling not good enough"); + Mapping toExpand = sibling.partialMapping.copyMappingWithLastElementRemoved(); + + expandMappingAndUpdateOptimalMapping(fixedEditorialCost, + level, + optimalEdit, + allSources, + toExpand, + sibling.assignments, + sibling.availableTargetVertices, + sibling.lowerBoundCost); + } + } + + /** + * Extend the partial mapping to a full mapping according to the optimal + * matching (hungarian algo result) and update the optimal edit if we + * found a better one. 
+ */ + private void expandMappingAndUpdateOptimalMapping(int fixedEditorialCost, + int level, + OptimalEdit optimalEdit, + List allSources, + Mapping toExpand, + int[] assignments, + List availableTargetVertices, + double lowerBoundCost) { + for (int i = 0; i < assignments.length; i++) { + toExpand.add(allSources.get(level - 1 + i), availableTargetVertices.get(assignments[i])); + } + assertTrue(toExpand.size() == this.completeSourceGraph.size()); + int costForFullMapping = editorialCostForMapping(fixedEditorialCost, toExpand, completeSourceGraph, completeTargetGraph); + assertTrue(lowerBoundCost <= costForFullMapping); + if (costForFullMapping < optimalEdit.ged) { + updateOptimalEdit(optimalEdit, costForFullMapping, toExpand); } } - private double getCostMatrixSum(AtomicDoubleArray[] costMatrix, int[] assignments) { + private double getCostMatrixSum(double[][] costMatrix, int[] assignments) { double costMatrixSum = 0; for (int i = 0; i < assignments.length; i++) { - costMatrixSum += costMatrix[i].get(assignments[i]); + costMatrixSum += costMatrix[i][assignments[i]]; } return costMatrixSum; } @@ -333,89 +369,207 @@ private double getCostMatrixSum(AtomicDoubleArray[] costMatrix, int[] assignment * a partial mapping introduces a sub graph. The editorial cost is only calculated with respect to this sub graph. */ - // lower bound mapping cost between for v -> u in respect to a partial mapping - // this is BMa + + /** + * lower bound mapping cost between for v -> u in respect to a partial mapping. + * It basically tells the minimal costs we can expect for all mappings that come from extending + * the partial mapping with v -> u. + *

+ * This is basically the formula (5) from page 6 of https://lijunchang.github.io/pdf/2022-ged-tkde.pdf. + *

+ * + * The main difference is that the formula works with undirected graphs, but we have a directed graph, + * hence there is no 1/2 factor and for comparing the labels of anchored vertices to v/u we need to + * take both directions into account. + *

+ * + * The other optimization is that a schema graph will have never a lot of adjacent edges compared to + * the overall vertices count, therefore the algorithm for the anchored vertices costs iterates + * over the adjacent edges of v/u instead of all the mapped vertices. + *

+ * + * Additionally, there is a shortcut for isolated vertices, representing deletion/insertion which is also cached. + *

+ * Some naming: an anchored vertex is a vertex that is mapped via the partial mapping. + * An inner edge is an edge between two vertices that are both not anchored (mapped). + * The vertices v and u are by definition not mapped. + */ private double calcLowerBoundMappingCost(Vertex v, Vertex u, - List partialMappingSourceList, - Set partialMappingSourceSet, - List partialMappingTargetList, - Set partialMappingTargetSet + Mapping partialMapping, + Map isolatedVerticesCache, + Map nonFixedParentRestrictions) { + if (nonFixedParentRestrictions.containsKey(v) || partialMapping.hasParentRestriction(v)) { + Vertex uParentRestriction = nonFixedParentRestrictions.get(v); + if (uParentRestriction == null) { + uParentRestriction = partialMapping.getParentRestriction(v); + } - ) { - if (!isolatedVertices.mappingPossible(v, u)) { + Collection parentEdges = completeTargetGraph.getAdjacentEdgesInverseNonCopy(u); + if (parentEdges.size() != 1) { + return Integer.MAX_VALUE; + } + + Vertex uParent = parentEdges.iterator().next().getFrom(); + if (uParent != uParentRestriction) { + return Integer.MAX_VALUE; + } + } + + if (!possibleMappings.mappingPossible(v, u)) { return Integer.MAX_VALUE; } + if (u.isOfType(SchemaGraph.ISOLATED)) { + if (isolatedVerticesCache.containsKey(v)) { + return isolatedVerticesCache.get(v); + } + double result = calcLowerBoundMappingCostForIsolated(v, partialMapping, true); + isolatedVerticesCache.put(v, result); + return result; + } + if (v.isOfType(SchemaGraph.ISOLATED)) { + if (isolatedVerticesCache.containsKey(u)) { + return isolatedVerticesCache.get(u); + } + double result = calcLowerBoundMappingCostForIsolated(u, partialMapping, false); + isolatedVerticesCache.put(u, result); + return result; + } + boolean equalNodes = v.getType().equals(u.getType()) && v.getProperties().equals(u.getProperties()); - // inner edge labels of u (resp. v) in regards to the partial mapping: all labels of edges - // which are adjacent of u (resp. 
v) which are inner edges - List adjacentEdgesV = completeSourceGraph.getAdjacentEdges(v); + Collection adjacentEdgesV = completeSourceGraph.getAdjacentEdgesNonCopy(v); Multiset multisetLabelsV = HashMultiset.create(); for (Edge edge : adjacentEdgesV) { - // test if this an inner edge: meaning both edges vertices are part of the non mapped vertices - // or: at least one edge is part of the partial mapping - if (!partialMappingSourceSet.contains(edge.getFrom()) && !partialMappingSourceSet.contains(edge.getTo())) { + // test if this is an inner edge (meaning it not part of the subgraph induced by the partial mapping) + // we know that v is not part of the mapped vertices, therefore we only need to test the "to" vertex + if (!partialMapping.containsSource(edge.getTo())) { multisetLabelsV.add(edge.getLabel()); } } - List adjacentEdgesU = completeTargetGraph.getAdjacentEdges(u); + Collection adjacentEdgesU = completeTargetGraph.getAdjacentEdgesNonCopy(u); Multiset multisetLabelsU = HashMultiset.create(); for (Edge edge : adjacentEdgesU) { // test if this is an inner edge (meaning it not part of the subgraph induced by the partial mapping) - if (!partialMappingTargetSet.contains(edge.getFrom()) && !partialMappingTargetSet.contains(edge.getTo())) { + // we know that u is not part of the mapped vertices, therefore we only need to test the "to" vertex + if (!partialMapping.containsTarget(edge.getTo())) { multisetLabelsU.add(edge.getLabel()); } } - /** - * looking at all edges from x,vPrime and y,mappedVPrime - */ + int anchoredVerticesCost = calcAnchoredVerticesCost(v, u, partialMapping); + + Multiset intersection = Multisets.intersection(multisetLabelsV, multisetLabelsU); + int multiSetEditDistance = Math.max(multisetLabelsV.size(), multisetLabelsU.size()) - intersection.size(); + + double result = (equalNodes ? 
0 : 1) + multiSetEditDistance + anchoredVerticesCost; + return result; + } + + + private int calcAnchoredVerticesCost(Vertex v, + Vertex u, + Mapping partialMapping) { int anchoredVerticesCost = 0; - for (int i = 0; i < partialMappingSourceList.size(); i++) { - Vertex vPrime = partialMappingSourceList.get(i); - Vertex mappedVPrime = partialMappingTargetList.get(i); - - Edge sourceEdge = completeSourceGraph.getEdge(v, vPrime); - String labelSourceEdge = sourceEdge != null ? sourceEdge.getLabel() : null; - Edge targetEdge = completeTargetGraph.getEdge(u, mappedVPrime); - String labelTargetEdge = targetEdge != null ? targetEdge.getLabel() : null; - if (!Objects.equals(labelSourceEdge, labelTargetEdge)) { - anchoredVerticesCost++; + + Collection adjacentEdgesV = completeSourceGraph.getAdjacentEdgesNonCopy(v); + Collection adjacentEdgesU = completeTargetGraph.getAdjacentEdgesNonCopy(u); + + Collection adjacentEdgesInverseV = completeSourceGraph.getAdjacentEdgesInverseNonCopy(v); + Collection adjacentEdgesInverseU = completeTargetGraph.getAdjacentEdgesInverseNonCopy(u); + + Set matchedTargetEdges = new LinkedHashSet<>(); + Set matchedTargetEdgesInverse = new LinkedHashSet<>(); + + outer: + for (Edge edgeV : adjacentEdgesV) { + // we are only interested in edges from anchored vertices + if (!partialMapping.containsSource(edgeV.getTo())) { + continue; + } + for (Edge edgeU : adjacentEdgesU) { + // looking for an adjacent edge from u matching it + if (partialMapping.getTarget(edgeV.getTo()) == edgeU.getTo()) { + matchedTargetEdges.add(edgeU); + // found two adjacent edges, comparing the labels + if (!Objects.equals(edgeV.getLabel(), edgeU.getLabel())) { + anchoredVerticesCost++; + } + continue outer; + } } + // no matching adjacent edge from u found means there is no + // edge from edgeV.getTo() to mapped(edgeV.getTo()) + // and we need to increase the costs + anchoredVerticesCost++; - Edge sourceEdgeInverse = completeSourceGraph.getEdge(vPrime, v); - String 
labelSourceEdgeInverse = sourceEdgeInverse != null ? sourceEdgeInverse.getLabel() : null; - Edge targetEdgeInverse = completeTargetGraph.getEdge(mappedVPrime, u); - String labelTargetEdgeInverse = targetEdgeInverse != null ? targetEdgeInverse.getLabel() : null; - if (!Objects.equals(labelSourceEdgeInverse, labelTargetEdgeInverse)) { - anchoredVerticesCost++; + } + + outer: + for (Edge edgeV : adjacentEdgesInverseV) { + // we are only interested in edges from anchored vertices + if (!partialMapping.containsSource(edgeV.getFrom())) { + continue; } + for (Edge edgeU : adjacentEdgesInverseU) { + if (partialMapping.getTarget(edgeV.getFrom()) == edgeU.getFrom()) { + matchedTargetEdgesInverse.add(edgeU); + if (!Objects.equals(edgeV.getLabel(), edgeU.getLabel())) { + anchoredVerticesCost++; + } + continue outer; + } + } + anchoredVerticesCost++; - runningCheck.check(); } - Multiset intersection = Multisets.intersection(multisetLabelsV, multisetLabelsU); - int multiSetEditDistance = Math.max(multisetLabelsV.size(), multisetLabelsU.size()) - intersection.size(); + /** + * what is missing now is all edges from u (and inverse), which have not been matched. + */ + for (Edge edgeU : adjacentEdgesU) { + // we are only interested in edges from anchored vertices + if (!partialMapping.containsTarget(edgeU.getTo()) || matchedTargetEdges.contains(edgeU)) { + continue; + } + anchoredVerticesCost++; - double result = (equalNodes ? 
0 : 1) + multiSetEditDistance + anchoredVerticesCost; - return result; + } + for (Edge edgeU : adjacentEdgesInverseU) { + // we are only interested in edges from anchored vertices + if (!partialMapping.containsTarget(edgeU.getFrom()) || matchedTargetEdgesInverse.contains(edgeU)) { + continue; + } + anchoredVerticesCost++; + } + + return anchoredVerticesCost; } - private List getDebugMap(Mapping mapping) { - List result = new ArrayList<>(); -// if (mapping.size() > 0) { -// result.add(mapping.getSource(mapping.size() - 1).getType() + " -> " + mapping.getTarget(mapping.size() - 1).getType()); -// } - for (Map.Entry entry : mapping.getMap().entrySet()) { -// if (!entry.getKey().getType().equals(entry.getValue().getType())) { -// result.add(entry.getKey().getType() + "->" + entry.getValue().getType()); -// } - result.add(entry.getKey().getDebugName() + "->" + entry.getValue().getDebugName()); + + /** + * Simplified lower bound calc if the source/target vertex is isolated + */ + private double calcLowerBoundMappingCostForIsolated(Vertex vertex, + Mapping partialMapping, + boolean sourceOrTarget + ) { + SchemaGraph schemaGraph = sourceOrTarget ? 
completeSourceGraph : completeTargetGraph; + + // every adjacent edge is inserted/deleted for an isolated vertex + Collection adjacentEdges = schemaGraph.getAdjacentEdgesNonCopy(vertex); + + // for the inverse adjacent edges we only count the anchored ones + int anchoredInverseEdges = 0; + Collection adjacentEdgesInverse = schemaGraph.getAdjacentEdgesInverseNonCopy(vertex); + for (Edge edge : adjacentEdgesInverse) { + if (partialMapping.contains(edge.getFrom(), sourceOrTarget)) { + anchoredInverseEdges++; + } } - return result; + return 1 + adjacentEdges.size() + anchoredInverseEdges; } diff --git a/src/main/java/graphql/schema/diffing/EditOperation.java b/src/main/java/graphql/schema/diffing/EditOperation.java index 7ee0ef8adc..ff24400367 100644 --- a/src/main/java/graphql/schema/diffing/EditOperation.java +++ b/src/main/java/graphql/schema/diffing/EditOperation.java @@ -4,6 +4,15 @@ import java.util.Objects; +/** + * An edit operation between two graphs can be one of six types: + * insert vertex, + * delete vertex, + * change vertex, + * insert edge, + * delete edge, + * change edge + */ @Internal public class EditOperation { @@ -21,11 +30,11 @@ private EditOperation(Operation operation, this.targetEdge = targetEdge; } - public static EditOperation deleteVertex(String description, Vertex sourceVertex,Vertex targetVertex) { + public static EditOperation deleteVertex(String description, Vertex sourceVertex, Vertex targetVertex) { return new EditOperation(Operation.DELETE_VERTEX, description, sourceVertex, targetVertex, null, null); } - public static EditOperation insertVertex(String description,Vertex sourceVertex, Vertex targetVertex) { + public static EditOperation insertVertex(String description, Vertex sourceVertex, Vertex targetVertex) { return new EditOperation(Operation.INSERT_VERTEX, description, sourceVertex, targetVertex, null, null); } @@ -87,7 +96,6 @@ public String toString() { } - @Override public boolean equals(Object o) { if (this == o) { diff --git 
a/src/main/java/graphql/schema/diffing/EditorialCostForMapping.java b/src/main/java/graphql/schema/diffing/EditorialCostForMapping.java index 4137f90cbc..dbf38f2072 100644 --- a/src/main/java/graphql/schema/diffing/EditorialCostForMapping.java +++ b/src/main/java/graphql/schema/diffing/EditorialCostForMapping.java @@ -2,19 +2,51 @@ import graphql.Internal; +import java.util.ArrayList; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Predicate; @Internal public class EditorialCostForMapping { + /** + * @param mapping the mapping + * @param sourceGraph the source graph + * @param targetGraph the target graph + * + * @return the editorial cost + * + * @see #baseEditorialCostForMapping(Mapping, SchemaGraph, SchemaGraph, List) + */ + public static int baseEditorialCostForMapping(Mapping mapping, // can be a partial mapping + SchemaGraph sourceGraph, // the whole graph + SchemaGraph targetGraph // the whole graph + ) { + return baseEditorialCostForMapping(mapping, sourceGraph, targetGraph, new ArrayList<>()); + } /** - * a mapping introduces a subgraph consisting of all vertices and all edges between these vertices + * Gets the "editorial cost for mapping" for the base mapping. + *

+ * Use this is as base cost when invoking + * {@link #editorialCostForMapping(int, Mapping, SchemaGraph, SchemaGraph)} + * as it heavily speeds up performance. + * + * @param mapping the mapping + * @param sourceGraph the source graph + * @param targetGraph the target graph + * @param editOperationsResult the list of edit operations + * + * @return the editorial cost */ - public static int editorialCostForMapping(Mapping mapping, // can be a partial mapping - SchemaGraph sourceGraph, // the whole graph - SchemaGraph targetGraph, // the whole graph - List editOperationsResult) { + public static int baseEditorialCostForMapping(Mapping mapping, // can be a partial mapping + SchemaGraph sourceGraph, // the whole graph + SchemaGraph targetGraph, // the whole graph + List editOperationsResult) { int cost = 0; + for (int i = 0; i < mapping.size(); i++) { Vertex sourceVertex = mapping.getSource(i); Vertex targetVertex = mapping.getTarget(i); @@ -31,9 +63,9 @@ public static int editorialCostForMapping(Mapping mapping, // can be a partial m cost++; } } - List edges = sourceGraph.getEdges(); + // edge deletion or relabeling - for (Edge sourceEdge : edges) { + for (Edge sourceEdge : sourceGraph.getEdges()) { // only edges relevant to the subgraph if (!mapping.containsSource(sourceEdge.getFrom()) || !mapping.containsSource(sourceEdge.getTo())) { continue; @@ -50,7 +82,6 @@ public static int editorialCostForMapping(Mapping mapping, // can be a partial m } } - //TODO: iterates over all edges in the target Graph for (Edge targetEdge : targetGraph.getEdges()) { // only subgraph edges if (!mapping.containsTarget(targetEdge.getFrom()) || !mapping.containsTarget(targetEdge.getTo())) { @@ -63,8 +94,94 @@ public static int editorialCostForMapping(Mapping mapping, // can be a partial m cost++; } } + return cost; } + /** + * Calculates the "editorial cost for mapping" for the non-fixed targets in a {@link Mapping}. + *

+ * The {@code baseCost} argument should be the cost for the fixed mapping from + * {@link #baseEditorialCostForMapping(Mapping, SchemaGraph, SchemaGraph)}. + *

+ * The sum of the non-fixed costs and the fixed costs is total editorial cost for mapping. + * + * @param baseCost the starting base cost + * @param mapping the mapping + * @param sourceGraph the source graph + * @param targetGraph the target graph + * + * @return the editorial cost + */ + public static int editorialCostForMapping(int baseCost, + Mapping mapping, // can be a partial mapping + SchemaGraph sourceGraph, // the whole graph + SchemaGraph targetGraph // the whole graph + ) { + AtomicInteger cost = new AtomicInteger(baseCost); + + Set seenEdges = new LinkedHashSet<>(); + + // Tells us whether the edge should be visited. We need to avoid counting edges more than once + Predicate visitEdge = (data) -> { + if (seenEdges.contains(data)) { + return false; + } else { + seenEdges.add(data); + return true; + } + }; + + // Look through + mapping.forEachNonFixedSourceAndTarget((sourceVertex, targetVertex) -> { + // Vertex changing (relabeling) + boolean equalNodes = sourceVertex.getType().equals(targetVertex.getType()) && sourceVertex.getProperties().equals(targetVertex.getProperties()); + + if (!equalNodes) { + cost.getAndIncrement(); + } + + for (Edge sourceEdge : sourceGraph.getAdjacentEdgesAndInverseNonCopy(sourceVertex)) { + if (!visitEdge.test(sourceEdge)) { + continue; + } + + // only edges relevant to the subgraph + if (!mapping.containsSource(sourceEdge.getFrom()) || !mapping.containsSource(sourceEdge.getTo())) { + continue; + } + + Vertex targetFrom = mapping.getTarget(sourceEdge.getFrom()); + Vertex targetTo = mapping.getTarget(sourceEdge.getTo()); + Edge targetEdge = targetGraph.getEdge(targetFrom, targetTo); + + if (targetEdge == null) { + cost.getAndIncrement(); + } else if (!sourceEdge.getLabel().equals(targetEdge.getLabel())) { + cost.getAndIncrement(); + } + } + + for (Edge targetEdge : targetGraph.getAdjacentEdgesAndInverseNonCopy(targetVertex)) { + if (!visitEdge.test(targetEdge)) { + continue; + } + + // only edges relevant to the subgraph + if 
(!mapping.containsTarget(targetEdge.getFrom()) || !mapping.containsTarget(targetEdge.getTo())) { + continue; + } + + Vertex sourceFrom = mapping.getSource(targetEdge.getFrom()); + Vertex sourceTo = mapping.getSource(targetEdge.getTo()); + Edge sourceEdge = sourceGraph.getEdge(sourceFrom, sourceTo); + if (sourceEdge == null) { + cost.getAndIncrement(); + } + } + }); + + return cost.get(); + } } diff --git a/src/main/java/graphql/schema/diffing/GraphPrinter.java b/src/main/java/graphql/schema/diffing/GraphPrinter.java deleted file mode 100644 index da713fa22a..0000000000 --- a/src/main/java/graphql/schema/diffing/GraphPrinter.java +++ /dev/null @@ -1,23 +0,0 @@ -package graphql.schema.diffing; - -import graphql.Internal; -import graphql.schema.diffing.dot.Dotfile; - -@Internal -public class GraphPrinter { - - public static String print(SchemaGraph schemaGraph) { - Dotfile dotfile = new Dotfile(); - for (Vertex vertex : schemaGraph.getVertices()) { - String name = vertex.get("name"); - if (name == null) { - name = vertex.getType(); - } - dotfile.addNode("V" + Integer.toHexString(vertex.hashCode()), name, "blue"); - } - for (Edge edge : schemaGraph.getEdges()) { - dotfile.addEdge("V" + Integer.toHexString(edge.getFrom().hashCode()), "V" + Integer.toHexString(edge.getTo().hashCode()), edge.getLabel()); - } - return dotfile.print(); - } -} diff --git a/src/main/java/graphql/schema/diffing/HungarianAlgorithm.java b/src/main/java/graphql/schema/diffing/HungarianAlgorithm.java index 6476352792..b47d190f43 100644 --- a/src/main/java/graphql/schema/diffing/HungarianAlgorithm.java +++ b/src/main/java/graphql/schema/diffing/HungarianAlgorithm.java @@ -1,6 +1,5 @@ package graphql.schema.diffing; -import com.google.common.util.concurrent.AtomicDoubleArray; import graphql.Internal; import java.util.Arrays; @@ -35,7 +34,6 @@ * one worker and so that no worker is assigned to more than one job in such a * manner so as to minimize the total cost of completing the jobs. *

- *

* An assignment for a cost matrix that has more workers than jobs will * necessarily include unassigned workers, indicated by an assignment value of * -1; in no other circumstance will there be unassigned workers. Similarly, an @@ -44,7 +42,6 @@ * jobs. For completeness, an assignment for a square cost matrix will give * exactly one unique worker to each job. *

- *

* This version of the Hungarian algorithm runs in time O(n^3), where n is the * maximum among the number of workers and the number of jobs. * @@ -53,7 +50,7 @@ @Internal public class HungarianAlgorithm { // changed by reduce - public final AtomicDoubleArray[] costMatrix; + public final double[][] costMatrix; // constant always private final int rows; @@ -85,10 +82,10 @@ public class HungarianAlgorithm { * irregular in the sense that all rows must be the same length; in * addition, all entries must be non-infinite numbers. */ - public HungarianAlgorithm(AtomicDoubleArray[] costMatrix) { - this.dim = Math.max(costMatrix.length, costMatrix[0].length()); + public HungarianAlgorithm(double[][] costMatrix) { + this.dim = Math.max(costMatrix.length, costMatrix[0].length); this.rows = costMatrix.length; - this.cols = costMatrix[0].length(); + this.cols = costMatrix[0].length; this.costMatrix = costMatrix; // for (int w = 0; w < this.dim; w++) { // if (w < costMatrix.length) { @@ -131,8 +128,8 @@ protected void computeInitialFeasibleSolution() { } for (int w = 0; w < dim; w++) { for (int j = 0; j < dim; j++) { - if (costMatrix[w].get(j) < labelByJob[j]) { - labelByJob[j] = costMatrix[w].get(j); + if (costMatrix[w][j] < labelByJob[j]) { + labelByJob[j] = costMatrix[w][j]; } } } @@ -181,7 +178,6 @@ public int[] execute() { * more zero-slack edges (the labels of committed jobs are simultaneously * decreased by the same amount in order to maintain a feasible labeling). *

- *

* The runtime of a single phase of the algorithm is O(n^2), where n is the * dimension of the internal square cost matrix, since each edge is visited at * most once and since increasing the labeling is accomplished in time O(n) by @@ -241,7 +237,7 @@ protected void executePhase() { committedWorkers[worker] = true; for (int j = 0; j < dim; j++) { if (parentWorkerByCommittedJob[j] == -1) { - double slack = costMatrix[worker].get(j) - labelByWorker[worker] + double slack = costMatrix[worker][j] - labelByWorker[worker] - labelByJob[j]; if (minSlackValueByJob[j] > slack) { minSlackValueByJob[j] = slack; @@ -274,7 +270,7 @@ protected void greedyMatch() { for (int w = 0; w < dim; w++) { for (int j = 0; j < dim; j++) { if (matchJobByWorker[w] == -1 && matchWorkerByJob[j] == -1 - && costMatrix[w].get(j) - labelByWorker[w] - labelByJob[j] == 0) { + && costMatrix[w][j] - labelByWorker[w] - labelByJob[j] == 0) { match(w, j); } } @@ -293,13 +289,16 @@ protected void initializePhase(int w) { Arrays.fill(parentWorkerByCommittedJob, -1); committedWorkers[w] = true; for (int j = 0; j < dim; j++) { - minSlackValueByJob[j] = costMatrix[w].get(j) - labelByWorker[w] - labelByJob[j]; + minSlackValueByJob[j] = costMatrix[w][j] - labelByWorker[w] - labelByJob[j]; minSlackWorkerByJob[j] = w; } } /** * Helper method to record a matching between worker w and job j. 
+ * + * @param w the worker + * @param j the job */ protected void match(int w, int j) { matchJobByWorker[w] = j; @@ -316,12 +315,12 @@ protected void reduce() { for (int w = 0; w < dim; w++) { double min = Double.POSITIVE_INFINITY; for (int j = 0; j < dim; j++) { - if (costMatrix[w].get(j) < min) { - min = costMatrix[w].get(j); + if (costMatrix[w][j] < min) { + min = costMatrix[w][j]; } } for (int j = 0; j < dim; j++) { - costMatrix[w].set(j, costMatrix[w].get(j) - min); + costMatrix[w][j] = costMatrix[w][j] - min; } } double[] min = new double[dim]; @@ -330,14 +329,14 @@ protected void reduce() { } for (int w = 0; w < dim; w++) { for (int j = 0; j < dim; j++) { - if (costMatrix[w].get(j) < min[j]) { - min[j] = costMatrix[w].get(j); + if (costMatrix[w][j] < min[j]) { + min[j] = costMatrix[w][j]; } } } for (int w = 0; w < dim; w++) { for (int j = 0; j < dim; j++) { - costMatrix[w].set(j, costMatrix[w].get(j) - min[j]); + costMatrix[w][j] = costMatrix[w][j] - min[j]; } } } @@ -346,6 +345,8 @@ protected void reduce() { * Update labels with the specified slack by adding the slack value for * committed workers and by subtracting the slack value for committed jobs. In * addition, update the minimum slack values appropriately. 
+ * + * @param slack the specified slack */ protected void updateLabeling(double slack) { for (int w = 0; w < dim; w++) { @@ -365,7 +366,7 @@ protected void updateLabeling(double slack) { public int[] nextChild() { int currentJobAssigned = matchJobByWorker[0]; // we want to make currentJobAssigned not allowed,meaning we set the size to Infinity - costMatrix[0].set(currentJobAssigned, Integer.MAX_VALUE); + costMatrix[0][currentJobAssigned] = Integer.MAX_VALUE; matchWorkerByJob[currentJobAssigned] = -1; matchJobByWorker[0] = -1; minSlackValueByJob[currentJobAssigned] = Integer.MAX_VALUE; diff --git a/src/main/java/graphql/schema/diffing/Mapping.java b/src/main/java/graphql/schema/diffing/Mapping.java index 42ec164f03..68fbf22bd0 100644 --- a/src/main/java/graphql/schema/diffing/Mapping.java +++ b/src/main/java/graphql/schema/diffing/Mapping.java @@ -5,58 +5,126 @@ import graphql.Internal; import java.util.ArrayList; +import java.util.Collections; import java.util.List; -import java.util.Objects; - +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.function.Consumer; + +/** + * A mapping (in the math sense) from a list of vertices to another list of + * vertices. + * A mapping can semantically mean a change, but doesn't have to: a vertex + * can be mapped to the same vertex (semantically the same, Java object wise they are different). 
+ */ @Internal public class Mapping { - private BiMap map = HashBiMap.create(); - private List sourceList = new ArrayList<>(); - private List targetList = new ArrayList<>(); - private Mapping(BiMap map, List sourceList, List targetList) { + + private final Map fixedParentRestrictions; + private final BiMap fixedMappings; + private final List fixedSourceList; + private final List fixedTargetList; + + private final BiMap map; + private final List sourceList; + private final List targetList; + + private Mapping(Map fixedParentRestrictions, + BiMap fixedMappings, + List fixedSourceList, + List fixedTargetList, + BiMap map, + List sourceList, + List targetList) { + this.fixedParentRestrictions = fixedParentRestrictions; + this.fixedMappings = fixedMappings; + this.fixedSourceList = fixedSourceList; + this.fixedTargetList = fixedTargetList; this.map = map; this.sourceList = sourceList; this.targetList = targetList; } - public Mapping() { + public static Mapping newMapping(Map fixedParentRestrictions, + BiMap fixedMappings, + List fixedSourceList, + List fixedTargetList) { + return new Mapping( + fixedParentRestrictions, + fixedMappings, + fixedSourceList, + fixedTargetList, + HashBiMap.create(), + Collections.emptyList(), + Collections.emptyList()); + } + + public boolean hasParentRestriction(Vertex v) { + return fixedParentRestrictions.containsKey(v); + } + public Vertex getParentRestriction(Vertex v) { + return fixedParentRestrictions.get(v); } public Vertex getSource(Vertex target) { + if (fixedMappings.containsValue(target)) { + return fixedMappings.inverse().get(target); + } return map.inverse().get(target); } public Vertex getTarget(Vertex source) { + if (fixedMappings.containsKey(source)) { + return fixedMappings.get(source); + } return map.get(source); } public Vertex getSource(int i) { - return sourceList.get(i); + if (i < fixedSourceList.size()) { + return fixedSourceList.get(i); + } + return sourceList.get(i - fixedSourceList.size()); } public Vertex 
getTarget(int i) { - return targetList.get(i); - } - - public List getTargets() { - return targetList; - } - - public List getSources() { - return sourceList; + if (i < fixedTargetList.size()) { + return fixedTargetList.get(i); + } + return targetList.get(i - fixedTargetList.size()); } public boolean containsSource(Vertex sourceVertex) { + if (fixedMappings.containsKey(sourceVertex)) { + return true; + } return map.containsKey(sourceVertex); } public boolean containsTarget(Vertex targetVertex) { + if (fixedMappings.containsValue(targetVertex)) { + return true; + } return map.containsValue(targetVertex); } + + public boolean contains(Vertex vertex, boolean sourceOrTarget) { + return sourceOrTarget ? containsSource(vertex) : containsTarget(vertex); + } + + public int size() { + return fixedMappings.size() + map.size(); + } + + public int fixedSize() { + return fixedMappings.size(); + } + + public int nonFixedSize() { return map.size(); } @@ -66,19 +134,19 @@ public void add(Vertex source, Vertex target) { this.targetList.add(target); } - public Mapping removeLastElement() { + public Mapping copyMappingWithLastElementRemoved() { HashBiMap newMap = HashBiMap.create(map); newMap.remove(this.sourceList.get(this.sourceList.size() - 1)); List newSourceList = new ArrayList<>(this.sourceList.subList(0, this.sourceList.size() - 1)); List newTargetList = new ArrayList<>(this.targetList.subList(0, this.targetList.size() - 1)); - return new Mapping(newMap, newSourceList, newTargetList); + return new Mapping(fixedParentRestrictions, fixedMappings, fixedSourceList, fixedTargetList, newMap, newSourceList, newTargetList); } public Mapping copy() { HashBiMap newMap = HashBiMap.create(map); List newSourceList = new ArrayList<>(this.sourceList); List newTargetList = new ArrayList<>(this.targetList); - return new Mapping(newMap, newSourceList, newTargetList); + return new Mapping(fixedParentRestrictions, fixedMappings, fixedSourceList, fixedTargetList, newMap, newSourceList, 
newTargetList); } public Mapping extendMapping(Vertex source, Vertex target) { @@ -88,27 +156,39 @@ public Mapping extendMapping(Vertex source, Vertex target) { newSourceList.add(source); List newTargetList = new ArrayList<>(this.targetList); newTargetList.add(target); - return new Mapping(newMap, newSourceList, newTargetList); + return new Mapping(fixedParentRestrictions, fixedMappings, fixedSourceList, fixedTargetList, newMap, newSourceList, newTargetList); } - public BiMap getMap() { - return map; + public void forEachTarget(Consumer action) { + for (Vertex t : fixedTargetList) { + action.accept(t); + } + for (Vertex t : targetList) { + action.accept(t); + } } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; + public void forEachNonFixedTarget(Consumer action) { + for (Vertex t : targetList) { + action.accept(t); } - Mapping mapping = (Mapping) o; - return Objects.equals(map, mapping.map); } - @Override - public int hashCode() { - return Objects.hash(map); + public void forEachNonFixedSourceAndTarget(BiConsumer consumer) { + map.forEach(consumer); + } + + public Mapping invert() { + BiMap invertedFixedMappings = HashBiMap.create(); + for (Vertex s : fixedMappings.keySet()) { + Vertex t = fixedMappings.get(s); + invertedFixedMappings.put(t, s); + } + BiMap invertedMap = HashBiMap.create(); + for (Vertex s : map.keySet()) { + Vertex t = map.get(s); + invertedMap.put(t, s); + } + return new Mapping(fixedParentRestrictions, invertedFixedMappings, fixedTargetList, fixedSourceList, invertedMap, targetList, sourceList); } } diff --git a/src/main/java/graphql/schema/diffing/FillupIsolatedVertices.java b/src/main/java/graphql/schema/diffing/PossibleMappingsCalculator.java similarity index 70% rename from src/main/java/graphql/schema/diffing/FillupIsolatedVertices.java rename to src/main/java/graphql/schema/diffing/PossibleMappingsCalculator.java index 
a648a1161f..ac13a82dc8 100644 --- a/src/main/java/graphql/schema/diffing/FillupIsolatedVertices.java +++ b/src/main/java/graphql/schema/diffing/PossibleMappingsCalculator.java @@ -15,6 +15,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; @@ -36,20 +37,32 @@ import static graphql.schema.diffing.SchemaGraph.SCHEMA; import static graphql.schema.diffing.SchemaGraph.UNION; import static graphql.util.FpKit.concat; -import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; +/** + * We don't want to allow arbitrary schema changes. For example changing an Object type into a Scalar + * is not something we want to consider. + *

+ * We do this to make SchemaDiffings better understandable, but also to improve the overall runtime of + * the algorithm. By restricting the possible mappings the Schema diffing algo is actually able to + * finish in a reasonable time for real life inputs. + *

+ * + * We restrict the algo by calculating which mappings are possible for given vertex. This is later used in + * {@link DiffImpl#calcLowerBoundMappingCost}. + * While doing this we need to also ensure that there are the same amount of vertices in the same "context": + * for example if the source graph has 3 Objects, the target graph needs to have 3 Objects. We achieve this by + * adding "isolated vertices" as needed. + */ @Internal -public class FillupIsolatedVertices { +public class PossibleMappingsCalculator { private final SchemaDiffingRunningCheck runningCheck; private final SchemaGraph sourceGraph; private final SchemaGraph targetGraph; - private final IsolatedVertices isolatedVertices; + private final PossibleMappings possibleMappings; - private final BiMap toRemove = HashBiMap.create(); - - final static Map> typeContexts = new LinkedHashMap<>(); + private static final Map> typeContexts = new LinkedHashMap<>(); static { typeContexts.put(SCHEMA, schemaContext()); @@ -351,11 +364,19 @@ public boolean filter(Vertex vertex, SchemaGraph schemaGraph) { return APPLIED_DIRECTIVE.equals(vertex.getType()); } }; - VertexContextSegment appliedDirectiveName = new VertexContextSegment() { + VertexContextSegment appliedDirectiveIndex = new VertexContextSegment() { @Override public String idForVertex(Vertex appliedDirective, SchemaGraph schemaGraph) { int appliedDirectiveIndex = schemaGraph.getAppliedDirectiveIndex(appliedDirective); - return appliedDirectiveIndex + ":" + appliedDirective.getName(); + return Integer.toString(appliedDirectiveIndex); + } + + }; + + VertexContextSegment appliedDirectiveName = new VertexContextSegment() { + @Override + public String idForVertex(Vertex appliedDirective, SchemaGraph schemaGraph) { + return appliedDirective.getName(); } @Override @@ -455,7 +476,16 @@ public boolean filter(Vertex vertex, SchemaGraph schemaGraph) { return true; } }; - List contexts = Arrays.asList(appliedDirectiveType, parentOfParentOfContainer, parentOfContainer, 
appliedDirectiveContainer, appliedDirectiveName); + VertexContextSegment vertexContextSegment = new VertexContextSegment() { + @Override + public String idForVertex(Vertex vertex, SchemaGraph schemaGraph) { + return parentOfParentOfContainer.idForVertex(vertex, schemaGraph) + "." + + parentOfContainer.idForVertex(vertex, schemaGraph) + "." + + appliedDirectiveContainer.idForVertex(vertex, schemaGraph) + "." + + appliedDirectiveName.idForVertex(vertex, schemaGraph); + } + }; + List contexts = Arrays.asList(appliedDirectiveType, vertexContextSegment, appliedDirectiveIndex); return contexts; } @@ -591,7 +621,17 @@ public boolean filter(Vertex vertex, SchemaGraph schemaGraph) { return true; } }; - List contexts = Arrays.asList(appliedArgumentType, parentOfParentOfContainer, parentOfContainer, appliedDirectiveContainer, appliedDirective, appliedArgumentName); + VertexContextSegment combined = new VertexContextSegment() { + @Override + public String idForVertex(Vertex vertex, SchemaGraph schemaGraph) { + return parentOfContainer.idForVertex(vertex, schemaGraph) + "." + + parentOfContainer.idForVertex(vertex, schemaGraph) + "." + + appliedDirectiveContainer.idForVertex(vertex, schemaGraph) + "." + + appliedDirective.idForVertex(vertex, schemaGraph) + "." 
+ + appliedArgumentName.idForVertex(vertex, schemaGraph); + } + }; + List contexts = Arrays.asList(appliedArgumentType, combined); return contexts; } @@ -711,19 +751,18 @@ public boolean filter(Vertex argument, SchemaGraph schemaGraph) { } - public FillupIsolatedVertices(SchemaGraph sourceGraph, SchemaGraph targetGraph, SchemaDiffingRunningCheck runningCheck) { + public PossibleMappingsCalculator(SchemaGraph sourceGraph, SchemaGraph targetGraph, SchemaDiffingRunningCheck runningCheck) { this.runningCheck = runningCheck; this.sourceGraph = sourceGraph; this.targetGraph = targetGraph; - this.isolatedVertices = new IsolatedVertices(); + this.possibleMappings = new PossibleMappings(); } - public void ensureGraphAreSameSize() { + public PossibleMappings calculate() { calcPossibleMappings(typeContexts.get(SCHEMA), SCHEMA); calcPossibleMappings(typeContexts.get(FIELD), FIELD); calcPossibleMappings(typeContexts.get(ARGUMENT), ARGUMENT); calcPossibleMappings(typeContexts.get(INPUT_FIELD), INPUT_FIELD); -// calcPossibleMappings(typeContexts.get(DUMMY_TYPE_VERTEX), DUMMY_TYPE_VERTEX); calcPossibleMappings(typeContexts.get(OBJECT), OBJECT); calcPossibleMappings(typeContexts.get(INTERFACE), INTERFACE); calcPossibleMappings(typeContexts.get(UNION), UNION); @@ -736,35 +775,34 @@ public void ensureGraphAreSameSize() { calcPossibleMappings(typeContexts.get(DIRECTIVE), DIRECTIVE); - sourceGraph.addVertices(isolatedVertices.allIsolatedSource); - targetGraph.addVertices(isolatedVertices.allIsolatedTarget); + sourceGraph.addVertices(possibleMappings.allIsolatedSource); + targetGraph.addVertices(possibleMappings.allIsolatedTarget); Assert.assertTrue(sourceGraph.size() == targetGraph.size()); - } + Set vertices = possibleMappings.possibleMappings.keySet(); + for (Vertex vertex : vertices) { + if (possibleMappings.possibleMappings.get(vertex).size() > 1) { +// System.out.println("vertex with possible mappings: " + possibleMappings.possibleMappings.get(vertex).size()); +// 
System.out.println("vertex " + vertex); +// System.out.println("-------------"); + } + } + return possibleMappings; + } public abstract static class VertexContextSegment { - - private List children; - - public VertexContextSegment(List children) { - this.children = children; - } - public VertexContextSegment() { - this.children = emptyList(); - } - - public VertexContextSegment(VertexContextSegment child) { - this.children = singletonList(child); } public abstract String idForVertex(Vertex vertex, SchemaGraph schemaGraph); - public abstract boolean filter(Vertex vertex, SchemaGraph schemaGraph); + public boolean filter(Vertex vertex, SchemaGraph schemaGraph) { + return true; + } } - public class IsolatedVertices { + public class PossibleMappings { public Set allIsolatedSource = new LinkedHashSet<>(); public Set allIsolatedTarget = new LinkedHashSet<>(); @@ -772,38 +810,119 @@ public class IsolatedVertices { public Table, Set, Set> contexts = HashBasedTable.create(); public Multimap possibleMappings = HashMultimap.create(); - public Mapping mapping = new Mapping(); - public void putPossibleMappings(Collection sourceVertices, Collection targetVertex) { + public BiMap fixedOneToOneMappings = HashBiMap.create(); + public List fixedOneToOneSources = new ArrayList<>(); + public List fixedOneToOneTargets = new ArrayList<>(); + + public void putPossibleMappings(List contextId, + Collection sourceVertices, + Collection targetVertices, + String typeName) { + if (sourceVertices.isEmpty() && targetVertices.isEmpty()) { + return; + } + + if (sourceVertices.size() == 1 && targetVertices.size() == 1) { + Vertex sourceVertex = sourceVertices.iterator().next(); + Vertex targetVertex = targetVertices.iterator().next(); + fixedOneToOneMappings.put(sourceVertex, targetVertex); + fixedOneToOneSources.add(sourceVertex); + fixedOneToOneTargets.add(targetVertex); + return; + } + + if (APPLIED_DIRECTIVE.equals(typeName) || APPLIED_ARGUMENT.equals(typeName)) { + for (Vertex sourceVertex : 
sourceVertices) { + Vertex isolatedTarget = Vertex.newIsolatedNode("target-isolated-" + typeName); + allIsolatedTarget.add(isolatedTarget); + fixedOneToOneMappings.put(sourceVertex, isolatedTarget); + fixedOneToOneSources.add(sourceVertex); + fixedOneToOneTargets.add(isolatedTarget); + } + for (Vertex targetVertex : targetVertices) { + Vertex isolatedSource = Vertex.newIsolatedNode("source-isolated-" + typeName); + allIsolatedSource.add(isolatedSource); + fixedOneToOneMappings.put(isolatedSource, targetVertex); + fixedOneToOneSources.add(isolatedSource); + fixedOneToOneTargets.add(targetVertex); + } + return; + } + + Set newIsolatedSource = Collections.emptySet(); + Set newIsolatedTarget = Collections.emptySet(); + if (sourceVertices.size() > targetVertices.size()) { + newIsolatedTarget = Vertex.newIsolatedNodes(sourceVertices.size() - targetVertices.size(), "target-isolated-" + typeName + "-"); + } else if (targetVertices.size() > sourceVertices.size()) { + newIsolatedSource = Vertex.newIsolatedNodes(targetVertices.size() - sourceVertices.size(), "source-isolated-" + typeName + "-"); + } + this.allIsolatedSource.addAll(newIsolatedSource); + this.allIsolatedTarget.addAll(newIsolatedTarget); + + if (sourceVertices.size() == 0) { + Iterator iterator = newIsolatedSource.iterator(); + for (Vertex targetVertex : targetVertices) { + Vertex isolatedSourceVertex = iterator.next(); + fixedOneToOneMappings.put(isolatedSourceVertex, targetVertex); + fixedOneToOneSources.add(isolatedSourceVertex); + fixedOneToOneTargets.add(targetVertex); + } + return; + } + if (targetVertices.size() == 0) { + Iterator iterator = newIsolatedTarget.iterator(); + for (Vertex sourceVertex : sourceVertices) { + Vertex isolatedTargetVertex = iterator.next(); + fixedOneToOneMappings.put(sourceVertex, isolatedTargetVertex); + fixedOneToOneSources.add(sourceVertex); + fixedOneToOneTargets.add(isolatedTargetVertex); + } + return; + } + +// System.out.println("multiple mappings for context" + contextId 
+ " overall size: " + (sourceVertices.size() + newIsolatedSource.size())); +// List vertexContextSegments = typeContexts.get(typeName); +// System.out.println("source ids: " + sourceVertices.size()); +// for (Vertex sourceVertex : sourceVertices) { +// List id = vertexContextSegments.stream().map(vertexContextSegment -> vertexContextSegment.idForVertex(sourceVertex, sourceGraph)) +// .collect(Collectors.toList()); +// System.out.println("id: " + id); +// } +// System.out.println("target ids ==================: " + targetVertices.size()); +// for (Vertex targetVertex : targetVertices) { +// List id = vertexContextSegments.stream().map(vertexContextSegment -> vertexContextSegment.idForVertex(targetVertex, targetGraph)) +// .collect(Collectors.toList()); +// System.out.println("id: " + id); +// } +// System.out.println("-------------------"); +// System.out.println("-------------------"); + + Assert.assertFalse(contexts.containsRow(contextId)); + + Set allSource = new LinkedHashSet<>(); + allSource.addAll(sourceVertices); + allSource.addAll(newIsolatedSource); + Set allTarget = new LinkedHashSet<>(); + allTarget.addAll(targetVertices); + allTarget.addAll(newIsolatedTarget); + contexts.put(contextId, allSource, allTarget); for (Vertex sourceVertex : sourceVertices) { - possibleMappings.putAll(sourceVertex, targetVertex); + possibleMappings.putAll(sourceVertex, targetVertices); + possibleMappings.putAll(sourceVertex, newIsolatedTarget); + } + for (Vertex sourceIsolatedVertex : newIsolatedSource) { + possibleMappings.putAll(sourceIsolatedVertex, targetVertices); + possibleMappings.putAll(sourceIsolatedVertex, newIsolatedTarget); } - } - public void addIsolatedSource(Collection isolatedSource) { - allIsolatedSource.addAll(isolatedSource); - } - public void addIsolatedTarget(Collection isolatedTarget) { - allIsolatedTarget.addAll(isolatedTarget); } // public boolean mappingPossible(Vertex sourceVertex, Vertex targetVertex) { return 
possibleMappings.containsEntry(sourceVertex, targetVertex); } - - public void putContext(List contextId, Set source, Set target) { - if (contexts.containsRow(contextId)) { - throw new IllegalArgumentException("Already context " + contextId); - } - Assert.assertTrue(source.size() == target.size()); - if (source.size() == 1) { - mapping.add(source.iterator().next(), target.iterator().next()); - } - contexts.put(contextId, source, target); - } - } @@ -866,22 +985,23 @@ private void calcPossibleMappingImpl( Set notUsedTarget = new LinkedHashSet<>(targetVerticesInContext); notUsedTarget.removeAll(usedTargetVertices); - // make sure the current context is the same size - if (notUsedSource.size() > notUsedTarget.size()) { - Set newTargetVertices = Vertex.newIsolatedNodes(notUsedSource.size() - notUsedTarget.size(), "target-isolated-" + typeNameForDebug + "-"); - isolatedVertices.addIsolatedTarget(newTargetVertices); - notUsedTarget.addAll(newTargetVertices); - } else if (notUsedTarget.size() > notUsedSource.size()) { - Set newSourceVertices = Vertex.newIsolatedNodes(notUsedTarget.size() - notUsedSource.size(), "source-isolated-" + typeNameForDebug + "-"); - isolatedVertices.addIsolatedSource(newSourceVertices); - notUsedSource.addAll(newSourceVertices); - } - isolatedVertices.putPossibleMappings(notUsedSource, notUsedTarget); + possibleMappings.putPossibleMappings(currentContextId, notUsedSource, notUsedTarget, typeNameForDebug); usedSourceVertices.addAll(notUsedSource); usedTargetVertices.addAll(notUsedTarget); - if (notUsedSource.size() > 0) { - isolatedVertices.putContext(currentContextId, notUsedSource, notUsedTarget); + } + + /** + * update the used vertices with the deleted and inserted contexts + */ + Set possibleSourceVertices = new LinkedHashSet<>(); + for (String deletedContext : deletedContexts) { + ImmutableList vertices = sourceGroups.get(deletedContext); + for (Vertex sourceVertex : vertices) { + if (!usedSourceVertices.contains(sourceVertex)) { + 
possibleSourceVertices.add(sourceVertex); + } } + usedSourceVertices.addAll(vertices); } Set possibleTargetVertices = new LinkedHashSet<>(); @@ -894,38 +1014,129 @@ private void calcPossibleMappingImpl( } usedTargetVertices.addAll(vertices); } + if (contextId.size() == 0) { + contextId = singletonList(typeNameForDebug); + } + possibleMappings.putPossibleMappings(contextId, possibleSourceVertices, possibleTargetVertices, typeNameForDebug); + } - Set possibleSourceVertices = new LinkedHashSet<>(); - for (String deletedContext : deletedContexts) { - ImmutableList vertices = sourceGroups.get(deletedContext); - for (Vertex sourceVertex : vertices) { - if (!usedSourceVertices.contains(sourceVertex)) { - possibleSourceVertices.add(sourceVertex); + public Map getFixedParentRestrictions() { + return getFixedParentRestrictions( + sourceGraph, + possibleMappings.fixedOneToOneSources, + possibleMappings.fixedOneToOneMappings + ); + } + + public Map getFixedParentRestrictionsInverse(Map fixedOneToOneMappingsInverted) { + return getFixedParentRestrictions( + targetGraph, + possibleMappings.fixedOneToOneTargets, + fixedOneToOneMappingsInverted + ); + } + + /** + * This computes the initial set of parent restrictions based on the fixed portion of the mapping. + *

+ * See {@link Mapping} for definition of fixed vs non-fixed. + *

+ * If a {@link Vertex} is present in the output {@link Map} then the value is the parent the + * vertex MUST map to. + *

+ * e.g. for an output {collar: Dog} then the collar vertex must be a child of Dog in the mapping. + * + * @return Map where key is any vertex, and the value is the parent that vertex must map to + */ + private Map getFixedParentRestrictions(SchemaGraph sourceGraph, + List fixedSourceVertices, + Map fixedOneToOneMappings) { + Assert.assertFalse(fixedOneToOneMappings.isEmpty()); + + List needsFixing = new ArrayList<>(sourceGraph.getVertices()); + needsFixing.removeAll(fixedSourceVertices); + + Map restrictions = new LinkedHashMap<>(); + + for (Vertex vertex : needsFixing) { + if (hasParentRestrictions(vertex)) { + Vertex sourceParent = sourceGraph.getSingleAdjacentInverseVertex(vertex); + Vertex fixedTargetParent = fixedOneToOneMappings.get(sourceParent); + + if (fixedTargetParent != null) { + for (Edge edge : sourceGraph.getAdjacentEdgesNonCopy(sourceParent)) { + Vertex sibling = edge.getTo(); + + if (hasParentRestrictions(sibling)) { + restrictions.put(sibling, fixedTargetParent); + } + } } } - usedSourceVertices.addAll(vertices); } - if (possibleSourceVertices.size() > possibleTargetVertices.size()) { - Set newTargetVertices = Vertex.newIsolatedNodes(possibleSourceVertices.size() - possibleTargetVertices.size(), "target-isolated-" + typeNameForDebug + "-"); - isolatedVertices.addIsolatedTarget(newTargetVertices); - possibleTargetVertices.addAll(newTargetVertices); - } else if (possibleTargetVertices.size() > possibleSourceVertices.size()) { - Set newSourceVertices = Vertex.newIsolatedNodes(possibleTargetVertices.size() - possibleSourceVertices.size(), "source-isolated-" + typeNameForDebug + "-"); - isolatedVertices.addIsolatedSource(newSourceVertices); - possibleSourceVertices.addAll(newSourceVertices); - } - // if there are only added or removed vertices in the current context, contextId might be empty - if (possibleSourceVertices.size() > 0) { - if (contextId.size() == 0) { - contextId = singletonList(typeNameForDebug); + return restrictions; + } + + /** + * 
This computes the initial set of parent restrictions based on the given non-fixed mapping. + *

+ * i.e. this introduces restrictions as the {@link Mapping} is being built, as decisions + * can have knock on effects on other vertices' possible mappings. + *

+ * See {@link Mapping} for definition of fixed vs non-fixed. + *

+ * If a {@link Vertex} is present in the output {@link Map} then the value is the parent the + * vertex MUST map to. + *

+ * e.g. for an output {collar: Dog} then the collar vertex must be a child of Dog in the mapping. + * + * @param mapping the mapping to get non-fixed parent restrictions for + * @param sourceGraph the source graph + * @param targetGraph the target graph + * @return Map where key is any vertex, and the value is the parent that vertex must map to + */ + public Map getNonFixedParentRestrictions(SchemaGraph sourceGraph, + SchemaGraph targetGraph, + Mapping mapping) { + Map restrictions = new LinkedHashMap<>(); + + mapping.forEachNonFixedSourceAndTarget((source, target) -> { + if (hasChildrenRestrictions(source) && hasChildrenRestrictions(target)) { + for (Edge edge : sourceGraph.getAdjacentEdgesNonCopy(source)) { + Vertex child = edge.getTo(); + + if (hasParentRestrictions(child)) { + restrictions.put(child, target); + } + } + } else if (hasParentRestrictions(source) && hasParentRestrictions(target)) { + Vertex sourceParent = sourceGraph.getSingleAdjacentInverseVertex(source); + Vertex targetParent = targetGraph.getSingleAdjacentInverseVertex(target); + + for (Edge edge : sourceGraph.getAdjacentEdgesNonCopy(sourceParent)) { + Vertex sibling = edge.getTo(); + + if (hasParentRestrictions(sibling)) { + restrictions.put(sibling, targetParent); + } + } } - isolatedVertices.putContext(contextId, possibleSourceVertices, possibleTargetVertices); - } - isolatedVertices.putPossibleMappings(possibleSourceVertices, possibleTargetVertices); + }); + + return restrictions; + } + + public static boolean hasParentRestrictions(Vertex vertex) { + return vertex.isOfType(SchemaGraph.FIELD) + || vertex.isOfType(SchemaGraph.INPUT_FIELD) + || vertex.isOfType(SchemaGraph.ENUM_VALUE) + || vertex.isOfType(SchemaGraph.ARGUMENT); } - public IsolatedVertices getIsolatedVertices() { - return isolatedVertices; + public static boolean hasChildrenRestrictions(Vertex vertex) { + return vertex.isOfType(SchemaGraph.INPUT_OBJECT) + || vertex.isOfType(SchemaGraph.OBJECT) + || 
vertex.isOfType(SchemaGraph.ENUM); } } diff --git a/src/main/java/graphql/schema/diffing/SchemaDiffing.java b/src/main/java/graphql/schema/diffing/SchemaDiffing.java index 7b1db49647..de047bf1fd 100644 --- a/src/main/java/graphql/schema/diffing/SchemaDiffing.java +++ b/src/main/java/graphql/schema/diffing/SchemaDiffing.java @@ -1,17 +1,21 @@ package graphql.schema.diffing; +import com.google.common.collect.BiMap; +import com.google.common.collect.HashBiMap; +import com.google.common.collect.HashMultimap; +import com.google.common.collect.Multimaps; import graphql.Internal; import graphql.schema.GraphQLSchema; import graphql.schema.diffing.ana.EditOperationAnalysisResult; import graphql.schema.diffing.ana.EditOperationAnalyzer; import java.util.ArrayList; -import java.util.Collections; +import java.util.Comparator; import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; import static graphql.Assert.assertTrue; -import static graphql.schema.diffing.EditorialCostForMapping.editorialCostForMapping; -import static java.util.Collections.singletonList; +import static graphql.schema.diffing.EditorialCostForMapping.baseEditorialCostForMapping; @Internal public class SchemaDiffing { @@ -31,123 +35,101 @@ public void stop() { public List diffGraphQLSchema(GraphQLSchema graphQLSchema1, GraphQLSchema graphQLSchema2) throws Exception { sourceGraph = new SchemaGraphFactory("source-").createGraph(graphQLSchema1); targetGraph = new SchemaGraphFactory("target-").createGraph(graphQLSchema2); - return diffImpl(sourceGraph, targetGraph).listOfEditOperations.get(0); + return diffImpl(sourceGraph, targetGraph, new AtomicInteger()).getListOfEditOperations(); } public EditOperationAnalysisResult diffAndAnalyze(GraphQLSchema graphQLSchema1, GraphQLSchema graphQLSchema2) throws Exception { sourceGraph = new SchemaGraphFactory("source-").createGraph(graphQLSchema1); targetGraph = new SchemaGraphFactory("target-").createGraph(graphQLSchema2); - DiffImpl.OptimalEdit 
optimalEdit = diffImpl(sourceGraph, targetGraph); + DiffImpl.OptimalEdit optimalEdit = diffImpl(sourceGraph, targetGraph, new AtomicInteger()); EditOperationAnalyzer editOperationAnalyzer = new EditOperationAnalyzer(graphQLSchema1, graphQLSchema1, sourceGraph, targetGraph); - return editOperationAnalyzer.analyzeEdits(optimalEdit.listOfEditOperations.get(0), optimalEdit.mappings.get(0)); + return editOperationAnalyzer.analyzeEdits(optimalEdit.getListOfEditOperations(), optimalEdit.mapping); } - public DiffImpl.OptimalEdit diffGraphQLSchemaAllEdits(GraphQLSchema graphQLSchema1, GraphQLSchema graphQLSchema2) throws Exception { + public DiffImpl.OptimalEdit diffGraphQLSchemaAllEdits(GraphQLSchema graphQLSchema1, GraphQLSchema graphQLSchema2, AtomicInteger algoIterationCount) throws Exception { sourceGraph = new SchemaGraphFactory("source-").createGraph(graphQLSchema1); targetGraph = new SchemaGraphFactory("target-").createGraph(graphQLSchema2); - return diffImpl(sourceGraph, targetGraph); + return diffImpl(sourceGraph, targetGraph, algoIterationCount); } - private DiffImpl.OptimalEdit diffImpl(SchemaGraph sourceGraph, SchemaGraph targetGraph) throws Exception { - int sizeDiff = targetGraph.size() - sourceGraph.size(); - System.out.println("graph diff: " + sizeDiff); - FillupIsolatedVertices fillupIsolatedVertices = new FillupIsolatedVertices(sourceGraph, targetGraph, runningCheck); - fillupIsolatedVertices.ensureGraphAreSameSize(); - FillupIsolatedVertices.IsolatedVertices isolatedVertices = fillupIsolatedVertices.getIsolatedVertices(); + private DiffImpl.OptimalEdit diffImpl(SchemaGraph sourceGraph, SchemaGraph targetGraph, AtomicInteger algoIterationCount) throws Exception { + PossibleMappingsCalculator possibleMappingsCalculator = new PossibleMappingsCalculator(sourceGraph, targetGraph, runningCheck); + PossibleMappingsCalculator.PossibleMappings possibleMappings = possibleMappingsCalculator.calculate(); + + Mapping startMapping = Mapping.newMapping( + 
possibleMappingsCalculator.getFixedParentRestrictions(), + possibleMappings.fixedOneToOneMappings, + possibleMappings.fixedOneToOneSources, + possibleMappings.fixedOneToOneTargets); assertTrue(sourceGraph.size() == targetGraph.size()); -// if (sizeDiff != 0) { -// SortSourceGraph.sortSourceGraph(sourceGraph, targetGraph, isolatedVertices); -// } - Mapping fixedMappings = isolatedVertices.mapping; - System.out.println("fixed mappings: " + fixedMappings.size() + " vs " + sourceGraph.size()); - if (fixedMappings.size() == sourceGraph.size()) { - List result = new ArrayList<>(); - editorialCostForMapping(fixedMappings, sourceGraph, targetGraph, result); - return new DiffImpl.OptimalEdit(singletonList(fixedMappings), singletonList(result), result.size()); + if (possibleMappings.fixedOneToOneMappings.size() == sourceGraph.size()) { + return new DiffImpl.OptimalEdit(sourceGraph, targetGraph, startMapping, baseEditorialCostForMapping(startMapping, sourceGraph, targetGraph)); } - DiffImpl diffImpl = new DiffImpl(sourceGraph, targetGraph, isolatedVertices, runningCheck); List nonMappedSource = new ArrayList<>(sourceGraph.getVertices()); - nonMappedSource.removeAll(fixedMappings.getSources()); -// for(Vertex vertex: nonMappedSource) { -// System.out.println("non mapped: " + vertex); -// } -// for (List context : isolatedVertices.contexts.rowKeySet()) { -// Map, Set> row = isolatedVertices.contexts.row(context); -// System.out.println("context: " + context + " from " + row.keySet().iterator().next().size() + " to " + row.values().iterator().next().size()); -// } + nonMappedSource.removeAll(possibleMappings.fixedOneToOneSources); List nonMappedTarget = new ArrayList<>(targetGraph.getVertices()); - nonMappedTarget.removeAll(fixedMappings.getTargets()); + nonMappedTarget.removeAll(possibleMappings.fixedOneToOneTargets); runningCheck.check(); - sortListBasedOnPossibleMapping(nonMappedSource, isolatedVertices); - - // the non mapped vertices go to the end - List sourceVertices = 
new ArrayList<>(); - sourceVertices.addAll(fixedMappings.getSources()); - sourceVertices.addAll(nonMappedSource); - - List targetGraphVertices = new ArrayList<>(); - targetGraphVertices.addAll(fixedMappings.getTargets()); - targetGraphVertices.addAll(nonMappedTarget); - - - DiffImpl.OptimalEdit optimalEdit = diffImpl.diffImpl(fixedMappings, sourceVertices, targetGraphVertices); -// System.out.println("different edit counts: " + optimalEdit.listOfEditOperations.size()); -// for (int i = 0; i < optimalEdit.listOfEditOperations.size(); i++) { -// System.out.println("--------------"); -// System.out.println("edit: " + i); -// System.out.println("--------------"); -// for (EditOperation editOperation : optimalEdit.listOfEditOperations.get(i)) { -// System.out.println(editOperation); -// } -// System.out.println("--------------"); -// System.out.println("--------------"); -// } - return optimalEdit; - } - - private void sortListBasedOnPossibleMapping(List sourceVertices, FillupIsolatedVertices.IsolatedVertices isolatedVertices) { - Collections.sort(sourceVertices, (v1, v2) -> - { - int v2Count = isolatedVertices.possibleMappings.get(v2).size(); - int v1Count = isolatedVertices.possibleMappings.get(v1).size(); - return Integer.compare(v2Count, v1Count); - }); - -// for (Vertex vertex : sourceGraph.getVertices()) { -// System.out.println("c: " + isolatedVertices.possibleMappings.get(vertex).size() + " v: " + vertex); -// } - } - - private List calcEdgeOperations(Mapping mapping) { - List edges = sourceGraph.getEdges(); - List result = new ArrayList<>(); - // edge deletion or relabeling - for (Edge sourceEdge : edges) { - Vertex target1 = mapping.getTarget(sourceEdge.getFrom()); - Vertex target2 = mapping.getTarget(sourceEdge.getTo()); - Edge targetEdge = targetGraph.getEdge(target1, target2); - if (targetEdge == null) { - result.add(EditOperation.deleteEdge("Delete edge " + sourceEdge, sourceEdge)); - } else if (!sourceEdge.getLabel().equals(targetEdge.getLabel())) { - 
result.add(EditOperation.changeEdge("Change " + sourceEdge + " to " + targetEdge, sourceEdge, targetEdge)); + int isolatedSourceCount = (int) nonMappedSource.stream().filter(Vertex::isIsolated).count(); + int isolatedTargetCount = (int) nonMappedTarget.stream().filter(Vertex::isIsolated).count(); + if (isolatedTargetCount > isolatedSourceCount) { + // we flip source and target because the algo works much faster with + // this way for delete heavy graphs + BiMap fixedOneToOneInverted = HashBiMap.create(); + for (Vertex s : possibleMappings.fixedOneToOneMappings.keySet()) { + Vertex t = possibleMappings.fixedOneToOneMappings.get(s); + fixedOneToOneInverted.put(t, s); } + Mapping startMappingInverted = Mapping.newMapping( + possibleMappingsCalculator.getFixedParentRestrictionsInverse(fixedOneToOneInverted), + fixedOneToOneInverted, + possibleMappings.fixedOneToOneTargets, + possibleMappings.fixedOneToOneSources + ); + HashMultimap invertedPossibleOnes = HashMultimap.create(); + Multimaps.invertFrom(possibleMappings.possibleMappings, invertedPossibleOnes); + possibleMappings.possibleMappings = invertedPossibleOnes; + + List sourceVertices = new ArrayList<>(); + sourceVertices.addAll(possibleMappings.fixedOneToOneSources); + sourceVertices.addAll(nonMappedSource); + + List targetVertices = new ArrayList<>(); + targetVertices.addAll(possibleMappings.fixedOneToOneTargets); + targetVertices.addAll(nonMappedTarget); + + sortVertices(nonMappedTarget, targetGraph, possibleMappings); + + DiffImpl diffImpl = new DiffImpl(possibleMappingsCalculator, targetGraph, sourceGraph, possibleMappings, runningCheck); + DiffImpl.OptimalEdit optimalEdit = diffImpl.diffImpl(startMappingInverted, targetVertices, sourceVertices, algoIterationCount); + DiffImpl.OptimalEdit invertedBackOptimalEdit = new DiffImpl.OptimalEdit(sourceGraph, targetGraph, optimalEdit.mapping.invert(), optimalEdit.ged); + return invertedBackOptimalEdit; + } else { + sortVertices(nonMappedSource, sourceGraph, 
possibleMappings); + + List sourceVertices = new ArrayList<>(); + sourceVertices.addAll(possibleMappings.fixedOneToOneSources); + sourceVertices.addAll(nonMappedSource); + + List targetVertices = new ArrayList<>(); + targetVertices.addAll(possibleMappings.fixedOneToOneTargets); + targetVertices.addAll(nonMappedTarget); + + DiffImpl diffImpl = new DiffImpl(possibleMappingsCalculator, sourceGraph, targetGraph, possibleMappings, runningCheck); + DiffImpl.OptimalEdit optimalEdit = diffImpl.diffImpl(startMapping, sourceVertices, targetVertices, algoIterationCount); + return optimalEdit; } + } - //TODO: iterates over all edges in the target Graph - for (Edge targetEdge : targetGraph.getEdges()) { - // only subgraph edges - Vertex sourceFrom = mapping.getSource(targetEdge.getFrom()); - Vertex sourceTo = mapping.getSource(targetEdge.getTo()); - if (sourceGraph.getEdge(sourceFrom, sourceTo) == null) { - result.add(EditOperation.insertEdge("Insert edge " + targetEdge, targetEdge)); - } - } - return result; + + private void sortVertices(List vertices, SchemaGraph schemaGraph, PossibleMappingsCalculator.PossibleMappings possibleMappings) { + Comparator vertexComparator = Comparator.comparing(schemaGraph::adjacentEdgesAndInverseCount).reversed(); + vertices.sort(vertexComparator); } } diff --git a/src/main/java/graphql/schema/diffing/SchemaGraph.java b/src/main/java/graphql/schema/diffing/SchemaGraph.java index eb64c3e4ef..edaebe5284 100644 --- a/src/main/java/graphql/schema/diffing/SchemaGraph.java +++ b/src/main/java/graphql/schema/diffing/SchemaGraph.java @@ -2,6 +2,7 @@ import com.google.common.collect.HashBasedTable; +import com.google.common.collect.Iterables; import com.google.common.collect.LinkedHashMultimap; import com.google.common.collect.Multimap; import com.google.common.collect.Table; @@ -84,14 +85,22 @@ public void addEdge(Edge edge) { edgesByInverseDirection.put(edge.getTo(), edge.getFrom(), edge); } - public List getAdjacentEdges(Vertex from) { - return new 
ArrayList<>(edgesByDirection.row(from).values()); + // +// public List getAdjacentEdges(Vertex from) { +// return new ArrayList<>(edgesByDirection.row(from).values()); +// } + public Collection getAdjacentEdgesNonCopy(Vertex from) { + return edgesByDirection.row(from).values(); } - public List getAdjacentEdgesAndInverse(Vertex fromAndTo) { - List result = new ArrayList<>(edgesByDirection.row(fromAndTo).values()); - result.addAll(edgesByInverseDirection.row(fromAndTo).values()); - return result; + public Iterable getAdjacentEdgesAndInverseNonCopy(Vertex fromAndTo) { + Collection edges = edgesByInverseDirection.row(fromAndTo).values(); + Collection edgesInverse = edgesByDirection.row(fromAndTo).values(); + return Iterables.concat(edges, edgesInverse); + } + + public int adjacentEdgesAndInverseCount(Vertex fromAndTo) { + return edgesByInverseDirection.row(fromAndTo).size() + edgesByDirection.row(fromAndTo).size(); } public List getAdjacentVertices(Vertex from) { @@ -135,8 +144,12 @@ public List getAdjacentEdges(Vertex from, Predicate predicate) { return result; } - public List getAdjacentEdgesInverse(Vertex to) { - return getAdjacentEdgesInverse(to, x -> true); + public List getAdjacentEdgesInverseCopied(Vertex to) { + return new ArrayList<>(edgesByInverseDirection.row(to).values()); + } + + public Collection getAdjacentEdgesInverseNonCopy(Vertex to) { + return edgesByInverseDirection.row(to).values(); } public List getAdjacentEdgesInverse(Vertex to, Predicate predicate) { @@ -245,13 +258,24 @@ public Vertex getAppliedDirectiveContainerForAppliedDirective(Vertex appliedDire return adjacentVertices.get(0); } + /** + * Gets the one inverse adjacent edge to the input and gets the other vertex. 
+ * + * @param input the vertex input + * @return a vertex + */ + public Vertex getSingleAdjacentInverseVertex(Vertex input) { + Collection adjacentVertices = this.getAdjacentEdgesInverseNonCopy(input); + assertTrue(adjacentVertices.size() == 1, () -> format("No parent found for %s", input)); + return adjacentVertices.iterator().next().getFrom(); + } + public int getAppliedDirectiveIndex(Vertex appliedDirective) { - List adjacentEdges = this.getAdjacentEdgesInverse(appliedDirective); + List adjacentEdges = this.getAdjacentEdgesInverseCopied(appliedDirective); assertTrue(adjacentEdges.size() == 1, () -> format("No applied directive container found for %s", appliedDirective)); return Integer.parseInt(adjacentEdges.get(0).getLabel()); } - public Vertex getEnumForEnumValue(Vertex enumValue) { List adjacentVertices = this.getAdjacentVerticesInverse(enumValue); assertTrue(adjacentVertices.size() == 1, () -> format("No enum found for %s", enumValue)); diff --git a/src/main/java/graphql/schema/diffing/SortSourceGraph.java b/src/main/java/graphql/schema/diffing/SortSourceGraph.java index d9169e167d..6b96ccfae3 100644 --- a/src/main/java/graphql/schema/diffing/SortSourceGraph.java +++ b/src/main/java/graphql/schema/diffing/SortSourceGraph.java @@ -1,141 +1,141 @@ -package graphql.schema.diffing; - -import graphql.Internal; - -import java.util.ArrayList; -import java.util.Comparator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicInteger; - -@Internal -public class SortSourceGraph { - - public static void sortSourceGraph(SchemaGraph sourceGraph, SchemaGraph targetGraph, FillupIsolatedVertices.IsolatedVertices isolatedVertices) { -// // we sort descending by number of possible target vertices -// Collections.sort(sourceGraph.getVertices(), (v1, v2) -> -// -// { -// -// int v2Count = v2.isBuiltInType() ? -1 : (v2.isIsolated() ? 
0 : isolatedVertices.possibleMappings.get(v2).size()); -// int v1Count = v1.isBuiltInType() ? -1 : (v1.isIsolated() ? 0 : isolatedVertices.possibleMappings.get(v1).size()); -// return Integer.compare(v2Count, v1Count); -// }); +//package graphql.schema.diffing; // +//import graphql.Internal; +// +//import java.util.ArrayList; +//import java.util.Comparator; +//import java.util.LinkedHashMap; +//import java.util.List; +//import java.util.Map; +//import java.util.concurrent.atomic.AtomicInteger; +// +//@Internal +//public class SortSourceGraph { +// +// public static void sortSourceGraph(SchemaGraph sourceGraph, SchemaGraph targetGraph, PossibleMappingsCalculator.PossibleMappings possibleMappings) { +//// // we sort descending by number of possible target vertices +//// Collections.sort(sourceGraph.getVertices(), (v1, v2) -> +//// +//// { +//// +//// int v2Count = v2.isBuiltInType() ? -1 : (v2.isIsolated() ? 0 : isolatedVertices.possibleMappings.get(v2).size()); +//// int v1Count = v1.isBuiltInType() ? -1 : (v1.isIsolated() ? 
0 : isolatedVertices.possibleMappings.get(v1).size()); +//// return Integer.compare(v2Count, v1Count); +//// }); +//// +//// for (Vertex vertex : sourceGraph.getVertices()) { +//// System.out.println("c: " + isolatedVertices.possibleMappings.get(vertex).size() + " v: " + vertex); +//// } +// +//// +//// +//// // how often does each source edge (based on the label) appear in target graph +// Map targetLabelCount = new LinkedHashMap<>(); +// for (Edge targetEdge : targetGraph.getEdges()) { +// targetLabelCount.computeIfAbsent(targetEdge.getLabel(), __ -> new AtomicInteger()).incrementAndGet(); +// } +// // how often does each source vertex (based on the data) appear in the target graph +// Map targetVertexDataCount = new LinkedHashMap<>(); +// for (Vertex targetVertex : targetGraph.getVertices()) { +// targetVertexDataCount.computeIfAbsent(targetVertex.toData(), __ -> new AtomicInteger()).incrementAndGet(); +// } +// +// // an infrequency weight is 1 - count in target. Meaning the higher the +// // value, the smaller the count, the less frequent it. +// // Higher Infrequency => more unique is the vertex/label +// Map vertexInfrequencyWeights = new LinkedHashMap<>(); +// Map edgesInfrequencyWeights = new LinkedHashMap<>(); // for (Vertex vertex : sourceGraph.getVertices()) { -// System.out.println("c: " + isolatedVertices.possibleMappings.get(vertex).size() + " v: " + vertex); +// vertexInfrequencyWeights.put(vertex, 1 - targetVertexDataCount.getOrDefault(vertex.toData(), new AtomicInteger()).get()); +// } +// for (Edge edge : sourceGraph.getEdges()) { +// edgesInfrequencyWeights.put(edge, 1 - targetLabelCount.getOrDefault(edge.getLabel(), new AtomicInteger()).get()); +// } +// +// /** +// * vertices are sorted by increasing frequency/decreasing infrequency/decreasing uniqueness +// * we start with the most unique/least frequent/most infrequent and add incrementally the next most infrequent. 
+// */ +// +// //TODO: improve this: this is doing to much: we just want the max infrequent vertex, not all sorted +// ArrayList nextCandidates = new ArrayList<>(sourceGraph.getVertices()); +// nextCandidates.sort(Comparator.comparingInt(o -> totalInfrequencyWeightWithAdjacentEdges(sourceGraph, o, vertexInfrequencyWeights, edgesInfrequencyWeights))); +// +// Vertex curVertex = nextCandidates.get(nextCandidates.size() - 1); +// nextCandidates.remove(nextCandidates.size() - 1); +// +// List result = new ArrayList<>(); +// result.add(curVertex); +// while (nextCandidates.size() > 0) { Vertex nextOne = null; +// int curMaxWeight = Integer.MIN_VALUE; +// int index = 0; +// int nextOneIndex = -1; +// +// // which ones of the candidates has the highest infrequency weight relatively to the current result set of vertices +// for (Vertex candidate : nextCandidates) { +// List allAdjacentEdges = sourceGraph.getAllAdjacentEdges(result, candidate); +// int totalWeight = totalInfrequencyWeightWithSomeEdges(candidate, allAdjacentEdges, vertexInfrequencyWeights, edgesInfrequencyWeights); +// if (totalWeight > curMaxWeight) { +// nextOne = candidate; +// nextOneIndex = index; +// curMaxWeight = totalWeight; +// } +// index++; +// } +// result.add(nextOne); +// nextCandidates.remove(nextOneIndex); // } - -// -// -// // how often does each source edge (based on the label) appear in target graph - Map targetLabelCount = new LinkedHashMap<>(); - for (Edge targetEdge : targetGraph.getEdges()) { - targetLabelCount.computeIfAbsent(targetEdge.getLabel(), __ -> new AtomicInteger()).incrementAndGet(); - } - // how often does each source vertex (based on the data) appear in the target graph - Map targetVertexDataCount = new LinkedHashMap<>(); - for (Vertex targetVertex : targetGraph.getVertices()) { - targetVertexDataCount.computeIfAbsent(targetVertex.toData(), __ -> new AtomicInteger()).incrementAndGet(); - } - - // an infrequency weight is 1 - count in target. 
Meaning the higher the - // value, the smaller the count, the less frequent it. - // Higher Infrequency => more unique is the vertex/label - Map vertexInfrequencyWeights = new LinkedHashMap<>(); - Map edgesInfrequencyWeights = new LinkedHashMap<>(); - for (Vertex vertex : sourceGraph.getVertices()) { - vertexInfrequencyWeights.put(vertex, 1 - targetVertexDataCount.getOrDefault(vertex.toData(), new AtomicInteger()).get()); - } - for (Edge edge : sourceGraph.getEdges()) { - edgesInfrequencyWeights.put(edge, 1 - targetLabelCount.getOrDefault(edge.getLabel(), new AtomicInteger()).get()); - } - - /** - * vertices are sorted by increasing frequency/decreasing infrequency/decreasing uniqueness - * we start with the most unique/least frequent/most infrequent and add incrementally the next most infrequent. - */ - - //TODO: improve this: this is doing to much: we just want the max infrequent vertex, not all sorted - ArrayList nextCandidates = new ArrayList<>(sourceGraph.getVertices()); - nextCandidates.sort(Comparator.comparingInt(o -> totalInfrequencyWeightWithAdjacentEdges(sourceGraph, o, vertexInfrequencyWeights, edgesInfrequencyWeights))); - - Vertex curVertex = nextCandidates.get(nextCandidates.size() - 1); - nextCandidates.remove(nextCandidates.size() - 1); - - List result = new ArrayList<>(); - result.add(curVertex); - while (nextCandidates.size() > 0) { Vertex nextOne = null; - int curMaxWeight = Integer.MIN_VALUE; - int index = 0; - int nextOneIndex = -1; - - // which ones of the candidates has the highest infrequency weight relatively to the current result set of vertices - for (Vertex candidate : nextCandidates) { - List allAdjacentEdges = sourceGraph.getAllAdjacentEdges(result, candidate); - int totalWeight = totalInfrequencyWeightWithSomeEdges(candidate, allAdjacentEdges, vertexInfrequencyWeights, edgesInfrequencyWeights); - if (totalWeight > curMaxWeight) { - nextOne = candidate; - nextOneIndex = index; - curMaxWeight = totalWeight; - } - index++; - } - 
result.add(nextOne); - nextCandidates.remove(nextOneIndex); - } - sourceGraph.setVertices(result); - } - - - private static int totalInfrequencyWeightWithSomeEdges(Vertex vertex, - List edges, - Map vertexInfrequencyWeights, - Map edgesInfrequencyWeights) { - if (vertex.isBuiltInType()) { - return Integer.MIN_VALUE + 1; - } - if (vertex.isIsolated()) { - return Integer.MIN_VALUE + 2; - } - return vertexInfrequencyWeights.get(vertex) + edges.stream().mapToInt(edgesInfrequencyWeights::get).sum(); - } - - private static int totalInfrequencyWeightWithAdjacentEdges(SchemaGraph sourceGraph, - Vertex vertex, - Map vertexInfrequencyWeights, - Map edgesInfrequencyWeights) { - if (vertex.isBuiltInType()) { - return Integer.MIN_VALUE + 1; - } - if (vertex.isIsolated()) { - return Integer.MIN_VALUE + 2; - } - List adjacentEdges = sourceGraph.getAdjacentEdges(vertex); - return vertexInfrequencyWeights.get(vertex) + adjacentEdges.stream().mapToInt(edgesInfrequencyWeights::get).sum(); - } - - private int infrequencyWeightForVertex(Vertex sourceVertex, SchemaGraph targetGraph) { - int count = 0; - for (Vertex targetVertex : targetGraph.getVertices()) { - if (sourceVertex.isEqualTo(targetVertex)) { - count++; - } - } - return 1 - count; - } - - private int infrequencyWeightForEdge(Edge sourceEdge, SchemaGraph targetGraph) { - int count = 0; - for (Edge targetEdge : targetGraph.getEdges()) { - if (sourceEdge.isEqualTo(targetEdge)) { - count++; - } - } - return 1 - count; - } - - - -} +// sourceGraph.setVertices(result); +// } +// +// +// private static int totalInfrequencyWeightWithSomeEdges(Vertex vertex, +// List edges, +// Map vertexInfrequencyWeights, +// Map edgesInfrequencyWeights) { +// if (vertex.isBuiltInType()) { +// return Integer.MIN_VALUE + 1; +// } +// if (vertex.isIsolated()) { +// return Integer.MIN_VALUE + 2; +// } +// return vertexInfrequencyWeights.get(vertex) + edges.stream().mapToInt(edgesInfrequencyWeights::get).sum(); +// } +// +// private static int 
totalInfrequencyWeightWithAdjacentEdges(SchemaGraph sourceGraph, +// Vertex vertex, +// Map vertexInfrequencyWeights, +// Map edgesInfrequencyWeights) { +// if (vertex.isBuiltInType()) { +// return Integer.MIN_VALUE + 1; +// } +// if (vertex.isIsolated()) { +// return Integer.MIN_VALUE + 2; +// } +// List adjacentEdges = sourceGraph.getAdjacentEdges(vertex); +// return vertexInfrequencyWeights.get(vertex) + adjacentEdges.stream().mapToInt(edgesInfrequencyWeights::get).sum(); +// } +// +// private int infrequencyWeightForVertex(Vertex sourceVertex, SchemaGraph targetGraph) { +// int count = 0; +// for (Vertex targetVertex : targetGraph.getVertices()) { +// if (sourceVertex.isEqualTo(targetVertex)) { +// count++; +// } +// } +// return 1 - count; +// } +// +// private int infrequencyWeightForEdge(Edge sourceEdge, SchemaGraph targetGraph) { +// int count = 0; +// for (Edge targetEdge : targetGraph.getEdges()) { +// if (sourceEdge.isEqualTo(targetEdge)) { +// count++; +// } +// } +// return 1 - count; +// } +// +// +// +//} diff --git a/src/main/java/graphql/schema/diffing/Vertex.java b/src/main/java/graphql/schema/diffing/Vertex.java index 0011f63fe8..3a39a34e62 100644 --- a/src/main/java/graphql/schema/diffing/Vertex.java +++ b/src/main/java/graphql/schema/diffing/Vertex.java @@ -95,7 +95,7 @@ public void setBuiltInType(boolean builtInType) { public String toString() { return "Vertex{" + "type='" + type + '\'' + - ", properties=" + properties + + ", properties=" + properties.toString().replace("\n", "") + ", debugName='" + debugName + '\'' + ", builtInType='" + builtInType + '\'' + '}'; diff --git a/src/main/java/graphql/schema/diffing/ana/EditOperationAnalyzer.java b/src/main/java/graphql/schema/diffing/ana/EditOperationAnalyzer.java index f1ea24f024..acf39508ad 100644 --- a/src/main/java/graphql/schema/diffing/ana/EditOperationAnalyzer.java +++ b/src/main/java/graphql/schema/diffing/ana/EditOperationAnalyzer.java @@ -2,6 +2,7 @@ import graphql.Assert; import 
graphql.Internal; +import graphql.VisibleForTesting; import graphql.schema.GraphQLSchema; import graphql.schema.diffing.Edge; import graphql.schema.diffing.EditOperation; @@ -10,6 +11,8 @@ import graphql.schema.diffing.Vertex; import graphql.schema.idl.ScalarInfo; +import java.util.ArrayList; +import java.util.Comparator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -106,19 +109,19 @@ @Internal public class EditOperationAnalyzer { - private GraphQLSchema oldSchema; - private GraphQLSchema newSchema; - private SchemaGraph oldSchemaGraph; - private SchemaGraph newSchemaGraph; + private final GraphQLSchema oldSchema; + private final GraphQLSchema newSchema; + private final SchemaGraph oldSchemaGraph; + private final SchemaGraph newSchemaGraph; - private Map objectDifferences = new LinkedHashMap<>(); - private Map interfaceDifferences = new LinkedHashMap<>(); - private Map unionDifferences = new LinkedHashMap<>(); - private Map enumDifferences = new LinkedHashMap<>(); - private Map inputObjectDifferences = new LinkedHashMap<>(); - private Map scalarDifferences = new LinkedHashMap<>(); + private final Map objectDifferences = new LinkedHashMap<>(); + private final Map interfaceDifferences = new LinkedHashMap<>(); + private final Map unionDifferences = new LinkedHashMap<>(); + private final Map enumDifferences = new LinkedHashMap<>(); + private final Map inputObjectDifferences = new LinkedHashMap<>(); + private final Map scalarDifferences = new LinkedHashMap<>(); - private Map directiveDifferences = new LinkedHashMap<>(); + private final Map directiveDifferences = new LinkedHashMap<>(); public EditOperationAnalyzer(GraphQLSchema oldSchema, GraphQLSchema newSchema, @@ -132,6 +135,8 @@ public EditOperationAnalyzer(GraphQLSchema oldSchema, } public EditOperationAnalysisResult analyzeEdits(List editOperations, Mapping mapping) { + editOperations = getTraversalOrder(editOperations); + handleTypeVertexChanges(editOperations); for 
(EditOperation editOperation : editOperations) { @@ -140,7 +145,7 @@ public EditOperationAnalysisResult analyzeEdits(List editOperatio if (editOperation.getTargetVertex().isOfType(SchemaGraph.FIELD)) { fieldChanged(editOperation); } else if (editOperation.getTargetVertex().isOfType(SchemaGraph.ARGUMENT)) { - handleArgumentChange(editOperation); + handleArgumentChange(editOperation, mapping); } else if (editOperation.getTargetVertex().isOfType(SchemaGraph.INPUT_FIELD)) { handleInputFieldChange(editOperation); } @@ -164,11 +169,13 @@ public EditOperationAnalysisResult analyzeEdits(List editOperatio } } } + handleTypeChanges(editOperations, mapping); handleImplementsChanges(editOperations, mapping); handleUnionMemberChanges(editOperations, mapping); handleEnumValuesChanges(editOperations, mapping); handleAppliedDirectives(editOperations, mapping); + handleArgumentChanges(editOperations, mapping); return new EditOperationAnalysisResult( objectDifferences, @@ -180,6 +187,23 @@ public EditOperationAnalysisResult analyzeEdits(List editOperatio directiveDifferences); } + private void handleArgumentChanges(List editOperations, Mapping mapping) { + for (EditOperation editOperation : editOperations) { + switch (editOperation.getOperation()) { + case INSERT_EDGE: + if (editOperation.getTargetEdge().getTo().isOfType(SchemaGraph.ARGUMENT)) { + argumentAdded(editOperation); + } + break; + case DELETE_EDGE: + if (editOperation.getSourceEdge().getTo().isOfType(SchemaGraph.ARGUMENT)) { + argumentDeleted(editOperation); + } + break; + } + } + } + private void handleAppliedDirectives(List editOperations, Mapping mapping) { @@ -580,7 +604,7 @@ private void handleTypeChanges(List editOperations, Mapping mappi break; case CHANGE_EDGE: if (newEdge.getLabel().startsWith("type=")) { - typeEdgeChanged(editOperation); + typeEdgeChanged(editOperation, mapping); } break; } @@ -633,21 +657,43 @@ private void handleEnumValuesChanges(List editOperations, Mapping private void 
handleInputFieldChange(EditOperation editOperation) { Vertex inputField = editOperation.getTargetVertex(); Vertex inputObject = newSchemaGraph.getInputObjectForInputField(inputField); + String oldName = editOperation.getSourceVertex().getName(); - String newName = inputObject.getName(); - getInputObjectModification(newName).getDetails().add(new InputObjectFieldRename(oldName, inputField.getName())); + String newName = inputField.getName(); + + if (oldName.equals(newName)) { + // Something else like description could have changed + return; + } + + if (isInputObjectAdded(inputObject.getName())) { + return; + } + + getInputObjectModification(inputObject.getName()).getDetails().add(new InputObjectFieldRename(oldName, newName)); } - private void handleArgumentChange(EditOperation editOperation) { + private void handleArgumentChange(EditOperation editOperation, Mapping mapping) { + Vertex oldArgument = editOperation.getSourceVertex(); Vertex argument = editOperation.getTargetVertex(); + + String oldName = oldArgument.getName(); + String newName = argument.getName(); + + if (oldName.equals(newName)) { + // Something else like description could have changed + return; + } + + if (!doesArgumentChangeMakeSense(oldArgument, argument, mapping)) { + return; + } + Vertex fieldOrDirective = newSchemaGraph.getFieldOrDirectiveForArgument(argument); if (fieldOrDirective.isOfType(SchemaGraph.DIRECTIVE)) { Vertex directive = fieldOrDirective; DirectiveModification directiveModification = getDirectiveModification(directive.getName()); - String oldName = editOperation.getSourceVertex().getName(); - String newName = argument.getName(); directiveModification.getDetails().add(new DirectiveArgumentRename(oldName, newName)); - } else { assertTrue(fieldOrDirective.isOfType(SchemaGraph.FIELD)); Vertex field = fieldOrDirective; @@ -656,17 +702,12 @@ private void handleArgumentChange(EditOperation editOperation) { if (fieldsContainerForField.isOfType(SchemaGraph.OBJECT)) { Vertex object = 
fieldsContainerForField; ObjectModification objectModification = getObjectModification(object.getName()); - String oldName = editOperation.getSourceVertex().getName(); - String newName = argument.getName(); objectModification.getDetails().add(new ObjectFieldArgumentRename(fieldName, oldName, newName)); } else { assertTrue(fieldsContainerForField.isOfType(SchemaGraph.INTERFACE)); Vertex interfaze = fieldsContainerForField; InterfaceModification interfaceModification = getInterfaceModification(interfaze.getName()); - String oldName = editOperation.getSourceVertex().getName(); - String newName = argument.getName(); interfaceModification.getDetails().add(new InterfaceFieldArgumentRename(fieldName, oldName, newName)); - } } } @@ -755,11 +796,21 @@ private void fieldChanged(EditOperation editOperation) { if (fieldsContainerForField.isOfType(SchemaGraph.OBJECT)) { Vertex object = fieldsContainerForField; + + if (isObjectAdded(object.getName())) { + return; + } + ObjectModification objectModification = getObjectModification(object.getName()); objectModification.getDetails().add(new ObjectFieldRename(oldName, newName)); } else { assertTrue(fieldsContainerForField.isOfType(SchemaGraph.INTERFACE)); Vertex interfaze = fieldsContainerForField; + + if (isInterfaceAdded(interfaze.getName())) { + return; + } + InterfaceModification interfaceModification = getInterfaceModification(interfaze.getName()); interfaceModification.getDetails().add(new InterfaceFieldRename(oldName, newName)); } @@ -924,7 +975,6 @@ private void changedTypeVertex(EditOperation editOperation) { changedDirective(editOperation); break; } - } @@ -942,8 +992,9 @@ private void typeEdgeInserted(EditOperation editOperation, List e } - private void typeEdgeInsertedForInputField(EditOperation - editOperation, List editOperations, Mapping mapping) { + private void typeEdgeInsertedForInputField(EditOperation editOperation, + List editOperations, + Mapping mapping) { Vertex inputField = 
editOperation.getTargetEdge().getFrom(); Vertex inputObject = newSchemaGraph.getInputObjectForInputField(inputField); if (isInputObjectAdded(inputObject.getName())) { @@ -959,8 +1010,9 @@ private void typeEdgeInsertedForInputField(EditOperation getInputObjectModification(inputObject.getName()).getDetails().add(inputObjectFieldTypeModification); } - private void typeEdgeInsertedForArgument(EditOperation - editOperation, List editOperations, Mapping mapping) { + private void typeEdgeInsertedForArgument(EditOperation editOperation, + List editOperations, + Mapping mapping) { Vertex argument = editOperation.getTargetEdge().getFrom(); Vertex fieldOrDirective = newSchemaGraph.getFieldOrDirectiveForArgument(argument); if (fieldOrDirective.isOfType(SchemaGraph.FIELD)) { @@ -969,6 +1021,7 @@ private void typeEdgeInsertedForArgument(EditOperation if (objectOrInterface.isOfType(SchemaGraph.OBJECT)) { Vertex object = objectOrInterface; + // if the whole object is new we are done if (isObjectAdded(object.getName())) { return; @@ -981,6 +1034,7 @@ private void typeEdgeInsertedForArgument(EditOperation if (isArgumentNewForExistingObjectField(object.getName(), field.getName(), argument.getName())) { return; } + String newType = getTypeFromEdgeLabel(editOperation.getTargetEdge()); // this means we have an existing object changed its type // and there must be a deleted edge with the old type information @@ -988,9 +1042,16 @@ private void typeEdgeInsertedForArgument(EditOperation String oldType = getTypeFromEdgeLabel(deletedTypeEdgeOperation.getSourceEdge()); ObjectFieldArgumentTypeModification objectFieldArgumentTypeModification = new ObjectFieldArgumentTypeModification(field.getName(), argument.getName(), oldType, newType); getObjectModification(object.getName()).getDetails().add(objectFieldArgumentTypeModification); + + String oldDefaultValue = getDefaultValueFromEdgeLabel(deletedTypeEdgeOperation.getSourceEdge()); + String newDefaultValue = 
getDefaultValueFromEdgeLabel(editOperation.getTargetEdge()); + if (!oldDefaultValue.equals(newDefaultValue)) { + getObjectModification(object.getName()).getDetails().add(new ObjectFieldArgumentDefaultValueModification(field.getName(), argument.getName(), oldDefaultValue, newDefaultValue)); + } } else { assertTrue(objectOrInterface.isOfType(SchemaGraph.INTERFACE)); Vertex interfaze = objectOrInterface; + // if the whole object is new we are done if (isInterfaceAdded(interfaze.getName())) { return; @@ -1003,6 +1064,7 @@ private void typeEdgeInsertedForArgument(EditOperation if (isArgumentNewForExistingInterfaceField(interfaze.getName(), field.getName(), argument.getName())) { return; } + String newType = getTypeFromEdgeLabel(editOperation.getTargetEdge()); // this means we have an existing object changed its type // and there must be a deleted edge with the old type information @@ -1010,27 +1072,41 @@ private void typeEdgeInsertedForArgument(EditOperation String oldType = getTypeFromEdgeLabel(deletedTypeEdgeOperation.getSourceEdge()); InterfaceFieldArgumentTypeModification interfaceFieldArgumentTypeModification = new InterfaceFieldArgumentTypeModification(field.getName(), argument.getName(), oldType, newType); getInterfaceModification(interfaze.getName()).getDetails().add(interfaceFieldArgumentTypeModification); + + String oldDefaultValue = getDefaultValueFromEdgeLabel(deletedTypeEdgeOperation.getSourceEdge()); + String newDefaultValue = getDefaultValueFromEdgeLabel(editOperation.getTargetEdge()); + if (!oldDefaultValue.equals(newDefaultValue)) { + getInterfaceModification(interfaze.getName()).getDetails().add(new InterfaceFieldArgumentDefaultValueModification(field.getName(), argument.getName(), oldDefaultValue, newDefaultValue)); + } } } else { assertTrue(fieldOrDirective.isOfType(SchemaGraph.DIRECTIVE)); Vertex directive = fieldOrDirective; + if (isDirectiveAdded(directive.getName())) { return; } if (isArgumentNewForExistingDirective(directive.getName(), 
argument.getName())) { return; } + String newType = getTypeFromEdgeLabel(editOperation.getTargetEdge()); EditOperation deletedTypeEdgeOperation = findDeletedEdge(argument, editOperations, mapping, this::isTypeEdge); String oldType = getTypeFromEdgeLabel(deletedTypeEdgeOperation.getSourceEdge()); DirectiveArgumentTypeModification directiveArgumentTypeModification = new DirectiveArgumentTypeModification(argument.getName(), oldType, newType); getDirectiveModification(directive.getName()).getDetails().add(directiveArgumentTypeModification); - } + String oldDefaultValue = getDefaultValueFromEdgeLabel(deletedTypeEdgeOperation.getSourceEdge()); + String newDefaultValue = getDefaultValueFromEdgeLabel(editOperation.getTargetEdge()); + if (!oldDefaultValue.equals(newDefaultValue)) { + getDirectiveModification(directive.getName()).getDetails().add(new DirectiveArgumentDefaultValueModification(argument.getName(), oldDefaultValue, newDefaultValue)); + } + } } - private void typeEdgeInsertedForField(EditOperation - editOperation, List editOperations, Mapping mapping) { + private void typeEdgeInsertedForField(EditOperation editOperation, + List editOperations, + Mapping mapping) { Vertex field = editOperation.getTargetEdge().getFrom(); Vertex objectOrInterface = newSchemaGraph.getFieldsContainerForField(field); if (objectOrInterface.isOfType(SchemaGraph.OBJECT)) { @@ -1087,13 +1163,13 @@ private EditOperation findDeletedEdge(Vertex targetVertexFrom, } - private void typeEdgeChanged(EditOperation editOperation) { + private void typeEdgeChanged(EditOperation editOperation, Mapping mapping) { Edge targetEdge = editOperation.getTargetEdge(); Vertex from = targetEdge.getFrom(); if (from.isOfType(SchemaGraph.FIELD)) { outputFieldTypeChanged(editOperation); } else if (from.isOfType(SchemaGraph.ARGUMENT)) { - argumentTypeOrDefaultValueChanged(editOperation); + argumentTypeOrDefaultValueChanged(editOperation, mapping); } else if (from.isOfType(SchemaGraph.INPUT_FIELD)) { 
inputFieldTypeOrDefaultValueChanged(editOperation); } @@ -1103,6 +1179,11 @@ private void inputFieldTypeOrDefaultValueChanged(EditOperation editOperation) { Edge targetEdge = editOperation.getTargetEdge(); Vertex inputField = targetEdge.getFrom(); Vertex inputObject = newSchemaGraph.getInputObjectForInputField(inputField); + + if (isInputObjectAdded(inputObject.getName())) { + return; + } + String oldDefaultValue = getDefaultValueFromEdgeLabel(editOperation.getSourceEdge()); String newDefaultValue = getDefaultValueFromEdgeLabel(editOperation.getTargetEdge()); if (!oldDefaultValue.equals(newDefaultValue)) { @@ -1117,9 +1198,14 @@ private void inputFieldTypeOrDefaultValueChanged(EditOperation editOperation) { } } - private void argumentTypeOrDefaultValueChanged(EditOperation editOperation) { - Edge targetEdge = editOperation.getTargetEdge(); - Vertex argument = targetEdge.getFrom(); + private void argumentTypeOrDefaultValueChanged(EditOperation editOperation, Mapping mapping) { + Vertex oldArgument = editOperation.getSourceEdge().getFrom(); + Vertex argument = editOperation.getTargetEdge().getFrom(); + + if (!doesArgumentChangeMakeSense(oldArgument, argument, mapping)) { + return; + } + Vertex fieldOrDirective = newSchemaGraph.getFieldOrDirectiveForArgument(argument); if (fieldOrDirective.isOfType(SchemaGraph.FIELD)) { Vertex field = fieldOrDirective; @@ -1158,7 +1244,6 @@ private void argumentTypeOrDefaultValueChanged(EditOperation editOperation) { getInterfaceModification(objectOrInterface.getName()).getDetails().add(interfaceFieldArgumentTypeModification); } } - } else { assertTrue(fieldOrDirective.isOfType(SchemaGraph.DIRECTIVE)); Vertex directive = fieldOrDirective; @@ -1168,16 +1253,29 @@ private void argumentTypeOrDefaultValueChanged(EditOperation editOperation) { if (!oldDefaultValue.equals(newDefaultValue)) { getDirectiveModification(directive.getName()).getDetails().add(new DirectiveArgumentDefaultValueModification(argument.getName(), oldDefaultValue, 
newDefaultValue)); } - String oldType = getTypeFromEdgeLabel(editOperation.getSourceEdge()); String newType = getTypeFromEdgeLabel(editOperation.getTargetEdge()); if (!oldType.equals(newType)) { getDirectiveModification(directive.getName()).getDetails().add(new DirectiveArgumentTypeModification(argument.getName(), oldType, newType)); - } } + } + /** + * Sometimes the diffing algorithm will give us an argument change when the argument container + * changed i.e. the argument was "moved" around because the deleted and newly added arguments + * look similar. + *

+ * We only want to report argument type changes if it makes sense i.e. if the argument container was the same. + */ + private boolean doesArgumentChangeMakeSense(Vertex oldArgument, Vertex newArgument, Mapping mapping) { + // Container for an argument in this case should be a field or directive + Vertex oldContainer = oldSchemaGraph.getFieldOrDirectiveForArgument(oldArgument); + Vertex newContainer = newSchemaGraph.getFieldOrDirectiveForArgument(newArgument); + + // Make sure the container is the same + return mapping.getTarget(oldContainer) == newContainer; } private void outputFieldTypeChanged(EditOperation editOperation) { @@ -1199,10 +1297,7 @@ private void outputFieldTypeChanged(EditOperation editOperation) { String oldType = getTypeFromEdgeLabel(editOperation.getSourceEdge()); String newType = getTypeFromEdgeLabel(editOperation.getTargetEdge()); interfaceModification.getDetails().add(new InterfaceFieldTypeModification(fieldName, oldType, newType)); - } - - } // TODO: this is not great, we should avoid parsing the label like that @@ -1690,7 +1785,13 @@ private void deletedDirective(EditOperation editOperation) { } private void argumentDeleted(EditOperation editOperation) { + // Note: sometimes the edit operation is the argument vertex itself being deleted + // Other times, it is the edge to the argument type being deleted Vertex deletedArgument = editOperation.getSourceVertex(); + if (deletedArgument == null) { + deletedArgument = editOperation.getSourceEdge().getTo(); + } + Vertex fieldOrDirective = oldSchemaGraph.getFieldOrDirectiveForArgument(deletedArgument); if (fieldOrDirective.isOfType(SchemaGraph.FIELD)) { Vertex field = fieldOrDirective; @@ -1700,6 +1801,12 @@ private void argumentDeleted(EditOperation editOperation) { if (isObjectDeleted(object.getName())) { return; } + if (isFieldDeletedFromExistingObject(object.getName(), field.getName())) { + return; + } + if (isArgumentDeletedFromExistingObjectField(object.getName(), field.getName(), 
deletedArgument.getName())) { + return; + } getObjectModification(object.getName()).getDetails().add(new ObjectFieldArgumentDeletion(field.getName(), deletedArgument.getName())); } else { assertTrue(fieldsContainerForField.isOfType(SchemaGraph.INTERFACE)); @@ -1707,6 +1814,12 @@ private void argumentDeleted(EditOperation editOperation) { if (isInterfaceDeleted(interfaze.getName())) { return; } + if (isFieldDeletedFromExistingInterface(interfaze.getName(), field.getName())) { + return; + } + if (isArgumentDeletedFromExistingInterfaceField(interfaze.getName(), field.getName(), deletedArgument.getName())) { + return; + } getInterfaceModification(interfaze.getName()).getDetails().add(new InterfaceFieldArgumentDeletion(field.getName(), deletedArgument.getName())); } } else { @@ -1715,14 +1828,21 @@ private void argumentDeleted(EditOperation editOperation) { if (isDirectiveDeleted(directive.getName())) { return; } + if (isArgumentDeletedFromExistingDirective(directive.getName(), deletedArgument.getName())) { + return; + } getDirectiveModification(directive.getName()).getDetails().add(new DirectiveArgumentDeletion(deletedArgument.getName())); } - } private void argumentAdded(EditOperation editOperation) { Vertex addedArgument = editOperation.getTargetVertex(); + if (addedArgument == null) { + addedArgument = editOperation.getTargetEdge().getTo(); + } + Vertex fieldOrDirective = newSchemaGraph.getFieldOrDirectiveForArgument(addedArgument); + if (fieldOrDirective.isOfType(SchemaGraph.FIELD)) { Vertex field = fieldOrDirective; Vertex fieldsContainerForField = newSchemaGraph.getFieldsContainerForField(field); @@ -1731,6 +1851,12 @@ private void argumentAdded(EditOperation editOperation) { if (isObjectAdded(object.getName())) { return; } + if (isFieldNewForExistingObject(object.getName(), field.getName())) { + return; + } + if (isArgumentNewForExistingObjectField(object.getName(), field.getName(), addedArgument.getName())) { + return; + } 
getObjectModification(object.getName()).getDetails().add(new ObjectFieldArgumentAddition(field.getName(), addedArgument.getName())); } else { assertTrue(fieldsContainerForField.isOfType(SchemaGraph.INTERFACE)); @@ -1738,6 +1864,12 @@ private void argumentAdded(EditOperation editOperation) { if (isInterfaceAdded(interfaze.getName())) { return; } + if (isFieldNewForExistingInterface(interfaze.getName(), field.getName())) { + return; + } + if (isArgumentNewForExistingInterfaceField(interfaze.getName(), field.getName(), addedArgument.getName())) { + return; + } getInterfaceModification(interfaze.getName()).getDetails().add(new InterfaceFieldArgumentAddition(field.getName(), addedArgument.getName())); } } else { @@ -1746,6 +1878,9 @@ private void argumentAdded(EditOperation editOperation) { if (isDirectiveAdded(directive.getName())) { return; } + if (isArgumentNewForExistingDirective(directive.getName(), addedArgument.getName())) { + return; + } getDirectiveModification(directive.getName()).getDetails().add(new DirectiveArgumentAddition(addedArgument.getName())); } } @@ -1754,6 +1889,11 @@ private void changedEnum(EditOperation editOperation) { String oldName = editOperation.getSourceVertex().getName(); String newName = editOperation.getTargetVertex().getName(); + if (oldName.equals(newName)) { + // Something else like description could have changed + return; + } + EnumModification modification = new EnumModification(oldName, newName); enumDifferences.put(oldName, modification); enumDifferences.put(newName, modification); @@ -1763,6 +1903,11 @@ private void changedScalar(EditOperation editOperation) { String oldName = editOperation.getSourceVertex().getName(); String newName = editOperation.getTargetVertex().getName(); + if (oldName.equals(newName)) { + // Something else like description could have changed + return; + } + ScalarModification modification = new ScalarModification(oldName, newName); scalarDifferences.put(oldName, modification); 
scalarDifferences.put(newName, modification); @@ -1772,6 +1917,11 @@ private void changedInputObject(EditOperation editOperation) { String oldName = editOperation.getSourceVertex().getName(); String newName = editOperation.getTargetVertex().getName(); + if (oldName.equals(newName)) { + // Something else like description could have changed + return; + } + InputObjectModification modification = new InputObjectModification(oldName, newName); inputObjectDifferences.put(oldName, modification); inputObjectDifferences.put(newName, modification); @@ -1781,6 +1931,11 @@ private void changedDirective(EditOperation editOperation) { String oldName = editOperation.getSourceVertex().getName(); String newName = editOperation.getTargetVertex().getName(); + if (oldName.equals(newName)) { + // Something else like description could have changed + return; + } + DirectiveModification modification = new DirectiveModification(oldName, newName); directiveDifferences.put(oldName, modification); directiveDifferences.put(newName, modification); @@ -1790,6 +1945,11 @@ private void changedObject(EditOperation editOperation) { String oldName = editOperation.getSourceVertex().getName(); String newName = editOperation.getTargetVertex().getName(); + if (oldName.equals(newName)) { + // Something else like description could have changed + return; + } + ObjectModification objectModification = new ObjectModification(oldName, newName); objectDifferences.put(oldName, objectModification); objectDifferences.put(newName, objectModification); @@ -1799,68 +1959,117 @@ private void changedInterface(EditOperation editOperation) { String oldName = editOperation.getSourceVertex().getName(); String newName = editOperation.getTargetVertex().getName(); + if (oldName.equals(newName)) { + // Something else like description could have changed + return; + } + InterfaceModification interfaceModification = new InterfaceModification(oldName, newName); interfaceDifferences.put(oldName, interfaceModification); 
interfaceDifferences.put(newName, interfaceModification); } private void changedUnion(EditOperation editOperation) { - String newUnionName = editOperation.getTargetVertex().getName(); - String oldUnionName = editOperation.getSourceVertex().getName(); - - UnionModification objectModification = new UnionModification(oldUnionName, newUnionName); - unionDifferences.put(oldUnionName, objectModification); - unionDifferences.put(newUnionName, objectModification); - } -// -// private void changedUnion(EditOperation editOperation) { -// // object changes include: adding/removing Interface, adding/removing applied directives, changing name -// String objectName = editOperation.getTargetVertex().getName(); -// -// ObjectAdded objectAdded = new ObjectAdded(objectName); -// changes.add(objectAdded); -// } -// -// private void changedEnum(EditOperation editOperation) { -// // object changes include: adding/removing Interface, adding/removing applied directives, changing name -// String objectName = editOperation.getTargetVertex().getName(); -// -// ObjectAdded objectAdded = new ObjectAdded(objectName); -// changes.add(objectAdded); -// } -// -// private void changedInputObject(EditOperation editOperation) { -// // object changes include: adding/removing Interface, adding/removing applied directives, changing name -// String objectName = editOperation.getTargetVertex().getName(); -// -// ObjectAdded objectAdded = new ObjectAdded(objectName); -// changes.add(objectAdded); -// } -// -// private void changedScalar(EditOperation editOperation) { -// // object changes include: adding/removing Interface, adding/removing applied directives, changing name -// String objectName = editOperation.getTargetVertex().getName(); -// -// ObjectAdded objectAdded = new ObjectAdded(objectName); -// changes.add(objectAdded); -// } -// -// private void changedField(EditOperation editOperation) { -// // object changes include: adding/removing Interface, adding/removing applied directives, changing name 
-// Vertex field = editOperation.getTargetVertex(); -// Vertex fieldsContainerForField = newSchemaGraph.getFieldsContainerForField(field); -// -// FieldModification objectAdded = new FieldModification(field.getName(), fieldsContainerForField.getName()); -// changes.add(objectAdded); -// } -// -// private void changedInputField(EditOperation editOperation) { -// // object changes include: adding/removing Interface, adding/removing applied directives, changing name -// String objectName = editOperation.getTargetVertex().getName(); -// -// ObjectAdded objectAdded = new ObjectAdded(objectName); -// changes.add(objectAdded); -// } + String newName = editOperation.getTargetVertex().getName(); + String oldName = editOperation.getSourceVertex().getName(); + if (oldName.equals(newName)) { + // Something else like description could have changed + return; + } + UnionModification objectModification = new UnionModification(oldName, newName); + unionDifferences.put(oldName, objectModification); + unionDifferences.put(newName, objectModification); + } + + /** + * The order to traverse edit operations according to the operation. + * + * @see #getTraversalOrder(List) + */ + private static final List OPERATION_TRAVERSAL_ORDER = List.of( + EditOperation.Operation.CHANGE_VERTEX, + EditOperation.Operation.INSERT_VERTEX, + EditOperation.Operation.DELETE_VERTEX, + EditOperation.Operation.CHANGE_EDGE, + EditOperation.Operation.INSERT_EDGE, + EditOperation.Operation.DELETE_EDGE + ); + + /** + * The order to traverse edit operations according to the vertex types involved. 
+ * + * @see #getTraversalOrder(List) + */ + private static final List TYPE_TRAVERSAL_ORDER = List.of( + // These are all top level declarations + SchemaGraph.SCHEMA, + SchemaGraph.OBJECT, + SchemaGraph.INTERFACE, + SchemaGraph.UNION, + SchemaGraph.SCALAR, + SchemaGraph.ENUM, + SchemaGraph.INPUT_OBJECT, + SchemaGraph.DIRECTIVE, + // These are all direct descendants of top level declarations + SchemaGraph.FIELD, + SchemaGraph.INPUT_FIELD, + SchemaGraph.ENUM_VALUE, + // Everything else + SchemaGraph.ARGUMENT, + SchemaGraph.APPLIED_DIRECTIVE, + SchemaGraph.APPLIED_ARGUMENT, + SchemaGraph.ISOLATED + ); + + /** + * The input list of {@link EditOperation}s does not conform to any order. + *

+ * We need to sort it as we sometimes rely on the parents being processed first. + *

+ * e.g. we ignore a new argument if the parent of the argument is new. + * However, if the argument addition is processed before the + */ + @VisibleForTesting + static List getTraversalOrder(List editOperations) { + ArrayList sorted = new ArrayList<>(editOperations); + + sorted.sort( + Comparator + .comparingInt((editOperation) -> { + int i = OPERATION_TRAVERSAL_ORDER.indexOf(editOperation.getOperation()); + if (i < 0) { + return Assert.assertShouldNeverHappen("Unknown operation: " + editOperation.getOperation()); + } + return i; + }) + .thenComparing((editOperation) -> { + // Converts this editOperation into an index from the order + // The lower the index, the earlier it appears in the sorted list + for (int i = 0; i < TYPE_TRAVERSAL_ORDER.size(); i++) { + String type = TYPE_TRAVERSAL_ORDER.get(i); + + if (isAnyVertexOfType(editOperation, type)) { + return i; + } + } + + return Assert.assertShouldNeverHappen("Unable to determine edit operation subject for: " + editOperation); + }) + ); + + return sorted; + } + + private static boolean isAnyVertexOfType(EditOperation edit, String type) { + return (edit.getSourceVertex() != null && edit.getSourceVertex().isOfType(type)) + || (edit.getTargetVertex() != null && edit.getTargetVertex().isOfType(type)) + || (edit.getSourceEdge() != null && isAnyVertexOfType(edit.getSourceEdge(), type)) + || (edit.getTargetEdge() != null && isAnyVertexOfType(edit.getTargetEdge(), type)); + } + + private static boolean isAnyVertexOfType(Edge edge, String type) { + return edge.getFrom().isOfType(type) || edge.getTo().isOfType(type); + } } diff --git a/src/main/java/graphql/schema/diffing/dot/Dotfile.java b/src/main/java/graphql/schema/diffing/dot/Dotfile.java deleted file mode 100644 index 6e7e51dfc4..0000000000 --- a/src/main/java/graphql/schema/diffing/dot/Dotfile.java +++ /dev/null @@ -1,124 +0,0 @@ -package graphql.schema.diffing.dot; - -import java.util.ArrayList; -import java.util.List; - - -public class Dotfile { - - public static 
class Node { - public Node(String id, String label, String color) { - this.id = id; - this.label = label; - this.color = color; - } - - String id; - String label; - String color; - } - - public static class Edge { - public Edge(String from, String to, String label) { - this.from = from; - this.to = to; - this.label = label; - } - - String from; - String to; - String label; - } - - public static class SubGraph { - - String id; - String label; - List edges = new ArrayList<>(); - List nodes = new ArrayList<>(); - - public SubGraph(String id, String label) { - this.id = id; - this.label = label; - } - - public String getId() { - return id; - } - - public void addEdge(Edge e) { - edges.add(e); - } - - public void addNode(Node node) { - nodes.add(node); - } - - } - - - private List nodes = new ArrayList<>(); - private List edges = new ArrayList<>(); -// private List subGraphs = new ArrayList<>(); - - - public void addNode(Node node) { - nodes.add(node); - } - - public void addNode(String id, String label, String color) { - nodes.add(new Node(id, label, color)); - } - - public void addEdge(String from, String to, String label) { - edges.add(new Edge(from, to, label)); - } - - public void addEdge(Edge e) { - edges.add(e); - } - -// public void addSubgraph(SubGraph subGraph) { -// subGraphs.add(subGraph); -// } - - public String getId() { - return ""; - } - - public String print() { - StringBuilder result = new StringBuilder(); - result.append("graph G {\n"); - for (Node node : nodes) { - result.append(node.id).append("[label=\"").append(node.label).append("\" color=").append(node.color).append(" style=filled").append("];\n"); - } - for (Edge edge : edges) { - result.append(edge.from).append(" -- ").append(edge.to).append("[label=\"").append(edge.label).append("\"];\n"); - } -// for (SubGraph subGraph : subGraphs) { -// result.append("subgraph cluster_").append(subGraph.id).append("{\n").append("label=\"").append(subGraph.label).append("\";\n"); -// for (Node node : 
subGraph.nodes) { -// result.append(node.id).append("[label=\"").append(node.label).append("\" color=").append(node.color).append(" style=filled").append("];\n"); -// } -// for (Edge edge : subGraph.edges) { -// result.append(edge.from).append(" -- ").append(edge.to).append("[label=\"").append(edge.label).append("\"];\n"); -// } -// result.append("}"); -// -// } -// result.append(explanation()); - result.append("}"); - return result.toString(); - } - - String explanation() { - return "subgraph cluster_explanation {\n" + - "label=\"Explanation\";\n" + - "concept [color=green style=filled label=\"Concept\"];\n" + - "orgEntity [color=lightblue style=filled label=\"Org Entity\"];\n" + - "product [color=red style=filled label=\"Product\"];\n" + - "concept -> product [style=invis];\n" + - "orgEntity -> concept [style=invis];\n" + - "}"; - } -} diff --git a/src/main/java/graphql/schema/fetching/LambdaFetchingSupport.java b/src/main/java/graphql/schema/fetching/LambdaFetchingSupport.java index c3f4fd486f..51ced4aba2 100644 --- a/src/main/java/graphql/schema/fetching/LambdaFetchingSupport.java +++ b/src/main/java/graphql/schema/fetching/LambdaFetchingSupport.java @@ -204,14 +204,16 @@ static Function mkCallFunction(Class targetClass, String targ return getterFunction; } - private static MethodHandles.Lookup getLookup(Class targetClass) throws IllegalAccessException { + private static MethodHandles.Lookup getLookup(Class targetClass) { MethodHandles.Lookup lookupMe = MethodHandles.lookup(); // - // This is a Java 9 approach to method look up allowing private access - // which we don't want to use yet until we get to Java 11 + // This is a Java 9+ approach to method look up allowing private access // - //lookupMe = MethodHandles.privateLookupIn(targetClass, lookupMe); - return lookupMe; + try { + return MethodHandles.privateLookupIn(targetClass, lookupMe); + } catch (IllegalAccessException e) { + return lookupMe; + } } } diff --git 
a/src/main/java/graphql/schema/idl/SchemaDirectiveWiringEnvironmentImpl.java b/src/main/java/graphql/schema/idl/SchemaDirectiveWiringEnvironmentImpl.java index 634c98c8f5..94353f8956 100644 --- a/src/main/java/graphql/schema/idl/SchemaDirectiveWiringEnvironmentImpl.java +++ b/src/main/java/graphql/schema/idl/SchemaDirectiveWiringEnvironmentImpl.java @@ -130,7 +130,7 @@ public GraphQLFieldDefinition getFieldDefinition() { public DataFetcher getFieldDataFetcher() { assertNotNull(fieldDefinition, () -> "An output field must be in context to call this method"); assertNotNull(fieldsContainer, () -> "An output field container must be in context to call this method"); - return codeRegistry.getDataFetcher(fieldsContainer, fieldDefinition); + return codeRegistry.getDataFetcher(FieldCoordinates.coordinates(fieldsContainer, fieldDefinition), fieldDefinition); } @Override diff --git a/src/main/java/graphql/schema/idl/SchemaGenerator.java b/src/main/java/graphql/schema/idl/SchemaGenerator.java index bb74a7a75a..819f820734 100644 --- a/src/main/java/graphql/schema/idl/SchemaGenerator.java +++ b/src/main/java/graphql/schema/idl/SchemaGenerator.java @@ -18,6 +18,33 @@ /** * This can generate a working runtime schema from a type registry and runtime wiring + *

+ * The generator uses the {@link RuntimeWiring} to insert code that runs behind the schema + * elements such as {@link graphql.schema.DataFetcher}s, {@link graphql.schema.TypeResolver}s + * and scalar {@link graphql.schema.Coercing}. + *

+ * The order of {@link graphql.schema.DataFetcher} resolution is as follows: + *

    + *
  1. If the {@link WiringFactory} provides the {@link graphql.schema.DataFetcherFactory} for a field in its parent type then that is used
  2. + *
  3. If the {@link WiringFactory} provides the {@link graphql.schema.DataFetcher} for a field in its parent type then that is used
  4. + *
  5. If the {@link RuntimeWiring} provides the {@link graphql.schema.DataFetcher} for a field in its parent type, then that is used
  6. + *
  7. If the {@link RuntimeWiring} provides a default {@link graphql.schema.DataFetcher} for a fields parent type, then that is used
  8. + *
  9. If the {@link WiringFactory} provides a default {@link graphql.schema.DataFetcherFactory} for any element then that is used
  10. + *
  11. If the {@link GraphQLCodeRegistry.Builder#getDefaultDataFetcherFactory()} provides a {@link graphql.schema.DataFetcherFactory} for a value then that is used
  12. + *
  13. Finally a {@link graphql.schema.PropertyDataFetcher} is used as a last resort for the field
  14. + *
+ *

+ * The order of {@link graphql.schema.TypeResolver} resolution is as follows: + *

    + *
  1. If the {@link WiringFactory} provides a {@link graphql.schema.TypeResolver} then that is used
  2. + *
  3. If the {@link TypeRuntimeWiring} provides a {@link graphql.schema.TypeResolver} then that is used
  4. + *
+ *

+ * The order of {@link graphql.schema.GraphQLScalarType} resolution is as follows: + *

    + *
  1. If the {@link WiringFactory} provides a {@link graphql.schema.GraphQLScalarType} then that is used
  2. + *
  3. Otherwise {@link RuntimeWiring#getScalars()} is used
  4. + *
*/ @PublicApi public class SchemaGenerator { diff --git a/src/main/java/graphql/schema/visitor/GraphQLSchemaTraversalControl.java b/src/main/java/graphql/schema/visitor/GraphQLSchemaTraversalControl.java new file mode 100644 index 0000000000..7020f088cc --- /dev/null +++ b/src/main/java/graphql/schema/visitor/GraphQLSchemaTraversalControl.java @@ -0,0 +1,81 @@ +package graphql.schema.visitor; + +import graphql.PublicApi; +import graphql.schema.GraphQLSchemaElement; +import graphql.util.TraversalControl; +import graphql.util.TraverserContext; +import graphql.util.TreeTransformerUtil; + +/** + * This indicates what traversal control to apply during the visitation + * and can be created via calls to methods like {@link GraphQLSchemaVisitorEnvironment#ok()} + * or {@link GraphQLSchemaVisitorEnvironment#changeNode(GraphQLSchemaElement)} say + */ +@PublicApi +public class GraphQLSchemaTraversalControl { + private final GraphQLSchemaElement element; + private final Control control; + + enum Control { + CONTINUE(TraversalControl.CONTINUE), + QUIT(TraversalControl.QUIT), + CHANGE(TraversalControl.CONTINUE), + DELETE(TraversalControl.CONTINUE), + INSERT_BEFORE(TraversalControl.CONTINUE), + INSERT_AFTER(TraversalControl.CONTINUE); + + private final TraversalControl traversalControl; + + Control(TraversalControl traversalControl) { + this.traversalControl = traversalControl; + } + + public TraversalControl toTraversalControl() { + return traversalControl; + } + } + + static final GraphQLSchemaTraversalControl CONTINUE = new GraphQLSchemaTraversalControl(Control.CONTINUE, null); + static final GraphQLSchemaTraversalControl QUIT = new GraphQLSchemaTraversalControl(Control.QUIT, null); + static final GraphQLSchemaTraversalControl DELETE = new GraphQLSchemaTraversalControl(Control.DELETE, null); + + GraphQLSchemaTraversalControl(Control control, GraphQLSchemaElement element) { + this.element = element; + this.control = control; + } + + GraphQLSchemaElement getElement() { + return 
element; + } + + Control getControl() { + return control; + } + + boolean isAbortive() { + return control == Control.QUIT; + } + + boolean isMutative() { + return control == Control.DELETE || control == Control.CHANGE || control == Control.INSERT_AFTER || control == Control.INSERT_BEFORE; + } + + TraversalControl toTraversalControl(TraverserContext context) { + if (control == Control.CONTINUE || control == Control.QUIT) { + return control.toTraversalControl(); + } + if (control == Control.DELETE) { + TreeTransformerUtil.deleteNode(context); + } + if (control == Control.CHANGE) { + TreeTransformerUtil.changeNode(context, element); + } + if (control == Control.INSERT_AFTER) { + TreeTransformerUtil.insertAfter(context, element); + } + if (control == Control.INSERT_BEFORE) { + TreeTransformerUtil.insertAfter(context, element); + } + return TraversalControl.CONTINUE; + } +} diff --git a/src/main/java/graphql/schema/visitor/GraphQLSchemaVisitor.java b/src/main/java/graphql/schema/visitor/GraphQLSchemaVisitor.java new file mode 100644 index 0000000000..ff632a70ba --- /dev/null +++ b/src/main/java/graphql/schema/visitor/GraphQLSchemaVisitor.java @@ -0,0 +1,329 @@ +package graphql.schema.visitor; + +import graphql.PublicSpi; +import graphql.schema.GraphQLAppliedDirective; +import graphql.schema.GraphQLAppliedDirectiveArgument; +import graphql.schema.GraphQLArgument; +import graphql.schema.GraphQLDirective; +import graphql.schema.GraphQLDirectiveContainer; +import graphql.schema.GraphQLEnumType; +import graphql.schema.GraphQLEnumValueDefinition; +import graphql.schema.GraphQLFieldDefinition; +import graphql.schema.GraphQLFieldsContainer; +import graphql.schema.GraphQLInputObjectField; +import graphql.schema.GraphQLInputObjectType; +import graphql.schema.GraphQLInputType; +import graphql.schema.GraphQLInterfaceType; +import graphql.schema.GraphQLNamedInputType; +import graphql.schema.GraphQLNamedOutputType; +import graphql.schema.GraphQLNamedSchemaElement; +import 
graphql.schema.GraphQLObjectType; +import graphql.schema.GraphQLOutputType; +import graphql.schema.GraphQLScalarType; +import graphql.schema.GraphQLSchemaElement; +import graphql.schema.GraphQLTypeVisitor; +import graphql.schema.GraphQLUnionType; +import graphql.util.TraversalControl; + +/** + * This visitor interface offers more "smarts" above {@link GraphQLTypeVisitor} and aims to be easier to use + * with more type safe helpers. + *

+ * You would use it places that need a {@link GraphQLTypeVisitor} by doing `new GraphQLSchemaVisitor() { ...}.toTypeVisitor()` + */ +@PublicSpi +public interface GraphQLSchemaVisitor { + + /** + * A {@link GraphQLSchemaVisitorEnvironment} environment specific to {@link GraphQLAppliedDirective} + */ + interface AppliedDirectiveVisitorEnvironment extends GraphQLSchemaVisitorEnvironment { + GraphQLDirectiveContainer getContainer(); + } + + /** + * Called when visiting a GraphQLAppliedDirective in the schema + * + * @param appliedDirective the schema element being visited + * @param environment the visiting environment + * + * @return a control value which is typically {@link GraphQLSchemaVisitorEnvironment#ok()}} + */ + default GraphQLSchemaTraversalControl visitAppliedDirective(GraphQLAppliedDirective appliedDirective, AppliedDirectiveVisitorEnvironment environment) { + return environment.ok(); + } + + /** + * A {@link GraphQLSchemaVisitorEnvironment} environment specific to {@link GraphQLAppliedDirectiveArgument} + */ + interface AppliedDirectiveArgumentVisitorEnvironment extends GraphQLSchemaVisitorEnvironment { + + GraphQLAppliedDirective getContainer(); + + /** + * @return this elements type that has been unwrapped of {@link graphql.schema.GraphQLNonNull} and {@link graphql.schema.GraphQLList} + */ + GraphQLNamedInputType getUnwrappedType(); + } + + /** + * Called when visiting a {@link GraphQLAppliedDirectiveArgument} in the schema + * + * @param appliedDirectiveArgument the schema element being visited + * @param environment the visiting environment + * + * @return a control value which is typically {@link GraphQLSchemaVisitorEnvironment#ok()}} + */ + default GraphQLSchemaTraversalControl visitAppliedDirectiveArgument(GraphQLAppliedDirectiveArgument appliedDirectiveArgument, AppliedDirectiveArgumentVisitorEnvironment environment) { + return environment.ok(); + } + + /** + * A {@link GraphQLSchemaVisitorEnvironment} environment specific to {@link 
GraphQLArgument} + */ + interface ArgumentVisitorEnvironment extends GraphQLSchemaVisitorEnvironment { + /** + * @return either a {@link GraphQLFieldDefinition} or a {@link graphql.schema.GraphQLDirective} + */ + GraphQLNamedSchemaElement getContainer(); + + /** + * @return this elements type that has been unwrapped of {@link graphql.schema.GraphQLNonNull} and {@link graphql.schema.GraphQLList} + */ + GraphQLNamedInputType getUnwrappedType(); + } + + /** + * Called when visiting a {@link GraphQLArgument} in the schema + * + * @param argument the schema element being visited + * @param environment the visiting environment + * + * @return a control value which is typically {@link GraphQLSchemaVisitorEnvironment#ok()}} + */ + default GraphQLSchemaTraversalControl visitArgument(GraphQLArgument argument, ArgumentVisitorEnvironment environment) { + return environment.ok(); + } + + interface DirectiveVisitorEnvironment extends GraphQLSchemaVisitorEnvironment { + } + + /** + * Called when visiting a {@link GraphQLDirective} in the schema + * + * @param directive the schema element being visited + * @param environment the visiting environment + * + * @return a control value which is typically {@link GraphQLSchemaVisitorEnvironment#ok()}} + */ + default GraphQLSchemaTraversalControl visitDirective(GraphQLDirective directive, DirectiveVisitorEnvironment environment) { + return environment.ok(); + } + /** + * A {@link GraphQLSchemaVisitorEnvironment} environment specific to {@link GraphQLEnumType} + */ + interface EnumTypeVisitorEnvironment extends GraphQLSchemaVisitorEnvironment { + } + + /** + * Called when visiting a {@link GraphQLEnumType} in the schema + * + * @param enumType the schema element being visited + * @param environment the visiting environment + * + * @return a control value which is typically {@link GraphQLSchemaVisitorEnvironment#ok()}} + */ + default GraphQLSchemaTraversalControl visitEnumType(GraphQLEnumType enumType, EnumTypeVisitorEnvironment environment) 
{ + return environment.ok(); + } + + /** + * A {@link GraphQLSchemaVisitorEnvironment} environment specific to {@link GraphQLEnumValueDefinition} + */ + interface EnumValueDefinitionVisitorEnvironment extends GraphQLSchemaVisitorEnvironment { + GraphQLEnumType getContainer(); + } + + /** + * Called when visiting a {@link GraphQLEnumValueDefinition} in the schema + * + * @param enumValueDefinition the schema element being visited + * @param environment the visiting environment + * + * @return a control value which is typically {@link GraphQLSchemaVisitorEnvironment#ok()}} + */ + default GraphQLSchemaTraversalControl visitEnumValueDefinition(GraphQLEnumValueDefinition enumValueDefinition, EnumValueDefinitionVisitorEnvironment environment) { + return environment.ok(); + } + + + /** + * A {@link GraphQLSchemaVisitorEnvironment} environment specific to {@link GraphQLFieldDefinition} + */ + interface FieldDefinitionVisitorEnvironment extends GraphQLSchemaVisitorEnvironment { + GraphQLFieldsContainer getContainer(); + + /** + * @return this elements type that has been unwrapped of {@link graphql.schema.GraphQLNonNull} and {@link graphql.schema.GraphQLList} + */ + GraphQLNamedOutputType getUnwrappedType(); + } + + /** + * Called when visiting a {@link GraphQLFieldDefinition} in the schema + * + * @param fieldDefinition the schema element being visited + * @param environment the visiting environment + * + * @return a control value which is typically {@link GraphQLSchemaVisitorEnvironment#ok()}} + */ + default GraphQLSchemaTraversalControl visitFieldDefinition(GraphQLFieldDefinition fieldDefinition, FieldDefinitionVisitorEnvironment environment) { + return environment.ok(); + } + + /** + * A {@link GraphQLSchemaVisitorEnvironment} environment specific to {@link GraphQLInputObjectField} + */ + interface InputObjectFieldVisitorEnvironment extends GraphQLSchemaVisitorEnvironment { + GraphQLInputObjectType getContainer(); + + /** + * @return this elements type that has been 
unwrapped of {@link graphql.schema.GraphQLNonNull} and {@link graphql.schema.GraphQLList} + */ + GraphQLNamedInputType getUnwrappedType(); + } + + /** + * Called when visiting a {@link GraphQLInputObjectField} in the schema + * + * @param inputObjectField the schema element being visited + * @param environment the visiting environment + * + * @return a control value which is typically {@link GraphQLSchemaVisitorEnvironment#ok()}} + */ + default GraphQLSchemaTraversalControl visitInputObjectField(GraphQLInputObjectField inputObjectField, InputObjectFieldVisitorEnvironment environment) { + return environment.ok(); + } + + /** + * A {@link GraphQLSchemaVisitorEnvironment} environment specific to {@link GraphQLInputObjectType} + */ + interface InputObjectTypeVisitorEnvironment extends GraphQLSchemaVisitorEnvironment { + } + + /** + * Called when visiting a {@link GraphQLInputObjectType} in the schema + * + * @param inputObjectType the schema element being visited + * @param environment the visiting environment + * + * @return a control value which is typically {@link GraphQLSchemaVisitorEnvironment#ok()}} + */ + default GraphQLSchemaTraversalControl visitInputObjectType(GraphQLInputObjectType inputObjectType, InputObjectTypeVisitorEnvironment environment) { + return environment.ok(); + } + + /** + * A {@link GraphQLSchemaVisitorEnvironment} environment specific to {@link GraphQLInterfaceType} + */ + interface InterfaceTypeVisitorEnvironment extends GraphQLSchemaVisitorEnvironment { + } + + /** + * Called when visiting a {@link GraphQLInterfaceType} in the schema + * + * @param interfaceType the schema element being visited + * @param environment the visiting environment + * + * @return a control value which is typically {@link GraphQLSchemaVisitorEnvironment#ok()}} + */ + default GraphQLSchemaTraversalControl visitInterfaceType(GraphQLInterfaceType interfaceType, InterfaceTypeVisitorEnvironment environment) { + return environment.ok(); + } + + + /** + * A {@link 
GraphQLSchemaVisitorEnvironment} environment specific to {@link GraphQLObjectType} + */ + interface ObjectVisitorEnvironment extends GraphQLSchemaVisitorEnvironment { + } + + /** + * Called when visiting a {@link GraphQLObjectType} in the schema + * + * @param objectType the schema element being visited + * @param environment the visiting environment + * + * @return a control value which is typically {@link GraphQLSchemaVisitorEnvironment#ok()}} + */ + default GraphQLSchemaTraversalControl visitObjectType(GraphQLObjectType objectType, ObjectVisitorEnvironment environment) { + return environment.ok(); + } + + /** + * A {@link GraphQLSchemaVisitorEnvironment} environment specific to {@link GraphQLScalarType} + */ + interface ScalarTypeVisitorEnvironment extends GraphQLSchemaVisitorEnvironment { + } + + /** + * Called when visiting a {@link GraphQLScalarType} in the schema + * + * @param scalarType the schema element being visited + * @param environment the visiting environment + * + * @return a control value which is typically {@link GraphQLSchemaVisitorEnvironment#ok()}} + */ + default GraphQLSchemaTraversalControl visitScalarType(GraphQLScalarType scalarType, ScalarTypeVisitorEnvironment environment) { + return environment.ok(); + } + + /** + * A {@link GraphQLSchemaVisitorEnvironment} environment specific to {@link GraphQLUnionType} + */ + interface UnionTypeVisitorEnvironment extends GraphQLSchemaVisitorEnvironment { + } + + /** + * Called when visiting a {@link GraphQLUnionType} in the schema + * + * @param unionType the schema element being visited + * @param environment the visiting environment + * + * @return a control value which is typically {@link GraphQLSchemaVisitorEnvironment#ok()}} + */ + default GraphQLSchemaTraversalControl visitUnionType(GraphQLUnionType unionType, UnionTypeVisitorEnvironment environment) { + return environment.ok(); + } + + + /** + * A {@link GraphQLSchemaVisitorEnvironment} environment specific to {@link GraphQLSchemaElement} + */ 
+ interface SchemaElementVisitorEnvironment extends GraphQLSchemaVisitorEnvironment { + } + + /** + * Called when visiting any {@link GraphQLSchemaElement} in the schema. Since every element in the schema + * is a schema element, this visitor method will be called back for every element in the schema + * + * @param schemaElement the schema element being visited + * @param environment the visiting environment + * + * @return a control value which is typically {@link GraphQLSchemaVisitorEnvironment#ok()}} + */ + default GraphQLSchemaTraversalControl visitSchemaElement(GraphQLSchemaElement schemaElement, SchemaElementVisitorEnvironment environment) { + return environment.ok(); + } + + /** + * This allows you to turn this smarter visitor into the base {@link graphql.schema.GraphQLTypeVisitor} interface + * + * @return a type visitor + */ + default GraphQLTypeVisitor toTypeVisitor() { + return new GraphQLSchemaVisitorAdapter(this); + } + + +} diff --git a/src/main/java/graphql/schema/visitor/GraphQLSchemaVisitorAdapter.java b/src/main/java/graphql/schema/visitor/GraphQLSchemaVisitorAdapter.java new file mode 100644 index 0000000000..7b42b4f01a --- /dev/null +++ b/src/main/java/graphql/schema/visitor/GraphQLSchemaVisitorAdapter.java @@ -0,0 +1,265 @@ +package graphql.schema.visitor; + +import graphql.Internal; +import graphql.schema.GraphQLAppliedDirective; +import graphql.schema.GraphQLAppliedDirectiveArgument; +import graphql.schema.GraphQLArgument; +import graphql.schema.GraphQLDirective; +import graphql.schema.GraphQLDirectiveContainer; +import graphql.schema.GraphQLEnumType; +import graphql.schema.GraphQLEnumValueDefinition; +import graphql.schema.GraphQLFieldDefinition; +import graphql.schema.GraphQLFieldsContainer; +import graphql.schema.GraphQLInputObjectField; +import graphql.schema.GraphQLInputObjectType; +import graphql.schema.GraphQLInterfaceType; +import graphql.schema.GraphQLNamedInputType; +import graphql.schema.GraphQLNamedOutputType; +import 
graphql.schema.GraphQLNamedSchemaElement; +import graphql.schema.GraphQLObjectType; +import graphql.schema.GraphQLScalarType; +import graphql.schema.GraphQLSchemaElement; +import graphql.schema.GraphQLTypeUtil; +import graphql.schema.GraphQLTypeVisitorStub; +import graphql.schema.GraphQLUnionType; +import graphql.util.TraversalControl; +import graphql.util.TraverserContext; + +import java.util.function.Supplier; + +import static graphql.schema.visitor.GraphQLSchemaVisitor.FieldDefinitionVisitorEnvironment; +import static graphql.schema.visitor.GraphQLSchemaVisitor.ObjectVisitorEnvironment; + +@Internal +class GraphQLSchemaVisitorAdapter extends GraphQLTypeVisitorStub { + + private final GraphQLSchemaVisitor schemaVisitor; + + GraphQLSchemaVisitorAdapter(GraphQLSchemaVisitor schemaVisitor) { + this.schemaVisitor = schemaVisitor; + } + + static class SchemaElementEnv extends GraphQLSchemaVisitorEnvironmentImpl implements GraphQLSchemaVisitor.SchemaElementVisitorEnvironment { + public SchemaElementEnv(TraverserContext context) { + super(context); + } + } + + private TraversalControl visitE(TraverserContext context, Supplier visitCall) { + + GraphQLSchemaTraversalControl generalCall = schemaVisitor.visitSchemaElement(context.thisNode(), new SchemaElementEnv(context)); + // if they have changed anything in the general schema element visitation then we don't call the specific visit method + if (generalCall.isAbortive() || generalCall.isMutative()) { + return generalCall.toTraversalControl(context); + } + GraphQLSchemaTraversalControl specificCall = visitCall.get(); + return specificCall.toTraversalControl(context); + } + + static class AppliedDirectiveArgumentEnv extends GraphQLSchemaVisitorEnvironmentImpl implements GraphQLSchemaVisitor.AppliedDirectiveArgumentVisitorEnvironment { + public AppliedDirectiveArgumentEnv(TraverserContext context) { + super(context); + } + + @Override + public GraphQLAppliedDirective getContainer() { + return (GraphQLAppliedDirective) 
context.getParentNode(); + } + + @Override + public GraphQLNamedInputType getUnwrappedType() { + return GraphQLTypeUtil.unwrapAllAs(getElement().getType()); + } + } + + @Override + public TraversalControl visitGraphQLAppliedDirectiveArgument(GraphQLAppliedDirectiveArgument node, TraverserContext context) { + return visitE(context, () -> schemaVisitor.visitAppliedDirectiveArgument(node, new AppliedDirectiveArgumentEnv(context))); + } + + static class AppliedDirectiveEnv extends GraphQLSchemaVisitorEnvironmentImpl implements GraphQLSchemaVisitor.AppliedDirectiveVisitorEnvironment { + public AppliedDirectiveEnv(TraverserContext context) { + super(context); + } + + @Override + public GraphQLDirectiveContainer getContainer() { + return (GraphQLDirectiveContainer) context.getParentNode(); + } + } + + @Override + public TraversalControl visitGraphQLAppliedDirective(GraphQLAppliedDirective node, TraverserContext context) { + return visitE(context, () -> schemaVisitor.visitAppliedDirective(node, new AppliedDirectiveEnv(context))); + } + + static class ArgumentEnv extends GraphQLSchemaVisitorEnvironmentImpl implements GraphQLSchemaVisitor.ArgumentVisitorEnvironment { + public ArgumentEnv(TraverserContext context) { + super(context); + } + + @Override + public GraphQLNamedSchemaElement getContainer() { + return (GraphQLNamedSchemaElement) context.getParentNode(); + } + + @Override + public GraphQLNamedInputType getUnwrappedType() { + return GraphQLTypeUtil.unwrapAllAs(getElement().getType()); + } + } + + @Override + public TraversalControl visitGraphQLArgument(GraphQLArgument node, TraverserContext context) { + return visitE(context, () -> schemaVisitor.visitArgument(node, new ArgumentEnv(context))); + } + + + static class DirectiveEnv extends GraphQLSchemaVisitorEnvironmentImpl implements GraphQLSchemaVisitor.DirectiveVisitorEnvironment { + public DirectiveEnv(TraverserContext context) { + super(context); + } + } + + @Override + public TraversalControl 
visitGraphQLDirective(GraphQLDirective node, TraverserContext context) { + // + // we only want to visit directive definitions at the schema level + // this is our chance to fix up the applied directive problem + // of one class used in two contexts. + // + if (context.getParentNode() == null) { + return visitE(context, () -> schemaVisitor.visitDirective(node, new DirectiveEnv(context))); + + } + return TraversalControl.CONTINUE; + } + + static class EnumTypeEnv extends GraphQLSchemaVisitorEnvironmentImpl implements GraphQLSchemaVisitor.EnumTypeVisitorEnvironment { + public EnumTypeEnv(TraverserContext context) { + super(context); + } + } + + @Override + public TraversalControl visitGraphQLEnumType(GraphQLEnumType node, TraverserContext context) { + return visitE(context, () -> schemaVisitor.visitEnumType(node, new EnumTypeEnv(context))); + } + + static class EnumValueDefinitionEnv extends GraphQLSchemaVisitorEnvironmentImpl implements GraphQLSchemaVisitor.EnumValueDefinitionVisitorEnvironment { + public EnumValueDefinitionEnv(TraverserContext context) { + super(context); + } + + @Override + public GraphQLEnumType getContainer() { + return (GraphQLEnumType) context.getParentNode(); + } + } + + @Override + public TraversalControl visitGraphQLEnumValueDefinition(GraphQLEnumValueDefinition node, TraverserContext context) { + return visitE(context, () -> schemaVisitor.visitEnumValueDefinition(node, new EnumValueDefinitionEnv(context))); + } + + static class FieldDefinitionEnv extends GraphQLSchemaVisitorEnvironmentImpl implements FieldDefinitionVisitorEnvironment { + + public FieldDefinitionEnv(TraverserContext context) { + super(context); + } + + @Override + public GraphQLFieldsContainer getContainer() { + return (GraphQLFieldsContainer) context.getParentNode(); + } + + @Override + public GraphQLNamedOutputType getUnwrappedType() { + return GraphQLTypeUtil.unwrapAllAs(getElement().getType()); + } + } + + @Override + public TraversalControl 
visitGraphQLFieldDefinition(GraphQLFieldDefinition node, TraverserContext context) { + return visitE(context, () -> schemaVisitor.visitFieldDefinition(node, new FieldDefinitionEnv(context))); + } + + static class InputObjectFieldEnv extends GraphQLSchemaVisitorEnvironmentImpl implements GraphQLSchemaVisitor.InputObjectFieldVisitorEnvironment { + public InputObjectFieldEnv(TraverserContext context) { + super(context); + } + + @Override + public GraphQLInputObjectType getContainer() { + return (GraphQLInputObjectType) context.getParentNode(); + } + + @Override + public GraphQLNamedInputType getUnwrappedType() { + return GraphQLTypeUtil.unwrapAllAs(getElement().getType()); + } + } + + @Override + public TraversalControl visitGraphQLInputObjectField(GraphQLInputObjectField node, TraverserContext context) { + return visitE(context, () -> schemaVisitor.visitInputObjectField(node, new InputObjectFieldEnv(context))); + } + + static class InputObjectTypeEnv extends GraphQLSchemaVisitorEnvironmentImpl implements GraphQLSchemaVisitor.InputObjectTypeVisitorEnvironment { + public InputObjectTypeEnv(TraverserContext context) { + super(context); + } + } + + @Override + public TraversalControl visitGraphQLInputObjectType(GraphQLInputObjectType node, TraverserContext context) { + return visitE(context, () -> schemaVisitor.visitInputObjectType(node, new InputObjectTypeEnv(context))); + } + + + static class InterfaceTypeEnv extends GraphQLSchemaVisitorEnvironmentImpl implements GraphQLSchemaVisitor.InterfaceTypeVisitorEnvironment { + public InterfaceTypeEnv(TraverserContext context) { + super(context); + } + } + + @Override + public TraversalControl visitGraphQLInterfaceType(GraphQLInterfaceType node, TraverserContext context) { + return visitE(context, () -> schemaVisitor.visitInterfaceType(node, new InterfaceTypeEnv(context))); + } + + static class ObjectEnv extends GraphQLSchemaVisitorEnvironmentImpl implements ObjectVisitorEnvironment { + public ObjectEnv(TraverserContext 
context) { + super(context); + } + + } + + @Override + public TraversalControl visitGraphQLObjectType(GraphQLObjectType node, TraverserContext context) { + return visitE(context, () -> schemaVisitor.visitObjectType(node, new ObjectEnv(context))); + } + + + static class ScalarTypeEnv extends GraphQLSchemaVisitorEnvironmentImpl implements GraphQLSchemaVisitor.ScalarTypeVisitorEnvironment { + public ScalarTypeEnv(TraverserContext context) { + super(context); + } + } + + @Override + public TraversalControl visitGraphQLScalarType(GraphQLScalarType node, TraverserContext context) { + return visitE(context, () -> schemaVisitor.visitScalarType(node, new ScalarTypeEnv(context))); + } + + static class UnionTypeEnv extends GraphQLSchemaVisitorEnvironmentImpl implements GraphQLSchemaVisitor.UnionTypeVisitorEnvironment { + public UnionTypeEnv(TraverserContext context) { + super(context); + } + } + + @Override + public TraversalControl visitGraphQLUnionType(GraphQLUnionType node, TraverserContext context) { + return visitE(context, () -> schemaVisitor.visitUnionType(node, new UnionTypeEnv(context))); + } +} diff --git a/src/main/java/graphql/schema/visitor/GraphQLSchemaVisitorEnvironment.java b/src/main/java/graphql/schema/visitor/GraphQLSchemaVisitorEnvironment.java new file mode 100644 index 0000000000..99cc4988e8 --- /dev/null +++ b/src/main/java/graphql/schema/visitor/GraphQLSchemaVisitorEnvironment.java @@ -0,0 +1,89 @@ +package graphql.schema.visitor; + +import graphql.schema.GraphQLCodeRegistry; +import graphql.schema.GraphQLSchema; +import graphql.schema.GraphQLSchemaElement; + +import java.util.List; + +public interface GraphQLSchemaVisitorEnvironment { + + /** + * @return the element that is being visited + */ + T getElement(); + + /** + * This returns the schema element that led to this element, eg a field is contained + * in a type which is pointed to be another field say. 
+ * + * @return a list of schema elements leading to this current element + */ + List getLeadingElements(); + + /** + * This returns the schema element that led to this element but with {@link graphql.schema.GraphQLModifiedType} wrappers + * removed. + * + * @return a list of schema elements leading to this current element + */ + List getUnwrappedLeadingElements(); + + /** + * @return the schema that is being visited upon + */ + GraphQLSchema getSchema(); + + /** + * This will return a value if the visitation call was via {@link graphql.schema.SchemaTransformer} + * + * @return a code registry builder + */ + GraphQLCodeRegistry.Builder getCodeRegistry(); + + + /** + * @return When returned the traversal will continue as planned. + */ + GraphQLSchemaTraversalControl ok(); + + /** + * @return When returned from a {@link GraphQLSchemaVisitor}'s method, indicates exiting the traversal. + */ + GraphQLSchemaTraversalControl quit(); + + /** + * Called to change the current node to the specific node + * + * @param schemaElement the schema element to change + * + * @return a control that changes the current node to the given node + */ + GraphQLSchemaTraversalControl changeNode(T schemaElement); + + /** + * Called to delete the current node + * + * @return a control that deletes the current node + */ + GraphQLSchemaTraversalControl deleteNode(); + + /** + * Called to insert the current schema element after the specified schema element + * + * @param toInsertAfter the schema element to insert after + * + * @return a control that inserts the given node after the current node + */ + GraphQLSchemaTraversalControl insertAfter(T toInsertAfter); + + /** + * Called to insert the current schema element before the specified schema element + * + * @param toInsertBefore the schema element to insert before + * + * @return a control that inserts the given node before the current node + */ + GraphQLSchemaTraversalControl insertBefore(T toInsertBefore); + +} diff --git 
a/src/main/java/graphql/schema/visitor/GraphQLSchemaVisitorEnvironmentImpl.java b/src/main/java/graphql/schema/visitor/GraphQLSchemaVisitorEnvironmentImpl.java new file mode 100644 index 0000000000..08880c0d49 --- /dev/null +++ b/src/main/java/graphql/schema/visitor/GraphQLSchemaVisitorEnvironmentImpl.java @@ -0,0 +1,99 @@ +package graphql.schema.visitor; + +import graphql.Internal; +import graphql.schema.GraphQLCodeRegistry; +import graphql.schema.GraphQLModifiedType; +import graphql.schema.GraphQLSchema; +import graphql.schema.GraphQLSchemaElement; +import graphql.util.TraverserContext; +import org.jetbrains.annotations.NotNull; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Predicate; + +import static graphql.schema.visitor.GraphQLSchemaTraversalControl.*; + +@Internal +class GraphQLSchemaVisitorEnvironmentImpl implements GraphQLSchemaVisitorEnvironment { + + protected final TraverserContext context; + + GraphQLSchemaVisitorEnvironmentImpl(TraverserContext context) { + this.context = context; + } + + @Override + public GraphQLSchema getSchema() { + return context.getVarFromParents(GraphQLSchema.class); + } + + @Override + public GraphQLCodeRegistry.Builder getCodeRegistry() { + return context.getVarFromParents(GraphQLCodeRegistry.Builder.class); + } + + @Override + public T getElement() { + //noinspection unchecked + return (T) context.thisNode(); + } + + + @Override + public List getLeadingElements() { + return buildParentsImpl(schemaElement -> true); + } + + @Override + public List getUnwrappedLeadingElements() { + return buildParentsImpl(schemaElement -> !(schemaElement instanceof GraphQLModifiedType)); + } + + @NotNull + private List buildParentsImpl(Predicate predicate) { + List list = new ArrayList<>(); + TraverserContext parentContext = context.getParentContext(); + while (parentContext != null) { + GraphQLSchemaElement parentNode = parentContext.thisNode(); + if (parentNode != null) { + if (predicate.test(parentNode)) { 
+ list.add(parentNode); + } + } + parentContext = parentContext.getParentContext(); + } + return list; + } + + @Override + public GraphQLSchemaTraversalControl ok() { + return CONTINUE; + } + + @Override + public GraphQLSchemaTraversalControl quit() { + return QUIT; + } + + + @Override + public GraphQLSchemaTraversalControl changeNode(T schemaElement) { + return new GraphQLSchemaTraversalControl(Control.CHANGE, schemaElement); + } + + @Override + public GraphQLSchemaTraversalControl deleteNode() { + return DELETE; + } + + @Override + public GraphQLSchemaTraversalControl insertAfter(T toInsertAfter) { + return new GraphQLSchemaTraversalControl(Control.INSERT_AFTER, toInsertAfter); + } + + @Override + public GraphQLSchemaTraversalControl insertBefore(T toInsertBefore) { + return new GraphQLSchemaTraversalControl(Control.INSERT_BEFORE, toInsertBefore); + } +} diff --git a/src/main/java/graphql/util/MutableRef.java b/src/main/java/graphql/util/MutableRef.java new file mode 100644 index 0000000000..d538f5eb95 --- /dev/null +++ b/src/main/java/graphql/util/MutableRef.java @@ -0,0 +1,15 @@ +package graphql.util; + +import graphql.Internal; + +/** + * This class is useful for creating a mutable reference to a variable that can be changed when you are in an + * effectively final bit of code. Its more performant than an {@link java.util.concurrent.atomic.AtomicReference} + * to gain mutability. Use this very carefully - Its not expected to be commonly used. + * + * @param for two + */ +@Internal +public class MutableRef { + public T value; +} diff --git a/src/main/resources/i18n/Parsing_de.properties b/src/main/resources/i18n/Parsing_de.properties index 9438eaf8aa..919e5fee9c 100644 --- a/src/main/resources/i18n/Parsing_de.properties +++ b/src/main/resources/i18n/Parsing_de.properties @@ -10,20 +10,17 @@ # REMEMBER - a single quote ' in MessageFormat means things that are never replaced within them # so use 2 '' characters to make it one ' on output. 
This will take for the form ''{0}'' # -# Prior to Java 9, properties files are encoded in ISO-8859-1. -# We have to use \u00fc instead of the German ue character, \u00e4 for ae, \u00f6 for oe, \u00df for ss -# -InvalidSyntax.noMessage=Ung\u00fcltige Syntax in Zeile {0} Spalte {1} -InvalidSyntax.full=Ung\u00fcltige Syntax, ANTLR-Fehler ''{0}'' in Zeile {1} Spalte {2} +InvalidSyntax.noMessage=Ungültige Syntax in Zeile {0} Spalte {1} +InvalidSyntax.full=Ungültige Syntax, ANTLR-Fehler ''{0}'' in Zeile {1} Spalte {2} -InvalidSyntaxBail.noToken=Ung\u00fcltige Syntax in Zeile {0} Spalte {1} -InvalidSyntaxBail.full=Ung\u00fcltige Syntax wegen des ung\u00fcltigen Tokens ''{0}'' in Zeile {1} Spalte {2} +InvalidSyntaxBail.noToken=Ungültige Syntax in Zeile {0} Spalte {1} +InvalidSyntaxBail.full=Ungültige Syntax wegen des ungültigen Tokens ''{0}'' in Zeile {1} Spalte {2} # -InvalidSyntaxMoreTokens.full=Es wurde eine ung\u00fcltige Syntax festgestellt. Es gibt zus\u00e4tzliche Token im Text, die nicht konsumiert wurden. Ung\u00fcltiges Token ''{0}'' in Zeile {1} Spalte {2} +InvalidSyntaxMoreTokens.full=Es wurde eine ungültige Syntax festgestellt. Es gibt zusätzliche Token im Text, die nicht konsumiert wurden. Ungültiges Token ''{0}'' in Zeile {1} Spalte {2} # -ParseCancelled.full=Es wurden mehr als {0} ''{1}'' Token pr\u00e4sentiert. Um Denial-of-Service-Angriffe zu verhindern, wurde das Parsing abgebrochen +ParseCancelled.full=Es wurden mehr als {0} ''{1}'' Token präsentiert. Um Denial-of-Service-Angriffe zu verhindern, wurde das Parsing abgebrochen # -InvalidUnicode.trailingLeadingSurrogate=Ung\u00fcltiger Unicode gefunden. Trailing surrogate muss ein leading surrogate vorangestellt werden. Ung\u00fcltiges Token ''{0}'' in Zeile {1} Spalte {2} -InvalidUnicode.leadingTrailingSurrogate=Ung\u00fcltiger Unicode gefunden. Auf ein leading surrogate muss ein trailing surrogate folgen. 
Ung\u00fcltiges Token ''{0}'' in Zeile {1} Spalte {2} -InvalidUnicode.invalidCodePoint=Ung\u00fcltiger Unicode gefunden. Kein g\u00fcltiger code point. Ung\u00fcltiges Token ''{0}'' in Zeile {1} Spalte {2} -InvalidUnicode.incorrectEscape=Ung\u00fcltiger Unicode gefunden. Falsch formatierte Escape-Sequenz. Ung\u00fcltiges Token ''{0}'' in Zeile {1} Spalte {2} \ No newline at end of file +InvalidUnicode.trailingLeadingSurrogate=Ungültiger Unicode gefunden. Trailing surrogate muss ein leading surrogate vorangestellt werden. Ungültiges Token ''{0}'' in Zeile {1} Spalte {2} +InvalidUnicode.leadingTrailingSurrogate=Ungültiger Unicode gefunden. Auf ein leading surrogate muss ein trailing surrogate folgen. Ungültiges Token ''{0}'' in Zeile {1} Spalte {2} +InvalidUnicode.invalidCodePoint=Ungültiger Unicode gefunden. Kein gültiger code point. Ungültiges Token ''{0}'' in Zeile {1} Spalte {2} +InvalidUnicode.incorrectEscape=Ungültiger Unicode gefunden. Falsch formatierte Escape-Sequenz. Ungültiges Token ''{0}'' in Zeile {1} Spalte {2} diff --git a/src/main/resources/i18n/Scalars.properties b/src/main/resources/i18n/Scalars.properties index 39fc6b4105..0897fe58eb 100644 --- a/src/main/resources/i18n/Scalars.properties +++ b/src/main/resources/i18n/Scalars.properties @@ -24,10 +24,6 @@ ID.unexpectedAstType=Expected an AST type of ''IntValue'' or ''StringValue'' but # Float.notFloat=Expected a value that can be converted to type ''Float'' but it was a ''{0}'' Float.unexpectedAstType=Expected an AST type of ''IntValue'' or ''FloatValue'' but it was a ''{0}'' -Float.unexpectedRawValueType=Expected a Number input, but it was a ''{0}'' # Boolean.notBoolean=Expected a value that can be converted to type ''Boolean'' but it was a ''{0}'' Boolean.unexpectedAstType=Expected an AST type of ''BooleanValue'' but it was a ''{0}'' -Boolean.unexpectedRawValueType=Expected a Boolean input, but it was a ''{0}'' -# -String.unexpectedRawValueType=Expected a String input, but it was a ''{0}'' diff 
--git a/src/main/resources/i18n/Scalars_de.properties b/src/main/resources/i18n/Scalars_de.properties index 8c86f64261..645ed5e7ed 100644 --- a/src/main/resources/i18n/Scalars_de.properties +++ b/src/main/resources/i18n/Scalars_de.properties @@ -10,14 +10,11 @@ # REMEMBER - a single quote ' in MessageFormat means things that are never replaced within them # so use 2 '' characters to make it one ' on output. This will take for the form ''{0}'' # -# Prior to Java 9, properties files are encoded in ISO-8859-1. -# We have to use \u00fc instead of the German ue character, \u00e4 for ae, \u00f6 for oe, \u00df for ss -# Scalar.unexpectedAstType=Erwartet wurde ein AST type von ''{0}'', aber es war ein ''{1}'' # -Enum.badInput=Ung\u00fcltige Eingabe f\u00fcr enum ''{0}''. Unbekannter Wert ''{1}'' -Enum.badName=Ung\u00fcltige Eingabe f\u00fcr enum ''{0}''. Kein Wert f\u00fcr den Namen ''{1}'' gefunden -Enum.unallowableValue=Literal nicht in den zul\u00e4ssigen Werten f\u00fcr enum ''{0}'' - ''{1}'' +Enum.badInput=Ungültige Eingabe für enum ''{0}''. Unbekannter Wert ''{1}'' +Enum.badName=Ungültige Eingabe für enum ''{0}''. 
Kein Wert für den Namen ''{1}'' gefunden +Enum.unallowableValue=Literal nicht in den zulässigen Werten für enum ''{0}'' - ''{1}'' # Int.notInt=Erwartet wurde ein Wert, der in den Typ ''Int'' konvertiert werden kann, aber es war ein ''{0}'' Int.outsideRange=Erwarteter Wert im Integer-Bereich, aber es war ein ''{0}'' @@ -27,10 +24,6 @@ ID.unexpectedAstType=Erwartet wurde ein AST type von ''IntValue'' oder ''StringV # Float.notFloat=Erwartet wurde ein Wert, der in den Typ ''Float'' konvertiert werden kann, aber es war ein ''{0}'' Float.unexpectedAstType=Erwartet wurde ein AST type von ''IntValue'' oder ''FloatValue'', aber es war ein ''{0}'' -Float.unexpectedRawValueType=Erwartet wurde eine Number-Eingabe, aber es war ein ''{0}'' # Boolean.notBoolean=Erwartet wurde ein Wert, der in den Typ ''Boolean'' konvertiert werden kann, aber es war ein ''{0}'' Boolean.unexpectedAstType=Erwartet wurde ein AST type ''BooleanValue'', aber es war ein ''{0}'' -Boolean.unexpectedRawValueType=Erwartet wurde eine Boolean-Eingabe, aber es war ein ''{0}'' -# -String.unexpectedRawValueType=Erwartet wurde eine String-Eingabe, aber es war ein ''{0}'' \ No newline at end of file diff --git a/src/main/resources/i18n/Validation_de.properties b/src/main/resources/i18n/Validation_de.properties index 603e931d07..def39f94ea 100644 --- a/src/main/resources/i18n/Validation_de.properties +++ b/src/main/resources/i18n/Validation_de.properties @@ -10,18 +10,15 @@ # REMEMBER - a single quote ' in MessageFormat means things that are never replaced within them # so use 2 '' characters to make it one ' on output. This will take for the form ''{0}'' # -# Prior to Java 9, properties files are encoded in ISO-8859-1. 
-# We have to use \u00fc instead of the German ue character, \u00e4 for ae, \u00f6 for oe, \u00df for ss -# -ExecutableDefinitions.notExecutableType=Validierungsfehler ({0}) : Type definition ''{1}'' ist nicht ausf\u00fchrbar -ExecutableDefinitions.notExecutableSchema=Validierungsfehler ({0}) : Schema definition ist nicht ausf\u00fchrbar -ExecutableDefinitions.notExecutableDirective=Validierungsfehler ({0}) : Directive definition ''{1}'' ist nicht ausf\u00fchrbar -ExecutableDefinitions.notExecutableDefinition=Validierungsfehler ({0}) : Die angegebene Definition ist nicht ausf\u00fchrbar +ExecutableDefinitions.notExecutableType=Validierungsfehler ({0}) : Type definition ''{1}'' ist nicht ausführbar +ExecutableDefinitions.notExecutableSchema=Validierungsfehler ({0}) : Schema definition ist nicht ausführbar +ExecutableDefinitions.notExecutableDirective=Validierungsfehler ({0}) : Directive definition ''{1}'' ist nicht ausführbar +ExecutableDefinitions.notExecutableDefinition=Validierungsfehler ({0}) : Die angegebene Definition ist nicht ausführbar # FieldsOnCorrectType.unknownField=Validierungsfehler ({0}) : Feld ''{1}'' vom Typ ''{2}'' ist nicht definiert # -FragmentsOnCompositeType.invalidInlineTypeCondition=Validierungsfehler ({0}) : Inline fragment type condition ist ung\u00fcltig, muss auf Object/Interface/Union stehen -FragmentsOnCompositeType.invalidFragmentTypeCondition=Validierungsfehler ({0}) : Fragment type condition ist ung\u00fcltig, muss auf Object/Interface/Union stehen +FragmentsOnCompositeType.invalidInlineTypeCondition=Validierungsfehler ({0}) : Inline fragment type condition ist ungültig, muss auf Object/Interface/Union stehen +FragmentsOnCompositeType.invalidFragmentTypeCondition=Validierungsfehler ({0}) : Fragment type condition ist ungültig, muss auf Object/Interface/Union stehen # KnownArgumentNames.unknownDirectiveArg=Validierungsfehler ({0}) : Unbekanntes directive argument ''{1}'' KnownArgumentNames.unknownFieldArg=Validierungsfehler ({0}) : 
Unbekanntes field argument ''{1}'' @@ -48,17 +45,17 @@ OverlappingFieldsCanBeMerged.differentFields=Validierungsfehler ({0}) : ''{1}'' OverlappingFieldsCanBeMerged.differentArgs=Validierungsfehler ({0}) : ''{1}'' : Felder haben unterschiedliche Argumente OverlappingFieldsCanBeMerged.differentNullability=Validierungsfehler ({0}) : ''{1}'' : Felder haben unterschiedliche nullability shapes OverlappingFieldsCanBeMerged.differentLists=Validierungsfehler ({0}) : ''{1}'' : Felder haben unterschiedliche list shapes -OverlappingFieldsCanBeMerged.differentReturnTypes=Validierungsfehler ({0}) : ''{1}'' : gibt verschiedene Typen ''{2}'' und ''{3}'' zur\u00fcck +OverlappingFieldsCanBeMerged.differentReturnTypes=Validierungsfehler ({0}) : ''{1}'' : gibt verschiedene Typen ''{2}'' und ''{3}'' zurück # -PossibleFragmentSpreads.inlineIncompatibleTypes=Validierungsfehler ({0}) : Fragment kann hier nicht verbreitet werden, da object vom Typ ''{1}'' niemals vom Typ ''{2}'' sein k\u00f6nnen -PossibleFragmentSpreads.fragmentIncompatibleTypes=Validierungsfehler ({0}) : Fragment ''{1}'' kann hier nicht verbreitet werden, da object vom Typ ''{2}'' niemals vom Typ ''{3}'' sein k\u00f6nnen +PossibleFragmentSpreads.inlineIncompatibleTypes=Validierungsfehler ({0}) : Fragment kann hier nicht verbreitet werden, da object vom Typ ''{1}'' niemals vom Typ ''{2}'' sein können +PossibleFragmentSpreads.fragmentIncompatibleTypes=Validierungsfehler ({0}) : Fragment ''{1}'' kann hier nicht verbreitet werden, da object vom Typ ''{2}'' niemals vom Typ ''{3}'' sein können # ProvidedNonNullArguments.missingFieldArg=Validierungsfehler ({0}) : Fehlendes field argument ''{1}'' ProvidedNonNullArguments.missingDirectiveArg=Validierungsfehler ({0}) : Fehlendes directive argument ''{1}'' -ProvidedNonNullArguments.nullValue=Validierungsfehler ({0}) : Nullwert f\u00fcr non-null field argument ''{1}'' +ProvidedNonNullArguments.nullValue=Validierungsfehler ({0}) : Nullwert für non-null field argument ''{1}'' # 
-ScalarLeaves.subselectionOnLeaf=Validierungsfehler ({0}) : Unterauswahl f\u00fcr Blatttyp ''{1}'' von Feld ''{2}'' nicht zul\u00e4ssig -ScalarLeaves.subselectionRequired=Validierungsfehler ({0}) : Unterauswahl erforderlich f\u00fcr Typ ''{1}'' des Feldes ''{2}'' +ScalarLeaves.subselectionOnLeaf=Validierungsfehler ({0}) : Unterauswahl für Blatttyp ''{1}'' von Feld ''{2}'' nicht zulässig +ScalarLeaves.subselectionRequired=Validierungsfehler ({0}) : Unterauswahl erforderlich für Typ ''{1}'' des Feldes ''{2}'' # SubscriptionUniqueRootField.multipleRootFields=Validierungsfehler ({0}) : Subscription operation ''{1}'' muss genau ein root field haben SubscriptionUniqueRootField.multipleRootFieldsWithFragment=Validierungsfehler ({0}) : Subscription operation ''{1}'' muss genau ein root field mit Fragmenten haben @@ -67,7 +64,7 @@ SubscriptionIntrospectionRootField.introspectionRootFieldWithFragment=Validierun # UniqueArgumentNames.uniqueArgument=Validierungsfehler ({0}) : Es kann nur ein Argument namens ''{1}'' geben # -UniqueDirectiveNamesPerLocation.uniqueDirectives=Validierungsfehler ({0}) : Nicht wiederholbare directive m\u00fcssen innerhalb einer Lokation eindeutig benannt werden. Directive ''{1}'', die auf einem ''{2}'' verwendet wird, ist nicht eindeutig +UniqueDirectiveNamesPerLocation.uniqueDirectives=Validierungsfehler ({0}) : Nicht wiederholbare directive müssen innerhalb einer Lokation eindeutig benannt werden. 
Directive ''{1}'', die auf einem ''{2}'' verwendet wird, ist nicht eindeutig # UniqueFragmentNames.oneFragment=Validierungsfehler ({0}) : Es kann nur ein Fragment namens ''{1}'' geben # @@ -75,28 +72,28 @@ UniqueOperationNames.oneOperation=Validierungsfehler ({0}) : Es kann nur eine Op # UniqueVariableNames.oneVariable=Validierungsfehler ({0}) : Es kann nur eine Variable namens ''{1}'' geben # -VariableDefaultValuesOfCorrectType.badDefault=Validierungsfehler ({0}) : Ung\u00fcltiger Standardwert ''{1}'' f\u00fcr Typ ''{2}'' +VariableDefaultValuesOfCorrectType.badDefault=Validierungsfehler ({0}) : Ungültiger Standardwert ''{1}'' für Typ ''{2}'' # VariablesAreInputTypes.wrongType=Validierungsfehler ({0}) : Eingabevariable ''{1}'' Typ ''{2}'' ist kein Eingabetyp # -VariableTypesMatchRule.unexpectedType=Validierungsfehler ({0}) : Der Variablentyp ''{1}'' stimmt nicht mit dem erwarteten Typ ''{2}'' \u00fcberein +VariableTypesMatchRule.unexpectedType=Validierungsfehler ({0}) : Der Variablentyp ''{1}'' stimmt nicht mit dem erwarteten Typ ''{2}'' überein # # These are used but IDEA cant find them easily as being called # # suppress inspection "UnusedProperty" ArgumentValidationUtil.handleNullError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' darf nicht null sein # suppress inspection "UnusedProperty" -ArgumentValidationUtil.handleScalarError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' ist kein g\u00fcltiges ''{3}'' +ArgumentValidationUtil.handleScalarError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' ist kein gültiges ''{3}'' # suppress inspection "UnusedProperty" -ArgumentValidationUtil.handleScalarErrorCustomMessage=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' ist kein g\u00fcltiges ''{3}'' - {4} +ArgumentValidationUtil.handleScalarErrorCustomMessage=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' ist kein gültiges ''{3}'' - {4} # suppress inspection "UnusedProperty" 
-ArgumentValidationUtil.handleEnumError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' ist kein g\u00fcltiges ''{3}'' +ArgumentValidationUtil.handleEnumError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' ist kein gültiges ''{3}'' # suppress inspection "UnusedProperty" -ArgumentValidationUtil.handleEnumErrorCustomMessage=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' ist kein g\u00fcltiges ''{3}'' - {4} +ArgumentValidationUtil.handleEnumErrorCustomMessage=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' ist kein gültiges ''{3}'' - {4} # suppress inspection "UnusedProperty" ArgumentValidationUtil.handleNotObjectError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' muss ein object type sein # suppress inspection "UnusedProperty" ArgumentValidationUtil.handleMissingFieldsError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' fehlen Pflichtfelder ''{3}'' # suppress inspection "UnusedProperty" -ArgumentValidationUtil.handleExtraFieldError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' enth\u00e4lt ein Feld nicht in ''{3}'': ''{4}'' -# \ No newline at end of file +ArgumentValidationUtil.handleExtraFieldError=Validierungsfehler ({0}) : Argument ''{1}'' mit Wert ''{2}'' enthält ein Feld nicht in ''{3}'': ''{4}'' +# diff --git a/src/test/groovy/example/http/ExecutionResultJSONTesting.java b/src/test/groovy/example/http/ExecutionResultJSONTesting.java index 74f5328a0d..965ac68e4b 100644 --- a/src/test/groovy/example/http/ExecutionResultJSONTesting.java +++ b/src/test/groovy/example/http/ExecutionResultJSONTesting.java @@ -20,7 +20,7 @@ import graphql.validation.ValidationError; import graphql.validation.ValidationErrorType; -import javax.servlet.http.HttpServletResponse; +import jakarta.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.ArrayList; import java.util.List; diff --git a/src/test/groovy/example/http/HttpMain.java 
b/src/test/groovy/example/http/HttpMain.java index e1c001ce87..4f3b7c8936 100644 --- a/src/test/groovy/example/http/HttpMain.java +++ b/src/test/groovy/example/http/HttpMain.java @@ -27,9 +27,9 @@ import org.eclipse.jetty.server.handler.HandlerList; import org.eclipse.jetty.server.handler.ResourceHandler; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; +import jakarta.servlet.ServletException; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; diff --git a/src/test/groovy/example/http/JsonKit.java b/src/test/groovy/example/http/JsonKit.java index 7d30ac92d2..822aaa425d 100644 --- a/src/test/groovy/example/http/JsonKit.java +++ b/src/test/groovy/example/http/JsonKit.java @@ -3,8 +3,8 @@ import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.reflect.TypeToken; +import jakarta.servlet.http.HttpServletResponse; -import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.Collections; import java.util.Map; diff --git a/src/test/groovy/example/http/QueryParameters.java b/src/test/groovy/example/http/QueryParameters.java index ca22ac6b81..7855e9d316 100644 --- a/src/test/groovy/example/http/QueryParameters.java +++ b/src/test/groovy/example/http/QueryParameters.java @@ -1,6 +1,6 @@ package example.http; -import javax.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletRequest; import java.io.BufferedReader; import java.io.IOException; import java.util.Collections; diff --git a/src/test/groovy/graphql/ScalarsBooleanTest.groovy b/src/test/groovy/graphql/ScalarsBooleanTest.groovy index 5b316765ad..a045d1249d 100644 --- a/src/test/groovy/graphql/ScalarsBooleanTest.groovy +++ b/src/test/groovy/graphql/ScalarsBooleanTest.groovy @@ -131,6 +131,27 @@ class ScalarsBooleanTest 
extends Specification { false | false } + @Unroll + def "parseValue parses non-Boolean input #value"() { + expect: + Scalars.GraphQLBoolean.getCoercing().parseValue(value, GraphQLContext.default, Locale.default) == result + + where: + value | result + true | true + "false" | false + "true" | true + "True" | true + 0 | false + 1 | true + -1 | true + new Long(42345784398534785l) | true + new Double(42.3) | true + new Float(42.3) | true + Integer.MAX_VALUE + 1l | true + Integer.MIN_VALUE - 1l | true + } + @Unroll def "parseValue throws exception for invalid input #value"() { when: @@ -141,17 +162,6 @@ class ScalarsBooleanTest extends Specification { where: value | _ new Object() | _ - "false" | _ - "true" | _ - "True" | _ - 0 | _ - 1 | _ - -1 | _ - new Long(42345784398534785l) | _ - new Double(42.3) | _ - new Float(42.3) | _ - Integer.MAX_VALUE + 1l | _ - Integer.MIN_VALUE - 1l | _ } } diff --git a/src/test/groovy/graphql/ScalarsFloatTest.groovy b/src/test/groovy/graphql/ScalarsFloatTest.groovy index 6f6a195d65..18846f788e 100644 --- a/src/test/groovy/graphql/ScalarsFloatTest.groovy +++ b/src/test/groovy/graphql/ScalarsFloatTest.groovy @@ -149,6 +149,9 @@ class ScalarsFloatTest extends Specification { new AtomicInteger(42) | 42 Double.MAX_VALUE | Double.MAX_VALUE Double.MIN_VALUE | Double.MIN_VALUE + "42" | 42d + "42.123" | 42.123d + "-1" | -1 } @Unroll @@ -171,6 +174,9 @@ class ScalarsFloatTest extends Specification { new AtomicInteger(42) | 42 Double.MAX_VALUE | Double.MAX_VALUE Double.MIN_VALUE | Double.MIN_VALUE + "42" | 42d + "42.123" | 42.123d + "-1" | -1 } @@ -197,9 +203,6 @@ class ScalarsFloatTest extends Specification { Float.POSITIVE_INFINITY.toString() | _ Float.NEGATIVE_INFINITY | _ Float.NEGATIVE_INFINITY.toString() | _ - "42" | _ - "42.123" | _ - "-1" | _ } } diff --git a/src/test/groovy/graphql/ScalarsIntTest.groovy b/src/test/groovy/graphql/ScalarsIntTest.groovy index 7a5b43ed9e..a38de1a49f 100644 --- a/src/test/groovy/graphql/ScalarsIntTest.groovy +++ 
b/src/test/groovy/graphql/ScalarsIntTest.groovy @@ -137,12 +137,15 @@ class ScalarsIntTest extends Specification { new Short("42") | 42 1234567l | 1234567 new AtomicInteger(42) | 42 - Integer.MAX_VALUE | Integer.MAX_VALUE - Integer.MIN_VALUE | Integer.MIN_VALUE 42.0000d | 42 new BigDecimal("42") | 42 42.0f | 42 42.0d | 42 + Integer.MAX_VALUE | Integer.MAX_VALUE + Integer.MIN_VALUE | Integer.MIN_VALUE + "42" | 42 + "42.0000" | 42 + "-1" | -1 } @Unroll @@ -152,18 +155,21 @@ class ScalarsIntTest extends Specification { where: value | result - 42.0000d | 42 new Integer(42) | 42 new BigInteger("42") | 42 - new BigDecimal("42") | 42 - 42.0f | 42 - 42.0d | 42 new Byte("42") | 42 new Short("42") | 42 1234567l | 1234567 new AtomicInteger(42) | 42 + 42.0000d | 42 + new BigDecimal("42") | 42 + 42.0f | 42 + 42.0d | 42 Integer.MAX_VALUE | Integer.MAX_VALUE Integer.MIN_VALUE | Integer.MIN_VALUE + "42" | 42 + "42.0000" | 42 + "-1" | -1 } @Unroll @@ -184,9 +190,6 @@ class ScalarsIntTest extends Specification { Integer.MAX_VALUE + 1l | _ Integer.MIN_VALUE - 1l | _ new Object() | _ - "42" | _ - "42.0000" | _ - "-1" | _ } } diff --git a/src/test/groovy/graphql/ScalarsStringTest.groovy b/src/test/groovy/graphql/ScalarsStringTest.groovy index 8a725eb122..536dd07216 100644 --- a/src/test/groovy/graphql/ScalarsStringTest.groovy +++ b/src/test/groovy/graphql/ScalarsStringTest.groovy @@ -4,7 +4,6 @@ import graphql.execution.CoercedVariables import graphql.language.BooleanValue import graphql.language.StringValue import graphql.schema.CoercingParseLiteralException -import graphql.schema.CoercingParseValueException import spock.lang.Shared import spock.lang.Specification import spock.lang.Unroll @@ -86,24 +85,15 @@ class ScalarsStringTest extends Specification { } @Unroll - def "String parseValue throws exception for non-String values"() { - when: - Scalars.GraphQLString.getCoercing().parseValue(literal, GraphQLContext.default, Locale.default) - then: - def ex = 
thrown(CoercingParseValueException) + def "String parseValue can parse non-String values"() { + expect: + Scalars.GraphQLString.getCoercing().parseValue(value, GraphQLContext.default, Locale.default) == result where: - literal | _ - 123 | _ - true | _ - customObject | _ + value | result + 123 | "123" + true | "true" + customObject | "foo" } - def "String parseValue English exception message"() { - when: - Scalars.GraphQLString.getCoercing().parseValue(9001, GraphQLContext.default, Locale.ENGLISH) - then: - def ex = thrown(CoercingParseValueException) - ex.message == "Expected a String input, but it was a 'Integer'" - } } diff --git a/src/test/groovy/graphql/analysis/QueryComplexityCalculatorTest.groovy b/src/test/groovy/graphql/analysis/QueryComplexityCalculatorTest.groovy new file mode 100644 index 0000000000..465c4de598 --- /dev/null +++ b/src/test/groovy/graphql/analysis/QueryComplexityCalculatorTest.groovy @@ -0,0 +1,52 @@ +package graphql.analysis + + +import graphql.TestUtil +import graphql.execution.CoercedVariables +import graphql.language.Document +import graphql.parser.Parser +import spock.lang.Specification + +class QueryComplexityCalculatorTest extends Specification { + + Document createQuery(String query) { + Parser parser = new Parser() + parser.parseDocument(query) + } + + def "can calculate complexity"() { + given: + def schema = TestUtil.schema(""" + type Query{ + foo: Foo + bar: String + } + type Foo { + scalar: String + foo: Foo + } + """) + def query = createQuery(""" + query q { + f2: foo {scalar foo{scalar}} + f1: foo { foo {foo {foo {foo{foo{scalar}}}}}} } + """) + + + when: + FieldComplexityCalculator fieldComplexityCalculator = new FieldComplexityCalculator() { + @Override + int calculate(FieldComplexityEnvironment environment, int childComplexity) { + return environment.getField().name.startsWith("foo") ?
10 : 1 + } + } + QueryComplexityCalculator calculator = QueryComplexityCalculator.newCalculator() + .fieldComplexityCalculator(fieldComplexityCalculator).schema(schema).document(query).variables(CoercedVariables.emptyVariables()) + .build() + def complexityScore = calculator.calculate() + then: + complexityScore == 20 + + + } +} diff --git a/src/test/groovy/graphql/analysis/values/ValueTraverserTest.groovy b/src/test/groovy/graphql/analysis/values/ValueTraverserTest.groovy index dabb380e53..8d524f8bcc 100644 --- a/src/test/groovy/graphql/analysis/values/ValueTraverserTest.groovy +++ b/src/test/groovy/graphql/analysis/values/ValueTraverserTest.groovy @@ -17,6 +17,7 @@ import graphql.schema.GraphQLInputObjectType import graphql.schema.GraphQLInputSchemaElement import graphql.schema.GraphQLList import graphql.schema.GraphQLNamedSchemaElement +import graphql.schema.GraphQLObjectType import graphql.schema.GraphQLScalarType import graphql.schema.idl.SchemaDirectiveWiring import graphql.schema.idl.SchemaDirectiveWiringEnvironment @@ -861,8 +862,12 @@ type Profile { GraphQLFieldDefinition onField(SchemaDirectiveWiringEnvironment env) { GraphQLFieldsContainer fieldsContainer = env.getFieldsContainer() GraphQLFieldDefinition fieldDefinition = env.getFieldDefinition() + if (! 
fieldsContainer instanceof GraphQLObjectType) { + return fieldDefinition + } + GraphQLObjectType containingObjectType = env.getFieldsContainer() as GraphQLObjectType - final DataFetcher originalDF = env.getCodeRegistry().getDataFetcher(fieldsContainer, fieldDefinition) + final DataFetcher originalDF = env.getCodeRegistry().getDataFetcher(containingObjectType, fieldDefinition) final DataFetcher newDF = { DataFetchingEnvironment originalEnv -> ValueVisitor visitor = new ValueVisitor() { @Override @@ -884,7 +889,7 @@ type Profile { return originalDF.get(newEnv); } - env.getCodeRegistry().dataFetcher(fieldsContainer, fieldDefinition, newDF) + env.getCodeRegistry().dataFetcher(containingObjectType, fieldDefinition, newDF) return fieldDefinition } diff --git a/src/test/groovy/graphql/cachecontrol/CacheControlTest.groovy b/src/test/groovy/graphql/cachecontrol/CacheControlTest.groovy deleted file mode 100644 index c05482e3c9..0000000000 --- a/src/test/groovy/graphql/cachecontrol/CacheControlTest.groovy +++ /dev/null @@ -1,186 +0,0 @@ -package graphql.cachecontrol - -import graphql.ExecutionInput -import graphql.ExecutionResult -import graphql.GraphQLContext -import graphql.TestUtil -import graphql.execution.CoercedVariables -import graphql.execution.ExecutionContextBuilder -import graphql.execution.ExecutionId -import graphql.execution.ExecutionStrategy -import graphql.execution.ResultPath -import graphql.execution.instrumentation.Instrumentation -import graphql.language.Document -import graphql.language.FragmentDefinition -import graphql.language.OperationDefinition -import graphql.parser.Parser -import graphql.schema.DataFetcher -import graphql.schema.GraphQLSchema -import org.dataloader.DataLoaderRegistry -import spock.lang.Specification - -class CacheControlTest extends Specification { - // All tests in this file will be deleted when CacheControl code is removed. 
- - def "can build up hints when there is no extensions present"() { - def cc = CacheControl.newCacheControl() - cc.hint(ResultPath.parse("/hint/99"), 99) - cc.hint(ResultPath.parse("/hint/66"), 66) - cc.hint(ResultPath.parse("/hint/33/private"), 33, CacheControl.Scope.PRIVATE) - cc.hint(ResultPath.parse("/hint/private"), CacheControl.Scope.PRIVATE) - - def er = ExecutionResult.newExecutionResult().data("data").build() - - when: - def newER = cc.addTo(er) - then: - newER.data == "data" // left alone - newER.extensions == [ - cacheControl: [ - version: 1, - hints : [ - [path: ["hint", "99"], maxAge: 99, scope: "PUBLIC"], - [path: ["hint", "66"], maxAge: 66, scope: "PUBLIC"], - [path: ["hint", "33", "private"], maxAge: 33, scope: "PRIVATE"], - [path: ["hint", "private"], scope: "PRIVATE"], - ] - ] - ] - - } - - def "can build up hints when extensions are present"() { - def cc = CacheControl.newCacheControl() - cc.hint(ResultPath.parse("/hint/99"), 99) - cc.hint(ResultPath.parse("/hint/66"), 66) - - def startingExtensions = ["someExistingExt": "data"] - - def er = ExecutionResult.newExecutionResult().data("data").extensions(startingExtensions).build() - - when: - def newER = cc.addTo(er) - then: - newER.data == "data" // left alone - newER.extensions.size() == 2 - newER.extensions["someExistingExt"] == "data" - newER.extensions["cacheControl"] == [ - version: 1, - hints : [ - [path: ["hint", "99"], maxAge: 99, scope: "PUBLIC"], - [path: ["hint", "66"], maxAge: 66, scope: "PUBLIC"], - ] - ] - } - - def "integration test of cache control"() { - def sdl = ''' - type Query { - levelA : LevelB - } - - type LevelB { - levelB : LevelC - } - - type LevelC { - levelC : String - } - ''' - - DataFetcher dfA = { env -> - CacheControl cc = env.getGraphQlContext().get("cacheControl") - cc.hint(env, 100) - } as DataFetcher - DataFetcher dfB = { env -> - CacheControl cc = env.getGraphQlContext().get("cacheControl") - cc.hint(env, 999) - } as DataFetcher - - DataFetcher dfC = { env -> 
- CacheControl cc = env.getGraphQlContext().get("cacheControl") - cc.hint(env, CacheControl.Scope.PRIVATE) - } as DataFetcher - - def graphQL = TestUtil.graphQL(sdl, [ - Query : [levelA: dfA,], - LevelB: [levelB: dfB], - LevelC: [levelC: dfC] - ]).build() - - def cacheControl = CacheControl.newCacheControl() - when: - ExecutionInput ei = ExecutionInput.newExecutionInput(' { levelA { levelB { levelC } } }') - .graphQLContext(["cacheControl": cacheControl]) - .build() - def er = graphQL.execute(ei) - er = cacheControl.addTo(er) - then: - er.errors.isEmpty() - er.extensions == [ - cacheControl: [ - version: 1, - hints : [ - [path: ["levelA"], maxAge: 100, scope: "PUBLIC"], - [path: ["levelA", "levelB"], maxAge: 999, scope: "PUBLIC"], - [path: ["levelA", "levelB", "levelC"], scope: "PRIVATE"], - ] - ] - ] - } - - def "transform works and copies values with cache control"() { - // Retain this ExecutionContext CacheControl test for coverage - given: - def cacheControl = CacheControl.newCacheControl() - def oldCoercedVariables = CoercedVariables.emptyVariables() - Instrumentation instrumentation = Mock(Instrumentation) - ExecutionStrategy queryStrategy = Mock(ExecutionStrategy) - ExecutionStrategy mutationStrategy = Mock(ExecutionStrategy) - ExecutionStrategy subscriptionStrategy = Mock(ExecutionStrategy) - GraphQLSchema schema = Mock(GraphQLSchema) - def executionId = ExecutionId.generate() - def graphQLContext = GraphQLContext.newContext().build() - def root = "root" - Document document = new Parser().parseDocument("query myQuery(\$var: String){...MyFragment} fragment MyFragment on Query{foo}") - def operation = document.definitions[0] as OperationDefinition - def fragment = document.definitions[1] as FragmentDefinition - def dataLoaderRegistry = new DataLoaderRegistry() - - def executionContextOld = new ExecutionContextBuilder() - .cacheControl(cacheControl) - .executionId(executionId) - .instrumentation(instrumentation) - .graphQLSchema(schema) - 
.queryStrategy(queryStrategy) - .mutationStrategy(mutationStrategy) - .subscriptionStrategy(subscriptionStrategy) - .root(root) - .graphQLContext(graphQLContext) - .coercedVariables(oldCoercedVariables) - .fragmentsByName([MyFragment: fragment]) - .operationDefinition(operation) - .dataLoaderRegistry(dataLoaderRegistry) - .build() - - when: - def coercedVariables = CoercedVariables.of([var: 'value']) - def executionContext = executionContextOld.transform(builder -> builder - .coercedVariables(coercedVariables)) - - then: - executionContext.cacheControl == cacheControl - executionContext.executionId == executionId - executionContext.instrumentation == instrumentation - executionContext.graphQLSchema == schema - executionContext.queryStrategy == queryStrategy - executionContext.mutationStrategy == mutationStrategy - executionContext.subscriptionStrategy == subscriptionStrategy - executionContext.root == root - executionContext.graphQLContext == graphQLContext - executionContext.coercedVariables == coercedVariables - executionContext.getFragmentsByName() == [MyFragment: fragment] - executionContext.operationDefinition == operation - executionContext.dataLoaderRegistry == dataLoaderRegistry - } -} diff --git a/src/test/groovy/graphql/execution/AbortExecutionExceptionTest.groovy b/src/test/groovy/graphql/execution/AbortExecutionExceptionTest.groovy index 770a894114..da2c9b8cc4 100644 --- a/src/test/groovy/graphql/execution/AbortExecutionExceptionTest.groovy +++ b/src/test/groovy/graphql/execution/AbortExecutionExceptionTest.groovy @@ -1,10 +1,24 @@ package graphql.execution import graphql.ErrorType +import graphql.ExecutionInput +import graphql.ExecutionResult +import graphql.GraphQL import graphql.GraphQLError +import graphql.TestUtil +import graphql.execution.instrumentation.Instrumentation +import graphql.execution.instrumentation.InstrumentationContext +import graphql.execution.instrumentation.InstrumentationState +import 
graphql.execution.instrumentation.SimplePerformantInstrumentation +import graphql.execution.instrumentation.parameters.InstrumentationExecuteOperationParameters +import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters +import graphql.execution.instrumentation.parameters.InstrumentationValidationParameters import graphql.language.SourceLocation +import graphql.validation.ValidationError import spock.lang.Specification +import java.util.concurrent.CompletableFuture + class AbortExecutionExceptionTest extends Specification { class BasicError implements GraphQLError { @@ -35,10 +49,73 @@ class AbortExecutionExceptionTest extends Specification { e.toExecutionResult().getErrors()[0].message == "No underlying errors" when: - e = new AbortExecutionException([new BasicError(message:"UnderlyingA"), new BasicError(message:"UnderlyingB")]) + e = new AbortExecutionException([new BasicError(message: "UnderlyingA"), new BasicError(message: "UnderlyingB")]) then: e.toExecutionResult().getErrors().size() == 2 e.toExecutionResult().getErrors()[0].message == "UnderlyingA" e.toExecutionResult().getErrors()[1].message == "UnderlyingB" } + + def "will call instrumentation.instrumentExecutionResult() at the end"() { + def sdl = """ + type Query { + q : Q + } + + type Q { + name : String + } + """ + + + def schema = TestUtil.schema(sdl) + + def throwOnEarlyPhase = true + Instrumentation instrumentation = new SimplePerformantInstrumentation() { + @Override + InstrumentationContext> beginValidation(InstrumentationValidationParameters parameters, InstrumentationState state) { + if (throwOnEarlyPhase) { + throw new AbortExecutionException("early") + } + return super.beginValidation(parameters, state) + } + + @Override + InstrumentationContext beginExecuteOperation(InstrumentationExecuteOperationParameters parameters, InstrumentationState state) { + if (!throwOnEarlyPhase) { + throw new AbortExecutionException("later") + } + return 
super.beginExecuteOperation(parameters, state) + } + + @Override + CompletableFuture instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters, InstrumentationState state) { + def newER = executionResult.transform { it.extensions([extra: "extensions"]) } + return CompletableFuture.completedFuture(newER) + } + } + def graphQL = GraphQL.newGraphQL(schema).instrumentation(instrumentation).build() + + + def executionInput = ExecutionInput.newExecutionInput("query q { q {name}}") + .root([q: [name: "nameV"]]) + .build() + + when: + def er = graphQL.execute(executionInput) + + then: + !er.errors.isEmpty() + er.errors[0].message == "early" + er.extensions == [extra: "extensions"] + + when: + throwOnEarlyPhase = false + er = graphQL.execute(executionInput) + + then: + !er.errors.isEmpty() + er.errors[0].message == "later" + er.extensions == [extra: "extensions"] + } } diff --git a/src/test/groovy/graphql/execution/AsyncTest.groovy b/src/test/groovy/graphql/execution/AsyncTest.groovy index c79f483b6d..e124f00220 100644 --- a/src/test/groovy/graphql/execution/AsyncTest.groovy +++ b/src/test/groovy/graphql/execution/AsyncTest.groovy @@ -4,6 +4,7 @@ import spock.lang.Specification import java.util.concurrent.CompletableFuture import java.util.concurrent.CompletionException +import java.util.function.Function import java.util.function.BiFunction import static java.util.concurrent.CompletableFuture.completedFuture @@ -13,7 +14,7 @@ class AsyncTest extends Specification { def "eachSequentially test"() { given: def input = ['a', 'b', 'c'] - def cfFactory = Mock(Async.CFFactory) + def cfFactory = Mock(BiFunction) def cf1 = new CompletableFuture() def cf2 = new CompletableFuture() def cf3 = new CompletableFuture() @@ -23,21 +24,21 @@ class AsyncTest extends Specification { then: !result.isDone() - 1 * cfFactory.apply('a', 0, []) >> cf1 + 1 * cfFactory.apply('a', []) >> cf1 when: cf1.complete('x') then: !result.isDone() - 1 * 
cfFactory.apply('b', 1, ['x']) >> cf2 + 1 * cfFactory.apply('b', ['x']) >> cf2 when: cf2.complete('y') then: !result.isDone() - 1 * cfFactory.apply('c', 2, ['x', 'y']) >> cf3 + 1 * cfFactory.apply('c', ['x', 'y']) >> cf3 when: cf3.complete('z') @@ -50,9 +51,9 @@ class AsyncTest extends Specification { def "eachSequentially propagates exception"() { given: def input = ['a', 'b', 'c'] - def cfFactory = Mock(Async.CFFactory) - cfFactory.apply('a', 0, _) >> completedFuture("x") - cfFactory.apply('b', 1, _) >> { + def cfFactory = Mock(BiFunction) + cfFactory.apply('a', _) >> completedFuture("x") + cfFactory.apply('b', _) >> { def cf = new CompletableFuture<>() cf.completeExceptionally(new RuntimeException("some error")) cf @@ -74,9 +75,9 @@ class AsyncTest extends Specification { def "eachSequentially catches factory exception"() { given: def input = ['a', 'b', 'c'] - def cfFactory = Mock(Async.CFFactory) - cfFactory.apply('a', 0, _) >> completedFuture("x") - cfFactory.apply('b', 1, _) >> { throw new RuntimeException("some error") } + def cfFactory = Mock(BiFunction) + cfFactory.apply('a', _) >> completedFuture("x") + cfFactory.apply('b', _) >> { throw new RuntimeException("some error") } when: def result = Async.eachSequentially(input, cfFactory) @@ -94,10 +95,10 @@ class AsyncTest extends Specification { def "each works for mapping function"() { given: def input = ['a', 'b', 'c'] - def cfFactory = Mock(BiFunction) - cfFactory.apply('a', 0) >> completedFuture('x') - cfFactory.apply('b', 1) >> completedFuture('y') - cfFactory.apply('c', 2) >> completedFuture('z') + def cfFactory = Mock(Function) + cfFactory.apply('a') >> completedFuture('x') + cfFactory.apply('b') >> completedFuture('y') + cfFactory.apply('c') >> completedFuture('z') when: @@ -111,16 +112,15 @@ class AsyncTest extends Specification { def "each with mapping function propagates factory exception"() { given: def input = ['a', 'b', 'c'] - def cfFactory = Mock(BiFunction) - + def cfFactory = Mock(Function) 
when: def result = Async.each(input, cfFactory) then: - 1 * cfFactory.apply('a', 0) >> completedFuture('x') - 1 * cfFactory.apply('b', 1) >> { throw new RuntimeException('some error') } - 1 * cfFactory.apply('c', 2) >> completedFuture('z') + 1 * cfFactory.apply('a') >> completedFuture('x') + 1 * cfFactory.apply('b') >> { throw new RuntimeException('some error') } + 1 * cfFactory.apply('c') >> completedFuture('z') result.isCompletedExceptionally() Throwable exception result.exceptionally({ e -> diff --git a/src/test/groovy/graphql/execution/ConditionalNodesTest.groovy b/src/test/groovy/graphql/execution/ConditionalNodesTest.groovy index 7c76600727..629f1fde98 100644 --- a/src/test/groovy/graphql/execution/ConditionalNodesTest.groovy +++ b/src/test/groovy/graphql/execution/ConditionalNodesTest.groovy @@ -1,9 +1,18 @@ package graphql.execution - +import graphql.ExecutionInput +import graphql.GraphQLContext +import graphql.TestUtil +import graphql.execution.conditional.ConditionalNodeDecision +import graphql.execution.conditional.ConditionalNodeDecisionEnvironment +import graphql.execution.conditional.ConditionalNodes import graphql.language.Argument import graphql.language.BooleanValue import graphql.language.Directive +import graphql.language.Field +import graphql.language.NodeUtil +import graphql.schema.DataFetcher +import graphql.schema.DataFetchingEnvironment import spock.lang.Specification class ConditionalNodesTest extends Specification { @@ -13,11 +22,43 @@ class ConditionalNodesTest extends Specification { def variables = new LinkedHashMap() ConditionalNodes conditionalNodes = new ConditionalNodes() - def argument = Argument.newArgument("if", new BooleanValue(true)).build() - def directives = [Directive.newDirective().name("skip").arguments([argument]).build()] + def directives = directive("skip", ifArg(true)) + + expect: + !conditionalNodes.shouldInclude(mkField(directives), variables, null, GraphQLContext.getDefault()) + } + + def "should include true for 
skip = false"() { + given: + def variables = new LinkedHashMap() + ConditionalNodes conditionalNodes = new ConditionalNodes() + + def directives = directive("skip", ifArg(false)) expect: - !conditionalNodes.shouldInclude(variables, directives) + conditionalNodes.shouldInclude(mkField(directives), variables, null, GraphQLContext.getDefault()) + } + + def "should include false for include = false"() { + given: + def variables = new LinkedHashMap() + ConditionalNodes conditionalNodes = new ConditionalNodes() + + def directives = directive("include", ifArg(false)) + + expect: + !conditionalNodes.shouldInclude(mkField(directives), variables, null, GraphQLContext.getDefault()) + } + + def "should include true for include = true"() { + given: + def variables = new LinkedHashMap() + ConditionalNodes conditionalNodes = new ConditionalNodes() + + def directives = directive("include", ifArg(true)) + + expect: + conditionalNodes.shouldInclude(mkField(directives), variables, null, GraphQLContext.getDefault()) } def "no directives means include"() { @@ -26,6 +67,133 @@ class ConditionalNodesTest extends Specification { ConditionalNodes conditionalNodes = new ConditionalNodes() expect: - conditionalNodes.shouldInclude(variables, []) + conditionalNodes.shouldInclude(mkField([]), variables, null, GraphQLContext.getDefault()) + } + + + def "allows a custom implementation to check conditional nodes"() { + given: + def variables = ["x": "y"] + def graphQLSchema = TestUtil.schema("type Query { f : String} ") + ConditionalNodes conditionalNodes = new ConditionalNodes() + + def graphQLContext = GraphQLContext.getDefault() + + def directives = directive("featureFlag", ifArg(true)) + def field = mkField(directives) + + def called = false + ConditionalNodeDecision conditionalDecision = new ConditionalNodeDecision() { + @Override + boolean shouldInclude(ConditionalNodeDecisionEnvironment env) { + called = true + assert env.variables.toMap() == variables + assert env.directivesContainer == 
field + assert env.graphQlSchema == graphQLSchema + assert env.graphQLContext.get("assert") != null + return false + } + } + graphQLContext.put(ConditionalNodeDecision.class, conditionalDecision) + graphQLContext.put("assert", true) + expect: + + !conditionalNodes.shouldInclude(field, variables, graphQLSchema, graphQLContext) + called == true + } + + def "integration test showing conditional nodes can be custom included"() { + + def sdl = """ + + directive @featureFlag(flagName: String!) repeatable on FIELD + + type Query { + in : String + out : String + } + """ + DataFetcher df = { DataFetchingEnvironment env -> env.getFieldDefinition().name } + def graphQL = TestUtil.graphQL(sdl, [Query: ["in": df, "out": df]]).build() + ConditionalNodeDecision customDecision = new ConditionalNodeDecision() { + @Override + boolean shouldInclude(ConditionalNodeDecisionEnvironment env) { + + Directive foundDirective = NodeUtil.findNodeByName(env.getDirectives(), "featureFlag") + if (foundDirective != null) { + + def arguments = env.getGraphQlSchema().getDirective("featureFlag") + .getArguments() + Map argumentValues = ValuesResolver.getArgumentValues( + arguments, foundDirective.getArguments(), + env.variables, env.graphQLContext, Locale.getDefault()) + Object flagName = argumentValues.get("flagName") + return String.valueOf(flagName) == "ON" + } + return true + } + } + + def contextMap = [:] + contextMap.put(ConditionalNodeDecision.class, customDecision) + + when: + def ei = ExecutionInput.newExecutionInput() + .graphQLContext(contextMap) + .query(""" + query q { + in + out @featureFlag(flagName : "OFF") + } + """ + ).build() + def er = graphQL.execute(ei) + + then: + er["data"] == ["in": "in"] + + when: + ei = ExecutionInput.newExecutionInput() + .graphQLContext(contextMap) + .query(""" + query q { + in + out @featureFlag(flagName : "ON") + } + """ + ).build() + er = graphQL.execute(ei) + + then: + er["data"] == ["in": "in", "out": "out"] + + when: + ei = 
ExecutionInput.newExecutionInput() + .graphQLContext(contextMap) + .query(''' + query vars_should_work($v : String!) { + in + out @featureFlag(flagName : $v) + } + ''' + ) + .variables([v: "ON"]) + .build() + er = graphQL.execute(ei) + + then: + er["data"] == ["in": "in", "out": "out"] + } + + private ArrayList directive(String name, Argument argument) { + [Directive.newDirective().name(name).arguments([argument]).build()] + } + + private Argument ifArg(Boolean b) { + Argument.newArgument("if", new BooleanValue(b)).build() + } + + Field mkField(List directives) { + return Field.newField("name").directives(directives).build() } } diff --git a/src/test/groovy/graphql/execution/DataFetcherResultTest.groovy b/src/test/groovy/graphql/execution/DataFetcherResultTest.groovy index 3b91a9d62e..a7bc5b7fa7 100644 --- a/src/test/groovy/graphql/execution/DataFetcherResultTest.groovy +++ b/src/test/groovy/graphql/execution/DataFetcherResultTest.groovy @@ -55,14 +55,56 @@ class DataFetcherResultTest extends Specification { !result.hasErrors() } - def "transforming"() { + def "can set extensions"() { + + when: + def dfr = DataFetcherResult.newResult() + .extensions([x: "y"]).build() + + then: + dfr.getExtensions() == [x : "y"] + + when: + dfr = DataFetcherResult.newResult() + .data("x") + .build() + + then: + dfr.getExtensions() == null + + } + + def "mapping works"() { when: def original = DataFetcherResult.newResult().data("hello") - .errors([error1]).localContext("world").build() + .errors([error1]).localContext("world") + .extensions([x: "y"]).build() + def result = original.map({ data -> data.length() }) + then: + result.getData() == 5 + result.getLocalContext() == "world" + result.getExtensions() == [x: "y"] + result.getErrors() == [error1] + } + + def "transforming works"() { + when: + def original = DataFetcherResult.newResult().data("hello") + .errors([error1]).localContext("world") + .extensions([x: "y"]).build() def result = original.transform({ builder -> 
builder.error(error2) }) then: result.getData() == "hello" result.getLocalContext() == "world" + result.getExtensions() == [x : "y"] + result.getErrors() == [error1, error2] + + when: + result = result.transform({ builder -> builder.extensions(a : "b") }) + then: + result.getData() == "hello" + result.getLocalContext() == "world" + result.getExtensions() == [a : "b"] result.getErrors() == [error1, error2] } } diff --git a/src/test/groovy/graphql/execution/SimpleDataFetcherExceptionHandlerTest.groovy b/src/test/groovy/graphql/execution/SimpleDataFetcherExceptionHandlerTest.groovy index 1980db0816..2ec5f380e0 100644 --- a/src/test/groovy/graphql/execution/SimpleDataFetcherExceptionHandlerTest.groovy +++ b/src/test/groovy/graphql/execution/SimpleDataFetcherExceptionHandlerTest.groovy @@ -21,49 +21,29 @@ class SimpleDataFetcherExceptionHandlerTest extends Specification { def "will wrap general exceptions"() { when: def handlerParameters = mkParams(new RuntimeException("RTE")) - def result = handler.onException(handlerParameters) + def result = handler.handleException(handlerParameters) then: - result.errors[0] instanceof ExceptionWhileDataFetching - result.errors[0].getMessage().contains("RTE") + result.join().errors[0] instanceof ExceptionWhileDataFetching + result.join().errors[0].getMessage().contains("RTE") } def "can unwrap certain exceptions"() { when: - def result = handler.onException(mkParams(new CompletionException(new RuntimeException("RTE")))) + def result = handler.handleException(mkParams(new CompletionException(new RuntimeException("RTE")))) then: - result.errors[0] instanceof ExceptionWhileDataFetching - result.errors[0].getMessage().contains("RTE") + result.join().errors[0] instanceof ExceptionWhileDataFetching + result.join().errors[0].getMessage().contains("RTE") } def "wont unwrap other exceptions"() { when: - def result = handler.onException(mkParams(new RuntimeException("RTE",new RuntimeException("BANG")))) + def result = 
handler.handleException(mkParams(new RuntimeException("RTE",new RuntimeException("BANG")))) then: - result.errors[0] instanceof ExceptionWhileDataFetching - ! result.errors[0].getMessage().contains("BANG") - } - - static class MyHandler implements DataFetcherExceptionHandler {} - - def "a class can work without implementing anything"() { - when: - DataFetcherExceptionHandler handler = new MyHandler() - def handlerParameters = mkParams(new RuntimeException("RTE")) - def result = handler.onException(handlerParameters) // Retain deprecated method for test coverage - - then: - result.errors[0] instanceof ExceptionWhileDataFetching - result.errors[0].getMessage().contains("RTE") - - when: - def resultCF = handler.handleException(handlerParameters) - - then: - resultCF.join().errors[0] instanceof ExceptionWhileDataFetching - resultCF.join().errors[0].getMessage().contains("RTE") + result.join().errors[0] instanceof ExceptionWhileDataFetching + ! result.join().errors[0].getMessage().contains("BANG") } private static DataFetcherExceptionHandlerParameters mkParams(Exception exception) { diff --git a/src/test/groovy/graphql/execution/ValuesResolverTest.groovy b/src/test/groovy/graphql/execution/ValuesResolverTest.groovy index 9c2ec5d2d0..3d9a94ebf5 100644 --- a/src/test/groovy/graphql/execution/ValuesResolverTest.groovy +++ b/src/test/groovy/graphql/execution/ValuesResolverTest.groovy @@ -641,7 +641,7 @@ class ValuesResolverTest extends Specification { executionResult.data == null executionResult.errors.size() == 1 executionResult.errors[0].errorType == ErrorType.ValidationError - executionResult.errors[0].message == "Variable 'input' has an invalid value: Expected a Boolean input, but it was a 'String'" + executionResult.errors[0].message == "Variable 'input' has an invalid value: Expected a value that can be converted to type 'Boolean' but it was a 'String'" executionResult.errors[0].locations == [new SourceLocation(2, 35)] } @@ -679,7 +679,7 @@ class ValuesResolverTest 
extends Specification { executionResult.data == null executionResult.errors.size() == 1 executionResult.errors[0].errorType == ErrorType.ValidationError - executionResult.errors[0].message == "Variable 'input' has an invalid value: Expected a Number input, but it was a 'String'" + executionResult.errors[0].message == "Variable 'input' has an invalid value: Expected a value that can be converted to type 'Float' but it was a 'String'" executionResult.errors[0].locations == [new SourceLocation(2, 35)] } } \ No newline at end of file diff --git a/src/test/groovy/graphql/execution/directives/QueryDirectivesImplTest.groovy b/src/test/groovy/graphql/execution/directives/QueryDirectivesImplTest.groovy index 1c25262b48..80c2a861d1 100644 --- a/src/test/groovy/graphql/execution/directives/QueryDirectivesImplTest.groovy +++ b/src/test/groovy/graphql/execution/directives/QueryDirectivesImplTest.groovy @@ -2,6 +2,7 @@ package graphql.execution.directives import graphql.GraphQLContext import graphql.TestUtil +import graphql.execution.CoercedVariables import graphql.execution.MergedField import spock.lang.Specification @@ -66,4 +67,26 @@ class QueryDirectivesImplTest extends Specification { appliedResult[1].getArgument("forMillis").getValue() == 10 } + def "builder works as expected"() { + + def f1 = TestUtil.parseField("f1 @cached @upper") + def f2 = TestUtil.parseField("f2 @cached(forMillis : \$var) @timeout") + + def mergedField = MergedField.newMergedField([f1, f2]).build() + + def queryDirectives = QueryDirectives.newQueryDirectives() + .mergedField(mergedField) + .schema(schema) + .coercedVariables(CoercedVariables.of([var: 10])) + .graphQLContext(GraphQLContext.getDefault()) + .locale(Locale.getDefault()) + .build() + + when: + def appliedDirectivesByName = queryDirectives.getImmediateAppliedDirectivesByName() + + then: + appliedDirectivesByName.keySet().sort() == ["cached", "timeout", "upper"] + + } } diff --git 
a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderHangingTest.groovy b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderHangingTest.groovy index b76152f75d..cda31ba34b 100644 --- a/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderHangingTest.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/dataloader/DataLoaderHangingTest.groovy @@ -241,8 +241,13 @@ class DataLoaderHangingTest extends Specification { """ DataFetcherExceptionHandler customExceptionHandlerThatThrows = new DataFetcherExceptionHandler() { + @Override - DataFetcherExceptionHandlerResult onException(DataFetcherExceptionHandlerParameters handlerParameters) { // Retain for test coverage, intentionally using sync version. + CompletableFuture handleException(DataFetcherExceptionHandlerParameters handlerParameters) { + // + // this is a weird test case - its not actually handling the exception - its a test + // case where the handler code itself throws an exception during the handling + // and that will not stop the DataLoader from being dispatched throw handlerParameters.exception } } diff --git a/src/test/groovy/graphql/execution/values/InputInterceptorTest.groovy b/src/test/groovy/graphql/execution/values/InputInterceptorTest.groovy new file mode 100644 index 0000000000..037dd27486 --- /dev/null +++ b/src/test/groovy/graphql/execution/values/InputInterceptorTest.groovy @@ -0,0 +1,139 @@ +package graphql.execution.values + +import graphql.ExecutionInput +import graphql.GraphQL +import graphql.GraphQLContext +import graphql.Scalars +import graphql.TestUtil +import graphql.execution.RawVariables +import graphql.execution.ValuesResolver +import graphql.schema.DataFetcher +import graphql.schema.DataFetchingEnvironment +import graphql.schema.GraphQLInputType +import org.jetbrains.annotations.NotNull +import org.jetbrains.annotations.Nullable +import spock.lang.Specification + +import static graphql.language.TypeName.newTypeName 
+import static graphql.language.VariableDefinition.newVariableDefinition + +class InputInterceptorTest extends Specification { + + def sdl = """ + type Query { + f(inputArg : InputArg, intArg : Int, stringArg : String,booleanArg : Boolean) : String + } + + input InputArg { + intArg : Int + stringArg : String + booleanArg : Boolean + } + """ + + def schema = TestUtil.schema(sdl) + + InputInterceptor interceptor = new InputInterceptor() { + @Override + Object intercept(@Nullable Object value, @NotNull GraphQLInputType graphQLType, @NotNull GraphQLContext graphqlContext, @NotNull Locale locale) { + if (graphQLType == Scalars.GraphQLBoolean) { + return "truthy" == value ? false : value + } + if (graphQLType == Scalars.GraphQLString) { + return String.valueOf(value).reverse() + } + return value + } + } + + + def "the input interceptor can be called"() { + def inputArgDef = newVariableDefinition("inputArg", + newTypeName("InputArg").build()).build() + def booleanArgDef = newVariableDefinition("booleanArg", + newTypeName("Boolean").build()).build() + def stringArgDef = newVariableDefinition("stringArg", + newTypeName("String").build()).build() + + def graphQLContext = GraphQLContext.newContext() + .put(InputInterceptor.class, interceptor).build() + + when: + def coercedVariables = ValuesResolver.coerceVariableValues( + this.schema, + [inputArgDef, booleanArgDef, stringArgDef], + RawVariables.of([ + "booleanArg": "truthy", + "stringArg" : "sdrawkcab", + "inputArg" : [ + "stringArg": "sdrawkcab osla" + ] + ]), + graphQLContext, + Locale.CANADA + ) + + then: + coercedVariables.toMap() == [ + "booleanArg": false, + "stringArg" : "backwards", + "inputArg" : [ + "stringArg": "also backwards" + ] + ] + } + + def "integration test of interceptor being called"() { + DataFetcher df = { DataFetchingEnvironment env -> + return env.getArguments().entrySet() + .collect({ String.valueOf(it.key) + ":" + String.valueOf(it.value) }) + .join(" ") + } + def schema = TestUtil.schema(sdl, 
["Query": ["f": df]]) + def graphQL = GraphQL.newGraphQL(schema).build() + def ei = ExecutionInput.newExecutionInput().query(''' + query q($booleanArg : Boolean, $stringArg : String) { + f(booleanArg : $booleanArg, stringArg : $stringArg) + } + ''') + .graphQLContext({ it.put(InputInterceptor.class, interceptor) }) + .variables( + "booleanArg": "truthy", + "stringArg": "sdrawkcab" + + ) + .build() + + when: + def er = graphQL.execute(ei) + + then: + er.errors.isEmpty() + er.data == [f: "stringArg:backwards booleanArg:false"] + } + + + def "integration test showing the presence of an interceptor wont stop scalar coercing"() { + def schema = TestUtil.schema(sdl) + def graphQL = GraphQL.newGraphQL(schema).build() + def ei = ExecutionInput.newExecutionInput().query(''' + query q($booleanArg : Boolean, $stringArg : String) { + f(booleanArg : $booleanArg, stringArg : $stringArg) + } + ''') + .graphQLContext({ it.put(InputInterceptor.class, interceptor) }) + .variables( + "booleanArg": [not: "a boolean"], + "stringArg": "sdrawkcab" + + ) + .build() + + when: + def er = graphQL.execute(ei) + + then: + !er.errors.isEmpty() + er.errors[0].message == "Variable 'booleanArg' has an invalid value: Expected a value that can be converted to type 'Boolean' but it was a 'LinkedHashMap'" + } +} diff --git a/src/test/groovy/graphql/execution/values/legacycoercing/LegacyCoercingInputInterceptorTest.groovy b/src/test/groovy/graphql/execution/values/legacycoercing/LegacyCoercingInputInterceptorTest.groovy new file mode 100644 index 0000000000..90a46bb1ed --- /dev/null +++ b/src/test/groovy/graphql/execution/values/legacycoercing/LegacyCoercingInputInterceptorTest.groovy @@ -0,0 +1,201 @@ +package graphql.execution.values.legacycoercing + +import graphql.GraphQLContext +import graphql.schema.GraphQLInputType +import spock.lang.Specification + +import java.util.function.BiConsumer + +import static graphql.Scalars.GraphQLBoolean +import static graphql.Scalars.GraphQLFloat +import static 
graphql.Scalars.GraphQLInt +import static graphql.Scalars.GraphQLString + +class LegacyCoercingInputInterceptorTest extends Specification { + + def "can detect legacy boolean values"() { + when: + def isLegacyValue = LegacyCoercingInputInterceptor.isLegacyValue(input, inputType) + then: + isLegacyValue == expected + + where: + input | inputType | expected + "true" | GraphQLBoolean | true + "false" | GraphQLBoolean | true + "TRUE" | GraphQLBoolean | true + "FALSE" | GraphQLBoolean | true + "junk" | GraphQLBoolean | true + // not acceptable to the old + true | GraphQLBoolean | false + false | GraphQLBoolean | false + ["rubbish"] | GraphQLBoolean | false + } + + def "can change legacy boolean values"() { + def interceptor = LegacyCoercingInputInterceptor.migratesValues() + when: + def value = interceptor.intercept(input, inputType, GraphQLContext.getDefault(), Locale.getDefault()) + then: + value == expected + + where: + input | inputType | expected + "true" | GraphQLBoolean | true + "false" | GraphQLBoolean | false + "TRUE" | GraphQLBoolean | true + "FALSE" | GraphQLBoolean | false + + // left alone + "junk" | GraphQLBoolean | "junk" + true | GraphQLBoolean | true + false | GraphQLBoolean | false + ["rubbish"] | GraphQLBoolean | ["rubbish"] + } + + def "can detect legacy float values"() { + when: + def isLegacyValue = LegacyCoercingInputInterceptor.isLegacyValue(input, inputType) + then: + isLegacyValue == expected + + where: + input | inputType | expected + "1.0" | GraphQLFloat | true + "1" | GraphQLFloat | true + "junk" | GraphQLFloat | true + // not acceptable to the old + 666.0F | GraphQLFloat | false + 666 | GraphQLFloat | false + ["rubbish"] | GraphQLFloat | false + } + + def "can change legacy float values"() { + def interceptor = LegacyCoercingInputInterceptor.migratesValues() + when: + def value = interceptor.intercept(input, inputType, GraphQLContext.getDefault(), Locale.getDefault()) + then: + value == expected + + where: + input | inputType | expected + 
"1.0" | GraphQLFloat | 1.0F + "1" | GraphQLFloat | 1.0F + + // left alone + "junk" | GraphQLFloat | "junk" + 666.0F | GraphQLFloat | 666.0F + 666 | GraphQLFloat | 666 + ["rubbish"] | GraphQLFloat | ["rubbish"] + } + + def "can detect legacy int values"() { + when: + def isLegacyValue = LegacyCoercingInputInterceptor.isLegacyValue(input, inputType) + then: + isLegacyValue == expected + + where: + input | inputType | expected + "1.0" | GraphQLInt | true + "1" | GraphQLInt | true + "junk" | GraphQLInt | true + // not acceptable to the old + 666.0F | GraphQLInt | false + 666 | GraphQLInt | false + ["rubbish"] | GraphQLInt | false + } + + def "can change legacy int values"() { + def interceptor = LegacyCoercingInputInterceptor.migratesValues() + when: + def value = interceptor.intercept(input, inputType, GraphQLContext.getDefault(), Locale.getDefault()) + then: + value == expected + + where: + input | inputType | expected + "1.0" | GraphQLInt | 1 + "1" | GraphQLInt | 1 + + // left alone + "junk" | GraphQLInt | "junk" + 666.0F | GraphQLInt | 666.0F + 666 | GraphQLInt | 666 + ["rubbish"] | GraphQLInt | ["rubbish"] + } + + def "can detect legacy String values"() { + when: + def isLegacyValue = LegacyCoercingInputInterceptor.isLegacyValue(input, inputType) + then: + isLegacyValue == expected + + where: + input | inputType | expected + 666.0F | GraphQLString | true + 666 | GraphQLString | true + ["rubbish"] | GraphQLString | true + + // strings that are strings dont need to change + "xyz" | GraphQLString | false + "abc" | GraphQLString | false + "junk" | GraphQLString | false + + } + + def "can change legacy String values"() { + def interceptor = LegacyCoercingInputInterceptor.migratesValues() + when: + def value = interceptor.intercept(input, inputType, GraphQLContext.getDefault(), Locale.getDefault()) + then: + value == expected + where: + // its just String.valueOf() + input | inputType | expected + "xyz" | GraphQLString | "xyz" + "abc" | GraphQLString | "abc" + "junk" | 
GraphQLString | "junk" + 666.0F | GraphQLString | "666.0" + 666 | GraphQLString | "666" + ["rubbish"] | GraphQLString | "[rubbish]" + } + + def "can observe values "() { + def lastValue = null + def lastType = null + + def callback = new BiConsumer() { + @Override + void accept(Object o, GraphQLInputType graphQLInputType) { + lastValue = o + lastType = graphQLInputType + } + } + def interceptor = LegacyCoercingInputInterceptor.observesValues(callback) + when: + lastValue = null + lastType = null + def value = interceptor.intercept(input, inputType, GraphQLContext.getDefault(), Locale.getDefault()) + + then: + // nothing changes - it observes only + value == input + lastValue == expectedLastValue + lastType == expectedLastType + + where: + input | inputType | expectedLastValue | expectedLastType + "true" | GraphQLBoolean | "true" | GraphQLBoolean + "1.0" | GraphQLFloat | "1.0" | GraphQLFloat + "1" | GraphQLInt | "1" | GraphQLInt + 1 | GraphQLString | 1 | GraphQLString + + // no observation if its not needed + true | GraphQLBoolean | null | null + 1.0F | GraphQLFloat | null | null + 1 | GraphQLInt | null | null + "x" | GraphQLString | null | null + + } +} diff --git a/src/test/groovy/graphql/extensions/ExtensionsBuilderTest.groovy b/src/test/groovy/graphql/extensions/ExtensionsBuilderTest.groovy index 326248cf76..12be824cf8 100644 --- a/src/test/groovy/graphql/extensions/ExtensionsBuilderTest.groovy +++ b/src/test/groovy/graphql/extensions/ExtensionsBuilderTest.groovy @@ -1,14 +1,15 @@ package graphql.extensions -import graphql.ExecutionInput import graphql.ExecutionResult import graphql.TestUtil +import graphql.execution.DataFetcherResult import graphql.schema.DataFetcher import graphql.schema.DataFetchingEnvironment import graphql.schema.GraphQLTypeUtil import org.jetbrains.annotations.NotNull import spock.lang.Specification +import static graphql.ExecutionInput.newExecutionInput import static graphql.extensions.ExtensionsBuilder.newExtensionsBuilder import static 
graphql.schema.idl.RuntimeWiring.newRuntimeWiring import static graphql.schema.idl.TypeRuntimeWiring.newTypeWiring @@ -39,6 +40,27 @@ class ExtensionsBuilderTest extends Specification { extensions == [x: "overwrite3", y: "25", z: "overwriteZ", a: "1"] } + def "wont add empty changes"() { + def builder = newExtensionsBuilder() + when: + builder.addValues([:]) + + then: + builder.getChangeCount() == 0 + + when: + builder.addValues([:]) + + then: + builder.getChangeCount() == 0 + + when: + def extensions = builder.buildExtensions() + then: + extensions.isEmpty() + + } + def "can handle no changes"() { when: def extensions = newExtensionsBuilder() @@ -122,11 +144,12 @@ class ExtensionsBuilderTest extends Specification { """ def extensionsBuilder = newExtensionsBuilder() - extensionsBuilder.addValue("added","explicitly") + extensionsBuilder.addValue("added", "explicitly") - def ei = ExecutionInput.newExecutionInput("query q { name street id }") + def ei = newExecutionInput("query q { name street id }") .graphQLContext({ ctx -> - ctx.put(ExtensionsBuilder.class, extensionsBuilder) }) + ctx.put(ExtensionsBuilder.class, extensionsBuilder) + }) .build() @@ -144,12 +167,53 @@ class ExtensionsBuilderTest extends Specification { er.errors.isEmpty() er.extensions == [ "added": "explicitly", - common: [ + common : [ name : "String!", street: "String", id : "ID!", ], // we break them out so we have common and not common entries + name : "String!", + street : "String", + id : "ID!", + ] + } + + + def "integration test that shows it working when they use DataFetcherResult and defaulted values"() { + def sdl = """ + type Query { + name : String! + street : String + id : ID! 
+ } + """ + + DataFetcher dfrDF = new DataFetcher() { + @Override + Object get(DataFetchingEnvironment env) throws Exception { + def fieldMap = [:] + fieldMap.put(env.getFieldDefinition().name, GraphQLTypeUtil.simplePrint(env.getFieldDefinition().type)) + return DataFetcherResult.newResult().data("ignored").extensions(fieldMap).build() + } + } + + def graphQL = TestUtil.graphQL(sdl, newRuntimeWiring() + .type(newTypeWiring("Query").dataFetchers([ + name : dfrDF, + street: dfrDF, + id : dfrDF, + ]))) + .build() + + when: + def ei = newExecutionInput("query q { name street id }") + .build() + + def er = graphQL.execute(ei) + then: + er.errors.isEmpty() + er.extensions == [ name : "String!", street: "String", id : "ID!", @@ -165,7 +229,7 @@ class ExtensionsBuilderTest extends Specification { } """ - def ei = ExecutionInput.newExecutionInput("query q { name street id }") + def ei = newExecutionInput("query q { name street id }") .build() @@ -203,8 +267,8 @@ class ExtensionsBuilderTest extends Specification { } """ - def ei = ExecutionInput.newExecutionInput("query q { name street id }") - .root(["name" : "Brad", "id" :1234]) + def ei = newExecutionInput("query q { name street id }") + .root(["name": "Brad", "id": 1234]) .build() diff --git a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy index 9472680f47..e5e6724abd 100644 --- a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy +++ b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy @@ -6,6 +6,7 @@ import graphql.TestUtil import graphql.execution.CoercedVariables import graphql.execution.MergedField import graphql.execution.RawVariables +import graphql.execution.directives.QueryAppliedDirective import graphql.language.Document import graphql.language.Field import graphql.language.FragmentDefinition @@ -709,6 +710,57 @@ type Dog 
implements Animal{ ] } + def "query with fragment and type condition merged together 2"() { + def graphQLSchema = TestUtil.schema(""" + type Query { + pet : Pet + } + interface Pet { + name : String + } + + type Dog implements Pet { + name : String + } + + type Bird implements Pet { + name : String + } + + type Cat implements Pet { + name : String + } + """) + def query = """ + { + pet { + name + ... on Dog { + name + } + ... CatFrag + } + } + + fragment CatFrag on Cat { + name + } + """ + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) + + expect: + printedTree == ['-Query.pet: Pet', + '--[Bird, Cat, Dog].name: String' + ] + } + + def "query with interface in between"() { def graphQLSchema = schema(""" type Query { @@ -896,6 +948,40 @@ type Dog implements Animal{ result } + List printTreeAndDirectives(ExecutableNormalizedOperation queryExecutionTree) { + def result = [] + Traverser traverser = Traverser.depthFirst({ it.getChildren() }) + traverser.traverse(queryExecutionTree.getTopLevelFields(), new TraverserVisitorStub() { + @Override + TraversalControl enter(TraverserContext context) { + ExecutableNormalizedField queryExecutionField = context.thisNode() + def queryDirectives = queryExecutionTree.getQueryDirectives(queryExecutionField) + + def fieldDetails = queryExecutionField.printDetails() + if (queryDirectives != null) { + def appliedDirectivesByName = queryDirectives.getImmediateAppliedDirectivesByName() + if (!appliedDirectivesByName.isEmpty()) { + fieldDetails += " " + printDirectives(appliedDirectivesByName) + } + } + result << fieldDetails + return TraversalControl.CONTINUE + } + + String printDirectives(Map> 
stringListMap) { + String s = stringListMap.collect { entry -> + entry.value.collect { + " @" + it.name + "(" + it.getArguments().collect { + it.name + " : " + '"' + it.value + '"' + }.join(",") + ")" + }.join(' ') + }.join(" ") + return s + } + }) + result + } + static List printTreeWithLevelInfo(ExecutableNormalizedOperation queryExecutionTree, GraphQLSchema schema) { def result = [] Traverser traverser = Traverser.depthFirst({ it.getChildren() }) @@ -1654,14 +1740,16 @@ schema { ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() when: def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables()) - println String.join("\n", printTree(tree)) + def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) - /** - * This is a test for two fields with the same key (friend), - * but backed by two different fields (Cat.dogFriend,Dog.dogFriend) - * which end up being two different NormalizedField - */ then: + // the two friend fields are not in on ENF + printedTree == ['-Query.pets: Pet', + '--friend: Cat.catFriend: CatFriend', + '---CatFriend.catFriendName: String', + '--friend: Dog.dogFriend: DogFriend', + '---DogFriend.dogFriendName: String'] + tree.normalizedFieldToMergedField.size() == 5 tree.fieldToNormalizedField.size() == 7 } @@ -1703,12 +1791,17 @@ schema { def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) then: + /** + * the two name fields are not merged, because they are backed by different fields with different arguments + * If the arguments are the same, it would be one ENF. 
+ */ printedTree == ['-Query.pets: Pet', '--Cat.name: String', '--Dog.name: String' ] } + def "diverging fields with the same parent type on deeper level"() { given: def schema = schema(''' @@ -2384,6 +2477,59 @@ schema { ] } + + def "query directives are captured is respected"() { + given: + String schema = """ + directive @fieldDirective(target : String!) on FIELD + directive @fieldXDirective(target : String!) on FIELD + + type Query { + pets: Pet + } + interface Pet { + name: String + } + type Cat implements Pet { + name: String + } + type Dog implements Pet { + name: String + } + """ + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = ''' + query q { + pets { + ... on Cat { + cName : name @fieldDirective(target : "Cat.name") + } + ... on Dog { + dName : name @fieldDirective(target : "Dog.name") @fieldXDirective(target : "Dog.name") + } + ... on Pet { + pName : name @fieldDirective(target : "Pet.name") + } + }} + ''' + + def variables = [:] + assertValidQuery(graphQLSchema, query, variables) + Document document = TestUtil.parseQuery(query) + ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + when: + def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables)) + def printedTree = printTreeAndDirectives(tree) + + then: + printedTree == ['Query.pets', + 'cName: Cat.name @fieldDirective(target : "Cat.name")', + 'dName: Dog.name @fieldDirective(target : "Dog.name") @fieldXDirective(target : "Dog.name")', + 'pName: [Cat, Dog].name @fieldDirective(target : "Pet.name")', + ] + } + def "missing argument"() { given: String schema = """ @@ -2406,4 +2552,147 @@ schema { printedTree == ['Query.hello'] tree.getTopLevelFields().get(0).getNormalizedArguments().isEmpty() } + + def "reused field via fragments"() { + String schema = """ + type Query { + pet: Pet + } + type Pet { + owner: Person + emergencyContact: Person + } + type 
Person { + name: String + } + """ + GraphQLSchema graphQLSchema = TestUtil.schema(schema) + + String query = """ +{ pet { + owner { ...personName } + emergencyContact { ...personName } +}} +fragment personName on Person { + name +} + """ + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory() + def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) + + expect: + printedTree == ['-Query.pet: Pet', + '--Pet.owner: Person', + '---Person.name: String', + '--Pet.emergencyContact: Person', + '---Person.name: String' + ] + + } + + + def "test interface fields with three different output types (covariance) on the implementations"() { + def graphQLSchema = schema(""" + interface Animal { + parent: Animal + name: String + } + type Cat implements Animal { + name: String + parent: Cat + } + type Dog implements Animal { + name: String + parent: Dog + isGoodBoy: Boolean + } + type Bird implements Animal { + name: String + parent: Bird + } + type Query { + animal: Animal + } + """) + + def query = """ + { + animal { + parent { + name + } + } + } + """ + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + def dependencyGraph = new ExecutableNormalizedOperationFactory() + def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) + + expect: + printedTree == [ + "-Query.animal: Animal", + "--[Bird, Cat, Dog].parent: Bird, Cat, Dog", + "---[Bird, Cat, Dog].name: String", + ] + } + + def "covariants with union fields"() { + def graphQLSchema = schema(""" + type Query { + animal: Animal + } + interface Animal { + parent: DogOrCat + name: 
String + } + type Cat implements Animal { + name: String + parent: Cat + } + type Dog implements Animal { + name: String + parent: Dog + isGoodBoy: Boolean + } + union DogOrCat = Dog | Cat + """) + + def query = """ + { + animal { + parent { + __typename + } + } + } + """ + + assertValidQuery(graphQLSchema, query) + + Document document = TestUtil.parseQuery(query) + + def dependencyGraph = new ExecutableNormalizedOperationFactory() + def tree = dependencyGraph.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables()) + def printedTree = printTreeWithLevelInfo(tree, graphQLSchema) + + expect: + printedTree == [ + "-Query.animal: Animal", + "--[Cat, Dog].parent: Cat, Dog", + "---[Cat, Dog].__typename: String!", + ] + } + + } diff --git a/src/test/groovy/graphql/schema/PropertyDataFetcherTest.groovy b/src/test/groovy/graphql/schema/PropertyDataFetcherTest.groovy index 3d289c3688..f771835575 100644 --- a/src/test/groovy/graphql/schema/PropertyDataFetcherTest.groovy +++ b/src/test/groovy/graphql/schema/PropertyDataFetcherTest.groovy @@ -33,7 +33,7 @@ class PropertyDataFetcherTest extends Specification { .build() } - class SomeObject { + static class SomeObject { String value } @@ -483,7 +483,7 @@ class PropertyDataFetcherTest extends Specification { } - class ProductDTO { + static class ProductDTO { String name String model } @@ -537,7 +537,7 @@ class PropertyDataFetcherTest extends Specification { private static class Bar implements Foo { @Override String getSomething() { - return "bar"; + return "bar" } } @@ -562,4 +562,133 @@ class PropertyDataFetcherTest extends Specification { then: result == "bar" } + + def "issue 3247 - record like statics should not be used"() { + given: + def payload = new UpdateOrganizerSubscriptionPayload(true, new OrganizerSubscriptionError()) + PropertyDataFetcher propertyDataFetcher = new PropertyDataFetcher("success") + def dfe = Mock(DataFetchingEnvironment) + dfe.getSource() >> payload + when: 
+ def result = propertyDataFetcher.get(dfe) + + then: + result == true + + // repeat - should be cached + when: + result = propertyDataFetcher.get(dfe) + + then: + result == true + } + + def "issue 3247 - record like statics should not be found"() { + given: + def errorShape = new OrganizerSubscriptionError() + PropertyDataFetcher propertyDataFetcher = new PropertyDataFetcher("message") + def dfe = Mock(DataFetchingEnvironment) + dfe.getSource() >> errorShape + when: + def result = propertyDataFetcher.get(dfe) + + then: + result == null // not found as its a static recordLike() method + + // repeat - should be cached + when: + result = propertyDataFetcher.get(dfe) + + then: + result == null + } + + def "issue 3247 - getter statics should be found"() { + given: + def objectInQuestion = new BarClassWithStaticProperties() + PropertyDataFetcher propertyDataFetcher = new PropertyDataFetcher("foo") + def dfe = Mock(DataFetchingEnvironment) + dfe.getSource() >> objectInQuestion + when: + def result = propertyDataFetcher.get(dfe) + + then: + result == "foo" + + // repeat - should be cached + when: + result = propertyDataFetcher.get(dfe) + + then: + result == "foo" + + when: + propertyDataFetcher = new PropertyDataFetcher("bar") + result = propertyDataFetcher.get(dfe) + + then: + result == "bar" + + // repeat - should be cached + when: + result = propertyDataFetcher.get(dfe) + + then: + result == "bar" + } + + /** + * Classes from issue to ensure we reproduce as reported by customers + * + * In the UpdateOrganizerSubscriptionPayload class we will find the getSuccess() because static recordLike() methods are no longer allowed + */ + static class OrganizerSubscriptionError { + static String message() { return "error " } + } + + static class UpdateOrganizerSubscriptionPayload { + private final Boolean success + private final OrganizerSubscriptionError error + + UpdateOrganizerSubscriptionPayload(Boolean success, OrganizerSubscriptionError error) { + this.success = success + 
this.error = error + } + + static UpdateOrganizerSubscriptionPayload success() { + // 👈 note the static factory method for creating a success payload + return new UpdateOrganizerSubscriptionPayload(Boolean.TRUE, null) + } + + static UpdateOrganizerSubscriptionPayload error(OrganizerSubscriptionError error) { + // 👈 note the static factory method for creating a success payload + return new UpdateOrganizerSubscriptionPayload(null, error) + } + + Boolean getSuccess() { + return success + } + + OrganizerSubscriptionError getError() { + return error + } + + + @Override + String toString() { + return new StringJoiner( + ", ", UpdateOrganizerSubscriptionPayload.class.getSimpleName() + "[", "]") + .add("success=" + success) + .add("error=" + error) + .toString() + } + } + + static class FooClassWithStaticProperties { + static String getFoo() { return "foo" } + } + + static class BarClassWithStaticProperties extends FooClassWithStaticProperties { + static String getBar() { return "bar" } + } } diff --git a/src/test/groovy/graphql/schema/SchemaTransformerTest.groovy b/src/test/groovy/graphql/schema/SchemaTransformerTest.groovy index 6922852633..339378f7a8 100644 --- a/src/test/groovy/graphql/schema/SchemaTransformerTest.groovy +++ b/src/test/groovy/graphql/schema/SchemaTransformerTest.groovy @@ -931,4 +931,32 @@ type Query { (appliedDirective.getArgument("fooArgOnDirective").getType() as GraphQLScalarType).getName() == "Bar" newSchema.getType("Foo") == null } + + def "has access to common variables"() { + def schema = TestUtil.schema(""" + type Query { + foo : String + } + """) + + def visitedSchema = null + def visitedCodeRegistry = null + def visitor = new GraphQLTypeVisitorStub() { + + @Override + TraversalControl visitGraphQLFieldDefinition(GraphQLFieldDefinition node, TraverserContext context) { + visitedSchema = context.getVarFromParents(GraphQLSchema.class) + visitedCodeRegistry = context.getVarFromParents(GraphQLCodeRegistry.Builder.class) + return 
super.visitGraphQLFieldDefinition(node, context) + } + + } + + when: + SchemaTransformer.transformSchema(schema, visitor) + + then: + visitedSchema == schema + visitedCodeRegistry instanceof GraphQLCodeRegistry.Builder + } } diff --git a/src/test/groovy/graphql/schema/SchemaTraverserTest.groovy b/src/test/groovy/graphql/schema/SchemaTraverserTest.groovy index 889e7e2198..10a8d53a93 100644 --- a/src/test/groovy/graphql/schema/SchemaTraverserTest.groovy +++ b/src/test/groovy/graphql/schema/SchemaTraverserTest.groovy @@ -1,6 +1,7 @@ package graphql.schema import graphql.Scalars +import graphql.TestUtil import graphql.util.TraversalControl import graphql.util.TraverserContext import spock.lang.Specification @@ -379,7 +380,50 @@ class SchemaTraverserTest extends Specification { visitor.getStack() == ["argument: Test1", "fallback: Test1", "reference: String", "fallback: String", "argument: Test2", "fallback: Test2", "backRef: String" ] + } + + def "can quit the schema traverser"() { + def sdl = """ + type Query { + f : ObjType + f2NeverVisited : String + } + + type ObjType { + fQuit : ObjType2 + } + + type ObjType2 { + neverVisited : String + } + """ + + def schema = TestUtil.schema(sdl) + + def visitor = new GraphQLTestingVisitor() { + @Override + TraversalControl visitGraphQLFieldDefinition(GraphQLFieldDefinition node, TraverserContext context) { + super.visitGraphQLFieldDefinition(node, context) + if (node.name.contains("Quit")) { + return TraversalControl.QUIT + } + return TraversalControl.CONTINUE + } + } + when: + new SchemaTraverser().depthFirstFullSchema(visitor, schema) + + then: + visitor.getStack() == ["object: Query", + "fallback: Query", + "field: f", + "fallback: f", + "object: ObjType", + "fallback: ObjType", + "field: fQuit", + "fallback: fQuit", + ] } class GraphQLTestingVisitor extends GraphQLTypeVisitorStub { diff --git a/src/test/groovy/graphql/schema/diffing/SchemaDiffingTest.groovy b/src/test/groovy/graphql/schema/diffing/SchemaDiffingTest.groovy 
index e9d5bf5202..11f1dea63b 100644 --- a/src/test/groovy/graphql/schema/diffing/SchemaDiffingTest.groovy +++ b/src/test/groovy/graphql/schema/diffing/SchemaDiffingTest.groovy @@ -1187,8 +1187,9 @@ class SchemaDiffingTest extends Specification { then: /** * The test here is that the context of the applied argument is considered and that a2 is deleted and one b is inserted and another one changed. + * Note: this is not longer true */ - operations.size() == 5 + operations.size() == 8 } def "with directives"() { diff --git a/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerTest.groovy b/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerTest.groovy index 0c9fcff02e..ebc9463eb5 100644 --- a/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerTest.groovy +++ b/src/test/groovy/graphql/schema/diffing/ana/EditOperationAnalyzerTest.groovy @@ -1,7 +1,11 @@ package graphql.schema.diffing.ana import graphql.TestUtil +import graphql.schema.diffing.Edge +import graphql.schema.diffing.EditOperation import graphql.schema.diffing.SchemaDiffing +import graphql.schema.diffing.SchemaGraph +import graphql.schema.diffing.Vertex import spock.lang.Specification import static graphql.schema.diffing.ana.SchemaDifference.AppliedDirectiveDeletion @@ -68,7 +72,6 @@ import static graphql.schema.diffing.ana.SchemaDifference.UnionMemberDeletion import static graphql.schema.diffing.ana.SchemaDifference.UnionModification class EditOperationAnalyzerTest extends Specification { - def "object renamed"() { given: def oldSdl = ''' @@ -1010,7 +1013,7 @@ class EditOperationAnalyzerTest extends Specification { interface Node2 { id: ID! } - type Foo implements Node2 & NewI{ + type Foo implements Node2 & NewI { id: ID! 
hello: String } @@ -2173,6 +2176,874 @@ class EditOperationAnalyzerTest extends Specification { changes.interfaceDifferences["User"] instanceof InterfaceDeletion } + def "argument removed and similar argument added on separate object fields"() { + given: + def oldSdl = ''' + type Query { + issues: IssueQuery + } + type IssueQuery { + issue: Issue + issues(id: [ID!]!): [Issue] + } + type Issue { + id: ID! + } + ''' + def newSdl = ''' + type Query { + issues: IssueQuery + } + type IssueQuery { + issue(id: ID): Issue + issues: [Issue] + } + type Issue { + id: ID! + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.objectDifferences["IssueQuery"] instanceof ObjectModification + def issueQueryChanges = changes.objectDifferences["IssueQuery"] as ObjectModification + issueQueryChanges.details.size() == 2 + + def argumentAddition = issueQueryChanges.getDetails(ObjectFieldArgumentAddition) + argumentAddition.size() == 1 + argumentAddition[0].fieldName == "issue" + argumentAddition[0].name == "id" + + def argumentDeletion = issueQueryChanges.getDetails(ObjectFieldArgumentDeletion) + argumentDeletion.size() == 1 + argumentDeletion[0].fieldName == "issues" + argumentDeletion[0].name == "id" + } + + def "argument removed and similar argument added on separate interface fields"() { + given: + def oldSdl = ''' + type Query { + issues: IssueQuery + } + interface IssueQuery { + issue: Issue + issues(id: [ID!]!): [Issue] + } + type Issue { + id: ID! + } + ''' + def newSdl = ''' + type Query { + issues: IssueQuery + } + interface IssueQuery { + issue(id: ID): Issue + issues: [Issue] + } + type Issue { + id: ID! 
+ } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.interfaceDifferences["IssueQuery"] instanceof InterfaceModification + def issueQueryChanges = changes.interfaceDifferences["IssueQuery"] as InterfaceModification + issueQueryChanges.details.size() == 2 + + def argumentAddition = issueQueryChanges.getDetails(InterfaceFieldArgumentAddition) + argumentAddition.size() == 1 + argumentAddition[0].fieldName == "issue" + argumentAddition[0].name == "id" + + def argumentDeletion = issueQueryChanges.getDetails(InterfaceFieldArgumentDeletion) + argumentDeletion.size() == 1 + argumentDeletion[0].fieldName == "issues" + argumentDeletion[0].name == "id" + } + + def "argument removed and similar argument added on separate directives"() { + given: + def oldSdl = ''' + directive @dog(name: String) on FIELD_DEFINITION + directive @cat on FIELD_DEFINITION + type Query { + pet: String @dog + } + ''' + def newSdl = ''' + directive @dog on FIELD_DEFINITION + directive @cat(name: [String]) on FIELD_DEFINITION + type Query { + pet: String @dog + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.directiveDifferences["cat"] instanceof DirectiveModification + def catChanges = changes.directiveDifferences["cat"] as DirectiveModification + catChanges.details.size() == 1 + def argumentAdditions = catChanges.getDetails(DirectiveArgumentAddition) + argumentAdditions.size() == 1 + argumentAdditions[0].name == "name" + + changes.directiveDifferences["dog"] instanceof DirectiveModification + def dogChanges = changes.directiveDifferences["dog"] as DirectiveModification + dogChanges.details.size() == 1 + def argumentDeletions = dogChanges.getDetails(DirectiveArgumentDeletion) + argumentDeletions.size() == 1 + argumentDeletions[0].name == "name" + } + + def "argument removed and added on renamed object field"() { + given: + def oldSdl = ''' + type Query { + issues: IssueQuery + } + type IssueQuery { + issues(id: [ID!]): [Issue] + } + type Issue 
{ + id: ID! + } + ''' + def newSdl = ''' + type Query { + issues: IssueQuery + } + type IssueQuery { + issuesById(ids: [ID!]!): [Issue] + } + type Issue { + id: ID! + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.objectDifferences["IssueQuery"] instanceof ObjectModification + def issueQueryChanges = changes.objectDifferences["IssueQuery"] as ObjectModification + issueQueryChanges.details.size() == 3 + + def rename = issueQueryChanges.getDetails(ObjectFieldRename) + rename.size() == 1 + rename[0].oldName == "issues" + rename[0].newName == "issuesById" + + def argumentRename = issueQueryChanges.getDetails(ObjectFieldArgumentRename) + argumentRename.size() == 1 + argumentRename[0].fieldName == "issuesById" + argumentRename[0].oldName == "id" + argumentRename[0].newName == "ids" + + def argumentTypeModification = issueQueryChanges.getDetails(ObjectFieldArgumentTypeModification) + argumentTypeModification.size() == 1 + argumentTypeModification[0].fieldName == "issuesById" + argumentTypeModification[0].argumentName == "ids" + argumentTypeModification[0].oldType == "[ID!]" + argumentTypeModification[0].newType == "[ID!]!" + } + + def "argument removed and added on renamed interface field"() { + given: + def oldSdl = ''' + type Query { + issues: IssueQuery + } + interface IssueQuery { + issues(id: [ID!]): [Issue] + } + type Issue { + id: ID! + } + ''' + def newSdl = ''' + type Query { + issues: IssueQuery + } + interface IssueQuery { + issuesById(ids: [ID!]!): [Issue] + } + type Issue { + id: ID! 
+ } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.interfaceDifferences["IssueQuery"] instanceof InterfaceModification + def issueQueryChanges = changes.interfaceDifferences["IssueQuery"] as InterfaceModification + issueQueryChanges.details.size() == 3 + + def rename = issueQueryChanges.getDetails(InterfaceFieldRename) + rename.size() == 1 + rename[0].oldName == "issues" + rename[0].newName == "issuesById" + + def argumentRename = issueQueryChanges.getDetails(InterfaceFieldArgumentRename) + argumentRename.size() == 1 + argumentRename[0].fieldName == "issuesById" + argumentRename[0].oldName == "id" + argumentRename[0].newName == "ids" + + def argumentTypeModification = issueQueryChanges.getDetails(InterfaceFieldArgumentTypeModification) + argumentTypeModification.size() == 1 + argumentTypeModification[0].fieldName == "issuesById" + argumentTypeModification[0].argumentName == "ids" + argumentTypeModification[0].oldType == "[ID!]" + argumentTypeModification[0].newType == "[ID!]!" 
+ } + + def "argument removed and added on renamed directive"() { + given: + def oldSdl = ''' + directive @dog(name: String) on FIELD_DEFINITION + type Query { + pet: String @dog + } + ''' + def newSdl = ''' + directive @cat(names: [String]) on FIELD_DEFINITION + type Query { + pet: String @cat + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.directiveDifferences["cat"] instanceof DirectiveModification + def catChanges = changes.directiveDifferences["cat"] as DirectiveModification + catChanges.oldName == "dog" + catChanges.newName == "cat" + catChanges.isNameChanged() + catChanges.details.size() == 2 + + def argumentRename = catChanges.getDetails(DirectiveArgumentRename) + argumentRename.size() == 1 + argumentRename[0].oldName == "name" + argumentRename[0].newName == "names" + + def argumentTypeModification = catChanges.getDetails(DirectiveArgumentTypeModification) + argumentTypeModification.size() == 1 + argumentTypeModification[0].argumentName == "names" + argumentTypeModification[0].oldType == "String" + argumentTypeModification[0].newType == "[String]" + } + + + def "object field argument type and default value changed"() { + given: + def oldSdl = ''' + type Query { + echo(message: String! = "Hello World"): String + } + ''' + def newSdl = ''' + type Query { + echo(message: ID! = "1"): String + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.objectDifferences["Query"] instanceof ObjectModification + def queryChanges = changes.objectDifferences["Query"] as ObjectModification + queryChanges.details.size() == 2 + + def argumentTypeModification = queryChanges.getDetails(ObjectFieldArgumentTypeModification) + argumentTypeModification.size() == 1 + argumentTypeModification[0].fieldName == "echo" + argumentTypeModification[0].argumentName == "message" + argumentTypeModification[0].oldType == "String!" + argumentTypeModification[0].newType == "ID!" 
+ + def defaultValueModification = queryChanges.getDetails(ObjectFieldArgumentDefaultValueModification) + defaultValueModification.size() == 1 + defaultValueModification[0].fieldName == "echo" + defaultValueModification[0].argumentName == "message" + defaultValueModification[0].oldValue == '"Hello World"' + defaultValueModification[0].newValue == '"1"' + } + + def "interface field argument type and default value changed"() { + given: + def oldSdl = ''' + type Query { + echo: EchoProvider + } + interface EchoProvider { + send(message: String! = "Hello World"): String + } + ''' + def newSdl = ''' + type Query { + echo: EchoProvider + } + interface EchoProvider { + send(message: ID! = "1"): String + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.interfaceDifferences["EchoProvider"] instanceof InterfaceModification + def echoProviderChanges = changes.interfaceDifferences["EchoProvider"] as InterfaceModification + echoProviderChanges.details.size() == 2 + + def argumentTypeModification = echoProviderChanges.getDetails(InterfaceFieldArgumentTypeModification) + argumentTypeModification.size() == 1 + argumentTypeModification[0].fieldName == "send" + argumentTypeModification[0].argumentName == "message" + argumentTypeModification[0].oldType == "String!" + argumentTypeModification[0].newType == "ID!" 
+ + def defaultValueModification = echoProviderChanges.getDetails(InterfaceFieldArgumentDefaultValueModification) + defaultValueModification.size() == 1 + defaultValueModification[0].fieldName == "send" + defaultValueModification[0].argumentName == "message" + defaultValueModification[0].oldValue == '"Hello World"' + defaultValueModification[0].newValue == '"1"' + } + + def "directive argument type and default value changed"() { + given: + def oldSdl = ''' + directive @deleteBy(date: String = "+1 week") on FIELD_DEFINITION + type Query { + echo: String @deleteBy + } + ''' + def newSdl = ''' + directive @deleteBy(date: Int = 1000) on FIELD_DEFINITION + type Query { + echo: String @deleteBy + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.directiveDifferences["deleteBy"] instanceof DirectiveModification + def deleteByChanges = changes.directiveDifferences["deleteBy"] as DirectiveModification + deleteByChanges.details.size() == 2 + + def argumentTypeModification = deleteByChanges.getDetails(DirectiveArgumentTypeModification) + argumentTypeModification.size() == 1 + argumentTypeModification[0].argumentName == "date" + argumentTypeModification[0].oldType == "String" + argumentTypeModification[0].newType == "Int" + + def defaultValueModification = deleteByChanges.getDetails(DirectiveArgumentDefaultValueModification) + defaultValueModification.size() == 1 + defaultValueModification[0].argumentName == "date" + defaultValueModification[0].oldValue == '"+1 week"' + defaultValueModification[0].newValue == '1000' + } + + def "object field with argument removed and similarly named argument added"() { + given: + def oldSdl = """ + type Query { + issues: IssueQuery + } + type IssueQuery { + issues(id: [ID!]): [Issue] + issuesById: [Issue] + } + type Issue { + id: ID! + } + """ + def newSdl = ''' + type Query { + issues: IssueQuery + } + type IssueQuery { + issuesById(ids: [ID!]!): [Issue] + } + type Issue { + id: ID! 
+ } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.objectDifferences["IssueQuery"] instanceof ObjectModification + def issueQueryChanges = changes.objectDifferences["IssueQuery"] as ObjectModification + issueQueryChanges.details.size() == 2 + + def fieldDeletion = issueQueryChanges.getDetails(ObjectFieldDeletion) + fieldDeletion.size() == 1 + fieldDeletion[0].name == "issues" + + def fieldArgumentAddition = issueQueryChanges.getDetails(ObjectFieldArgumentAddition) + fieldArgumentAddition.size() == 1 + fieldArgumentAddition[0].fieldName == "issuesById" + fieldArgumentAddition[0].name == "ids" + } + + def "interface field with argument removed and similarly named argument added"() { + given: + def oldSdl = """ + type Query { + issues: IssueQuery + } + interface IssueQuery { + issues(id: [ID!]): [Issue] + issuesById: [Issue] + } + type Issue { + id: ID! + } + """ + def newSdl = ''' + type Query { + issues: IssueQuery + } + interface IssueQuery { + issuesById(ids: [ID!]!): [Issue] + } + type Issue { + id: ID! 
+ } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.interfaceDifferences["IssueQuery"] instanceof InterfaceModification + def issueQueryChanges = changes.interfaceDifferences["IssueQuery"] as InterfaceModification + issueQueryChanges.details.size() == 2 + + def fieldDeletion = issueQueryChanges.getDetails(InterfaceFieldDeletion) + fieldDeletion.size() == 1 + fieldDeletion[0].name == "issues" + + def fieldArgumentAddition = issueQueryChanges.getDetails(InterfaceFieldArgumentAddition) + fieldArgumentAddition.size() == 1 + fieldArgumentAddition[0].fieldName == "issuesById" + fieldArgumentAddition[0].name == "ids" + } + + def "directive removed and similarly named argument added"() { + given: + def oldSdl = ''' + directive @dog(name: String) on FIELD_DEFINITION + directive @cat on FIELD_DEFINITION + type Query { + pet: String + } + ''' + def newSdl = ''' + directive @cat(names: String) on FIELD_DEFINITION + type Query { + pet: String + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.directiveDifferences["dog"] instanceof DirectiveDeletion + def dogChanges = changes.directiveDifferences["dog"] as DirectiveDeletion + dogChanges.name == "dog" + + changes.directiveDifferences["cat"] instanceof DirectiveModification + def catChanges = changes.directiveDifferences["cat"] as DirectiveModification + !catChanges.isNameChanged() + catChanges.oldName == catChanges.newName + catChanges.newName == "cat" + catChanges.details.size() == 1 + + def argumentAddition = catChanges.getDetails(DirectiveArgumentAddition) + argumentAddition.size() == 1 + argumentAddition[0].name == "names" + } + + def "change object description"() { + given: + def oldSdl = ''' + "HELLO" + type Query { + pet: String + } + ''' + def newSdl = ''' + type Query { + pet: String + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.objectDifferences.isEmpty() + } + + def "change object field argument description"() { + given: + def 
oldSdl = ''' + type Query { + pet( + age: Int + ): String + } + ''' + def newSdl = ''' + type Query { + pet( + "The age of the pet" + age: Int + ): String + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.objectDifferences.isEmpty() + } + + def "change interface description"() { + given: + def oldSdl = ''' + type Query { + pet: Pet + } + interface Pet { + name: String + } + ''' + def newSdl = ''' + type Query { + pet: Pet + } + "Hello World" + interface Pet { + name: String + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.interfaceDifferences.isEmpty() + } + + def "change union description"() { + given: + def oldSdl = ''' + type Query { + pet: Pet + } + union Pet = Dog | Cat + type Dog { + name: String + } + type Cat { + name: String + } + ''' + def newSdl = ''' + type Query { + pet: Pet + } + "----------------" + union Pet = Dog | Cat + type Dog { + name: String + } + type Cat { + name: String + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.unionDifferences.isEmpty() + } + + def "change input object and field description"() { + given: + def oldSdl = ''' + type Query { + pets(filter: PetFilter): [ID] + } + "Pet" + input PetFilter { + age: Int + } + ''' + def newSdl = ''' + type Query { + pets(filter: PetFilter): [ID] + } + "Only pets matching the filter will be returned" + input PetFilter { + "The age in years" + age: Int + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.inputObjectDifferences.isEmpty() + } + + def "change enum type and value description"() { + given: + def oldSdl = ''' + type Query { + pet(kind: PetKind): ID + } + enum PetKind { + "doggo" + DOG, + CAT, + } + ''' + def newSdl = ''' + type Query { + pet(kind: PetKind): ID + } + "The kind of pet" + enum PetKind { + DOG, + CAT, + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.enumDifferences.isEmpty() + } + + def "change scalar description"() { 
+ given: + def oldSdl = ''' + scalar Age + type Query { + pet(age: Age): ID + } + ''' + def newSdl = ''' + "Represents age in years" + scalar Age + type Query { + pet(age: Age): ID + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.scalarDifferences.isEmpty() + } + + def "change directive description"() { + given: + def oldSdl = ''' + directive @cat on FIELD_DEFINITION + type Query { + pet: String @cat + } + ''' + def newSdl = ''' + "A cat or something" + directive @cat on FIELD_DEFINITION + type Query { + pet: String @cat + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.directiveDifferences.isEmpty() + } + + def "traversal order puts field changes before arguments"() { + def objectOld = new Vertex(SchemaGraph.OBJECT, "target-1") + objectOld.add("name", "Obey") + def objectNew = new Vertex(SchemaGraph.OBJECT, "target-1") + objectNew.add("name", "Ob") + def changeObjectVertex = EditOperation.changeVertex( + "Change object", + objectOld, + objectNew, + ) + + def newField = new Vertex(SchemaGraph.FIELD, "target-1") + newField.add("name", "fried") + def insertNewFieldVertex = EditOperation.insertVertex( + "Insert new field", + Vertex.newIsolatedNode("source-isolated-Field-1"), + newField, + ) + + def newArgument = new Vertex(SchemaGraph.ARGUMENT, "target-1") + newArgument.add("name", "alone") + def insertNewArgumentVertex = EditOperation.insertVertex( + "Insert argument", + Vertex.newIsolatedNode("source-isolated-Argument-1"), + newArgument, + ) + + def insertNewFieldEdge = EditOperation.insertEdge( + "Insert Object -> Field Edge", + new Edge(objectNew, newField), + ) + + def insertNewArgumentEdge = EditOperation.insertEdge( + "Insert Field -> Argument Edge", + new Edge(newField, newArgument), + ) + + when: + def result = EditOperationAnalyzer.getTraversalOrder([ + insertNewArgumentVertex, + insertNewFieldEdge, + insertNewArgumentEdge, + changeObjectVertex, + insertNewFieldVertex, + ]) + + then: + result == 
[ + changeObjectVertex, + insertNewFieldVertex, + insertNewArgumentVertex, + insertNewFieldEdge, + insertNewArgumentEdge, + ] + } + + def "less fields in the renamed object"() { + given: + def oldSdl = ''' + type Query { + user(id: ID!): User + } + type User { + id: String + name: String + account: String + email: Boolean + age: Int + } + ''' + def newSdl = ''' + type Query { + account(id: ID!): Account + } + type Account { + id: String + name: String + yearsOld: Int + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.objectDifferences["User"] instanceof ObjectModification + def userModification = changes.objectDifferences["User"] as ObjectModification + userModification.isNameChanged() + userModification.oldName == "User" + userModification.newName == "Account" + + def deletions = userModification.getDetails(ObjectFieldDeletion) + deletions.size() == 2 + deletions.collect { it.name }.toSet() == ["account", "email"] as Set + + def rename = userModification.getDetails(ObjectFieldRename) + rename.size() == 1 + rename[0].oldName == "age" + rename[0].newName == "yearsOld" + } + + def "two possible mappings for object rename where one has less fields"() { + given: + def oldSdl = ''' + type Query { + user(id: ID!): User + } + type User { + id: String + name: String + account: String + email: String + age: Int + } + ''' + def newSdl = ''' + type Query { + account(id: ID!): Account + } + type Account { + yearsOld: Int + } + type Profile { + id: String + name: String + account: String + email: String + age: Int + } + ''' + + when: + def changes = calcDiff(oldSdl, newSdl) + + then: + changes.objectDifferences["Account"] instanceof ObjectAddition + + changes.objectDifferences["User"] instanceof ObjectModification + def userModification = changes.objectDifferences["User"] as ObjectModification + userModification.isNameChanged() + userModification.oldName == "User" + userModification.newName == "Profile" + + userModification.details.isEmpty() + } + 
EditOperationAnalysisResult calcDiff( String oldSdl, String newSdl diff --git a/src/test/groovy/graphql/schema/fetching/LambdaFetchingSupportTest.groovy b/src/test/groovy/graphql/schema/fetching/LambdaFetchingSupportTest.groovy index 9343af806f..7dd5e4cb93 100644 --- a/src/test/groovy/graphql/schema/fetching/LambdaFetchingSupportTest.groovy +++ b/src/test/groovy/graphql/schema/fetching/LambdaFetchingSupportTest.groovy @@ -182,18 +182,15 @@ class LambdaFetchingSupportTest extends Specification { def getter = LambdaFetchingSupport.createGetter(customClass, "hello") then: + // with Java 9+ we can get access to methods across class loaders getter.isPresent() - try { - getter.get().apply(targetObject) - assert false, "We expect this to fail on Java 8 without access to MethodHandles.privateLookupIn" - } catch (LinkageError | ClassCastException ignored) { - } + def value = getter.get().apply(targetObject) + value == "world" - // show that a DF can still be used access this because of the reflection fallback - // in the future it will work via MethodHandles.privateLookupIn + // show that a DF can be used when: def ageDF = PropertyDataFetcher.fetching("hello") - def value = ageDF.get(fld("hello"), targetObject, { -> null }) + value = ageDF.get(fld("hello"), targetObject, { -> null }) then: value == "world" } diff --git a/src/test/groovy/graphql/schema/idl/SchemaGeneratorDirectiveHelperTest.groovy b/src/test/groovy/graphql/schema/idl/SchemaGeneratorDirectiveHelperTest.groovy index e54df617c3..0570df2770 100644 --- a/src/test/groovy/graphql/schema/idl/SchemaGeneratorDirectiveHelperTest.groovy +++ b/src/test/groovy/graphql/schema/idl/SchemaGeneratorDirectiveHelperTest.groovy @@ -325,9 +325,14 @@ class SchemaGeneratorDirectiveHelperTest extends Specification { @Override GraphQLFieldDefinition onField(SchemaDirectiveWiringEnvironment directiveEnv) { GraphQLFieldDefinition field = directiveEnv.getElement() + def container = directiveEnv.fieldsContainer + if (!container instanceof 
GraphQLObjectType) { + return field + } // // we use the non shortcut path to the data fetcher here so prove it still works - def fetcher = directiveEnv.getCodeRegistry().getDataFetcher(directiveEnv.fieldsContainer, field) + + def fetcher = directiveEnv.getCodeRegistry().getDataFetcher(container as GraphQLObjectType, field) def newFetcher = wrapDataFetcher(fetcher, { dfEnv, value -> def directiveName = directiveEnv.appliedDirective.name if (directiveName == "uppercase") { @@ -484,10 +489,15 @@ class SchemaGeneratorDirectiveHelperTest extends Specification { @Override GraphQLFieldDefinition onField(SchemaDirectiveWiringEnvironment environment) { GraphQLFieldDefinition element = environment.getElement() - return wrapField(environment.fieldsContainer, element, environment.getBuildContext(), environment.getCodeRegistry()) + + def container = environment.fieldsContainer + if (! container instanceof GraphQLObjectType) { + return element + } + return wrapField(container as GraphQLObjectType, element, environment.getBuildContext(), environment.getCodeRegistry()) } - private GraphQLFieldDefinition wrapField(GraphQLFieldsContainer parentType, GraphQLFieldDefinition field, Map contextMap, GraphQLCodeRegistry.Builder codeRegistry) { + private GraphQLFieldDefinition wrapField(GraphQLObjectType parentType, GraphQLFieldDefinition field, Map contextMap, GraphQLCodeRegistry.Builder codeRegistry) { def originalFetcher = codeRegistry.getDataFetcher(parentType, field) String key = mkFieldKey(parentType.getName(), field.getName()) diff --git a/src/test/groovy/graphql/schema/visitor/GraphQLSchemaVisitorTest.groovy b/src/test/groovy/graphql/schema/visitor/GraphQLSchemaVisitorTest.groovy new file mode 100644 index 0000000000..9629920db6 --- /dev/null +++ b/src/test/groovy/graphql/schema/visitor/GraphQLSchemaVisitorTest.groovy @@ -0,0 +1,372 @@ +package graphql.schema.visitor + +import graphql.Assert +import graphql.TestUtil +import graphql.schema.GraphQLAppliedDirective +import 
graphql.schema.GraphQLAppliedDirectiveArgument +import graphql.schema.GraphQLArgument +import graphql.schema.GraphQLDirective +import graphql.schema.GraphQLEnumType +import graphql.schema.GraphQLEnumValueDefinition +import graphql.schema.GraphQLFieldDefinition +import graphql.schema.GraphQLInputObjectField +import graphql.schema.GraphQLInputObjectType +import graphql.schema.GraphQLInterfaceType +import graphql.schema.GraphQLModifiedType +import graphql.schema.GraphQLNamedSchemaElement +import graphql.schema.GraphQLObjectType +import graphql.schema.GraphQLScalarType +import graphql.schema.GraphQLSchemaElement +import graphql.schema.GraphQLTypeUtil +import graphql.schema.GraphQLUnionType +import graphql.schema.SchemaTransformer +import graphql.schema.SchemaTraverser +import spock.lang.Specification + +import static graphql.schema.FieldCoordinates.coordinates + +class GraphQLSchemaVisitorTest extends Specification { + + + def toNames(GraphQLSchemaElement start, List elements) { + def l = elements.collect({ + return GraphQLTypeUtil.simplePrint(it) + }) + l.add(0, GraphQLTypeUtil.simplePrint((start))) + return l + } + + class CapturingSchemaVisitor implements GraphQLSchemaVisitor { + + Map> pathsToElement = [:] + def types = [:] + def leafs = [:] + def schema + + @Override + GraphQLSchemaTraversalControl visitSchemaElement(GraphQLSchemaElement schemaElement, SchemaElementVisitorEnvironment environment) { + this.schema = environment.getSchema() + def leadingElements = environment.getLeadingElements() + pathsToElement.put(schemaElement, leadingElements) + return environment.ok() + } + + @Override + GraphQLSchemaTraversalControl visitFieldDefinition(GraphQLFieldDefinition fieldDefinition, FieldDefinitionVisitorEnvironment environment) { + def key = environment.container.getName() + "." 
+ fieldDefinition.getName() + ":" + environment.getUnwrappedType().getName() + leafs[key] = fieldDefinition + return environment.ok() + } + + @Override + GraphQLSchemaTraversalControl visitAppliedDirective(GraphQLAppliedDirective appliedDirective, AppliedDirectiveVisitorEnvironment environment) { + def key = "@" + environment.container.getName() + "." + appliedDirective.getName() + leafs[key] = appliedDirective + return environment.ok() + } + + @Override + GraphQLSchemaTraversalControl visitAppliedDirectiveArgument(GraphQLAppliedDirectiveArgument appliedDirectiveArgument, AppliedDirectiveArgumentVisitorEnvironment environment) { + def key = "@" + environment.container.getName() + "." + appliedDirectiveArgument.getName() + ":" + environment.getUnwrappedType().getName() + leafs[key] = appliedDirectiveArgument + return environment.ok() + } + + @Override + GraphQLSchemaTraversalControl visitArgument(GraphQLArgument argument, ArgumentVisitorEnvironment environment) { + def key = environment.container.getName() + "." + argument.getName() + ":" + environment.getUnwrappedType().getName() + leafs[key] = argument + return environment.ok() + } + + @Override + GraphQLSchemaTraversalControl visitDirective(GraphQLDirective directive, DirectiveVisitorEnvironment environment) { + leafs[directive.getName()] = directive + return environment.ok() + } + + @Override + GraphQLSchemaTraversalControl visitEnumType(GraphQLEnumType enumType, EnumTypeVisitorEnvironment environment) { + types[enumType.getName()] = enumType + return environment.ok() + } + + @Override + GraphQLSchemaTraversalControl visitEnumValueDefinition(GraphQLEnumValueDefinition enumValueDefinition, EnumValueDefinitionVisitorEnvironment environment) { + leafs[environment.container.getName() + "." 
+ enumValueDefinition.getName()] = enumValueDefinition + return environment.ok() + } + + @Override + GraphQLSchemaTraversalControl visitInputObjectField(GraphQLInputObjectField inputObjectField, InputObjectFieldVisitorEnvironment environment) { + def key = environment.container.getName() + "." + inputObjectField.getName() + ":" + environment.getUnwrappedType().getName() + leafs[key] = inputObjectField + return environment.ok() + } + + @Override + GraphQLSchemaTraversalControl visitInputObjectType(GraphQLInputObjectType inputObjectType, InputObjectTypeVisitorEnvironment environment) { + types[inputObjectType.getName()] = inputObjectType + return environment.ok() + } + + @Override + GraphQLSchemaTraversalControl visitInterfaceType(GraphQLInterfaceType interfaceType, InterfaceTypeVisitorEnvironment environment) { + types[interfaceType.getName()] = interfaceType + return environment.ok() + } + + @Override + GraphQLSchemaTraversalControl visitScalarType(GraphQLScalarType scalarType, ScalarTypeVisitorEnvironment environment) { + types[scalarType.getName()] = scalarType + return environment.ok() + } + + @Override + GraphQLSchemaTraversalControl visitUnionType(GraphQLUnionType unionType, UnionTypeVisitorEnvironment environment) { + types[unionType.getName()] = unionType + return environment.ok() + } + + @Override + GraphQLSchemaTraversalControl visitObjectType(GraphQLObjectType objectType, ObjectVisitorEnvironment environment) { + types[objectType.getName()] = objectType + return environment.ok() + } + } + + def uberSDL = ''' + directive @directive(directiveArgument : String) on FIELD_DEFINITION + + type Query { + object(arg : InputObjectTypeA) : [ObjectTypeA!]! @directive(directiveArgument : "directiveArgument") + } + + input InputObjectTypeA { + fieldA : String! + } + + input InputObjectTypeB { + fieldB : String + } + + interface InterfaceTypeA { + fieldA : [String!]! + } + + type ObjectTypeA implements InterfaceTypeA { + fieldA : [String!]! + fieldAToX : [ObjectTypeX!]! 
+ } + + type ObjectTypeB { + fieldB : String + } + + type ObjectTypeX { + fieldX(arg : InputObjectTypeA) : ObjectTypeX # self referential + } + + union UnionTypeA = ObjectTypeA | ObjectTypeB + + enum EnumTypeA { + enumDefA + enumDefB + } + ''' + + def schema = TestUtil.schema(uberSDL) + + + def "will visit things"() { + + def visitor = new CapturingSchemaVisitor() + + when: + new SchemaTraverser().depthFirstFullSchema(visitor.toTypeVisitor(), this.schema) + + then: + + visitor.schema == this.schema + visitor.types["Query"] instanceof GraphQLObjectType + + visitor.leafs["directive"] instanceof GraphQLDirective + visitor.leafs["directive.directiveArgument:String"] instanceof GraphQLArgument + + visitor.leafs["@object.directive"] instanceof GraphQLAppliedDirective + visitor.leafs["@directive.directiveArgument:String"] instanceof GraphQLAppliedDirectiveArgument + + visitor.types["EnumTypeA"] instanceof GraphQLEnumType + visitor.leafs["EnumTypeA.enumDefA"] instanceof GraphQLEnumValueDefinition + visitor.leafs["EnumTypeA.enumDefB"] instanceof GraphQLEnumValueDefinition + + visitor.types["InputObjectTypeA"] instanceof GraphQLInputObjectType + visitor.types["InputObjectTypeB"] instanceof GraphQLInputObjectType + visitor.leafs["InputObjectTypeA.fieldA:String"] instanceof GraphQLInputObjectField + + visitor.types["InterfaceTypeA"] instanceof GraphQLInterfaceType + visitor.leafs["InterfaceTypeA.fieldA:String"] instanceof GraphQLFieldDefinition + + visitor.types["ObjectTypeA"] instanceof GraphQLObjectType + visitor.types["ObjectTypeB"] instanceof GraphQLObjectType + visitor.leafs["ObjectTypeA.fieldA:String"] instanceof GraphQLFieldDefinition + + visitor.types["String"] instanceof GraphQLScalarType + + visitor.types["UnionTypeA"] instanceof GraphQLUnionType + + // schema paths + + + def fieldX = schema.getFieldDefinition(coordinates("ObjectTypeX", "fieldX")) + def fieldXArg = fieldX.getArgument("arg") + toNames(fieldXArg, visitor.pathsToElement[fieldXArg]) == [ + "arg", + 
"fieldX", "ObjectTypeX", "ObjectTypeX!", "[ObjectTypeX!]", "[ObjectTypeX!]!", + "fieldAToX", "ObjectTypeA", "ObjectTypeA!", "[ObjectTypeA!]", "[ObjectTypeA!]!", + "object", "Query"] + + def argInputType = fieldXArg.getType() as GraphQLInputObjectType + def inputFieldA = argInputType.getFieldDefinition("fieldA") + + toNames(inputFieldA, visitor.pathsToElement[inputFieldA]) == [ + "fieldA", "InputObjectTypeA", "arg", + "fieldX", "ObjectTypeX", "ObjectTypeX!", "[ObjectTypeX!]", "[ObjectTypeX!]!", + "fieldAToX", "ObjectTypeA", "ObjectTypeA!", "[ObjectTypeA!]", "[ObjectTypeA!]!", + "object", "Query"] + + } + + def "can transform schemas via this pattern"() { + def sdl = """ + type Query { + f : xfoo + } + + type xfoo { + bar : xbar + } + + type xbar { + baz : String + } + + """ + + def schema = TestUtil.schema(sdl) + + def schemaVisitor = new GraphQLSchemaVisitor() { + + @Override + GraphQLSchemaTraversalControl visitObjectType(GraphQLObjectType objectType, GraphQLSchemaVisitor.ObjectVisitorEnvironment environment) { + if (objectType.name.startsWith("x")) { + def newName = objectType.name.replaceFirst("x", "").capitalize() + def newType = objectType.transform { it.name(newName) } + return environment.changeNode(newType) + } + return environment.ok(); + } + } + + when: + def newSchema = new SchemaTransformer().transform(schema, schemaVisitor.toTypeVisitor()) + then: + newSchema.getType("Foo") instanceof GraphQLObjectType + newSchema.getType("Bar") instanceof GraphQLObjectType + } + + def "can change things at the schema element level and its does not continue"() { + def sdl = """ + type Query { + f : xfoo + } + + type xfoo { + bar : xbar + } + + type xbar { + baz : String + } + + """ + + def schema = TestUtil.schema(sdl) + + def schemaVisitor = new GraphQLSchemaVisitor() { + + @Override + GraphQLSchemaTraversalControl visitSchemaElement(GraphQLSchemaElement schemaElement, GraphQLSchemaVisitor.SchemaElementVisitorEnvironment environment) { + if (schemaElement instanceof 
GraphQLObjectType) { + GraphQLObjectType objectType = schemaElement + if (objectType.name.startsWith("x")) { + def newName = objectType.name.replaceFirst("x", "y").capitalize() + def newType = objectType.transform { it.name(newName) } + return environment.changeNode(newType) + } + } + return environment.ok(); + } + + @Override + GraphQLSchemaTraversalControl visitObjectType(GraphQLObjectType objectType, GraphQLSchemaVisitor.ObjectVisitorEnvironment environment) { + // this wont be called if we changed it + if (objectType.name.startsWith("x")) { + assert false, "This should not be called for X object types" + } + return environment.ok(); + } + } + + when: + def newSchema = new SchemaTransformer().transform(schema, schemaVisitor.toTypeVisitor()) + then: + newSchema.getType("Yfoo") instanceof GraphQLObjectType + newSchema.getType("Ybar") instanceof GraphQLObjectType + } + + def "can quit visitation"() { + + def visited = [] + def schemaVisitor = new GraphQLSchemaVisitor() { + + @Override + GraphQLSchemaTraversalControl visitSchemaElement(GraphQLSchemaElement schemaElement, GraphQLSchemaVisitor.SchemaElementVisitorEnvironment environment) { + def name = GraphQLTypeUtil.simplePrint(schemaElement) + if (name.toLowerCase().startsWith("x")) { + visited.add(name) + if (name.contains("Quit")) { + return environment.quit() + } + } + return environment.ok() + } + } + when: // test quit + + def sdl = """ + type Query { + xField(xQuit : XInputType) : XObjectType + } + + type XObjectType { + xObj(xArg : String) : XObjectType2 + } + + type XObjectType2 { + xObj2 : XObjectType2 + } + + input XInputType { + xinA : String + } + + """ + + def schema = TestUtil.schema(sdl) + new SchemaTransformer().transform(schema,schemaVisitor.toTypeVisitor()) + + then: + visited == ["xField", "xQuit",] + } +} diff --git a/src/test/groovy/readme/ExecutionExamples.java b/src/test/groovy/readme/ExecutionExamples.java index 0f8f01c7fe..a18197a63c 100644 --- 
a/src/test/groovy/readme/ExecutionExamples.java +++ b/src/test/groovy/readme/ExecutionExamples.java @@ -141,13 +141,16 @@ private void exceptionHandler() { DataFetcherExceptionHandler handler = new DataFetcherExceptionHandler() { @Override - public DataFetcherExceptionHandlerResult onException(DataFetcherExceptionHandlerParameters handlerParameters) { + public CompletableFuture handleException(DataFetcherExceptionHandlerParameters handlerParameters) { // // do your custom handling here. The parameters have all you need GraphQLError buildCustomError = buildCustomError(handlerParameters); - return DataFetcherExceptionHandlerResult.newResult() - .error(buildCustomError).build(); + DataFetcherExceptionHandlerResult exceptionResult = DataFetcherExceptionHandlerResult + .newResult() + .error(buildCustomError) + .build(); + return CompletableFuture.completedFuture(exceptionResult); } }; ExecutionStrategy executionStrategy = new AsyncExecutionStrategy(handler); diff --git a/src/test/java/benchmark/BenchMark.java b/src/test/java/benchmark/BenchMark.java index 626fafc87f..29875d7841 100644 --- a/src/test/java/benchmark/BenchMark.java +++ b/src/test/java/benchmark/BenchMark.java @@ -1,11 +1,13 @@ package benchmark; +import graphql.Assert; +import graphql.ExecutionResult; import graphql.GraphQL; import graphql.execution.ExecutionStepInfo; -import graphql.execution.instrumentation.tracing.TracingInstrumentation; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import graphql.schema.GraphQLSchema; +import graphql.schema.TypeResolver; import graphql.schema.idl.RuntimeWiring; import graphql.schema.idl.SchemaGenerator; import graphql.schema.idl.SchemaParser; @@ -24,10 +26,10 @@ import static graphql.schema.idl.TypeRuntimeWiring.newTypeWiring; /** - * See https://github.com/openjdk/jmh/tree/master/jmh-samples/src/main/java/org/openjdk/jmh/samples/ for more samples - * on what you can do with JMH + * See this link for more samples + * on what you can 
do with JMH. *

- * You MUST have the JMH plugin for IDEA in place for this to work : https://github.com/artyushov/idea-jmh-plugin + * You MUST have the JMH plugin for IDEA in place for this to work : idea-jmh-plugin *

* Install it and then just hit "Run" on a certain benchmark method */ @@ -36,44 +38,41 @@ public class BenchMark { private static final int NUMBER_OF_FRIENDS = 10 * 100; - - static GraphQL graphQL = buildGraphQL(); + private static final GraphQL GRAPHQL = buildGraphQL(); @Benchmark @BenchmarkMode(Mode.Throughput) @OutputTimeUnit(TimeUnit.SECONDS) - public void benchMarkSimpleQueriesThroughput() { - executeQuery(); + public ExecutionResult benchMarkSimpleQueriesThroughput() { + return executeQuery(); } @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MILLISECONDS) - public void benchMarkSimpleQueriesAvgTime() { - executeQuery(); + public ExecutionResult benchMarkSimpleQueriesAvgTime() { + return executeQuery(); } - public static void executeQuery() { + public static ExecutionResult executeQuery() { String query = "{ hero { name friends { name friends { name } } } }"; - graphQL.execute(query); + return GRAPHQL.execute(query); } private static GraphQL buildGraphQL() { TypeDefinitionRegistry definitionRegistry = new SchemaParser().parse(BenchmarkUtils.loadResource("starWarsSchema.graphqls")); - DataFetcher heroDataFetcher = environment -> CharacterDTO.mkCharacter(environment, "r2d2", NUMBER_OF_FRIENDS); + DataFetcher heroDataFetcher = environment -> CharacterDTO.mkCharacter(environment, "r2d2", NUMBER_OF_FRIENDS); + TypeResolver typeResolver = env -> env.getSchema().getObjectType("Human"); RuntimeWiring runtimeWiring = RuntimeWiring.newRuntimeWiring() - .type( - newTypeWiring("QueryType").dataFetcher("hero", heroDataFetcher)) - .type(newTypeWiring("Character").typeResolver( - env -> env.getSchema().getObjectType("Human") - )) + .type(newTypeWiring("QueryType").dataFetcher("hero", heroDataFetcher)) + .type(newTypeWiring("Character").typeResolver(typeResolver)) .build(); + GraphQLSchema graphQLSchema = new SchemaGenerator().makeExecutableSchema(definitionRegistry, runtimeWiring); return GraphQL.newGraphQL(graphQLSchema) - .instrumentation(new 
TracingInstrumentation()) .build(); } @@ -96,7 +95,9 @@ public List getFriends() { public static CharacterDTO mkCharacter(DataFetchingEnvironment environment, String name, int friendCount) { Object sideEffect = environment.getArgument("episode"); + Assert.assertNull(sideEffect); ExecutionStepInfo anotherSideEffect = environment.getExecutionStepInfo(); + Assert.assertNotNull(anotherSideEffect); List friends = new ArrayList<>(friendCount); for (int i = 0; i < friendCount; i++) { friends.add(mkCharacter(environment, "friend" + i, 0));