Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import graphql.execution.ValuesResolver;
import graphql.language.Directive;
import graphql.language.NodeUtil;
import org.jetbrains.annotations.Nullable;

import java.util.List;
import java.util.Locale;
Expand All @@ -20,7 +21,7 @@ public class IncrementalUtils {
// Private constructor: prevents instantiation — the enclosing IncrementalUtils
// class (see hunk header above) exposes only static helpers.
private IncrementalUtils() {
}

public static <T> T createDeferredExecution(
public static @Nullable <T> T createDeferredExecution(
Map<String, Object> variables,
List<Directive> directives,
Function<String, T> builderFunction
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -457,6 +457,8 @@ private static class ExecutableNormalizedOperationFactoryImpl {
private int fieldCount = 0;
private int maxDepthSeen = 0;

private final List<ExecutableNormalizedField> rootEnfs = new ArrayList<>();

private ExecutableNormalizedOperationFactoryImpl(
GraphQLSchema graphQLSchema,
OperationDefinition operationDefinition,
Expand All @@ -477,34 +479,16 @@ private ExecutableNormalizedOperationFactoryImpl(
* Creates a new ExecutableNormalizedOperation for the provided query
*/
private ExecutableNormalizedOperation createNormalizedQueryImpl() {
GraphQLObjectType rootType = SchemaUtil.getOperationRootType(graphQLSchema, operationDefinition);

CollectNFResult collectFromOperationResult = collectFromOperation(rootType);

for (ExecutableNormalizedField topLevel : collectFromOperationResult.children) {
ImmutableList<FieldAndAstParent> fieldAndAstParents = collectFromOperationResult.normalizedFieldToAstFields.get(topLevel);
MergedField mergedField = newMergedField(fieldAndAstParents);
buildEnfsRecursively(null, null, 0);

captureMergedField(topLevel, mergedField);

updateFieldToNFMap(topLevel, fieldAndAstParents);
updateCoordinatedToNFMap(topLevel);

int depthSeen = buildFieldWithChildren(
topLevel,
fieldAndAstParents,
1);
maxDepthSeen = Math.max(maxDepthSeen, depthSeen);
}
// getPossibleMergerList
for (PossibleMerger possibleMerger : possibleMergerList) {
List<ExecutableNormalizedField> childrenWithSameResultKey = possibleMerger.parent.getChildrenWithSameResultKey(possibleMerger.resultKey);
ENFMerger.merge(possibleMerger.parent, childrenWithSameResultKey, graphQLSchema, options.deferSupport);
}
return new ExecutableNormalizedOperation(
operationDefinition.getOperation(),
operationDefinition.getName(),
new ArrayList<>(collectFromOperationResult.children),
new ArrayList<>(rootEnfs),
fieldToNormalizedField.build(),
normalizedFieldToMergedField.build(),
normalizedFieldToQueryDirectives.build(),
Expand All @@ -521,32 +505,76 @@ private void captureMergedField(ExecutableNormalizedField enf, MergedField merge
normalizedFieldToMergedField.put(enf, mergedFld);
}

private int buildFieldWithChildren(ExecutableNormalizedField executableNormalizedField,
ImmutableList<FieldAndAstParent> fieldAndAstParents,
int curLevel) {
checkMaxDepthExceeded(curLevel);
private void buildEnfsRecursively(@Nullable ExecutableNormalizedField executableNormalizedField,
@Nullable ImmutableList<CollectedField> fieldAndAstParents,
int curLevel) {
if (this.maxDepthSeen < curLevel) {
this.maxDepthSeen = curLevel;
checkMaxDepthExceeded(curLevel);
}
Set<GraphQLObjectType> possibleObjects;
List<CollectedField> collectedFields;

CollectNFResult nextLevel = collectFromMergedField(executableNormalizedField, fieldAndAstParents, curLevel + 1);
// special handling for the root selection Set
if (executableNormalizedField == null) {
GraphQLObjectType rootType = SchemaUtil.getOperationRootType(graphQLSchema, operationDefinition);
possibleObjects = ImmutableSet.of(rootType);
collectedFields = new ArrayList<>();
collectFromSelectionSet(operationDefinition.getSelectionSet(), collectedFields, rootType, possibleObjects, null);
} else {
List<GraphQLFieldDefinition> fieldDefs = executableNormalizedField.getFieldDefinitions(graphQLSchema);
possibleObjects = resolvePossibleObjects(fieldDefs);
if (possibleObjects.isEmpty()) {
return;
}
collectedFields = new ArrayList<>();
for (CollectedField fieldAndAstParent : fieldAndAstParents) {
if (fieldAndAstParent.field.getSelectionSet() == null) {
continue;
}
// the AST parent comes from the previous collect from selection set call
// and is the type to which the field belongs (the container type of the field) and output type
// of the field needs to be determined based on the field name
GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDef(graphQLSchema, fieldAndAstParent.astTypeCondition, fieldAndAstParent.field.getName());
// it must a composite type, because the field has a selection set
GraphQLCompositeType selectionSetType = (GraphQLCompositeType) unwrapAll(fieldDefinition.getType());
this.collectFromSelectionSet(fieldAndAstParent.field.getSelectionSet(),
collectedFields,
selectionSetType,
possibleObjects,
null
);
}
}

int maxDepthSeen = curLevel;
for (ExecutableNormalizedField childENF : nextLevel.children) {
executableNormalizedField.addChild(childENF);
ImmutableList<FieldAndAstParent> childFieldAndAstParents = nextLevel.normalizedFieldToAstFields.get(childENF);
Map<String, List<CollectedField>> fieldsByName = fieldsByResultKey(collectedFields);
ImmutableList.Builder<ExecutableNormalizedField> resultNFs = ImmutableList.builder();
ImmutableListMultimap.Builder<ExecutableNormalizedField, CollectedField> normalizedFieldToAstFields = ImmutableListMultimap.builder();
createNFs(resultNFs, fieldsByName, normalizedFieldToAstFields, curLevel + 1, executableNormalizedField);

ImmutableList<ExecutableNormalizedField> nextLevelChildren = resultNFs.build();
ImmutableListMultimap<ExecutableNormalizedField, CollectedField> nextLevelNormalizedFieldToAstFields = normalizedFieldToAstFields.build();

for (ExecutableNormalizedField childENF : nextLevelChildren) {
if (executableNormalizedField == null) {
// all root ENFs don't have a parent, but are collected in the rootEnfs list
rootEnfs.add(childENF);
} else {
executableNormalizedField.addChild(childENF);
}
ImmutableList<CollectedField> childFieldAndAstParents = nextLevelNormalizedFieldToAstFields.get(childENF);

MergedField mergedField = newMergedField(childFieldAndAstParents);
captureMergedField(childENF, mergedField);

updateFieldToNFMap(childENF, childFieldAndAstParents);
updateCoordinatedToNFMap(childENF);

int depthSeen = buildFieldWithChildren(childENF,
// recursive call
buildEnfsRecursively(childENF,
childFieldAndAstParents,
curLevel + 1);
maxDepthSeen = Math.max(maxDepthSeen, depthSeen);

checkMaxDepthExceeded(maxDepthSeen);
}
return maxDepthSeen;
}

private void checkMaxDepthExceeded(int depthSeen) {
Expand All @@ -555,13 +583,13 @@ private void checkMaxDepthExceeded(int depthSeen) {
}
}

private static MergedField newMergedField(ImmutableList<FieldAndAstParent> fieldAndAstParents) {
// Builds a MergedField from the AST Field node carried by each CollectedField.
// NOTE(review): signature changed in this PR from ImmutableList<FieldAndAstParent>
// to ImmutableList<CollectedField>; only the .field member is consumed here.
private static MergedField newMergedField(ImmutableList<CollectedField> fieldAndAstParents) {
return MergedField.newMergedField(map(fieldAndAstParents, fieldAndAstParent -> fieldAndAstParent.field)).build();
}

private void updateFieldToNFMap(ExecutableNormalizedField executableNormalizedField,
ImmutableList<FieldAndAstParent> mergedField) {
for (FieldAndAstParent astField : mergedField) {
ImmutableList<CollectedField> mergedField) {
for (CollectedField astField : mergedField) {
fieldToNormalizedField.put(astField.field, executableNormalizedField);
}
}
Expand All @@ -573,37 +601,6 @@ private void updateCoordinatedToNFMap(ExecutableNormalizedField topLevel) {
}
}

public CollectNFResult collectFromMergedField(ExecutableNormalizedField executableNormalizedField,
ImmutableList<FieldAndAstParent> mergedField,
int level) {
List<GraphQLFieldDefinition> fieldDefs = executableNormalizedField.getFieldDefinitions(graphQLSchema);
Set<GraphQLObjectType> possibleObjects = resolvePossibleObjects(fieldDefs);
if (possibleObjects.isEmpty()) {
return new CollectNFResult(ImmutableKit.emptyList(), ImmutableListMultimap.of());
}

List<CollectedField> collectedFields = new ArrayList<>();
for (FieldAndAstParent fieldAndAstParent : mergedField) {
if (fieldAndAstParent.field.getSelectionSet() == null) {
continue;
}
GraphQLFieldDefinition fieldDefinition = Introspection.getFieldDef(graphQLSchema, fieldAndAstParent.astParentType, fieldAndAstParent.field.getName());
GraphQLUnmodifiedType astParentType = unwrapAll(fieldDefinition.getType());
this.collectFromSelectionSet(fieldAndAstParent.field.getSelectionSet(),
collectedFields,
(GraphQLCompositeType) astParentType,
possibleObjects,
null
);
}
Map<String, List<CollectedField>> fieldsByName = fieldsByResultKey(collectedFields);
ImmutableList.Builder<ExecutableNormalizedField> resultNFs = ImmutableList.builder();
ImmutableListMultimap.Builder<ExecutableNormalizedField, FieldAndAstParent> normalizedFieldToAstFields = ImmutableListMultimap.builder();

createNFs(resultNFs, fieldsByName, normalizedFieldToAstFields, level, executableNormalizedField);

return new CollectNFResult(resultNFs.build(), normalizedFieldToAstFields.build());
}

private Map<String, List<CollectedField>> fieldsByResultKey(List<CollectedField> collectedFields) {
Map<String, List<CollectedField>> fieldsByName = new LinkedHashMap<>();
Expand All @@ -613,25 +610,10 @@ private Map<String, List<CollectedField>> fieldsByResultKey(List<CollectedField>
return fieldsByName;
}

public CollectNFResult collectFromOperation(GraphQLObjectType rootType) {


Set<GraphQLObjectType> possibleObjects = ImmutableSet.of(rootType);
List<CollectedField> collectedFields = new ArrayList<>();
collectFromSelectionSet(operationDefinition.getSelectionSet(), collectedFields, rootType, possibleObjects, null);
// group by result key
Map<String, List<CollectedField>> fieldsByName = fieldsByResultKey(collectedFields);
ImmutableList.Builder<ExecutableNormalizedField> resultNFs = ImmutableList.builder();
ImmutableListMultimap.Builder<ExecutableNormalizedField, FieldAndAstParent> normalizedFieldToAstFields = ImmutableListMultimap.builder();

createNFs(resultNFs, fieldsByName, normalizedFieldToAstFields, 1, null);

return new CollectNFResult(resultNFs.build(), normalizedFieldToAstFields.build());
}

private void createNFs(ImmutableList.Builder<ExecutableNormalizedField> nfListBuilder,
Map<String, List<CollectedField>> fieldsByName,
ImmutableListMultimap.Builder<ExecutableNormalizedField, FieldAndAstParent> normalizedFieldToAstFields,
ImmutableListMultimap.Builder<ExecutableNormalizedField, CollectedField> normalizedFieldToAstFields,
int level,
ExecutableNormalizedField parent) {
for (String resultKey : fieldsByName.keySet()) {
Expand All @@ -643,7 +625,7 @@ private void createNFs(ImmutableList.Builder<ExecutableNormalizedField> nfListBu
continue;
}
for (CollectedField collectedField : fieldGroup.fields) {
normalizedFieldToAstFields.put(nf, new FieldAndAstParent(collectedField.field, collectedField.astTypeCondition));
normalizedFieldToAstFields.put(nf, collectedField);
}
nfListBuilder.add(nf);

Expand All @@ -657,6 +639,7 @@ private void createNFs(ImmutableList.Builder<ExecutableNormalizedField> nfListBu
}
}

// new single ENF
private ExecutableNormalizedField createNF(CollectedFieldGroup collectedFieldGroup,
int level,
ExecutableNormalizedField parent) {
Expand Down Expand Up @@ -847,7 +830,7 @@ private void collectInlineFragment(List<CollectedField> result,
collectFromSelectionSet(inlineFragment.getSelectionSet(), result, newAstTypeCondition, newPossibleObjects, newDeferredExecution);
}

private NormalizedDeferredExecution buildDeferredExecution(
private @Nullable NormalizedDeferredExecution buildDeferredExecution(
List<Directive> directives,
Set<GraphQLObjectType> newPossibleObjects) {
if (!options.deferSupport) {
Expand Down Expand Up @@ -942,26 +925,6 @@ public CollectedField(Field field, Set<GraphQLObjectType> objectTypes, GraphQLCo
}
}

// REMOVED BY THIS PR: intermediate result pairing the ENFs produced by one
// collection pass with the multimap from each ENF back to its AST fields.
// Made obsolete because buildEnfsRecursively now consumes these values inline
// instead of returning them. Fields were write-only outside construction here.
public static class CollectNFResult {
private final Collection<ExecutableNormalizedField> children;
private final ImmutableListMultimap<ExecutableNormalizedField, FieldAndAstParent> normalizedFieldToAstFields;

public CollectNFResult(Collection<ExecutableNormalizedField> children, ImmutableListMultimap<ExecutableNormalizedField, FieldAndAstParent> normalizedFieldToAstFields) {
this.children = children;
this.normalizedFieldToAstFields = normalizedFieldToAstFields;
}
}

// REMOVED BY THIS PR: value pair of an AST Field and the composite type it was
// selected on. Call sites were migrated to CollectedField, which carries the
// same data (field + astTypeCondition) plus the possible object types.
private static class FieldAndAstParent {
final Field field;
final GraphQLCompositeType astParentType;

private FieldAndAstParent(Field field, GraphQLCompositeType astParentType) {
this.field = field;
this.astParentType = astParentType;
}
}

private static class CollectedFieldGroup {
Set<GraphQLObjectType> objectTypes;
Set<CollectedField> fields;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ import static graphql.language.AstPrinter.printAst
import static graphql.parser.Parser.parseValue
import static graphql.schema.FieldCoordinates.coordinates

abstract class ExecutableNormalizedOperationFactoryTest extends Specification {
class ExecutableNormalizedOperationFactoryTest extends Specification {
static boolean deferSupport


Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package benchmark;
package performance;

import graphql.execution.CoercedVariables;
import graphql.language.Document;
Expand All @@ -25,7 +25,7 @@
@Warmup(iterations = 2, time = 5)
@Measurement(iterations = 3)
@Fork(3)
public class ENFBenchmark1 {
public class ENF1Performance {

@State(Scope.Benchmark)
public static class MyState {
Expand All @@ -36,10 +36,10 @@ public static class MyState {
@Setup
public void setup() {
try {
String schemaString = BenchmarkUtils.loadResource("large-schema-1.graphqls");
String schemaString = PerformanceTestingUtils.loadResource("large-schema-1.graphqls");
schema = SchemaGenerator.createdMockedSchema(schemaString);

String query = BenchmarkUtils.loadResource("large-schema-1-query.graphql");
String query = PerformanceTestingUtils.loadResource("large-schema-1-query.graphql");
document = Parser.parse(query);
} catch (Exception e) {
throw new RuntimeException(e);
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package benchmark;
package performance;

import graphql.execution.CoercedVariables;
import graphql.language.Document;
Expand All @@ -24,7 +24,7 @@
@Warmup(iterations = 2, time = 5)
@Measurement(iterations = 3)
@Fork(3)
public class ENFBenchmark2 {
public class ENF2Performance {

@State(Scope.Benchmark)
public static class MyState {
Expand All @@ -35,10 +35,10 @@ public static class MyState {
@Setup
public void setup() {
try {
String schemaString = BenchmarkUtils.loadResource("large-schema-2.graphqls");
String schemaString = PerformanceTestingUtils.loadResource("large-schema-2.graphqls");
schema = SchemaGenerator.createdMockedSchema(schemaString);

String query = BenchmarkUtils.loadResource("large-schema-2-query.graphql");
String query = PerformanceTestingUtils.loadResource("large-schema-2-query.graphql");
document = Parser.parse(query);
} catch (Exception e) {
throw new RuntimeException(e);
Expand Down
Loading
Loading