-
Notifications
You must be signed in to change notification settings - Fork 1.2k
This is some performance optimisations for NormalisedOperations #2732
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
8485d91
f5bda81
861127a
d231660
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -37,10 +37,10 @@ | |
| import java.util.Collection; | ||
| import java.util.Collections; | ||
| import java.util.LinkedHashMap; | ||
| import java.util.LinkedHashSet; | ||
| import java.util.List; | ||
| import java.util.Map; | ||
| import java.util.Set; | ||
| import java.util.function.Predicate; | ||
|
|
||
| import static graphql.Assert.assertNotNull; | ||
| import static graphql.Assert.assertShouldNeverHappen; | ||
|
|
@@ -235,41 +235,44 @@ public CollectNFResult collectFromMergedField(FieldCollectorNormalizedQueryParam | |
| possibleObjects | ||
| ); | ||
| } | ||
| Map<String, List<CollectedField>> fieldsByName = fieldsByResultKey(collectedFields); | ||
| ImmutableList.Builder<ExecutableNormalizedField> resultNFs = ImmutableList.builder(); | ||
| ImmutableListMultimap.Builder<ExecutableNormalizedField, FieldAndAstParent> normalizedFieldToAstFields = ImmutableListMultimap.builder(); | ||
|
|
||
| createNFs(resultNFs, parameters, fieldsByName, normalizedFieldToAstFields, level, executableNormalizedField); | ||
|
|
||
| return new CollectNFResult(resultNFs.build(), normalizedFieldToAstFields.build()); | ||
| } | ||
|
|
||
| private Map<String, List<CollectedField>> fieldsByResultKey(List<CollectedField> collectedFields) { | ||
| Map<String, List<CollectedField>> fieldsByName = new LinkedHashMap<>(); | ||
| for (CollectedField collectedField : collectedFields) { | ||
| fieldsByName.computeIfAbsent(collectedField.field.getResultKey(), ignored -> new ArrayList<>()).add(collectedField); | ||
| } | ||
| List<ExecutableNormalizedField> resultNFs = new ArrayList<>(); | ||
| ImmutableListMultimap.Builder<ExecutableNormalizedField, FieldAndAstParent> normalizedFieldToAstFields = ImmutableListMultimap.builder(); | ||
|
|
||
| createNFs(parameters, fieldsByName, resultNFs, normalizedFieldToAstFields, level, executableNormalizedField); | ||
|
|
||
| return new CollectNFResult(resultNFs, normalizedFieldToAstFields.build()); | ||
| return fieldsByName; | ||
|
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. common code extracted - not performance work per se |
||
| } | ||
|
|
||
| public CollectNFResult collectFromOperation(FieldCollectorNormalizedQueryParams parameters, | ||
| OperationDefinition operationDefinition, | ||
| GraphQLObjectType rootType) { | ||
| Set<GraphQLObjectType> possibleObjects = new LinkedHashSet<>(); | ||
| possibleObjects.add(rootType); | ||
|
|
||
|
|
||
| Set<GraphQLObjectType> possibleObjects = ImmutableSet.of(rootType); | ||
| List<CollectedField> collectedFields = new ArrayList<>(); | ||
| collectFromSelectionSet(parameters, operationDefinition.getSelectionSet(), collectedFields, rootType, possibleObjects); | ||
| // group by result key | ||
| Map<String, List<CollectedField>> fieldsByName = new LinkedHashMap<>(); | ||
| for (CollectedField collectedField : collectedFields) { | ||
| fieldsByName.computeIfAbsent(collectedField.field.getResultKey(), ignored -> new ArrayList<>()).add(collectedField); | ||
| } | ||
| List<ExecutableNormalizedField> resultNFs = new ArrayList<>(); | ||
| Map<String, List<CollectedField>> fieldsByName = fieldsByResultKey(collectedFields); | ||
| ImmutableList.Builder<ExecutableNormalizedField> resultNFs = ImmutableList.builder(); | ||
| ImmutableListMultimap.Builder<ExecutableNormalizedField, FieldAndAstParent> normalizedFieldToAstFields = ImmutableListMultimap.builder(); | ||
|
|
||
| createNFs(parameters, fieldsByName, resultNFs, normalizedFieldToAstFields, 1, null); | ||
| createNFs(resultNFs, parameters, fieldsByName, normalizedFieldToAstFields, 1, null); | ||
|
|
||
| return new CollectNFResult(resultNFs, normalizedFieldToAstFields.build()); | ||
| return new CollectNFResult(resultNFs.build(), normalizedFieldToAstFields.build()); | ||
| } | ||
|
|
||
| private void createNFs(FieldCollectorNormalizedQueryParams parameters, | ||
| private void createNFs(ImmutableList.Builder<ExecutableNormalizedField> nfListBuilder, | ||
| FieldCollectorNormalizedQueryParams parameters, | ||
| Map<String, List<CollectedField>> fieldsByName, | ||
| List<ExecutableNormalizedField> resultNFs, | ||
| ImmutableListMultimap.Builder<ExecutableNormalizedField, FieldAndAstParent> normalizedFieldToAstFields, | ||
| int level, | ||
| ExecutableNormalizedField parent) { | ||
|
|
@@ -284,7 +287,7 @@ private void createNFs(FieldCollectorNormalizedQueryParams parameters, | |
| for (CollectedField collectedField : fieldGroup.fields) { | ||
| normalizedFieldToAstFields.put(nf, new FieldAndAstParent(collectedField.field, collectedField.astTypeCondition)); | ||
| } | ||
| resultNFs.add(nf); | ||
| nfListBuilder.add(nf); | ||
| } | ||
| if (commonParentsGroups.size() > 1) { | ||
| parameters.addPossibleMergers(parent, resultKey); | ||
|
|
@@ -308,7 +311,8 @@ private ExecutableNormalizedField createNF(FieldCollectorNormalizedQueryParams p | |
| normalizedArgumentValues = valuesResolver.getNormalizedArgumentValues(fieldDefinition.getArguments(), field.getArguments(), parameters.getNormalizedVariableValues()); | ||
| } | ||
| ImmutableList<String> objectTypeNames = map(objectTypes, GraphQLObjectType::getName); | ||
| ExecutableNormalizedField executableNormalizedField = ExecutableNormalizedField.newNormalizedField() | ||
|
|
||
| return ExecutableNormalizedField.newNormalizedField() | ||
| .alias(field.getAlias()) | ||
| .resolvedArguments(argumentValues) | ||
| .normalizedArguments(normalizedArgumentValues) | ||
|
|
@@ -318,8 +322,6 @@ private ExecutableNormalizedField createNF(FieldCollectorNormalizedQueryParams p | |
| .level(level) | ||
| .parent(parent) | ||
| .build(); | ||
|
|
||
| return executableNormalizedField; | ||
|
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. inlined the returned field |
||
| } | ||
|
|
||
| private static class CollectedFieldGroup { | ||
|
|
@@ -333,20 +335,21 @@ public CollectedFieldGroup(Set<CollectedField> fields, Set<GraphQLObjectType> ob | |
| } | ||
|
|
||
| private List<CollectedFieldGroup> groupByCommonParents(Collection<CollectedField> fields) { | ||
| Set<GraphQLObjectType> allRelevantObjects = new LinkedHashSet<>(); | ||
| ImmutableSet.Builder<GraphQLObjectType> objectTypes = ImmutableSet.builder(); | ||
| for (CollectedField collectedField : fields) { | ||
| allRelevantObjects.addAll(collectedField.objectTypes); | ||
| objectTypes.addAll(collectedField.objectTypes); | ||
| } | ||
| Set<GraphQLObjectType> allRelevantObjects = objectTypes.build(); | ||
| Map<GraphQLType, ImmutableList<CollectedField>> groupByAstParent = groupingBy(fields, fieldAndType -> fieldAndType.astTypeCondition); | ||
| if (groupByAstParent.size() == 1) { | ||
| return singletonList(new CollectedFieldGroup(new LinkedHashSet<>(fields), allRelevantObjects)); | ||
| return singletonList(new CollectedFieldGroup(ImmutableSet.copyOf(fields), allRelevantObjects)); | ||
| } | ||
| List<CollectedFieldGroup> result = new ArrayList<>(); | ||
| ImmutableList.Builder<CollectedFieldGroup> result = ImmutableList.builder(); | ||
| for (GraphQLObjectType objectType : allRelevantObjects) { | ||
| Set<CollectedField> relevantFields = filterSet(fields, field -> field.objectTypes.contains(objectType)); | ||
| result.add(new CollectedFieldGroup(relevantFields, singleton(objectType))); | ||
| } | ||
| return result; | ||
| return result.build(); | ||
| } | ||
|
|
||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -2,6 +2,7 @@ | |
|
|
||
|
|
||
| import com.google.common.collect.ImmutableList; | ||
| import com.google.common.collect.ImmutableSet; | ||
| import graphql.Internal; | ||
|
|
||
| import java.lang.reflect.Array; | ||
|
|
@@ -279,13 +280,13 @@ public static <T> List<T> filterList(Collection<T> list, Predicate<T> filter) { | |
| } | ||
|
|
||
| public static <T> Set<T> filterSet(Collection<T> input, Predicate<T> filter) { | ||
| LinkedHashSet<T> result = new LinkedHashSet<>(); | ||
| ImmutableSet.Builder<T> result = ImmutableSet.builder(); | ||
| for (T t : input) { | ||
| if (filter.test(t)) { | ||
| result.add(t); | ||
| } | ||
| } | ||
| return result; | ||
| return result.build(); | ||
| } | ||
|
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. This is used in 2 places (overlapping fields code as well) and both work as expected |
||
|
|
||
| /** | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -19,11 +19,11 @@ | |
| import org.openjdk.jmh.annotations.State; | ||
| import org.openjdk.jmh.annotations.Threads; | ||
| import org.openjdk.jmh.annotations.Warmup; | ||
| import org.openjdk.jmh.infra.Blackhole; | ||
|
|
||
| import java.io.IOException; | ||
| import java.net.URL; | ||
| import java.util.Collections; | ||
| import java.util.concurrent.ExecutionException; | ||
| import java.util.concurrent.TimeUnit; | ||
|
|
||
| import static com.google.common.io.Resources.getResource; | ||
|
|
@@ -32,9 +32,9 @@ | |
| @BenchmarkMode(Mode.Throughput) | ||
| @Warmup(iterations = 2) | ||
| @Measurement(iterations = 2, timeUnit = TimeUnit.NANOSECONDS) | ||
| public class NQBenchmark { | ||
| public class NQBenchmark1 { | ||
|
|
||
| @org.openjdk.jmh.annotations.State(Scope.Benchmark) | ||
| @State(Scope.Benchmark) | ||
| public static class MyState { | ||
|
|
||
| GraphQLSchema schema; | ||
|
|
@@ -62,15 +62,29 @@ private String readFromClasspath(String file) throws IOException { | |
|
|
||
| @Benchmark | ||
| @Warmup(iterations = 2) | ||
| @Measurement(iterations = 100, time = 10) | ||
| @Measurement(iterations = 3, time = 10) | ||
| @Threads(1) | ||
| @Fork(3) | ||
| @BenchmarkMode(Mode.AverageTime) | ||
| @OutputTimeUnit(TimeUnit.MILLISECONDS) | ||
| public ExecutableNormalizedOperation benchMarkAvgTime(MyState myState) throws ExecutionException, InterruptedException { | ||
| public void benchMarkAvgTime(MyState myState, Blackhole blackhole ) { | ||
| runImpl(myState, blackhole); | ||
| } | ||
|
|
||
| @Benchmark | ||
| @Warmup(iterations = 2) | ||
| @Measurement(iterations = 3, time = 10) | ||
| @Threads(1) | ||
| @Fork(3) | ||
| @BenchmarkMode(Mode.Throughput) | ||
| @OutputTimeUnit(TimeUnit.SECONDS) | ||
| public void benchMarkThroughput(MyState myState, Blackhole blackhole ) { | ||
| runImpl(myState, blackhole); | ||
| } | ||
|
|
||
| private void runImpl(MyState myState, Blackhole blackhole) { | ||
| ExecutableNormalizedOperation executableNormalizedOperation = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(myState.schema, myState.document, null, Collections.emptyMap()); | ||
| // System.out.println("fields size:" + normalizedQuery.getFieldToNormalizedField().size()); | ||
| return executableNormalizedOperation; | ||
| blackhole.consume(executableNormalizedOperation); | ||
| } | ||
|
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Added throughput |
||
|
|
||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -25,6 +25,7 @@ | |
| import org.openjdk.jmh.annotations.State; | ||
| import org.openjdk.jmh.annotations.Threads; | ||
| import org.openjdk.jmh.annotations.Warmup; | ||
| import org.openjdk.jmh.infra.Blackhole; | ||
|
|
||
| import java.io.IOException; | ||
| import java.net.URL; | ||
|
|
@@ -75,8 +76,15 @@ private String readFromClasspath(String file) throws IOException { | |
| } | ||
|
|
||
| @Benchmark | ||
| public List<ValidationError> overlappingFieldValidation(MyState myState) { | ||
| return validateQuery(myState.schema, myState.document); | ||
| public void overlappingFieldValidationAvgTime(MyState myState, Blackhole blackhole) { | ||
| blackhole.consume(validateQuery(myState.schema, myState.document)); | ||
| } | ||
|
|
||
| @Benchmark | ||
| @BenchmarkMode(Mode.Throughput) | ||
| @OutputTimeUnit(TimeUnit.SECONDS) | ||
| public void overlappingFieldValidationThroughput(MyState myState, Blackhole blackhole) { | ||
| blackhole.consume(validateQuery(myState.schema, myState.document)); | ||
|
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Added throughput. I tried to find some perf improvements in the overlapping fields code, but none were easily found |
||
| } | ||
|
|
||
|
|
||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
We only took the GraphQLCodeRegistry so we could get the field visibility.
So we collapsed it so we no longer instantiate an object to get to static