@@ -16,6 +16,7 @@
 import graphql.language.SelectionSet;
 import graphql.language.TypeName;
 import graphql.language.Value;
+import graphql.schema.GraphQLSchema;

 import java.util.ArrayList;
 import java.util.LinkedHashMap;
@@ -31,12 +32,11 @@

 @Internal
 public class ExecutableNormalizedOperationToAstCompiler {
-    public static Document compileToDocument(
-            OperationDefinition.Operation operationKind,
-            String operationName,
-            List<ExecutableNormalizedField> topLevelFields
-    ) {
-        List<Selection<?>> selections = selectionsForNormalizedFields(topLevelFields);
+    public static Document compileToDocument(GraphQLSchema schema,
+                                             OperationDefinition.Operation operationKind,

Contributor Author: Now that we have the schema again, we could avoid explicitly specifying the operationKind and instead derive it from the ENF and the GraphQLSchema. But as I said in the PR description, my IDE is broken, and that approach required some refactoring which I wasn't prepared to do without the IDE. What a massive pain… (A rough sketch of what deriving the kind could look like follows this hunk.)

+                                             String operationName,
+                                             List<ExecutableNormalizedField> topLevelFields) {
+        List<Selection<?>> selections = selectionsForNormalizedFields(schema, topLevelFields);
         SelectionSet selectionSet = new SelectionSet(selections);

         return Document.newDocument()
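Below is a rough sketch, not part of this PR, of what deriving the operation kind from the top-level ENFs and the GraphQLSchema could look like. The helper name deriveOperationKind is invented, it assumes every top-level field's object type name is the operation's root type name, and it relies on the imports already present in this file; the refactoring the author mentions would likely be more involved.

    // Hypothetical helper (illustrative only): map the root type of the top-level
    // fields back to a query/mutation/subscription operation kind.
    private static OperationDefinition.Operation deriveOperationKind(GraphQLSchema schema,
                                                                     List<ExecutableNormalizedField> topLevelFields) {
        // Assumption: all top-level ENFs share the single root object type of the operation
        String rootTypeName = topLevelFields.get(0).getObjectTypeNames().iterator().next();
        if (schema.getMutationType() != null && rootTypeName.equals(schema.getMutationType().getName())) {
            return OperationDefinition.Operation.MUTATION;
        }
        if (schema.getSubscriptionType() != null && rootTypeName.equals(schema.getSubscriptionType().getName())) {
            return OperationDefinition.Operation.SUBSCRIPTION;
        }
        return OperationDefinition.Operation.QUERY;
    }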
@@ -48,36 +48,44 @@ public static Document compileToDocument(
                 .build();
     }

-    private static List<Selection<?>> selectionsForNormalizedFields(List<ExecutableNormalizedField> executableNormalizedFields) {
-        ImmutableList.Builder<Selection<?>> result = ImmutableList.builder();
+    private static List<Selection<?>> selectionsForNormalizedFields(GraphQLSchema schema,
+                                                                    List<ExecutableNormalizedField> executableNormalizedFields) {
+        ImmutableList.Builder<Selection<?>> selections = ImmutableList.builder();

-        Map<String, List<Field>> overallGroupedFields = new LinkedHashMap<>();
-        for (ExecutableNormalizedField nf : executableNormalizedFields) {
-            Map<String, List<Field>> groupFieldsForChild = selectionForNormalizedField(nf);
-
-            groupFieldsForChild.forEach((objectTypeName, fields) -> {
-                List<Field> fieldList = overallGroupedFields.computeIfAbsent(objectTypeName, ignored -> new ArrayList<>());
-                fieldList.addAll(fields);
-            });
+        // All conditional fields go here instead of directly to selections so they can be grouped together
+        // in the same inline fragment in the output
+        Map<String, List<Field>> conditionalFieldsByObjectTypeName = new LinkedHashMap<>();
+
+        for (ExecutableNormalizedField nf : executableNormalizedFields) {
+            Map<String, List<Field>> groupFieldsForChild = selectionForNormalizedField(schema, nf);
+            if (nf.isConditional(schema)) {
Contributor Author: This is the change: we check whether the NF is conditional. If it is, we add it to the conditional-field map so its type conditions can be grouped; otherwise we add it straight to the output as a raw Field. (See the usage sketch at the end of the diff for the effect on the printed output.)
+                groupFieldsForChild.forEach((objectTypeName, fields) -> {
+                    List<Field> fieldList = conditionalFieldsByObjectTypeName.computeIfAbsent(objectTypeName, ignored -> new ArrayList<>());
+                    fieldList.addAll(fields);
+                });
+            } else {
+                List<Field> fields = groupFieldsForChild.values().iterator().next();
+                selections.addAll(fields);
+            }
         }

-        overallGroupedFields.forEach((objectTypeName, fields) -> {
+        conditionalFieldsByObjectTypeName.forEach((objectTypeName, fields) -> {
Contributor Author: As before, the conditional fields are grouped by their object type condition so that fields sharing a condition end up in the same inline fragment. (A standalone sketch of this wrapping step follows this method.)
             TypeName typeName = newTypeName(objectTypeName).build();
             InlineFragment inlineFragment = newInlineFragment().
                     typeCondition(typeName)
                     .selectionSet(selectionSet(fields))
                     .build();
-            result.add(inlineFragment);
+            selections.add(inlineFragment);
         });

-        return result.build();
+        return selections.build();
}
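To make the wrapping step concrete, here is a small standalone sketch; the Cat/meow names are invented for illustration. It builds one inline fragment the same way the compiler wraps each entry of conditionalFieldsByObjectTypeName, then prints it with AstPrinter:

import graphql.language.AstPrinter;
import graphql.language.Field;
import graphql.language.InlineFragment;
import graphql.language.SelectionSet;
import graphql.language.TypeName;

import java.util.Collections;

class InlineFragmentSketch {
    public static void main(String[] args) {
        // One bucket of grouped conditional fields, e.g. the fields collected under "Cat"
        Field meow = Field.newField("meow").build();
        SelectionSet catFields = new SelectionSet(Collections.singletonList(meow));

        // Wrap the bucket in an inline fragment whose type condition is the object type name
        InlineFragment catFragment = InlineFragment.newInlineFragment()
                .typeCondition(TypeName.newTypeName("Cat").build())
                .selectionSet(catFields)
                .build();

        // Prints the fragment as GraphQL text, roughly: ... on Cat { meow }
        System.out.println(AstPrinter.printAst(catFragment));
    }
}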

-    private static Map<String, List<Field>> selectionForNormalizedField(ExecutableNormalizedField executableNormalizedField) {
+    private static Map<String, List<Field>> selectionForNormalizedField(GraphQLSchema schema,
+                                                                        ExecutableNormalizedField executableNormalizedField) {
         Map<String, List<Field>> groupedFields = new LinkedHashMap<>();
         for (String objectTypeName : executableNormalizedField.getObjectTypeNames()) {
-            List<Selection<?>> subSelections = selectionsForNormalizedFields(executableNormalizedField.getChildren());
+            List<Selection<?>> subSelections = selectionsForNormalizedFields(schema, executableNormalizedField.getChildren());
             SelectionSet selectionSet = null;
             if (subSelections.size() > 0) {
                 selectionSet = newSelectionSet()
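Taken together, a minimal usage sketch of the new entry point. It assumes a schema and a list of top-level ExecutableNormalizedFields obtained elsewhere (e.g. from the normalized operation factory), that a null operation name produces an anonymous operation, and the import paths shown; none of that is asserted by this PR.

import graphql.language.AstPrinter;
import graphql.language.Document;
import graphql.language.OperationDefinition;
import graphql.normalized.ExecutableNormalizedField;
import graphql.normalized.ExecutableNormalizedOperationToAstCompiler;
import graphql.schema.GraphQLSchema;

import java.util.List;

class CompileToDocumentSketch {

    // schema and topLevelFields are assumed to be supplied by the caller
    static String printCompiled(GraphQLSchema schema, List<ExecutableNormalizedField> topLevelFields) {
        Document document = ExecutableNormalizedOperationToAstCompiler.compileToDocument(
                schema,
                OperationDefinition.Operation.QUERY,
                null,               // assumption: null yields an anonymous operation
                topLevelFields);

        // With this PR, unconditional fields print as plain fields (e.g. `animal { name }`),
        // while conditional children are grouped per object type into inline fragments
        // (e.g. `animal { ... on Cat { meow } ... on Dog { bark } }`).
        return AstPrinter.printAst(document);
    }
}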