15 changes: 15 additions & 0 deletions src/main/java/graphql/parser/ParseCancelledTooDeepException.java
@@ -0,0 +1,15 @@
package graphql.parser;

import graphql.Internal;
import graphql.language.SourceLocation;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

@Internal
public class ParseCancelledTooDeepException extends InvalidSyntaxException {

@Internal
public ParseCancelledTooDeepException(String msg, @Nullable SourceLocation sourceLocation, @Nullable String offendingToken, int maxTokens, @NotNull String tokenType) {
super(sourceLocation, msg, null, offendingToken, null);
}
}
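
Since the new exception is marked @Internal, calling code should not depend on it directly. A minimal sketch of how a caller might surface the new failure mode, assuming the public static Parser.parse(String) entry point and catching the public InvalidSyntaxException supertype:

import graphql.language.Document;
import graphql.parser.InvalidSyntaxException;
import graphql.parser.Parser;

public class ParseExample {
    public static void main(String[] args) {
        try {
            Document document = Parser.parse("query q { field }");
            System.out.println("Parsed " + document.getDefinitions().size() + " definition(s)");
        } catch (InvalidSyntaxException e) {
            // ParseCancelledTooDeepException is @Internal, so match on the public
            // supertype and inspect the message to distinguish depth cancellation
            System.err.println("Parsing cancelled: " + e.getMessage());
        }
    }
}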
47 changes: 40 additions & 7 deletions src/main/java/graphql/parser/Parser.java
@@ -236,7 +236,11 @@ public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int
// this lexer wrapper allows us to stop lexing when too many tokens have been presented. This prevents DOS attacks.
int maxTokens = parserOptions.getMaxTokens();
int maxWhitespaceTokens = parserOptions.getMaxWhitespaceTokens();
BiConsumer<Integer, Token> onTooManyTokens = (maxTokenCount, token) -> throwCancelParseIfTooManyTokens(token, maxTokenCount, multiSourceReader);
BiConsumer<Integer, Token> onTooManyTokens = (maxTokenCount, token) -> throwIfTokenProblems(
token,
maxTokenCount,
multiSourceReader,
ParseCancelledException.class);
SafeTokenSource safeTokenSource = new SafeTokenSource(lexer, maxTokens, maxWhitespaceTokens, onTooManyTokens);

CommonTokenStream tokens = new CommonTokenStream(safeTokenSource);
@@ -285,9 +289,30 @@ private void setupParserListener(MultiSourceReader multiSourceReader, GraphqlPar
ParserOptions parserOptions = toLanguage.getParserOptions();
ParsingListener parsingListener = parserOptions.getParsingListener();
int maxTokens = parserOptions.getMaxTokens();
int maxRuleDepth = parserOptions.getMaxRuleDepth();
// prevent billion laughs attacks by restricting how many tokens we allow and how deeply grammar rules can nest
ParseTreeListener listener = new GraphqlBaseListener() {
int count = 0;
int depth = 0;


@Override
public void enterEveryRule(ParserRuleContext ctx) {
depth++;
if (depth > maxRuleDepth) {
throwIfTokenProblems(
ctx.getStart(),
maxRuleDepth,
multiSourceReader,
ParseCancelledTooDeepException.class
);
}
}

@Override
public void exitEveryRule(ParserRuleContext ctx) {
depth--;
}

@Override
public void visitTerminal(TerminalNode node) {
@@ -312,15 +337,20 @@ public int getCharPositionInLine() {

count++;
if (count > maxTokens) {
throwCancelParseIfTooManyTokens(token, maxTokens, multiSourceReader);
throwIfTokenProblems(
token,
maxTokens,
multiSourceReader,
ParseCancelledException.class
);
}
}
};
parser.addParseListener(listener);
}

private void throwCancelParseIfTooManyTokens(Token token, int maxTokens, MultiSourceReader multiSourceReader) throws ParseCancelledException {
String tokenType = "grammar";
private void throwIfTokenProblems(Token token, int maxLimit, MultiSourceReader multiSourceReader, Class<? extends InvalidSyntaxException> targetException) throws ParseCancelledException {
String tokenType = "grammar";
SourceLocation sourceLocation = null;
String offendingToken = null;
if (token != null) {
@@ -330,9 +360,12 @@ private void throwCancelParseIfTooManyTokens(Token token, int maxTokens, MultiSo
offendingToken = token.getText();
sourceLocation = AntlrHelper.createSourceLocation(multiSourceReader, token.getLine(), token.getCharPositionInLine());
}
String msg = String.format("More than %d %s tokens have been presented. To prevent Denial Of Service attacks, parsing has been cancelled.", maxTokens, tokenType);
throw new ParseCancelledException(msg, sourceLocation, offendingToken);
}
if (targetException.equals(ParseCancelledTooDeepException.class)) {
String msg = String.format("More than %d deep %s rules have been entered. To prevent Denial Of Service attacks, parsing has been cancelled.", maxLimit, tokenType);
throw new ParseCancelledTooDeepException(msg, sourceLocation, offendingToken, maxLimit, tokenType);
}
String msg = String.format("More than %d %s tokens have been presented. To prevent Denial Of Service attacks, parsing has been cancelled.", maxLimit, tokenType);
throw new ParseCancelledException(msg, sourceLocation, offendingToken); }

/**
* Allows you to override the ANTLR to AST code.
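
For illustration, a deeply nested operation trips the new depth guard. A hedged sketch (the nesting count of 600 is an arbitrary example; each nesting level enters several grammar rules, so this comfortably exceeds the default limit of 500):

import graphql.parser.InvalidSyntaxException;
import graphql.parser.Parser;

public class DeepQueryExample {
    public static void main(String[] args) {
        // Build "query q { f { f { ... { f } ... } } }" nested 600 levels deep
        StringBuilder query = new StringBuilder("query q ");
        for (int i = 0; i < 600; i++) {
            query.append("{ f ");
        }
        for (int i = 0; i < 600; i++) {
            query.append("} ");
        }
        try {
            Parser.parse(query.toString());
        } catch (InvalidSyntaxException e) {
            // With default options this is cancelled once rule depth exceeds 500
            System.out.println(e.getMessage());
        }
    }
}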
41 changes: 37 additions & 4 deletions src/main/java/graphql/parser/ParserOptions.java
@@ -15,9 +15,9 @@ public class ParserOptions {
/**
* A graphql hacking vector is to send nonsensical queries that burn lots of parsing CPU time and burn
* memory representing a document that won't ever execute. To prevent this for most users, graphql-java
* set this value to 15000. ANTLR parsing time is linear to the number of tokens presented. The more you
* sets this value to 15000. ANTLR parsing time is linear to the number of tokens presented. The more you
* allow the longer it takes.
*
* <p>
* If you want to allow more, then {@link #setDefaultParserOptions(ParserOptions)} allows you to change this
* JVM wide.
*/
@@ -26,18 +26,29 @@
* Another graphql hacking vector is to send large amounts of whitespace in operations that burn lots of parsing CPU time and burn
* memory representing a document. Whitespace token processing in ANTLR is 2 orders of magnitude faster than grammar token processing;
* however, it still takes some time to happen.
*
* <p>
* If you want to allow more, then {@link #setDefaultParserOptions(ParserOptions)} allows you to change this
* JVM wide.
*/
public static final int MAX_WHITESPACE_TOKENS = 200_000;

/**
* A graphql hacking vector is to send nonsensical queries with very deep grammar rule nesting, which
* can cause stack overflow exceptions during query parsing. To prevent this for most users, graphql-java
* sets this value to 500 grammar rules deep.
* <p>
* If you want to allow more, then {@link #setDefaultParserOptions(ParserOptions)} allows you to change this
* JVM wide.
*/
public static final int MAX_RULE_DEPTH = 500;
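
As the Javadoc above notes, the JVM-wide default can be replaced via setDefaultParserOptions. A minimal sketch, assuming a deployment that genuinely needs deeper documents (1_000 is an arbitrary example value):

import graphql.parser.ParserOptions;

public class RaiseDepthLimit {
    public static void main(String[] args) {
        // Raise the depth limit JVM-wide; other limits keep their builder defaults
        ParserOptions raised = ParserOptions.newParserOptions()
                .maxRuleDepth(1_000) // arbitrary example above the 500 default
                .build();
        ParserOptions.setDefaultParserOptions(raised);
    }
}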

private static ParserOptions defaultJvmParserOptions = newParserOptions()
.captureIgnoredChars(false)
.captureSourceLocation(true)
.captureLineComments(true)
.maxTokens(MAX_QUERY_TOKENS) // to prevent a billion laughs style attacks, we set a default for graphql-java
.maxWhitespaceTokens(MAX_WHITESPACE_TOKENS)
.maxRuleDepth(MAX_RULE_DEPTH)
.build();

private static ParserOptions defaultJvmOperationParserOptions = newParserOptions()
@@ -46,6 +57,7 @@ public class ParserOptions {
.captureLineComments(false) // #comments are not useful in query parsing
.maxTokens(MAX_QUERY_TOKENS) // to prevent a billion laughs style attacks, we set a default for graphql-java
.maxWhitespaceTokens(MAX_WHITESPACE_TOKENS)
.maxRuleDepth(MAX_RULE_DEPTH)
.build();

private static ParserOptions defaultJvmSdlParserOptions = newParserOptions()
@@ -54,6 +66,7 @@
.captureLineComments(true) // #comments are useful in SDL parsing
.maxTokens(Integer.MAX_VALUE) // we are less worried about a billion laughs with SDL parsing since the call path is not facing attackers
.maxWhitespaceTokens(Integer.MAX_VALUE)
.maxRuleDepth(Integer.MAX_VALUE)
.build();

/**
@@ -156,6 +169,7 @@ public static void setDefaultSdlParserOptions(ParserOptions options) {
private final boolean captureLineComments;
private final int maxTokens;
private final int maxWhitespaceTokens;
private final int maxRuleDepth;
private final ParsingListener parsingListener;

private ParserOptions(Builder builder) {
@@ -164,6 +178,7 @@ private ParserOptions(Builder builder) {
this.captureLineComments = builder.captureLineComments;
this.maxTokens = builder.maxTokens;
this.maxWhitespaceTokens = builder.maxWhitespaceTokens;
this.maxRuleDepth = builder.maxRuleDepth;
this.parsingListener = builder.parsingListener;
}

@@ -226,6 +241,17 @@ public int getMaxWhitespaceTokens() {
return maxWhitespaceTokens;
}

/**
* A graphql hacking vector is to send nonsensical queries with very deep grammar rule nesting, which
* can cause stack overflow exceptions during query parsing. To prevent this, you can set the maximum
* rule depth allowed before an exception is thrown and parsing is stopped.
*
* @return the maximum grammar rule depth the parser will accept, after which an exception will be thrown.
*/
public int getMaxRuleDepth() {
return maxRuleDepth;
}

public ParsingListener getParsingListener() {
return parsingListener;
}
@@ -245,9 +271,10 @@ public static class Builder {
private boolean captureIgnoredChars = false;
private boolean captureSourceLocation = true;
private boolean captureLineComments = true;
private int maxTokens = MAX_QUERY_TOKENS;
private ParsingListener parsingListener = ParsingListener.NOOP;
private int maxTokens = MAX_QUERY_TOKENS;
private int maxWhitespaceTokens = MAX_WHITESPACE_TOKENS;
private int maxRuleDepth = MAX_RULE_DEPTH;

Builder() {
}
@@ -258,6 +285,7 @@ public static class Builder {
this.captureLineComments = parserOptions.captureLineComments;
this.maxTokens = parserOptions.maxTokens;
this.maxWhitespaceTokens = parserOptions.maxWhitespaceTokens;
this.maxRuleDepth = parserOptions.maxRuleDepth;
this.parsingListener = parserOptions.parsingListener;
}

@@ -286,6 +314,11 @@ public Builder maxWhitespaceTokens(int maxWhitespaceTokens) {
return this;
}

public Builder maxRuleDepth(int maxRuleDepth) {
this.maxRuleDepth = maxRuleDepth;
return this;
}

public Builder parsingListener(ParsingListener parsingListener) {
this.parsingListener = assertNotNull(parsingListener);
return this;
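
Taken together, the builder can mirror the SDL defaults above for trusted, non-attacker-facing input, where every limit is effectively lifted. A sketch under that assumption:

import graphql.parser.ParserOptions;

public class TrustedSdlOptions {
    public static void main(String[] args) {
        // For trusted schema files, mirror defaultJvmSdlParserOptions and lift all limits
        ParserOptions trusted = ParserOptions.newParserOptions()
                .captureLineComments(true) // #comments are useful in SDL parsing
                .maxTokens(Integer.MAX_VALUE)
                .maxWhitespaceTokens(Integer.MAX_VALUE)
                .maxRuleDepth(Integer.MAX_VALUE)
                .build();
        ParserOptions.setDefaultSdlParserOptions(trusted);
    }
}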