Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -569,6 +569,7 @@ jobs:
mvn -Dtest=TestSQLitePQS test
mvn -Dtest=TestSQLiteTLP test
mvn -Dtest=TestSQLiteNoREC test
mvn -Dtest=TestSQLiteCODDTest test

sqlite-qpg:
name: QPG Tests (SQLite)
Expand Down
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -378,7 +378,7 @@
<artifactId>flight-sql-jdbc-driver</artifactId>
<version>16.1.0</version>
</dependency>
<dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>4.0.1</version>
Expand Down
25 changes: 25 additions & 0 deletions src/sqlancer/common/oracle/CODDTestBase.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
package sqlancer.common.oracle;

import sqlancer.Main.StateLogger;
import sqlancer.MainOptions;
import sqlancer.SQLConnection;
import sqlancer.SQLGlobalState;
import sqlancer.common.query.ExpectedErrors;

/**
 * Common base class for CODDTest oracles. Centralizes the per-run state that
 * every CODDTest implementation needs: the global state, its SQL connection,
 * the statement logger, the command-line options, and an {@link ExpectedErrors}
 * whitelist of errors the oracle tolerates.
 *
 * @param <S> the SQL global state type of the database under test
 */
public abstract class CODDTestBase<S extends SQLGlobalState<?, ?>> implements TestOracle<S> {
    protected final S state;
    // Errors that are expected/ignored when executing generated queries.
    protected final ExpectedErrors errors = new ExpectedErrors();
    protected final StateLogger logger;
    protected final MainOptions options;
    protected final SQLConnection con;
    // Query strings for the CODDTest comparison. NOTE(review): presumably
    // populated by concrete subclasses while building the auxiliary, folded,
    // and original queries — confirm against the SQLite3 CODDTest oracle.
    protected String auxiliaryQueryString;
    protected String foldedQueryString;
    protected String originalQueryString;

    /**
     * @param state the global state of the database under test; supplies the
     *              connection, logger, and options cached by this base class
     */
    public CODDTestBase(S state) {
        this.state = state;
        this.con = state.getConnection();
        this.logger = state.getLogger();
        this.options = state.getOptions();
    }
}
23 changes: 23 additions & 0 deletions src/sqlancer/common/schema/AbstractTables.java
Original file line number Diff line number Diff line change
Expand Up @@ -34,4 +34,27 @@ public String columnNamesAsString(Function<C, String> function) {
return getColumns().stream().map(function).collect(Collectors.joining(", "));
}

public void addTable(T table) {
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Ideally, we would also add test cases later on for these new methods.

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I add the unit tests for these new methods and an unit tests for SQLite3 CODDTest oracle.

if (!this.tables.contains(table)) {
this.tables.add(table);
columns.addAll(table.getColumns());
}
}

public void removeTable(T table) {
if (this.tables.contains(table)) {
this.tables.remove(table);
for (C c : table.getColumns()) {
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is it actually necessary to remove the columns?

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't think it's necessary for CODDTest. I introduced the removeTable method because, when using a join clause in the Original query, we need the same clause in the Auxiliary query. Since the Auxiliary query is generated first, the join clauses are fixed when generating the Original query. To avoid redundant tables in the FROM clause, I remove them accordingly.
While it's not essential to remove columns in CODDTest—since they're used to generate other expressions—it might still be better to do so. If removeTable is reused by other components, it could leave behind columns in WHERE or other clauses that no longer belong to any table.

columns.remove(c);
}
}
}

public boolean isContained(T table) {
return this.tables.contains(table);
}

public int getSize() {
return this.tables.size();
}
}
2 changes: 1 addition & 1 deletion src/sqlancer/hive/HiveErrors.java
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

import sqlancer.common.query.ExpectedErrors;

public class HiveErrors {
public final class HiveErrors {

private HiveErrors() {
}
Expand Down
16 changes: 8 additions & 8 deletions src/sqlancer/hive/HiveOptions.java
Original file line number Diff line number Diff line change
@@ -1,18 +1,18 @@
package sqlancer.hive;

import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;

import com.beust.jcommander.Parameter;
import com.beust.jcommander.Parameters;

import sqlancer.DBMSSpecificOptions;
import sqlancer.OracleFactory;
import sqlancer.common.oracle.TLPWhereOracle;
import sqlancer.common.oracle.TestOracle;
import sqlancer.common.query.ExpectedErrors;
import sqlancer.DBMSSpecificOptions;
import sqlancer.hive.gen.HiveExpressionGenerator;
import sqlancer.OracleFactory;

import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;

@Parameters(separators = "=", commandDescription = "Hive (default port: " + HiveOptions.DEFAULT_PORT
+ ", default host: " + HiveOptions.DEFAULT_HOST + ")")
Expand All @@ -28,8 +28,8 @@ public enum HiveOracleFactory implements OracleFactory<HiveGlobalState> {
@Override
public TestOracle<HiveGlobalState> create(HiveGlobalState globalState) throws SQLException {
HiveExpressionGenerator gen = new HiveExpressionGenerator(globalState);
ExpectedErrors expectedErrors = ExpectedErrors.newErrors()
.with(HiveErrors.getExpressionErrors()).build();
ExpectedErrors expectedErrors = ExpectedErrors.newErrors().with(HiveErrors.getExpressionErrors())
.build();

return new TLPWhereOracle<>(globalState, gen, expectedErrors);
}
Expand Down
24 changes: 11 additions & 13 deletions src/sqlancer/hive/HiveProvider.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,12 @@
package sqlancer.hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

import com.google.auto.service.AutoService;

import sqlancer.AbstractAction;
import sqlancer.DatabaseProvider;
import sqlancer.IgnoreMeException;
Expand All @@ -13,13 +20,6 @@
import sqlancer.hive.gen.HiveInsertGenerator;
import sqlancer.hive.gen.HiveTableGenerator;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

import com.google.auto.service.AutoService;

@AutoService(DatabaseProvider.class)
public class HiveProvider extends SQLProviderAdapter<HiveGlobalState, HiveOptions> {

Expand Down Expand Up @@ -61,14 +61,13 @@ public void generateDatabase(HiveGlobalState globalState) throws Exception {
String tableName = globalState.getSchema().getFreeTableName();
SQLQueryAdapter qt = HiveTableGenerator.generate(globalState, tableName);
success = globalState.executeStatement(qt);
} while(!success);
} while (!success);
}
if (globalState.getSchema().getDatabaseTables().isEmpty()) {
throw new IgnoreMeException(); // TODO
}

StatementExecutor<HiveGlobalState, Action> se = new StatementExecutor<HiveGlobalState, Action>(
globalState, Action.values(),
StatementExecutor<HiveGlobalState, Action> se = new StatementExecutor<>(globalState, Action.values(),
HiveProvider::mapActions, (q) -> {
if (globalState.getSchema().getDatabaseTables().isEmpty()) {
throw new IgnoreMeException();
Expand Down Expand Up @@ -107,9 +106,8 @@ public SQLConnection createDatabase(HiveGlobalState globalState) throws SQLExcep
s.execute("USE " + databaseName);
}
con.close();
con = DriverManager.getConnection(
String.format("jdbc:hive2://%s:%d/%s", host, port, databaseName,
username, password));
con = DriverManager
.getConnection(String.format("jdbc:hive2://%s:%d/%s", host, port, databaseName, username, password));

return new SQLConnection(con);
}
Expand Down
16 changes: 8 additions & 8 deletions src/sqlancer/hive/HiveSchema.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
package sqlancer.hive;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import sqlancer.Randomly;
import sqlancer.SQLConnection;
import sqlancer.common.schema.AbstractRelationalTable;
Expand All @@ -9,14 +17,6 @@
import sqlancer.common.schema.TableIndex;
import sqlancer.hive.HiveSchema.HiveTable;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class HiveSchema extends AbstractSchema<HiveGlobalState, HiveTable> {

public enum HiveDataType {
Expand Down
42 changes: 21 additions & 21 deletions src/sqlancer/hive/HiveToStringVisitor.java
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,11 @@

import sqlancer.common.ast.newast.NewToStringVisitor;
import sqlancer.common.ast.newast.TableReferenceNode;
import sqlancer.hive.ast.HiveCastOperation;
import sqlancer.hive.ast.HiveConstant;
import sqlancer.hive.ast.HiveExpression;
import sqlancer.hive.ast.HiveJoin;
import sqlancer.hive.ast.HiveSelect;
import sqlancer.hive.ast.HiveCastOperation;

public class HiveToStringVisitor extends NewToStringVisitor<HiveExpression> {

Expand Down Expand Up @@ -71,26 +71,26 @@ private void visit(HiveSelect select) {

private void visit(HiveJoin join) {
switch (join.getJoinType()) {
case INNER:
sb.append(" INNER JOIN ");
break;
case LEFT_OUTER:
sb.append(" LEFT JOIN ");
break;
case RIGHT_OUTER:
sb.append(" RIGHT JOIN ");
break;
case FULL_OUTER:
sb.append(" FULL JOIN ");
break;
case LEFT_SEMI:
sb.append(" LEFT SEMI JOIN ");
break;
case CROSS:
sb.append(" CROSS JOIN ");
break;
default:
throw new UnsupportedOperationException();
case INNER:
sb.append(" INNER JOIN ");
break;
case LEFT_OUTER:
sb.append(" LEFT JOIN ");
break;
case RIGHT_OUTER:
sb.append(" RIGHT JOIN ");
break;
case FULL_OUTER:
sb.append(" FULL JOIN ");
break;
case LEFT_SEMI:
sb.append(" LEFT SEMI JOIN ");
break;
case CROSS:
sb.append(" CROSS JOIN ");
break;
default:
throw new UnsupportedOperationException();
}
visit((TableReferenceNode<HiveExpression, HiveSchema.HiveTable>) join.getRightTable());
if (join.getOnClause() != null) {
Expand Down
8 changes: 3 additions & 5 deletions src/sqlancer/hive/ast/HiveBetweenOperation.java
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,9 @@

import sqlancer.common.ast.newast.NewBetweenOperatorNode;

public class HiveBetweenOperation extends NewBetweenOperatorNode<HiveExpression>
implements HiveExpression {

public HiveBetweenOperation(HiveExpression left, HiveExpression middle, HiveExpression right,
boolean isTrue) {
public class HiveBetweenOperation extends NewBetweenOperatorNode<HiveExpression> implements HiveExpression {

public HiveBetweenOperation(HiveExpression left, HiveExpression middle, HiveExpression right, boolean isTrue) {
super(left, middle, right, isTrue);
}
}
5 changes: 2 additions & 3 deletions src/sqlancer/hive/ast/HiveBinaryOperation.java
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,8 @@
import sqlancer.common.ast.BinaryOperatorNode.Operator;
import sqlancer.common.ast.newast.NewBinaryOperatorNode;

public class HiveBinaryOperation extends NewBinaryOperatorNode<HiveExpression>
implements HiveExpression {

public class HiveBinaryOperation extends NewBinaryOperatorNode<HiveExpression> implements HiveExpression {

public HiveBinaryOperation(HiveExpression left, HiveExpression right, Operator op) {
super(left, right, op);
}
Expand Down
2 changes: 1 addition & 1 deletion src/sqlancer/hive/ast/HiveCaseOperation.java
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import sqlancer.common.ast.newast.NewCaseOperatorNode;

public class HiveCaseOperation extends NewCaseOperatorNode<HiveExpression> implements HiveExpression {

public HiveCaseOperation(HiveExpression switchCondition, List<HiveExpression> conditions,
List<HiveExpression> expressions, HiveExpression elseExpr) {
super(switchCondition, conditions, expressions, elseExpr);
Expand Down
8 changes: 4 additions & 4 deletions src/sqlancer/hive/ast/HiveCastOperation.java
Original file line number Diff line number Diff line change
Expand Up @@ -3,22 +3,22 @@
import sqlancer.hive.HiveSchema.HiveDataType;

public class HiveCastOperation implements HiveExpression {

private final HiveExpression expression;
private final HiveDataType type;

public HiveCastOperation(HiveExpression expression, HiveDataType type) {
if (expression == null) {
throw new AssertionError();
}
this.expression = expression;
this.type = type;
}

public HiveExpression getExpression() {
return expression;
}

public HiveDataType getType() {
return type;
}
Expand Down
2 changes: 1 addition & 1 deletion src/sqlancer/hive/ast/HiveFunction.java
Original file line number Diff line number Diff line change
Expand Up @@ -9,5 +9,5 @@ public class HiveFunction<F> extends NewFunctionNode<HiveExpression, F> implemen
public HiveFunction(List<HiveExpression> args, F func) {
super(args, func);
}

}
2 changes: 1 addition & 1 deletion src/sqlancer/hive/ast/HiveInOperation.java
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import sqlancer.common.ast.newast.NewInOperatorNode;

public class HiveInOperation extends NewInOperatorNode<HiveExpression> implements HiveExpression {

public HiveInOperation(HiveExpression left, List<HiveExpression> right, boolean isNegated) {
super(left, right, isNegated);
}
Expand Down
2 changes: 1 addition & 1 deletion src/sqlancer/hive/ast/HiveJoin.java
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ public enum JoinType {
INNER, LEFT_OUTER, RIGHT_OUTER, FULL_OUTER, LEFT_SEMI, CROSS;
}

public HiveJoin(HiveTableReference leftTable, HiveTableReference rightTable, JoinType joinType,
public HiveJoin(HiveTableReference leftTable, HiveTableReference rightTable, JoinType joinType,
HiveExpression onClause) {
this.leftTable = leftTable;
this.rightTable = rightTable;
Expand Down
4 changes: 2 additions & 2 deletions src/sqlancer/hive/ast/HiveSelect.java
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,11 @@

import sqlancer.common.ast.SelectBase;
import sqlancer.common.ast.newast.Select;
import sqlancer.hive.HiveToStringVisitor;
import sqlancer.hive.HiveSchema.HiveColumn;
import sqlancer.hive.HiveSchema.HiveTable;
import sqlancer.hive.HiveToStringVisitor;

public class HiveSelect extends SelectBase<HiveExpression>
public class HiveSelect extends SelectBase<HiveExpression>
implements Select<HiveJoin, HiveExpression, HiveTable, HiveColumn>, HiveExpression {

private boolean isDistinct;
Expand Down
4 changes: 2 additions & 2 deletions src/sqlancer/hive/ast/HiveTableReference.java
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,11 @@
import sqlancer.common.ast.newast.TableReferenceNode;
import sqlancer.hive.HiveSchema;

public class HiveTableReference extends TableReferenceNode<HiveExpression, HiveSchema.HiveTable>
public class HiveTableReference extends TableReferenceNode<HiveExpression, HiveSchema.HiveTable>
implements HiveExpression {

public HiveTableReference(HiveSchema.HiveTable table) {
super(table);
}

}
}
3 changes: 1 addition & 2 deletions src/sqlancer/hive/ast/HiveUnaryPostfixOperation.java
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,7 @@
import sqlancer.common.ast.BinaryOperatorNode.Operator;
import sqlancer.common.ast.newast.NewUnaryPostfixOperatorNode;

public class HiveUnaryPostfixOperation extends NewUnaryPostfixOperatorNode<HiveExpression>
implements HiveExpression {
public class HiveUnaryPostfixOperation extends NewUnaryPostfixOperatorNode<HiveExpression> implements HiveExpression {

public HiveUnaryPostfixOperation(HiveExpression expr, Operator op) {
super(expr, op);
Expand Down
Loading
Loading